package net.miz_hi.smileessence.menu;
import android.app.Activity;
import net.miz_hi.smileessence.cache.TweetCache;
import net.miz_hi.smileessence.command.ICommand;
import net.miz_hi.smileessence.command.post.CommandAppendHashtag;
import net.miz_hi.smileessence.command.post.CommandInsertText;
import net.miz_hi.smileessence.command.post.CommandMakeAnonymous;
import net.miz_hi.smileessence.command.post.CommandParseMorse;
import net.miz_hi.smileessence.data.template.Template;
import net.miz_hi.smileessence.data.template.TemplateManager;
import net.miz_hi.smileessence.dialog.ExpandMenuDialog;
import java.util.ArrayList;
import java.util.List;
public class PostingMenu extends ExpandMenuDialog
{
public PostingMenu(Activity activity)
{
super(activity);
setTitle("");
}
private List<ICommand> getHashtagMenu()
{
List<ICommand> list = new ArrayList<ICommand>();
for (String hashtag : TweetCache.getHashtagList())
{
list.add(new CommandAppendHashtag(hashtag));
}
return list;
}
private List<ICommand> getTemplateMenu()
{
List<ICommand> list = new ArrayList<ICommand>();
for (Template template : TemplateManager.getTemplates())
{
list.add(new CommandInsertText(template.getText()));
}
return list;
}
@Override
public List<MenuElement> getElements()
{
List<MenuElement> list = new ArrayList<MenuElement>();
list.add(new MenuElement(new CommandParseMorse()));
list.add(new MenuElement(new CommandMakeAnonymous()));
MenuElement template = new MenuElement("");
List<ICommand> templates = getTemplateMenu();
if (!templates.isEmpty())
{
for (ICommand iCommand : templates)
{
template.addChild(new MenuElement(iCommand));
}
list.add(template);
}
MenuElement hashtag = new MenuElement("");
List<ICommand> hashtags = getHashtagMenu();
if (!hashtags.isEmpty())
{
for (ICommand iCommand : hashtags)
{
hashtag.addChild(new MenuElement(iCommand));
}
list.add(hashtag);
}
return list;
}
}
package nl.mpi.arbil.clarin;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.math.BigInteger;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.UUID;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Result;
import javax.xml.transform.Source;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.TransformerFactoryConfigurationError;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import nl.mpi.arbil.GuiHelper;
import nl.mpi.arbil.LinorgJournal;
import nl.mpi.arbil.LinorgSessionStorage;
import nl.mpi.arbil.LinorgWindowManager;
import nl.mpi.arbil.data.ImdiLoader;
import nl.mpi.arbil.data.ImdiTreeObject;
import org.apache.xmlbeans.SchemaProperty;
import org.apache.xmlbeans.SchemaType;
import org.apache.xmlbeans.SchemaTypeSystem;
import org.apache.xmlbeans.XmlBeans;
import org.apache.xmlbeans.XmlException;
import org.apache.xmlbeans.XmlObject;
import org.apache.xmlbeans.XmlOptions;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
public class CmdiComponentBuilder {
public Document getDocument(URI inputUri) throws ParserConfigurationException, SAXException, IOException {
DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
documentBuilderFactory.setValidating(false);
documentBuilderFactory.setNamespaceAware(true);
DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder();
Document document;
if (inputUri == null) {
document = documentBuilder.newDocument();
} else {
String decodeUrlString = URLDecoder.decode(inputUri.toString(), "UTF-8");
document = documentBuilder.parse(decodeUrlString);
}
return document;
}
// private Document createDocument(File inputFile) throws ParserConfigurationException, SAXException, IOException {
// DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
// documentBuilderFactory.setValidating(false);
// DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder();
// Document document = documentBuilder.newDocument();
// return document;
public void savePrettyFormatting(Document document, File outputFile) {
try {
removeDomIds(document); // remove any dom id attributes left over by the imdi api
// set up input and output
DOMSource dOMSource = new DOMSource(document);
FileOutputStream fileOutputStream = new FileOutputStream(outputFile);
StreamResult xmlOutput = new StreamResult(fileOutputStream);
// configure transformer
Transformer transformer = TransformerFactory.newInstance().newTransformer();
//transformer.setOutputProperty(OutputKeys.DOCTYPE_SYSTEM, "testing.dtd");
transformer.setOutputProperty(OutputKeys.INDENT, "yes");
transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "2");
transformer.transform(dOMSource, xmlOutput);
xmlOutput.getOutputStream().close();
// todo: this maybe excessive to do every time
// this schema check has been moved to the point of loading the file rather than saving the file
// XsdChecker xsdChecker = new XsdChecker();
// String checkerResult;
// checkerResult = xsdChecker.simpleCheck(outputFile, outputFile.toURI());
// if (checkerResult != null) {
// hasSchemaError = true;
//// LinorgWindowManager.getSingleInstance().addMessageDialogToQueue(checkerResult, "Schema Check");
//System.out.println(xmlOutput.getWriter().toString());
} catch (IllegalArgumentException illegalArgumentException) {
GuiHelper.linorgBugCatcher.logError(illegalArgumentException);
} catch (TransformerException transformerException) {
GuiHelper.linorgBugCatcher.logError(transformerException);
} catch (TransformerFactoryConfigurationError transformerFactoryConfigurationError) {
System.out.println(transformerFactoryConfigurationError.getMessage());
} catch (FileNotFoundException notFoundException) {
GuiHelper.linorgBugCatcher.logError(notFoundException);
} catch (IOException iOException) {
GuiHelper.linorgBugCatcher.logError(iOException);
}
}
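// A minimal round-trip sketch (illustrative only; "metadataFile" is a
// hypothetical File): parse with getDocument, modify the DOM as needed,
// then write it back pretty-printed with savePrettyFormatting.
//
//   Document doc = getDocument(metadataFile.toURI());
//   // ... modify the DOM ...
//   savePrettyFormatting(doc, metadataFile);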
public URI insertResourceProxy(ImdiTreeObject imdiTreeObject, ImdiTreeObject resourceNode) {
// there is no need to save the node at this point because metadatabuilder has already done so
synchronized (imdiTreeObject.getParentDomLockObject()) {
// <.CMD.Resources.ResourceProxyList.ResourceProxy>
// <ResourceProxyList>
// <ResourceProxy id="a_text">
// <ResourceType>Resource</ResourceType>
// <ResourceRef>bla.txt</ResourceRef>
// </ResourceProxy>
String targetXmlPath = imdiTreeObject.getURI().getFragment();
if (targetXmlPath == null) {
// todo: consider making sure that the dom parent node always has a path
targetXmlPath = ".CMD.Components." + imdiTreeObject.getParentDomNode().nodeTemplate.loadedTemplateName;
}
System.out.println("insertResourceProxy: " + targetXmlPath);
// File cmdiNodeFile = imdiTreeObject.getFile();
// String nodeFragment = "";
// generate a UUID for the new resource
String resourceProxyId = UUID.randomUUID().toString();
try {
// load the schema
SchemaType schemaType = getFirstSchemaType(imdiTreeObject.getNodeTemplate().templateFile);
// load the dom
Document targetDocument = getDocument(imdiTreeObject.getURI());
// insert the new section
try {
try {
// if (targetXmlPath == null) {
// targetXmlPath = ".CMD.Components";
Node documentNode = selectSingleNode(targetDocument, targetXmlPath);
Node previousRefNode = documentNode.getAttributes().getNamedItem("ref");
if (previousRefNode != null) {
String previousRefValue = documentNode.getAttributes().getNamedItem("ref").getNodeValue();
// todo: remove old resource nodes that this one overwrites
}
((Element) documentNode).setAttribute("ref", resourceProxyId);
} catch (TransformerException exception) {
GuiHelper.linorgBugCatcher.logError(exception);
return null;
}
// printoutDocument(targetDocument);
Node addedResourceNode = insertSectionToXpath(targetDocument, targetDocument.getFirstChild(), schemaType, ".CMD.Resources.ResourceProxyList", ".CMD.Resources.ResourceProxyList.ResourceProxy");
addedResourceNode.getAttributes().getNamedItem("id").setNodeValue(resourceProxyId);
for (Node childNode = addedResourceNode.getFirstChild(); childNode != null; childNode = childNode.getNextSibling()) {
String localName = childNode.getNodeName();
if ("ResourceType".equals(localName)) {
childNode.setTextContent(resourceNode.mpiMimeType);
}
if ("ResourceRef".equals(localName)) {
childNode.setTextContent(resourceNode.getUrlString());
}
}
} catch (Exception exception) {
GuiHelper.linorgBugCatcher.logError(exception);
return null;
}
// bump the history
imdiTreeObject.bumpHistory();
// save the dom
savePrettyFormatting(targetDocument, imdiTreeObject.getFile()); // note that we want to make sure that this gets saved even without changes because we have bumped the history and there will be no file otherwise
} catch (IOException exception) {
GuiHelper.linorgBugCatcher.logError(exception);
return null;
} catch (ParserConfigurationException exception) {
GuiHelper.linorgBugCatcher.logError(exception);
return null;
} catch (SAXException exception) {
GuiHelper.linorgBugCatcher.logError(exception);
return null;
}
return imdiTreeObject.getURI();
}
}
public boolean removeChildNodes(ImdiTreeObject imdiTreeObject, String nodePaths[]) {
if (imdiTreeObject.getNeedsSaveToDisk()) {
imdiTreeObject.saveChangesToCache(true);
}
synchronized (imdiTreeObject.getParentDomLockObject()) {
System.out.println("removeChildNodes: " + imdiTreeObject);
File cmdiNodeFile = imdiTreeObject.getFile();
try {
Document targetDocument = getDocument(imdiTreeObject.getURI());
// collect up all the nodes to be deleted without changing the xpath
ArrayList<Node> selectedNodes = new ArrayList<Node>();
for (String currentNodePath : nodePaths) {
System.out.println("removeChildNodes: " + currentNodePath);
// todo: search for and remove any resource links referenced by this node or its sub nodes
Node documentNode = selectSingleNode(targetDocument, currentNodePath);
selectedNodes.add(documentNode);
}
// delete all the nodes now that the xpath is no longer relevant
for (Node currentNode : selectedNodes) {
// todo: there may be an issue here when deleting a languages node with two languages within it
currentNode.getParentNode().removeChild(currentNode);
}
// bump the history
imdiTreeObject.bumpHistory();
// save the dom
savePrettyFormatting(targetDocument, cmdiNodeFile);
for (String currentNodePath : nodePaths) {
// todo: log to journal file
}
return true;
} catch (ParserConfigurationException exception) {
GuiHelper.linorgBugCatcher.logError(exception);
} catch (SAXException exception) {
GuiHelper.linorgBugCatcher.logError(exception);
} catch (IOException exception) {
GuiHelper.linorgBugCatcher.logError(exception);
} catch (TransformerException exception) {
GuiHelper.linorgBugCatcher.logError(exception);
}
return false;
}
}
public boolean setFieldValues(ImdiTreeObject imdiTreeObject, FieldUpdateRequest[] fieldUpdates) {
synchronized (imdiTreeObject.getParentDomLockObject()) {
//new ImdiUtils().addDomIds(imdiTreeObject.getURI()); // testing only
System.out.println("setFieldValues: " + imdiTreeObject);
File cmdiNodeFile = imdiTreeObject.getFile();
try {
Document targetDocument = getDocument(imdiTreeObject.getURI());
for (FieldUpdateRequest currentFieldUpdate : fieldUpdates) {
System.out.println("currentFieldUpdate: " + currentFieldUpdate.fieldPath);
// todo: search for and remove any resource links referenced by this node or its sub nodes
Node documentNode = selectSingleNode(targetDocument, currentFieldUpdate.fieldPath);
NamedNodeMap attributesMap = documentNode.getAttributes();
if (currentFieldUpdate.fieldOldValue.equals(documentNode.getTextContent())) {
documentNode.setTextContent(currentFieldUpdate.fieldNewValue);
} else {
GuiHelper.linorgBugCatcher.logError(new Exception("expecting \'" + currentFieldUpdate.fieldOldValue + "\' not \'" + documentNode.getTextContent() + "\' in " + currentFieldUpdate.fieldPath));
return false;
}
Node keyNameNode = attributesMap.getNamedItem("Name");
if (keyNameNode != null && currentFieldUpdate.keyNameValue != null) {
keyNameNode.setNodeValue(currentFieldUpdate.keyNameValue);
}
Node languageNode = attributesMap.getNamedItem("LanguageId");
if (languageNode == null) {
languageNode = attributesMap.getNamedItem("xml:lang");
}
if (languageNode != null && currentFieldUpdate.fieldLanguageId != null) {
languageNode.setNodeValue(currentFieldUpdate.fieldLanguageId);
}
}
// bump the history
imdiTreeObject.bumpHistory();
// save the dom
savePrettyFormatting(targetDocument, cmdiNodeFile);
for (FieldUpdateRequest currentFieldUpdate : fieldUpdates) {
// log to journal file
LinorgJournal.getSingleInstance().saveJournalEntry(imdiTreeObject.getUrlString(), currentFieldUpdate.fieldPath, currentFieldUpdate.fieldOldValue, currentFieldUpdate.fieldNewValue, "save");
if (currentFieldUpdate.fieldLanguageId != null) {
LinorgJournal.getSingleInstance().saveJournalEntry(imdiTreeObject.getUrlString(), currentFieldUpdate.fieldPath + ":LanguageId", currentFieldUpdate.fieldLanguageId, "", "save");
}
if (currentFieldUpdate.keyNameValue != null) {
LinorgJournal.getSingleInstance().saveJournalEntry(imdiTreeObject.getUrlString(), currentFieldUpdate.fieldPath + ":Name", currentFieldUpdate.keyNameValue, "", "save");
}
}
return true;
} catch (ParserConfigurationException exception) {
GuiHelper.linorgBugCatcher.logError(exception);
} catch (SAXException exception) {
GuiHelper.linorgBugCatcher.logError(exception);
} catch (IOException exception) {
GuiHelper.linorgBugCatcher.logError(exception);
} catch (TransformerException exception) {
GuiHelper.linorgBugCatcher.logError(exception);
}
return false;
}
}
public void testInsertFavouriteComponent() {
try {
ImdiTreeObject favouriteImdiTreeObject1 = ImdiLoader.getSingleInstance().getImdiObjectWithoutLoading(new URI("file:/Users/petwit/.arbil/favourites/fav-784841449583527834.imdi#.METATRANSCRIPT.Session.MDGroup.Actors.Actor"));
ImdiTreeObject favouriteImdiTreeObject2 = ImdiLoader.getSingleInstance().getImdiObjectWithoutLoading(new URI("file:/Users/petwit/.arbil/favourites/fav-784841449583527834.imdi#.METATRANSCRIPT.Session.MDGroup.Actors.Actor(2)"));
ImdiTreeObject destinationImdiTreeObject = ImdiLoader.getSingleInstance().getImdiObjectWithoutLoading(new URI("file:/Users/petwit/.arbil/imdicache/20100527141926/20100527141926.imdi"));
insertFavouriteComponent(destinationImdiTreeObject, favouriteImdiTreeObject1);
insertFavouriteComponent(destinationImdiTreeObject, favouriteImdiTreeObject2);
} catch (URISyntaxException exception) {
GuiHelper.linorgBugCatcher.logError(exception);
}
}
public URI insertFavouriteComponent(ImdiTreeObject destinationImdiTreeObject, ImdiTreeObject favouriteImdiTreeObject) {
URI returnUri = null;
// this node has already been saved in the metadatabuilder which called this
// but let's check again in case this gets called elsewhere, and to keep things consistent
if (destinationImdiTreeObject.getNeedsSaveToDisk()) {
destinationImdiTreeObject.saveChangesToCache(true);
}
try {
Document favouriteDocument;
synchronized (favouriteImdiTreeObject.getParentDomLockObject()) {
favouriteDocument = getDocument(favouriteImdiTreeObject.getURI());
}
synchronized (destinationImdiTreeObject.getParentDomLockObject()) {
Document destinationDocument = getDocument(destinationImdiTreeObject.getURI());
String favouriteXpath = favouriteImdiTreeObject.getURI().getFragment();
String favouriteXpathTrimmed = favouriteXpath.replaceFirst("\\.[^(^.]+$", "");
boolean onlySubNodes = !favouriteXpathTrimmed.equals(favouriteXpath);
System.out.println("favouriteXpath: " + favouriteXpathTrimmed);
String destinationXpath;
if (onlySubNodes) {
destinationXpath = favouriteXpathTrimmed;
} else {
destinationXpath = favouriteXpathTrimmed.replaceFirst("\\.[^.]+$", "");
}
System.out.println("destinationXpath: " + destinationXpath);
Node destinationNode = selectSingleNode(destinationDocument, destinationXpath);
Node selectedNode = selectSingleNode(favouriteDocument, favouriteXpathTrimmed);
Node importedNode = destinationDocument.importNode(selectedNode, true);
Node[] favouriteNodes;
if (onlySubNodes) {
NodeList selectedNodeList = importedNode.getChildNodes();
favouriteNodes = new Node[selectedNodeList.getLength()];
for (int nodeCounter = 0; nodeCounter < selectedNodeList.getLength(); nodeCounter++) {
favouriteNodes[nodeCounter] = selectedNodeList.item(nodeCounter);
}
} else {
favouriteNodes = new Node[]{importedNode};
}
for (Node singleFavouriteNode : favouriteNodes) {
if (singleFavouriteNode.getNodeType() != Node.TEXT_NODE) {
destinationNode.appendChild(singleFavouriteNode);
System.out.println("inserting favouriteNode: " + singleFavouriteNode.getLocalName());
}
}
savePrettyFormatting(destinationDocument, destinationImdiTreeObject.getFile());
try {
String nodeFragment;
if (favouriteNodes.length != 1) {
nodeFragment = destinationXpath; // in this case show the target node
} else {
nodeFragment = convertNodeToNodePath(destinationDocument, favouriteNodes[0], destinationXpath);
}
System.out.println("nodeFragment: " + nodeFragment);
// return the child node url and path in the xml
// first strip off any fragment then add the full node fragment
returnUri = new URI(destinationImdiTreeObject.getURI().toString().split("#")[0] + "#" + nodeFragment);
} catch (URISyntaxException exception) {
GuiHelper.linorgBugCatcher.logError(exception);
}
}
} catch (IOException exception) {
GuiHelper.linorgBugCatcher.logError(exception);
} catch (ParserConfigurationException exception) {
GuiHelper.linorgBugCatcher.logError(exception);
} catch (SAXException exception) {
GuiHelper.linorgBugCatcher.logError(exception);
} catch (TransformerException exception) {
GuiHelper.linorgBugCatcher.logError(exception);
}
return returnUri;
}
public URI insertChildComponent(ImdiTreeObject imdiTreeObject, String targetXmlPath, String cmdiComponentId) {
if (imdiTreeObject.getNeedsSaveToDisk()) {
imdiTreeObject.saveChangesToCache(true);
}
synchronized (imdiTreeObject.getParentDomLockObject()) {
System.out.println("insertChildComponent: " + cmdiComponentId);
System.out.println("targetXmlPath: " + targetXmlPath);
// check for issues with the path
if (targetXmlPath == null) {
targetXmlPath = cmdiComponentId.replaceAll("\\.[^.]+$", "");
} else if (targetXmlPath.replaceAll("\\(\\d+\\)", "").length() == cmdiComponentId.length()) {
// trim the last path component if the destination equals the new node path
// i.e. xsdPath: .CMD.Components.Session.Resources.MediaFile.Keys.Key into .CMD.Components.Session.Resources.MediaFile(1).Keys.Key
targetXmlPath = targetXmlPath.replaceAll("\\.[^.]+$", "");
}
// make sure the target xpath has all the required parts
String[] cmdiComponentArray = cmdiComponentId.split("\\.");
String[] targetXmlPathArray = targetXmlPath.replaceAll("\\(\\d+\\)", "").split("\\.");
for (int pathPartCounter = targetXmlPathArray.length; pathPartCounter < cmdiComponentArray.length - 1; pathPartCounter++) {
System.out.println("adding missing path component: " + cmdiComponentArray[pathPartCounter]);
targetXmlPath = targetXmlPath + "." + cmdiComponentArray[pathPartCounter];
}
// end path corrections
System.out.println("trimmed targetXmlPath: " + targetXmlPath);
//String targetXpath = targetNode.getURI().getFragment();
//System.out.println("targetXpath: " + targetXpath);
// File cmdiNodeFile = imdiTreeObject.getFile();
String nodeFragment = "";
try {
// load the schema
SchemaType schemaType = getFirstSchemaType(imdiTreeObject.getNodeTemplate().templateFile);
// load the dom
Document targetDocument = getDocument(imdiTreeObject.getURI());
// insert the new section
try {
// printoutDocument(targetDocument);
Node addedNode = insertSectionToXpath(targetDocument, targetDocument.getFirstChild(), schemaType, targetXmlPath, cmdiComponentId);
nodeFragment = convertNodeToNodePath(targetDocument, addedNode, targetXmlPath);
} catch (Exception exception) {
GuiHelper.linorgBugCatcher.logError(exception);
return null;
}
// bump the history
imdiTreeObject.bumpHistory();
// save the dom
savePrettyFormatting(targetDocument, imdiTreeObject.getFile()); // note that we want to make sure that this gets saved even without changes because we have bumped the history and there will be no file otherwise
} catch (IOException exception) {
GuiHelper.linorgBugCatcher.logError(exception);
return null;
} catch (ParserConfigurationException exception) {
GuiHelper.linorgBugCatcher.logError(exception);
return null;
} catch (SAXException exception) {
GuiHelper.linorgBugCatcher.logError(exception);
return null;
}
// diff_match_patch diffTool= new diff_match_patch();
// diffTool.diff_main(targetXpath, targetXpath);
try {
System.out.println("nodeFragment: " + nodeFragment);
// return the child node url and path in the xml
// first strip off any fragment then add the full node fragment
return new URI(imdiTreeObject.getURI().toString().split("#")[0] + "#" + nodeFragment);
} catch (URISyntaxException exception) {
GuiHelper.linorgBugCatcher.logError(exception);
return null;
}
}
}
public void testRemoveArchiveHandles() {
try {
Document workingDocument = getDocument(new URI("http://corpus1.mpi.nl/qfs1/media-archive/Corpusstructure/MPI.imdi"));
removeArchiveHandles(workingDocument);
printoutDocument(workingDocument);
} catch (Exception exception) {
GuiHelper.linorgBugCatcher.logError(exception);
}
}
public void removeArchiveHandles(ImdiTreeObject imdiTreeObject) {
synchronized (imdiTreeObject.getParentDomLockObject()) {
try {
Document workingDocument = getDocument(imdiTreeObject.getURI());
removeArchiveHandles(workingDocument);
savePrettyFormatting(workingDocument, imdiTreeObject.getFile());
} catch (Exception exception) {
GuiHelper.linorgBugCatcher.logError(exception);
}
}
}
private void removeDomIds(Document targetDocument) {
package org.inferred.cjp39.j8stages;
import static org.inferred.cjp39.j8stages.LocalExecutor.runNow;
import static org.inferred.cjp39.j8stages.LocalExecutor.runSoon;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import java.util.Optional;
import java.util.concurrent.CancellationException;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.CompletionStage;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.Function;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.Uninterruptibles;
public class MyFuture<T> implements CompletionStage<T> {
public static <T> MyFuture<T> completed(T value) {
return new MyFuture<>(value);
}
public static <T> MyFuture<T> cancelled() {
return new MyFuture<>(new CancellationException());
}
public static <T> MyFuture<T> completedExceptionally(Throwable t) {
return new MyFuture<>(withFailure(t));
}
/**
* Returns a future maintaining the same completion properties as
* {@code listenableFuture}. Cancelling the returned future will cancel the
* input future; however, setting its value via {@link #complete(Object)} or
* {@link #completeExceptionally(Throwable)} will not modify the input
* future.
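*
* <p>
* A minimal usage sketch (the {@code SettableFuture} is only an
* illustrative Guava future; any {@code ListenableFuture} works):
*
* <pre>{@code
* SettableFuture<String> guavaFuture = SettableFuture.create();
* MyFuture<String> wrapped = MyFuture.from(guavaFuture);
* wrapped.cancel(); // also cancels guavaFuture
* }</pre>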
*/
public static <T> MyFuture<T> from(ListenableFuture<T> listenableFuture) {
if (listenableFuture instanceof CompletionStage) {
/*
* Converting from a CompletionStage is much more efficient, as no
* exception needs to be thrown and caught. However, until Guava
* catches up with Java 8, we have to assume that nobody would
* create a subclass of ListenableFuture<T> and CompletionStage<U>
* where U != T.
*/
@SuppressWarnings("unchecked")
CompletionStage<T> stage = (CompletionStage<T>) listenableFuture;
return from(stage);
}
MyFuture<T> future = new MyFuture<>();
listenableFuture.addListener(() -> {
try {
T result = Uninterruptibles.getUninterruptibly(listenableFuture);
future.complete(result);
} catch (ExecutionException e) {
// unwrap the original failure reported by the input future
future.completeExceptionally(e.getCause());
} catch (RuntimeException | Error e) {
future.completeExceptionally(e);
}
} , Runnable::run);
future.addInternalCallback(state -> {
if (state instanceof Failure) {
if (((Failure) state).cause instanceof CancellationException) {
listenableFuture.cancel(false);
}
}
});
return future;
}
/**
* Returns a future maintaining the same completion properties as
* {@code stage}. If {@code stage} is already a {@code MyFuture}, it will be
* returned directly. If it is a {@link Future}, it will be cancelled if the
* returned future is cancelled. If it is a {@link CompletableFuture}, it
* will also be updated if the returned future is completed.
*/
public static <T> MyFuture<T> from(CompletionStage<T> stage) {
if (stage instanceof MyFuture) {
return (MyFuture<T>) stage;
} else if (stage instanceof CompletableFuture) {
return from((CompletableFuture<T>) stage);
}
MyFuture<T> future = new MyFuture<>();
future.completeFrom(stage);
if (stage instanceof Future) {
future.addInternalCallback(state -> {
if (state instanceof Failure) {
if (((Failure) state).cause instanceof CancellationException) {
((Future<?>) stage).cancel(false);
}
}
});
}
return future;
}
/**
* Returns a future maintaining the same completion properties as
* {@code stage}. {@code stage} will be updated if the returned future is
* completed or cancelled.
*/
public static <T> MyFuture<T> from(CompletableFuture<T> stage) {
MyFuture<T> future = new MyFuture<>();
future.completeFrom(stage);
future.addInternalCallback(state -> {
if (state instanceof Failure) {
stage.completeExceptionally(((Failure) state).cause);
} else {
@SuppressWarnings("unchecked")
T value = (T) state;
stage.complete(value);
}
});
return future;
}
/**
* Returns a new CompletionStage that, when either {@code first} or
* {@code second} completes, completes with the corresponding result (or
* exception).
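*
* <p>
* A small sketch (stage names are illustrative):
*
* <pre>{@code
* MyFuture<String> fast = MyFuture.completed("fast");
* MyFuture<String> slow = new MyFuture<>();
* MyFuture<String> winner = MyFuture.eitherOf(fast, slow);
* String result = winner.getNow(null); // "fast"
* }</pre>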
*/
public static <T> MyFuture<T> eitherOf(CompletionStage<? extends T> first, CompletionStage<? extends T> second) {
MyFuture<T> result = new MyFuture<>();
result.completeFrom(first);
result.completeFrom(second);
return result;
}
/**
* Returns a new CompletionStage that, when any input stage completes,
* completes with the corresponding result (or exception).
*/
@SafeVarargs
public static <T> MyFuture<T> anyOf(
CompletionStage<? extends T> first,
CompletionStage<? extends T> second,
CompletionStage<? extends T>... others) {
MyFuture<T> result = eitherOf(first, second);
if (others != null) {
for (CompletionStage<? extends T> other : others) {
result.completeFrom(other);
}
}
return result;
}
public static <T> MyFuture<T> anyOf(Iterable<CompletionStage<? extends T>> stages) {
MyFuture<T> result = new MyFuture<>();
boolean anyStage = false;
for (CompletionStage<? extends T> stage : stages) {
result.completeFrom(stage);
anyStage = true;
}
checkArgument(anyStage, "Must provide at least one input stage");
return result;
}
@VisibleForTesting
static class RethrownException extends Exception {
private static final long serialVersionUID = 1296586904077392396L;
RethrownException(String message) {
super(message);
}
}
@SuppressWarnings("rawtypes")
private static final AtomicReferenceFieldUpdater<MyFuture, Object> STATE =
AtomicReferenceFieldUpdater.newUpdater(MyFuture.class, Object.class, "state");
/**
* Stores the current state, encoded as follows:
* <ul>
* <li>If the stage is incomplete: a {@link Callback} instance
* <li>If the stage completed exceptionally: a {@link Failure} instance
* <li>Otherwise, the result of the stage (may be null).
* </ul>
*/
private volatile Object state;
public MyFuture() {
Optional<Executor> executor = LocalExecutor.multistepExecutor();
state = executor.<Object>map(NoCallbackWithExecutor::new).orElse(NOTHING);
}
private MyFuture(Object state) {
this.state = state;
}
public ListenableFuture<T> toListenableFuture() {
return new ListenableFutureBridge();
}
private static final CancellationAction<CancellationException> CANCELLATION_EXCEPTION =
(cause) -> (CancellationException) new CancellationException().initCause(cause);
private static CompletionException rethrow(Throwable t) {
if (t instanceof RuntimeException) {
RuntimeException e = (RuntimeException) t;
RuntimeException clone = ExceptionCloner.clone(e);
clone.addSuppressed(new RethrownException("Completion exception rethrown"));
throw clone;
} else if (t instanceof Error) {
Error e = (Error) t;
Error clone = ExceptionCloner.clone(e);
clone.addSuppressed(new RethrownException("Completion error rethrown"));
throw clone;
}
return new CompletionException(t);
}
/**
* Returns the result value (or throws any encountered exception) if
* completed, else returns the given valueIfAbsent.
*
* <p>
* If the future completed exceptionally with a runtime exception or error,
* a copy will be thrown; if with a checked exception, it will be wrapped in
* a {@link CompletionException}.
*
* @param valueIfAbsent
* the value to return if not completed
* @return the result value, if completed, else the given valueIfAbsent
* @throws CancellationException
* if the computation was cancelled
* @throws RuntimeException
* if this future completed exceptionally with a
* RuntimeException
* @throws CompletionException
* if this future completed exceptionally with a checked
* exception
* @throws Error
* if this future completed exceptionally with an Error
*/
public T getNow(T valueIfAbsent) {
return internalGet(MyFuture::rethrow, MyFuture::rethrow, null, () -> valueIfAbsent);
}
public T get() throws InterruptedException {
return internalGet(MyFuture::rethrow, MyFuture::rethrow, CountDownLatch::await, null);
}
public T getUninterruptibly() {
return internalGet(MyFuture::rethrow, MyFuture::rethrow, Uninterruptibles::awaitUninterruptibly, null);
}
public T tryGet(long timeout, TimeUnit unit) throws InterruptedException, TimeoutException {
return internalGet(MyFuture::rethrow, MyFuture::rethrow, latch -> latch.await(timeout, unit), () -> {
throw new TimeoutException();
});
}
public T tryGetUninterruptibly(long timeout, TimeUnit unit) throws TimeoutException {
return internalGet(CompletionException::new, CANCELLATION_EXCEPTION, latch -> {
Uninterruptibles.awaitUninterruptibly(latch, timeout, unit);
} , () -> {
throw new TimeoutException();
});
}
public boolean isCancelled() {
Object currentState = state;
if (currentState instanceof Failure) {
Failure failure = (Failure) currentState;
return (failure.cause instanceof CancellationException);
}
return false;
}
public boolean isCompletedExceptionally() {
return (state instanceof Failure);
}
public boolean isDone() {
return !(state instanceof Callback);
}
/**
* If not already completed, sets the value returned by {@link #get()} and
* related methods to the given value.
*
* <p>
* Synchronous callbacks and dependent stages will be executed on this
* thread, but may not be scheduled immediately, to avoid deep recursion.
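*
* <p>
* A brief sketch (names are illustrative):
*
* <pre>{@code
* MyFuture<Integer> answer = new MyFuture<>();
* answer.thenAccept(v -> System.out.println("got " + v));
* answer.complete(42); // triggers the callback with 42
* }</pre>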
*
* @param value
* the result value
* @return {@code true} if this invocation caused this future to transition
* to a completed state, else {@code false}
* @see LocalExecutor
*/
public boolean complete(T value) {
return completeInternal(value);
}
/**
* If not already completed, causes invocations of {@link #get()} and
* related methods to throw the given exception.
*
* <p>
* Synchronous callbacks and dependent stages will be executed on this
* thread, but may not be scheduled immediately, to avoid deep recursion.
*
* @param t
* the exception
* @return {@code true} if this invocation caused this future to transition
* to a completed state, else {@code false}
* @see LocalExecutor
*/
public boolean completeExceptionally(Throwable t) {
return completeInternal(withFailure(checkNotNull(t)));
}
/**
* If not already completed, completes this CompletableFuture with a
* {@link CancellationException}.
*
* <p>
* Synchronous callbacks and dependent stages will be executed on this
* thread, but may not be scheduled immediately, to avoid deep recursion.
*
* @return {@code true} if this task is now cancelled
* @see LocalExecutor
*/
public boolean cancel() {
return completeInternal(withFailure(new CancellationException())) || isCancelled();
}
public void addCallback(Runnable callback, Executor e) {
addInternalCallback(wrap(callback, e));
}
public void addCallback(FutureCallback<T> callback, Executor e) {
addInternalCallback(wrap(callback, e));
}
@Override
public <U> MyFuture<U> thenApply(Function<? super T, ? extends U> fn) {
return thenApplyAsync(fn, Runnable::run);
}
@Override
public <U> MyFuture<U> thenApplyAsync(Function<? super T, ? extends U> fn) {
return thenApplyAsync(fn, ForkJoinPool.commonPool());
}
@Override
public <U> MyFuture<U> thenApplyAsync(Function<? super T, ? extends U> fn, Executor executor) {
return this.addInternalCallback(new TransformingCallback<T, U>(fn, executor)).getFuture();
}
@Override
public MyFuture<Void> thenAccept(Consumer<? super T> action) {
return thenApply(input -> {
action.accept(input);
return null;
});
}
@Override
public MyFuture<Void> thenAcceptAsync(Consumer<? super T> action) {
return thenApplyAsync(input -> {
action.accept(input);
return null;
});
}
@Override
public MyFuture<Void> thenAcceptAsync(Consumer<? super T> action, Executor executor) {
return thenApplyAsync(input -> {
action.accept(input);
return null;
} , executor);
}
@Override
public MyFuture<Void> thenRun(Runnable action) {
return thenApply(input -> {
action.run();
return null;
});
}
@Override
public MyFuture<Void> thenRunAsync(Runnable action) {
return thenApplyAsync(input -> {
action.run();
return null;
});
}
@Override
public MyFuture<Void> thenRunAsync(Runnable action, Executor executor) {
return thenApplyAsync(input -> {
action.run();
return null;
} , executor);
}
@Override
public <U, V> MyFuture<V> thenCombine(
CompletionStage<? extends U> other,
BiFunction<? super T, ? super U, ? extends V> fn) {
return thenCombineAsync(other, fn, Runnable::run);
}
@Override
public <U, V> MyFuture<V> thenCombineAsync(
CompletionStage<? extends U> other,
BiFunction<? super T, ? super U, ? extends V> fn) {
return thenCombineAsync(other, fn, ForkJoinPool.commonPool());
}
@Override
public <U, V> MyFuture<V> thenCombineAsync(
CompletionStage<? extends U> other,
BiFunction<? super T, ? super U, ? extends V> fn,
Executor executor) {
return addInternalCallback(new CombiningCallback<T, U, V>(other, fn, executor)).getFuture();
}
@Override
public <U> MyFuture<Void> thenAcceptBoth(
CompletionStage<? extends U> other,
BiConsumer<? super T, ? super U> action) {
return thenAcceptBothAsync(other, action, Runnable::run);
}
@Override
public <U> MyFuture<Void> thenAcceptBothAsync(
CompletionStage<? extends U> other,
BiConsumer<? super T, ? super U> action) {
return thenAcceptBothAsync(other, action, ForkJoinPool.commonPool());
}
@Override
public <U> MyFuture<Void> thenAcceptBothAsync(
CompletionStage<? extends U> other,
BiConsumer<? super T, ? super U> action,
Executor executor) {
return thenCombineAsync(other, (t, u) -> {
action.accept(t, u);
return null;
} , executor);
}
@Override
public MyFuture<Void> runAfterBoth(CompletionStage<?> other, Runnable action) {
return runAfterBothAsync(other, action, Runnable::run);
}
@Override
public MyFuture<Void> runAfterBothAsync(CompletionStage<?> other, Runnable action) {
return runAfterBothAsync(other, action, ForkJoinPool.commonPool());
}
@Override
public MyFuture<Void> runAfterBothAsync(CompletionStage<?> other, Runnable action, Executor executor) {
return thenCombineAsync(other, (t, u) -> {
action.run();
return null;
} , executor);
}
/**
* Returns a new future that, when either this future or {@code other}
* completes, completes with the corresponding result (or exception).
*/
public MyFuture<T> or(CompletionStage<? extends T> other) {
return eitherOf(this, other);
}
@Override
public <U> MyFuture<U> applyToEither(CompletionStage<? extends T> other, Function<? super T, U> fn) {
return or(other).thenApply(fn);
}
@Override
public <U> MyFuture<U> applyToEitherAsync(CompletionStage<? extends T> other, Function<? super T, U> fn) {
return or(other).thenApplyAsync(fn);
}
@Override
public <U> MyFuture<U> applyToEitherAsync(
CompletionStage<? extends T> other,
Function<? super T, U> fn,
Executor executor) {
return or(other).thenApplyAsync(fn, executor);
}
@Override
public MyFuture<Void> acceptEither(CompletionStage<? extends T> other, Consumer<? super T> action) {
return or(other).thenAccept(action);
}
@Override
public MyFuture<Void> acceptEitherAsync(CompletionStage<? extends T> other, Consumer<? super T> action) {
return or(other).thenAcceptAsync(action);
}
@Override
public MyFuture<Void> acceptEitherAsync(
CompletionStage<? extends T> other,
Consumer<? super T> action,
Executor executor) {
return or(other).thenAcceptAsync(action, executor);
}
@Override
public MyFuture<Void> runAfterEither(CompletionStage<?> other, Runnable action) {
return eitherOf(this, other).thenRun(action);
}
@Override
public MyFuture<Void> runAfterEitherAsync(CompletionStage<?> other, Runnable action) {
return eitherOf(this, other).thenRunAsync(action);
}
@Override
public MyFuture<Void> runAfterEitherAsync(CompletionStage<?> other, Runnable action, Executor executor) {
return eitherOf(this, other).thenRunAsync(action, executor);
}
@Override
public <U> MyFuture<U> thenCompose(Function<? super T, ? extends CompletionStage<U>> fn) {
return thenComposeAsync(fn, Runnable::run);
}
@Override
public <U> MyFuture<U> thenComposeAsync(Function<? super T, ? extends CompletionStage<U>> fn) {
return thenComposeAsync(fn, ForkJoinPool.commonPool());
}
@Override
public <U> MyFuture<U> thenComposeAsync(Function<? super T, ? extends CompletionStage<U>> fn, Executor executor) {
return addInternalCallback(new ComposingCallback<>(fn, executor)).getFuture();
}
@Override
public MyFuture<T> exceptionally(Function<Throwable, ? extends T> fn) {
return handleAsync((t, x) -> (x != null) ? fn.apply(x) : t, Runnable::run);
}
/**
* Returns a new future with the same result or exception as this future,
* that executes the given action when this future completes.
*
* <p>
* When this future is complete, the given action is invoked with the result
* (or {@code null} if none) and the exception (or {@code null} if none) of
* this future as arguments. The returned future is completed when the
* action returns. If the supplied action itself encounters an exception,
* then the returned future exceptionally completes with this exception
* unless this future also completed exceptionally, in which case it is
* added as a suppressed exception.
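*
* <p>
* A small sketch (assumes {@code future} is some {@code MyFuture<String>}):
*
* <pre>{@code
* future.whenComplete((value, error) -> {
*     if (error != null) {
*         error.printStackTrace();
*     } else {
*         System.out.println("done: " + value);
*     }
* });
* }</pre>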
*
* @param action
* the action to perform
* @return the new CompletionStage
*/
@Override
public MyFuture<T> whenComplete(BiConsumer<? super T, ? super Throwable> action) {
return whenCompleteAsync(action, Runnable::run);
}
/**
* Returns a new future with the same result or exception as this future,
* that executes the given action using this future's default asynchronous
* execution facility when this future completes.
*
* <p>
* When this future is complete, the given action is invoked with the result
* (or {@code null} if none) and the exception (or {@code null} if none) of
* this future as arguments. The returned future is completed when the
* action returns. If the supplied action itself encounters an exception,
* then the returned future exceptionally completes with this exception
* unless this future also completed exceptionally, in which case it is
* added as a suppressed exception.
*
* @param action
* the action to perform
* @return the new future
*/
@Override
public MyFuture<T> whenCompleteAsync(BiConsumer<? super T, ? super Throwable> action) {
return whenCompleteAsync(action, ForkJoinPool.commonPool());
}
/**
* Returns a new future with the same result or exception as this future,
* that executes the given action using the supplied Executor when this
* future completes.
*
* <p>
* When this future is complete, the given action is invoked with the result
* (or {@code null} if none) and the exception (or {@code null} if none) of
* this future as arguments. The returned future is completed when the
* action returns. If the supplied action itself encounters an exception,
* then the returned future exceptionally completes with this exception
* unless this future also completed exceptionally, in which case it is
* added as a suppressed exception.
*
* @param action
* the action to perform
* @param executor
* the executor to use for asynchronous execution
* @return the new future
*/
@Override
public MyFuture<T> whenCompleteAsync(BiConsumer<? super T, ? super Throwable> action, Executor executor) {
return addInternalCallback(new WhenCompleteCallback<T>(action, executor)).getFuture();
}
@Override
public <U> MyFuture<U> handle(BiFunction<? super T, Throwable, ? extends U> fn) {
return handleAsync(fn, Runnable::run);
}
@Override
public <U> MyFuture<U> handleAsync(BiFunction<? super T, Throwable, ? extends U> fn) {
return handleAsync(fn, ForkJoinPool.commonPool());
}
@Override
public <U> MyFuture<U> handleAsync(BiFunction<? super T, Throwable, ? extends U> fn, Executor executor) {
return addInternalCallback(new HandlingCallback<T, U>(fn, executor)).getFuture();
}
/**
* Returns a {@link CompletableFuture} maintaining the same completion
* properties as this future. Completing or cancelling the result will
* complete this future also. (Note however that this is not done
* atomically; if two threads race, they may leave the result in a different
* state than this future.)
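*
* <p>
* For example (names are illustrative):
*
* <pre>{@code
* MyFuture<String> source = new MyFuture<>();
* CompletableFuture<String> bridge = source.toCompletableFuture();
* bridge.complete("hello"); // completes source as well
* }</pre>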
*/
@Override
public CompletableFuture<T> toCompletableFuture() {
CompletableFuture<T> future = addInternalCallback(new CompletableFutureCallback<T>()).getFuture();
completeFrom(future);
return future;
}
private final class ListenableFutureBridge implements ListenableFuture<T> {
@Override
public boolean cancel(boolean mayInterruptIfRunning) {
return MyFuture.this.cancel();
}
@Override
public boolean isCancelled() {
return MyFuture.this.isCancelled();
}
@Override
public boolean isDone() {
return MyFuture.this.isDone();
}
@Override
public T get() throws InterruptedException, ExecutionException {
return MyFuture.this
.internalGet(ExecutionException::new, ExecutionException::new, CountDownLatch::await, null);
}
@Override
public T get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException {
return MyFuture.this.internalGet(
ExecutionException::new,
ExecutionException::new,
latch -> latch.await(timeout, unit),
() -> {
throw new TimeoutException();
});
}
@Override
public void addListener(Runnable listener, Executor executor) {
addInternalCallback(MyFuture.wrap(listener, executor));
}
}
/** If the state is a Callback instance, the stage is not yet complete. */
private static interface Callback {
void onComplete(Object state);
default Callback and(Callback also) {
return new CallbackStack(also, this);
}
default void onCompleteAsync(Object state) {
runSoon(() -> this.onComplete(state));
}
}
private static Callback wrap(Runnable callback, Executor e) {
return state -> {
try {
e.execute(callback);
} catch (RuntimeException f) {}
};
}
private static <T> Callback wrap(FutureCallback<T> callback, Executor e) {
return state -> {
try {
if (state instanceof Failure) {
Throwable cause = ((Failure) state).cause;
e.execute(() -> callback.onFailure(cause == null ? new CancellationException() : cause));
} else {
@SuppressWarnings("unchecked")
T result = (T) state;
e.execute(() -> callback.onSuccess(result));
}
} catch (RuntimeException f) {}
};
}
private abstract static class CallbackWithFuture<T> implements Callback {
protected final MyFuture<T> future = new MyFuture<T>();
public MyFuture<T> getFuture() {
return future;
}
}
private static class TransformingCallback<T, U> extends CallbackWithFuture<U> {
final Function<? super T, ? extends U> transform;
final Executor executor;
TransformingCallback(Function<? super T, ? extends U> transform, Executor executor) {
this.transform = transform;
this.executor = executor;
}
@Override
@SuppressWarnings("unchecked")
public void onComplete(Object state) {
if (state instanceof Failure) {
future.completeInternal(state);
} else {
executor.execute(() -> {
try {
future.completeInternal(transform.apply((T) state));
} catch (RuntimeException | Error e) {
future.completeInternal(withFailure(e));
}
});
}
}
}
private static final class CombiningCallback<T, U, V> extends CallbackWithFuture<V> {
private final Executor executor;
private final BiFunction<? super T, ? super U, ? extends V> fn;
private final CompletionStage<? extends U> other;
CombiningCallback(
CompletionStage<? extends U> other,
BiFunction<? super T, ? super U, ? extends V> fn,
Executor executor) {
this.executor = executor;
this.fn = fn;
this.other = other;
}
@Override
@SuppressWarnings("unchecked")
public void onComplete(Object state) {
if (state instanceof Failure) {
future.completeInternal(state);
} else {
other.whenCompleteAsync((input2, t) -> {
if (t != null) {
future.completeInternal(withFailure(t));
} else {
executor.execute(() -> {
try {
V output = fn.apply((T) state, input2);
future.completeInternal(output);
} catch (RuntimeException | Error e) {
future.completeInternal(withFailure(e));
}
});
}
} , Runnable::run);
}
}
}
private static final class ComposingCallback<T, U> extends CallbackWithFuture<U> {
private final Function<? super T, ? extends CompletionStage<U>> fn;
private final Executor executor;
ComposingCallback(Function<? super T, ? extends CompletionStage<U>> fn, Executor executor) {
this.fn = fn;
this.executor = executor;
}
@Override
@SuppressWarnings("unchecked")
public void onComplete(Object state) {
if (state instanceof Failure) {
future.completeInternal(state);
} else {
T t = (T) state;
executor.execute(() -> {
try {
CompletionStage<U> nextStage = fn.apply(t);
if (nextStage == null) {
future.completeExceptionally(new NullPointerException("apply() returned null: " + fn));
} else {
future.completeFrom(nextStage);
}
} catch (RuntimeException | Error x) {
future.completeInternal(withFailure(x, state));
}
});
}
}
}
private void completeFrom(CompletionStage<? extends T> stage) {
if (stage instanceof MyFuture) {
((MyFuture<? extends T>) stage).addInternalCallback(this::completeInternal);
} else {
stage.whenCompleteAsync((u, x) -> {
completeInternal((x != null) ? withFailure(x) : u);
} , Runnable::run);
}
}
private static class WhenCompleteCallback<T> extends CallbackWithFuture<T> {
private final BiConsumer<? super T, ? super Throwable> action;
final Executor executor;
WhenCompleteCallback(BiConsumer<? super T, ? super Throwable> action, Executor executor) {
this.action = action;
this.executor = executor;
}
@Override
@SuppressWarnings("unchecked")
public void onComplete(Object state) {
executor.execute(() -> {
T t;
Throwable x;
if (state instanceof Failure) {
t = null;
x = ((Failure) state).cause;
} else {
t = (T) state;
x = null;
}
try {
action.accept(t, x);
future.completeInternal(state);
} catch (RuntimeException | Error e) {
if (x == null) {
future.completeInternal(withFailure(e));
} else if (e == x) {
future.completeInternal(state);
} else {
Throwable clone = ExceptionCloner.clone(x);
clone.addSuppressed(e);
future.completeInternal(withFailure(clone));
}
}
});
}
}
private static class HandlingCallback<T, U> extends CallbackWithFuture<U> {
private final BiFunction<? super T, Throwable, ? extends U> fn;
final Executor executor;
HandlingCallback(BiFunction<? super T, Throwable, ? extends U> fn, Executor executor) {
this.fn = fn;
this.executor = executor;
}
@Override
@SuppressWarnings("unchecked")
public void onComplete(Object state) {
executor.execute(() -> {
T t;
Throwable x;
if (state instanceof Failure) {
t = null;
x = ((Failure) state).cause;
} else {
t = (T) state;
x = null;
}
try {
future.completeInternal(fn.apply(t, x));
} catch (RuntimeException | Error e) {
future.completeInternal(withFailure(e, state));
}
});
}
}
private static class CompletableFutureCallback<T> implements Callback {
private final CompletableFuture<T> future = new CompletableFuture<>();
@Override
@SuppressWarnings("unchecked")
public void onComplete(Object state) {
if (state instanceof Failure) {
future.completeExceptionally(((Failure) state).cause);
} else {
future.complete((T) state);
}
}
public CompletableFuture<T> getFuture() {
return future;
}
}
/**
* A callback that executes a pair of callbacks.
*/
private static class CallbackStack implements Callback {
final Callback action;
final Callback next;
CallbackStack(Callback action, Callback next) {
this.action = checkNotNull(action);
this.next = checkNotNull(next);
assert !(action instanceof CallbackStack);
assert next != NOTHING;
}
@Override
public void onComplete(Object state) {
action.onComplete(state);
next.onCompleteAsync(state);
}
}
/**
* Callback that defers asynchronous execution of {@code action}, and of any
* subsequently-chained actions, to {@code executor}.
*/
private static class CallbackWithExecutor implements Callback {
final Callback action;
final Executor executor;
CallbackWithExecutor(Callback action, Executor executor) {
this.action = action;
this.executor = executor;
}
@Override
public void onComplete(Object state) {
action.onComplete(state);
}
@Override
public void onCompleteAsync(Object state) {
executor.execute(() -> action.onCompleteAsync(state));
}
@Override
public Callback and(Callback also) {
return new CallbackWithExecutor(action.and(also), executor);
}
}
/**
* Callback that does nothing, but if actions are chained to it, delays
* their asynchronous execution to {@code executor}.
*/
private static class NoCallbackWithExecutor implements Callback {
final Executor executor;
NoCallbackWithExecutor(Executor executor) {
this.executor = executor;
}
@Override
public void onComplete(Object state) {}
@Override
public void onCompleteAsync(Object state) {}
@Override
public Callback and(Callback also) {
return new CallbackWithExecutor(also, executor);
}
}
/**
* If the state is a Failure instance, the stage completed exceptionally.
*/
private static class Failure {
private final Throwable cause;
Failure(Throwable t) {
this.cause = t;
}
}
private static Failure withFailure(Throwable t) {
return new Failure(t);
}
private static Failure withFailure(Throwable t, Object state) {
if (state instanceof Failure && ((Failure) state).cause == t) {
return (Failure) state;
} else {
return new Failure(t);
}
}
private static final Callback NOTHING = new Callback() {
@Override
public void onComplete(Object state) {}
@Override
public void onCompleteAsync(Object state) {}
@Override
public Callback and(Callback also) {
return also;
}
};
/*
* Completing this future may cause a cascade of other callbacks. We need to
* ensure we do not get a StackOverflowError by executing them all
* recursively. However, we also want to ensure all callbacks are triggered
* "soon enough", in case the user has run a synchronous action, and is
* expecting results. To do that, we delay execution of the callback and
* only run it at the top level of this method, keeping the stack trace
* small. If the callback happens after this method returns, we still want
* to run it with the minimal amount of stack, or we may still contribute to
* a StackOverflowError; but we do not want to delay indefinitely, or we
* risk breaking the apparent immediacy of an outer stage. We therefore
* piggyback on the most recently started executor on this thread.
* LocalExecutor provides this very specific set of behaviors. The net
* result is our synchronous methods always return a completed future if all
* the data is available, without StackOverflowErrors.
*/
private <C extends Callback> C addInternalCallback(C callback) {
Object currentState = null;
Callback newState;
do {
currentState = state;
if (currentState instanceof Callback) {
newState = ((Callback) currentState).and(callback);
} else {
Object finalState = currentState;
runNow(() -> {
callback.onComplete(finalState);
});
newState = null;
}
} while (newState != null && !compareAndSwapState(currentState, newState));
return callback;
}
private boolean completeInternal(Object finalState) {
Object currentState = null;
do {
currentState = state;
if (!(currentState instanceof Callback)) {
return false;
}
} while (!compareAndSwapState(currentState, finalState));
((Callback) currentState).onCompleteAsync(finalState);
return true;
}
private boolean compareAndSwapState(Object expected, Object value) {
return STATE.compareAndSet(this, expected, value);
}
private interface FailureAction<X extends Exception> {
X createException(Throwable cause);
}
private interface CancellationAction<X extends Exception> {
X createException(Throwable cause);
}
private interface BlockingAction<X extends Exception> {
void block(CountDownLatch latch) throws X;
}
private interface IncompleteAction<T, X extends Exception> {
T getFallback() throws X;
}
private <W extends Exception, X extends Exception, Y extends Exception, Z extends Exception> T internalGet(
FailureAction<X> failureAction,
CancellationAction<W> cancellationAction,
BlockingAction<Y> blockingAction,
IncompleteAction<T, Z> fallback) throws W, X, Y, Z {
checkArgument(blockingAction != null || fallback != null);
Object currentState = this.state;
if (currentState instanceof Callback && blockingAction != null) {
CountDownLatch latch = new CountDownLatch(1);
boolean addedCallback;
do {
Callback newState = ((Callback) currentState).and(state -> latch.countDown());
addedCallback = compareAndSwapState(currentState, newState);
if (addedCallback) {
blockingAction.block(latch);
}
currentState = this.state;
} while (currentState instanceof Callback && !addedCallback);
}
if (currentState instanceof Callback) {
checkArgument(fallback != null, "Blocking action did not wait for latch, and no fallback provided");
return fallback.getFallback();
} else if (currentState instanceof Failure) {
Throwable cause = ((Failure) currentState).cause;
if (cause instanceof CancellationException) {
throw cancellationAction.createException(cause);
} else {
throw failureAction.createException(cause);
}
} else {
@SuppressWarnings("unchecked")
T result = (T) currentState;
return result;
}
}
}
package nl.sense_os.service.constants;
import android.content.Context;
/**
* Contains all preference keys for the SharedPreferences that are used by the Sense library.<br/>
* <br/>
* Nota bene: there are three separate preference files:
* <ul>
* <li>{@link #MAIN_PREFS}, containing the settings for the sensors and sample and sync rates;</li>
* <li>{@link #AUTH_PREFS}, containing all user-related stuff like login, session, cached sensor
* IDs;</li>
* <li>{@link #STATUS_PREFS}, containing settings about which sensors are activated.</li>
* </ul>
*
* To access the settings, you should not use the default SharedPreferences, but use the name of
* the appropriate preference file instead:
*
* <pre>
* // preferences about sensor settings are stored in the main prefs
* SharedPreferences mainPrefs = getSharedPreferences(SensePrefs.MAIN_PREFS, MODE_PRIVATE);
* boolean useGps = mainPrefs.getBoolean(Main.Location.GPS, true);
*
* // prefs about login are stored in auth prefs
* SharedPreferences authPrefs = getSharedPreferences(SensePrefs.AUTH_PREFS, MODE_PRIVATE);
* String cookie = authPrefs.getString(Auth.LOGIN_COOKIE, null);
* </pre>
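*
* Writing a setting works the same way; a short sketch (the key here is
* chosen only for illustration):
*
* <pre>
* SharedPreferences mainPrefs = getSharedPreferences(SensePrefs.MAIN_PREFS, MODE_PRIVATE);
* mainPrefs.edit().putBoolean(Main.Ambience.LIGHT, false).commit();
* </pre>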
*
* @author Steven Mulder <steven@sense-os.nl>
*/
public class SensePrefs {
/**
* Keys for the authentication-related preferences of the Sense Platform
*/
public static class Auth {
/**
* Key for login preference for session cookie.
*
* @see SensePrefs#AUTH_PREFS
*/
public static final String LOGIN_COOKIE = "login_cookie";
/**
* Key for login preference for email address.
*
* @see SensePrefs#AUTH_PREFS
*/
public static final String LOGIN_USERNAME = "login_mail";
/**
* Key for login preference for hashed password.
*
* @see SensePrefs#AUTH_PREFS
*/
public static final String LOGIN_PASS = "login_pass";
/**
* Key for storing the online sensor list for this device (type of JSONArray).
*
* @see #SENSOR_LIST_COMPLETE
* @see SensePrefs#AUTH_PREFS
* @deprecated
*/
public static final String SENSOR_LIST = "sensor_list";
/**
* Key for storing the online sensor list for this user (type of JSONArray).
*
* @see #SENSOR_LIST
* @see SensePrefs#AUTH_PREFS
*/
public static final String SENSOR_LIST_COMPLETE = "sensor_list_complete";
/**
 * Key for storing the retrieval time of the device's online sensor list.
*
* @see #SENSOR_LIST_COMPLETE_TIME
* @see SensePrefs#AUTH_PREFS
* @deprecated
*/
public static final String SENSOR_LIST_TIME = "sensor_list_timestamp";
/**
* Key for storing the retrieval time of complete online sensor list.
*
* @see #SENSOR_LIST_TIME
* @see SensePrefs#AUTH_PREFS
*/
public static final String SENSOR_LIST_COMPLETE_TIME = "sensor_list_complete_timestamp";
/**
* Key for storing the online device id.
*
* @see SensePrefs#AUTH_PREFS
*/
public static final String DEVICE_ID = "device_id";
/**
* Key for storing the retrieval time of the online device id.
*
* @see SensePrefs#AUTH_PREFS
*/
public static final String DEVICE_ID_TIME = "device_id_timestamp";
/**
* Key for storing the online device type.
*
* @see SensePrefs#AUTH_PREFS
*/
public static final String DEVICE_TYPE = "device_type";
/**
* Key for storing the IMEI of the phone.
*
* @see SensePrefs#AUTH_PREFS
*/
public static final String PHONE_IMEI = "phone_imei";
/**
* Key for storing the type of the phone.
*
* @see SensePrefs#AUTH_PREFS
*/
public static final String PHONE_TYPE = "phone_type";
/**
 * Key for storing the GCM registration id.
*
* @see SensePrefs#AUTH_PREFS
*/
public static final String GCM_REGISTRATION_ID = "gcm_registration_id";
}
/**
* Keys for the main Sense Platform service preferences
*/
public static class Main {
public static class Advanced {
/**
* Key to use the development version of CommonSense.
*
* @see SensePrefs#AUTH_PREFS
*/
public static final String DEV_MODE = "devmode";
/**
* Key for preference that toggles use of compression for transmission. Default is true.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String COMPRESS = "compression";
/**
* Key for preference that enables local storage, making the sensor data available to
* other apps through a ContentProvider. Default is true.
*
* @see SensePrefs#MAIN_PREFS
* @deprecated Local storage is always on.
*/
public static final String LOCAL_STORAGE = "local_storage";
/**
* Key for preference that enables communication with CommonSense. Disable this to work
* in local-only mode. Default is true.
*/
public static final String USE_COMMONSENSE = "use_commonsense";
/**
* Key for preference that enables the location feedback sensor. Enable this to
* participate in Pim's location feedback test. Default is false.
*/
public static final String LOCATION_FEEDBACK = "location_feedback";
/**
* Key for preference that enables Agostino mode. Enable this to participate in
* Agostino's saliency test. Default is false.
*/
public static final String AGOSTINO = "agostino_mode";
}
public static class Ambience {
/**
* Key for preference that toggles use of light sensor in ambience sensing.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String LIGHT = "ambience_light";
/**
* Key for preference that toggles use of camera light sensor in ambience sensing.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String CAMERA_LIGHT = "ambience_camera_light";
/**
* Key for preference that toggles use of the microphone in ambience sensing.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String MIC = "ambience_mic";
/**
* Key for preference that toggles use of the audio spectrum in ambience sensing.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String AUDIO_SPECTRUM = "ambience_audio_spectrum";
/**
* Key for preference that toggles use of the pressure sensor in ambience sensing.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String PRESSURE = "ambience_pressure";
/**
* Key for preference that toggles use of the temperature sensor in ambience sensing.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String TEMPERATURE = "ambience_temperature";
/**
* Key for preference that toggles use of the magnetic field sensor in ambience sensing.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String MAGNETIC_FIELD = "ambience_magnetic_field";
}
public static class DevProx {
/**
* Key for preference that toggles use of Bluetooth in the Device Proximity sensor.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String BLUETOOTH = "proximity_bt";
/**
* Key for preference that toggles use of Wi-Fi in the Device Proximity sensor.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String WIFI = "proximity_wifi";
/**
* Key for preference that toggles use of NFC in the Device Proximity sensor.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String NFC = "proximity_nfc";
}
public static class External {
public static class MyGlucoHealth {
/**
* Key for preference that toggles use of the MyGlucohealth sensor.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String MAIN = "myglucohealth";
}
public static class TanitaScale {
/**
* Key for preference that toggles use of the Tanita scale sensor.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String MAIN = "tanita_scale";
}
public static class ZephyrBioHarness {
/**
* Key for preference that toggles use of the Zephyr BioHarness.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String MAIN = "zephyrBioHarness";
/**
* Key for preference that toggles use of the Zephyr BioHarness Accelerometer.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String ACC = "zephyrBioHarness_acc";
/**
* Key for preference that toggles use of the Zephyr BioHarness Heart rate.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String HEART_RATE = "zephyrBioHarness_heartRate";
/**
* Key for preference that toggles use of the Zephyr BioHarness Temperature.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String TEMP = "zephyrBioHarness_temp";
/**
* Key for preference that toggles use of the Zephyr BioHarness Respiration rate.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String RESP = "zephyrBioHarness_resp";
/**
* Key for preference that toggles use of the Zephyr BioHarness worn status.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String WORN_STATUS = "zephyrBioHarness_wornStatus";
/**
* Key for preference that toggles use of the Zephyr BioHarness battery level.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String BATTERY = "zephyrBioHarness_battery";
}
public static class ZephyrHxM {
/**
* Key for preference that toggles use of the Zephyr HxM.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String MAIN = "zephyrHxM";
/**
* Key for preference that toggles use of the Zephyr HxM speed.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String SPEED = "zephyrHxM_speed";
/**
* Key for preference that toggles use of the Zephyr HxM heart rate.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String HEART_RATE = "zephyrHxM_heartRate";
/**
* Key for preference that toggles use of the Zephyr HxM battery.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String BATTERY = "zephyrHxM_battery";
/**
* Key for preference that toggles use of the Zephyr HxM distance.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String DISTANCE = "zephyrHxM_distance";
/**
* Key for preference that toggles use of the Zephyr HxM strides.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String STRIDES = "zephyrHxM_strides";
}
public static class OBD2Sensor {
/**
* Key for preference that toggles use of the OBD-II sensor.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String MAIN = "obd2sensor";
}
}
public static class Location {
/**
* Key for preference that toggles use of GPS in location sensor.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String GPS = "location_gps";
/**
* Key for preference that toggles use of Network in location sensor.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String NETWORK = "location_network";
/**
 * Key for preference that toggles use of sensor fusion to control the GPS usage.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String AUTO_GPS = "automatic_gps";
}
public static class Motion {
/**
 * Key for preference that toggles use of the fall detector in the motion sensor.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String FALL_DETECT = "motion_fall_detector";
/**
 * Key for preference that toggles the demo mode of the fall detector.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String FALL_DETECT_DEMO = "motion_fall_detector_demo";
/**
* Key for preference that toggles "epi-mode", drastically changing motion sensing
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String EPIMODE = "epimode";
/**
* Key for preference that determines whether to unregister the motion sensor between
* samples. Nota bene: unregistering the sensor breaks the screen rotation on some
* phones (e.g. Nexus S).
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String UNREG = "motion_unregister";
/**
* Key for preference that toggles motion energy sensing, which measures average kinetic
* energy over a sample period.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String MOTION_ENERGY = "motion_energy";
/**
* Key for preference that enables fix that re-registers the motion sensor when the
* screen turns off.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String SCREENOFF_FIX = "screenoff_fix";
}
public static class PhoneState {
/**
* @see SensePrefs#MAIN_PREFS
*/
public static final String BATTERY = "phonestate_battery";
/**
* @see SensePrefs#MAIN_PREFS
*/
public static final String SCREEN_ACTIVITY = "phonestate_screen_activity";
/**
* @see SensePrefs#MAIN_PREFS
*/
public static final String PROXIMITY = "phonestate_proximity";
/**
* @see SensePrefs#MAIN_PREFS
*/
public static final String IP_ADDRESS = "phonestate_ip";
/**
* @see SensePrefs#MAIN_PREFS
*/
public static final String DATA_CONNECTION = "phonestate_data_connection";
/**
* @see SensePrefs#MAIN_PREFS
*/
public static final String UNREAD_MSG = "phonestate_unread_msg";
/**
* @see SensePrefs#MAIN_PREFS
*/
public static final String SERVICE_STATE = "phonestate_service_state";
/**
* @see SensePrefs#MAIN_PREFS
*/
public static final String SIGNAL_STRENGTH = "phonestate_signal_strength";
/**
* @see SensePrefs#MAIN_PREFS
*/
public static final String CALL_STATE = "phonestate_call_state";
}
public static class Quiz {
/**
* Key for preference that sets the interval between pop quizzes.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String RATE = "popquiz_rate";
/**
* Key for preference that sets the silent mode for pop quizzes.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String SILENT_MODE = "popquiz_silent_mode";
/**
* Key for generic preference that starts an update of the quiz questions when clicked.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String SYNC = "popquiz_sync";
/**
* Key for preference that holds the last update time of the quiz questions with
* CommonSense.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String SYNC_TIME = "popquiz_sync_time";
}
/**
* Key for preference that controls sample frequency of the sensors.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String SAMPLE_RATE = "commonsense_rate";
/**
* Key for preference that controls sync frequency with CommonSense.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String SYNC_RATE = "sync_rate";
/**
* Key for preference that saves the last running services.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String LAST_STATUS = "last_status";
/**
* Key for preference that stores a flag for first login.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String LAST_LOGGED_IN = "never_logged_in";
/**
 * Key for preference that stores a timestamp for the last time the sensor registration was
 * verified.
*
* @see SensePrefs#MAIN_PREFS
*/
public static final String LAST_VERIFIED_SENSORS = "verified_sensors";
}
/**
* Keys for the status preferences of the Sense Platform service
*/
public static class Status {
/**
* Key for the main status of the sensors. Set to <code>false</code> to disable all the
* sensing components.
*
* @see SensePrefs#STATUS_PREFS
*/
public static final String MAIN = "main service status";
/**
* Key for the status of the "ambience" sensors. Set to <code>true</code> to enable sensing.
*
* @see SensePrefs#STATUS_PREFS
*/
public static final String AMBIENCE = "ambience component status";
/**
* Key for the status of the "device proximity" sensors. Set to <code>true</code> to enable
* sensing.
*
* @see SensePrefs#STATUS_PREFS
*/
public static final String DEV_PROX = "device proximity component status";
/**
* Key for the status of the external Bluetooth sensors. Set to <code>true</code> to enable
* sensing.
*
* @see SensePrefs#STATUS_PREFS
*/
public static final String EXTERNAL = "external services component status";
/**
* Key for the status of the location sensors. Set to <code>true</code> to enable sensing.
*
* @see SensePrefs#STATUS_PREFS
*/
public static final String LOCATION = "location component status";
/**
* Key for the status of the motion sensors. Set to <code>true</code> to enable sensing.
*
* @see SensePrefs#STATUS_PREFS
*/
public static final String MOTION = "motion component status";
/**
* Key for the status of the "phone state" sensors. Set to <code>true</code> to enable
* sensing.
*
* @see SensePrefs#STATUS_PREFS
*/
public static final String PHONESTATE = "phone state component status";
/**
* Key for the status of the questionnaire. Set to <code>true</code> to enable it.
*
* @see SensePrefs#STATUS_PREFS
* @deprecated Sense does not support the questionnaire anymore
*/
public static final String POPQUIZ = "pop quiz component status";
/**
* Key for preference to automatically start the Sense service on boot.
*
* @see SensePrefs#STATUS_PREFS
*/
public static final String AUTOSTART = "autostart";
}
public static class SensorSpecifics {
public static class Loudness {
/**
 * Key for learned value of total silence.
*/
public static final String TOTAL_SILENCE = "total_silence";
/**
* Key for learned value of highest loudness.
*/
public static final String LOUDEST = "loudest";
}
public static class AutoCalibratedNoise {
/**
 * Key for learned value of total silence.
*/
public static final String TOTAL_SILENCE = "AutoCalibratedNoise.total_silence";
/**
* Key for learned value of highest loudness.
*/
public static final String LOUDEST = "AutoCalibratedNoise.loudest";
}
}
/**
* Name of the shared preferences file used for storing CommonSense authentication data. Use
* {@link Context#MODE_PRIVATE}.
*
 * @see #MAIN_PREFS
* @see #STATUS_PREFS
*/
public static final String AUTH_PREFS = "authentication";// "login";
/**
* Name of the main preference file, used for storing the settings for the Sense service.
*
* @see #AUTH_PREFS
* @see #STATUS_PREFS
*/
public static final String MAIN_PREFS = "main";
/**
* Name of shared preferences file holding the desired status of the Sense service.
*
* @see #AUTH_PREFS
* @see #MAIN_PREFS
*/
public static final String STATUS_PREFS = "service_status_prefs";
/**
* Name of the sensor specifics file, used for storing the settings for the Sense service.
*
* @see #AUTH_PREFS
* @see #STATUS_PREFS
*/
public static final String SENSOR_SPECIFICS = "sensor_specifics";
private SensePrefs() {
// private constructor to prevent instantiation
}
}
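// A minimal usage sketch, not part of the Sense library itself: it shows how the
// preference-file names and key constants above are meant to be combined. The Context
// instance and the particular keys used here are illustrative assumptions.
class SensePrefsUsageExample {

    /** Reads the GPS setting from the main prefs and switches the location sensors on. */
    static void enableLocationSensing(Context context) {
        android.content.SharedPreferences mainPrefs =
                context.getSharedPreferences(SensePrefs.MAIN_PREFS, Context.MODE_PRIVATE);
        boolean useGps = mainPrefs.getBoolean(SensePrefs.Main.Location.GPS, true);

        android.content.SharedPreferences statusPrefs =
                context.getSharedPreferences(SensePrefs.STATUS_PREFS, Context.MODE_PRIVATE);
        statusPrefs.edit()
                .putBoolean(SensePrefs.Status.MAIN, true)
                .putBoolean(SensePrefs.Status.LOCATION, useGps)
                .commit();
    }
}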
|
package org.javaee7.websocket.bingo;
/**
*
* @author sree
*/
import org.json.simple.JSONObject;
public class PeerInfo {
JSONObject peerInfo;
PeerInfo(String id) {
peerInfo = new JSONObject();
peerInfo.put("Type", "Id Description");
peerInfo.put("id", id);
}
public JSONObject getJSONObject() {
return peerInfo;
}
}
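// Illustrative sketch, not part of the original example: shows the JSON document that
// PeerInfo produces. The peer id "peer-1" is an arbitrary placeholder value.
class PeerInfoExample {
    public static void main(String[] args) {
        PeerInfo info = new PeerInfo("peer-1");
        // Typically prints {"Type":"Id Description","id":"peer-1"} (key order is not guaranteed)
        System.out.println(info.getJSONObject().toJSONString());
    }
}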
|
package opendap.bes.dap4Responders;
import opendap.bes.*;
import opendap.bes.dap2Responders.BesApi;
import opendap.coreServlet.ReqInfo;
import opendap.coreServlet.ResourceInfo;
import opendap.coreServlet.Scrub;
import opendap.coreServlet.Util;
import opendap.http.error.*;
import opendap.namespaces.DAP;
import org.jdom.Element;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.text.FieldPosition;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.regex.Pattern;
public abstract class Dap4Responder extends BesDapResponder {
Logger _log;
private MediaType _normativeMediaType;
private Vector<Dap4Responder> _altResponders;
private String _combinedRequestSuffixRegex;
private boolean _addTypeSuffixToDownloadFilename;
public Dap4Responder(String sysPath, String pathPrefix, String requestSuffix, BesApi besApi) {
super(sysPath, pathPrefix, requestSuffix, besApi);
_log = LoggerFactory.getLogger(getClass().getName());
_altResponders = new Vector<>();
addTypeSuffixToDownloadFilename(false);
}
public void addTypeSuffixToDownloadFilename(boolean value){
_addTypeSuffixToDownloadFilename = value;
}
public boolean addTypeSuffixToDownloadFilename(){
return _addTypeSuffixToDownloadFilename;
}
public void setNormativeMediaType(MediaType mt){
_normativeMediaType = mt;
_combinedRequestSuffixRegex = buildRequestMatchingRegex();
_log.debug("combinedRequestSuffixRegex: {}", _combinedRequestSuffixRegex);
setRequestMatchRegex(_combinedRequestSuffixRegex);
}
public MediaType getNormativeMediaType(){
return _normativeMediaType;
}
public Dap4Responder[] getAltRepResponders(){
Dap4Responder[] ars = new Dap4Responder[_altResponders.size()];
return _altResponders.toArray(ars);
}
public void addAltRepResponder(Dap4Responder altRepResponder){
_altResponders.add(altRepResponder);
_combinedRequestSuffixRegex = buildRequestMatchingRegex();
_log.debug("combinedRequestSuffixRegex: {}", _combinedRequestSuffixRegex);
setRequestMatchRegex(_combinedRequestSuffixRegex);
for(Dap4Responder responder: _altResponders){
responder._combinedRequestSuffixRegex = _combinedRequestSuffixRegex;
}
}
public void clearAltResponders(){
_altResponders.clear();
}
public String getCombinedRequestSuffixRegex(){
return _combinedRequestSuffixRegex;
}
public void setCombinedRequestSuffixRegex(String regex){
_combinedRequestSuffixRegex = regex;
}
public String buildRequestMatchingRegex() {
StringBuilder s = new StringBuilder();
s.append(buildRequestMatchingRegexWorker(this));
s.append("$");
_log.debug("Request Match Regex: {}", s.toString());
return s.toString();
}
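/*
 * Illustrative note (suffix values are hypothetical): for a responder whose normative
 * suffix is ".dmr" with alternate representations ".xml" and ".html", the worker below
 * composes the combined pattern
 *
 *     \.dmr(((\.xml)?)|((\.html)?))?$
 *
 * i.e. the normative suffix optionally followed by at most one of the alternate suffixes.
 */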
private String buildRequestMatchingRegexWorker(Dap4Responder responder) {
StringBuilder s = new StringBuilder();
if (responder.getNormativeMediaType().getMediaSuffix().startsWith("."))
s.append("\\");
s.append(responder.getNormativeMediaType().getMediaSuffix());
Dap4Responder[] altResponders = responder.getAltRepResponders();
boolean hasAltRepResponders = altResponders.length > 0;
if (hasAltRepResponders)
s.append("(");
boolean notFirstPass = false;
for (Dap4Responder altResponder : altResponders) {
if (notFirstPass)
s.append("|");
s.append("(").append("(");
s.append(buildRequestMatchingRegexWorker(altResponder));
s.append(")?").append(")");
notFirstPass = true;
}
if (hasAltRepResponders)
s.append(")?");
return s.toString();
}
/**
 * This is where we do the server-driven HTTP content negotiation.
 * @param request the incoming request whose Accept header is examined
 * @return the best-matching responder; this responder if no Accept header was sent,
 *         or {@code null} if no acceptable representation exists
* @throws NoSuchElementException
*/
public Dap4Responder getBestResponderForHttpRequest(HttpServletRequest request) throws NoSuchElementException {
HashMap<MediaType,Dap4Responder> responderMap = new HashMap<MediaType, Dap4Responder>();
String acceptsHeaderValue = request.getHeader("Accept");
_log.debug("Accept: {}", acceptsHeaderValue);
Vector<MediaType> clientMediaTypes = new Vector<MediaType>();
if(acceptsHeaderValue!=null){
String[] mimeTypes = acceptsHeaderValue.split(",");
for(String mimeType: mimeTypes){
clientMediaTypes.add(new MediaType(mimeType.trim()));
}
}
else {
return this;
}
for(MediaType mt: clientMediaTypes){
_log.debug("Clients accepts media type: {}", mt.toString());
}
TreeSet<MediaType> matchingTypes = new TreeSet<MediaType>();
for(MediaType mt: clientMediaTypes){
if(mt.getMimeType().equalsIgnoreCase(_normativeMediaType.getMimeType())){
matchingTypes.add(mt);
responderMap.put(mt,this);
}
else if(mt.getPrimaryType().equalsIgnoreCase(_normativeMediaType.getPrimaryType()) &&
mt.getSubType().equalsIgnoreCase("*")){
matchingTypes.add(mt);
responderMap.put(mt,this);
}
else if(mt.getPrimaryType().equalsIgnoreCase("*") &&
mt.getSubType().equalsIgnoreCase("*")){
matchingTypes.add(mt);
responderMap.put(mt,this);
}
for(Dap4Responder altRepResponder : getAltRepResponders()){
MediaType altType = altRepResponder.getNormativeMediaType();
if(mt.getMimeType().equalsIgnoreCase(altType.getMimeType())){
matchingTypes.add(mt);
responderMap.put(mt,altRepResponder);
}
else if(mt.getPrimaryType().equalsIgnoreCase(altType.getPrimaryType()) &&
mt.getSubType().equalsIgnoreCase("*")){
matchingTypes.add(mt);
responderMap.put(mt,altRepResponder);
}
}
}
if(matchingTypes.isEmpty()){
return null;
}
MediaType bestType = matchingTypes.last();
Dap4Responder bestResponder = responderMap.get(bestType);
_log.debug("Best Matching Type: {}", bestType);
_log.debug("Worst Matching Type: {}", matchingTypes.first());
_log.debug("Best Responder: {}", bestResponder.getClass().getName());
return bestResponder;
}
@Override
public void respondToHttpGetRequest(HttpServletRequest request, HttpServletResponse response) throws Exception {
_log.debug("respondToHttpGetRequest() - Checking Last-Modified header...");
if (!response.containsHeader("Last-Modified")) {
_log.debug("respondToHttpGetRequest() - Last-Modified header has not been set. Setting...");
Date lmt = new Date(getLastModified(request));
//Date lmt = new Date((long)-1);
SimpleDateFormat httpDateFormat = new SimpleDateFormat(HttpDatFormatString);
response.setHeader("Last-Modified",httpDateFormat.format(lmt));
_log.debug("respondToHttpGetRequest() - Last-Modified: {}", httpDateFormat.format(lmt));
} else {
_log.debug("respondToHttpGetRequest() - Last-Modified header has already been set.");
}
String relativeUrl = ReqInfo.getLocalUrl(request);
for(Dap4Responder altResponder: getAltRepResponders()){
Pattern p = altResponder.getRequestSuffixMatchPattern();
if(Util.matchesSuffixPattern(relativeUrl, p)){
altResponder.respondToHttpGetRequest(request,response);
return;
}
}
boolean regexMatch = Util.matchesSuffixPattern(relativeUrl,getRequestSuffixMatchPattern());
if(regexMatch){
_log.debug("requestedResourceId matches RequestSuffixMatchPattern: {}", regexMatch);
Dap4Responder targetResponder = getBestResponderForHttpRequest(request);
if(targetResponder==null){
//If an Accept header field is present, and if the server cannot send a response
// which is acceptable according to the combined Accept field value, then the server
// SHOULD send a 406 (not acceptable) response.
String msg = "Server-driven content negotiation failed. Returning status 406. Client request 'Accept: "+ Scrub.urlContent(request.getHeader("Accept"))+"'";
_log.error("respondToHttpGetRequest() - {} ", msg);
throw new NotAcceptable(msg);
}
_log.debug("respondToHttpGetRequest() - Target Responder: {} normative media-type: {}", targetResponder.getClass().getName(), targetResponder.getNormativeMediaType());
targetResponder.sendNormativeRepresentation(request,response);
return;
}
String msg ="Something Bad Happened. Unable to respond to request for : '" + Scrub.urlContent(relativeUrl) + "'";
_log.error("respondToHttpGetRequest() - {}",msg);
throw new opendap.http.error.InternalError(msg);
}
/**
 * Determines whether this responder (or one of its alternate representations) can service
 * the requested resource.
 * @param requestedResourceId the requested resource identifier
 * @param checkWithBes whether to ask the BES to confirm that the resource is a dataset
 * @return true if a usable resource id could be derived from the request, false otherwise
*/
@Override
public boolean matches(String requestedResourceId, boolean checkWithBes) {
String resourceID = getResourceId(requestedResourceId,checkWithBes);
boolean result = resourceID != null;
return result;
}
public String getResourceId(String requestedResource, boolean checkWithBes){
Pattern suffixPattern = Pattern.compile(_combinedRequestSuffixRegex, Pattern.CASE_INSENSITIVE);
return getBesApi().getBesDataSourceID(requestedResource, suffixPattern, checkWithBes);
}
/*
public String getResourceId(String requestedResource, boolean checkWithBes){
Pattern suffixPattern = Pattern.compile(combinedRequestSuffixRegex, Pattern.CASE_INSENSITIVE);
Matcher suffixMatcher = suffixPattern.matcher(requestedResource);
boolean suffixMatched = false;
while(!suffixMatcher.hitEnd()){
suffixMatched = suffixMatcher.find();
log.debug("{}", AwsUtil.checkRegex(suffixMatcher, suffixMatched));
}
String besDataSourceId = null;
if(suffixMatched){
int start = suffixMatcher.start();
besDataSourceId = requestedResource.substring(0,start);
if(checkWithBes){
log.debug("Asking BES about resource: {}", besDataSourceId);
try {
ResourceInfo dsi = new BESResource(besDataSourceId, getBesApi());
if (!dsi.isDataset()) {
besDataSourceId = null;
}
} catch (Exception e) {
log.debug("matches() failed with an Exception. Msg: '{}'", e.getMessage());
}
}
}
return besDataSourceId;
}
*/
public String getRequestUrlPath(HttpServletRequest req) {
String forwardRequestUri = (String)req.getAttribute("javax.servlet.forward.request_uri");
String requestUrl = req.getRequestURL().toString();
if(forwardRequestUri != null){
String server = req.getServerName();
int port = req.getServerPort();
String scheme = req.getScheme();
requestUrl = scheme + "://" + server + ":" + port + forwardRequestUri;
}
return requestUrl;
}
@Override
public String getXmlBase(HttpServletRequest req){
String requestUrl = getRequestUrlPath(req);
String xmlBase = Util.dropSuffixFrom(requestUrl, Pattern.compile(getCombinedRequestSuffixRegex()));
_log.debug("getXmlBase(): @xml:base='{}'", xmlBase);
return xmlBase;
}
@Override
public long getLastModified(HttpServletRequest request) throws Exception {
String relativeUrl = ReqInfo.getLocalUrl(request);
String dataSource = getResourceId(relativeUrl,true);
_log.debug("getLastModified(): Determining LastModified time for resource {}", dataSource);
ResourceInfo ri = getResourceInfo(dataSource);
return ri.lastModified();
}
private static final String CF_History_Entry_Date_Format = "yyyy-MM-dd HH:mm:ss z";
public String getCFHistoryEntry(HttpServletRequest request) throws IOException {
StringBuilder cf_history_entry = new StringBuilder();
// Add the date
Date now = new Date();
SimpleDateFormat sdf = new SimpleDateFormat(CF_History_Entry_Date_Format);
sdf.setTimeZone(new SimpleTimeZone(0,"GMT"));
cf_history_entry.append(sdf.format(now,new StringBuffer(),new FieldPosition(0)));
// Add the Hyrax Version
cf_history_entry.append(" Hyrax-").append(opendap.bes.Version.getHyraxVersionString());
cf_history_entry.append(" ");
// Add the complete request URL
cf_history_entry.append(getRequestUrlPath(request));
cf_history_entry.append("?");
cf_history_entry.append(ReqInfo.getConstraintExpression(request));
cf_history_entry.append("\n");
return cf_history_entry.toString();
}
public Element getServiceElement(String datasetUrl){
Element service = getServiceElement();
Element link = getNormativeLink(datasetUrl);
service.addContent(link);
for(Dap4Responder altRepResponder: getAltRepResponders()){
MediaType altMediaType = altRepResponder.getNormativeMediaType();
String href = datasetUrl + getNormativeMediaType().getMediaSuffix()+altMediaType.getMediaSuffix();
link = getLinkElement(altMediaType.getMimeType(),href,altRepResponder.getServiceDescription());
service.addContent(link);
}
return service;
}
public Element getNormativeLink(String datasetUrl){
String href = datasetUrl + getNormativeMediaType().getMediaSuffix();
Element link = getLinkElement(getNormativeMediaType().getMimeType(),href,"The normative form of the "+getServiceTitle());
Element alt;
for(Dap4Responder altRepResponder: getAltRepResponders()){
alt = new Element("alt",DAP.DAPv40_DatasetServices_NS);
alt.setAttribute("type",altRepResponder.getNormativeMediaType().getMimeType());
link.addContent(alt);
}
return link;
}
public Element getLinkElement(String mediaType, String href, String description ){
Element link = new Element("link",DAP.DAPv40_DatasetServices_NS);
link.setAttribute("type",mediaType);
link.setAttribute("href",href);
if(description!=null && !description.equals(""))
link.setAttribute("description",description);
return link;
}
public Element getServiceElement(){
return getServiceElement(getServiceTitle(),getServiceRoleId(),getServiceDescription(),getServiceDescriptionLink());
}
public Element getServiceElement(String title, String role, String descriptionText, String descriptionLink){
Element service = new Element("Service",DAP.DAPv40_DatasetServices_NS);
service.setAttribute("title",title);
service.setAttribute("role",role);
Element description = getDescriptionElement(descriptionText, descriptionLink);
if(description!=null)
service.addContent(description);
return service;
}
public Element getDescriptionElement(String descriptionText, String descriptionLink){
Element description=null;
if(descriptionText!=null || descriptionLink!=null){
description = new org.jdom.Element("Description", DAP.DAPv40_DatasetServices_NS);
if(descriptionLink!=null)
description.setAttribute("href",descriptionLink);
if(descriptionText!=null)
description.setText(descriptionText);
}
return description;
}
/**
* If addTypeSuffixToDownloadFilename() is true, append the value of
* getRequestSuffix() to the name.
*
* {@inheritDoc}
*/
@Override
public String getDownloadFileName(String resourceID){
String name = super.getDownloadFileName(resourceID);
// old rule: add the suffix - there was no option
// old-new rule: if addTypeSuffixToDownloadFilename() is true, append getRequestSuffix().
// new rule: if addType...() is true, then look at 'name' and do one of the following:
// file.<ext>: remove '.<ext>' and append the value of getRequestSuffix()
// file [no ext at all]: append getRequestSuffix()
// else if addType...() is not true, provide the old behavior
// Assume that all <ext> are no more than three characters long (some are, but this is
// a reasonable compromise).
if(addTypeSuffixToDownloadFilename()) {
int dotPos = name.lastIndexOf('.'); // -1 if '.' not found
int extLength = name.length() - (dotPos + 1);
if (dotPos != -1 && (extLength > 0 && extLength < 4)) {
name = name.substring(0, dotPos);
}
}
name += getRequestSuffix();
return name;
}
public abstract void sendNormativeRepresentation(HttpServletRequest request, HttpServletResponse response) throws Exception;
}
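// Illustrative sketch, not part of Hyrax: it mirrors the suffix rule implemented in
// Dap4Responder.getDownloadFileName() above. The ".dap" suffix is an arbitrary example
// value standing in for getRequestSuffix().
class DownloadFileNameRuleExample {

    /** Applies the rule: strip a 1-3 character extension (if present), then append the suffix. */
    static String applyRule(String name, String requestSuffix, boolean addTypeSuffix) {
        if (addTypeSuffix) {
            int dotPos = name.lastIndexOf('.');
            int extLength = name.length() - (dotPos + 1);
            if (dotPos != -1 && extLength > 0 && extLength < 4) {
                name = name.substring(0, dotPos);
            }
        }
        return name + requestSuffix;
    }

    public static void main(String[] args) {
        System.out.println(applyRule("ocean.nc", ".dap", true));    // ocean.dap
        System.out.println(applyRule("ocean", ".dap", true));       // ocean.dap
        System.out.println(applyRule("ocean.hdf5", ".dap", true));  // ocean.hdf5.dap (extension too long to strip)
        System.out.println(applyRule("ocean.nc", ".dap", false));   // ocean.nc.dap
    }
}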
|
package org.javamexico.protobuf.example;
import com.codahale.metrics.ConsoleReporter;
import com.codahale.metrics.MetricRegistry;
import org.javamexico.service.Servicio;
import org.javamexico.ws.example.ClienteWS;
import org.javamexico.ws.example.WebService;
import javax.xml.ws.Endpoint;
import java.io.InputStreamReader;
import java.io.LineNumberReader;
import java.util.concurrent.TimeUnit;
public class Main {
private final static Servicio servicio = new Servicio();
public static final MetricRegistry METRICS = new MetricRegistry();
public static final int ROUNDS = 50000;
private static void externalTest() throws Exception {
ProtoServer server = new ProtoServer(servicio);
Thread t = new Thread(server, "proto-server");
t.start();
System.out.println("LISTO; dale ENTER para terminar");
synchronized(servicio) {
servicio.wait();
}
}
private static void proto() throws Exception {
ProtoServer server = new ProtoServer(servicio);
Thread t = new Thread(server, "proto-server");
t.start();
// Create a client and send N requests
final ProtoClient pc = new ProtoClient();
pc.writeToDisk();
pc.benchmark(1000);
METRICS.timer("proto.total").time(() -> pc.benchmark(Main.ROUNDS));
server.shutdown();
}
private static void ws() throws Exception {
WebService ws = new WebService();
ws.setService(servicio);
Endpoint endpoint = Endpoint.create(ws);
endpoint.publish("http://localhost:9998/ws");
ClienteWS wsc = new ClienteWS();
System.out.println("WS warmup");
wsc.benchmark(1000);
System.out.println("WS benchmark");
METRICS.timer("ws.total").time(() -> wsc.benchmark(Main.ROUNDS));
}
public static void main(String... args) throws Exception {
ws();
proto();
ConsoleReporter.forRegistry(Main.METRICS).convertDurationsTo(TimeUnit.MILLISECONDS)
.convertRatesTo(TimeUnit.SECONDS).outputTo(System.out).build().report();
System.out.println("FYI, " + Runtime.getRuntime().availableProcessors() + " CPUs");
externalTest();
System.exit(0);
}
}
|
package org.jboss.netty.buffer;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.GatheringByteChannel;
import java.nio.channels.ScatteringByteChannel;
import java.nio.charset.UnsupportedCharsetException;
import java.util.NoSuchElementException;
public interface ChannelBuffer extends Comparable<ChannelBuffer> {
/**
* A buffer whose capacity is {@code 0}.
*/
static ChannelBuffer EMPTY_BUFFER = new BigEndianHeapChannelBuffer(0);
/**
* Returns the number of bytes (octets) this buffer can contain.
*/
int capacity();
/**
 * Returns the endianness ({@link ByteOrder}) of this buffer.
 */
ByteOrder order();
/**
* Returns the {@code readerIndex} of this buffer.
*/
int readerIndex();
/**
* Sets the {@code readerIndex} of this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code readerIndex} is less than 0 or
* greater than {@code this.writerIndex}
*/
void readerIndex(int readerIndex);
/**
* Returns the {@code writerIndex} of this buffer.
*/
int writerIndex();
/**
* Sets the {@code writerIndex} of this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code writerIndex} is less than
* {@code this.readerIndex} or greater than {@code this.capacity}
*/
void writerIndex(int writerIndex);
/**
* Sets the {@code readerIndex} and {@code writerIndex} of this buffer
* in one shot. This method is useful when you have to worry about the
* invocation order of {@link #readerIndex(int)} and {@link #writerIndex(int)}
* methods. For example, the following code will fail:
*
* <pre>
* // Create a buffer whose readerIndex, writerIndex and capacity are
* // 0, 0 and 8 respectively.
* ChannelBuffer buf = ChannelBuffers.buffer(8);
*
* // IndexOutOfBoundsException is thrown because the specified
* // readerIndex (2) cannot be greater than the current writerIndex (0).
* buf.readerIndex(2);
* buf.writerIndex(4);
* </pre>
*
* The following code will also fail:
*
* <pre>
* // Create a buffer whose readerIndex, writerIndex and capacity are
* // 0, 8 and 8 respectively.
* ChannelBuffer buf = ChannelBuffers.wrappedBuffer(new byte[8]);
*
* // readerIndex becomes 8.
* buf.readLong();
*
* // IndexOutOfBoundsException is thrown because the specified
* // writerIndex (4) cannot be less than the current readerIndex (8).
* buf.writerIndex(4);
* buf.readerIndex(2);
* </pre>
*
* By contrast, {@link #setIndex(int, int)} guarantees that it never
* throws an {@link IndexOutOfBoundsException} as long as the specified
* indexes meet all constraints, regardless what the current index values
* of the buffer are:
*
* <pre>
* // No matter what the current state of the buffer is, the following
* // call always succeeds as long as the capacity of the buffer is not
* // less than 4.
* buf.setIndex(2, 4);
* </pre>
*
* @throws IndexOutOfBoundsException
* if the specified {@code readerIndex} is less than 0,
* if the specified {@code writerIndex} is less than the specified
* {@code readerIndex} or if the specified {@code writerIndex} is
* greater than {@code this.capacity}
*/
void setIndex(int readerIndex, int writerIndex);
/**
 * Returns the number of readable bytes which is equal to
* {@code (this.writerIndex - this.readerIndex)}.
*/
int readableBytes();
/**
 * Returns the number of writable bytes which is equal to
* {@code (this.capacity - this.writerIndex)}.
*/
int writableBytes();
/**
* Returns {@code true}
* if and only if {@code (this.writerIndex - this.readerIndex)} is greater
* than {@code 0}.
*/
boolean readable();
/**
* Returns {@code true}
* if and only if {@code (this.capacity - this.writerIndex)} is greater
* than {@code 0}.
*/
boolean writable();
/**
* Sets the {@code readerIndex} and {@code writerIndex} of this buffer to
* {@code 0}.
* This method is identical to {@link #setIndex(int, int) setIndex(0, 0)}.
* <p>
* Please note that the behavior of this method is different
* from that of NIO {@link ByteBuffer}, which sets the {@code limit} to
* the {@code capacity} of the buffer.
*/
void clear();
/**
* Marks the current {@code readerIndex} in this buffer. You can
* reposition the current {@code readerIndex} to the marked
* {@code readerIndex} by calling {@link #resetReaderIndex()}.
* The initial value of the marked {@code readerIndex} is {@code 0}.
*/
void markReaderIndex();
/**
* Repositions the current {@code readerIndex} to the marked
* {@code readerIndex} in this buffer.
*
* @throws IndexOutOfBoundsException
* if the current {@code writerIndex} is less than the marked
* {@code readerIndex}
*/
void resetReaderIndex();
/**
* Marks the current {@code writerIndex} in this buffer. You can
* reposition the current {@code writerIndex} to the marked
* {@code writerIndex} by calling {@link #resetWriterIndex()}.
* The initial value of the marked {@code writerIndex} is {@code 0}.
*/
void markWriterIndex();
/**
* Repositions the current {@code writerIndex} to the marked
* {@code writerIndex} in this buffer.
*
* @throws IndexOutOfBoundsException
* if the current {@code readerIndex} is greater than the marked
* {@code writerIndex}
*/
void resetWriterIndex();
/**
* Discards the bytes between the 0th index and {@code readerIndex}.
* It moves the bytes between {@code readerIndex} and {@code writerIndex}
* to the 0th index, and sets {@code readerIndex} and {@code writerIndex}
* to {@code 0} and {@code oldWriterIndex - oldReaderIndex} respectively.
* <p>
* Please refer to the class documentation for more detailed explanation.
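 * <p>
 * Illustrative example (not from the original documentation):
 * <pre>
 * // Suppose readerIndex == 2, writerIndex == 5 and capacity == 8.
 * buf.discardReadBytes();
 * // Now readerIndex == 0 and writerIndex == 3; the three readable bytes that
 * // were at indexes 2..4 have been moved to indexes 0..2.
 * </pre>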
*/
void discardReadBytes();
/**
* Gets a byte at the specified absolute {@code index} in this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 1} is greater than {@code this.capacity}
*/
byte getByte(int index);
/**
 * Gets an unsigned byte at the specified absolute {@code index} in this
* buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 1} is greater than {@code this.capacity}
*/
short getUnsignedByte(int index);
/**
* Gets a 16-bit short integer at the specified absolute {@code index} in
* this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 2} is greater than {@code this.capacity}
*/
short getShort(int index);
/**
 * Gets an unsigned 16-bit short integer at the specified absolute
* {@code index} in this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 2} is greater than {@code this.capacity}
*/
int getUnsignedShort(int index);
/**
* Gets a 24-bit medium integer at the specified absolute {@code index} in
* this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 3} is greater than {@code this.capacity}
*/
int getMedium(int index);
/**
 * Gets an unsigned 24-bit medium integer at the specified absolute
* {@code index} in this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 3} is greater than {@code this.capacity}
*/
int getUnsignedMedium(int index);
/**
* Gets a 32-bit integer at the specified absolute {@code index} in
* this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 4} is greater than {@code this.capacity}
*/
int getInt(int index);
/**
 * Gets an unsigned 32-bit integer at the specified absolute {@code index}
* in this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 4} is greater than {@code this.capacity}
*/
long getUnsignedInt(int index);
/**
* Gets a 64-bit long integer at the specified absolute {@code index} in
* this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 8} is greater than {@code this.capacity}
*/
long getLong(int index);
/**
* Transfers this buffer's data to the specified destination starting at
* the specified absolute {@code index} until the destination becomes
 * unwritable. This method is basically the same as
* {@link #getBytes(int, ChannelBuffer, int, int)}, except that this
* method increases the {@code writerIndex} of the destination by the
* number of the transferred bytes while
* {@link #getBytes(int, ChannelBuffer, int, int)} doesn't.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* if {@code index + dst.writableBytes} is greater than
* {@code this.capacity}
*/
void getBytes(int index, ChannelBuffer dst);
/**
* Transfers this buffer's data to the specified destination starting at
* the specified absolute {@code index}.
*
* @param dstIndex the first index of the destination
* @param length the number of bytes to transfer
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0},
* if the specified {@code dstIndex} is less than {@code 0},
* if {@code index + length} is greater than
* {@code this.capacity}, or
* if {@code dstIndex + length} is greater than
* {@code dst.capacity}
*/
void getBytes(int index, ChannelBuffer dst, int dstIndex, int length);
/**
* Transfers this buffer's data to the specified destination starting at
* the specified absolute {@code index}.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* if {@code index + dst.length} is greater than
* {@code this.capacity}
*/
void getBytes(int index, byte[] dst);
/**
* Transfers this buffer's data to the specified destination starting at
* the specified absolute {@code index}.
*
* @param dstIndex the first index of the destination
* @param length the number of bytes to transfer
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0},
* if the specified {@code dstIndex} is less than {@code 0},
* if {@code index + length} is greater than
* {@code this.capacity}, or
* if {@code dstIndex + length} is greater than
* {@code dst.length}
*/
void getBytes(int index, byte[] dst, int dstIndex, int length);
/**
* Transfers this buffer's data to the specified destination starting at
* the specified absolute {@code index} until the destination's position
 * reaches its limit.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* if {@code index + dst.remaining()} is greater than
* {@code this.capacity}
*/
void getBytes(int index, ByteBuffer dst);
/**
* Transfers this buffer's data to the specified stream starting at the
* specified absolute {@code index}.
*
* @param length the number of bytes to transfer
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* if {@code index + length} is greater than
* {@code this.capacity}
* @throws IOException
* if the specified stream threw an exception during I/O
*/
void getBytes(int index, OutputStream out, int length) throws IOException;
/**
* Transfers this buffer's data to the specified channel starting at the
* specified absolute {@code index}.
*
* @param length the maximum number of bytes to transfer
*
* @return the actual number of bytes written out to the specified channel
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* if {@code index + length} is greater than
* {@code this.capacity}
* @throws IOException
* if the specified channel threw an exception during I/O
*/
int getBytes(int index, GatheringByteChannel out, int length) throws IOException;
/**
* Sets the specified byte at the specified absolute {@code index} in this
* buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 1} is greater than {@code this.capacity}
*/
void setByte(int index, byte value);
/**
* Sets the specified 16-bit short integer at the specified absolute
* {@code index} in this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 2} is greater than {@code this.capacity}
*/
void setShort(int index, short value);
/**
* Sets the specified 24-bit medium integer at the specified absolute
* {@code index} in this buffer. Please note that the most significant
* byte is ignored in the specified value.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 3} is greater than {@code this.capacity}
*/
void setMedium(int index, int value);
/**
* Sets the specified 32-bit integer at the specified absolute
* {@code index} in this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 4} is greater than {@code this.capacity}
*/
void setInt(int index, int value);
/**
* Sets the specified 64-bit long integer at the specified absolute
* {@code index} in this buffer.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* {@code index + 8} is greater than {@code this.capacity}
*/
void setLong(int index, long value);
/**
* Transfers the specified source buffer's data to this buffer starting at
* the specified absolute {@code index} until the destination becomes
 * unreadable. This method is basically the same as
 * {@link #setBytes(int, ChannelBuffer, int, int)}, except that this
 * method increases the {@code readerIndex} of the source buffer by
 * the number of the transferred bytes while
 * {@link #setBytes(int, ChannelBuffer, int, int)} doesn't.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* if {@code index + src.readableBytes} is greater than
* {@code this.capacity}
*/
void setBytes(int index, ChannelBuffer src);
/**
* Transfers the specified source buffer's data to this buffer starting at
* the specified absolute {@code index}.
*
* @param srcIndex the first index of the source
* @param length the number of bytes to transfer
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0},
* if the specified {@code srcIndex} is less than {@code 0},
* if {@code index + length} is greater than
* {@code this.capacity}, or
* if {@code srcIndex + length} is greater than
* {@code src.capacity}
*/
void setBytes(int index, ChannelBuffer src, int srcIndex, int length);
/**
* Transfers the specified source array's data to this buffer starting at
* the specified absolute {@code index}.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* if {@code index + src.length} is greater than
* {@code this.capacity}
*/
void setBytes(int index, byte[] src);
/**
* Transfers the specified source array's data to this buffer starting at
* the specified absolute {@code index}.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0},
* if the specified {@code srcIndex} is less than {@code 0},
* if {@code index + length} is greater than
* {@code this.capacity}, or
* if {@code srcIndex + length} is greater than {@code src.length}
*/
void setBytes(int index, byte[] src, int srcIndex, int length);
/**
* Transfers the specified source buffer's data to this buffer starting at
* the specified absolute {@code index} until the source buffer's position
 * reaches its limit.
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* if {@code index + src.remaining()} is greater than
* {@code this.capacity}
*/
void setBytes(int index, ByteBuffer src);
/**
* Transfers the content of the specified source stream to this buffer
* starting at the specified absolute {@code index}.
*
* @param length the number of bytes to transfer
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* if {@code index + length} is greater than {@code this.capacity}
* @throws IOException
* if the specified stream threw an exception during I/O
*/
void setBytes(int index, InputStream in, int length) throws IOException;
/**
* Transfers the content of the specified source channel to this buffer
* starting at the specified absolute {@code index}.
*
* @param length the maximum number of bytes to transfer
*
* @return the actual number of bytes read in from the specified channel
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* if {@code index + length} is greater than {@code this.capacity}
* @throws IOException
* if the specified channel threw an exception during I/O
*/
int setBytes(int index, ScatteringByteChannel in, int length) throws IOException;
/**
* Fills this buffer with <tt>NUL (0x00)</tt> starting at the specified
* absolute {@code index}.
*
* @param length the number of <tt>NUL</tt>s to write to the buffer
*
* @throws IndexOutOfBoundsException
* if the specified {@code index} is less than {@code 0} or
* if {@code index + length} is greater than {@code this.capacity}
*/
void setZero(int index, int length);
/**
* Gets a byte at the current {@code readerIndex} and increases
* the {@code readerIndex} by {@code 1} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readerIndex + 1} is greater than
* {@code this.writerIndex}
*/
byte readByte();
/**
 * Gets an unsigned byte at the current {@code readerIndex} and increases
* the {@code readerIndex} by {@code 1} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readerIndex + 1} is greater than
* {@code this.writerIndex}
*/
short readUnsignedByte();
/**
* Gets a 16-bit short integer at the current {@code readerIndex}
* and increases the {@code readerIndex} by {@code 2} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readerIndex + 2} is greater than
* {@code this.writerIndex}
*/
short readShort();
/**
 * Gets an unsigned 16-bit short integer at the current {@code readerIndex}
* and increases the {@code readerIndex} by {@code 2} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readerIndex + 2} is greater than
* {@code this.writerIndex}
*/
int readUnsignedShort();
/**
* Gets a 24-bit medium integer at the current {@code readerIndex}
* and increases the {@code readerIndex} by {@code 3} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readerIndex + 3} is greater than
* {@code this.writerIndex}
*/
int readMedium();
/**
 * Gets an unsigned 24-bit medium integer at the current {@code readerIndex}
* and increases the {@code readerIndex} by {@code 3} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readerIndex + 3} is greater than
* {@code this.writerIndex}
*/
int readUnsignedMedium();
/**
* Gets a 32-bit integer at the current {@code readerIndex}
* and increases the {@code readerIndex} by {@code 4} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readerIndex + 4} is greater than
* {@code this.writerIndex}
*/
int readInt();
/**
 * Gets an unsigned 32-bit integer at the current {@code readerIndex}
* and increases the {@code readerIndex} by {@code 4} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readerIndex + 4} is greater than
* {@code this.writerIndex}
*/
long readUnsignedInt();
/**
* Gets a 64-bit integer at the current {@code readerIndex}
* and increases the {@code readerIndex} by {@code 8} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readerIndex + 8} is greater than
* {@code this.writerIndex}
*/
long readLong();
ChannelBuffer readBytes(int length);
ChannelBuffer readBytes(ChannelBufferIndexFinder endIndexFinder);
ChannelBuffer readSlice(int length);
ChannelBuffer readSlice(ChannelBufferIndexFinder endIndexFinder);
/**
* Transfers this buffer's data to the specified destination starting at
* the current {@code readerIndex} until the destination becomes
* unwritable, and increases the {@code readerIndex} by the number of the
 * transferred bytes. This method is basically the same as
* {@link #readBytes(ChannelBuffer, int, int)}, except that this method
* increases the {@code writerIndex} of the destination by the number of
* the transferred bytes while {@link #readBytes(ChannelBuffer, int, int)}
* doesn't.
*
* @throws IndexOutOfBoundsException
* if {@code this.readerIndex + dst.writableBytes} is greater than
* {@code this.writerIndex}
*/
void readBytes(ChannelBuffer dst);
/**
* Transfers this buffer's data to the specified destination starting at
* the current {@code readerIndex} and increases the {@code readerIndex}
* by the number of the transferred bytes (= {@code length}). This method
 * is basically the same as {@link #readBytes(ChannelBuffer, int, int)},
* except that this method increases the {@code writerIndex} of the
* destination by the number of the transferred bytes (= {@code length})
* while {@link #readBytes(ChannelBuffer, int, int)} doesn't.
*
* @throws IndexOutOfBoundsException
* if {@code this.readerIndex + length} is greater than
* {@code this.writerIndex} or
* if {@code dst.writerIndex + length} is greater than
* {@code dst.capacity}
*/
void readBytes(ChannelBuffer dst, int length);
/**
* Transfers this buffer's data to the specified destination starting at
* the current {@code readerIndex} and increases the {@code readerIndex}
* by the number of the transferred bytes (= {@code length}).
*
* @param dstIndex the first index of the destination
* @param length the number of bytes to transfer
*
* @throws IndexOutOfBoundsException
* if the specified {@code dstIndex} is less than {@code 0},
* if {@code this.readerIndex + length} is greater than
* {@code this.writerIndex}, or
* if {@code dstIndex + length} is greater than
* {@code dst.capacity}
*/
void readBytes(ChannelBuffer dst, int dstIndex, int length);
/**
* Transfers this buffer's data to the specified destination starting at
* the current {@code readerIndex} and increases the {@code readerIndex}
* by the number of the transferred bytes (= {@code dst.length}).
*
* @throws IndexOutOfBoundsException
* if {@code this.readerIndex + dst.length} is greater than
* {@code this.writerIndex} or
* if {@code this.readableBytes} is greater than
* {@code dst.length}
*/
void readBytes(byte[] dst);
/**
* Transfers this buffer's data to the specified destination starting at
* the current {@code readerIndex} and increases the {@code readerIndex}
* by the number of the transferred bytes (= {@code length}).
*
* @param dstIndex the first index of the destination
* @param length the number of bytes to transfer
*
* @throws IndexOutOfBoundsException
* if the specified {@code dstIndex} is less than {@code 0},
* if {@code this.readerIndex + length} is greater than
* {@code this.writerIndex}, or
* if {@code dstIndex + length} is greater than {@code dst.length}
*/
void readBytes(byte[] dst, int dstIndex, int length);
/**
* Transfers this buffer's data to the specified destination starting at
* the current {@code readerIndex} until the destination's position
 * reaches its limit, and increases the {@code readerIndex} by the
* number of the transferred bytes.
*
* @throws IndexOutOfBoundsException
* if {@code this.readerIndex + dst.remaining()} is greater than
* {@code this.capacity} or
* if {@code this.readableBytes} is greater than
* {@code dst.remaining}
*
*/
void readBytes(ByteBuffer dst);
/**
* Transfers this buffer's data to the specified stream starting at the
* current {@code readerIndex}.
*
* @param length the number of bytes to transfer
*
* @throws IndexOutOfBoundsException
* if {@code this.readerIndex + length} is greater than
* {@code this.capacity}
* @throws IOException
* if the specified stream threw an exception during I/O
*/
void readBytes(OutputStream out, int length) throws IOException;
/**
 * Transfers this buffer's data to the specified channel starting at the
* current {@code readerIndex}.
*
* @param length the maximum number of bytes to transfer
*
* @return the actual number of bytes written out to the specified channel
*
* @throws IndexOutOfBoundsException
* if {@code this.readerIndex + length} is greater than
* {@code this.capacity}
* @throws IOException
* if the specified channel threw an exception during I/O
*/
int readBytes(GatheringByteChannel out, int length) throws IOException;
/**
* Increases the current {@code readerIndex} by the specified
* {@code length} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.readerIndex + length} is greater than
* {@code this.writerIndex}
*/
void skipBytes(int length);
/**
* Increases the current {@code readerIndex} until the specified
* {@code firstIndexFinder} returns {@code true} in this buffer.
*
* @return the number of skipped bytes
*
* @throws NoSuchElementException
* if {@code firstIndexFinder} didn't return {@code true} at all
*/
int skipBytes(ChannelBufferIndexFinder firstIndexFinder);
/**
* Sets the specified byte at the current {@code writerIndex}
* and increases the {@code writerIndex} by {@code 1} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.writerIndex + 1} is greater than
* {@code this.capacity}
*/
void writeByte(byte value);
/**
* Sets the specified 16-bit short integer at the current
* {@code writerIndex} and increases the {@code writerIndex} by {@code 2}
* in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.writerIndex + 2} is greater than
* {@code this.capacity}
*/
void writeShort(short value);
/**
* Sets the specified 24-bit medium integer at the current
* {@code writerIndex} and increases the {@code writerIndex} by {@code 3}
* in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.writerIndex + 3} is greater than
* {@code this.capacity}
*/
void writeMedium(int value);
/**
* Sets the specified 32-bit integer at the current {@code writerIndex}
* and increases the {@code writerIndex} by {@code 4} in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.writerIndex + 4} is greater than
* {@code this.capacity}
*/
void writeInt(int value);
/**
* Sets the specified 64-bit long integer at the current
* {@code writerIndex} and increases the {@code writerIndex} by {@code 8}
* in this buffer.
*
* @throws IndexOutOfBoundsException
* if {@code this.writerIndex + 8} is greater than
* {@code this.capacity}
*/
void writeLong(long value);
/**
* Transfers the specified source buffer's data to this buffer starting at
* the current {@code writerIndex} until the source buffer becomes
* unreadable, and increases the {@code writerIndex} by the number of
* the transferred bytes. This method is basically the same as
* {@link #writeBytes(ChannelBuffer, int, int)}, except that this method
* increases the {@code readerIndex} of the source buffer by the number of
* the transferred bytes while {@link #writeBytes(ChannelBuffer, int, int)}
* doesn't.
*
* @throws IndexOutOfBoundsException
* if {@code this.writerIndex + src.readableBytes} is greater than
* {@code this.capacity}
*/
void writeBytes(ChannelBuffer src);
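// A minimal usage sketch, assuming buffers dst and src where src has readable
// bytes (names are illustrative only):
//
//   dst.writeBytes(src);        // drains src: src.readerIndex advances to src.writerIndex
//   dst.writeBytes(src, 0, 8);  // copies 8 bytes from src index 0; src.readerIndex is unchanged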
/**
* Transfers the specified source buffer's data to this buffer starting at
* the current {@code writerIndex} and increases the {@code writerIndex}
* by the number of the transferred bytes (= {@code length}). This method
* is basically the same as {@link #writeBytes(ChannelBuffer, int, int)},
* except that this method increases the {@code readerIndex} of the source
* buffer by the number of the transferred bytes (= {@code length}) while
* {@link #writeBytes(ChannelBuffer, int, int)} doesn't.
*
* @param length the number of bytes to transfer
*
* @throws IndexOutOfBoundsException
* if {@code this.writerIndex + length} is greater than
* {@code this.capacity}
*/
void writeBytes(ChannelBuffer src, int length);
/**
* Transfers the specified source buffer's data to this buffer starting at
* the current {@code writerIndex} and increases the {@code writerIndex}
* by the number of the transferred bytes (= {@code length}).
*
* @param srcIndex the first index of the source
* @param length the number of bytes to transfer
*
* @throws IndexOutOfBoundsException
* if the specified {@code srcIndex} is less than {@code 0},
* if {@code srcIndex + length} is greater than
* {@code src.capacity}, or
* if {@code this.writerIndex + length} is greater than
* {@code this.capacity}
*/
void writeBytes(ChannelBuffer src, int srcIndex, int length);
/**
* Transfers the specified source array's data to this buffer starting at
* the current {@code writerIndex} and increases the {@code writerIndex}
* by the number of the transferred bytes (= {@code src.length}).
*
* @throws IndexOutOfBoundsException
* if {@code this.writerIndex + src.length} is greater than
* {@code this.capacity}
*/
void writeBytes(byte[] src);
/**
* Transfers the specified source array's data to this buffer starting at
* the current {@code writerIndex} and increases the {@code writerIndex}
* by the number of the transferred bytes (= {@code length}).
*
* @param srcIndex the first index of the source
* @param length the number of bytes to transfer
*
* @throws IndexOutOfBoundsException
* if the specified {@code srcIndex} is less than {@code 0},
* if {@code srcIndex + length} is greater than
* {@code src.length}, or
* if {@code this.writerIndex + length} is greater than
* {@code this.capacity}
*/
void writeBytes(byte[] src, int srcIndex, int length);
/**
* Transfers the specified source buffer's data to this buffer starting at
* the current {@code writerIndex} until the source buffer's position
* reaches its limit, and increases the {@code writerIndex} by the
* number of the transferred bytes.
*
* @throws IndexOutOfBoundsException
* if {@code this.writerIndex + src.remaining()} is greater than
* {@code this.capacity}
*/
void writeBytes(ByteBuffer src);
/**
* Transfers the content of the specified stream to this buffer
* starting at the current {@code writerIndex} and increases the
* {@code writerIndex} by the number of the transferred bytes.
*
* @param length the number of bytes to transfer
*
* @throws IndexOutOfBoundsException
* if {@code this.writerIndex + length} is greater than
* {@code this.capacity}
* @throws IOException
* if the specified stream threw an exception during I/O
*/
void writeBytes(InputStream in, int length) throws IOException;
/**
* Transfers the content of the specified channel to this buffer
* starting at the current {@code writerIndex} and increases the
* {@code writerIndex} by the number of the transferred bytes.
*
* @param length the maximum number of bytes to transfer
*
* @return the actual number of bytes read in from the specified channel
*
* @throws IndexOutOfBoundsException
* if {@code this.writerIndex + length} is greater than
* {@code this.capacity}
* @throws IOException
* if the specified channel threw an exception during I/O
*/
int writeBytes(ScatteringByteChannel in, int length) throws IOException;
/**
* Fills this buffer with <tt>NUL (0x00)</tt> starting at the current
* {@code writerIndex} and increases the {@code writerIndex} by the
* specified {@code length}.
*
* @param length the number of <tt>NUL</tt>s to write to the buffer
*
* @throws IndexOutOfBoundsException
* if {@code this.writerIndex + length} is greater than
* {@code this.capacity}
*/
void writeZero(int length);
int indexOf(int fromIndex, int toIndex, byte value);
int indexOf(int fromIndex, int toIndex, ChannelBufferIndexFinder indexFinder);
/**
* Returns a copy of this buffer's readable bytes. Modifying the content
* of the returned buffer or this buffer doesn't affect each other at all.
* This method is identical to {@code buf.copy(buf.readerIndex(), buf.readableBytes())}.
*/
ChannelBuffer copy();
/**
* Returns a copy of this buffer's sub-region. Modifying the content of
* the returned buffer or this buffer doesn't affect each other at all.
*/
ChannelBuffer copy(int index, int length);
/**
* Returns a slice of this buffer's readable bytes. Modifying the content
* of the returned buffer or this buffer affects each other's content
* while they maintain separate indexes and marks. This method is
* identical to {@code buf.slice(buf.readerIndex(), buf.readableBytes())}.
*/
ChannelBuffer slice();
/**
* Returns a slice of this buffer's sub-region. Modifying the content of
* the returned buffer or this buffer affects each other's content while
* they maintain separate indexes and marks.
*/
ChannelBuffer slice(int index, int length);
/**
* Returns a buffer which shares the whole region of this buffer.
* Modifying the content of the returned buffer or this buffer affects
* each other's content while they maintain separate indexes and marks.
* This method is identical to {@code buf.slice(0, buf.capacity())}.
*/
ChannelBuffer duplicate();
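// A minimal sketch of the sharing semantics documented above, assuming an
// existing buffer buf (illustrative only):
//
//   ChannelBuffer c = buf.copy();      // independent content, independent indexes
//   ChannelBuffer s = buf.slice();     // shared content, separate indexes and marks
//   ChannelBuffer d = buf.duplicate(); // shares the whole region, separate indexes and marks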
/**
* Converts this buffer's readable bytes into a NIO buffer. The returned
* buffer might or might not share the content with this buffer, while
* they have separate indexes and marks. This method is identical to
* {@code buf.toByteBuffer(buf.readerIndex(), buf.readableBytes())}.
*/
ByteBuffer toByteBuffer();
/**
* Converts this buffer's sub-region into a NIO buffer. The returned
* buffer might or might not share the content with this buffer, while
* they have separate indexes and marks.
*/
ByteBuffer toByteBuffer(int index, int length);
/**
* Converts this buffer's readable bytes into an array of NIO buffers.
* The returned buffers might or might not share the content with this
* buffer, while they have separate indexes and marks. This method is
* identical to {@code buf.toByteBuffers(buf.readerIndex(), buf.readableBytes())}.
*/
ByteBuffer[] toByteBuffers();
/**
* Converts this buffer's sub-region into an array of NIO buffers.
* The returned buffers might or might not share the content with this
* buffer, while they have separate indexes and marks.
*/
ByteBuffer[] toByteBuffers(int index, int length);
/**
* Decodes this buffer's readable bytes into a string with the specified
* character set name. This method is identical to
* {@code buf.toString(buf.readerIndex(), buf.readableBytes(), charsetName)}.
*
* @throws UnsupportedCharsetException
* if the specified character set name is not supported by the
* current VM
*/
String toString(String charsetName);
/**
* Decodes this buffer's readable bytes into a string until the specified
* {@code terminatorFinder} returns {@code true} with the specified
* character set name. This method is identical to
* {@code buf.toString(buf.readerIndex(), buf.readableBytes(), charsetName, terminatorFinder)}.
*
* @throws UnsupportedCharsetException
* if the specified character set name is not supported by the
* current VM
*/
String toString(
String charsetName, ChannelBufferIndexFinder terminatorFinder);
/**
* Decodes this buffer's sub-region into a string with the specified
* character set name.
*
* @throws UnsupportedCharsetException
* if the specified character set name is not supported by the
* current VM
*/
String toString(int index, int length, String charsetName);
/**
* Decodes this buffer's readable bytes into a string until the specified
* {@code terminatorFinder} returns {@code true} with the specified
* character set name.
*
* @throws UnsupportedCharsetException
* if the specified character set name is not supported by the
* current VM
*/
String toString(
int index, int length, String charsetName,
ChannelBufferIndexFinder terminatorFinder);
/**
* Returns a hash code which was calculated from the content of this
* buffer. If there's a buffer which is
* {@linkplain #equals(Object) equal to} this buffer, both buffers should
* return the same value.
*/
int hashCode();
/**
* Determines if the content of the specified buffer is identical to the
* content of this buffer. 'Identical' here means:
* <ul>
* <li>the size of the contents of the two buffers is the same, and</li>
* <li>every single byte of the content of the two buffers is the same.</li>
* </ul>
* Please note that it doesn't compare {@link #readerIndex()} nor
* {@link #writerIndex()}. This method also returns {@code false} for
* {@code null} and an object which is not an instance of
* {@link ChannelBuffer} type.
*/
boolean equals(Object obj);
/**
* Compares the content of the specified buffer to the content of this
* buffer. Comparison is performed in the same manner with the string
* comparison functions of various languages such as {@code strcmp},
* {@code memcmp} and {@link String#compareTo(String)}.
*/
int compareTo(ChannelBuffer buffer);
/**
* Returns the string representation of this buffer. This method doesn't
* necessarily return the whole content of the buffer but returns
* the values of the key properties such as {@link #readerIndex()},
* {@link #writerIndex()} and {@link #capacity()}.
*/
String toString();
}
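// A minimal sequential access sketch, assuming a writable ChannelBuffer instance
// named buffer obtained elsewhere; it only uses methods declared above:
//
//   buffer.writeInt(42);                     // writerIndex += 4
//   buffer.writeBytes(new byte[] {1, 2, 3}); // writerIndex += 3
//   buffer.skipBytes(4);                     // readerIndex += 4, past the int
//   byte[] payload = new byte[3];
//   buffer.readBytes(payload);               // readerIndex += 3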
|
package org.lightmare.ejb.startup;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Field;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadFactory;
import javax.annotation.Resource;
import javax.ejb.Stateless;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceUnit;
import org.apache.log4j.Logger;
import org.lightmare.ejb.exceptions.BeanInUseException;
import org.lightmare.ejb.meta.ConnectionSemaphore;
import org.lightmare.ejb.meta.MetaContainer;
import org.lightmare.ejb.meta.MetaData;
import org.lightmare.jndi.NamingUtils;
import org.lightmare.jpa.JPAManager;
import org.lightmare.jpa.datasource.DataSourceInitializer;
import org.lightmare.libraries.LibraryLoader;
import org.lightmare.utils.beans.BeanUtils;
import org.lightmare.utils.fs.FileUtils;
/**
* Class for running in a distinct thread to initialize
* {@link javax.sql.DataSource}s, load libraries, deploy and cache
* {@link javax.ejb.Stateless} session beans, and clean up resources after
* deployments
*
* @author levan
*
*/
public class BeanLoader {
private static final int LOADER_POOL_SIZE = 5;
private static final Logger LOG = Logger.getLogger(BeanLoader.class);
// Thread pool for deployment and removal of beans and temporary resources
private static ExecutorService loaderPool = Executors.newFixedThreadPool(
LOADER_POOL_SIZE, new ThreadFactory() {
@Override
public Thread newThread(Runnable runnable) {
Thread thread = new Thread(runnable);
thread.setName(String.format("Ejb-Loader-Thread-%s",
thread.getId()));
return thread;
}
});
/**
* {@link Runnable} implementation for initializing and deploying
* {@link javax.sql.DataSource}
*
* @author levan
*
*/
private static class ConnectionDeployer implements Runnable {
private DataSourceInitializer initializer;
private Properties properties;
private CountDownLatch dsLatch;
private boolean countedDown;
public ConnectionDeployer(DataSourceInitializer initializer,
Properties properties, CountDownLatch dsLatch) {
this.initializer = initializer;
this.properties = properties;
this.dsLatch = dsLatch;
}
private void notifyDs() {
if (!countedDown) {
dsLatch.countDown();
countedDown = true;
}
}
@Override
public void run() {
try {
initializer.registerDataSource(properties);
notifyDs();
} catch (IOException ex) {
notifyDs();
LOG.error("Could not initialize datasource", ex);
}
}
}
/**
* {@link Runnable} implementation for temporary resource removal
*
* @author levan
*
*/
private static class ResourceCleaner implements Runnable {
List<File> tmpFiles;
public ResourceCleaner(List<File> tmpFiles) {
this.tmpFiles = tmpFiles;
}
/**
* Removes temporary resources after the deploying {@link Thread} notifies
*
* @throws InterruptedException
*/
private void clearTmpData() throws InterruptedException {
synchronized (tmpFiles) {
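// Block until a BeanDeployer finishes deploying from these files and calls notifyAll().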
tmpFiles.wait();
}
for (File tmpFile : tmpFiles) {
FileUtils.deleteFile(tmpFile);
LOG.info(String.format("Cleaning temporal resource %s done",
tmpFile.getName()));
}
}
@Override
public void run() {
try {
clearTmpData();
} catch (InterruptedException ex) {
LOG.error("Coluld not clear temporary resources", ex);
}
}
}
/**
* {@link Callable} implementation for deploying {@link javax.ejb.Stateless}
* session beans and caching {@link MetaData} keyed by bean name
*
* @author levan
*
*/
private static class BeanDeployer implements Callable<String> {
private MetaCreator creator;
private String beanName;
private String className;
private ClassLoader loader;
private List<File> tmpFiles;
private MetaData metaData;
private CountDownLatch conn;
private boolean isCounted;
public BeanDeployer(MetaCreator creator, String beanName,
String className, ClassLoader loader, MetaData metaData,
List<File> tmpFiles, CountDownLatch conn) {
this.creator = creator;
this.beanName = beanName;
this.className = className;
this.loader = loader;
this.tmpFiles = tmpFiles;
this.metaData = metaData;
this.conn = conn;
}
/**
* Locks {@link ConnectionSemaphore} if needed for connection processing
*
* @param semaphore
* @param unitName
* @param jndiName
* @throws IOException
*/
private void lockSemaphore(ConnectionSemaphore semaphore,
String unitName, String jndiName) throws IOException {
synchronized (semaphore) {
if (!semaphore.isCheck()) {
creator.configureConnection(unitName, beanName);
semaphore.notifyAll();
}
}
}
/**
* Counts down the {@link CountDownLatch} {@code conn} if it has not yet
* been counted down by the current thread
*/
private void notifyConn() {
if (!isCounted) {
conn.countDown();
isCounted = true;
}
}
/**
* Checks whether bean {@link MetaData} with the same name is already cached;
* if so, counts down the connection {@link CountDownLatch} and throws
* {@link BeanInUseException}, otherwise caches the meta data under the
* associated name
*
* @param beanEjbName
* @throws BeanInUseException
*/
private void checkAndSetBean(String beanEjbName)
throws BeanInUseException {
try {
MetaContainer.checkAndAddMetaData(beanEjbName, metaData);
} catch (BeanInUseException ex) {
notifyConn();
throw ex;
}
}
/**
* Checks if {@link PersistenceContext}, {@link Resource} and
* {@link PersistenceUnit} annotated fields are cached already
*
* @param context
* @param resource
* @param unit
* @return boolean
*/
private boolean checkOnBreak(PersistenceContext context,
Resource resource, PersistenceUnit unit) {
return context != null && resource != null && unit != null;
}
/**
* Checks whether a connection with the passed unit or JNDI name already
* exists
*
* @param unitName
* @param jndiName
* @return <code>boolean</code>
*/
private boolean checkOnEmf(String unitName, String jndiName) {
boolean checkForEmf;
if (jndiName == null || jndiName.isEmpty()) {
checkForEmf = JPAManager.checkForEmf(unitName);
} else {
jndiName = NamingUtils.createJndiName(jndiName);
checkForEmf = JPAManager.checkForEmf(unitName)
&& JPAManager.checkForEmf(jndiName);
}
return checkForEmf;
}
/**
* Creates {@link ConnectionSemaphore} if one does not already exist
*
* @param context
* @param field
* @param resource
* @param unit
* @return <code>boolean</code>
* @throws IOException
*/
private boolean identifyConnections(PersistenceContext context,
Field field, Resource resource, PersistenceUnit unit)
throws IOException {
metaData.setConnectorField(field);
String unitName = context.unitName();
String jndiName = context.name();
metaData.setUnitName(unitName);
metaData.setJndiName(jndiName);
boolean checkForEmf = checkOnEmf(unitName, jndiName);
boolean checkOnAnnotations = false;
ConnectionSemaphore semaphore;
if (checkForEmf) {
notifyConn();
semaphore = JPAManager.getSemaphore(unitName);
checkOnAnnotations = checkOnBreak(context, resource, unit);
} else {
// Sets connection semaphore for this connection
semaphore = JPAManager.setSemaphore(unitName, jndiName);
notifyConn();
if (semaphore != null) {
lockSemaphore(semaphore, unitName, jndiName);
}
checkOnAnnotations = checkOnBreak(context, resource, unit);
}
return checkOnAnnotations;
}
/**
* Finds and caches {@link PersistenceContext}, {@link PersistenceUnit}
* and {@link Resource} annotated {@link Field}s in the bean class,
* configures connections and creates {@link ConnectionSemaphore}s if they
* do not yet exist for the {@link PersistenceContext#unitName()} unit
*
* @throws IOException
*/
private void retrieveConnections() throws IOException {
Class<?> beanClass = metaData.getBeanClass();
Field[] fields = beanClass.getDeclaredFields();
PersistenceUnit unit;
PersistenceContext context;
Resource resource;
boolean checkOnAnnotations = false;
if (fields == null || fields.length == 0) {
notifyConn();
return;
}
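// Stop scanning fields once PersistenceContext, Resource and PersistenceUnit have all been found.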
for (Field field : fields) {
context = field.getAnnotation(PersistenceContext.class);
resource = field.getAnnotation(Resource.class);
unit = field.getAnnotation(PersistenceUnit.class);
if (context != null) {
checkOnAnnotations = identifyConnections(context, field,
resource, unit);
} else if (resource != null) {
metaData.setTransactionField(field);
checkOnAnnotations = checkOnBreak(context, resource, unit);
} else if (unit != null) {
metaData.setUnitField(field);
checkOnAnnotations = checkOnBreak(context, resource, unit);
}
if (checkOnAnnotations) {
break;
}
}
}
/**
* Creates {@link MetaData} for bean class
*
* @param beanClass
* @throws IOException
*/
private void createMeta(Class<?> beanClass) throws IOException {
metaData.setBeanClass(beanClass);
if (MetaCreator.CONFIG.isServer()) {
retrieveConnections();
} else {
notifyConn();
}
metaData.setLoader(loader);
}
/**
* Loads and caches bean {@link Class} by name
*
* @return deployed bean name
* @throws IOException
*/
private String createBeanClass() throws IOException {
try {
Class<?> beanClass;
if (loader == null) {
beanClass = Class.forName(className);
} else {
beanClass = Class.forName(className, true, loader);
}
Stateless annotation = beanClass.getAnnotation(Stateless.class);
String beanEjbName = annotation.name();
if (beanEjbName == null || beanEjbName.isEmpty()) {
beanEjbName = beanName;
}
checkAndSetBean(beanEjbName);
createMeta(beanClass);
metaData.setInProgress(false);
return beanEjbName;
} catch (ClassNotFoundException ex) {
notifyConn();
throw new IOException(ex);
}
}
private String deploy() {
String deployed = beanName;
try {
LibraryLoader.loadCurrentLibraries(loader);
deployed = createBeanClass();
LOG.info(String.format("bean %s deployed", beanName));
} catch (IOException ex) {
LOG.error(String.format("Could not deploy bean %s cause %s",
beanName, ex.getMessage()), ex);
}
return deployed;
}
@Override
public String call() throws Exception {
synchronized (metaData) {
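// Deployment is serialized per MetaData instance; threads waiting on this
// metaData are released by the notifyAll() calls below once deployment
// succeeds or fails.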
try {
String deployed;
if (tmpFiles != null) {
synchronized (tmpFiles) {
deployed = deploy();
tmpFiles.notifyAll();
}
} else {
deployed = deploy();
}
notifyConn();
metaData.notifyAll();
return deployed;
} catch (Exception ex) {
LOG.error(ex.getMessage(), ex);
metaData.notifyAll();
notifyConn();
return null;
}
}
}
}
/**
* Creates and starts bean deployment process
*
* @param creator
* @param className
* @param loader
* @param tmpFiles
* @param conn
* @return {@link Future}
* @throws IOException
*/
public static Future<String> loadBean(MetaCreator creator,
String className, ClassLoader loader, List<File> tmpFiles,
CountDownLatch conn) throws IOException {
MetaData metaData = new MetaData();
String beanName = BeanUtils.parseName(className);
BeanDeployer beanDeployer = new BeanDeployer(creator, beanName,
className, loader, metaData, tmpFiles, conn);
Future<String> future = loaderPool.submit(beanDeployer);
return future;
}
/**
* Initializes {@link javax.sql.DataSource}s in parallel mode
*
* @param initializer
* @param properties
* @param dsLatch
*/
public static void initializeDatasource(DataSourceInitializer initializer,
Properties properties, CountDownLatch dsLatch) throws IOException {
ConnectionDeployer connectionDeployer = new ConnectionDeployer(
initializer, properties, dsLatch);
loaderPool.submit(connectionDeployer);
}
/**
* Creates and starts temporal resources removal process
*
* @param tmpFiles
*/
public static void removeResources(List<File> tmpFiles) {
ResourceCleaner cleaner = new ResourceCleaner(tmpFiles);
loaderPool.submit(cleaner);
}
}
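// A minimal caller sketch with hypothetical values, assuming a configured
// MetaCreator, DataSourceInitializer, class loader and temp-file list; it only
// exercises the public entry points above:
//
//   CountDownLatch dsLatch = new CountDownLatch(1);
//   BeanLoader.initializeDatasource(initializer, properties, dsLatch);
//
//   CountDownLatch conn = new CountDownLatch(1);
//   Future<String> deployed = BeanLoader.loadBean(creator, "com.example.MyBean",
//           loader, tmpFiles, conn);
//
//   BeanLoader.removeResources(tmpFiles);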
|
package org.mitre.synthea.export;
import static org.mitre.synthea.export.ExportHelper.dateFromTimestamp;
import static org.mitre.synthea.export.ExportHelper.iso8601Timestamp;
import com.google.gson.JsonObject;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Path;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.sis.geometry.DirectPosition2D;
import org.mitre.synthea.engine.Event;
import org.mitre.synthea.helpers.FactTable;
import org.mitre.synthea.helpers.Utilities;
import org.mitre.synthea.modules.DeathModule;
import org.mitre.synthea.modules.Immunizations;
import org.mitre.synthea.modules.LifecycleModule;
import org.mitre.synthea.world.agents.Person;
import org.mitre.synthea.world.agents.Provider;
import org.mitre.synthea.world.concepts.HealthRecord;
import org.mitre.synthea.world.concepts.HealthRecord.CarePlan;
import org.mitre.synthea.world.concepts.HealthRecord.Code;
import org.mitre.synthea.world.concepts.HealthRecord.Encounter;
import org.mitre.synthea.world.concepts.HealthRecord.EncounterType;
import org.mitre.synthea.world.concepts.HealthRecord.Entry;
import org.mitre.synthea.world.concepts.HealthRecord.ImagingStudy;
import org.mitre.synthea.world.concepts.HealthRecord.Immunization;
import org.mitre.synthea.world.concepts.HealthRecord.Medication;
import org.mitre.synthea.world.concepts.HealthRecord.Observation;
import org.mitre.synthea.world.concepts.HealthRecord.Procedure;
import org.mitre.synthea.world.concepts.HealthRecord.Report;
import org.mitre.synthea.world.geography.Location;
public class CDWExporter {
/** Number of clinicians to generate. */
private static final int CLINICIANS = 100;
/** Temporary attribute to record clinician on a provider encounter. */
private static final String CLINICIAN_SID = "CLINICIAN_SID";
/**
* Table key sequence generators.
*/
private Map<FileWriter,AtomicInteger> sids;
private int sidStart = 1;
private FactTable sstaff = new FactTable();
private FactTable maritalStatus = new FactTable();
private FactTable sta3n = new FactTable();
private FactTable location = new FactTable();
// private FactTable appointmentStatus = new FactTable();
// private FactTable appointmentType = new FactTable();
private FactTable immunizationName = new FactTable();
private FactTable reaction = new FactTable();
private FactTable providerNarrative = new FactTable();
private FactTable localDrug = new FactTable();
private FactTable nationalDrug = new FactTable();
private FactTable dosageForm = new FactTable();
private FactTable pharmacyOrderableItem = new FactTable();
private FactTable orderableItem = new FactTable();
private FactTable orderStatus = new FactTable();
private FactTable vistaPackage = new FactTable();
private FactTable collectionsample = new FactTable();
private FactTable labchemtest = new FactTable();
private FactTable topography = new FactTable();
private FactTable institution = new FactTable();
private FactTable loinc = new FactTable();
/**
* Writers for patient data.
*/
private FileWriter lookuppatient;
private FileWriter spatient;
private FileWriter spatientaddress;
private FileWriter spatientphone;
private FileWriter patientrace;
private FileWriter patientethnicity;
/**
* Writers for encounter data.
*/
private FileWriter consult;
private FileWriter visit;
private FileWriter appointment;
private FileWriter inpatient;
/**
* Writers for immunization data.
*/
private FileWriter immunization;
/**
* Writers for allergy data.
*/
private FileWriter allergy;
private FileWriter allergicreaction;
private FileWriter allergycomment;
/**
* Writers for condition data.
*/
private FileWriter problemlist;
private FileWriter vdiagnosis;
/**
* Writers for medications data.
*/
private FileWriter rxoutpatient;
private FileWriter rxoutpatfill;
private FileWriter nonvamed;
private FileWriter cprsorder; // also required for labs
private FileWriter ordereditem; // also required for labs
/**
* Writers for diagnostic report data (i.e. labs).
*/
private FileWriter labchem;
private FileWriter labpanel;
private FileWriter patientlabchem;
private FileWriter vprocedure;
/**
* System-dependent string for a line break. (\n on Mac, *nix, \r\n on Windows)
*/
private static final String NEWLINE = System.lineSeparator();
/**
* Constructor for the CDWExporter -
* initialize the required files and associated writers.
*/
private CDWExporter() {
sids = new HashMap<FileWriter,AtomicInteger>();
try {
File output = Exporter.getOutputFolder("cdw", null);
output.mkdirs();
Path outputDirectory = output.toPath();
// Patient Data
lookuppatient = openFileWriter(outputDirectory, "lookuppatient.csv");
spatient = openFileWriter(outputDirectory, "spatient.csv");
spatientaddress = openFileWriter(outputDirectory, "spatientaddress.csv");
spatientphone = openFileWriter(outputDirectory, "spatientphone.csv");
patientrace = openFileWriter(outputDirectory, "patientrace.csv");
patientethnicity = openFileWriter(outputDirectory, "patientethnicity.csv");
// Encounter Data
consult = openFileWriter(outputDirectory, "consult.csv");
visit = openFileWriter(outputDirectory, "visit.csv");
appointment = openFileWriter(outputDirectory, "appointment.csv");
inpatient = openFileWriter(outputDirectory, "inpatient.csv");
// Immunization Data
immunization = openFileWriter(outputDirectory, "immunization.csv");
// Allergy Data
allergy = openFileWriter(outputDirectory, "allergy.csv");
allergicreaction = openFileWriter(outputDirectory, "allergicreaction.csv");
allergycomment = openFileWriter(outputDirectory, "allergycomment.csv");
// Condition Data
problemlist = openFileWriter(outputDirectory, "problemlist.csv");
vdiagnosis = openFileWriter(outputDirectory, "vdiagnosis.csv");
// Medications Data
rxoutpatient = openFileWriter(outputDirectory, "rxoutpatient.csv");
rxoutpatfill = openFileWriter(outputDirectory, "rxoutpatfill.csv");
nonvamed = openFileWriter(outputDirectory, "nonvamed.csv");
cprsorder = openFileWriter(outputDirectory, "cprsorder.csv");
ordereditem = openFileWriter(outputDirectory, "ordereditem.csv");
// Diagnostic Report (i.e. Labs) Data
labchem = openFileWriter(outputDirectory, "labchem.csv");
labpanel = openFileWriter(outputDirectory, "labpanel.csv");
patientlabchem = openFileWriter(outputDirectory, "patientlabchem.csv");
vprocedure = openFileWriter(outputDirectory, "vprocedure.csv");
writeCSVHeaders();
} catch (IOException e) {
// wrap the exception in a runtime exception.
// the singleton pattern below doesn't work if the constructor can throw
// and if these do throw ioexceptions there's nothing we can do anyway
throw new RuntimeException(e);
}
}
private FileWriter openFileWriter(Path outputDirectory, String filename) throws IOException {
File file = outputDirectory.resolve(filename).toFile();
return new FileWriter(file);
}
/**
* Write the headers to each of the CSV files.
* @throws IOException if any IO error occurs
*/
private void writeCSVHeaders() throws IOException {
// Fact Tables
sstaff.setHeader("StaffSID,StaffName");
maritalStatus.setHeader("MaritalStatusSID,MaritalStatusCode");
sta3n.setHeader("Sta3n,Sta3nName,TimeZone");
location.setHeader("LocationSID,LocationName");
immunizationName.setHeader("ImmunizationNameSID,ImmunizationName,CVXCode,MaxInSeries");
reaction.setHeader("ReactionSID,Reaction,VUID");
providerNarrative.setHeader("ProviderNarrativeSID,ProviderNarrative");
localDrug.setHeader("LocalDrugSID,LocalDrugIEN,Sta3n,LocalDrugNameWithDose,"
+ "NationalDrugSID,NationalDrugNameWithDose,PharmacyOrderableItemSID");
nationalDrug.setHeader("NationalDrugSID,DrugNameWithDose,DosageFormSID,"
+ "InactivationDate,VUID");
dosageForm.setHeader("DosageFormSID,DosageFormIEN,DosageForm");
dosageForm.addFact("1", "1,Once per day."); // Default Dosage
pharmacyOrderableItem.setHeader("PharmacyOrderableItemSID,PharmacyOrderableItem,SupplyFlag");
orderableItem.setHeader("OrderableItemSID,OrderableItemName,IVBaseFlag,IVAdditiveFlag");
orderStatus.setHeader("OrderStatusSID,OrderStatus");
vistaPackage.setHeader("VistaPackageSID,VistaPackage");
collectionsample.setHeader("CollectionSampleSID,CollectionSample");
labchemtest.setHeader("LabChemTestSID,LabChemTestName,CollectionSampleSID");
topography.setHeader("TopographySID,Topography");
institution.setHeader("InstitutionSID,Sta3n,InstitutionName,InstitutionCode");
loinc.setHeader("LOINCSID,LOINC,Component");
// Patient Tables
lookuppatient.write("PatientSID,Sta3n,PatientIEN,PatientICN,PatientFullCN,"
+ "PatientName,TestPatient");
lookuppatient.write(NEWLINE);
spatient.write("PatientSID,PatientName,PatientLastName,PatientFirstName,PatientSSN,Age,"
+ "BirthDateTime,DeceasedFlag,DeathDateTime,Gender,SelfIdentifiedGender,Religion,"
+ "MaritalStatus,MaritalStatusSID,PatientEnteredDateTime");
spatient.write(NEWLINE);
spatientaddress.write("SPatientAddressSID,PatientSID,AddressType,NameOfContact,"
+ "RelationshipToPatient,StreetAddress1,StreetAddress2,StreetAddress3,"
+ "City,State,Zip,PostalCode,Country,GISMatchScore,GISStreetSide,"
+ "GISPatientAddressLongitude,GISPatientAddressLatitude,GISFIPSCode");
spatientaddress.write(NEWLINE);
spatientphone.write("SPatientPhoneSID,PatientSID,PatientContactType,NameOfContact,"
+ "RelationshipToPatient,PhoneNumber,WorkPhoneNumber,EmailAddress");
spatientphone.write(NEWLINE);
patientrace.write("PatientRaceSID,PatientSID,Race");
patientrace.write(NEWLINE);
patientethnicity.write("PatientEthnicitySID,PatientSID,Ethnicity");
patientethnicity.write(NEWLINE);
// Encounter Tables
consult.write("ConsultSID,ToRequestServiceSID");
consult.write(NEWLINE);
visit.write("VisitSID,VisitDateTime,CreatedByStaffSID,LocationSID,PatientSID");
visit.write(NEWLINE);
appointment.write("AppointmentSID,Sta3n,PatientSID,AppointmentDateTime,AppointmentMadeDate,"
+ "AppointmentTypeSID,AppointmentStatus,VisitSID,LocationSID,PurposeOfVisit,"
+ "SchedulingRequestType,FollowUpVisitFlag,LengthOfAppointment,ConsultSID,"
+ "CheckInDateTime,CheckOutDateTime");
appointment.write(NEWLINE);
inpatient.write("InpatientSID,PatientSID,AdmitDateTime");
inpatient.write(NEWLINE);
// Immunization Table
immunization.write("ImmunizationSID,ImmunizationIEN,Sta3n,PatientSID,ImmunizationNameSID,"
+ "Series,Reaction,VisitDateTime,ImmunizationDateTime,OrderingStaffSID,ImmunizingStaffSID,"
+ "VisitSID,ImmunizationComments,ImmunizationRemarks");
immunization.write(NEWLINE);
// Allergy Tables
allergy.write("AllergySID,AllergyIEN,Sta3n,PatientSID,AllergyType,AllergicReactant,"
+ "LocalDrugSID,DrugNameWithoutDoseSID,DrugClassSID,ReactantSID,DrugIngredientSID,"
+ "OriginationDateTime,OriginatingStaffSID,ObservedHistorical,Mechanism,VerifiedFlag,"
+ "VerificationDateTime,VerifyingStaffSID,EnteredInErrorFlag");
allergy.write(NEWLINE);
allergicreaction.write("AllergicReactionSID,AllergySID,AllergyIEN,Sta3n,ReactionSID");
allergicreaction.write(NEWLINE);
allergycomment.write("AllergyCommentSID,AllergySID,AllergyIEN,Sta3n,PatientSID,"
+ "OriginationDateTime,EnteringStaffSID,AllergyComment,CommentEnteredDateTime");
allergycomment.write(NEWLINE);
// Condition Tables
problemlist.write("ProblemListSID,Sta3n,ICD9SID,ICD10SID,PatientSID,ProviderNarrativeSID,"
+ "EnteredDateTime,OnsetDateTime,ProblemListCondition,RecordingProviderSID,"
+ "ResolvedDateTime,SNOMEDCTConceptCode");
problemlist.write(NEWLINE);
vdiagnosis.write("VDiagnosisSID,Sta3n,ICD9SID,ICD10SID,PatientSID,VisitSID,"
+ "VisitDateTime,VDiagnosisDateTime,ProviderNarrativeSID,ProblemListSID,"
+ "OrderingProviderSID,EncounterProviderSID");
vdiagnosis.write(NEWLINE);
// Medications Tables
rxoutpatient.write("RxOutpatSID,Sta3n,RxNumber,IssueDate,CancelDate,FinishingDateTime,"
+ "PatientSID,ProviderSID,EnteredByStaffSID,LocalDrugSID,NationalDrugSID,"
+ "PharmacyOrderableItemSID,MaxRefills,RxStatus,OrderedQuantity");
rxoutpatient.write(NEWLINE);
rxoutpatfill.write("RxOutpatFillSID,RxOutpatSID,Qty,DaysSupply");
rxoutpatfill.write(NEWLINE);
nonvamed.write("NonVAMedSID,PatientSID,NonVAMedIEN,Sta3n,LocalDrugSID,Dosage,"
+ "MedicationRoute,Schedule,NonVAMedStatus,CPRSOrderSID,StartDateTime,"
+ "DocumentedDateTime,NonVAMedComments");
nonvamed.write(NEWLINE);
cprsorder.write("CPRSOrderID,Sta3n,PatientSID,OrderStaffSID,EnteredByStaffSID,"
+ "EnteredDateTime,OrderStatusSID,VistaPackageSID,OrderStartDateTime,OrderStopDateTime,"
+ "PackageReference");
cprsorder.write(NEWLINE);
ordereditem.write("OrderedItemSID,CPRSOrderSID,OrderableItemSID");
ordereditem.write(NEWLINE);
// Diagnostic Report Tables
labchem.write("LabChemSID,Sta3n,LabPanelSID,LabChemTestSID,PatientSID,StaffSID,"
+ "LabChemSpecimenDateTime,LabChemResultValue,LOINCSID,Units,Abnormal,RefHigh,RefLow");
labchem.write(NEWLINE);
labpanel.write("LabPanelSID,LabPanelIEN,PatientSID");
labpanel.write(NEWLINE);
patientlabchem.write("LabChemSID,Sta3n,LabPanelSID,PatientSID,LabChemSpecimenDateTime,"
+ "LabChemCompleteDateTime,TopographySID,AccessionInstitutionSID");
patientlabchem.write(NEWLINE);
vprocedure.write("VProcedureSID,PatientSID,VisitSID,CPRSOrderSID");
vprocedure.write(NEWLINE);
}
/**
* Generate a list of practicing Clinicians.
* This is temporary until Provider organizations have associated
* Clinician agents.
*/
private void generateClinicians() {
Random random = new Random(999L);
for (int i = 0; i < CLINICIANS; i++) {
Person clinician = new Person(random.nextLong());
if (random.nextBoolean()) {
clinician.attributes.put(Person.GENDER, "M");
} else {
clinician.attributes.put(Person.GENDER, "F");
}
clinician.attributes.put(Person.RACE, "unknown");
clinician.attributes.put(Person.ETHNICITY, "unknown");
clinician.attributes.put(Person.FIRST_LANGUAGE, "English");
LifecycleModule.birth(clinician, 0L);
String name = "Dr. " + clinician.attributes.get(Person.FIRST_NAME);
name += " " + clinician.attributes.get(Person.LAST_NAME);
sstaff.addFact("" + i, clean(name));
}
}
private static class SingletonHolder {
/**
* Singleton instance of the CDWExporter.
*/
private static final CDWExporter instance = new CDWExporter();
}
/**
* Get the current instance of the CDWExporter.
* @return the current instance of the CDWExporter.
*/
public static CDWExporter getInstance() {
return SingletonHolder.instance;
}
/**
* Set the sequence generator key starting values.
* Useful to ensure states do not generate
* overlapping or colliding values.
* @param id The start of the sequence generators.
*/
public void setKeyStart(int id) {
sidStart = id;
// Dim tables have smaller key ranges: only a 2 byte integer -- max of 32K
id = (id / 10_000);
sstaff.setNextId(id);
generateClinicians();
maritalStatus.setNextId(id);
sta3n.setNextId(id);
location.setNextId(id);
// appointmentStatus.setNextId(id);
// appointmentType.setNextId(id);
immunizationName.setNextId(id);
reaction.setNextId(id);
providerNarrative.setNextId(id);
localDrug.setNextId(id);
nationalDrug.setNextId(id);
dosageForm.setNextId(id);
pharmacyOrderableItem.setNextId(id);
orderableItem.setNextId(id);
orderStatus.setNextId(id);
vistaPackage.setNextId(id);
collectionsample.setNextId(id);
labchemtest.setNextId(id);
topography.setNextId(id);
institution.setNextId(id);
loinc.setNextId(id);
}
/**
* Add a single Person's health record info to the CSV records.
* @param person Person to write record data for
* @param time Time the simulation ended
* @throws IOException if any IO error occurs
*/
public void export(Person person, long time) throws IOException {
// TODO Ignore civilians, only consider the veteran population.
// if (!person.attributes.containsKey("veteran")) {
// return;
int primarySta3n = -1;
Provider provider = person.getAmbulatoryProvider(time);
if (provider != null) {
String state = Location.getStateName(provider.state);
String tz = Location.getTimezoneByState(state);
primarySta3n = sta3n.addFact(provider.id, clean(provider.name) + "," + tz);
location.addFact(provider.id, clean(provider.name));
}
int personID = patient(person, primarySta3n, time);
for (Encounter encounter : person.record.encounters) {
int encounterID = encounter(personID, person, encounter, primarySta3n);
for (HealthRecord.Entry condition : encounter.conditions) {
condition(personID, encounterID, encounter, condition, primarySta3n);
}
for (HealthRecord.Entry allergy : encounter.allergies) {
allergy(personID, person, encounterID, encounter, allergy, primarySta3n);
}
for (HealthRecord.Report report : encounter.reports) {
// Ignore death certificates
if (!DeathModule.DEATH_CERTIFICATE.equals(report.codes.get(0))) {
report(personID, encounterID, encounter, primarySta3n, report);
}
}
for (Observation observation : encounter.observations) {
observation(personID, encounterID, observation);
}
for (Procedure procedure : encounter.procedures) {
procedure(personID, encounterID, procedure);
}
for (Medication medication : encounter.medications) {
medication(personID, encounterID, encounter, medication, primarySta3n);
}
for (Immunization immunization : encounter.immunizations) {
immunization(personID, person, encounterID, encounter, immunization, primarySta3n);
}
for (CarePlan careplan : encounter.careplans) {
careplan(personID, encounterID, careplan);
}
for (ImagingStudy imagingStudy : encounter.imagingStudies) {
imagingStudy(personID, encounterID, imagingStudy);
}
}
// Patient Data
lookuppatient.flush();
spatient.flush();
spatientaddress.flush();
spatientphone.flush();
patientrace.flush();
patientethnicity.flush();
// Encounter Data
consult.flush();
visit.flush();
appointment.flush();
inpatient.flush();
// Immunization Data
immunization.flush();
// Allergy Data
allergy.flush();
allergicreaction.flush();
allergycomment.flush();
// Condition Data
problemlist.flush();
vdiagnosis.flush();
}
/**
* Fact Tables should only be written after all patients have completed export.
*/
public void writeFactTables() {
try {
File output = Exporter.getOutputFolder("cdw", null);
output.mkdirs();
Path outputDirectory = output.toPath();
sstaff.write(openFileWriter(outputDirectory, "sstaff.csv"));
maritalStatus.write(openFileWriter(outputDirectory, "maritalstatus.csv"));
sta3n.write(openFileWriter(outputDirectory, "sta3n.csv"));
location.write(openFileWriter(outputDirectory, "location.csv"));
immunizationName.write(openFileWriter(outputDirectory, "immunizationname.csv"));
reaction.write(openFileWriter(outputDirectory, "reaction.csv"));
providerNarrative.write(openFileWriter(outputDirectory, "providernarrative.csv"));
localDrug.write(openFileWriter(outputDirectory, "localdrug.csv"));
nationalDrug.write(openFileWriter(outputDirectory, "nationaldrug.csv"));
dosageForm.write(openFileWriter(outputDirectory, "dosageform.csv"));
pharmacyOrderableItem.write(openFileWriter(outputDirectory, "pharmacyorderableitem.csv"));
orderableItem.write(openFileWriter(outputDirectory, "orderableitem.csv"));
orderStatus.write(openFileWriter(outputDirectory, "orderstatus.csv"));
vistaPackage.write(openFileWriter(outputDirectory, "vistapackage.csv"));
collectionsample.write(openFileWriter(outputDirectory, "collectionsample.csv"));
labchemtest.write(openFileWriter(outputDirectory, "labchemtest.csv"));
topography.write(openFileWriter(outputDirectory, "topography.csv"));
institution.write(openFileWriter(outputDirectory, "institution.csv"));
loinc.write(openFileWriter(outputDirectory, "loinc.csv"));
} catch (IOException e) {
// wrap the exception in a runtime exception.
// if these writes throw ioexceptions there's nothing we can do anyway
throw new RuntimeException(e);
}
}
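// A minimal lifecycle sketch with hypothetical driver code, following the
// ordering constraint documented above (fact tables are written only after all
// patients have been exported):
//
//   CDWExporter cdw = CDWExporter.getInstance();
//   cdw.setKeyStart(1_000_000);       // example starting key, chosen arbitrarily
//   for (Person p : population) {     // 'population' is assumed to exist in the caller
//     cdw.export(p, stopTime);
//   }
//   cdw.writeFactTables();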
/**
* Record a Patient.
*
* @param person Person to write data for
* @param sta3n The primary station ID for this patient
* @param time Time the simulation ended, to calculate age/deceased status
* @return the patient's ID, to be referenced as a "foreign key" if necessary
* @throws IOException if any IO error occurs
*/
private int patient(Person person, int sta3n, long time) throws IOException {
// Generate full name and ID
StringBuilder s = new StringBuilder();
if (person.attributes.containsKey(Person.NAME_PREFIX)) {
s.append(person.attributes.get(Person.NAME_PREFIX)).append(' ');
}
s.append(person.attributes.get(Person.FIRST_NAME)).append(' ');
s.append(person.attributes.get(Person.LAST_NAME));
if (person.attributes.containsKey(Person.NAME_SUFFIX)) {
s.append(' ').append(person.attributes.get(Person.NAME_SUFFIX));
}
String patientName = s.toString();
int personID = getNextKey(spatient);
// lookuppatient.write("PatientSID,Sta3n,PatientIEN,PatientICN,PatientFullCN,"
// + "PatientName,TestPatient");
s.setLength(0);
s.append(personID).append(',');
s.append(sta3n).append(',');
s.append(personID).append(',');
s.append(personID).append(',');
s.append(personID).append(',');
s.append(patientName).append(",1");
s.append(NEWLINE);
write(s.toString(), lookuppatient);
// spatient.write("PatientSID,PatientName,PatientLastName,PatientFirstName,PatientSSN,Age,"
// + "BirthDateTime,DeceasedFlag,DeathDateTime,Gender,SelfIdentifiedGender,Religion,"
// + "MaritalStatus,MaritalStatusSID,PatientEnteredDateTime");
s.setLength(0);
s.append(personID).append(',');
s.append(patientName);
s.append(',').append(clean((String) person.attributes.getOrDefault(Person.LAST_NAME, "")));
s.append(',').append(clean((String) person.attributes.getOrDefault(Person.FIRST_NAME, "")));
s.append(',').append(clean((String) person.attributes.getOrDefault(Person.IDENTIFIER_SSN, "")));
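// Age is reported as of the end of the simulation for living patients,
// or as of the time of death for deceased patients.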
boolean alive = person.alive(time);
int age = 0;
if (alive) {
age = person.ageInYears(time);
} else {
age = person.ageInYears(person.events.event(Event.DEATH).time);
}
s.append(',').append(age);
s.append(',').append(iso8601Timestamp((long) person.attributes.get(Person.BIRTHDATE)));
if (alive) {
s.append(',').append('N').append(',');
} else {
s.append(',').append('Y');
s.append(',').append(iso8601Timestamp(person.events.event(Event.DEATH).time));
}
if (person.attributes.get(Person.GENDER).equals("M")) {
s.append(",M,Male");
} else {
s.append(",F,Female");
}
s.append(",None"); // Religion
// Currently there are no divorces or widows
String marital = ((String) person.attributes.get(Person.MARITAL_STATUS));
if (marital != null) {
if (marital.equals("M")) {
s.append(",Married");
} else {
marital = "N";
s.append(",Never Married");
}
} else {
marital = "U";
s.append(",Unknown");
}
s.append(',').append(maritalStatus.addFact(marital, marital));
// TODO Need an enlistment date or date they became a veteran.
s.append(',').append(iso8601Timestamp(time - Utilities.convertTime("years", 10)));
s.append(NEWLINE);
write(s.toString(), spatient);
// spatientaddress.write("SPatientAddressSID,PatientSID,AddressType,NameOfContact,"
// + "RelationshipToPatient,StreetAddress1,StreetAddress2,StreetAddress3,"
// + "City,State,Zip,PostalCode,Country,GISMatchScore,GISStreetSide,"
// + "GISPatientAddressLongitude,GISPatientAddressLatitude,GISFIPSCode");
s.setLength(0);
s.append(getNextKey(spatientaddress)).append(',');
s.append(personID).append(',');
s.append("Legal Residence").append(',');
s.append(person.attributes.get(Person.FIRST_NAME)).append(' ');
s.append(person.attributes.get(Person.LAST_NAME)).append(',');
s.append("Self").append(',');
s.append(person.attributes.get(Person.ADDRESS)).append(",,,");
s.append(person.attributes.get(Person.CITY)).append(',');
s.append(person.attributes.get(Person.STATE)).append(',');
s.append(person.attributes.get(Person.ZIP)).append(',');
s.append(person.attributes.get(Person.ZIP)).append(",USA,,,");
DirectPosition2D coord = (DirectPosition2D) person.attributes.get(Person.COORDINATE);
if (coord != null) {
s.append(coord.x).append(',').append(coord.y).append(',');
} else {
s.append(",,");
}
s.append(NEWLINE);
write(s.toString(), spatientaddress);
//spatientphone.write("SPatientPhoneSID,PatientSID,PatientContactType,NameOfContact,"
// + "RelationshipToPatient,PhoneNumber,WorkPhoneNumber,EmailAddress");
s.setLength(0);
s.append(getNextKey(spatientphone)).append(',');
s.append(personID).append(',');
s.append("Patient Cell Phone").append(',');
s.append(person.attributes.get(Person.FIRST_NAME)).append(' ');
s.append(person.attributes.get(Person.LAST_NAME)).append(',');
s.append("Self").append(',');
s.append(person.attributes.get(Person.TELECOM)).append(",,");
s.append(NEWLINE);
write(s.toString(), spatientphone);
if (person.random.nextBoolean()) {
// Add an email address
s.setLength(0);
s.append(getNextKey(spatientphone)).append(',');
s.append(personID).append(',');
s.append("Patient Email").append(',');
s.append(person.attributes.get(Person.FIRST_NAME)).append(' ');
s.append(person.attributes.get(Person.LAST_NAME)).append(',');
s.append("Self").append(',');
s.append(",,");
s.append(person.attributes.get(Person.FIRST_NAME)).append('.');
s.append(person.attributes.get(Person.LAST_NAME)).append("@email.example");
s.append(NEWLINE);
write(s.toString(), spatientphone);
}
//patientrace.write("PatientRaceSID,PatientSID,Race");
String race = (String) person.attributes.get(Person.RACE);
if (race.equals("white")) {
race = "WHITE NOT OF HISP ORIG";
} else if (race.equals("hispanic")) {
race = "WHITE";
} else if (race.equals("black")) {
race = "BLACK OR AFRICAN AMERICAN";
} else if (race.equals("asian")) {
race = "ASIAN";
} else if (race.equals("native")) {
if (person.attributes.get(Person.STATE).equals("Hawaii")) {
race = "NATIVE HAWAIIAN OR OTHER PACIFIC ISLANDER";
} else {
race = "AMERICAN INDIAN OR ALASKA NATIVE";
}
} else { // race.equals("other")
race = "ASIAN";
}
s.setLength(0);
s.append(getNextKey(patientrace)).append(',');
s.append(personID).append(',');
s.append(race);
s.append(NEWLINE);
write(s.toString(), patientrace);
//patientethnicity.write("PatientEthnicitySID,PatientSID,Ethnicity");
s.setLength(0);
s.append(getNextKey(patientethnicity)).append(',');
s.append(personID).append(',');
race = (String) person.attributes.get(Person.RACE);
if (race.equals("hispanic")) {
s.append("HISPANIC OR LATINO");
} else {
s.append("NOT HISPANIC OR LATINO");
}
s.append(NEWLINE);
write(s.toString(), patientethnicity);
return personID;
}
/**
* Write a single Encounter to the tables.
*
* @param personID The ID of the person that had this encounter
* @param person The person attending the encounter
* @param encounter The encounter itself
* @param primarySta3n The primary home sta3n for the patient
* @return The encounter ID, to be referenced as a "foreign key" if necessary
* @throws IOException if any IO error occurs
*/
private int encounter(int personID, Person person, Encounter encounter, int primarySta3n)
throws IOException {
StringBuilder s = new StringBuilder();
// consult.write("ConsultSID,ToRequestServiceSID");
int consultSid = getNextKey(consult);
s.append(consultSid).append(',').append(consultSid).append(NEWLINE);
write(s.toString(), consult);
// visit.write("VisitSID,VisitDateTime,CreatedByStaffSID,LocationSID,PatientSID");
int visitSid = getNextKey(visit);
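// Pick a pseudo-random clinician from the generated pool; the (sidStart / 10_000)
// offset keeps the staff SID within the sstaff fact table's key range.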
int staffSid = person.randInt(CLINICIANS) + (sidStart / 10_000);
if (encounter.provider != null) {
encounter.provider.attributes.put(CLINICIAN_SID, staffSid);
}
s.setLength(0);
s.append(visitSid).append(',');
s.append(iso8601Timestamp(encounter.start)).append(',');
s.append(staffSid).append(','); // CreatedByStaffID
Integer locationSid = null;
if (encounter.provider != null) {
locationSid = location.addFact(encounter.provider.id, clean(encounter.provider.name));
s.append(locationSid).append(',');
} else {
s.append(primarySta3n).append(',');
}
s.append(personID);
s.append(NEWLINE);
write(s.toString(), visit);
// appointment.write("AppointmentSID,Sta3n,PatientSID,AppointmentDateTime,AppointmentMadeDate,"
// + "AppointmentTypeSID,AppointmentStatus,VisitSID,LocationSID,PurposeOfVisit,"
// + "SchedulingRequestType,FollowUpVisitFlag,LengthOfAppointment,ConsultSID,"
// + "CheckInDateTime,CheckOutDateTime");
s.setLength(0);
s.append(getNextKey(appointment)).append(',');
if (encounter.provider != null) {
String state = Location.getStateName(encounter.provider.state);
String tz = Location.getTimezoneByState(state);
s.append(sta3n.addFact(encounter.provider.id, clean(encounter.provider.name) + "," + tz));
} else {
s.append(primarySta3n);
}
s.append(',');
s.append(personID).append(',');
s.append(iso8601Timestamp(encounter.start)).append(',');
s.append(iso8601Timestamp(encounter.start)).append(',');
s.append(",,"); // skip: AppointmentTypeSID, AppointmentStatus
s.append(visitSid).append(',');
if (locationSid != null) {
s.append(locationSid).append(',');
} else {
s.append(primarySta3n).append(",");
}
s.append("3,"); // 3:SCHEDULED VISIT
s.append(person.rand(new String[] {"N", "C", "P", "W", "M", "A", "O"})).append(',');
s.append(person.randInt(1)).append(',');
s.append((encounter.stop - encounter.start) / (60 * 1000)).append(',');
s.append(consultSid).append(',');
s.append(iso8601Timestamp(encounter.start)).append(',');
s.append(iso8601Timestamp(encounter.stop)).append(NEWLINE);
write(s.toString(), appointment);
if (encounter.type.equalsIgnoreCase(EncounterType.INPATIENT.toString())) {
// inpatient.write("InpatientSID,PatientSID,AdmitDateTime");
s.setLength(0);
s.append(getNextKey(inpatient)).append(',');
s.append(personID).append(',');
s.append(iso8601Timestamp(encounter.start)).append(NEWLINE);
write(s.toString(), inpatient);
}
return visitSid;
}
/**
* Write a single Condition to the tables.
*
* @param personID ID of the person that has the condition.
* @param encounterID ID of the encounter where the condition was diagnosed
* @param encounter The encounter
* @param condition The condition itself
* @param primarySta3n The primary home sta3n for the patient
* @throws IOException if any IO error occurs
*/
private void condition(int personID, int encounterID, Encounter encounter,
Entry condition, int primarySta3n) throws IOException {
StringBuilder s = new StringBuilder();
Integer sta3nValue = null;
Integer providerSID = (sidStart / 10_000);
if (encounter.provider != null) {
String state = Location.getStateName(encounter.provider.state);
String tz = Location.getTimezoneByState(state);
sta3nValue = sta3n.addFact(encounter.provider.id, clean(encounter.provider.name) + "," + tz);
providerSID = (Integer) encounter.provider.attributes.get(CLINICIAN_SID);
}
Code code = condition.codes.get(0);
int snomedSID = providerNarrative.addFact(code.code, clean(code.display));
// problemlist.write("ProblemListSID,Sta3n,ICD9SID,ICD10SID,PatientSID,ProviderNarrativeSID,"
// + "EnteredDateTime,OnsetDateTime,ProblemListCondition,RecordingProviderSID,"
// + "ResolvedDateTime,SNOMEDCTConceptCode");
int problemListSid = getNextKey(problemlist);
s.append(problemListSid).append(',');
if (sta3nValue != null) {
s.append(sta3nValue);
} else {
s.append(primarySta3n);
}
s.append(',');
s.append(",,"); // skip icd 9 and icd 10
s.append(personID).append(',');
s.append(snomedSID).append(','); // snomed display is jammed into narrative.
s.append(iso8601Timestamp(encounter.start)).append(',');
s.append(iso8601Timestamp(condition.start)).append(',');
s.append("P,");
s.append(providerSID).append(','); // RecordingProviderSID
if (condition.stop != 0L) {
s.append(iso8601Timestamp(condition.stop));
}
s.append(',');
s.append(code.code);
s.append(NEWLINE);
write(s.toString(), problemlist);
// vdiagnosis.write("VDiagnosisSID,Sta3n,ICD9SID,ICD10SID,PatientSID,VisitSID,"
// + "VisitDateTime,VDiagnosisDateTime,ProviderNarrativeSID,ProblemListSID,"
// + "OrderingProviderSID,EncounterProviderSID");
s.setLength(0);
s.append(getNextKey(vdiagnosis));
s.append(',');
if (sta3nValue != null) {
s.append(sta3nValue);
} else {
s.append(primarySta3n);
}
s.append(',');
s.append(",,"); // skip icd 9 and icd 10
s.append(personID).append(',');
s.append(encounterID).append(',');
s.append(iso8601Timestamp(encounter.start)).append(',');
s.append(iso8601Timestamp(condition.start)).append(',');
s.append(snomedSID).append(','); // snomed display is jammed into narrative.
s.append(problemListSid).append(',');
s.append(providerSID).append(','); // OrderingProviderSID
s.append(providerSID); // EncounterProviderSID
s.append(NEWLINE);
write(s.toString(), vdiagnosis);
}
/**
* Write a single Allergy to the tables.
*
* @param personID ID of the person that has the allergy.
* @param person The person
* @param encounterID ID of the encounter where the allergy was diagnosed
* @param encounter The encounter
* @param allergyEntry The allergy itself
* @param primarySta3n The primary home sta3n for the patient
* @throws IOException if any IO error occurs
*/
private void allergy(int personID, Person person, int encounterID, Encounter encounter,
Entry allergyEntry, int primarySta3n) throws IOException {
StringBuilder s = new StringBuilder();
Integer sta3nValue = null;
Integer providerSID = (sidStart / 10_000);
if (encounter.provider != null) {
String state = Location.getStateName(encounter.provider.state);
String tz = Location.getTimezoneByState(state);
sta3nValue = sta3n.addFact(encounter.provider.id, clean(encounter.provider.name) + "," + tz);
providerSID = (Integer) encounter.provider.attributes.get(CLINICIAN_SID);
}
Code code = allergyEntry.codes.get(0);
boolean food = code.display.matches(".*(nut|peanut|milk|dairy|eggs|shellfish|wheat).*");
// allergy.write("AllergySID,AllergyIEN,Sta3n,PatientSID,AllergyType,AllergicReactant,"
// + "LocalDrugSID,DrugNameWithoutDoseSID,DrugClassSID,ReactantSID,DrugIngredientSID,"
// + "OriginationDateTime,OriginatingStaffSID,ObservedHistorical,Mechanism,VerifiedFlag,"
// + "VerificatiionDateTime,VerifyingStaffSID,EnteredInErrorFlag");
int allergySID = getNextKey(allergy);
s.append(allergySID).append(',');
s.append(allergySID).append(',');
if (encounter.provider != null) {
s.append(sta3nValue);
} else {
s.append(primarySta3n);
}
s.append(',');
s.append(personID).append(',');
if (food) {
s.append('F').append(','); // F: Food allergy
} else {
s.append('O').append(','); // O: Other
}
s.append(clean(code.display)).append(','); // AllergicReactant
s.append(','); // LocalDrugSID
s.append(','); // DrugNameWithoutDoseSID
s.append(','); // DrugClassSID
s.append(','); // ReactantSID
s.append(','); // DrugIngredientSID
s.append(iso8601Timestamp(allergyEntry.start)).append(',');
s.append(providerSID).append(','); // OriginatingStaffSID
s.append(person.rand(new String[] {"o", "h"})).append(',');
s.append("A,");
s.append("1,"); // Verified
s.append(iso8601Timestamp(allergyEntry.start)).append(',');
s.append(providerSID).append(','); // VerifyingStaffSID
s.append(',');
s.append(NEWLINE);
write(s.toString(), allergy);
// allergyreaction.write("AllergicReactionSID,AllergySID,AllergyIEN,Sta3n,ReactionSID");
String reactionDisplay = person.rand(
new String[] {"Sneezing and Coughing", "Inflammation of Skin",
"Itchy Watery Eyes", "Difficulty Breathing"});
s.setLength(0);
int allergyreactionSID = getNextKey(allergicreaction);
s.append(allergyreactionSID).append(',');
s.append(allergySID).append(',');
s.append(allergySID).append(',');
if (encounter.provider != null) {
s.append(sta3nValue);
} else {
s.append(primarySta3n);
}
s.append(',');
s.append(reaction.addFact(reactionDisplay, reactionDisplay + "," + allergyreactionSID));
s.append(NEWLINE);
write(s.toString(), allergicreaction);
// allergycomment.write("AllergyCommentSID,AllergySID,AllergyIEN,Sta3n,PatientSID,"
// + "OriginationDateTime,EnteringStaffSID,AllergyComment,CommentEnteredDateTime");
s.setLength(0);
int allergyCommentSid = getNextKey(allergycomment);
s.append(allergyCommentSid).append(',');
s.append(allergySID).append(',');
s.append(allergySID).append(',');
if (encounter.provider != null) {
s.append(sta3nValue);
} else {
s.append(primarySta3n);
}
s.append(',');
s.append(personID).append(',');
s.append(iso8601Timestamp(allergyEntry.start)).append(',');
s.append(providerSID).append(','); // EnteringStaffSID
s.append(clean(code.display)).append(',');
s.append(iso8601Timestamp(allergyEntry.start));
s.append(NEWLINE);
write(s.toString(), allergycomment);
}
/**
* Write a DiagnosticReport to the tables.
*
* @param personID The ID of the person that had this encounter
* @param encounterID The ID of the encounter
* @param encounter The encounter
* @param primarySta3n The primary home sta3n for the patient
* @param report The diagnostic lab report
* @throws IOException if any IO error occurs
*/
private void report(int personID, int encounterID, Encounter encounter,
int primarySta3n, Report report) throws IOException {
StringBuilder s = new StringBuilder();
Integer sta3nValue = null;
Integer providerSID = (sidStart / 10_000);
if (encounter.provider != null) {
String state = Location.getStateName(encounter.provider.state);
String tz = Location.getTimezoneByState(state);
sta3nValue = sta3n.addFact(encounter.provider.id, clean(encounter.provider.name) + "," + tz);
providerSID = (Integer) encounter.provider.attributes.get(CLINICIAN_SID);
}
Code code = report.codes.get(0);
// cprsorder.write("CPRSOrderID,Sta3n,PatientSID,OrderStaffSID,EnteredByStaffSID,"
// + "EnteredDateTime,OrderStatusSID,VistaPackageSID,OrderStartDateTime,OrderStopDateTime,"
// + "PackageReference");
int cprsSID = getNextKey(cprsorder);
s.setLength(0);
s.append(cprsSID).append(',');
if (sta3nValue != null) {
s.append(sta3nValue);
} else {
s.append(primarySta3n);
}
s.append(',');
s.append(personID).append(',');
s.append(providerSID).append(","); // OrderStaffSID
s.append(providerSID).append(","); // EnteredByStaffSID
s.append(iso8601Timestamp(report.start)).append(',');
int orderStatusSID = orderStatus.addFact("COMPLETED", "COMPLETED");
s.append(orderStatusSID).append(',');
int vistaPackageSID = vistaPackage.addFact("DIAGNOSTIC LABORATORY", "DIAGNOSTIC LABORATORY");
s.append(vistaPackageSID).append(',');
s.append(iso8601Timestamp(report.start)).append(',');
if (report.stop != 0L) {
s.append(iso8601Timestamp(report.stop));
}
s.append(',');
s.append(clean(code.display));
s.append(NEWLINE);
write(s.toString(), cprsorder);
// orderableItem.setHeader("OrderableItemSID,OrderableItemName,IVBaseFlag,IVAdditiveFlag");
int orderableItemSID = orderableItem.addFact(code.code, clean(code.display) + ",0,0");
// ordereditem.write("OrderedItemSID,CPRSOrderSID,OrderableItemSID");
s.setLength(0);
s.append(cprsSID).append(",");
s.append(cprsSID).append(",");
s.append(orderableItemSID).append(NEWLINE);
write(s.toString(), ordereditem);
// vprocedure.write("VProcedureSID,PatientSID,VisitSID,CPRSOrderSID");
s.setLength(0);
s.append(getNextKey(vprocedure)).append(',');
s.append(personID).append(',');
s.append(encounterID).append(',');
s.append(cprsSID);
s.append(NEWLINE);
write(s.toString(), vprocedure);
// loinc.setHeader("LOINCSID,LOINC,Component");
loinc.addFact(code.code, code.code + "," + clean(code.display));
// labpanel.write("LabPanelSID,LabPanelIEN,PatientSID");
s.setLength(0);
int labpanelSID = getNextKey(labpanel);
s.append(labpanelSID).append(',');
s.append(clean(code.display)).append(',');
s.append(personID);
s.append(NEWLINE);
write(s.toString(), labpanel);
// collectionsample.setHeader("CollectionSampleSID,CollectionSample");
int sampleSID = collectionsample.addFact(code.code, clean(code.display) + " Sample");
// labchemtest.setHeader("LabChemTestSID,LabChemTestName,CollectionSampleSID");
int labchemtestSID = labchemtest.addFact(code.code, clean(code.display) + "," + sampleSID);
// labchem.write("LabChemSID,Sta3n,LabPanelSID,LabChemTestSID,PatientSID,StaffSID,"
// + "LabChemSpecimenDateTime,LabChemResultValue,LOINCSID,Units,Abnormal,RefHigh,RefLow");
for (Observation observation : report.observations) {
int labchemSID = getNextKey(labchem);
if (observation.value == null && !observation.observations.isEmpty()) {
System.out.println("Fuck");
}
s.setLength(0);
s.append(labchemSID).append(',');
if (sta3nValue != null) {
s.append(sta3nValue);
} else {
s.append(primarySta3n);
}
s.append(',');
s.append(labpanelSID).append(',');
s.append(labchemtestSID).append(',');
s.append(personID).append(',');
s.append(providerSID).append(","); // StaffSID
s.append(iso8601Timestamp(observation.start)).append(',');
s.append(observation.value).append(',');
Code obscode = observation.codes.get(0);
int loincSID = loinc.addFact(obscode.code, obscode.code + "," + clean(obscode.display));
s.append(loincSID).append(',');
s.append(observation.unit).append(',');
s.append(','); // Abnormal
s.append(','); // RefHigh, RefLow
s.append(NEWLINE);
write(s.toString(), labchem);
}
// institution.setHeader("InstitutionSID,Sta3n,InstitutionName,InstitutionCode");
int institutionSID = 0;
s.setLength(0);
if (sta3nValue != null) {
s.append(sta3nValue).append(',');
s.append("Diagnostic Laboratory").append(',');
s.append(sta3nValue);
institutionSID = institution.addFact("" + sta3nValue, s.toString());
} else {
s.append(primarySta3n).append(',');
s.append("Diagnostic Laboratory").append(',');
s.append(primarySta3n);
institutionSID = institution.addFact("" + primarySta3n, s.toString());
}
// topography.setHeader("TopographySID,Topography");
int topographySID = topography.addFact(code.code, clean(code.display) + " Specimen");
// patientlabchem.write("LabChemSID,Sta3n,LabPanelSID,PatientSID,LabChemSpecimenDateTime,"
// + "LabChemCompleteDateTime,TopographySID,AccessionInstitutionSID");
s.setLength(0);
int patientlabchemSID = getNextKey(patientlabchem);
s.append(patientlabchemSID).append(',');
if (sta3nValue != null) {
s.append(sta3nValue);
} else {
s.append(primarySta3n);
}
s.append(',');
s.append(labpanelSID).append(',');
s.append(personID).append(',');
s.append(iso8601Timestamp(report.start)).append(',');
if (report.stop != 0L) {
s.append(iso8601Timestamp(report.stop));
}
s.append(',');
s.append(topographySID).append(',');
s.append(institutionSID);
s.append(NEWLINE);
write(s.toString(), patientlabchem);
}
/**
* Write a single Observation to observations.csv.
*
* @param personID ID of the person to whom the observation applies.
* @param encounterID ID of the encounter where the observation was taken
* @param observation The observation itself
* @throws IOException if any IO error occurs
*/
private void observation(int personID, int encounterID,
Observation observation) throws IOException {
if (observation.value == null) {
if (observation.observations != null && !observation.observations.isEmpty()) {
// just loop through the child observations
for (Observation subObs : observation.observations) {
observation(personID, encounterID, subObs);
}
}
// no value so nothing more to report here
return;
}
// DATE,PATIENT,ENCOUNTER,CODE,DESCRIPTION,VALUE,UNITS
StringBuilder s = new StringBuilder();
s.append(dateFromTimestamp(observation.start)).append(',');
s.append(personID).append(',');
s.append(encounterID).append(',');
Code coding = observation.codes.get(0);
s.append(coding.code).append(',');
s.append(clean(coding.display)).append(',');
String value = ExportHelper.getObservationValue(observation);
String type = ExportHelper.getObservationType(observation);
s.append(value).append(',');
s.append(observation.unit).append(',');
s.append(type);
s.append(NEWLINE);
//write(s.toString(), observations);
}
/**
* Write a single Procedure to procedures.csv.
*
* @param personID ID of the person on whom the procedure was performed.
* @param encounterID ID of the encounter where the procedure was performed
* @param procedure The procedure itself
* @throws IOException if any IO error occurs
*/
private void procedure(int personID, int encounterID,
Procedure procedure) throws IOException {
// DATE,PATIENT,ENCOUNTER,CODE,DESCRIPTION,COST,REASONCODE,REASONDESCRIPTION
StringBuilder s = new StringBuilder();
s.append(dateFromTimestamp(procedure.start)).append(',');
s.append(personID).append(',');
s.append(encounterID).append(',');
Code coding = procedure.codes.get(0);
s.append(coding.code).append(',');
s.append(clean(coding.display)).append(',');
s.append(String.format("%.2f", procedure.cost())).append(',');
if (procedure.reasons.isEmpty()) {
s.append(','); // reason code & desc
} else {
Code reason = procedure.reasons.get(0);
s.append(reason.code).append(',');
s.append(clean(reason.display));
}
s.append(NEWLINE);
//write(s.toString(), procedures);
}
/**
* Write a single Medication to the tables.
*
* @param personID ID of the person prescribed the medication.
* @param encounterID ID of the encounter where the medication was prescribed
* @param encounter The encounter
* @param medication The medication itself
* @param primarySta3n The primary home sta3n for the patient
* @throws IOException if any IO error occurs
*/
private void medication(int personID, int encounterID, Encounter encounter,
Medication medication, int primarySta3n) throws IOException {
StringBuilder s = new StringBuilder();
Integer sta3nValue = null;
Integer providerSID = (sidStart / 10_000);
if (encounter.provider != null) {
String state = Location.getStateName(encounter.provider.state);
String tz = Location.getTimezoneByState(state);
sta3nValue = sta3n.addFact(encounter.provider.id, clean(encounter.provider.name) + "," + tz);
providerSID = (Integer) encounter.provider.attributes.get(CLINICIAN_SID);
}
Code code = medication.codes.get(0);
// pharmacyOrderableItem ("PharmacyOrderableItemSID,PharmacyOrderableItem,SupplyFlag");
int pharmSID = pharmacyOrderableItem.addFact(code.code, clean(code.display) + ",1");
// orderableItem ("OrderableItemSID,OrderableItemName,IVBaseFlag,IVAdditiveFlag");
int orderSID = orderableItem.addFact(code.code, clean(code.display) + ",0,0");
// dosageForm.setHeader("DosageFormSID,DosageFormIEN,DosageForm");
Integer dosageSID = 1; // Default Dosage SID
if (medication.prescriptionDetails != null
&& medication.prescriptionDetails.has("dosage")) {
JsonObject dosage = medication.prescriptionDetails.get("dosage").getAsJsonObject();
s.setLength(0);
s.append(dosage.get("amount").getAsInt());
s.append(" dose(s) ");
s.append(dosage.get("frequency").getAsInt());
s.append(" time(s) per ");
s.append(dosage.get("period").getAsInt());
s.append(" ");
s.append(dosage.get("unit").getAsString());
dosageSID = dosageForm.addFact(code.code, pharmSID + "," + s.toString());
}
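// e.g. hypothetical prescription details {amount: 1, frequency: 2, period: 8, unit: "hours"}
// produce the dosage text "1 dose(s) 2 time(s) per 8 hours" stored in the dosageForm fact.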
// nationalDrug.setHeader("NationalDrugSID,DrugNameWithDose,DosageFormSID,"
// + "InactivationDate,VUID");
s.setLength(0);
s.append(clean(code.display));
s.append(',');
s.append(dosageSID);
s.append(",,");
s.append(code.code);
int ndrugSID = nationalDrug.addFact(code.code, s.toString());
// localDrug.setHeader("LocalDrugSID,LocalDrugIEN,Sta3n,LocalDrugNameWithDose,"
// + "NationalDrugSID,NationalDrugNameWithDose,PharmacyOrderableItemSID");
s.setLength(0);
s.append(ndrugSID).append(',');
if (sta3nValue != null) {
s.append(sta3nValue);
} else {
s.append(primarySta3n);
}
s.append(',');
s.append(clean(code.display)).append(',');
s.append(ndrugSID).append(',');
s.append(clean(code.display)).append(',');
s.append(pharmSID);
int ldrugSID = localDrug.addFact(code.code, s.toString());
// rxoutpatient.write("RxOutpatSID,Sta3n,RxNumber,IssueDate,CancelDate,FinishingDateTime,"
// + "PatientSID,ProviderSID,EnteredByStaffSID,LocalDrugSID,NationalDrugSID,"
// + "PharmacyOrderableItemSID,MaxRefills,RxStatus,OrderedQuantity");
s.setLength(0);
int rxNum = getNextKey(rxoutpatient);
s.append(rxNum).append(',');
if (sta3nValue != null) {
s.append(sta3nValue);
} else {
s.append(primarySta3n);
}
s.append(',');
s.append(rxNum).append(',');
s.append(iso8601Timestamp(medication.start)).append(',');
if (medication.stop != 0L) {
s.append(iso8601Timestamp(medication.stop));
}
s.append(',');
if (medication.prescriptionDetails != null
&& medication.prescriptionDetails.has("duration")) {
JsonObject duration = medication.prescriptionDetails.get("duration").getAsJsonObject();
long time = Utilities.convertTime(
duration.get("unit").getAsString(), duration.get("quantity").getAsLong());
s.append(iso8601Timestamp(medication.start + time));
}
s.append(',');
s.append(personID).append(',');
s.append(providerSID).append(","); // Provider
s.append(providerSID).append(","); // Entered by staff
s.append(ldrugSID).append(',');
s.append(ndrugSID).append(',');
s.append(pharmSID).append(',');
if (medication.prescriptionDetails != null
&& medication.prescriptionDetails.has("refills")) {
s.append(medication.prescriptionDetails.get("refills").getAsInt());
}
s.append(',');
if (medication.stop == 0L) {
s.append("ACTIVE,");
} else {
s.append("EXPIRED,");
}
s.append(NEWLINE);
write(s.toString(), rxoutpatient);
// rxoutpatfill.write("RxOutpatFillSID,RxOutpatSID,Qty,DaysSupply");
s.setLength(0);
s.append(rxNum).append(',');
s.append(rxNum).append(',');
s.append("1,30");
s.append(NEWLINE);
write(s.toString(), rxoutpatfill);
// cprsorder.write("CPRSOrderID,Sta3n,PatientSID,OrderStaffSID,EnteredByStaffSID,"
// + "EnteredDateTime,OrderStatusSID,VistaPackageSID,OrderStartDateTime,OrderStopDateTime,"
// + "PackageReference");
int cprsSID = getNextKey(cprsorder);
s.setLength(0);
s.append(cprsSID).append(',');
if (sta3nValue != null) {
s.append(sta3nValue);
} else {
s.append(primarySta3n);
}
s.append(',');
s.append(personID).append(',');
s.append(providerSID).append(","); // OrderStaffSID
s.append(providerSID).append(","); // EnteredByStaffSID
s.append(iso8601Timestamp(medication.start)).append(',');
int orderStatusSID = -1;
if (medication.stop != 0L) {
orderStatusSID = orderStatus.addFact("EXPIRED", "EXPIRED");
} else {
orderStatusSID = orderStatus.addFact("ACTIVE", "ACTIVE");
}
s.append(orderStatusSID).append(',');
s.append(vistaPackage.addFact("OUTPATIENT PHARMACY", "OUTPATIENT PHARMACY")).append(',');
s.append(iso8601Timestamp(medication.start)).append(',');
if (medication.stop != 0L) {
s.append(iso8601Timestamp(medication.stop));
}
s.append(',');
s.append("OUTPATIENT PHARMACY");
s.append(NEWLINE);
write(s.toString(), cprsorder);
// ordereditem.write("OrderedItemSID,CPRSOrderSID,OrderableItemSID");
s.setLength(0);
s.append(cprsSID).append(",");
s.append(cprsSID).append(",");
s.append(orderSID).append(NEWLINE);
write(s.toString(), ordereditem);
// nonvamed.write("NonVAMedSID,PatientSID,NonVAMedIEN,Sta3n,LocalDrugSID,Dosage,"
// + "MedicationRoute,Schedule,NonVAMedStatus,CPRSOrderSID,StartDateTime,"
// + "DocumentedDateTime,NonVAMedComments");
s.setLength(0);
int nonvamedSID = getNextKey(nonvamed);
s.append(nonvamedSID).append(',');
s.append(personID).append(',');
s.append(nonvamedSID).append(',');
if (sta3nValue != null) {
s.append(sta3nValue);
} else {
s.append(primarySta3n);
}
s.append(',');
s.append(ldrugSID).append(',');
if (dosageSID != null) {
String fact = dosageForm.getFactById(dosageSID);
s.append(fact.substring(fact.indexOf(',') + 1));
}
s.append(',');
s.append("As directed by physician.,"); // MedicationRoute
s.append("As directed by physician.,"); // Schedule
s.append(orderStatus.getFactById(orderStatusSID)).append(',');
s.append(cprsSID).append(',');
s.append(iso8601Timestamp(medication.start)).append(',');
s.append(iso8601Timestamp(medication.start)).append(',');
s.append(clean(code.display));
s.append(NEWLINE);
write(s.toString(), nonvamed);
}
/**
* Write a single Immunization to the tables.
*
* @param personID ID of the person on whom the immunization was performed.
* @param person The person
* @param encounterID ID of the encounter where the immunization was performed
* @param encounter The encounter itself
* @param immunizationEntry The immunization itself
* @param primarySta3n The primary home sta3n for the patient
* @throws IOException if any IO error occurs
*/
private void immunization(int personID, Person person, int encounterID, Encounter encounter,
Immunization immunizationEntry, int primarySta3n) throws IOException {
StringBuilder s = new StringBuilder();
// immunization.write("ImmunizationSID,ImmunizationIEN,Sta3n,PatientSID,ImmunizationNameSID,"
// + "Series,Reaction,VisitDateTime,ImmunizationDateTime,OrderingStaffSID,ImmunizingStaffSID,"
// + "VisitSID,ImmunizationComments,ImmunizationRemarks");
int immunizationSid = getNextKey(immunization);
s.append(immunizationSid).append(',');
s.append(immunizationSid).append(','); // ImmunizationIEN
Integer providerSID = (sidStart / 10_000);
if (encounter.provider != null) {
String state = Location.getStateName(encounter.provider.state);
String tz = Location.getTimezoneByState(state);
s.append(sta3n.addFact(encounter.provider.id, clean(encounter.provider.name) + "," + tz));
providerSID = (Integer) encounter.provider.attributes.get(CLINICIAN_SID);
} else {
s.append(primarySta3n);
}
s.append(',');
s.append(personID).append(',');
Code cvx = immunizationEntry.codes.get(0);
int maxInSeries = Immunizations.getMaximumDoses(cvx.code);
s.append(
immunizationName.addFact(
cvx.code, clean(cvx.display) + "," + cvx.code + "," + maxInSeries));
int series = immunizationEntry.series;
if (series == maxInSeries) {
s.append(",C,");
} else {
s.append(",B,");
}
s.append(person.randInt(12)).append(','); // Reaction
s.append(iso8601Timestamp(immunizationEntry.start)).append(',');
s.append(iso8601Timestamp(immunizationEntry.start)).append(',');
s.append(providerSID).append(","); // OrderingStaffSID
s.append(providerSID).append(","); // ImmunizingStaffSID
s.append(encounterID).append(',');
// Comment
s.append("Dose #" + series + " of " + maxInSeries + " of "
+ clean(cvx.display) + " vaccine administered.,");
// Remark
s.append("Dose #" + series + " of " + maxInSeries + " of "
+ clean(cvx.display) + " vaccine administered.");
s.append(NEWLINE);
write(s.toString(), immunization);
}
/**
* Write a single CarePlan to careplans.csv.
*
* @param personID ID of the person prescribed the careplan.
* @param encounterID ID of the encounter where the careplan was prescribed
* @param careplan The careplan itself
* @return The ID of the care plan that was written
* @throws IOException if any IO error occurs
*/
private String careplan(int personID, int encounterID,
CarePlan careplan) throws IOException {
// ID,START,STOP,PATIENT,ENCOUNTER,CODE,DESCRIPTION,REASONCODE,REASONDESCRIPTION
StringBuilder s = new StringBuilder();
String careplanID = UUID.randomUUID().toString();
s.append(careplanID).append(',');
s.append(dateFromTimestamp(careplan.start)).append(',');
if (careplan.stop != 0L) {
s.append(dateFromTimestamp(careplan.stop));
}
s.append(',');
s.append(personID).append(',');
s.append(encounterID).append(',');
Code coding = careplan.codes.get(0);
s.append(coding.code).append(',');
s.append(coding.display).append(',');
if (careplan.reasons.isEmpty()) {
s.append(','); // reason code & desc
} else {
Code reason = careplan.reasons.get(0);
s.append(reason.code).append(',');
s.append(clean(reason.display));
}
s.append(NEWLINE);
//write(s.toString(), careplans);
return careplanID;
}
/**
* Write a single ImagingStudy to imaging_studies.csv.
*
* @param personID ID of the person the ImagingStudy was taken of.
* @param encounterID ID of the encounter where the ImagingStudy was performed
* @param imagingStudy The ImagingStudy itself
* @return The ID of the imaging study that was written
* @throws IOException if any IO error occurs
*/
private String imagingStudy(int personID, int encounterID,
ImagingStudy imagingStudy) throws IOException {
// ID,DATE,PATIENT,ENCOUNTER,BODYSITE_CODE,BODYSITE_DESCRIPTION,
// MODALITY_CODE,MODALITY_DESCRIPTION,SOP_CODE,SOP_DESCRIPTION
StringBuilder s = new StringBuilder();
String studyID = UUID.randomUUID().toString();
s.append(studyID).append(',');
s.append(dateFromTimestamp(imagingStudy.start)).append(',');
s.append(personID).append(',');
s.append(encounterID).append(',');
ImagingStudy.Series series1 = imagingStudy.series.get(0);
ImagingStudy.Instance instance1 = series1.instances.get(0);
Code bodySite = series1.bodySite;
Code modality = series1.modality;
Code sopClass = instance1.sopClass;
s.append(bodySite.code).append(',');
s.append(bodySite.display).append(',');
s.append(modality.code).append(',');
s.append(modality.display).append(',');
s.append(sopClass.code).append(',');
s.append(sopClass.display);
s.append(NEWLINE);
//write(s.toString(), imagingStudies);
return studyID;
}
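/**
* Returns the next sequential key for the given table, lazily creating a per-table
* counter seeded with sidStart on first use.
*/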
private int getNextKey(FileWriter table) {
synchronized (sids) {
return sids.computeIfAbsent(table, k -> new AtomicInteger(sidStart)).getAndIncrement();
}
}
/**
* Replaces commas and line breaks in the source string with a single space.
* Null is replaced with the empty string.
*/
private static String clean(String src) {
if (src == null) {
return "";
} else {
return src.replaceAll("\\r\\n|\\r|\\n|,", " ").trim();
}
}
/**
* Helper method to write a line to a File.
* Extracted to a separate method here to make it a little easier to replace implementations.
*
* @param line The line to write
* @param writer The place to write it
* @throws IOException if an I/O error occurs
*/
private static void write(String line, FileWriter writer) throws IOException {
synchronized (writer) {
writer.write(line);
writer.flush();
}
}
}
|
package org.neo4j.admin.tool;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import org.neo4j.kernel.impl.nioneo.store.Abstract64BitRecord;
import org.neo4j.kernel.impl.nioneo.store.CommonAbstractStore;
import org.neo4j.kernel.impl.nioneo.store.Filter;
import org.neo4j.kernel.impl.nioneo.store.GraphDatabaseStore;
import org.neo4j.kernel.impl.nioneo.store.StoreAccess;
public abstract class SimpleStoreTool
{
protected final GraphDatabaseStore store;
protected SimpleStoreTool( String[] args )
{
if ( args.length == 0 )
{
throw new IllegalArgumentException( "USAGE: java -jar " + jar( getClass() ) + " <path to neo4j store dir>" );
}
this.store = new GraphDatabaseStore( args[0] );
}
public static <SST extends SimpleStoreTool> void main( Class<SST> main, String... args ) throws Throwable
{
final Constructor<SST> init;
try
{
init = main.getDeclaredConstructor( String[].class );
}
catch ( Exception e )
{
System.err.println( main.getName() + " cannot use the default main implementation" );
e.printStackTrace();
return;
}
init.setAccessible( true );
final SST instance;
try
{
instance = init.newInstance( new Object[] { args } );
}
catch ( InvocationTargetException e )
{
Throwable failure = e.getTargetException();
if ( failure instanceof IllegalArgumentException )
{
System.err.println( failure.getMessage() );
return;
}
else
{
throw failure;
}
}
catch ( Exception e )
{
System.err.println( main.getName() + " cannot use the default main implementation" );
e.printStackTrace();
return;
}
instance.prepare();
try
{
instance.run();
}
finally
{
instance.shutdown();
}
}
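// Hypothetical subclass sketch (illustration only, names are invented): a concrete tool
// extends SimpleStoreTool, declares a (String[]) constructor, and implements run():
//   class MyStoreTool extends SimpleStoreTool {
//       MyStoreTool( String[] args ) { super( args ); }
//       protected void run() { /* inspect records via this.store */ }
//       public static void main( String[] args ) throws Throwable { main( MyStoreTool.class, args ); }
//   }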
protected abstract void run() throws Throwable;
protected void prepare()
{
store.makeStoreOk();
}
protected void shutdown()
{
store.shutdown();
}
protected static <T extends CommonAbstractStore, R extends Abstract64BitRecord> void process(
RecordProcessor<R> processor, StoreAccess<T, R> store, Filter<? super R>... filters )
{
long highId = store.getHighId();
System.err.printf( "%s for %s records%n", processor, Long.toString( highId ) );
int lastPercent = 0;
for ( R record : store.scan( filters ) )
{
processor.process( record );
int permille = (int) ( ( record.getId() * 1000L ) / highId );
if ( permille != lastPercent ) progress( lastPercent = permille );
}
if ( lastPercent != 1000 ) progress( 1000 );
}
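// Writes a lightweight progress indicator to stderr: the percentage at every 10%
// and a dot at every half percent in between.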
private static void progress( int permille )
{
if ( permille % 100 == 0 )
System.err.printf( "%3s%%%n", Integer.toString( permille / 10 ) );
else if ( permille % 5 == 0 ) System.err.print( "." );
}
private static String jar( Class<?> type )
{
return type.getProtectionDomain().getCodeSource().getLocation().getFile();
}
}
|
package org.nwapw.abacus.fx;
import javafx.application.Platform;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.fxml.FXML;
import javafx.scene.control.*;
import javafx.scene.control.cell.CheckBoxListCell;
import javafx.scene.text.Text;
import javafx.util.Callback;
import javafx.util.StringConverter;
import org.nwapw.abacus.Abacus;
import org.nwapw.abacus.config.Configuration;
import org.nwapw.abacus.number.ComputationInterruptedException;
import org.nwapw.abacus.number.NumberInterface;
import org.nwapw.abacus.plugin.PluginListener;
import org.nwapw.abacus.plugin.PluginManager;
import org.nwapw.abacus.tree.TreeNode;
import java.util.Set;
/**
* The controller for the abacus FX UI, responsible
* for all the user interaction.
*/
public class AbacusController implements PluginListener {
/**
* The title for the apply alert dialog.
*/
private static final String APPLY_MSG_TITLE = "\"Apply\" Needed";
/**
* The text for the header of the apply alert dialog.
*/
private static final String APPLY_MSG_HEADER = "The settings have not been applied.";
/**
* The text for the dialog that is shown if settings haven't been applied.
*/
private static final String APPLY_MSG_TEXT = "You have made changes to the configuration, however, you haven't pressed \"Apply\". " +
"The changes to the configuration will not be present in the calculator until \"Apply\" is pressed.";
/**
* Constant string that is displayed if the text could not be lexed or parsed.
*/
private static final String ERR_SYNTAX = "Syntax Error";
/**
* Constant string that is displayed if the tree could not be reduced.
*/
private static final String ERR_EVAL = "Evaluation Error";
/**
* Constant string that is displayed if the calculations are stopped before they are done.
*/
private static final String ERR_STOP = "Stopped";
@FXML
private TabPane coreTabPane;
@FXML
private Tab calculateTab;
@FXML
private Tab settingsTab;
@FXML
private TableView<HistoryModel> historyTable;
@FXML
private TableColumn<HistoryModel, String> inputColumn;
@FXML
private TableColumn<HistoryModel, String> parsedColumn;
@FXML
private TableColumn<HistoryModel, String> outputColumn;
@FXML
private Text outputText;
@FXML
private TextField inputField;
@FXML
private Button inputButton;
@FXML
private Button stopButton;
@FXML
private ComboBox<String> numberImplementationBox;
@FXML
private ListView<ToggleablePlugin> enabledPluginView;
private ObservableList<HistoryModel> historyData;
/**
* The list of number implementation names available
* for selection in the settings tab.
*/
private ObservableList<String> numberImplementationOptions;
/**
* The list of plugin objects that can be toggled on and off,
* and, when reloaded, get added to the plugin manager's black list.
*/
private ObservableList<ToggleablePlugin> enabledPlugins;
/**
* The abacus instance used for changing the plugin configuration.
*/
private Abacus abacus;
/**
* Boolean which represents whether changes were made to the configuration.
*/
private boolean changesMade;
/**
* Whether an alert about changes to the configuration was already shown.
*/
private boolean reloadAlertShown;
/**
* The alert shown when a press to "apply" is needed.
*/
private Alert reloadAlert;
/**
* The runnable used to perform the calculation.
*/
private final Runnable CALCULATION_RUNNABLE = new Runnable() {
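// Parses and evaluates the current input field text, returning an error label
// (ERR_SYNTAX, ERR_EVAL or ERR_STOP) on failure or the empty string on success;
// successful results are appended to historyData.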
private String attemptCalculation(){
TreeNode constructedTree = abacus.parseString(inputField.getText());
if (constructedTree == null) {
return ERR_SYNTAX;
}
try {
NumberInterface evaluatedNumber = abacus.evaluateTree(constructedTree);
if (evaluatedNumber == null) {
return ERR_EVAL;
}
historyData.add(new HistoryModel(inputField.getText(), constructedTree.toString(), evaluatedNumber.toString()));
} catch (ComputationInterruptedException exception) {
return ERR_STOP;
}
return "";
}
@Override
public void run() {
String calculation = attemptCalculation();
Platform.runLater(() -> {
outputText.setText(calculation);
inputField.setText("");
inputButton.setDisable(false);
stopButton.setDisable(true);
});
}
};
private Thread calculationThread;
/**
* Alerts the user if the changes they made
* have not yet been applied.
*/
private void alertIfApplyNeeded(boolean ignorePrevious) {
if (changesMade && (!reloadAlertShown || ignorePrevious)) {
reloadAlertShown = true;
reloadAlert.showAndWait();
}
}
@FXML
public void initialize() {
Callback<TableColumn<HistoryModel, String>, TableCell<HistoryModel, String>> cellFactory =
param -> new CopyableCell<>();
Callback<ListView<ToggleablePlugin>, ListCell<ToggleablePlugin>> pluginCellFactory =
param -> new CheckBoxListCell<>(ToggleablePlugin::enabledProperty, new StringConverter<ToggleablePlugin>() {
@Override
public String toString(ToggleablePlugin object) {
return object.getClassName().substring(object.getClassName().lastIndexOf('.') + 1);
}
@Override
public ToggleablePlugin fromString(String string) {
return new ToggleablePlugin(true, string);
}
});
historyData = FXCollections.observableArrayList();
historyTable.setItems(historyData);
numberImplementationOptions = FXCollections.observableArrayList();
numberImplementationBox.setItems(numberImplementationOptions);
numberImplementationBox.getSelectionModel().selectedIndexProperty().addListener(e -> changesMade = true);
historyTable.getSelectionModel().setCellSelectionEnabled(true);
enabledPlugins = FXCollections.observableArrayList();
enabledPluginView.setItems(enabledPlugins);
enabledPluginView.setCellFactory(pluginCellFactory);
inputColumn.setCellFactory(cellFactory);
inputColumn.setCellValueFactory(cell -> cell.getValue().inputProperty());
parsedColumn.setCellFactory(cellFactory);
parsedColumn.setCellValueFactory(cell -> cell.getValue().parsedProperty());
outputColumn.setCellFactory(cellFactory);
outputColumn.setCellValueFactory(cell -> cell.getValue().outputProperty());
coreTabPane.getSelectionModel().selectedItemProperty().addListener((observable, oldValue, newValue) -> {
if (oldValue.equals(settingsTab)) alertIfApplyNeeded(true);
});
abacus = new Abacus();
abacus.getPluginManager().addListener(this);
abacus.getPluginManager().reload();
changesMade = false;
reloadAlertShown = false;
reloadAlert = new Alert(Alert.AlertType.WARNING);
reloadAlert.setTitle(APPLY_MSG_TITLE);
reloadAlert.setHeaderText(APPLY_MSG_HEADER);
reloadAlert.setContentText(APPLY_MSG_TEXT);
}
@FXML
private void performCalculation() {
inputButton.setDisable(true);
stopButton.setDisable(false);
calculationThread = new Thread(CALCULATION_RUNNABLE);
calculationThread.start();
}
@FXML
private void performStop(){
if(calculationThread != null)
calculationThread.interrupt();
}
@FXML
private void performSaveAndReload() {
performSave();
performReload();
changesMade = false;
reloadAlertShown = false;
}
@FXML
private void performReload() {
alertIfApplyNeeded(true);
abacus.getPluginManager().reload();
}
@FXML
private void performSave() {
Configuration configuration = abacus.getConfiguration();
configuration.setNumberImplementation(numberImplementationBox.getSelectionModel().getSelectedItem());
Set<String> disabledPlugins = configuration.getDisabledPlugins();
disabledPlugins.clear();
for (ToggleablePlugin pluginEntry : enabledPlugins) {
if (!pluginEntry.isEnabled()) disabledPlugins.add(pluginEntry.getClassName());
}
configuration.saveTo(Abacus.CONFIG_FILE);
changesMade = false;
reloadAlertShown = false;
}
@Override
public void onLoad(PluginManager manager) {
Configuration configuration = abacus.getConfiguration();
Set<String> disabledPlugins = configuration.getDisabledPlugins();
numberImplementationOptions.addAll(abacus.getPluginManager().getAllNumberImplementations());
String actualImplementation = configuration.getNumberImplementation();
String toSelect = (numberImplementationOptions.contains(actualImplementation)) ? actualImplementation : "<default>";
numberImplementationBox.getSelectionModel().select(toSelect);
for (Class<?> pluginClass : abacus.getPluginManager().getLoadedPluginClasses()) {
String fullName = pluginClass.getName();
ToggleablePlugin plugin = new ToggleablePlugin(!disabledPlugins.contains(fullName), fullName);
plugin.enabledProperty().addListener(e -> changesMade = true);
enabledPlugins.add(plugin);
}
}
@Override
public void onUnload(PluginManager manager) {
enabledPlugins.clear();
numberImplementationOptions.clear();
}
}
|
package org.pcap4j.packet;
import static org.pcap4j.util.ByteArrays.*;
import java.util.ArrayList;
import java.util.List;
import org.pcap4j.packet.factory.PacketFactories;
import org.pcap4j.packet.namednumber.IcmpV4Code;
import org.pcap4j.packet.namednumber.IcmpV4Type;
import org.pcap4j.util.ByteArrays;
/**
* @author Kaito Yamada
* @since pcap4j 0.9.11
*/
public final class IcmpV4CommonPacket extends AbstractPacket {
private static final long serialVersionUID = 7643067752830062365L;
private final IcmpV4CommonHeader header;
private final Packet payload;
/**
*
* @param rawData
* @return a new IcmpV4CommonPacket object.
*/
public static IcmpV4CommonPacket newPacket(byte[] rawData) {
return new IcmpV4CommonPacket(rawData);
}
private IcmpV4CommonPacket(byte[] rawData) {
this.header = new IcmpV4CommonHeader(rawData);
byte[] rawPayload
= ByteArrays.getSubArray(
rawData,
header.length(),
rawData.length - header.length()
);
this.payload
= PacketFactories.getFactory(IcmpV4Type.class)
.newPacket(rawPayload, header.getType());
}
private IcmpV4CommonPacket(Builder builder) {
if (
builder == null
|| builder.type == null
|| builder.code == null
|| builder.payloadBuilder == null
) {
StringBuilder sb = new StringBuilder();
sb.append("builder: ").append(builder)
.append(" builder.type: ").append(builder.type)
.append(" builder.code: ").append(builder.code)
.append(" builder.payloadBuilder: ").append(builder.payloadBuilder);
throw new NullPointerException(sb.toString());
}
this.payload = builder.payloadBuilder.build();
this.header = new IcmpV4CommonHeader(
builder,
payload.getRawData()
);
}
@Override
public IcmpV4CommonHeader getHeader() {
return header;
}
@Override
public Packet getPayload() {
return payload;
}
@Override
public Builder getBuilder() {
return new Builder(this);
}
/**
*
* @param acceptZero whether a checksum of zero should be treated as valid
* @return true if the packet represented by this object has a valid checksum;
* false otherwise.
*/
public boolean hasValidChecksum(boolean acceptZero) {
if (header.checksum == 0) {
if (acceptZero) { return true; }
else { return false; }
}
return header.calcChecksum(payload.getRawData()) == header.checksum;
}
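// Usage sketch (hypothetical payload builder; IcmpV4Type.ECHO and IcmpV4Code.NO_CODE
// are assumed constants): build a packet whose checksum is filled in at build time,
// then verify it:
//   IcmpV4CommonPacket packet
//     = new IcmpV4CommonPacket.Builder()
//         .type(IcmpV4Type.ECHO)
//         .code(IcmpV4Code.NO_CODE)
//         .payloadBuilder(somePayloadBuilder)
//         .correctChecksumAtBuild(true)
//         .build();
//   boolean valid = packet.hasValidChecksum(false);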
/**
* @author Kaito Yamada
* @since pcap4j 0.9.11
*/
public static final
class Builder extends AbstractBuilder
implements ChecksumBuilder<IcmpV4CommonPacket> {
private IcmpV4Type type;
private IcmpV4Code code;
private short checksum;
private Packet.Builder payloadBuilder;
private boolean correctChecksumAtBuild;
public Builder() {}
private Builder(IcmpV4CommonPacket packet) {
this.type = packet.header.type;
this.code = packet.header.code;
this.checksum = packet.header.checksum;
this.payloadBuilder = packet.payload.getBuilder();
}
/**
*
* @param type
* @return this Builder object for method chaining.
*/
public Builder type(IcmpV4Type type) {
this.type = type;
return this;
}
/**
*
* @param code
* @return this Builder object for method chaining.
*/
public Builder code(IcmpV4Code code) {
this.code = code;
return this;
}
/**
*
* @param checksum
* @return this Builder object for method chaining.
*/
public Builder checksum(short checksum) {
this.checksum = checksum;
return this;
}
@Override
public Builder payloadBuilder(Packet.Builder payloadBuilder) {
this.payloadBuilder = payloadBuilder;
return this;
}
@Override
public Packet.Builder getPayloadBuilder() {
return payloadBuilder;
}
public Builder correctChecksumAtBuild(boolean correctChecksumAtBuild) {
this.correctChecksumAtBuild = correctChecksumAtBuild;
return this;
}
@Override
public IcmpV4CommonPacket build() {
return new IcmpV4CommonPacket(this);
}
}
/**
* @author Kaito Yamada
* @since pcap4j 0.9.11
*/
public static final class IcmpV4CommonHeader extends AbstractHeader {
/*
* 0 15
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
* | Type | Code |
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
* | Checksum |
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
*
*/
private static final long serialVersionUID = 752307079936231186L;
private static final int TYPE_OFFSET
= 0;
private static final int TYPE_SIZE
= BYTE_SIZE_IN_BYTES;
private static final int CODE_OFFSET
= TYPE_OFFSET + TYPE_SIZE;
private static final int CODE_SIZE
= BYTE_SIZE_IN_BYTES;
private static final int CHECKSUM_OFFSET
= CODE_OFFSET + CODE_SIZE;
private static final int CHECKSUM_SIZE
= SHORT_SIZE_IN_BYTES;
private static final int ICMP_V4_COMMON_HEADER_SIZE
= CHECKSUM_OFFSET + CHECKSUM_SIZE;
private final IcmpV4Type type;
private final IcmpV4Code code;
private final short checksum;
private IcmpV4CommonHeader(byte[] rawData) {
if (rawData.length < ICMP_V4_COMMON_HEADER_SIZE) {
StringBuilder sb = new StringBuilder(80);
sb.append("The data is too short to build an ICMPv4 common header(")
.append(ICMP_V4_COMMON_HEADER_SIZE)
.append(" bytes). data: ")
.append(ByteArrays.toHexString(rawData, " "));
throw new IllegalRawDataException(sb.toString());
}
this.type
= IcmpV4Type
.getInstance(ByteArrays.getByte(rawData, TYPE_OFFSET));
this.code
= IcmpV4Code
.getInstance(type.value(), ByteArrays.getByte(rawData, CODE_OFFSET));
this.checksum
= ByteArrays.getShort(rawData, CHECKSUM_OFFSET);
}
private IcmpV4CommonHeader(Builder builder, byte[] payload) {
this.type = builder.type;
this.code = builder.code;
if (builder.correctChecksumAtBuild) {
if (PacketPropertiesLoader.getInstance().icmpV4CalcChecksum()) {
this.checksum = calcChecksum(payload);
}
else {
this.checksum = (short)0;
}
}
else {
this.checksum = builder.checksum;
}
}
private short calcChecksum(byte[] payload) {
byte[] data;
int packetLength = payload.length + length();
if ((packetLength % 2) != 0) {
data = new byte[packetLength + 1];
}
else {
data = new byte[packetLength];
}
// Build the header bytes with buildRawData() rather than getRawData(); the checksum
// field is zeroed below before the checksum is calculated over header + payload.
System.arraycopy(buildRawData(), 0, data, 0, length());
System.arraycopy(
payload, 0, data, length(), payload.length
);
for (int i = 0; i < CHECKSUM_SIZE; i++) {
data[CHECKSUM_OFFSET + i] = (byte)0;
}
return ByteArrays.calcChecksum(data);
}
/**
*
* @return type
*/
public IcmpV4Type getType() {
return type;
}
/**
*
* @return code
*/
public IcmpV4Code getCode() {
return code;
}
/**
*
* @return checksum
*/
public short getChecksum() {
return checksum;
}
@Override
protected List<byte[]> getRawFields() {
List<byte[]> rawFields = new ArrayList<byte[]>();
rawFields.add(ByteArrays.toByteArray(type.value()));
rawFields.add(ByteArrays.toByteArray(code.value()));
rawFields.add(ByteArrays.toByteArray(checksum));
return rawFields;
}
@Override
public int length() {
return ICMP_V4_COMMON_HEADER_SIZE;
}
@Override
protected String buildString() {
StringBuilder sb = new StringBuilder();
String ls = System.getProperty("line.separator");
sb.append("[ICMP Common Header (")
.append(length())
.append(" bytes)]")
.append(ls);
sb.append(" Type: ")
.append(type)
.append(ls);
sb.append(" Code: ")
.append(code)
.append(ls);
sb.append(" Checksum: 0x")
.append(ByteArrays.toHexString(checksum, ""))
.append(ls);
return sb.toString();
}
}
}
|
package org.rspql.spin.utils;
import java.io.InputStream;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Iterator;
import org.rspql.lang.rspql.ParserRSPQL;
import org.topbraid.spin.arq.ARQ2SPIN;
import org.topbraid.spin.arq.ARQFactory;
import org.topbraid.spin.model.Template;
import org.topbraid.spin.system.SPINModuleRegistry;
import org.topbraid.spin.vocabulary.ARG;
import org.topbraid.spin.vocabulary.SP;
import org.topbraid.spin.vocabulary.SPIN;
import org.topbraid.spin.vocabulary.SPL;
import com.hp.hpl.jena.query.ParameterizedSparqlString;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.QuerySolutionMap;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.NodeIterator;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.ResIterator;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.vocabulary.RDF;
import com.hp.hpl.jena.vocabulary.RDFS;
public class TemplateManager {
public Model model = ModelFactory.createDefaultModel();
public String NS = "http://w3id.org/rsp/spin/template
public SimpleDateFormat df = new SimpleDateFormat("YYYY-MM-dd'T'hh:mm:ss");
/**
* Setup a new template manager
* @param loadTemplates
*/
public TemplateManager() {
// Initialize
SPINModuleRegistry.get().init();
ParserRSPQL.register();
ARQFactory.get().setSyntax(ParserRSPQL.rspqlSyntax);
// Eager literal validation active, required
com.hp.hpl.jena.shared.impl.JenaParameters.enableEagerLiteralValidation = true;
}
/**
* Load templates from input stream.
* @param is
* @param format
*/
public void loadTemplates(InputStream is, String format) {
model.read(is, format);
}
/**
* Set the default namespace.
* @param ns
*/
public void setNS(String ns){
this.NS = ns;
}
/**
* Add a new query template.
*
* @param qString
* @param handle
* @param comment
*/
public Template createTemplate(String qString, String handle, String comment) {
handle = handle.startsWith("http://") ? handle : NS + handle;
Query query = QueryFactory.create(qString, ParserRSPQL.rspqlSyntax);
Resource queryType;
switch (query.getQueryType()) {
case Query.QueryTypeAsk:
// TODO: Not tested
queryType = SPIN.AskTemplate;
break;
case Query.QueryTypeConstruct:
queryType = SPIN.ConstructTemplate;
break;
case Query.QueryTypeSelect:
queryType = SPIN.SelectTemplate;
break;
default:
System.err.println("Unrecognized");
return null;
}
ARQ2SPIN arq2SPIN = new ARQ2SPIN(model);
org.topbraid.spin.model.Query spinQuery = arq2SPIN.createQuery(query, null);
// Create a template
Template template = model.createResource(handle, queryType).as(Template.class);
template.addProperty(SPIN.body, spinQuery);
template.addProperty(RDFS.comment, comment);
return template;
}
/**
* Create a template argument.
* @param template
* @param varName
* @param valueType
* @param defaultValue
* @param optional
* @param comment
* @return arg
*/
public Resource createArgument(Template template, String varName, RDFNode valueType, RDFNode defaultValue,
boolean optional, String comment) {
Model model = template.getModel();
Resource arg = model.createResource(SPL.Argument);
template.addProperty(SPIN.constraint, arg);
arg.addProperty(SPL.predicate, model.getProperty(ARG.NS + varName));
arg.addProperty(SPL.valueType, valueType);
if (defaultValue != null) {
arg.addProperty(SPL.defaultValue, defaultValue);
}
arg.addProperty(SPL.optional, model.createTypedLiteral(optional));
arg.addProperty(RDFS.comment, comment);
return arg;
}
/**
* Return a template from the template model or null.
*
* @param handle
* @return
*/
public Template getTemplate(String handle) {
handle = handle.startsWith("http://") ? handle : NS + handle;
// Find template type
NodeIterator iter = model.listObjectsOfProperty(model.createResource(handle), RDF.type);
if (iter.hasNext()) {
Resource r = iter.next().asResource();
if (r.equals(SPIN.SelectTemplate)) {
return model.createResource(handle, SPIN.SelectTemplate).as(Template.class);
} else if (r.equals(SPIN.ConstructTemplate)) {
return model.createResource(handle, SPIN.ConstructTemplate).as(Template.class);
} else if (r.equals(SPIN.AskTemplate)) {
// TODO: Not tested
return model.createResource(handle, SPIN.AskTemplate).as(Template.class);
} else {
return model.createResource(handle, SPIN.Template).as(Template.class);
}
}
return null;
}
/**
* Create a query from a query template. Before initiating this call the
* query should have been checked for constraint violations.
*
* @param template
* @param bindings
* @return
*/
public String getQuery(Template template, QuerySolutionMap bindings) {
Query arq;
if (template.getBody() != null) {
org.topbraid.spin.model.Query spinQuery = (org.topbraid.spin.model.Query) template.getBody();
arq = ARQFactory.get().createQuery(spinQuery);
} else {
arq = ARQFactory.get().createQuery(template.getProperty(SP.text).getObject().toString());
}
// Set limit
if (bindings.contains("limit")) {
arq.setLimit(bindings.getLiteral("limit").getInt());
}
// Set offset
if (bindings.contains("offset")) {
arq.setOffset(bindings.getLiteral("offset").getInt());
}
// Parameterized
ParameterizedSparqlString pss = new ParameterizedSparqlString(arq.toString(), bindings);
return pss.toString();
}
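// Usage sketch (hypothetical query string and handle): create a template once and
// later instantiate it with concrete bindings:
//   TemplateManager tm = new TemplateManager();
//   Template t = tm.createTemplate(queryString, "exampleTemplate", "An example template");
//   QuerySolutionMap bindings = new QuerySolutionMap();
//   bindings.add("limit", tm.model.createTypedLiteral(10));
//   String query = tm.getQuery(t, bindings);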
/**
* Get list of all query templates.
*
* @return templates
*/
public ArrayList<Template> getTemplateList(Resource type) {
// Get the template from the model using a template resource handle
Iterator<Resource> iter = model.listResourcesWithProperty(RDF.type, type);
ArrayList<Template> templates = new ArrayList<>();
while (iter.hasNext()) {
Template template = iter.next().as(Template.class);
templates.add(template);
}
return templates;
}
public ArrayList<Template> getTemplateList() {
// Add subClasses of SPIN template
ResIterator resIter = SPIN.getModel().listSubjectsWithProperty(RDFS.subClassOf, SPIN.Template);
ArrayList<Template> templates = new ArrayList<>();
while (resIter.hasNext()) {
Iterator<Resource> iter = model.listResourcesWithProperty(RDF.type, resIter.next());
while (iter.hasNext()) {
Template template = iter.next().as(Template.class);
templates.add(template);
}
}
// Add top level SPIN templates
Iterator<Resource> iter = model.listResourcesWithProperty(RDF.type, SPIN.Template);
while (iter.hasNext()) {
Template template = iter.next().as(Template.class);
templates.add(template);
}
return templates;
}
}
|
package org.spongepowered.api;
import org.spongepowered.api.item.ItemType;
import java.util.Map;
import java.util.Set;
/**
* A GameDictionary is a store of {@link org.spongepowered.api.item.ItemTypes}.
*
* <p>Note that the GameDictionary's keys are different from Minecraft item
* ids. Minecraft item IDs are namespaces, e.g. minecraft:carrot while
* ItemDictionary keys are not, by design(e.g. carrot). This is mainly to keep
* supporting the existing Forge 'ore dictionary'.</p>
*/
public interface GameDictionary {
/**
* Registers an ItemType in the dictionary with a String key.
*
* @param key The key of the item as a String
* @param type The item type to register
*/
void register(String key, ItemType type);
/**
* Retrieves the item types registered for the given key.
*
* @param key The key of the items as a String
* @return The item types registered for the given key
*/
Set<ItemType> get(String key);
/**
* Retrieves all items registered in this item dictionary, mapped by
* their key.
*
* @return A map of all items registered
*/
Map<String, Set<ItemType>> getAllItems();
/**
* Checks if this is a flowerpot.
*
* @return Whether this is a flowerpot
*/
boolean isFlowerPot();
}
|
package permafrost.tundra.lang;
import com.wm.app.b2b.server.ServiceException;
import com.wm.data.IData;
import com.wm.util.coder.IDataCodable;
import permafrost.tundra.data.IDataMap;
import java.util.Collection;
public class BaseException extends ServiceException implements IDataCodable {
/**
* Constructs a new BaseException.
*/
public BaseException() {
super("");
}
/**
* Constructs a new BaseException with the given message.
*
* @param message A message describing why the BaseException was thrown.
*/
public BaseException(String message) {
super(message);
}
/**
* Constructs a new BaseException with the given cause.
*
* @param cause The cause of this BaseException.
*/
public BaseException(Throwable cause) {
this(ExceptionHelper.getMessage(cause), cause);
}
/**
* Constructs a new BaseException with the given message and cause.
*
* @param message A message describing why the BaseException was thrown.
* @param cause The cause of this Exception.
*/
public BaseException(String message, Throwable cause) {
super(message);
if (cause != null) initCause(cause);
}
/**
* Constructs a new BaseException with the given list of exceptions.
*
* @param exceptions A collection of exceptions this exception will wrap.
*/
public BaseException(Collection<? extends Throwable> exceptions) {
super(ExceptionHelper.getMessage(exceptions));
}
/**
* Constructs a new BaseException with the given list of exceptions.
*
* @param exceptions A collection of exceptions this exception will wrap.
*/
public BaseException(Throwable... exceptions) {
super(ExceptionHelper.getMessage(exceptions));
}
/**
* Returns an IData representation of this object.
*
* @return An IData representation of this object.
*/
public IData getIData() {
IDataMap map = new IDataMap();
map.put("$exception?", "true");
map.put("$exception.class", getClass().getName());
map.put("$exception.message", getMessage());
return map;
}
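// For example (hypothetical message), new BaseException("boom").getIData() yields a
// document containing $exception? = "true", $exception.class = the exception class name,
// and $exception.message = "boom".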
/**
* This method has not been implemented.
*
* @param document An IData document.
* @throws UnsupportedOperationException This method has not been implemented.
*/
public void setIData(IData document) {
throw new UnsupportedOperationException("setIData(IData) not implemented");
}
}
|
package foam.nanos.http;
import foam.box.Skeleton;
import foam.core.ContextAware;
import foam.core.X;
import foam.dao.DAO;
import foam.dao.DAOSkeleton;
import foam.nanos.boot.NSpec;
import foam.nanos.boot.NSpecAware;
import foam.nanos.logger.NanoLogger;
import foam.nanos.NanoService;
import foam.nanos.pm.PM;
import java.io.IOException;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.ServletException;
public class NanoRouter
extends HttpServlet
implements NanoService, ContextAware
{
protected X x_;
protected Map<String, HttpServlet> handlerMap_ = new ConcurrentHashMap<>();
@Override
protected void service(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
String path = req.getRequestURI();
//AuthService auth = this.X.get("authService");
String[] urlParams = path.split("/");
String serviceKey = urlParams[1];
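// e.g. a request to "/userDAO" (hypothetical service name) splits into ["", "userDAO"],
// so serviceKey becomes "userDAO" and is looked up in the context below.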
Object service = getX().get(serviceKey);
DAO nSpecDAO = (DAO) getX().get("nSpecDAO");
NSpec spec = (NSpec) nSpecDAO.find(serviceKey);
HttpServlet serv = getServlet(spec, service);
PM pm = new PM(this.getClass(), serviceKey);
try {
if ( serv == null ) {
System.err.println("No service found for: " + serviceKey);
} else {
serv.service(req, resp);
}
} catch (Throwable t) {
System.err.println("Error serving " + serviceKey + " " + path);
t.printStackTrace();
} finally {
pm.log(x_);
}
}
protected HttpServlet getServlet(NSpec spec, Object service) {
if ( spec == null ) return null;
if ( ! handlerMap_.containsKey(spec.getName()) ) {
handlerMap_.put(spec.getName(), createServlet(spec, service));
}
return handlerMap_.get(spec.getName());
}
protected HttpServlet createServlet(NSpec spec, Object service) {
informService(service, spec);
if ( spec.getServe() ) {
try {
Class cls = spec.getBoxClass() != null && spec.getBoxClass().length() > 0 ?
Class.forName(spec.getBoxClass()) :
DAOSkeleton.class ;
Skeleton skeleton = (Skeleton) cls.newInstance();
informService(skeleton, spec);
skeleton.setDelegateObject(service);
service = new ServiceServlet(service, skeleton);
informService(service, spec);
} catch (IllegalAccessException | InstantiationException | ClassNotFoundException ex) {
ex.printStackTrace();
((NanoLogger) getX().get("logger")).error("Unable to create NSPec servlet: " + spec.getName());
}
}
if ( service instanceof WebAgent ) {
service = new WebAgentServlet((WebAgent) service);
informService(service, spec);
}
if ( service instanceof HttpServlet ) return (HttpServlet) service;
NanoLogger logger = (NanoLogger) getX().get("logger");
logger.error(this.getClass(), spec.getName() + " does not have a HttpServlet.");
return null;
}
protected void informService(Object service, NSpec spec) {
if ( service instanceof ContextAware ) ((ContextAware) service).setX(getX());
if ( service instanceof NSpecAware ) ((NSpecAware) service).setNSpec(spec);
}
@Override
public void start() {
}
@Override
public X getX() {
return x_;
}
@Override
public void setX(X x) {
x_ = x;
}
}
|
package permafrost.tundra.lang;
import permafrost.tundra.io.InputStreamHelper;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
/**
* A collection of convenience methods for working with Charset objects.
*/
public final class CharsetHelper {
/**
* The default character set name used by Tundra.
*/
public static final String DEFAULT_CHARSET_NAME = "UTF-8";
/**
* The default character set used by Tundra.
*/
public static final Charset DEFAULT_CHARSET = Charset.forName(DEFAULT_CHARSET_NAME);
/**
* Disallow instantiation of this class.
*/
private CharsetHelper() {}
/**
* Converts the given object to a Charset.
*
* @param object The object to be converted.
* @return The converted object.
*/
public static Charset normalize(Object object) {
Charset value = null;
if (object instanceof Charset) {
value = (Charset)object;
} else if (object instanceof String) {
value = Charset.forName((String)object);
}
return value;
}
/**
* Normalizes the given charset name as a Charset object.
*
* @param charsetName The character set name to normalize.
* @return The Charset representing the given name, or a default Charset if the given name is null.
*/
public static Charset normalize(String charsetName) {
return normalize(charsetName, DEFAULT_CHARSET);
}
/**
* Normalizes the given charset name as a Charset object.
*
* @param charsetName The character set name to normalize.
* @param defaultCharset The default character set to return if the given name is null.
* @return The Charset representing the given name, or a default Charset if the given name is null.
*/
public static Charset normalize(String charsetName, Charset defaultCharset) {
Charset charset;
if (charsetName == null) {
charset = defaultCharset;
} else {
charset = Charset.forName(charsetName);
}
return charset;
}
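// e.g. normalize(null) falls back to DEFAULT_CHARSET (UTF-8), while
// normalize("ISO-8859-1") returns that charset (hypothetical caller values).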
/**
* Normalizes the given Charset object.
*
* @param charset The character set to normalize.
* @return The normalized character set.
*/
public static Charset normalize(Charset charset) {
return normalize(charset, DEFAULT_CHARSET);
}
/**
* Normalizes the given Charset object.
*
* @param charset The character set to normalize.
* @param defaultCharset The default character set to return if the given charset is null.
* @return The normalized character set.
*/
public static Charset normalize(Charset charset, Charset defaultCharset) {
if (charset == null) charset = defaultCharset;
return charset;
}
/**
* Returns the charset for the given display name.
*
* @param charsetName The name of the charset to return.
* @return The charset with the given display name.
*/
public static Charset of(String charsetName) {
if (charsetName == null) return null;
return Charset.forName(charsetName);
}
/**
* Returns a new byte[] by converting the given byte[] from the input charset to the output charset, unless the two
* charsets are equal in which case the given byte[] is returned as is.
*
* @param content The text content to be converted to another charset.
* @param inCharset The charset the text content is currently encoded with.
* @param outCharset The charset the returned converted text content will be encoded with.
* @return The given text content converted from one charset to another.
*/
public static byte[] convert(byte[] content, Charset inCharset, Charset outCharset) {
if (inCharset.equals(outCharset)) return content;
return BytesHelper.normalize(StringHelper.normalize(content, inCharset), outCharset);
}
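// Usage sketch (hypothetical input bytes): re-encode ISO-8859-1 text as UTF-8; when both
// charsets are equal the original array is returned unchanged:
//   byte[] utf8 = CharsetHelper.convert(latin1Bytes, "ISO-8859-1", "UTF-8");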
/**
* Returns a new byte[] by converting the given byte[] from the input charset to the output charset, unless the two
* charsets are equal in which case the given byte[] is returned as is.
*
* @param content The text content to be converted to another charset.
* @param inCharsetName The charset the text content is currently encoded with.
* @param outCharsetName The charset the returned converted text content will be encoded with.
* @return The given text content converted from one charset to another.
*/
public static byte[] convert(byte[] content, String inCharsetName, String outCharsetName) {
return convert(content, normalize(inCharsetName), normalize(outCharsetName));
}
/**
* Returns a new InputStream by converting the given InputStream from the input charset to the output charset,
* unless the two charsets are equal in which case the given InputStream is returned as is.
*
* @param content The text content to be converted to another charset.
* @param inCharset The charset the text content is currently encoded with.
* @param outCharset The charset the returned converted text content will be encoded with.
* @return The given text content converted from one charset to another.
* @throws IOException If an I/O error occurs.
*/
public static InputStream convert(InputStream content, Charset inCharset, Charset outCharset) throws IOException {
return InputStreamHelper.normalize(convert(BytesHelper.normalize(content), inCharset, outCharset));
}
/**
* Returns a new InputStream by converting the given InputStream from the input charset to the output charset,
* unless the two charsets are equal in which case the given InputStream is returned as is.
*
* @param content The text content to be converted to another charset.
* @param inCharsetName The charset the text content is currently encoded with.
* @param outCharsetName The charset the returned converted text content will be encoded with.
* @return The given text content converted from one charset to another.
* @throws IOException If an I/O error occurs.
*/
public static InputStream convert(InputStream content, String inCharsetName, String outCharsetName) throws IOException {
return convert(content, normalize(inCharsetName), normalize(outCharsetName));
}
}
|
package permafrost.tundra.lang;
import permafrost.tundra.io.StreamHelper;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
/**
* A collection of convenience methods for working with Charset objects.
*/
public class CharsetHelper {
/**
* The default character set used by Tundra.
*/
public static final Charset DEFAULT_CHARSET = Charset.forName("UTF-8");
/**
* The default character set name used by Tundra.
*/
public static final String DEFAULT_CHARSET_NAME = DEFAULT_CHARSET.name();
/**
* Disallow instantiation of this class.
*/
private CharsetHelper() {}
/**
* Normalizes the given charset name as a Charset object.
* @param charsetName The character set name to normalize.
* @return The Charset representing the given name,
* or a default Charset if the given name is null.
*/
public static Charset normalize(String charsetName) {
return normalize(charsetName, DEFAULT_CHARSET);
}
/**
* Normalizes the given charset name as a Charset object.
* @param charsetName The character set name to normalize.
* @param defaultCharset The default character set to return
* if the given name is null.
* @return The Charset representing the given name,
* or a default Charset if the given name is null.
*/
public static Charset normalize(String charsetName, Charset defaultCharset) {
Charset charset;
if (charsetName == null) {
charset = defaultCharset;
} else {
charset = Charset.forName(charsetName);
}
return charset;
}
/**
* Normalizes the given Charset object.
* @param charset The character set to normalize.
* @return The normalized character set.
*/
public static Charset normalize(Charset charset) {
return normalize(charset, DEFAULT_CHARSET);
}
/**
* Normalizes the given Charset object.
* @param charset The character set to normalize.
* @param defaultCharset The default character set to return
* if the given charset is null.
* @return The normalized character set.
*/
public static Charset normalize(Charset charset, Charset defaultCharset) {
if (charset == null) charset = defaultCharset;
return charset;
}
/**
* Returns a new byte[] by converting the given byte[] from the input charset to
* the output charset, unless the two charsets are equal in which case the given
* byte[] is returned as is.
* @param content The text content to be converted to another charset.
* @param inCharset The charset the text content is currently encoded with.
* @param outCharset The charset the returned converted text content will be encoded with.
* @return The given text content converted from one charset to another.
*/
public static byte[] convert(byte[] content, Charset inCharset, Charset outCharset) {
if (inCharset.equals(outCharset)) return content;
return BytesHelper.normalize(StringHelper.normalize(content, inCharset), outCharset);
}
/**
* Returns a new byte[] by converting the given byte[] from the input charset to
* the output charset, unless the two charsets are equal in which case the given
* byte[] is returned as is.
* @param content The text content to be converted to another charset.
* @param inCharsetName The charset the text content is currently encoded with.
* @param outCharsetName The charset the returned converted text content will be encoded with.
* @return The given text content converted from one charset to another.
*/
public static byte[] convert(byte[] content, String inCharsetName, String outCharsetName) {
return convert(content, normalize(inCharsetName), normalize(outCharsetName));
}
/**
* Returns a new InputStream by converting the given InputStream from the input charset to
* the output charset, unless the two charsets are equal in which case the given
* InputStream is returned as is.
* @param content The text content to be converted to another charset.
* @param inCharset The charset the text content is currently encoded with.
* @param outCharset The charset the returned converted text content will be encoded with.
* @return The given text content converted from one charset to another.
*/
public static InputStream convert(InputStream content, Charset inCharset, Charset outCharset) throws IOException {
return StreamHelper.normalize(convert(BytesHelper.normalize(content), inCharset, outCharset));
}
/**
* Returns a new InputStream by converting the given InputStream from the input charset to
* the output charset, unless the two charsets are equal in which case the given
* InputStream is returned as is.
* @param content The text content to be converted to another charset.
* @param inCharsetName The charset the text content is currently encoded with.
* @param outCharsetName The charset the returned converted text content will be encoded with.
* @return The given text content converted from one charset to another.
*/
public static InputStream convert(InputStream content, String inCharsetName, String outCharsetName) throws IOException {
return convert(content, normalize(inCharsetName), normalize(outCharsetName));
}
}
|
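/*
 * Illustrative usage sketch (not part of the original sources): shows how the
 * CharsetHelper normalize and convert helpers above might be called. The class
 * name and sample values below are assumptions added for documentation only.
 */
package permafrost.tundra.lang;
import java.nio.charset.Charset;
final class CharsetHelperUsageExample {
    private CharsetHelperUsageExample() {}
    public static void main(String[] args) {
        // A null charset name falls back to the UTF-8 default.
        Charset utf8 = CharsetHelper.normalize((String) null);
        Charset latin1 = CharsetHelper.normalize("ISO-8859-1");
        // Re-encode a byte[] from one charset to another; when both charsets
        // are equal the input array is returned unchanged.
        byte[] utf8Bytes = "héllo".getBytes(utf8);
        byte[] latin1Bytes = CharsetHelper.convert(utf8Bytes, utf8, latin1);
        System.out.println(new String(latin1Bytes, latin1));
    }
}
|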
package roycurtis.signshopexport;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.stream.JsonWriter;
import roycurtis.signshopexport.json.Exclusions;
import roycurtis.signshopexport.json.Record;
import roycurtis.signshopexport.json.TypeAdapters;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.logging.Level;
import static roycurtis.signshopexport.SignShopExport.*;
/**
* Manager class for collecting, serializing and exporting shop data.
*
 * This manager runs itself in a loop using Bukkit's scheduler, so that it can spread the task
 * of serializing many signs across server ticks rather than causing periodic lag spikes.
*
* If the plugin is reloaded or otherwise needs to stop, this manager is safely stopped without any
* state corruption or cleanup needed.
*/
class DataManager implements Runnable
{
private enum Operation
{
Init,
Serializing,
Saving
}
private Operation currentOp = Operation.Init;
private DataSource dataSource;
private File outputFile;
private File outputMinFile;
private Gson gson;
private JsonArray dataSet;
private int current;
private int total;
DataManager(DataSource source)
{
dataSource = source;
outputFile = new File(CONFIG.exportPath);
outputMinFile = new File(CONFIG.exportMinPath);
gson = new GsonBuilder()
.addSerializationExclusionStrategy( new Exclusions() )
.registerTypeAdapterFactory( new TypeAdapters() )
.create();
if ( outputFile.exists() && !outputFile.isFile() )
throw new RuntimeException("outputPath config points to a directory/invalid file");
if ( outputMinFile.exists() && !outputMinFile.isFile() )
throw new RuntimeException("outputMinPath config points to a directory/invalid file");
}
/** Generates data file of entire SignShop database across server ticks */
public void run()
{
switch (currentOp)
{
case Init: doInit(); break;
case Serializing: doSerialize(); break;
case Saving: doSave(); break;
}
}
/** Prepares the source's shops database for serializing */
private void doInit()
{
current = 0;
total = dataSource.prepare();
dataSet = new JsonArray();
if (total <= 0)
{
LOGGER.fine( "There are no shops to export. Doing nothing." );
LOGGER.fine("Scheduling next check in " + CONFIG.exportInterval * 20 + " ticks");
SERVER.getScheduler().runTaskLater(PLUGIN, this, CONFIG.exportInterval * 20);
return;
}
LOGGER.fine("Beginning JSON export of " + total + " entries (1 per tick)");
currentOp = Operation.Serializing;
SERVER.getScheduler().runTaskLater(PLUGIN, this, 1);
}
/** Serializes one sign every tick */
private void doSerialize()
{
Record signRec = null;
JsonElement signJson = null;
try
{
signRec = dataSource.createRecordForIndex(current);
signJson = gson.toJsonTree(signRec);
dataSet.add(signJson);
}
catch (Exception ex)
{
LOGGER.info("Skipping sign " + current + " as it failed to serialize. " +
"This is likely because it was changed mid-process");
LOGGER.fine("Details for sign " + current + ":");
LOGGER.fine(signRec == null
? "Could not generate record from data source"
: signRec.toString() );
LOGGER.log(Level.FINE, "Exception for sign " + current, ex);
}
current++;
if (current >= total)
{
LOGGER.fine("Finished serializing all signs");
currentOp = Operation.Saving;
}
else if (current % 10 == 0)
LOGGER.finer( current + "/" + total + " signs serialized" );
SERVER.getScheduler().runTaskLater(PLUGIN, this, 1);
}
/** Export all the processed shop data, free resources and schedule next export */
private void doSave()
{
doSaveFile(outputFile, false);
doSaveFile(outputMinFile, true);
dataSource.free();
dataSet = null;
current = 0;
total = 0;
currentOp = Operation.Init;
LOGGER.fine("Scheduling next export in " + CONFIG.exportInterval * 20 + " ticks");
SERVER.getScheduler().runTaskLater(PLUGIN, this, CONFIG.exportInterval * 20);
}
private void doSaveFile(File file, boolean minified)
{
try (
FileWriter fWriter = new FileWriter(file);
JsonWriter jWriter = new JsonWriter(fWriter)
)
{
if (!minified)
jWriter.setIndent(" ");
gson.toJson(dataSet, jWriter);
LOGGER.fine( "Json file exported to " + file.getAbsolutePath() );
}
catch (IOException e)
{
throw new RuntimeException("Could not save json file", e);
}
}
}
|
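/*
 * Illustrative sketch (not part of the original plugin): shows how the
 * DataManager above might be started, mirroring the scheduler calls the
 * manager itself uses. The class name, the DataSource argument and the
 * one-tick initial delay are assumptions for documentation only.
 */
package roycurtis.signshopexport;
import static roycurtis.signshopexport.SignShopExport.*;
final class DataManagerBootstrapExample {
    private DataManagerBootstrapExample() {}
    /** Schedules the first run; DataManager then re-schedules itself after each step. */
    static void start(DataSource source) {
        DataManager manager = new DataManager(source);
        // Init -> Serializing (one sign per tick) -> Saving, then back to Init.
        SERVER.getScheduler().runTaskLater(PLUGIN, manager, 1);
    }
}
|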
package sb.tasks.jobs.dailypress;
import com.google.common.io.Files;
import com.jcabi.http.Response;
import com.jcabi.http.request.JdkRequest;
import com.jcabi.http.response.JsoupResponse;
import com.jcabi.http.wire.AutoRedirectingWire;
import com.jcabi.http.wire.RetryWire;
import com.jcabi.log.Logger;
import org.bson.Document;
import org.jsoup.Jsoup;
import sb.tasks.jobs.Agent;
import javax.ws.rs.core.HttpHeaders;
import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Collections;
import java.util.Date;
import java.util.List;
public final class AnOblGazeta implements Agent<MagResult> {
private final Document document;
public AnOblGazeta(Document document) {
this.document = document;
}
@Override
public List<MagResult> perform() throws IOException {
String source = new JdkRequest("https://oblgazeta.ru/")
.through(AutoRedirectingWire.class)
.fetch()
.as(JsoupResponse.class)
.body();
String newPaper = Jsoup.parse(source)
.getElementsByClass("foot-newspaper block100")
.get(0).attr("href");
String paperSource = new JdkRequest(String.format("https://oblgazeta.ru%s", newPaper))
.through(AutoRedirectingWire.class)
.fetch()
.as(JsoupResponse.class)
.body();
String pdfUrl = Jsoup.parse(paperSource)
.getElementsByClass("file pdf").get(0)
.getElementsByTag("a").get(0)
.attr("href");
String url = String.format("https:
Logger.info(this, String.format("Checking link: %s", url));
File out = new File(
System.getProperty("java.io.tmpdir"),
String.format("%s.pdf", new SimpleDateFormat("yyyyMMdd").format(new Date()))
);
if (!url.equals(document.get("vars", Document.class).getString("download_url"))) {
Response response = new JdkRequest(url)
.through(RetryWire.class)
.through(AutoRedirectingWire.class)
.header(HttpHeaders.ACCEPT, "application/pdf")
.fetch();
if (response.status() == 200) {
if ("application/pdf"
.equals(response
.headers()
.getOrDefault(
"Content-Type",
Collections.singletonList("")
).get(0))) {
Files.write(response.binary(), out);
Logger.info(this, String.format("Downloaded file %s", out.getName()));
} else
Logger.info(this, "No magazine for this date");
} else
Logger.info(this, "No content for this page");
} else
Logger.info(this, String.format("%s already downloaded. Exiting", url));
return Collections.singletonList(
new MagResult(out, url, document.get("params", Document.class).getString("text"))
);
}
}
|
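/*
 * Illustrative sketch (not part of the original job): shows the shape of the
 * MongoDB document the AnOblGazeta agent above expects: "vars.download_url"
 * is compared against the latest PDF link to avoid re-downloading, and
 * "params.text" becomes the caption of the MagResult. The class name and
 * sample values are assumptions.
 */
package sb.tasks.jobs.dailypress;
import org.bson.Document;
final class AnOblGazetaDocumentExample {
    private AnOblGazetaDocumentExample() {}
    static Document sample() {
        return new Document()
                .append("vars", new Document("download_url", "https://oblgazeta.ru/previous-issue.pdf"))
                .append("params", new Document("text", "Oblastnaya Gazeta daily issue"));
    }
}
|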
package sc.iview.commands.process;
import static sc.iview.commands.MenuWeights.PROCESS;
import static sc.iview.commands.MenuWeights.PROCESS_CONVEX_HULL;
import graphics.scenery.Node;
import net.imagej.ops.OpService;
import org.scijava.command.Command;
import org.scijava.log.LogService;
import org.scijava.plugin.Menu;
import org.scijava.plugin.Parameter;
import org.scijava.plugin.Plugin;
import sc.iview.SciView;
import sc.iview.process.MeshConverter;
import cleargl.GLVector;
import graphics.scenery.Mesh;
@Plugin(type = Command.class, menuRoot = "SciView",
menu = {@Menu(label = "Process", weight = PROCESS),
@Menu(label = "Convex Hull", weight = PROCESS_CONVEX_HULL)})
public class ConvexHull implements Command {
@Parameter
private OpService ops;
@Parameter
private LogService logService;
@Parameter
private SciView sciView;
@Parameter
private Node node;
@Override
public void run() {
if( node instanceof Mesh ) {
net.imagej.mesh.Mesh ijMesh = MeshConverter.toImageJ((Mesh) node);
net.imagej.mesh.Mesh smoothMesh = ( net.imagej.mesh.Mesh ) ops.geom().convexHull( ijMesh ).get( 0 );
Node convexHull = sciView.addMesh(smoothMesh);
convexHull.setPosition(node.getPosition());
}
}
}
|
package seedu.address.storage;
import seedu.address.commons.exceptions.IllegalValueException;
import seedu.address.model.tag.Tag;
import seedu.address.model.tag.UniqueTagList;
import seedu.address.model.task.Detail;
import seedu.address.model.task.Done;
import seedu.address.model.task.DueByDate;
import seedu.address.model.task.DueByTime;
import seedu.address.model.task.Priority;
import seedu.address.model.task.ReadOnlyTask;
import seedu.address.model.task.Task;
import javax.xml.bind.annotation.XmlElement;
import java.time.LocalDate;
import java.time.LocalTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
/**
* JAXB-friendly version of the Task.
*/
public class XmlAdaptedTask {
@XmlElement(required = true)
private String detail;
@XmlElement(required = true)
private String done;
@XmlElement(required = true)
private String dueByDate;
@XmlElement(required = true)
private String dueByTime;
@XmlElement(required = true)
private String priority;
@XmlElement
private List<XmlAdaptedTag> tagged = new ArrayList<>();
/**
* No-arg constructor for JAXB use.
*/
public XmlAdaptedTask() {}
/**
* Converts a given Task into this class for JAXB use.
*
* @param source future changes to this will not affect the created XmlAdaptedTask
*/
public XmlAdaptedTask(ReadOnlyTask source) {
detail = source.getDetail().details;
done = source.checkDone().value.toString();
dueByDate = source.getDueByDate().toString();
dueByTime = source.getDueByTime().toString();
priority = source.getPriority().value;
tagged = new ArrayList<>();
for (Tag tag : source.getTags()) {
tagged.add(new XmlAdaptedTag(tag));
}
}
public Task toModelType() throws IllegalValueException {
final List<Tag> taskTags = new ArrayList<>();
for (XmlAdaptedTag tag : tagged) {
taskTags.add(tag.toModelType());
}
final Detail detail = new Detail(this.detail);
final DueByDate dbd = new DueByDate(LocalDate.parse(this.dueByDate));
final DueByTime dbt = new DueByTime(LocalTime.parse(this.dueByTime));
final Priority priority = new Priority(this.priority);
final UniqueTagList tags = new UniqueTagList(taskTags);
return new Task(detail, dbd, dbt, priority, tags);
}
}
|
package seedu.todo.controllers;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.joestelmach.natty.*;
import seedu.todo.commons.exceptions.InvalidNaturalDateException;
import seedu.todo.commons.exceptions.ParseException;
import seedu.todo.commons.util.DateUtil;
import seedu.todo.commons.util.ParseUtil;
import seedu.todo.commons.util.StringUtil;
import seedu.todo.controllers.concerns.Tokenizer;
import seedu.todo.controllers.concerns.DateParser;
import seedu.todo.controllers.concerns.Renderer;
import seedu.todo.models.Event;
import seedu.todo.models.Task;
import seedu.todo.models.TodoListDB;
/**
* @@author A0093907W
*
* Controller to add an event or task.
*/
public class AddController implements Controller {
private static final String NAME = "Add";
private static final String DESCRIPTION = "Adds a task / event to the to-do list.\n"
+ "Accepts natural date formats (e.g. \"Today 5pm\" is allowed).";
private static final String COMMAND_SYNTAX = "add <task> by <deadline> || add <event> from <start_date> to <end_date>";
private static final String MESSAGE_ADD_SUCCESS = "Item successfully added!";
private static final String STRING_WHITESPACE = "";
private static final String ADD_EVENT_TEMPLATE = "add event \"%s\" from \"%s\" to \"%s\"";
private static final String ADD_TASK_TEMPLATE = "add task \"%s\" by \"%s\"";
private static final String START_TIME_FIELD = "<start time>";
private static final String END_TIME_FIELD = "<end time>";
private static final String DEADLINE_FIELD = "<deadline>";
private static final String NAME_FIELD = "<name>";
private static CommandDefinition commandDefinition =
new CommandDefinition(NAME, DESCRIPTION, COMMAND_SYNTAX);
public static CommandDefinition getCommandDefinition() {
return commandDefinition;
}
@Override
public float inputConfidence(String input) {
// TODO
return (input.toLowerCase().startsWith("add")) ? 1 : 0;
}
/**
* Get the token definitions for use with <code>tokenizer</code>.<br>
* This method exists primarily because Java does not support HashMap
* literals...
*
* @return tokenDefinitions
*/
private static Map<String, String[]> getTokenDefinitions() {
Map<String, String[]> tokenDefinitions = new HashMap<String, String[]>();
tokenDefinitions.put("default", new String[] {"add"});
tokenDefinitions.put("eventType", new String[] { "event", "task" });
tokenDefinitions.put("time", new String[] { "at", "by", "on", "before", "time" });
tokenDefinitions.put("timeFrom", new String[] { "from" });
tokenDefinitions.put("timeTo", new String[] { "to" });
tokenDefinitions.put("tagName", new String [] { "tag" });
return tokenDefinitions;
}
@Override
public void process(String input) throws ParseException {
Map<String, String[]> parsedResult;
parsedResult = Tokenizer.tokenize(getTokenDefinitions(), input);
// Task or event?
boolean isTask = parseIsTask(parsedResult);
// Name
String name = parseName(parsedResult);
// Tag
String tag = ParseUtil.getTokenResult(parsedResult, "tagName");
// Time
String[] naturalDates = DateParser.extractDatePair(parsedResult);
String naturalFrom = naturalDates[0];
String naturalTo = naturalDates[1];
// Validate isTask, name and times.
if (validateParams(isTask, name, naturalFrom, naturalTo)) {
renderDisambiguation(isTask, name, naturalFrom, naturalTo);
return;
}
// Parse natural date using Natty.
LocalDateTime dateFrom;
LocalDateTime dateTo;
try {
dateFrom = naturalFrom == null ? null : DateParser.parseNatural(naturalFrom);
dateTo = naturalTo == null ? null : DateParser.parseNatural(naturalTo);
} catch (InvalidNaturalDateException e) {
renderDisambiguation(isTask, name, naturalFrom, naturalTo);
return;
}
// Create and persist task / event.
TodoListDB db = TodoListDB.getInstance();
createCalendarItem(db, isTask, name, dateFrom, dateTo, tag);
// Re-render
Renderer.renderIndex(db, MESSAGE_ADD_SUCCESS);
}
/**
* Creates and persists a CalendarItem to the DB.
*
* @param db
* TodoListDB object
* @param isTask
* true if CalendarItem should be a Task, false if Event
* @param name
* Display name of CalendarItem object
* @param dateFrom
* Due date for Task or start date for Event
     * @param dateTo
     *            End date for Event
     * @param tag
     *            Tag to attach to the created item, or null if no tag was given
     */
private void createCalendarItem(TodoListDB db, boolean isTask, String name,
LocalDateTime dateFrom, LocalDateTime dateTo, String tag) {
LocalDateTime parsedDateFrom = DateUtil.parseTimeStamp(dateFrom, dateTo, true);
LocalDateTime parsedDateTo = DateUtil.parseTimeStamp(dateTo, dateFrom, false);
dateFrom = parsedDateFrom;
dateTo = parsedDateTo;
boolean isSuccessfullyTagged = false;
if (isTask) {
Task newTask = db.createTask();
newTask.setName(name);
newTask.setDueDate(dateFrom);
if (tag != null) {
isSuccessfullyTagged = newTask.addTag(tag);
}
} else {
Event newEvent = db.createEvent();
newEvent.setName(name);
newEvent.setStartDate(dateFrom);
newEvent.setEndDate(dateTo);
if (tag != null) {
isSuccessfullyTagged = newEvent.addTag(tag);
}
}
if (isSuccessfullyTagged) {
db.addIntoTagList(new String[] { tag });
}
db.save();
}
/**
* Validates the parsed parameters.
*
* <ul>
* <li>Fail if name is null.</li>
* <li>Fail if "to" exists without "from"</li>
* <li>Fail if task, but "from" and "to" exist</li>
* </ul>
*
* @param isTask
* true if CalendarItem should be a Task, false if Event
* @param name
* Display name of CalendarItem object
* @param naturalFrom
* Raw input for due date for Task or start date for Event
* @param naturalTo
* Raw input for end date for Event
* @return true if validation passed, false otherwise
*/
private boolean validateParams(boolean isTask, String name, String naturalFrom, String naturalTo) {
return (name == null ||
(naturalFrom == null && naturalTo != null) || (isTask && naturalTo != null));
}
/**
* Extracts the display name of the CalendarItem from parsedResult.
*
* @param parsedResult
* @return name
*/
private String parseName(Map<String, String[]> parsedResult) {
String name = null;
if (parsedResult.get("default") != null && parsedResult.get("default")[1] != null) {
name = parsedResult.get("default")[1];
}
if (parsedResult.get("eventType") != null && parsedResult.get("eventType")[1] != null) {
name = parsedResult.get("eventType")[1];
}
return name;
}
/**
* Extracts the intended CalendarItem type from parsedResult.
*
* @param parsedResult
* @return true if Task, false if Event
*/
private boolean parseIsTask(Map<String, String[]> parsedResult) {
boolean isTask = true;
if (parsedResult.get("eventType") != null && parsedResult.get("eventType")[0].equals("event")) {
isTask = false;
}
return isTask;
}
private void renderDisambiguation(boolean isTask, String name, String naturalFrom, String naturalTo) {
name = StringUtil.replaceEmpty(name, NAME_FIELD);
String disambiguationString;
String errorMessage = STRING_WHITESPACE; // TODO
if (isTask) {
naturalFrom = StringUtil.replaceEmpty(naturalFrom, DEADLINE_FIELD);
disambiguationString = String.format(ADD_TASK_TEMPLATE, name, naturalFrom);
} else {
naturalFrom = StringUtil.replaceEmpty(naturalFrom, START_TIME_FIELD);
naturalTo = StringUtil.replaceEmpty(naturalTo, END_TIME_FIELD);
disambiguationString = String.format(ADD_EVENT_TEMPLATE, name, naturalFrom, naturalTo);
}
// Show an error in the console
Renderer.renderDisambiguation(disambiguationString, errorMessage);
}
}
|
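/*
 * Illustrative sketch (not part of the original app): sample command strings
 * that follow the COMMAND_SYNTAX documented in the AddController above, and
 * how inputConfidence() routes them. The class name and sample inputs are
 * assumptions.
 */
package seedu.todo.controllers;
final class AddControllerSyntaxExample {
    private AddControllerSyntaxExample() {}
    public static void main(String[] args) {
        AddController controller = new AddController();
        // Returns 1 because the input starts with "add"; other commands return 0.
        System.out.println(controller.inputConfidence("add task \"buy milk\" by \"today 5pm\""));
        System.out.println(controller.inputConfidence("add event \"meeting\" from \"tomorrow 2pm\" to \"tomorrow 3pm\""));
        System.out.println(controller.inputConfidence("clear all")); // 0.0
    }
}
|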
package sizebay.catalog.client.model;
public enum Measure {
chest,
waist,
hip,
sleeve,
biceps,
length,
insideLeg,
height,
fist,
neck,
thigh,
centralSeam,
underBust,
shoulderWidth,
insoleLength,
insoleWidth,
waistUpper,
waistLower,
ageChart,
weightChart,
equivalence,
headCircumference,
weight,
palm,
wrist,
lowerLength,
collar,
calf,
width,
circumference,
bar;
}
|
package vaeke.restcountries.rest;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.text.Normalizer;
import java.text.Normalizer.Form;
import java.util.ArrayList;
import java.util.List;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.ext.Provider;
import org.apache.log4j.Logger;
import vaeke.restcountries.domain.Country;
import com.google.gson.Gson;
import com.google.gson.stream.JsonReader;
@Provider
@Path("rest")
@Produces(MediaType.APPLICATION_JSON)
public class CountryRest {
private static final Logger LOG = Logger.getLogger(CountryRest.class);
private static List<Country> countries;
@GET
public Object getCountries() {
LOG.info("Getting all");
try {
return getAll();
} catch (IOException e) {
LOG.error(e.getMessage(), e);
return Response.status(Status.INTERNAL_SERVER_ERROR).entity("500: Internal Server Error").build();
}
}
@GET
@Path("alpha2/{alpha2code}")
public Object getByAlpha2(@PathParam("alpha2code") String alpha2) {
LOG.info("Getting by alpha2 " + alpha2);
try {
List<Country> countries = getAll();
for(Country country : countries) {
if (country.getCca2().toLowerCase().equals(alpha2.toLowerCase())) {
return country;
}
}
return Response.status(Status.NOT_FOUND).entity("404: Not Found").build();
} catch (IOException e) {
LOG.error(e.getMessage(), e);
return Response.status(Status.INTERNAL_SERVER_ERROR).entity("500: Internal Server Error").build();
}
}
@GET
@Path("alpha3/{alpha3code}")
public Object getByAlpha3(@PathParam("alpha3code") String alpha3) {
LOG.info("Getting by alpha3 " + alpha3);
try {
List<Country> countries = getAll();
for(Country country : countries) {
if (country.getCca3().toLowerCase().equals(alpha3.toLowerCase())) {
return country;
}
}
return Response.status(Status.NOT_FOUND).entity("404: Not Found").build();
} catch (IOException e) {
LOG.error(e.getMessage(), e);
return Response.status(Status.INTERNAL_SERVER_ERROR).entity("500: Internal Server Error").build();
}
}
@GET
@Path("currency/{currency}")
public Object getByCurrency(@PathParam("currency") String currency) {
LOG.info("Getting by currency " + currency);
try {
List<Country> countries = getAll();
List<Country> result = new ArrayList<Country>();
for(Country country : countries) {
if(country.getCurrency().toLowerCase().contains(currency.toLowerCase())) {
result.add(country);
}
}
if (!result.isEmpty()) {
return result;
} else {
return Response.status(Status.NOT_FOUND).entity("404: Not Found").build();
}
} catch (IOException e) {
LOG.error(e.getMessage(), e);
return Response.status(Status.INTERNAL_SERVER_ERROR).entity("500: Internal Server Error").build();
}
}
@GET
@Path("name/{name}")
public Object getByName(@PathParam("name") String name) {
LOG.info("Getting by name " + name);
try {
List<Country> countries = getAll();
List<Country> result = new ArrayList<Country>();
for(Country country : countries) {
if(country.getName().toLowerCase().contains(name.toLowerCase())) {
result.add(country);
}
if(country.getAltSpellings().toLowerCase().contains(name.toLowerCase()) && !result.contains(country)) {
result.add(country);
}
}
if (!result.isEmpty()) {
return result;
} else {
return Response.status(Status.NOT_FOUND).entity("404: Not Found").build();
}
} catch (IOException e) {
LOG.error(e.getMessage(), e);
return Response.status(Status.INTERNAL_SERVER_ERROR).entity("500: Internal Server Error").build();
}
}
@GET
@Path("callingcode/{callingcode}")
public Object getByCallingCode(@PathParam("callingcode") String callingcode) {
LOG.info("Getting by calling code " + callingcode);
try {
List<Country> countries = getAll();
List<Country> result = new ArrayList<Country>();
for(Country country : countries) {
if(country.getCallingcode().equals(callingcode))
result.add(country);
}
if (!result.isEmpty()) {
return result;
} else {
return Response.status(Status.NOT_FOUND).entity("404: Not Found").build();
}
} catch (IOException e) {
LOG.error(e.getMessage(), e);
return Response.status(Status.INTERNAL_SERVER_ERROR).entity("500: Internal Server Error").build();
}
}
@GET
@Path("capital/{capital}")
public Object getByCapital(@PathParam("capital") String capital) {
LOG.info("Getting by capital " + capital);
try {
List<Country> countries = getAll();
for(Country country : countries) {
if(removeDiacriticalMarks(country.getCapital().toLowerCase()).equals(removeDiacriticalMarks(capital.toLowerCase()))) {
return country;
}
}
return Response.status(Status.NOT_FOUND).entity("404: Not Found").build();
} catch (IOException e) {
LOG.error(e.getMessage(), e);
return Response.status(Status.INTERNAL_SERVER_ERROR).entity("500: Internal Server Error").build();
}
}
private String removeDiacriticalMarks(String string) {
return Normalizer.normalize(string, Form.NFD)
.replaceAll("\\p{InCombiningDiacriticalMarks}+", "");
}
private List<Country> getAll() throws IOException {
if(countries != null) return countries;
LOG.debug("Loading JSON Database");
InputStream is = this.getClass().getClassLoader().getResourceAsStream("countries.json");
Gson gson = new Gson();
JsonReader reader = new JsonReader(new InputStreamReader(is, "UTF-8"));
countries = new ArrayList<Country>();
reader.beginArray();
while(reader.hasNext()) {
Country country = gson.fromJson(reader, Country.class);
countries.add(country);
}
reader.endArray();
reader.close();
return countries;
}
}
|
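/*
 * Illustrative sketch (not part of the original service): invokes the
 * CountryRest resource above directly, the way a JAX-RS container would
 * dispatch GET rest/alpha2/{alpha2code}. The class name and the sample
 * country code "co" are assumptions; countries.json must be on the classpath.
 */
package vaeke.restcountries.rest;
final class CountryRestUsageExample {
    private CountryRestUsageExample() {}
    public static void main(String[] args) {
        CountryRest resource = new CountryRest();
        // Loads the JSON database on first use, then matches cca2 case-insensitively.
        Object result = resource.getByAlpha2("co");
        System.out.println(result);
    }
}
|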
import static org.junit.Assert.*;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class DwarfTest
{
@Test
public void criaAnaoPrimeiroConstrutor(){
Dwarf dwarf = new Dwarf("nome");
assertEquals("nome", dwarf.getName());
assertEquals(110, dwarf.getVida());
assertEquals(Status.VIVO, dwarf.getStatus());
assertTrue(dwarf.getInventario() != null);
assertEquals(1, dwarf.getDataNasc().getDia());
assertEquals(1, dwarf.getDataNasc().getMes());
assertEquals(1, dwarf.getDataNasc().getAno());
}
@Test
public void anaoPerdeVidaMorre(){
Dwarf dwarf = new Dwarf("nome");
for(int i = 0; i < 12; i++)
dwarf.perdeVida();
assertEquals(Status.MORTO, dwarf.getStatus());
}
@Test
public void anaoNaoTemVidaNegativa(){
Dwarf dwarf = new Dwarf("nome");
for(int i = 0; i < 13; i++)
dwarf.perdeVida();
assertEquals(0, dwarf.getVida());
}
@Test
public void testePerdeVida(){
Dwarf dwarf = new Dwarf("nome");
dwarf.perdeVida();
assertEquals(100, dwarf.getVida());
}
@Test
public void anaoTemInventarioDataNascDefault(){
Dwarf dwarf = new Dwarf("nome");
assertTrue(dwarf.getInventario() != null);
assertEquals(1, dwarf.getDataNasc().getDia());
assertEquals(1, dwarf.getDataNasc().getMes());
assertEquals(1, dwarf.getDataNasc().getAno());
}
@Test
public void anaoNovoConstrutorDataNasc222(){
DataTerceiraEra dte = new DataTerceiraEra(2,2,2);
Dwarf dwarf = new Dwarf("nome",dte);
assertEquals("nome", dwarf.getName());
assertEquals(110, dwarf.getVida());
assertEquals(Status.VIVO, dwarf.getStatus());
assertTrue(dwarf.getInventario() != null);
assertEquals(2, dwarf.getDataNasc().getDia());
assertEquals(2, dwarf.getDataNasc().getMes());
assertEquals(2, dwarf.getDataNasc().getAno());
}
@Test
public void anaoAdicionaItemInventario(){
Dwarf dwarf = new Dwarf("nome");
Item item = new Item(1,"espada");
dwarf.adicionarItem(item);
assertTrue(dwarf.getInventario().getItens().get(0) != null);
}
@Test
public void anaoRemoveItemEspada(){
Dwarf dwarf = new Dwarf("nome");
Item espada = new Item(1,"espada");
Item arco = new Item(1,"arco");
dwarf.adicionarItem(espada);
dwarf.adicionarItem(arco);
dwarf.perderItem(espada);
assertTrue(dwarf.getInventario().getItens().get(0) == arco);
}
@Test
public void numeroSorteMenosTresMilETrintaETres(){
DataTerceiraEra dte = new DataTerceiraEra(1,1,2000);
Dwarf dwarf = new Dwarf("nome", dte);
for(int i = 0; i < 3; i++)
dwarf.perdeVida();
assertTrue(dwarf.getNumeroSorte() == -3333);
}
@Test
public void numeroSorteTrintaETres(){
DataTerceiraEra dte = new DataTerceiraEra(1,1,2100);
Dwarf dwarf = new Dwarf("Seixas", dte);
        assertTrue(dwarf.getNumeroSorte() == 33); // 3333 % 100 = 33 (3333 / 100 = 33.33, 33 * 100 = 3300, remainder 33)
}
@Test
    public void numeroSorteCentoEUmBiMaior(){ // born in a leap year, life above ninety
DataTerceiraEra dte = new DataTerceiraEra(1,1,2000);
Dwarf dwarf = new Dwarf("Seixas", dte);
assertTrue(dwarf.getNumeroSorte() == 101);
}
@Test
    public void numeroSorteCentoEUmBiMenor(){ // born in a leap year, life below eighty
DataTerceiraEra dte = new DataTerceiraEra(1,1,2000);
Dwarf dwarf = new Dwarf("Seixas", dte);
for(int i = 0; i < 5; i++)
dwarf.perdeVida();
assertTrue(dwarf.getNumeroSorte() == 101);
}
@Test
    public void numeroSorteCentoEUmNaoBi(){ // not born in a leap year, name other than Seixas or Meireles
DataTerceiraEra dte = new DataTerceiraEra(1,1,2100);
Dwarf dwarf = new Dwarf("Anao", dte);
assertTrue(dwarf.getNumeroSorte() == 101);
}
}
|
package net.domesdaybook.reader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* A very simple {@link ByteReader} which accesses bytes from a file,
* using an underlying RandomAccessFile.
*
 * Note: reading individual bytes from a RandomAccessFile is very slow,
 * so this class is provided only for convenience.
 *
 * Also note that if an IOException occurs while reading bytes from the file,
 * a runtime {@link ReadByteException} is thrown.
*
* @author Matt Palmer.
*/
public final class RandomAccessFileReader implements ByteReader {
private final static String READ_ONLY = "r";
private final static String ILLEGAL_ARGUMENTS = "Null file passed to RandomAccessFileReader";
private final RandomAccessFile file;
/**
* Constructs an immutable RandomAccessFileReader.
*
* @param file The file to read from.
* @throws FileNotFoundException If the file does not exist.
*/
public RandomAccessFileReader(final File file) throws FileNotFoundException {
if (file == null) {
throw new IllegalArgumentException(ILLEGAL_ARGUMENTS);
}
this.file = new RandomAccessFile(file, READ_ONLY);
}
/**
* Reads a byte in the file at the given position.
*
* @param position The position in the file to read a byte from.
* @return The byte at the given position.
* @throws ReadByteException if an IOException occurs reading the file.
*/
@Override
public byte readByte(long position) {
try {
file.seek(position);
return file.readByte();
} catch (IOException ex) {
throw new ReadByteException(ex);
}
}
}
|
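/*
 * Illustrative sketch (not part of the original library): shows how the
 * RandomAccessFileReader above might be used to read a single byte. The class
 * name and file path are assumptions.
 */
package net.domesdaybook.reader;
import java.io.File;
import java.io.FileNotFoundException;
final class RandomAccessFileReaderExample {
    private RandomAccessFileReaderExample() {}
    public static void main(String[] args) throws FileNotFoundException {
        ByteReader reader = new RandomAccessFileReader(new File("example.bin"));
        // Reads the byte at offset 0; an IOException is rethrown as a ReadByteException.
        byte first = reader.readByte(0);
        System.out.println(first);
    }
}
|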
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package org.broad.igv.ui.action;
import org.apache.log4j.Logger;
import org.broad.igv.PreferenceManager;
import org.broad.igv.ui.IGV;
import org.broad.igv.ui.UIConstants;
import org.broad.igv.ui.util.MessageUtils;
import org.broad.igv.util.LongRunningTask;
import org.broad.igv.util.NamedRunnable;
import org.broad.igv.util.ResourceLocator;
import javax.swing.*;
import java.awt.event.ActionEvent;
import java.util.Arrays;
/**
* @author jrobinso
*/
public class LoadFromDatabaseAction extends MenuAction {
static Logger log = Logger.getLogger(LoadFromDatabaseAction.class);
IGV mainFrame;
public LoadFromDatabaseAction(String label, int mnemonic, IGV mainFrame) {
super(label, null, mnemonic);
this.mainFrame = mainFrame;
setToolTipText(UIConstants.LOAD_SERVER_DATA_TOOLTIP);
}
@Override
public void actionPerformed(ActionEvent evt) {
SwingWorker worker = new SwingWorker() {
@Override
protected Object doInBackground() throws Exception {
String host = PreferenceManager.getInstance().get(PreferenceManager.DB_HOST);
if (host == null || host.trim().length() == 0) {
MessageUtils.showMessage("Please set database configuration in user preferences (View > Preferences)");
return null;
}
final PreferenceManager preferenceManager = PreferenceManager.getInstance();
String db = preferenceManager.get(PreferenceManager.DB_NAME);
String port = preferenceManager.get(PreferenceManager.DB_PORT);
String url = "jdbc:mysql://" + host;
if (!port.equals("-1")) {
url += ":" + port;
}
url += "/" + db;
String table2 = "SAMPLE_INFO";
ResourceLocator loc2 = new ResourceLocator(url, table2);
loc2.setDescription("SELECT * FROM " + table2);
// // CNV. Do a join for fun
String table1 = "CNV";
ResourceLocator loc1 = new ResourceLocator(url, table1);
// TODO -- get these mappings from a config table
String query = "SELECT Sample as Sample, `Probe Median` as Value, " +
"Chromosome as chr, Start as start, Stop as end, " +
"CONCAT('<br>Event: ', Event,'<br>% CNV Overlap = ', `% of CNV Overlap`) as description " +
" FROM CNV";
// + "INNER JOIN SAMPLE_INFO ON SAMPLE_INFO.SAMPLE = CNV.SAMPLE " +
// "WHERE SAMPLE_INFO.SUBTYPE like 'Classical'";
// String query = "select * from cnv";
loc1.setDescription(query);
loc1.setType(".seg");
mainFrame.loadTracks(Arrays.asList(loc1, loc2));
return null;
}
@Override
protected void done() {
mainFrame.showLoadedTrackCount();
}
};
worker.execute();
}
}
|
package org.caleydo.view.bicluster.elem;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import org.caleydo.core.data.collection.table.Table;
import org.caleydo.core.data.datadomain.ATableBasedDataDomain;
import org.caleydo.core.data.perspective.table.TablePerspective;
import org.caleydo.core.data.selection.SelectionType;
import org.caleydo.core.data.virtualarray.VirtualArray;
import org.caleydo.core.data.virtualarray.events.DimensionVAUpdateEvent;
import org.caleydo.core.data.virtualarray.events.RecordVAUpdateEvent;
import org.caleydo.core.event.EventListenerManager.ListenTo;
import org.caleydo.core.event.EventPublisher;
import org.caleydo.core.id.IDCategory;
import org.caleydo.core.id.IDType;
import org.caleydo.core.util.color.Color;
import org.caleydo.core.view.opengl.canvas.EDetailLevel;
import org.caleydo.core.view.opengl.layout.Column.VAlign;
import org.caleydo.core.view.opengl.layout2.GLElement;
import org.caleydo.core.view.opengl.layout2.GLElementAccessor;
import org.caleydo.core.view.opengl.layout2.GLElementContainer;
import org.caleydo.core.view.opengl.layout2.GLGraphics;
import org.caleydo.core.view.opengl.layout2.animation.AnimatedGLElementContainer;
import org.caleydo.core.view.opengl.layout2.animation.MoveTransitions;
import org.caleydo.core.view.opengl.layout2.animation.Transitions;
import org.caleydo.core.view.opengl.layout2.basic.GLButton;
import org.caleydo.core.view.opengl.layout2.basic.GLButton.ISelectionCallback;
import org.caleydo.core.view.opengl.layout2.basic.GLSlider;
import org.caleydo.core.view.opengl.layout2.layout.GLLayouts;
import org.caleydo.core.view.opengl.layout2.layout.IGLLayout;
import org.caleydo.core.view.opengl.layout2.layout.IGLLayoutElement;
import org.caleydo.core.view.opengl.layout2.renderer.GLRenderers;
import org.caleydo.core.view.opengl.layout2.renderer.IGLRenderer;
import org.caleydo.core.view.opengl.picking.IPickingListener;
import org.caleydo.core.view.opengl.picking.Pick;
import org.caleydo.view.bicluster.concurrent.ScanProbabilityMatrix;
import org.caleydo.view.bicluster.concurrent.ScanResult;
import org.caleydo.view.bicluster.event.ClusterGetsHiddenEvent;
import org.caleydo.view.bicluster.event.ClusterScaleEvent;
import org.caleydo.view.bicluster.event.CreateBandsEvent;
import org.caleydo.view.bicluster.event.FocusChangeEvent;
import org.caleydo.view.bicluster.event.LZThresholdChangeEvent;
import org.caleydo.view.bicluster.event.MaxThresholdChangeEvent;
import org.caleydo.view.bicluster.event.MouseOverClusterEvent;
import org.caleydo.view.bicluster.event.RecalculateOverlapEvent;
import org.caleydo.view.bicluster.event.SortingChangeEvent;
import org.caleydo.view.bicluster.event.SortingChangeEvent.SortingType;
import org.caleydo.view.bicluster.event.UnhidingClustersEvent;
import org.caleydo.view.bicluster.sorting.ASortingStrategy;
import org.caleydo.view.bicluster.sorting.BandSorting;
import org.caleydo.view.bicluster.sorting.ProbabilityStrategy;
import org.caleydo.view.bicluster.util.Vec2d;
import org.caleydo.view.heatmap.v2.BasicBlockColorer;
import org.caleydo.view.heatmap.v2.HeatMapElement;
import org.caleydo.view.heatmap.v2.HeatMapElement.EShowLabels;
import org.caleydo.view.heatmap.v2.IBlockColorer;
/**
 * Element representing a single cluster in the bicluster view, including its heat map, header, toolbar and threshold bars.
*
* @author Michael Gillhofer
* @author Samuel Gratzl
*/
public class ClusterElement extends AnimatedGLElementContainer implements
IBlockColorer, IGLLayout {
private float highOpacityFactor = 1;
private float lowOpacityFactor = 0.2f;
private float opacityChangeInterval = 10f;
private final TablePerspective data;
private final TablePerspective x;
private final TablePerspective l;
private final TablePerspective z;
private final AllClustersElement allClusters;
private final ExecutorService executor;
private float recThreshold = 0.08f;
private float dimThreshold = 4.5f;
private Vec2d attForce = new Vec2d(0, 0);
private Vec2d repForce = new Vec2d(0, 0);
private Vec2d frameForce = new Vec2d(0, 0);
private boolean isDragged = false;
private boolean isHovered = false;
private boolean isHidden = false;
private boolean hasContent = false;
private Map<GLElement, List<Integer>> dimOverlap;
private Map<GLElement, List<Integer>> recOverlap;
private SortingType sortingType = SortingType.probabilitySorting;
private List<Integer> dimProbabilitySorting;
private List<Integer> recProbabilitySorting;
private boolean setOnlyShowXElements;
private int bcNr;
private ToolBar toolBar;
private HeaderBar headerBar;
private ThresholdBar dimThreshBar;
private ThresholdBar recThreshBar;
private GLElement heatmap;
private float opacityfactor = 1;
private float curOpacityFactor = 1f;
public ClusterElement(TablePerspective data, AllClustersElement root,
TablePerspective x, TablePerspective l, TablePerspective z,
ExecutorService executor) {
setLayout(this);
this.data = data;
this.allClusters = root;
this.x = x;
this.l = l;
this.z = z;
this.executor = executor;
toolBar = new ToolBar();
headerBar = new HeaderBar(this);
dimThreshBar = new ThresholdBar(true);
recThreshBar = new ThresholdBar(false);
this.add(toolBar); // add a element toolbar
this.add(headerBar);
this.add(dimThreshBar);
this.add(recThreshBar);
final HeatMapElement heatmapImpl = new HeatMapElement(data, this,
EDetailLevel.HIGH);
heatmapImpl.setRecordLabels(EShowLabels.RIGHT);
// heatmapImpl.setDimensionLabels(EShowLabels.RIGHT);
// heatmap = new ScrollingDecorator(heatmapImpl, new ScrollBar(true),
// new ScrollBar(false), 5);
heatmap = heatmapImpl;
heatmap.setzDelta(0.5f);
// setzDelta(f);
this.add(heatmap);
setVisibility(EVisibility.PICKABLE);
this.onPick(new IPickingListener() {
@Override
public void pick(Pick pick) {
onPicked(pick);
}
});
this.setLayoutData(MoveTransitions.MOVE_AND_GROW_LINEAR);
}
@Override
public Color apply(int recordID, int dimensionID,
ATableBasedDataDomain dataDomain, boolean deSelected) {
Color color = BasicBlockColorer.INSTANCE.apply(recordID, dimensionID,
dataDomain, deSelected);
color.a = color.a * curOpacityFactor;
return color;
}
public IDCategory getRecordIDCategory() {
return data.getDataDomain().getRecordIDCategory();
}
public IDCategory getDimensionIDCategory() {
return data.getDataDomain().getDimensionIDCategory();
}
public IDType getDimensionIDType() {
return getDimensionVirtualArray().getIdType();
}
public IDType getRecordIDType() {
return getRecordVirtualArray().getIdType();
}
public String getDataDomainID() {
return data.getDataDomain().getDataDomainID();
}
/**
* @return the id, see {@link #id}
*/
public String getID() {
return data.getLabel();
}
@Override
protected void renderPickImpl(GLGraphics g, float w, float h) {
g.color(java.awt.Color.black);
if (isHovered) {
g.fillRect(-20, -20, w < 55 ? 120 : w + 65, h < 80 ? 150 : h + 70);
}
super.renderPickImpl(g, w, h);
}
private int accu; // for animating the opacity fading
@Override
public void layout(int deltaTimeMs) {
// duration -= delta
if (deltaTimeMs + accu > opacityChangeInterval) {
if (opacityfactor < curOpacityFactor)
curOpacityFactor -= 0.02;
else if (opacityfactor > curOpacityFactor)
curOpacityFactor += 0.02;
repaint();
for (GLElement child : this)
child.repaint();
accu = 0;
} else
accu += deltaTimeMs;
super.layout(deltaTimeMs);
}
@Override
protected void renderImpl(GLGraphics g, float w, float h) {
super.renderImpl(g, w, h);
// if (getID().contains("24")){
// System.out.println("stop");
float[] color = { 0, 0, 0, curOpacityFactor };
float[] highlightedColor = SelectionType.MOUSE_OVER.getColor();
g.color(color);
if (isHovered) {
g.color(highlightedColor);
}
g.drawRect(-1, -1, w + 2, h + 2);
}
protected void onPicked(Pick pick) {
switch (pick.getPickingMode()) {
// case DRAGGED:
// if (!pick.isDoDragging()) return;
// if (isDragged == false) {
// allClusters.setDragedLayoutElement(this);
// isDragged = true;
// setLocation(getLocation().x() + pick.getDx(), getLocation().y()
// + pick.getDy());
// relayoutParent();
// repaintPick();
// break;
// case CLICKED:
// if (!pick.isAnyDragging())pick.setDoDragging(true);
// break;
// case MOUSE_RELEASED:
// pick.setDoDragging(false);
// break;
case MOUSE_OVER:
if (!pick.isAnyDragging()) {
isHovered = true;
allClusters.setHooveredElement(this);
EventPublisher.trigger(new MouseOverClusterEvent(this, true));
relayout(); // for showing the toolbar
}
break;
case MOUSE_OUT:
mouseOut();
break;
// default:
// isDragged = false;
// allClusters.setDragedLayoutElement(null);
}
}
private void mouseOut() {
if (isHovered && !headerBar.isClicked()) {
// System.out.println("out");
isHovered = false;
if (wasResizedWhileHovered)
setClusterSize(newDimSize, newRecSize);
allClusters.setHooveredElement(null);
opacityfactor = highOpacityFactor;
// timer.restart();
relayout(); // for showing the toolbar
repaintAll();
for (GLElement child : this)
child.repaint();
EventPublisher.trigger(new MouseOverClusterEvent(this, false));
}
}
private void recreateVirtualArrays(List<Integer> dimIndices,
List<Integer> recIndices) {
VirtualArray dimArray = getDimensionVirtualArray();
VirtualArray recArray = getRecordVirtualArray();
dimArray.clear();
int count = 0;
for (Integer i : dimIndices) {
if (setOnlyShowXElements
&& allClusters.getFixedElementsCount() <= count)
break;
dimArray.append(i);
count++;
}
count = 0;
recArray.clear();
for (Integer i : recIndices) {
if (setOnlyShowXElements
&& allClusters.getFixedElementsCount() <= count)
break;
recArray.append(i);
count++;
}
}
void calculateOverlap() {
// if (getID().contains("27"))
// System.out.println("27 .. overlap calc");
dimOverlap = new HashMap<>();
recOverlap = new HashMap<>();
List<Integer> myDimIndizes = getDimensionVirtualArray().getIDs();
List<Integer> myRecIndizes = getRecordVirtualArray().getIDs();
dimensionOverlapSize = 0;
recordOverlapSize = 0;
for (GLElement element : allClusters.asList()) {
if (element == this)
continue;
ClusterElement e = (ClusterElement) element;
List<Integer> eIndizes = new ArrayList<Integer>(myDimIndizes);
eIndizes.retainAll(e.getDimensionVirtualArray().getIDs());
dimOverlap.put(element, eIndizes);
dimensionOverlapSize += eIndizes.size();
eIndizes = new ArrayList<Integer>(myRecIndizes);
eIndizes.retainAll(e.getRecordVirtualArray().getIDs());
recOverlap.put(element, eIndizes);
recordOverlapSize += eIndizes.size();
}
if (getVisibility() == EVisibility.PICKABLE)
sort(sortingType);
fireTablePerspectiveChanged();
}
public Vec2d getAttForce() {
return attForce;
}
public void setAttForce(Vec2d force) {
this.attForce = force;
}
public void setRepForce(Vec2d force) {
this.repForce = force;
}
public Vec2d getRepForce() {
return repForce;
}
public Vec2d getFrameForce() {
return frameForce;
}
public void setFrameForce(Vec2d frameForce) {
this.frameForce = frameForce;
}
public void setPerspectiveLabel(String dimensionName, String recordName) {
data.getDimensionPerspective().setLabel(dimensionName);
data.getRecordPerspective().setLabel(recordName);
}
private void fireTablePerspectiveChanged() {
EventPublisher.trigger(new RecordVAUpdateEvent(data.getDataDomain()
.getDataDomainID(), data.getRecordPerspective()
.getPerspectiveID(), this));
EventPublisher.trigger(new DimensionVAUpdateEvent(data.getDataDomain()
.getDataDomainID(), data.getDimensionPerspective()
.getPerspectiveID(), this));
repaintAll();
}
private VirtualArray getDimensionVirtualArray() {
return data.getDimensionPerspective().getVirtualArray();
}
private VirtualArray getRecordVirtualArray() {
return data.getRecordPerspective().getVirtualArray();
}
public int getNumberOfDimElements() {
return getDimensionVirtualArray().size();
}
public int getNumberOfRecElements() {
return getRecordVirtualArray().size();
}
public boolean isDragged() {
return isDragged;
}
public boolean isVisible() {
return getVisibility().doRender();
}
public List<Integer> getDimOverlap(GLElement jElement) {
return dimOverlap.get(jElement);
}
public List<Integer> getRecOverlap(GLElement jElement) {
return recOverlap.get(jElement);
}
// int overallOverlapSize;
int dimensionOverlapSize;
int recordOverlapSize;
private double dimSize;
private double recSize;
private boolean isFocused = false;
public int getDimensionOverlapSize() {
return dimensionOverlapSize;
}
public int getRecordOverlapSize() {
return recordOverlapSize;
}
protected IGLLayoutElement getIGLayoutElement() {
return GLElementAccessor.asLayoutElement(this);
}
@Override
public void doLayout(List<? extends IGLLayoutElement> children, float w,
float h) {
// if (isHidden) return;
IGLLayoutElement toolbar = children.get(0);
IGLLayoutElement headerbar = children.get(1);
IGLLayoutElement dimthreshbar = children.get(2);
IGLLayoutElement recthreshbar = children.get(3);
        if (isHovered) { // depending on whether we are hovered, show or hide the toolbars
toolbar.setBounds(-18, 0, 18, 80);
headerbar.setBounds(0, -19, w < 55 ? 75 : w + 20, 17);
dimthreshbar.setBounds(-1, h < 60 ? 61 : h + 1,
w < 55 ? 56 : w + 1, 20);
recthreshbar.setBounds(w < 57 ? 56 : w + 1, -1, 17, h < 60 ? 61
: h + 1);
} else {
toolbar.setBounds(0, 0, 0, 0); // hide by setting the width to 0
headerbar.setBounds(0, -18, w < 50 ? 50 : w, 17);
dimthreshbar.setBounds(-1, h, 0, 0);
recthreshbar.setBounds(w, 0, 0, 0);
}
IGLLayoutElement content = children.get(4);
content.setBounds(0, 0, w, h);
}
private class HeaderBar extends GLButton implements ISelectionCallback {
private boolean clicked = false;
public boolean isClicked() {
return clicked;
}
ClusterElement parent;
public HeaderBar(ClusterElement parent) {
// super(GLLayouts.flowHorizontal(1));
// move to the top
this.parent = parent;
setzDelta(0.5f);
// create buttons
createButtons();
setSize(Float.NaN, 20);
// define the animation used to move this element
// this.setLayoutData(new MoveTransitions.MoveTransitionBase(
// Transitions.NO, Transitions.LINEAR, Transitions.NO,
// Transitions.LINEAR));
}
protected void createButtons() {
setRenderer(new IGLRenderer() {
@Override
public void render(GLGraphics g, float w, float h,
GLElement parent) {
if (isFocused) {
g.color(SelectionType.SELECTION.getColor());
g.fillRoundedRect(0, 0, w, h, 2);
} else if (isHovered) {
g.color(SelectionType.MOUSE_OVER.getColor());
g.fillRoundedRect(0, 0, w, h, 2);
}
float[] color = { 0, 0, 0, curOpacityFactor };
g.textColor(color);
g.drawText(scaleFactor == 1 ? getID() : getID() + " ("
+ (int) (100 * scaleFactor) + "%)", 0, 0, 100, 12);
float[] black = { 0, 0, 0, 1 };
g.textColor(black);
}
});
}
@Override
protected void onPicked(Pick pick) {
switch (pick.getPickingMode()) {
case DRAGGED:
if (!pick.isDoDragging())
return;
if (isDragged == false) {
allClusters.setDragedLayoutElement(parent);
}
isDragged = true;
parent.setLocation(parent.getLocation().x() + pick.getDx(),
parent.getLocation().y() + pick.getDy());
parent.relayout();
parent.repaintPick();
break;
case CLICKED:
if (!pick.isAnyDragging()) {
pick.setDoDragging(true);
clicked = true;
}
break;
case MOUSE_RELEASED:
pick.setDoDragging(false);
clicked = false;
break;
default:
isDragged = false;
allClusters.setDragedLayoutElement(null);
}
}
@Override
public void onSelectionChanged(GLButton button, boolean selected) {
// TODO Auto-generated method stub
}
}
private class ThresholdBar extends GLElementContainer
implements
org.caleydo.core.view.opengl.layout2.basic.GLSlider.ISelectionCallback {
boolean isHorizontal;
GLSlider slider;
float globalMaxThreshold;
float localMaxSliderValue;
float localMinSliderValue;
protected ThresholdBar(boolean layout) {
super(layout ? GLLayouts.flowHorizontal(1) : GLLayouts
.flowVertical(1));
isHorizontal = layout;
// move to the top
setzDelta(+0.5f);
// create buttons
createButtons();
setSize(Float.NaN, 20);
// define the animation used to move this element
if (isHorizontal) {
this.setLayoutData(new MoveTransitions.MoveTransitionBase(
Transitions.LINEAR, Transitions.LINEAR, Transitions.NO,
Transitions.LINEAR));
} else {
this.setLayoutData(new MoveTransitions.MoveTransitionBase(
Transitions.LINEAR, Transitions.LINEAR,
Transitions.LINEAR, Transitions.NO));
}
}
protected void createButtons() {
this.remove(slider);
float max = localMaxSliderValue > localMinSliderValue ? localMaxSliderValue
: localMinSliderValue;
this.slider = new GLSlider(0, max, max / 2);
// slider.setzDelta(-0.5f);
slider.setCallback(this);
slider.setHorizontal(isHorizontal);
if (isHorizontal) {
slider.setSize(Float.NaN, 18);
} else {
slider.setSize(18, Float.NaN);
}
this.add(slider);
}
boolean ignoreNextChange = true;
@Override
public void onSelectionChanged(GLSlider slider, float value) {
if (ignoreNextChange) {
ignoreNextChange = false;
return;
}
if (value <= localMinSliderValue || value >= localMaxSliderValue)
return;
if (isHorizontal)
dimThreshold = value;
else
recThreshold = value;
rebuildMyData(false);
}
protected void updateSliders(double maxValue, double minValue) {
localMaxSliderValue = (float) maxValue;
localMinSliderValue = (float) minValue;
// createButtons();
relayout();
}
@ListenTo
public void listenTo(MaxThresholdChangeEvent event) {
globalMaxThreshold = (float) (isHorizontal ? event
.getDimThreshold() : event.getRecThreshold());
createButtons();
}
@ListenTo
public void listenTo(LZThresholdChangeEvent event) {
if (event.isGlobalEvent()) {
ignoreNextChange = true;
slider.setValue(isHorizontal ? event.getDimensionThreshold()
: event.getRecordThreshold());
}
}
}
private class ToolBar extends GLElementContainer implements
ISelectionCallback {
GLButton hide, sorting, enlarge, smaller, focus;
SortingType sortingButtonCaption = SortingType.probabilitySorting;
public ToolBar() {
super(GLLayouts.flowVertical(5));
setzDelta(-0.1f);
createButtons();
setSize(Float.NaN, 20);
this.setLayoutData(new MoveTransitions.MoveTransitionBase(
Transitions.LINEAR, Transitions.NO, Transitions.LINEAR,
Transitions.LINEAR));
}
protected void createButtons() {
hide = new GLButton();
hide.setRenderer(GLRenderers
.fillImage("./resources/icons/dialog_close.png"));
hide.setTooltip("Close");
hide.setSize(16, Float.NaN);
hide.setCallback(this);
this.add(hide);
sorting = new GLButton();
sorting.setRenderer(GLRenderers
.drawText(
sortingButtonCaption == SortingType.probabilitySorting ? "P"
: "B", VAlign.CENTER));
sorting.setSize(16, Float.NaN);
sorting.setTooltip("Change sorting");
sorting.setCallback(this);
this.add(sorting);
focus = new GLButton();
focus.setRenderer(GLRenderers.drawText(" F"));
focus.setSize(16, Float.NaN);
focus.setTooltip("Focus this Cluster");
focus.setCallback(this);
this.add(focus);
enlarge = new GLButton();
enlarge.setSize(16, Float.NaN);
enlarge.setTooltip("Enlarge");
enlarge.setRenderer(GLRenderers
.fillImage("./resources/icons/zoom_in.png"));
enlarge.setCallback(this);
this.add(enlarge);
smaller = new GLButton();
smaller.setTooltip("Reduce");
smaller.setSize(16, Float.NaN);
smaller.setRenderer(GLRenderers
.fillImage("./resources/icons/zoom_out.png"));
smaller.setCallback(this);
this.add(smaller);
}
void setSortingCaption(SortingType caption) {
sortingButtonCaption = caption;
sorting.setRenderer(GLRenderers
.drawText(
sortingButtonCaption == SortingType.probabilitySorting ? "P"
: "B", VAlign.CENTER));
}
@Override
public void onSelectionChanged(GLButton button, boolean selected) {
if (button == hide) {
hideThisCluster();
} else if (button == sorting) {
setSortingCaption(sortingType == SortingType.probabilitySorting ? SortingType.bandSorting
: SortingType.probabilitySorting);
sort(sortingType == SortingType.probabilitySorting ? SortingType.bandSorting
: SortingType.probabilitySorting);
} else if (button == enlarge) {
scaleFactor += 0.6;
heatmap.setzDelta(1f);
resize();
} else if (button == smaller) {
scaleFactor = 1;
heatmap.setzDelta(0.5f);
resize();
EventPublisher.trigger(new FocusChangeEvent(null));
} else if (button == focus) {
focusThisCluster();
}
}
}
private boolean wasResizedWhileHovered = false;
private double newRecSize = 0;
private double newDimSize = 0;
public void setClusterSize(double x, double y) {
if (isHovered) {
wasResizedWhileHovered = true;
newRecSize = y;
newDimSize = x;
} else {
wasResizedWhileHovered = false;
newRecSize = 0;
newDimSize = 0;
dimSize = x;
recSize = y;
resize();
}
}
@ListenTo
public void listenTo(FocusChangeEvent e) {
if (e.getSender() == this)
return;
if (!isFocused)
return;
scaleFactor = 1;
resize();
this.isFocused = false;
}
private double scaleFactor = 1;
private void resize() {
setSize((float) (dimSize * scaleFactor),
(float) (recSize * scaleFactor));
relayout();
}
private void focusThisCluster() {
this.isFocused = !this.isFocused;
if (isFocused) {
scaleFactor = scaleFactor >= 4 ? 4 : 3;
resize();
EventPublisher.trigger(new FocusChangeEvent(this));
} else {
scaleFactor = 1;
resize();
EventPublisher.trigger(new FocusChangeEvent(null));
mouseOut();
}
repaintAll();
}
private void hideThisCluster() {
isHidden = true;
setVisibility(EVisibility.NONE);
isHovered = false;
relayout();
allClusters.setHooveredElement(null);
EventPublisher.trigger(new ClusterGetsHiddenEvent(getID()));
EventPublisher.trigger(new MouseOverClusterEvent(this, false));
repaintAll();
}
@ListenTo
private void listenTo(UnhidingClustersEvent event) {
isHidden = false;
if (hasContent) {
setVisibility(EVisibility.PICKABLE);
}
}
@ListenTo
private void listenTo(SortingChangeEvent e) {
if (e.getSender() instanceof ClusterElement && e.getSender() == this) {
// only local change
} else {
sort(e.getType());
}
toolBar.setSortingCaption(e.getType());
}
@ListenTo
private void listenTo(MouseOverClusterEvent event) {
ClusterElement hoveredElement = event.getElement();
if (hoveredElement == this || getDimOverlap(hoveredElement).size() > 0
|| getRecOverlap(hoveredElement).size() > 0) {
opacityfactor = highOpacityFactor;
return;
} else if (event.isMouseOver()) {
opacityfactor = lowOpacityFactor;
} else {
opacityfactor = highOpacityFactor;
}
}
@ListenTo
private void listenTo(LZThresholdChangeEvent event) {
if (!event.isGlobalEvent()) {
return;
}
if (event.getRecordThreshold() != recThreshold
|| event.getDimensionThreshold() != dimThreshold
|| setOnlyShowXElements != event.isFixedClusterCount()) {
recThreshold = event.getRecordThreshold();
dimThreshold = event.getDimensionThreshold();
setOnlyShowXElements = event.isFixedClusterCount();
rebuildMyData(event.isGlobalEvent());
}
}
public void setData(List<Integer> dimIndices, List<Integer> recIndices,
boolean setXElements, String id, int bcNr, double maxDim,
double maxRec, double minDim, double minRec) {
data.setLabel(id);
if (maxDim >= 0 && maxRec >= 0) {
dimThreshBar.updateSliders(maxDim, minDim);
recThreshBar.updateSliders(maxRec, minRec);
}
dimProbabilitySorting = new ArrayList<Integer>(dimIndices);
recProbabilitySorting = new ArrayList<Integer>(recIndices);
this.bcNr = bcNr;
this.setOnlyShowXElements = setXElements;
if (dimIndices.size() > 0 && recIndices.size() > 0) {
hasContent = true;
if (!isHidden)
setVisibility(EVisibility.PICKABLE);
recreateVirtualArrays(dimIndices, recIndices);
} else {
setVisibility(EVisibility.NONE);
hasContent = false;
}
}
private void sort(SortingType type) {
switch (type) {
case probabilitySorting:
sortingType = SortingType.probabilitySorting;
probabilitySorting();
break;
case bandSorting:
sortingType = SortingType.bandSorting;
bandSorting();
break;
default:
}
}
private void bandSorting() {
Set<Integer> finalDimSorting = new LinkedHashSet<Integer>();
List<List<Integer>> nonEmptyDimBands = new ArrayList<>();
for (List<Integer> dimBand : dimOverlap.values()) {
if (dimBand.size() > 0)
nonEmptyDimBands.add(dimBand);
}
BandSorting dimConflicts = new BandSorting(nonEmptyDimBands);
for (Integer i : dimConflicts) {
finalDimSorting.add(i);
}
finalDimSorting.addAll(dimProbabilitySorting);
Set<Integer> finalRecSorting = new LinkedHashSet<Integer>();
List<List<Integer>> nonEmptyRecBands = new ArrayList<>();
for (List<Integer> recBand : recOverlap.values()) {
if (recBand.size() > 0)
nonEmptyRecBands.add(recBand);
}
BandSorting recConflicts = new BandSorting(nonEmptyRecBands);
for (Integer i : recConflicts) {
finalRecSorting.add(i);
}
finalRecSorting.addAll(recProbabilitySorting);
recreateVirtualArrays(new ArrayList<Integer>(finalDimSorting),
new ArrayList<Integer>(finalRecSorting));
fireTablePerspectiveChanged();
}
private void probabilitySorting() {
sortingType = SortingType.probabilitySorting;
recreateVirtualArrays(dimProbabilitySorting, recProbabilitySorting);
fireTablePerspectiveChanged();
}
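	/**
	 * Checks whether the given record ids form one contiguous block in the current record
	 * ordering, starting from the first id of the ordering that is contained in the overlap.
	 * The dimension variant below is the analogue for the dimension ordering.
	 */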
public boolean isContinuousRecSequenze(List<Integer> overlap) {
List<Integer> recordArray = getRecordVirtualArray().getIDs();
int index = 0;
for (Integer i : recordArray) {
if (overlap.contains(i))
break;
index++;
}
if (index > recordArray.size() - overlap.size())
return false;
int done = 1;
for (Integer i : recordArray.subList(index, recordArray.size() - 1)) {
if (done++ >= overlap.size())
break;
if (!overlap.contains(i))
return false;
}
return true;
}
public boolean isContinuousDimSequenze(List<Integer> overlap) {
List<Integer> recordArray = getDimensionVirtualArray().getIDs();
int index = 0;
for (Integer i : recordArray) {
if (overlap.contains(i))
break;
index++;
}
if (index > recordArray.size() - overlap.size())
return false;
int done = 1;
for (Integer i : recordArray.subList(index, recordArray.size() - 1)) {
if (done++ >= overlap.size())
break;
if (!overlap.contains(i))
return false;
}
return true;
}
public int getDimIndexOf(int value) {
return getDimensionVirtualArray().indexOf(value);
}
public int getRecIndexOf(int value) {
return getRecordVirtualArray().indexOf(value);
}
private void rebuildMyData(boolean isGlobal) {
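		// Re-scan the probability matrices for this bicluster: L (record side) is scanned with
		// recThreshold and Z (dimension side) with dimThreshold, both in parallel on the executor;
		// the resulting index lists become the cluster's new content.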
Table L = l.getDataDomain().getTable();
Table Z = z.getDataDomain().getTable();
Future<ScanResult> recList = null, dimList = null;
ASortingStrategy strategy = new ProbabilityStrategy(L, bcNr);
recList = executor.submit(new ScanProbabilityMatrix(recThreshold, L,
bcNr, strategy));
strategy = new ProbabilityStrategy(Z, bcNr);
dimList = executor.submit(new ScanProbabilityMatrix(dimThreshold, Z,
bcNr, strategy));
List<Integer> dimIndices = null, recIndices = null;
try {
dimIndices = dimList.get().getIndices();
recIndices = recList.get().getIndices();
} catch (InterruptedException | ExecutionException e) {
e.printStackTrace();
}
setData(dimIndices, recIndices, setOnlyShowXElements, getID(), bcNr,
-1, -1, -1, -1);
EventPublisher.trigger(new ClusterScaleEvent(this));
if (!isGlobal)
EventPublisher.trigger(new MouseOverClusterEvent(this, true));
EventPublisher.trigger(new RecalculateOverlapEvent(this, isGlobal));
EventPublisher.trigger(new CreateBandsEvent(this));
}
}
|
package org.joval.os.windows.registry;
import org.joval.intf.windows.registry.IKey;
import org.joval.intf.windows.registry.IMultiStringValue;
/**
* Representation of a Windows registry multi-string value.
*
* @author David A. Solin
* @version %I% %G%
*/
public class MultiStringValue extends Value implements IMultiStringValue {
String[] data;
public MultiStringValue(IKey parent, String name, String[] data) {
type = Type.REG_MULTI_SZ;
this.parent = parent;
this.name = name;
this.data = data;
}
public String[] getData() {
return data;
}
public String toString() {
StringBuffer sb = new StringBuffer("MultiStringValue [Name=\"").append(name).append("\" Value=");
if (data == null) {
sb.append("null");
} else {
sb.append("{");
for (int i=0; i < data.length; i++) {
if (i > 0) {
sb.append(", ");
}
sb.append("\"").append(data[i]).append("\"");
}
sb.append("}");
}
sb.append("]");
return sb.toString();
}
}
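// Illustrative usage (sketch; parentKey and the value name are hypothetical):
//   IMultiStringValue mv = new MultiStringValue(parentKey, "Sources", new String[] {"a", "b"});
//   mv.getData();   // -> {"a", "b"}
//   mv.toString();  // -> MultiStringValue [Name="Sources" Value={"a", "b"}]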
|
package org.mockito.internal.creation;
import org.mockito.MockSettings;
import org.mockito.exceptions.Reporter;
import org.mockito.internal.creation.settings.CreationSettings;
import org.mockito.internal.debugging.VerboseMockInvocationLogger;
import org.mockito.internal.util.MockCreationValidator;
import org.mockito.internal.util.MockNameImpl;
import org.mockito.internal.util.MockitoSpy;
import org.mockito.listeners.InvocationListener;
import org.mockito.mock.MockCreationSettings;
import org.mockito.mock.MockName;
import org.mockito.stubbing.Answer;
import java.io.Serializable;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static org.mockito.internal.util.collections.Sets.newSet;
@SuppressWarnings("unchecked")
public class MockSettingsImpl<T> extends CreationSettings<T> implements MockSettings, MockCreationSettings<T> {
private static final long serialVersionUID = 4475297236197939569L;
public MockSettings serializable() {
this.serializable = true;
return this;
}
public MockSettings extraInterfaces(Class... extraInterfaces) {
if (extraInterfaces == null || extraInterfaces.length == 0) {
new Reporter().extraInterfacesRequiresAtLeastOneInterface();
}
for (Class i : extraInterfaces) {
if (i == null) {
new Reporter().extraInterfacesDoesNotAcceptNullParameters();
} else if (!i.isInterface()) {
new Reporter().extraInterfacesAcceptsOnlyInterfaces(i);
}
}
this.extraInterfaces = newSet(extraInterfaces);
return this;
}
public MockName getMockName() {
return mockName;
}
public Set<Class> getExtraInterfaces() {
return extraInterfaces;
}
public Object getSpiedInstance() {
return spiedInstance;
}
public MockSettings name(String name) {
this.name = name;
return this;
}
public MockSettings spiedInstance(Object spiedInstance) {
this.spiedInstance = spiedInstance;
return this;
}
public MockSettings defaultAnswer(Answer defaultAnswer) {
this.defaultAnswer = defaultAnswer;
return this;
}
public Answer<Object> getDefaultAnswer() {
return defaultAnswer;
}
public boolean isSerializable() {
return serializable;
}
public MockSettings stubOnly() {
this.stubOnly = true;
return this;
}
public boolean isStubOnly() {
return this.stubOnly;
}
public MockSettings verboseLogging() {
if (!invocationListenersContainsType(VerboseMockInvocationLogger.class)) {
invocationListeners(new VerboseMockInvocationLogger());
}
return this;
}
public MockSettings invocationListeners(InvocationListener... listeners) {
if (listeners == null || listeners.length == 0) {
new Reporter().invocationListenersRequiresAtLeastOneListener();
}
for (InvocationListener listener : listeners) {
if (listener == null) {
new Reporter().invocationListenerDoesNotAcceptNullParameters();
}
this.invocationListeners.add(listener);
}
return this;
}
private boolean invocationListenersContainsType(Class<?> clazz) {
for (InvocationListener listener : invocationListeners) {
if (listener.getClass().equals(clazz)) {
return true;
}
}
return false;
}
public List<InvocationListener> getInvocationListeners() {
return this.invocationListeners;
}
public boolean hasInvocationListeners() {
return !invocationListeners.isEmpty();
}
public Class<T> getTypeToMock() {
return typeToMock;
}
public MockCreationSettings<T> confirm(Class<T> typeToMock) {
return validatedSettings(typeToMock, this);
}
private static <T> CreationSettings<T> validatedSettings(Class<T> typeToMock, CreationSettings<T> source) {
MockCreationValidator validator = new MockCreationValidator();
validator.validateType(typeToMock);
validator.validateExtraInterfaces(typeToMock, source.getExtraInterfaces());
validator.validateMockedType(typeToMock, source.getSpiedInstance());
//TODO SF - add this validation and also add missing coverage
// validator.validateDelegatedInstance(classToMock, settings.getDelegatedInstance());
validator.validateSerializable(typeToMock, source.isSerializable());
CreationSettings<T> settings = new CreationSettings<T>(source);
settings.setMockName(new MockNameImpl(source.getName(), typeToMock));
settings.setTypeToMock(typeToMock);
settings.setExtraInterfaces(prepareExtraInterfaces(source));
return settings;
}
private static Set<Class> prepareExtraInterfaces(CreationSettings settings) {
Set<Class> interfaces = new HashSet<Class>(settings.getExtraInterfaces());
if(settings.isSerializable()) {
interfaces.add(Serializable.class);
}
if (settings.getSpiedInstance() != null) {
interfaces.add(MockitoSpy.class);
}
return interfaces;
}
}
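// Illustrative usage (sketch, not taken from the Mockito sources): settings are built up
// through the fluent MockSettings API and then validated via confirm(), e.g.
//   MockSettingsImpl<Runnable> settings = new MockSettingsImpl<Runnable>();
//   settings.name("worker").serializable().extraInterfaces(Cloneable.class);
//   MockCreationSettings<Runnable> validated = settings.confirm(Runnable.class);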
|
package org.myrobotlab.control;
import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.Container;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import javax.swing.BoxLayout;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JSlider;
import javax.swing.JSplitPane;
import javax.swing.JTabbedPane;
import javax.swing.JTextField;
import javax.swing.ListSelectionModel;
import javax.swing.ScrollPaneConstants;
import javax.swing.SwingConstants;
import javax.swing.SwingUtilities;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import org.myrobotlab.logging.LoggerFactory;
import org.myrobotlab.service.GUIService;
import org.myrobotlab.service.InMoovGestureCreator.ServoItemHolder;
import org.myrobotlab.service._TemplateService;
import org.slf4j.Logger;
/**
 * based on _TemplateServiceGUI
 *
 * @author LunDev (github), Ma. Vo. (MyRobotlab)
 */
public class InMoovGestureCreatorGUI extends ServiceGUI implements
ActionListener, ItemListener {
static final long serialVersionUID = 1L;
public final static Logger log = LoggerFactory
.getLogger(InMoovGestureCreatorGUI.class.getCanonicalName());
boolean[] tabs_main_checkbox_states;
JTextField control_gestname;
JTextField control_funcname;
JButton control_connect;
JButton control_loadscri;
JButton control_savescri;
JButton control_loadgest;
JButton control_addgest;
JButton control_updategest;
JButton control_removegest;
JButton control_testgest;
JList control_list;
JTextField frame_add_textfield;
JButton frame_add;
JButton frame_addspeed;
JTextField frame_addsleep_textfield;
JButton frame_addsleep;
JTextField frame_addspeech_textfield;
JButton frame_addspeech;
JButton frame_importminresmax;
JButton frame_remove;
JButton frame_load;
JButton frame_update;
JButton frame_copy;
JButton frame_up;
JButton frame_down;
JButton frame_test;
JCheckBox frame_moverealtime;
JList framelist;
public InMoovGestureCreatorGUI(final String boundServiceName,
final GUIService myService, final JTabbedPane tabs) {
super(boundServiceName, myService, tabs);
}
public void init() {
		// Layout overview:
		// display: top (tabbed servo panels) above bottom
		// bottom: splitpanebottom1bottom2 -> bottom1 | bottom2
		// bottom1: |bottom1top| <- JButton's: exportcode, testgesture
		// bottom2: |bottom2top| <- JButton's & JTextField's: [frame_] connect, add,
		// addsleep, addspeech, update, copy, up, down, test
// predefined min- / res- / max- positions
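		// Layout: one row per body section in the order used below (Right Hand, Right Arm,
		// Left Hand, Left Arm, Head, Torso); each entry is a {min, rest, max} triple for one servo.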
int[][][] minresmaxpos = {
{ { 0, 90, 180 }, { 0, 90, 180 }, { 0, 90, 180 },
{ 0, 90, 180 }, { 0, 90, 180 }, { 0, 90, 180 } },
{ { 0, 90, 180 }, { 0, 90, 180 }, { 0, 90, 180 },
{ 0, 90, 180 } },
{ { 0, 90, 180 }, { 0, 90, 180 }, { 0, 90, 180 },
{ 0, 90, 180 }, { 0, 90, 180 }, { 0, 90, 180 } },
{ { 0, 90, 180 }, { 0, 90, 180 }, { 0, 90, 180 },
{ 0, 90, 180 } },
{ { 0, 90, 180 }, { 0, 90, 180 }, { 0, 90, 180 },
{ 0, 90, 180 }, { 0, 90, 180 } },
{ { 0, 90, 180 }, { 0, 90, 180 }, { 0, 90, 180 } } };
JPanel top = new JPanel();
JTabbedPane top_tabs = new JTabbedPane(JTabbedPane.TOP,
JTabbedPane.WRAP_TAB_LAYOUT);
// JPanels for the JTabbedPane
final JPanel mainpanel = new JPanel();
final JPanel c1panel = new JPanel();
final JPanel c2panel = new JPanel();
final JPanel c3panel = new JPanel();
		// mainpanel (enabling / disabling sections)
mainpanel.setLayout(new BoxLayout(mainpanel, BoxLayout.Y_AXIS));
tabs_main_checkbox_states = new boolean[6];
for (int i = 0; i < 6; i++) {
String name = "";
if (i == 0) {
name = "Right Hand";
} else if (i == 1) {
name = "Right Arm";
} else if (i == 2) {
name = "Left Hand";
} else if (i == 3) {
name = "Left Arm";
} else if (i == 4) {
name = "Head";
} else if (i == 5) {
name = "Torso";
}
final int fi = i;
final JCheckBox checkbox = new JCheckBox(name);
checkbox.addItemListener(new ItemListener() {
@Override
public void itemStateChanged(ItemEvent arg0) {
tabs_main_checkbox_states[fi] = checkbox.isSelected();
myService.send(boundServiceName,
"tabs_main_checkbox_states_changed",
tabs_main_checkbox_states);
}
});
checkbox.setSelected(true);
mainpanel.add(checkbox);
}
Container c1con = c1panel;
Container c2con = c2panel;
Container c3con = c3panel;
GridBagLayout c1gbl = new GridBagLayout();
c1con.setLayout(c1gbl);
GridBagLayout c2gbl = new GridBagLayout();
c2con.setLayout(c2gbl);
GridBagLayout c3gbl = new GridBagLayout();
c3con.setLayout(c3gbl);
// c1-, c2-, c3-panel
for (int i1 = 0; i1 < 6; i1++) {
Container con = null;
GridBagLayout gbl = null;
if (i1 == 0 || i1 == 1) {
con = c1con;
gbl = c1gbl;
} else if (i1 == 2 || i1 == 3) {
con = c2con;
gbl = c2gbl;
} else if (i1 == 4 || i1 == 5) {
con = c3con;
gbl = c3gbl;
}
int size = 0;
if (i1 == 0 || i1 == 2) {
size = 6;
} else if (i1 == 1 || i1 == 3) {
size = 4;
} else if (i1 == 4) {
size = 5;
} else if (i1 == 5) {
size = 3;
}
int offset = 0;
if (i1 == 1 || i1 == 3) {
offset = 6;
} else if (i1 == 5) {
offset = 5;
}
ServoItemHolder[] sih1 = new ServoItemHolder[size];
for (int i2 = 0; i2 < size; i2++) {
ServoItemHolder sih11 = new ServoItemHolder();
String servoname = "";
if (i1 == 0 || i1 == 2) {
if (i2 == 0) {
servoname = "thumb";
} else if (i2 == 1) {
servoname = "index";
} else if (i2 == 2) {
servoname = "majeure";
} else if (i2 == 3) {
servoname = "ringfinger";
} else if (i2 == 4) {
servoname = "pinky";
} else if (i2 == 5) {
servoname = "wrist";
}
} else if (i1 == 1 || i1 == 3) {
if (i2 == 0) {
servoname = "bicep";
} else if (i2 == 1) {
servoname = "rotate";
} else if (i2 == 2) {
servoname = "shoulder";
} else if (i2 == 3) {
servoname = "omoplate";
}
} else if (i1 == 4) {
if (i2 == 0) {
servoname = "neck";
} else if (i2 == 1) {
servoname = "rothead";
} else if (i2 == 2) {
servoname = "eyeX";
} else if (i2 == 3) {
servoname = "eyeY";
} else if (i2 == 4) {
servoname = "jaw";
}
} else if (i1 == 5) {
if (i2 == 0) {
servoname = "topStom";
} else if (i2 == 1) {
servoname = "midStom";
} else if (i2 == 2) {
servoname = "lowStom";
}
}
sih11.fin = new JLabel(servoname);
sih11.min = new JLabel(minresmaxpos[i1][i2][0] + "");
sih11.res = new JLabel(minresmaxpos[i1][i2][1] + "");
sih11.max = new JLabel(minresmaxpos[i1][i2][2] + "");
sih11.sli = new JSlider();
customizeslider(sih11.sli, i1, i2, minresmaxpos[i1][i2]);
sih11.akt = new JLabel(sih11.sli.getValue() + "");
sih11.spe = new JTextField("1.00");
// x y w h wx wy
gridbaglayout_addComponent(con, gbl, sih11.fin, offset + i2, 0,
1, 1, 1.0, 1.0);
gridbaglayout_addComponent(con, gbl, sih11.min, offset + i2, 1,
1, 1, 1.0, 1.0);
gridbaglayout_addComponent(con, gbl, sih11.res, offset + i2, 2,
1, 1, 1.0, 1.0);
gridbaglayout_addComponent(con, gbl, sih11.max, offset + i2, 3,
1, 1, 1.0, 1.0);
gridbaglayout_addComponent(con, gbl, sih11.sli, offset + i2, 4,
1, 1, 1.0, 1.0);
gridbaglayout_addComponent(con, gbl, sih11.akt, offset + i2, 5,
1, 1, 1.0, 1.0);
gridbaglayout_addComponent(con, gbl, sih11.spe, offset + i2, 6,
1, 1, 1.0, 1.0);
sih1[i2] = sih11;
}
myService.send(boundServiceName, "servoitemholder_set_sih1", i1,
sih1);
}
top_tabs.addTab("Main", mainpanel);
top_tabs.addTab("Right Side", c1panel);
top_tabs.addTab("Left Side", c2panel);
top_tabs.addTab("Head + Torso", c3panel);
top.add(BorderLayout.CENTER, top_tabs);
JPanel bottom = new JPanel();
JPanel bottom1 = new JPanel();
bottom1.setLayout(new BorderLayout());
JPanel bottom1top = new JPanel();
bottom1top.setLayout(new BoxLayout(bottom1top, BoxLayout.X_AXIS));
control_gestname = new JTextField("Gest. Name");
bottom1top.add(control_gestname);
control_funcname = new JTextField("Func. Name");
bottom1top.add(control_funcname);
control_connect = new JButton("Connect");
bottom1top.add(control_connect);
control_connect.addActionListener(this);
bottom1.add(BorderLayout.NORTH, bottom1top);
JPanel bottom1right = new JPanel();
bottom1right.setLayout(new BoxLayout(bottom1right, BoxLayout.Y_AXIS));
control_loadscri = new JButton("Load Scri");
bottom1right.add(control_loadscri);
control_loadscri.addActionListener(this);
control_savescri = new JButton("Save Scri");
bottom1right.add(control_savescri);
control_savescri.addActionListener(this);
control_loadgest = new JButton("Load Gest");
bottom1right.add(control_loadgest);
control_loadgest.addActionListener(this);
control_addgest = new JButton("Add Gest");
bottom1right.add(control_addgest);
control_addgest.addActionListener(this);
control_updategest = new JButton("Update Gest");
bottom1right.add(control_updategest);
control_updategest.addActionListener(this);
control_removegest = new JButton("Remove Gest");
bottom1right.add(control_removegest);
control_removegest.addActionListener(this);
control_testgest = new JButton("Test Gest");
bottom1right.add(control_testgest);
control_testgest.addActionListener(this);
bottom1.add(BorderLayout.EAST, bottom1right);
String[] te1 = { " ",
"T1", "T2", "T3", "T4", "T5", "T6", "T7", "T8", "T9", "T10" };
control_list = new JList(te1);
control_list.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
JScrollPane control_listscroller = new JScrollPane(control_list);
control_listscroller
.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS);
control_listscroller
.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
bottom1.add(BorderLayout.CENTER, control_listscroller);
JPanel bottom2 = new JPanel();
bottom2.setLayout(new BoxLayout(bottom2, BoxLayout.Y_AXIS));
JPanel bottom2top = new JPanel();
bottom2top.setLayout(new BoxLayout(bottom2top, BoxLayout.Y_AXIS));
JPanel bottom2top1 = new JPanel();
bottom2top1.setLayout(new BoxLayout(bottom2top1, BoxLayout.X_AXIS));
frame_add_textfield = new JTextField("Frame-Name");
bottom2top1.add(frame_add_textfield);
frame_add = new JButton("Add");
bottom2top1.add(frame_add);
frame_add.addActionListener(this);
frame_addspeed = new JButton("Add Speed");
bottom2top1.add(frame_addspeed);
frame_addspeed.addActionListener(this);
frame_addsleep_textfield = new JTextField("Seconds of Sleep");
bottom2top1.add(frame_addsleep_textfield);
frame_addsleep = new JButton("Add Sleep");
bottom2top1.add(frame_addsleep);
frame_addsleep.addActionListener(this);
frame_addspeech_textfield = new JTextField("Speech");
bottom2top1.add(frame_addspeech_textfield);
frame_addspeech = new JButton("Add Speech");
bottom2top1.add(frame_addspeech);
frame_addspeech.addActionListener(this);
bottom2top.add(bottom2top1);
JPanel bottom2top2 = new JPanel();
bottom2top2.setLayout(new BoxLayout(bottom2top2, BoxLayout.X_AXIS));
frame_importminresmax = new JButton("Import Min Rest Max");
bottom2top2.add(frame_importminresmax);
frame_importminresmax.addActionListener(this);
frame_remove = new JButton("Remove");
bottom2top2.add(frame_remove);
frame_remove.addActionListener(this);
frame_load = new JButton("Load");
bottom2top2.add(frame_load);
frame_load.addActionListener(this);
frame_update = new JButton("Update");
bottom2top2.add(frame_update);
frame_update.addActionListener(this);
frame_copy = new JButton("Copy");
bottom2top2.add(frame_copy);
frame_copy.addActionListener(this);
frame_up = new JButton("Up");
bottom2top2.add(frame_up);
frame_up.addActionListener(this);
frame_down = new JButton("Down");
bottom2top2.add(frame_down);
frame_down.addActionListener(this);
frame_test = new JButton("Test");
bottom2top2.add(frame_test);
frame_test.addActionListener(this);
frame_moverealtime = new JCheckBox("Move Real Time");
frame_moverealtime.setSelected(false);
bottom2top2.add(frame_moverealtime);
frame_moverealtime.addItemListener(this);
bottom2top.add(bottom2top2);
bottom2.add(BorderLayout.NORTH, bottom2top);
String[] te2 = {
" ",
"T1", "T2", "T3", "T4", "T5", "T6", "T7", "T8", "T9", "T10" };
framelist = new JList(te2);
framelist.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
JScrollPane framelistscroller = new JScrollPane(framelist);
framelistscroller
.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS);
framelistscroller
.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
bottom2.add(BorderLayout.CENTER, framelistscroller);
JSplitPane splitpanebottom1bottom2 = new JSplitPane(
JSplitPane.HORIZONTAL_SPLIT, bottom1, bottom2);
splitpanebottom1bottom2.setOneTouchExpandable(true);
// splitpanebottom1bottom2.setDividerLocation(200);
bottom.add(splitpanebottom1bottom2);
JSplitPane splitpanetopbottom = new JSplitPane(
JSplitPane.VERTICAL_SPLIT, top, bottom);
splitpanetopbottom.setOneTouchExpandable(true);
// splitpanetopbottom.setDividerLocation(300);
display.add(splitpanetopbottom);
}
public void getState(_TemplateService template) {
// I think I should do something with this ...
SwingUtilities.invokeLater(new Runnable() {
public void run() {
}
});
}
@Override
public void attachGUI() {
// commented out subscription due to this class being used for
// un-defined gui's
// subscribe("publishState", "getState", _TemplateService.class);
// send("publishState");
}
@Override
public void detachGUI() {
// commented out subscription due to this class being used for
// un-defined gui's
// unsubscribe("publishState", "getState", _TemplateService.class);
}
@Override
public void actionPerformed(ActionEvent ae) {
Object o = ae.getSource();
// Button - Events
if (o == control_connect) {
myService
.send(boundServiceName, "control_connect", control_connect);
} else if (o == control_loadscri) {
myService.send(boundServiceName, "control_loadscri", control_list);
} else if (o == control_savescri) {
myService.send(boundServiceName, "control_savescri");
} else if (o == control_loadgest) {
myService.send(boundServiceName, "control_loadgest", control_list,
framelist, control_gestname, control_funcname);
} else if (o == control_addgest) {
myService.send(boundServiceName, "control_addgest", control_list,
control_gestname, control_funcname);
} else if (o == control_updategest) {
myService.send(boundServiceName, "control_updategest",
control_list, control_gestname, control_funcname);
} else if (o == control_removegest) {
myService
.send(boundServiceName, "control_removegest", control_list);
} else if (o == control_testgest) {
myService.send(boundServiceName, "control_testgest");
} else if (o == frame_add) {
myService.send(boundServiceName, "frame_add", framelist,
frame_add_textfield);
} else if (o == frame_addspeed) {
myService.send(boundServiceName, "frame_addspeed", framelist);
} else if (o == frame_addsleep) {
myService.send(boundServiceName, "frame_addsleep", framelist,
frame_addsleep_textfield);
} else if (o == frame_addspeech) {
myService.send(boundServiceName, "frame_addspeech", framelist,
frame_addspeech_textfield);
} else if (o == frame_importminresmax) {
myService.send(boundServiceName, "frame_importminresmax");
} else if (o == frame_remove) {
myService.send(boundServiceName, "frame_remove", framelist);
} else if (o == frame_load) {
myService.send(boundServiceName, "frame_load", framelist,
frame_add_textfield, frame_addsleep_textfield,
frame_addspeech_textfield);
} else if (o == frame_update) {
myService.send(boundServiceName, "frame_update", framelist,
frame_add_textfield, frame_addsleep_textfield,
frame_addspeech_textfield);
} else if (o == frame_copy) {
myService.send(boundServiceName, "frame_copy", framelist);
} else if (o == frame_up) {
myService.send(boundServiceName, "frame_up", framelist);
} else if (o == frame_down) {
myService.send(boundServiceName, "frame_down", framelist);
} else if (o == frame_test) {
myService.send(boundServiceName, "frame_test", framelist);
}
myService.send(boundServiceName, "publishState");
}
@Override
public void itemStateChanged(ItemEvent ie) {
Object o = ie.getSource();
// CheckBox - Events
if (o == frame_moverealtime) {
myService.send(boundServiceName, "frame_moverealtime",
frame_moverealtime);
}
}
public void gridbaglayout_addComponent(Container cont, GridBagLayout gbl,
Component c, int x, int y, int width, int height, double weightx,
double weighty) {
// function for easier gridbaglayout's
GridBagConstraints gbc = new GridBagConstraints();
gbc.fill = GridBagConstraints.BOTH;
gbc.gridx = x;
gbc.gridy = y;
gbc.gridwidth = width;
gbc.gridheight = height;
gbc.weightx = weightx;
gbc.weighty = weighty;
gbl.setConstraints(c, gbc);
cont.add(c);
}
public void customizeslider(JSlider slider, final int t1, final int t2,
int[] minresmaxpos11) {
// preset the slider
slider.setOrientation(SwingConstants.VERTICAL);
slider.setMinimum(minresmaxpos11[0]);
slider.setMaximum(minresmaxpos11[2]);
slider.setMajorTickSpacing(20);
slider.setMinorTickSpacing(1);
slider.createStandardLabels(1);
slider.setPaintTicks(true);
slider.setPaintLabels(true);
slider.setValue((minresmaxpos11[0] + minresmaxpos11[2]) / 2);
slider.addChangeListener(new ChangeListener() {
@Override
public void stateChanged(ChangeEvent ce) {
myService.send(boundServiceName,
"servoitemholder_slider_changed", t1, t2);
}
});
}
}
|
package org.myrobotlab.service;
import java.util.HashMap;
import org.myrobotlab.framework.Service;
import org.myrobotlab.framework.repo.ServiceType;
import org.myrobotlab.logging.Level;
import org.myrobotlab.logging.Logging;
import org.myrobotlab.logging.LoggingFactory;
import org.myrobotlab.service.interfaces.SpeechRecognizer;
import org.myrobotlab.service.interfaces.SpeechSynthesis;
import org.myrobotlab.service.interfaces.TextListener;
import org.myrobotlab.service.interfaces.TextPublisher;
/**
 * WebkitSpeechRecognition - uses the speech recognition that is built into the Chrome web browser.
 * This service requires the WebGui to be running.
 */
public class WebkitSpeechRecognition extends Service implements SpeechRecognizer, TextPublisher {
/**
   * TODO: make it its own class.
   * TODO: merge this data structure with the ProgramAB OOB stuff?
*
*/
public class Command {
public String name;
public String method;
public Object[] params;
Command(String name, String method, Object[] params) {
this.name = name;
this.method = method;
this.params = params;
}
}
private static final long serialVersionUID = 1L;
private String language = "en-US";
HashMap<String, Command> commands = new HashMap<String, Command>();
public WebkitSpeechRecognition(String reservedKey) {
super(reservedKey);
}
@Override
public String publishText(String text) {
log.info("Publish Text : {}", text);
// TODO: is there a better place to do this? maybe recognized?
// TODO: remove this! it probably should be invoking the command on publish text.. only on recognized?!
// not sure.
String cleantext = text.toLowerCase().trim();
/* Double Speak FIX - I don't think a cmd should be sent from here
* because it's not 'recognized' - recognized sends commands
     * this method should be subscribed to - GroG
*
if (commands.containsKey(cleantext)) {
// If we have a command. send it when we recognize...
Command cmd = commands.get(cleantext);
send(cmd.name, cmd.method, cmd.params);
}
*/
return cleantext;
}
@Override
public void listeningEvent() {
// TODO Auto-generated method stub
}
@Override
public void pauseListening() {
// TODO Auto-generated method stub
}
@Override
public String recognized(String text) {
log.info("Recognized : >{}<", text);
String cleanedText = text.toLowerCase().trim();
if (commands.containsKey(cleanedText)) {
// If we have a command. send it when we recognize...
Command cmd = commands.get(cleanedText);
send(cmd.name, cmd.method, cmd.params);
}
return cleanedText;
}
@Override
public void resumeListening() {
// TODO Auto-generated method stub
}
@Override
public void startListening() {
// TODO Auto-generated method stub
}
@Override
public void stopListening() {
// TODO Auto-generated method stub
}
public void setLanguage(String language) {
// Here we want to set the language string and broadcast the update to the
// web gui so that it knows to update the language on webkit speech
this.language = language;
broadcastState();
}
public String getLanguage() {
// a getter for it .. just in case.
return this.language;
}
@Override
public void addTextListener(TextListener service) {
addListener("publishText", service.getName(), "onText");
}
@Override
public void addMouth(SpeechSynthesis mouth) {
mouth.addEar(this);
// TODO : we can implement the "did you say x?"
// logic like sphinx if we want here.
// when we add the ear, we need to listen for request confirmation
}
@Override
public void onStartSpeaking(String utterance) {
// at this point we should subscribe to this in the webgui
// so we can pause listening.
}
@Override
public void onEndSpeaking(String utterance) {
// need to subscribe to this in the webgui
// so we can resume listening.
}
public static void main(String[] args) {
LoggingFactory.getInstance().configure();
LoggingFactory.getInstance().setLevel(Level.INFO);
try {
Runtime.start("webgui", "WebGui");
Runtime.start("webkitspeechrecognition", "WebkitSpeechRecognition");
} catch (Exception e) {
Logging.logError(e);
}
}
/**
* This static method returns all the details of the class without
* it having to be constructed. It has description, categories,
* dependencies, and peer definitions.
*
* @return ServiceType - returns all the data
*
*/
static public ServiceType getMetaData(){
ServiceType meta = new ServiceType(WebkitSpeechRecognition.class.getCanonicalName());
meta.addDescription("Speech recognition using Google Chrome webkit");
meta.addCategory("speech recognition");
// meta.addPeer("tracker", "Tracking", "test tracking");
return meta;
}
@Override
public void lockOutAllGrammarExcept(String lockPhrase) {
log.warn("Lock out grammar not supported on webkit, yet...");
}
@Override
public void clearLock() {
log.warn("clear lock out grammar not supported on webkit, yet...");
}
// TODO - should this be in Service ?????
public void addCommand(String actionPhrase, String name, String method, Object... params) {
actionPhrase = actionPhrase.toLowerCase().trim();
if (commands == null) {
commands = new HashMap<String, Command>();
}
commands.put(actionPhrase, new Command(name, method, params));
}
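  // Illustrative usage (hypothetical service and method names):
  //   ear.addCommand("go to rest", "i01", "rest");
  // When the phrase "go to rest" is recognized, recognized() above sends the "rest" method
  // to the service named "i01".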
// TODO: this might need to go into the interface if we want to support it.
public void addComfirmations(String... txt) {
log.warn("Confirmation support not enabled in webkit speech.");
}
// TODO: this might need to go into the interface if we want to support it.
public void addNegations(String... txt) {
log.warn("Negations not enabled in webkit speech.");
}
public void startListening(String grammar) {
log.warn("Webkit speech doesn't listen for a specific grammar. use startListening() instead. ");
startListening();
}
}
|
package org.pentaho.di.job.entries.shell;
import static org.pentaho.di.job.entry.validator.AndValidator.putValidators;
import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.andValidator;
import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.fileExistsValidator;
import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.notBlankValidator;
import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.vfs.FileObject;
import org.pentaho.di.core.CheckResultInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.logging.Log4jFileAppender;
import org.pentaho.di.core.logging.LogWriter;
import org.pentaho.di.core.util.StreamLogger;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobEntryType;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.job.entry.JobEntryBase;
import org.pentaho.di.job.entry.JobEntryInterface;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.resource.ResourceEntry;
import org.pentaho.di.resource.ResourceReference;
import org.pentaho.di.resource.ResourceEntry.ResourceType;
import org.w3c.dom.Node;
/**
* Shell type of Job Entry. You can define shell scripts to be executed in a Job.
*
* @author Matt
* @since 01-10-2003, rewritten on 18-06-2004
*
*/
public class JobEntryShell extends JobEntryBase implements Cloneable, JobEntryInterface
{
private String filename;
private String workDirectory;
public String arguments[];
public boolean argFromPrevious;
public boolean setLogfile;
public String logfile, logext;
public boolean addDate, addTime;
public int loglevel;
public boolean execPerRow;
public JobEntryShell(String name)
{
super(name, "");
setJobEntryType(JobEntryType.SHELL);
}
public JobEntryShell()
{
this("");
clear();
}
public JobEntryShell(JobEntryBase jeb)
{
super(jeb);
setJobEntryType(JobEntryType.SHELL);
}
public Object clone()
{
JobEntryShell je = (JobEntryShell) super.clone();
return je;
}
public String getXML()
{
StringBuffer retval = new StringBuffer(300);
retval.append(super.getXML());
retval.append(" ").append(XMLHandler.addTagValue("filename", filename));
retval.append(" ").append(XMLHandler.addTagValue("work_directory", workDirectory));
retval.append(" ").append(XMLHandler.addTagValue("arg_from_previous", argFromPrevious));
retval.append(" ").append(XMLHandler.addTagValue("exec_per_row", execPerRow));
retval.append(" ").append(XMLHandler.addTagValue("set_logfile", setLogfile));
retval.append(" ").append(XMLHandler.addTagValue("logfile", logfile));
retval.append(" ").append(XMLHandler.addTagValue("logext", logext));
retval.append(" ").append(XMLHandler.addTagValue("add_date", addDate));
retval.append(" ").append(XMLHandler.addTagValue("add_time", addTime));
retval.append(" ").append(XMLHandler.addTagValue("loglevel", LogWriter.getLogLevelDesc(loglevel)));
if (arguments!=null)
for (int i=0;i<arguments.length;i++)
{
// THIS IS A VERY BAD WAY OF READING/SAVING AS IT MAKES
// THE XML "DUBIOUS". DON'T REUSE IT.
retval.append(" ").append(XMLHandler.addTagValue("argument"+i, arguments[i]));
}
return retval.toString();
}
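	// Illustrative shape of the generated fragment (sketch; the actual values depend on the
	// entry's configuration, booleans are written as Y/N as read back in loadXML below):
	//   <filename>backup.sh</filename>
	//   <work_directory>/tmp</work_directory>
	//   <arg_from_previous>N</arg_from_previous>
	//   <exec_per_row>N</exec_per_row>
	//   <set_logfile>Y</set_logfile>
	//   <logfile>shell</logfile>
	//   <logext>log</logext>
	//   <argument0>first</argument0>
	//   <argument1>second</argument1>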
public void loadXML(Node entrynode, List<DatabaseMeta> databases, Repository rep) throws KettleXMLException
{
try
{
super.loadXML(entrynode, databases);
setFileName( XMLHandler.getTagValue(entrynode, "filename") );
setWorkDirectory( XMLHandler.getTagValue(entrynode, "work_directory") );
argFromPrevious = "Y".equalsIgnoreCase( XMLHandler.getTagValue(entrynode, "arg_from_previous") );
execPerRow = "Y".equalsIgnoreCase( XMLHandler.getTagValue(entrynode, "exec_per_row") );
setLogfile = "Y".equalsIgnoreCase( XMLHandler.getTagValue(entrynode, "set_logfile") );
addDate = "Y".equalsIgnoreCase( XMLHandler.getTagValue(entrynode, "add_date") );
addTime = "Y".equalsIgnoreCase( XMLHandler.getTagValue(entrynode, "add_time") );
logfile = XMLHandler.getTagValue(entrynode, "logfile");
logext = XMLHandler.getTagValue(entrynode, "logext");
loglevel = LogWriter.getLogLevel( XMLHandler.getTagValue(entrynode, "loglevel"));
// How many arguments?
int argnr = 0;
while ( XMLHandler.getTagValue(entrynode, "argument"+argnr)!=null) argnr++;
arguments = new String[argnr];
// Read them all...
// THIS IS A VERY BAD WAY OF READING/SAVING AS IT MAKES
// THE XML "DUBIOUS". DON'T REUSE IT.
for (int a=0;a<argnr;a++) arguments[a]=XMLHandler.getTagValue(entrynode, "argument"+a);
}
catch(KettleException e)
{
throw new KettleXMLException("Unable to load job entry of type 'shell' from XML node", e);
}
}
// Load the jobentry from repository
public void loadRep(Repository rep, long id_jobentry, List<DatabaseMeta> databases)
throws KettleException
{
try
{
super.loadRep(rep, id_jobentry, databases);
setFileName( rep.getJobEntryAttributeString(id_jobentry, "file_name") );
setWorkDirectory( rep.getJobEntryAttributeString(id_jobentry, "work_directory") );
argFromPrevious = rep.getJobEntryAttributeBoolean(id_jobentry, "arg_from_previous");
execPerRow = rep.getJobEntryAttributeBoolean(id_jobentry, "exec_per_row");
setLogfile = rep.getJobEntryAttributeBoolean(id_jobentry, "set_logfile");
addDate = rep.getJobEntryAttributeBoolean(id_jobentry, "add_date");
addTime = rep.getJobEntryAttributeBoolean(id_jobentry, "add_time");
logfile = rep.getJobEntryAttributeString(id_jobentry, "logfile");
logext = rep.getJobEntryAttributeString(id_jobentry, "logext");
loglevel = LogWriter.getLogLevel( rep.getJobEntryAttributeString(id_jobentry, "loglevel") );
// How many arguments?
int argnr = rep.countNrJobEntryAttributes(id_jobentry, "argument");
arguments = new String[argnr];
// Read them all...
for (int a=0;a<argnr;a++)
{
arguments[a]= rep.getJobEntryAttributeString(id_jobentry, a, "argument");
}
}
catch(KettleDatabaseException dbe)
{
throw new KettleException("Unable to load job entry of type 'shell' from the repository with id_jobentry="+id_jobentry, dbe);
}
}
// Save the attributes of this job entry
public void saveRep(Repository rep, long id_job)
throws KettleException
{
try
{
super.saveRep(rep, id_job);
rep.saveJobEntryAttribute(id_job, getID(), "file_name", filename);
rep.saveJobEntryAttribute(id_job, getID(), "work_directory", workDirectory);
rep.saveJobEntryAttribute(id_job, getID(), "arg_from_previous", argFromPrevious);
rep.saveJobEntryAttribute(id_job, getID(), "exec_per_row", execPerRow);
rep.saveJobEntryAttribute(id_job, getID(), "set_logfile", setLogfile);
rep.saveJobEntryAttribute(id_job, getID(), "add_date", addDate);
rep.saveJobEntryAttribute(id_job, getID(), "add_time", addTime);
rep.saveJobEntryAttribute(id_job, getID(), "logfile", logfile);
rep.saveJobEntryAttribute(id_job, getID(), "logext", logext);
rep.saveJobEntryAttribute(id_job, getID(), "loglevel", LogWriter.getLogLevelDesc(loglevel));
// save the arguments...
if (arguments!=null)
{
for (int i=0;i<arguments.length;i++)
{
rep.saveJobEntryAttribute(id_job, getID(), i, "argument", arguments[i]);
}
}
}
catch(KettleDatabaseException dbe)
{
throw new KettleException("Unable to save job entry of type 'shell' to the repository", dbe);
}
}
public void clear()
{
super.clear();
filename=null;
workDirectory=null;
arguments=null;
argFromPrevious=false;
addDate=false;
addTime=false;
logfile=null;
logext=null;
setLogfile=false;
execPerRow=false;
}
public void setFileName(String n)
{
filename=n;
}
public String getFilename()
{
return filename;
}
public String getRealFilename()
{
return environmentSubstitute(getFilename());
}
public void setWorkDirectory(String n)
{
workDirectory=n;
}
public String getWorkDirectory()
{
return workDirectory;
}
public String getLogFilename()
{
String retval="";
if (setLogfile)
{
retval+=logfile;
Calendar cal = Calendar.getInstance();
if (addDate)
{
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");
retval+="_"+sdf.format(cal.getTime());
}
if (addTime)
{
SimpleDateFormat sdf = new SimpleDateFormat("HHmmss");
retval+="_"+sdf.format(cal.getTime());
}
if (logext!=null && logext.length()>0)
{
retval+="."+logext;
}
}
return retval;
}
public Result execute(Result result, int nr, Repository rep, Job parentJob) throws KettleException
{
LogWriter log = LogWriter.getInstance();
Log4jFileAppender appender = null;
int backupLogLevel = log.getLogLevel();
if (setLogfile)
{
try
{
appender = LogWriter.createFileAppender(environmentSubstitute(getLogFilename()), true);
}
catch(KettleException e)
{
log.logError(toString(), "Unable to open file appender for file ["+getLogFilename()+"] : "+e.toString());
log.logError(toString(), Const.getStackTracker(e));
result.setNrErrors(1);
result.setResult(false);
return result;
}
log.addAppender(appender);
log.setLogLevel(loglevel);
}
result.setEntryNr( nr );
int iteration = 0;
String args[] = arguments;
RowMetaAndData resultRow = null;
boolean first = true;
List<RowMetaAndData> rows = result.getRows();
log.logDetailed(toString(), "Found "+(rows!=null?rows.size():0)+" previous result rows");
while( ( first && !execPerRow ) || ( execPerRow && rows!=null && iteration<rows.size() && result.getNrErrors()==0 ) )
{
first=false;
if (rows!=null && execPerRow)
{
resultRow = (RowMetaAndData) rows.get(iteration);
}
else
{
resultRow = null;
}
List<RowMetaAndData> cmdRows = null;
if (execPerRow) // Execute for each input row
{
if (argFromPrevious) // Copy the input row to the (command line) arguments
{
if (resultRow!=null)
{
args = new String[resultRow.size()];
for (int i=0;i<resultRow.size();i++)
{
args[i] = resultRow.getString(i, null);
}
}
}
else
{
// Just pass a single row
List<RowMetaAndData> newList = new ArrayList<RowMetaAndData>();
newList.add(resultRow);
cmdRows = newList;
}
}
else
{
if (argFromPrevious)
{
// Only put the first Row on the arguments
args = null;
if (resultRow!=null)
{
args = new String[resultRow.size()];
for (int i=0;i<resultRow.size();i++)
{
args[i] = resultRow.getString(i, null);
}
}
else
{
cmdRows = rows;
}
}
else
{
// Keep it as it was...
cmdRows = rows;
}
}
executeShell(result, cmdRows, args);
iteration++;
}
if (setLogfile)
{
if (appender!=null)
{
log.removeAppender(appender);
appender.close();
}
log.setLogLevel(backupLogLevel);
}
return result;
}
private void executeShell(Result result, List<RowMetaAndData> cmdRows, String[] args)
{
LogWriter log = LogWriter.getInstance();
try
{
// What's the exact command?
String base[] = null;
List<String> cmds = new ArrayList<String>();
log.logBasic(toString(), "Running on platform : "+Const.getOS());
FileObject fileObject = null;
String realFilename = environmentSubstitute(getFilename());
fileObject = KettleVFS.getFileObject(realFilename);
if( Const.getOS().equals( "Windows 95" ) )
{
base = new String[] { "command.com", "/C" };
}
			else if( Const.getOS().startsWith( "Windows" ) )
{
base = new String[] { "cmd.exe", "/C" };
}
else
{
base = new String[] { KettleVFS.getFilename(fileObject) };
}
// Construct the arguments...
if (argFromPrevious && cmdRows!=null)
{
// Add the base command...
for (int i=0;i<base.length;i++) cmds.add(base[i]);
if( Const.getOS().equals( "Windows 95" ) ||
Const.getOS().startsWith( "Windows" ) )
{
// for windows all arguments including the command itself need to be
// included in 1 argument to cmd/command.
StringBuffer cmdline = new StringBuffer(300);
cmdline.append('"');
cmdline.append(optionallyQuoteField(KettleVFS.getFilename(fileObject), "\""));
// Add the arguments from previous results...
for (int i=0;i<cmdRows.size();i++) // Normally just one row, but once in a while to remain compatible we have multiple.
{
RowMetaAndData r = (RowMetaAndData)cmdRows.get(i);
for (int j=0;j<r.size();j++)
{
cmdline.append(' ');
cmdline.append(optionallyQuoteField(r.getString(j, null), "\""));
}
}
cmdline.append('"');
cmds.add(cmdline.toString());
}
else
{
// Add the arguments from previous results...
for (int i=0;i<cmdRows.size();i++) // Normally just one row, but once in a while to remain compatible we have multiple.
{
RowMetaAndData r = (RowMetaAndData)cmdRows.get(i);
for (int j=0;j<r.size();j++)
{
cmds.add(optionallyQuoteField(r.getString(j, null), "\""));
}
}
}
}
else if (args!=null)
{
// Add the base command...
for (int i=0;i<base.length;i++) cmds.add(base[i]);
if( Const.getOS().equals( "Windows 95" ) ||
Const.getOS().startsWith( "Windows" ) )
{
// for windows all arguments including the command itself need to be
// included in 1 argument to cmd/command.
StringBuffer cmdline = new StringBuffer(300);
cmdline.append('"');
cmdline.append(optionallyQuoteField(KettleVFS.getFilename(fileObject), "\""));
for (int i=0;i<args.length;i++)
{
cmdline.append(' ');
cmdline.append(optionallyQuoteField(args[i], "\""));
}
cmdline.append('"');
cmds.add(cmdline.toString());
}
else
{
for (int i=0;i<args.length;i++)
{
cmds.add(args[i]);
}
}
}
StringBuffer command = new StringBuffer();
Iterator<String> it = cmds.iterator();
boolean first = true;
while ( it.hasNext() )
{
if ( ! first )
command.append(' ');
else
first = false;
command.append((String)it.next());
}
log.logBasic(toString(), "Executing command : "+command.toString());
// Build the environment variable list...
ProcessBuilder procBuilder = new ProcessBuilder(cmds);
Map<String,String> env = procBuilder.environment();
String[] variables = listVariables();
for ( int i = 0; i < variables.length; i++ )
{
env.put(variables[i], getVariable(variables[i]));
}
if ( getWorkDirectory() != null && ! Const.isEmpty(Const.rtrim(getWorkDirectory())))
{
File file = new File(getWorkDirectory());
procBuilder.directory(file);
}
Process proc = procBuilder.start();
// any error message?
            StreamLogger errorLogger = new StreamLogger(proc.getErrorStream(), toString()+" (stderr)");
            // any output?
            StreamLogger outputLogger = new StreamLogger(proc.getInputStream(), toString()+" (stdout)");
// kick them off
new Thread(errorLogger).start();
new Thread(outputLogger).start();
proc.waitFor();
log.logDetailed(toString(), "Command " + command.toString() + " has finished");
// What's the exit status?
result.setExitStatus( proc.exitValue() );
if (result.getExitStatus()!=0)
{
log.logDetailed(toString(), "Exit status of shell ["+environmentSubstitute(getFilename())+"] was "+result.getExitStatus());
result.setNrErrors(1);
}
}
catch(IOException ioe)
{
log.logError(toString(), "Error running shell ["+environmentSubstitute(getFilename())+"] : "+ioe.toString());
result.setNrErrors(1);
}
catch(InterruptedException ie)
{
log.logError(toString(), "Shell ["+environmentSubstitute(getFilename())+"] was interupted : "+ie.toString());
result.setNrErrors(1);
}
catch(Exception e)
{
log.logError(toString(), "Unexpected error running shell ["+environmentSubstitute(getFilename())+"] : "+e.toString());
result.setNrErrors(1);
}
if (result.getNrErrors() > 0)
{
result.setResult( false );
}
else
{
result.setResult( true );
}
}
private String optionallyQuoteField(String field, String quote)
{
if (Const.isEmpty(field) ) return "\"\"";
// If the field already contains quotes, we don't touch it anymore, just return the same string...
// also return it if no spaces are found
if (field.indexOf(quote)>=0 || field.indexOf(' ')<0 )
{
return field;
}
else
{
return quote + field + quote;
}
}
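	// Illustrative behaviour (follows directly from the checks above):
	//   optionallyQuoteField("my file.txt", "\"") returns the field wrapped in double quotes
	//   optionallyQuoteField("file.txt", "\"")    returns the field unchanged (no space)
	//   optionallyQuoteField("", "\"")            returns a pair of double quotes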
public boolean evaluates()
{
return true;
}
public boolean isUnconditional()
{
return true;
}
public List<ResourceReference> getResourceDependencies(JobMeta jobMeta) {
List<ResourceReference> references = super.getResourceDependencies(jobMeta);
if (!Const.isEmpty(filename)) {
String realFileName = jobMeta.environmentSubstitute(filename);
ResourceReference reference = new ResourceReference(this);
reference.getEntries().add( new ResourceEntry(realFileName, ResourceType.FILE));
references.add(reference);
}
return references;
}
@Override
public void check(List<CheckResultInterface> remarks, JobMeta jobMeta)
{
andValidator().validate(this, "workDirectory", remarks, putValidators(notBlankValidator(), fileExistsValidator())); //$NON-NLS-1$
andValidator().validate(this, "filename", remarks, putValidators(notBlankValidator())); //$NON-NLS-1$
if (setLogfile) {
andValidator().validate(this, "logfile", remarks, putValidators(notBlankValidator())); //$NON-NLS-1$
}
}
protected String getLogfile()
{
return logfile;
}
}
|
package org.usfirst.frc.team3335.robot.subsystems;
import com.kauailabs.navx.frc.AHRS;
import edu.wpi.first.wpilibj.DriverStation;
import edu.wpi.first.wpilibj.I2C;
import edu.wpi.first.wpilibj.SPI;
import edu.wpi.first.wpilibj.command.Subsystem;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
public class NavX extends Subsystem implements LoggableSubsystem {
private AHRS ahrs = null;
public NavX() {
try {
ahrs = new AHRS(SPI.Port.kMXP); // Use SPI!!!
//ahrs = new AHRS(I2C.Port.kMXP);
} catch (RuntimeException ex ) {
DriverStation.reportError("Error instantiating navX MXP: " + ex.getMessage(), true);
}
}
@Override
protected void initDefaultCommand() {
}
public AHRS getAHRS() {
return ahrs;
}
public double getYaw() {
return ahrs.getYaw();
}
public void zeroYaw() {
ahrs.zeroYaw();
}
@Override
public void log() {
/* Display 6-axis Processed Angle Data */
//SmartDashboard.putBoolean( "NavX: IMU_Connected", ahrs.isConnected());
//SmartDashboard.putBoolean( "NavX: IMU_IsCalibrating", ahrs.isCalibrating());
SmartDashboard.putNumber( "NavX: IMU_Yaw", ahrs.getYaw());
SmartDashboard.putNumber( "NavX: IMU_Pitch", ahrs.getPitch());
SmartDashboard.putNumber( "NavX: IMU_Roll", ahrs.getRoll());
/* Display tilt-corrected, Magnetometer-based heading (requires */
/* magnetometer calibration to be useful) */
SmartDashboard.putNumber( "NavX: IMU_CompassHeading", ahrs.getCompassHeading());
/* Display 9-axis Heading (requires magnetometer calibration to be useful) */
//SmartDashboard.putNumber( "NavX: IMU_FusedHeading", ahrs.getFusedHeading());
/* These functions are compatible w/the WPI Gyro Class, providing a simple */
/* path for upgrading from the Kit-of-Parts gyro to the navx MXP */
SmartDashboard.putNumber( "NavX: IMU_TotalYaw", ahrs.getAngle());
//SmartDashboard.putNumber( "NavX: IMU_YawRateDPS", ahrs.getRate());
/* Display Processed Acceleration Data (Linear Acceleration, Motion Detect) */
//SmartDashboard.putNumber( "NavX: IMU_Accel_X", ahrs.getWorldLinearAccelX());
//SmartDashboard.putNumber( "NavX: IMU_Accel_Y", ahrs.getWorldLinearAccelY());
//SmartDashboard.putBoolean( "NavX: IMU_IsMoving", ahrs.isMoving());
//SmartDashboard.putBoolean( "NavX: IMU_IsRotating", ahrs.isRotating());
/* Display estimates of velocity/displacement. Note that these values are */
/* not expected to be accurate enough for estimating robot position on a */
/* FIRST FRC Robotics Field, due to accelerometer noise and the compounding */
/* of these errors due to single (velocity) integration and especially */
/* double (displacement) integration. */
SmartDashboard.putNumber( "NavX: Velocity_X", ahrs.getVelocityX());
SmartDashboard.putNumber( "NavX: Velocity_Y", ahrs.getVelocityY());
SmartDashboard.putNumber( "NavX: Displacement_X", ahrs.getDisplacementX());
SmartDashboard.putNumber( "NavX: Displacement_Y", ahrs.getDisplacementY());
/* Display Raw Gyro/Accelerometer/Magnetometer Values */
/* NOTE: These values are not normally necessary, but are made available */
/* for advanced users. Before using this data, please consider whether */
/* the processed data (see above) will suit your needs. */
//SmartDashboard.putNumber( "NavX: RawGyro_X", ahrs.getRawGyroX());
//SmartDashboard.putNumber( "NavX: RawGyro_Y", ahrs.getRawGyroY());
//SmartDashboard.putNumber( "NavX: RawGyro_Z", ahrs.getRawGyroZ());
//SmartDashboard.putNumber( "NavX: RawAccel_X", ahrs.getRawAccelX());
//SmartDashboard.putNumber( "NavX: RawAccel_Y", ahrs.getRawAccelY());
//SmartDashboard.putNumber( "NavX: RawAccel_Z", ahrs.getRawAccelZ());
//SmartDashboard.putNumber( "NavX: RawMag_X", ahrs.getRawMagX());
//SmartDashboard.putNumber( "NavX: RawMag_Y", ahrs.getRawMagY());
//SmartDashboard.putNumber( "NavX: RawMag_Z", ahrs.getRawMagZ());
//SmartDashboard.putNumber( "NavX: IMU_Temp_C", ahrs.getTempC());
//SmartDashboard.putNumber( "NavX: IMU_Timestamp", ahrs.getLastSensorTimestamp());
/* Omnimount Yaw Axis Information */
AHRS.BoardYawAxis yaw_axis = ahrs.getBoardYawAxis();
SmartDashboard.putString( "NavX: YawAxisDirection", yaw_axis.up ? "Up" : "Down" );
SmartDashboard.putNumber( "NavX: YawAxis", yaw_axis.board_axis.getValue() );
/* Sensor Board Information */
SmartDashboard.putString( "NavX: FirmwareVersion", ahrs.getFirmwareVersion());
/* Quaternion Data */
/* Quaternions are fascinating, and are the most compact representation of */
/* orientation data. All of the Yaw, Pitch and Roll Values can be derived */
/* from the Quaternions. If interested in motion processing, knowledge of */
/* Quaternions is highly recommended. */
//SmartDashboard.putNumber( "NavX: QuaternionW", ahrs.getQuaternionW());
//SmartDashboard.putNumber( "NavX: QuaternionX", ahrs.getQuaternionX());
//SmartDashboard.putNumber( "NavX: QuaternionY", ahrs.getQuaternionY());
//SmartDashboard.putNumber( "NavX: QuaternionZ", ahrs.getQuaternionZ());
/* Connectivity Debugging Support */
//SmartDashboard.putNumber( "NavX: IMU_Byte_Count", ahrs.getByteCount());
//SmartDashboard.putNumber( "NavX: IMU_Update_Count", ahrs.getUpdateCount());
}
}
|
package jeranvier.math.dsp;
import jeranvier.math.linearAlgebra.Vector;
import jeranvier.math.util.Complex;
public class Fourier {
public static Vector dft(Vector in) {
if((in.size() & (in.size() - 1)) == 0){
return FastFourier.cooleyTukeyFFT(in);
}
else{
return SimpleFourier.simpleDFT(in);
}
}
public static Vector ift(Vector in) {
if((in.size() & (in.size() - 1)) == 0){
return FastFourier.cooleyTukeyIFFT(in);
}
else{
return SimpleFourier.simpleIDFT(in);
}
}
public static final class Util{
public static Vector potPadding(Vector in){
int power=1;
while(in.size()>power){
power = power << 1;
}
System.out.println("was: "+in.size()+", and is now: "+power);
Vector.Builder vb = new Vector.Builder(power);
int i = 1;
for(Complex c : in){
vb.set(i, c);
i++;
}
for(int j = i; j<= power;j++){
vb.set(j, new Complex());
}
return vb.build();
}
}
}
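/*
* Minimal usage sketch, not part of the original library: it builds a vector of
* default (zero) Complex samples, pads it to a power-of-two length with
* Util.potPadding so that dft() takes the Cooley-Tukey path, and transforms it.
* Only the APIs visible above are used; filling in real sample values would rely
* on whatever constructor or setter Complex provides, which is not shown here.
*/
class FourierUsageSketch {
public static void main(String[] args) {
Vector.Builder vb = new Vector.Builder(6);
for (int i = 1; i <= 6; i++) {
vb.set(i, new Complex()); // placeholder zero samples; real data would go here
}
Vector signal = vb.build();
Vector padded = Fourier.Util.potPadding(signal); // length 6 is padded up to 8
Vector spectrum = Fourier.dft(padded); // power-of-two size, so the FFT branch runs
Vector roundTrip = Fourier.ift(spectrum); // inverse transform of the spectrum
System.out.println("spectrum size: " + spectrum.size());
System.out.println("round trip size: " + roundTrip.size());
}
}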
|
package joshua.tools;
import java.io.BufferedOutputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.PriorityQueue;
import java.util.Queue;
import java.util.TreeMap;
import java.util.logging.Logger;
import joshua.corpus.Vocabulary;
import joshua.util.FormatUtils;
import joshua.util.io.LineReader;
import joshua.util.quantization.QuantizerConfiguration;
public class GrammarPacker {
private static final Logger logger = Logger.getLogger(GrammarPacker.class.getName());
private static int SLICE_SIZE;
private static int DATA_SIZE_LIMIT;
private static int DATA_SIZE_ESTIMATE;
private static String WORKING_DIRECTORY;
private String grammar;
private boolean have_alignments;
private String alignments;
private QuantizerConfiguration quantization;
private boolean autopack;
static {
SLICE_SIZE = 5000000;
DATA_SIZE_LIMIT = (int) (Integer.MAX_VALUE * 0.8);
DATA_SIZE_ESTIMATE = 20;
WORKING_DIRECTORY = System.getProperty("user.dir") + File.separator + "packed";
}
public GrammarPacker(String config_filename, String grammar_filename, String alignments_filename,
boolean autopack)
throws IOException {
this.grammar = grammar_filename;
this.quantization = new QuantizerConfiguration();
this.alignments = alignments_filename;
have_alignments = (alignments != null);
if (!have_alignments) {
logger.info("No alignments file specified, skipping.");
} else if (!new File(alignments_filename).exists()) {
logger.severe("Alignements file does not exist: " + alignments);
System.exit(0);
}
this.autopack = autopack;
if (this.autopack) {
logger.info("Packing automatically. Feature types will be auto-detected.");
}
readConfig(config_filename);
}
private void readConfig(String config_filename) throws IOException {
LineReader reader = new LineReader(config_filename);
while (reader.hasNext()) {
// Clean up line, chop comments off and skip if the result is empty.
String line = reader.next().trim();
if (line.indexOf('#') != -1) line = line.substring(0, line.indexOf('#'));
if (line.isEmpty()) continue;
String[] fields = line.split("[\\s]+");
if (fields.length < 2) {
logger.severe("Incomplete line in config.");
System.exit(0);
}
if ("slice_size".equals(fields[0])) {
// Number of records to concurrently load into memory for sorting.
SLICE_SIZE = Integer.parseInt(fields[1]);
} else if ("quantizer".equals(fields[0])) {
// Adding a quantizer to the mix.
if (fields.length < 3) {
logger.severe("Incomplete quantizer line in config.");
System.exit(0);
}
String quantizer_key = fields[1];
ArrayList<Integer> feature_ids = new ArrayList<Integer>();
for (int i = 2; i < fields.length; i++)
feature_ids.add(Vocabulary.id(fields[i]));
quantization.add(quantizer_key, feature_ids);
}
}
reader.close();
File working_dir = new File(WORKING_DIRECTORY);
if (!working_dir.exists() && !working_dir.mkdirs()) {
logger.severe("Failed creating working directory.");
System.exit(0);
}
}
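/*
* Hypothetical packing config accepted by readConfig() above (all names and
* values are illustrative): each non-comment line is whitespace-separated.
*
* slice_size 1000000
* quantizer float phrase_prob lex_prob
*
* The first token after "quantizer" is the quantizer key; the remaining
* tokens are feature names registered with the Vocabulary.
*/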
/**
* Executes the packing.
*
* @throws IOException
*/
public void pack() throws IOException {
logger.info("Beginning exploration pass.");
LineReader grammar_reader = null;
LineReader alignment_reader = null;
quantization.initialize();
// Explore pass. Learn vocabulary and quantizer histograms.
logger.info("Exploring: " + grammar);
grammar_reader = new LineReader(grammar);
explore(grammar_reader);
logger.info("Exploration pass complete. Freezing vocabulary and " + "finalizing quantizers.");
quantization.finalize();
quantization.write(WORKING_DIRECTORY + File.separator + "quantization");
Vocabulary.freeze();
Vocabulary.write(WORKING_DIRECTORY + File.separator + "vocabulary");
// Read previously written quantizer configuration to match up to changed
// vocabulary id's.
quantization.read(WORKING_DIRECTORY + File.separator + "quantization");
logger.info("Beginning packing pass.");
Queue<PackingFileTuple> slices = new PriorityQueue<PackingFileTuple>();
// Actual binarization pass. Slice and pack source, target and data.
grammar_reader = new LineReader(grammar);
if (have_alignments) alignment_reader = new LineReader(alignments);
binarize(grammar_reader, alignment_reader, slices);
logger.info("Packing complete.");
logger.info("Packed grammar in: " + WORKING_DIRECTORY);
logger.info("Done.");
}
private void explore(LineReader grammar) {
int counter = 0;
while (grammar.hasNext()) {
String line = grammar.next().trim();
counter++;
String[] fields = line.split("\\s\\|{3}\\s");
if (fields.length < 4) {
logger.warning("Incomplete grammar line at line " + counter);
continue;
}
String lhs = fields[0];
String[] source = fields[1].split("\\s");
String[] target = fields[2].split("\\s");
String[] features = fields[3].split("\\s");
Vocabulary.id(lhs);
// Add symbols to vocabulary.
for (String source_word : source) {
if (FormatUtils.isNonterminal(source_word))
Vocabulary.id(FormatUtils.stripNt(source_word));
else
Vocabulary.id(source_word);
}
for (String target_word : target) {
if (FormatUtils.isNonterminal(target_word))
Vocabulary.id(FormatUtils.stripNt(target_word));
else
Vocabulary.id(target_word);
}
// Add feature names to vocabulary and pass the value through the
// appropriate quantizer.
for (String feature_entry : features) {
String[] fe = feature_entry.split("=");
quantization.get(Vocabulary.id(fe[0])).add(Float.parseFloat(fe[1]));
}
}
}
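/*
* Illustrative (made-up) grammar line matching the "lhs ||| source ||| target
* ||| features" split used by explore() and binarize():
*
* [X] ||| le chat ||| the cat ||| p=0.4 lex=0.25
*
* When alignments are supplied with -a, the corresponding alignment line would
* look like "0-0 1-1" (source-target index pairs).
*/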
private void binarize(LineReader grammar_reader, LineReader alignment_reader,
Queue<PackingFileTuple> slices) throws IOException {
int counter = 0;
int slice_counter = 0;
int num_slices = 0;
boolean ready_to_flush = false;
String first_source_word = null;
PackingTrie<SourceValue> source_trie = new PackingTrie<SourceValue>();
PackingTrie<TargetValue> target_trie = new PackingTrie<TargetValue>();
FeatureBuffer feature_buffer = new FeatureBuffer();
AlignmentBuffer alignment_buffer = null;
if (have_alignments) alignment_buffer = new AlignmentBuffer();
TreeMap<Integer, Float> features = new TreeMap<Integer, Float>();
while (grammar_reader.hasNext()) {
String grammar_line = grammar_reader.next().trim();
counter++;
slice_counter++;
String[] fields = grammar_line.split("\\s\\|{3}\\s");
if (fields.length < 4) {
logger.warning("Incomplete grammar line at line " + counter);
continue;
}
String lhs_word = fields[0];
String[] source_words = fields[1].split("\\s");
String[] target_words = fields[2].split("\\s");
String[] feature_entries = fields[3].split("\\s");
// Reached slice limit size, indicate that we're closing up.
if (!ready_to_flush
&& (slice_counter > SLICE_SIZE || feature_buffer.overflowing() || (have_alignments && alignment_buffer
.overflowing()))) {
ready_to_flush = true;
first_source_word = source_words[0];
}
// Finished closing up.
if (ready_to_flush && !first_source_word.equals(source_words[0])) {
slices.add(flush(source_trie, target_trie, feature_buffer, alignment_buffer, num_slices));
source_trie.clear();
target_trie.clear();
feature_buffer.clear();
if (have_alignments) alignment_buffer.clear();
num_slices++;
slice_counter = 0;
ready_to_flush = false;
}
int alignment_index = -1;
// If present, process alignments.
if (have_alignments) {
if (!alignment_reader.hasNext()) {
logger.severe("No more alignments starting in line " + counter);
throw new RuntimeException("No more alignments starting in line " + counter);
} else {
String alignment_line = alignment_reader.next().trim();
String[] alignment_entries = alignment_line.split("\\s");
byte[] alignments = new byte[alignment_entries.length * 2];
if (alignment_entries.length != 0) {
for (int i = 0; i < alignment_entries.length; i++) {
String[] parts = alignment_entries[i].split("-");
alignments[2 * i] = Byte.parseByte(parts[0]);
alignments[2 * i + 1] = Byte.parseByte(parts[1]);
}
}
alignment_index = alignment_buffer.add(alignments);
}
}
// Process features.
// Implicitly sort via TreeMap, write to data buffer, remember position
// to pass on to the source trie node.
features.clear();
for (String feature_entry : feature_entries) {
String[] parts = feature_entry.split("=");
int feature_id = Vocabulary.id(parts[0]);
float feature_value = Float.parseFloat(parts[1]);
if (feature_value != 0) features.put(feature_id, feature_value);
}
int features_index = feature_buffer.add(features);
// Sanity check on the data block index.
if (have_alignments && features_index != alignment_index) {
logger.severe("Block index mismatch between features (" + features_index
+ ") and alignments (" + alignment_index + ").");
throw new RuntimeException("Data block index mismatch.");
}
// Process source side.
SourceValue sv = new SourceValue(Vocabulary.id(lhs_word), features_index);
int[] source = new int[source_words.length];
for (int i = 0; i < source_words.length; i++) {
if (FormatUtils.isNonterminal(source_words[i]))
source[i] = Vocabulary.id(FormatUtils.stripNt(source_words[i]));
else
source[i] = Vocabulary.id(source_words[i]);
}
source_trie.add(source, sv);
// Process target side.
TargetValue tv = new TargetValue(sv);
int[] target = new int[target_words.length];
for (int i = 0; i < target_words.length; i++) {
if (FormatUtils.isNonterminal(target_words[i])) {
target[target_words.length - (i + 1)] = -FormatUtils.getNonterminalIndex(target_words[i]);
} else {
target[target_words.length - (i + 1)] = Vocabulary.id(target_words[i]);
}
}
target_trie.add(target, tv);
}
slices.add(flush(source_trie, target_trie, feature_buffer, alignment_buffer, num_slices));
}
/**
* Serializes the source, target and feature data structures into interlinked binary files. Target
* is written first, into a skeletal (nodes don't carry any data) upward-pointing trie, updating
* the linking source trie nodes with the position once it is known. Source and feature data are
* written simultaneously. The source structure is written into a downward-pointing trie and
* stores the rule's lhs as well as links to the target and feature stream. The feature stream is
* prompted to write out its data blocks in the order established while packing the source trie.
*
* @param source_trie
* @param target_trie
* @param feature_buffer
* @param id
* @throws IOException
*/
private PackingFileTuple flush(PackingTrie<SourceValue> source_trie,
PackingTrie<TargetValue> target_trie, FeatureBuffer feature_buffer,
AlignmentBuffer alignment_buffer, int id) throws IOException {
// Make a slice object for this piece of the grammar.
PackingFileTuple slice = new PackingFileTuple("slice_" + String.format("%05d", id));
// Pull out the streams for source, target and data output.
DataOutputStream source_stream = slice.getSourceOutput();
DataOutputStream target_stream = slice.getTargetOutput();
DataOutputStream target_lookup_stream = slice.getTargetLookupOutput();
DataOutputStream feature_stream = slice.getFeatureOutput();
DataOutputStream alignment_stream = slice.getAlignmentOutput();
Queue<PackingTrie<TargetValue>> target_queue;
Queue<PackingTrie<SourceValue>> source_queue;
// The number of bytes both written into the source stream and
// buffered in the source queue.
int source_position;
// The number of bytes written into the target stream.
int target_position;
// Add trie root into queue, set target position to 0 and set cumulated
// size to size of trie root.
target_queue = new LinkedList<PackingTrie<TargetValue>>();
target_queue.add(target_trie);
target_position = 0;
// Target lookup table for trie levels.
int current_level_size = 1;
int next_level_size = 0;
ArrayList<Integer> target_lookup = new ArrayList<Integer>();
// Packing loop for upwards-pointing target trie.
while (!target_queue.isEmpty()) {
// Pop top of queue.
PackingTrie<TargetValue> node = target_queue.poll();
// Register that this is where we're writing the node to.
node.address = target_position;
// Tell source nodes that we're writing to this position in the file.
for (TargetValue tv : node.values)
tv.parent.target = node.address;
// Write link to parent.
if (node.parent != null)
target_stream.writeInt(node.parent.address);
else
target_stream.writeInt(-1);
target_stream.writeInt(node.symbol);
// Enqueue children.
for (int k : node.children.descendingKeySet()) {
PackingTrie<TargetValue> child = node.children.get(k);
target_queue.add(child);
}
target_position += node.size(false, true);
next_level_size += node.children.descendingKeySet().size();
current_level_size--;
if (current_level_size == 0) {
target_lookup.add(target_position);
current_level_size = next_level_size;
next_level_size = 0;
}
}
target_lookup_stream.writeInt(target_lookup.size());
for (int i : target_lookup)
target_lookup_stream.writeInt(i);
target_lookup_stream.close();
// Setting up for source and data writing.
source_queue = new LinkedList<PackingTrie<SourceValue>>();
source_queue.add(source_trie);
source_position = source_trie.size(true, false);
source_trie.address = target_position;
// Ready data buffers for writing.
feature_buffer.initialize();
if (have_alignments) alignment_buffer.initialize();
// Packing loop for downwards-pointing source trie.
while (!source_queue.isEmpty()) {
// Pop top of queue.
PackingTrie<SourceValue> node = source_queue.poll();
// Write number of children.
source_stream.writeInt(node.children.size());
// Write links to children.
for (int k : node.children.descendingKeySet()) {
PackingTrie<SourceValue> child = node.children.get(k);
// Enqueue child.
source_queue.add(child);
// Child's address will be at the current end of the queue.
child.address = source_position;
// Advance cumulated size by child's size.
source_position += child.size(true, false);
// Write the link.
source_stream.writeInt(k);
source_stream.writeInt(child.address);
}
// Write number of data items.
source_stream.writeInt(node.values.size());
// Write lhs and links to target and data.
for (SourceValue sv : node.values) {
int feature_block_index = feature_buffer.write(sv.data);
if (have_alignments) {
int alignment_block_index = alignment_buffer.write(sv.data);
if (alignment_block_index != feature_block_index) {
logger.severe("Block index mismatch.");
throw new RuntimeException("Block index mismatch: alignment (" + alignment_block_index
+ ") and features (" + feature_block_index + ") don't match.");
}
}
source_stream.writeInt(sv.lhs);
source_stream.writeInt(sv.target);
source_stream.writeInt(feature_block_index);
}
}
// Flush the data stream.
feature_buffer.flush(feature_stream);
if (have_alignments) alignment_buffer.flush(alignment_stream);
target_stream.close();
source_stream.close();
feature_stream.close();
if (have_alignments) alignment_stream.close();
return slice;
}
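/*
* Summary of the record layout written above: each target trie node is
* [parent address, symbol]; each source trie node is [#children,
* (child symbol, child address)*, #values, (lhs, target address, feature block index)*].
*/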
public static void main(String[] args) throws IOException {
String config_filename = null;
String grammar_filename = null;
String alignments_filename = null;
boolean autopilot = false;
if (args.length < 1 || args[0].equals("-h")) {
System.err.println("Usage: " + GrammarPacker.class.toString());
System.err.println(" -g grammar_file translation grammar to process");
System.err.println(" -p packed_dir output directory for packed grammar");
System.err.println(" -A autopilot");
System.err.println(" -c config_file packing configuration file");
System.err.println(" [-a alignment_file alignment_file]");
System.err.println();
System.exit(-1);
}
for (int i = 0; i < args.length; i++) {
if ("-g".equals(args[i]) && (i < args.length - 1)) {
grammar_filename = args[++i];
} else if ("-p".equals(args[i]) && (i < args.length - 1)) {
WORKING_DIRECTORY = args[++i];
} else if ("-c".equals(args[i]) && (i < args.length - 1)) {
config_filename = args[++i];
} else if ("-a".equals(args[i]) && (i < args.length - 1)) {
alignments_filename = args[++i];
} else if ("-A".equals(args[i])) {
autopilot = true;
}
}
if (grammar_filename == null) {
logger.severe("Grammar file not specified.");
return;
}
if (!new File(grammar_filename).exists()) {
logger.severe("Grammar file not found: " + grammar_filename);
}
if (config_filename == null && !autopilot) {
logger.severe("Config file not specified.");
return;
}
if (config_filename != null && !new File(config_filename).exists()) {
logger.severe("Config file not found: " + config_filename);
}
if (new File(WORKING_DIRECTORY).exists()) {
logger.severe("File or directory already exists: " + WORKING_DIRECTORY);
logger.severe("Will not overwrite.");
return;
}
GrammarPacker packer = new GrammarPacker(config_filename, grammar_filename,
alignments_filename, autopilot);
packer.pack();
}
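// Example invocation (file names are illustrative):
// java joshua.tools.GrammarPacker -c packing.config -g grammar.txt -p packed -a alignments.txt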
/**
* Integer-labeled, doubly-linked trie with some provisions for packing.
*
* @author Juri Ganitkevitch
*
* @param <D> The trie's value type.
*/
class PackingTrie<D extends PackingTrieValue> {
int symbol;
PackingTrie<D> parent;
TreeMap<Integer, PackingTrie<D>> children;
List<D> values;
int address;
PackingTrie() {
address = -1;
symbol = 0;
parent = null;
children = new TreeMap<Integer, PackingTrie<D>>();
values = new ArrayList<D>();
}
PackingTrie(PackingTrie<D> parent, int symbol) {
this();
this.parent = parent;
this.symbol = symbol;
}
void add(int[] path, D value) {
add(path, 0, value);
}
private void add(int[] path, int index, D value) {
if (index == path.length)
this.values.add(value);
else {
PackingTrie<D> child = children.get(path[index]);
if (child == null) {
child = new PackingTrie<D>(this, path[index]);
children.put(path[index], child);
}
child.add(path, index + 1, value);
}
}
/**
* Calculate the size (in ints) of a packed trie node. Distinguishes downwards pointing (parent
* points to children) from upwards pointing (children point to parent) tries, as well as
* skeletal (no data, just the labeled links) and non-skeletal (nodes have a data block)
* packing.
*
* @param downwards Are we packing into a downwards-pointing trie?
* @param skeletal Are we packing into a skeletal trie?
*
* @return Number of ints the trie node would occupy.
*/
int size(boolean downwards, boolean skeletal) {
int size = 0;
if (downwards) {
// Number of children and links to children.
size = 1 + 2 * children.size();
} else {
// Link to parent.
size += 2;
}
// Non-skeletal packing: number of data items.
if (!skeletal) size += 1;
// Non-skeletal packing: write size taken up by data items.
if (!skeletal && !values.isEmpty()) size += values.size() * values.get(0).size();
return size;
}
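/*
* Worked example of the calculation above: a downward-pointing, non-skeletal
* node with 2 children and one SourceValue (3 ints) occupies
* 1 + 2*2 + 1 + 1*3 = 9 ints; an upward-pointing skeletal node, as used for
* the target trie, always occupies 2 ints (parent link and symbol).
*/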
void clear() {
children.clear();
values.clear();
}
}
interface PackingTrieValue {
int size();
}
class SourceValue implements PackingTrieValue {
int lhs;
int data;
int target;
public SourceValue() {}
SourceValue(int lhs, int data) {
this.lhs = lhs;
this.data = data;
}
void setTarget(int target) {
this.target = target;
}
public int size() {
return 3;
}
}
class TargetValue implements PackingTrieValue {
SourceValue parent;
TargetValue(SourceValue parent) {
this.parent = parent;
}
public int size() {
return 0;
}
}
abstract class PackingBuffer<T> {
private byte[] backing;
protected ByteBuffer buffer;
protected ArrayList<Integer> memoryLookup;
protected int totalSize;
protected ArrayList<Integer> onDiskOrder;
PackingBuffer() throws IOException {
allocate();
memoryLookup = new ArrayList<Integer>();
onDiskOrder = new ArrayList<Integer>();
totalSize = 0;
}
abstract int add(T item);
// Allocate a reasonably-sized buffer for the feature data.
private void allocate() {
backing = new byte[SLICE_SIZE * DATA_SIZE_ESTIMATE];
buffer = ByteBuffer.wrap(backing);
}
// Reallocate the backing array and buffer, copies data over.
protected void reallocate() {
if (backing.length == Integer.MAX_VALUE) return;
long attempted_length = backing.length * 2L;
int new_length;
// Detect overflow.
if (attempted_length >= Integer.MAX_VALUE)
new_length = Integer.MAX_VALUE;
else
new_length = (int) attempted_length;
byte[] new_backing = new byte[new_length];
System.arraycopy(backing, 0, new_backing, 0, backing.length);
int old_position = buffer.position();
ByteBuffer new_buffer = ByteBuffer.wrap(new_backing);
new_buffer.position(old_position);
buffer = new_buffer;
backing = new_backing;
}
/**
* Prepare the data buffer for disk writing.
*/
void initialize() {
onDiskOrder.clear();
}
/**
* Enqueue a data block for later writing.
*
* @param block_index The index of the data block to add to writing queue.
* @return The to-be-written block's output index.
*/
int write(int block_index) {
onDiskOrder.add(block_index);
return onDiskOrder.size() - 1;
}
/**
* Performs the actual writing to disk in the order specified by calls to write() since the last
* call to initialize().
*
* @param out
* @throws IOException
*/
void flush(DataOutputStream out) throws IOException {
writeHeader(out);
int size;
int block_address;
for (int block_index : onDiskOrder) {
block_address = memoryLookup.get(block_index);
size = blockSize(block_index);
out.write(backing, block_address, size);
}
}
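/*
* Illustrative on-disk layout produced by writeHeader() and flush() above:
* [number of blocks][total data size][file offset of each block in write order]
* followed immediately by the block payloads in that same order.
*/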
void clear() {
buffer.clear();
memoryLookup.clear();
onDiskOrder.clear();
}
boolean overflowing() {
return (buffer.position() >= DATA_SIZE_LIMIT);
}
private void writeHeader(DataOutputStream out) throws IOException {
if (out.size() == 0) {
out.writeInt(onDiskOrder.size());
out.writeInt(totalSize);
int disk_position = headerSize();
for (int block_index : onDiskOrder) {
out.writeInt(disk_position);
disk_position += blockSize(block_index);
}
} else {
throw new RuntimeException("Got a used stream for header writing.");
}
}
private int headerSize() {
// One integer for each data block, plus number of blocks and total size.
return 4 * (onDiskOrder.size() + 2);
}
private int blockSize(int block_index) {
int block_address = memoryLookup.get(block_index);
return (block_index < memoryLookup.size() - 1 ? memoryLookup.get(block_index + 1) : totalSize)
- block_address;
}
}
class FeatureBuffer extends PackingBuffer<TreeMap<Integer, Float>> {
FeatureBuffer() throws IOException {
super();
}
/**
* Add a block of features to the buffer.
*
* @param features TreeMap with the features for one rule.
* @return The index of the resulting data block.
*/
int add(TreeMap<Integer, Float> features) {
int data_position = buffer.position();
// Over-estimate how much room this addition will need: 12 bytes per
// feature (4 for label, "upper bound" of 8 for the value), plus 4 for
// the number of features. If this won't fit, reallocate the buffer.
int size_estimate = 12 * features.size() + 4;
if (buffer.capacity() - buffer.position() <= size_estimate) reallocate();
// Write features to buffer.
buffer.putInt(features.size());
for (Integer k : features.descendingKeySet()) {
float v = features.get(k);
// Sparse features.
if (v != 0.0) {
buffer.putInt(k);
quantization.get(k).write(buffer, v);
}
}
// Store position the block was written to.
memoryLookup.add(data_position);
// Update total size (in bytes).
totalSize = buffer.position();
// Return block index.
return memoryLookup.size() - 1;
}
}
class AlignmentBuffer extends PackingBuffer<byte[]> {
AlignmentBuffer() throws IOException {
super();
}
/**
* Add a rule's alignments to the buffer.
*
* @param alignments a byte array with the alignment points for one rule.
* @return The index of the resulting data block.
*/
int add(byte[] alignments) {
int data_position = buffer.position();
int size_estimate = alignments.length + 1;
if (buffer.capacity() - buffer.position() <= size_estimate) reallocate();
// Write alignment points to buffer.
buffer.put((byte) (alignments.length / 2));
buffer.put(alignments);
// Store position the block was written to.
memoryLookup.add(data_position);
// Update total size (in bytes).
totalSize = buffer.position();
// Return block index.
return memoryLookup.size() - 1;
}
}
class PackingFileTuple implements Comparable<PackingFileTuple> {
private File sourceFile;
private File targetLookupFile;
private File targetFile;
private File featureFile;
private File alignmentFile;
PackingFileTuple(String prefix) {
sourceFile = new File(WORKING_DIRECTORY + File.separator + prefix + ".source");
targetFile = new File(WORKING_DIRECTORY + File.separator + prefix + ".target");
targetLookupFile = new File(WORKING_DIRECTORY + File.separator + prefix + ".target.lookup");
featureFile = new File(WORKING_DIRECTORY + File.separator + prefix + ".features");
if (have_alignments)
alignmentFile = new File(WORKING_DIRECTORY + File.separator + prefix + ".alignments");
else
alignmentFile = null;
logger.info("Allocated slice: " + sourceFile.getAbsolutePath());
}
DataOutputStream getSourceOutput() throws IOException {
return getOutput(sourceFile);
}
DataOutputStream getTargetOutput() throws IOException {
return getOutput(targetFile);
}
DataOutputStream getTargetLookupOutput() throws IOException {
return getOutput(targetLookupFile);
}
DataOutputStream getFeatureOutput() throws IOException {
return getOutput(featureFile);
}
DataOutputStream getAlignmentOutput() throws IOException {
if (alignmentFile != null) return getOutput(alignmentFile);
return null;
}
private DataOutputStream getOutput(File file) throws IOException {
if (file.createNewFile()) {
return new DataOutputStream(new BufferedOutputStream(new FileOutputStream(file)));
} else {
throw new RuntimeException("File doesn't exist: " + file.getName());
}
}
long getSize() {
return sourceFile.length() + targetFile.length() + featureFile.length();
}
@Override
public int compareTo(PackingFileTuple o) {
if (getSize() > o.getSize()) {
return -1;
} else if (getSize() < o.getSize()) {
return 1;
} else {
return 0;
}
}
}
}
|
package org.apache.commons.validator;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
import org.apache.commons.collections.FastHashMap;
import org.apache.commons.validator.util.ValidatorUtils;
public class Field implements Cloneable, Serializable {
/**
* This is the value that will be used as a key if the <code>Arg</code>
* name field has no value.
*/
private static final String DEFAULT_ARG =
"org.apache.commons.validator.Field.DEFAULT";
/**
* This is the value that will be used as a key if the <code>Arg</code>
* name field has no value.
* @deprecated
*/
public static final String ARG_DEFAULT = DEFAULT_ARG;
/**
* This indicates an indexed property is being referenced.
*/
public static final String TOKEN_INDEXED = "[]";
protected static final String TOKEN_START = "${";
protected static final String TOKEN_END = "}";
protected static final String TOKEN_VAR = "var:";
protected String property = null;
protected String indexedProperty = null;
protected String indexedListProperty = null;
protected String key = null;
/**
* A comma-separated list of validators this field depends on.
*/
protected String depends = null;
protected int page = 0;
protected int fieldOrder = 0;
/**
* @deprecated This is no longer used.
*/
protected FastHashMap hDependencies = new FastHashMap();
/**
* Internal representation of this.depends String as a List. This List gets updated
* whenever setDepends() gets called. This List is synchronized so a call to
* setDepends() (which clears the List) won't interfere with a call to
* isDependency().
*/
private List dependencyList = Collections.synchronizedList(new ArrayList());
protected FastHashMap hVars = new FastHashMap();
protected FastHashMap hMsgs = new FastHashMap();
/**
* Holds Maps of arguments. args[0] returns the Map for the first replacement
* argument.
* @since Validator 1.1
*/
protected Map[] args = new Map[10];
/**
* @deprecated This variable is no longer used, use args instead.
*/
protected FastHashMap hArg0 = new FastHashMap();
/**
* @deprecated This variable is no longer used, use args instead.
*/
protected FastHashMap hArg1 = new FastHashMap();
/**
* @deprecated This variable is no longer used, use args instead.
*/
protected FastHashMap hArg2 = new FastHashMap();
/**
* @deprecated This variable is no longer used, use args instead.
*/
protected FastHashMap hArg3 = new FastHashMap();
/**
* Gets the page value that the Field is associated with for
* validation.
*/
public int getPage() {
return this.page;
}
/**
* Sets the page value that the Field is associated with for
* validation.
*/
public void setPage(int page) {
this.page = page;
}
/**
* Gets the position of the <code>Field</code> in the validation list.
*/
public int getFieldOrder() {
return this.fieldOrder;
}
/**
* Sets the position of the <code>Field</code> in the validation list.
*/
public void setFieldOrder(int fieldOrder) {
this.fieldOrder = fieldOrder;
}
/**
* Gets the property name of the field.
*/
public String getProperty() {
return this.property;
}
/**
* Sets the property name of the field.
*/
public void setProperty(String property) {
this.property = property;
}
/**
* Gets the indexed property name of the field. This
* is the method name that can take an <code>int</code> as
* a parameter for indexed property value retrieval.
*/
public String getIndexedProperty() {
return this.indexedProperty;
}
/**
* Sets the indexed property name of the field.
*/
public void setIndexedProperty(String indexedProperty) {
this.indexedProperty = indexedProperty;
}
/**
* Gets the indexed list property name of the field. This
* is the method name that will return an array or a
* <code>Collection</code> used to retrieve the
* list and then loop through the list performing the specified
* validations.
*/
public String getIndexedListProperty() {
return this.indexedListProperty;
}
/**
* Sets the indexed list property name of the field.
*/
public void setIndexedListProperty(String indexedListProperty) {
this.indexedListProperty = indexedListProperty;
}
/**
* Gets the validation rules for this field as a comma separated list.
*/
public String getDepends() {
return this.depends;
}
/**
* Sets the validation rules for this field as a comma separated list.
*/
public void setDepends(String depends) {
this.depends = depends;
this.dependencyList.clear();
StringTokenizer st = new StringTokenizer(depends, ",");
while (st.hasMoreTokens()) {
String depend = st.nextToken().trim();
if (depend != null && depend.length() > 0) {
this.dependencyList.add(depend);
}
}
}
/**
* Add a <code>Msg</code> to the <code>Field</code>.
*/
public void addMsg(Msg msg) {
hMsgs.put(msg.getName(), msg.getKey());
}
/**
* Retrieve a message value.
*/
public String getMsg(String key) {
return (String) hMsgs.get(key);
}
/**
* Add an <code>Arg</code> to the replacement argument list.
* @since Validator 1.1
*/
public void addArg(Arg arg) {
// TODO this first if check can go away after arg0, etc. are removed from dtd
if (arg == null || arg.getKey() == null || arg.getKey().length() == 0) {
return;
}
this.ensureArgsCapacity(arg);
Map argMap = this.args[arg.getPosition()];
if (argMap == null) {
argMap = new HashMap();
this.args[arg.getPosition()] = argMap;
}
if (arg.getName() == null) {
argMap.put(DEFAULT_ARG, arg);
} else {
argMap.put(arg.getName(), arg);
}
}
/**
* Ensures that the args array can hold the given arg. Resizes the array as
* necessary.
* @param arg Determine if the args array is long enough to store this arg's
* position.
*/
private void ensureArgsCapacity(Arg arg) {
if (arg.getPosition() >= this.args.length) {
Map[] newArgs = new Map[arg.getPosition() + 1];
System.arraycopy(this.args, 0, newArgs, 0, this.args.length);
this.args = newArgs;
}
}
/**
* Gets the default <code>Arg</code> object at the given position.
* @return The default Arg or null if not found.
* @since Validator 1.1
*/
public Arg getArg(int position) {
return this.getArg(DEFAULT_ARG, position);
}
/**
* Gets the default <code>Arg</code> object at the given position. If the key
* finds a <code>null</code> value then the default value will try to be retrieved.
* @param key The name the Arg is stored under. If not found, the default Arg for
* the given position (if any) will be retrieved.
* @param position The Arg number to find.
* @return The Arg with the given name and position or null if not found.
* @since Validator 1.1
*/
public Arg getArg(String key, int position) {
if ((position >= this.args.length) || (this.args[position] == null)) {
return null;
}
Arg arg = (Arg) args[position].get(key);
// Didn't find default arg so exit, otherwise we would get into infinite recursion
if ((arg == null) && key.equals(DEFAULT_ARG)) {
return null;
}
return (arg == null) ? this.getArg(position) : arg;
}
/**
* Add a <code>Arg</code> to the arg0 list.
* @deprecated Use addArg(Arg) instead.
*/
public void addArg0(Arg arg) {
arg.setPosition(0);
this.addArg(arg);
}
/**
* Gets the default arg0 <code>Arg</code> object.
* @deprecated Use getArg(0) instead.
*/
public Arg getArg0() {
return this.getArg(0);
}
/**
* Gets the arg0 <code>Arg</code> object based on the key passed in. If the key
* finds a <code>null</code> value then the default value will try to be retrieved.
* @deprecated Use getArg(String, 0) instead.
*/
public Arg getArg0(String key) {
return this.getArg(key, 0);
}
/**
* Add a <code>Arg</code> to the arg1 list.
* @deprecated Use addArg(Arg) instead.
*/
public void addArg1(Arg arg) {
arg.setPosition(1);
this.addArg(arg);
}
/**
* Gets the default arg1 <code>Arg</code> object.
* @deprecated Use getArg(1) instead.
*/
public Arg getArg1() {
return this.getArg(1);
}
/**
* Gets the arg1 <code>Arg</code> object based on the key passed in. If the key
* finds a <code>null</code> value then the default value will try to be retrieved.
* @deprecated Use getArg(String, 1) instead.
*/
public Arg getArg1(String key) {
return this.getArg(key, 1);
}
/**
* Add a <code>Arg</code> to the arg2 list.
* @deprecated Use addArg(Arg) instead.
*/
public void addArg2(Arg arg) {
arg.setPosition(2);
this.addArg(arg);
}
/**
* Gets the default arg2 <code>Arg</code> object.
* @deprecated Use getArg(2) instead.
*/
public Arg getArg2() {
return this.getArg(2);
}
/**
* Gets the arg2 <code>Arg</code> object based on the key passed in. If the key
* finds a <code>null</code> value then the default value will try to be retrieved.
* @deprecated Use getArg(String, 2) instead.
*/
public Arg getArg2(String key) {
return this.getArg(key, 2);
}
/**
* Add a <code>Arg</code> to the arg3 list.
* @deprecated Use addArg(Arg) instead.
*/
public void addArg3(Arg arg) {
arg.setPosition(3);
this.addArg(arg);
}
/**
* Gets the default arg3 <code>Arg</code> object.
* @deprecated Use getArg(3) instead.
*/
public Arg getArg3() {
return this.getArg(3);
}
/**
* Gets the arg3 <code>Arg</code> object based on the key passed in. If the key
* finds a <code>null</code> value then the default value will try to be retrieved.
* @deprecated Use getArg(String, 3) instead.
*/
public Arg getArg3(String key) {
return this.getArg(key, 3);
}
/**
* Add a <code>Var</code> to the <code>Field</code>.
*/
public void addVar(Var v) {
this.hVars.put(v.getName(), v);
}
/**
* Add a <code>Var</code>, based on the values passed in, to the
* <code>Field</code>.
* @deprecated Use addVar(String, String, String) instead.
*/
public void addVarParam(String name, String value, String jsType) {
this.addVar(new Var(name, value, jsType));
}
/**
* Add a <code>Var</code>, based on the values passed in, to the
* <code>Field</code>.
* @param name
* @param value
* @param jsType
*/
public void addVar(String name, String value, String jsType) {
this.addVar(new Var(name, value, jsType));
}
/**
* Retrieve a variable.
* @param mainKey
*/
public Var getVar(String mainKey) {
return (Var) hVars.get(mainKey);
}
/**
* Retrieve a variable's value.
* @param mainKey
*/
public String getVarValue(String mainKey) {
String value = null;
Object o = hVars.get(mainKey);
if (o != null && o instanceof Var) {
Var v = (Var) o;
value = v.getValue();
}
return value;
}
/**
* The <code>Field</code>'s variables are returned as an
* unmodifiable <code>Map</code>.
*/
public Map getVars() {
return Collections.unmodifiableMap(hVars);
}
/**
* Gets a unique key based on the property and indexedProperty fields.
*/
public String getKey() {
if (this.key == null) {
this.generateKey();
}
return this.key;
}
/**
* Sets a unique key for the field. This can be used to change
* the key temporarily to have a unique key for an indexed field.
* @param key
*/
public void setKey(String key) {
this.key = key;
}
/**
* If there is a value specified for the indexedListProperty field then
* <code>true</code> will be returned. Otherwise it will be <code>false</code>.
*/
public boolean isIndexed() {
return ((indexedListProperty != null && indexedListProperty.length() > 0));
}
/**
* Generate correct <code>key</code> value.
*/
public void generateKey() {
if (this.isIndexed()) {
this.key = this.indexedListProperty + TOKEN_INDEXED + "." + this.property;
} else {
this.key = this.property;
}
}
/**
* Replace constants with values in fields and process the depends field
* to create the dependency <code>Map</code>.
* @deprecated This method is called by the framework. It will be made protected
* in a future release. TODO
*/
public void process(Map globalConstants, Map constants) {
this.hMsgs.setFast(false);
this.hVars.setFast(true);
this.generateKey();
// Process FormSet Constants
for (Iterator i = constants.keySet().iterator(); i.hasNext();) {
String key = (String) i.next();
String key2 = TOKEN_START + key + TOKEN_END;
String replaceValue = (String) constants.get(key);
property = ValidatorUtils.replace(property, key2, replaceValue);
processVars(key2, replaceValue);
this.processMessageComponents(key2, replaceValue);
}
// Process Global Constants
for (Iterator i = globalConstants.keySet().iterator(); i.hasNext();) {
String key = (String) i.next();
String key2 = TOKEN_START + key + TOKEN_END;
String replaceValue = (String) globalConstants.get(key);
property = ValidatorUtils.replace(property, key2, replaceValue);
processVars(key2, replaceValue);
this.processMessageComponents(key2, replaceValue);
}
// Process Var Constant Replacement
for (Iterator i = hVars.keySet().iterator(); i.hasNext();) {
String key = (String) i.next();
String key2 = TOKEN_START + TOKEN_VAR + key + TOKEN_END;
Var var = this.getVar(key);
String replaceValue = var.getValue();
this.processMessageComponents(key2, replaceValue);
}
hMsgs.setFast(true);
}
/**
* Replace the vars value with the key/value pairs passed in.
*/
private void processVars(String key, String replaceValue) {
Iterator i = this.hVars.keySet().iterator();
while (i.hasNext()) {
String varKey = (String) i.next();
Var var = this.getVar(varKey);
var.setValue(ValidatorUtils.replace(var.getValue(), key, replaceValue));
}
}
/**
* Replace the args key value with the key/value pairs passed in.
* @deprecated This is an internal setup method that clients don't need to call.
*/
public void processMessageComponents(String key, String replaceValue) {
this.internalProcessMessageComponents(key, replaceValue);
}
/**
* Replace the args key value with the key/value pairs passed in.
* TODO When processMessageComponents() is removed from the public API we
* should rename this private method to "processMessageComponents".
*/
private void internalProcessMessageComponents(String key, String replaceValue) {
String varKey = TOKEN_START + TOKEN_VAR;
// Process Messages
if (key != null && !key.startsWith(varKey)) {
for (Iterator i = hMsgs.keySet().iterator(); i.hasNext();) {
String msgKey = (String) i.next();
String value = this.getMsg(msgKey);
hMsgs.put(msgKey, ValidatorUtils.replace(value, key, replaceValue));
}
}
this.processArg(key, replaceValue);
}
/**
* Replace the arg <code>Collection</code> key value with the key/value pairs
* passed in.
*/
private void processArg(String key, String replaceValue) {
for (int i = 0; i < this.args.length; i++) {
Map argMap = this.args[i];
if (argMap == null) {
continue;
}
Iterator iter = argMap.values().iterator();
while (iter.hasNext()) {
Arg arg = (Arg) iter.next();
if (arg != null) {
arg.setKey(
ValidatorUtils.replace(arg.getKey(), key, replaceValue));
}
}
}
}
/**
* Checks if the validator is listed as a dependency.
*/
public boolean isDependency(String validatorName) {
return this.dependencyList.contains(validatorName);
}
/**
* Gets an unmodifiable <code>Set</code> of the dependencies.
* @deprecated Use getDependencyList() instead.
*/
public Collection getDependencies() {
return this.getDependencyList();
}
/**
* Gets an unmodifiable <code>List</code> of the dependencies in the same order
* they were defined in the parameter passed to the setDepends() method.
*/
public List getDependencyList() {
return Collections.unmodifiableList(this.dependencyList);
}
/**
* Creates and returns a copy of this object.
*/
public Object clone() {
Field field = null;
try {
field = (Field) super.clone();
} catch(CloneNotSupportedException e) {
throw new InternalError(e.toString());
}
field.args = new Map[this.args.length];
for (int i = 0; i < this.args.length; i++) {
if (this.args[i] == null) {
continue;
}
Map argMap = new HashMap(this.args[i]);
Iterator iter = argMap.keySet().iterator();
while (iter.hasNext()) {
String validatorName = (String) iter.next();
Arg arg = (Arg) argMap.get(validatorName);
argMap.put(validatorName, arg.clone());
}
field.args[i] = argMap;
}
field.hVars = ValidatorUtils.copyFastHashMap(hVars);
field.hMsgs = ValidatorUtils.copyFastHashMap(hMsgs);
field.hArg0 = ValidatorUtils.copyFastHashMap(hArg0);
field.hArg1 = ValidatorUtils.copyFastHashMap(hArg1);
field.hArg2 = ValidatorUtils.copyFastHashMap(hArg2);
field.hArg3 = ValidatorUtils.copyFastHashMap(hArg3);
return field;
}
/**
* Returns a string representation of the object.
*/
public String toString() {
StringBuffer results = new StringBuffer();
results.append("\t\tkey = " + key + "\n");
results.append("\t\tproperty = " + property + "\n");
results.append("\t\tindexedProperty = " + indexedProperty + "\n");
results.append("\t\tindexedListProperty = " + indexedListProperty + "\n");
results.append("\t\tdepends = " + depends + "\n");
results.append("\t\tpage = " + page + "\n");
results.append("\t\tfieldOrder = " + fieldOrder + "\n");
if (hVars != null) {
results.append("\t\tVars:\n");
for (Iterator i = hVars.keySet().iterator(); i.hasNext();) {
Object key = i.next();
results.append("\t\t\t");
results.append(key);
results.append("=");
results.append(hVars.get(key));
results.append("\n");
}
}
return results.toString();
}
}
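/*
* Hedged usage sketch, not part of Commons Validator itself: it configures a
* Field the way the XML-driven setup normally would, using only methods shown
* above. Property, validator and variable names are illustrative, and a public
* no-arg Arg constructor is assumed (the bean-style setters used above suggest one).
*/
class FieldUsageSketch {
public static void main(String[] args) {
Field field = new Field();
field.setProperty("email");
field.setDepends("required,email"); // populates the synchronized dependency list
field.addVar("maxlength", "60", "int"); // stored in hVars
Arg arg = new Arg(); // assumed no-arg constructor
arg.setPosition(0);
arg.setKey("form.email.displayname");
field.addArg(arg); // stored under DEFAULT_ARG because no name is set
System.out.println(field.isDependency("email")); // true
System.out.println(field.getKey()); // "email" (field is not indexed)
System.out.println(field.getArg(0).getKey()); // "form.email.displayname"
}
}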
|
package org.hibernate.ogm.datastore.redis;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.hibernate.ogm.datastore.document.impl.DotPatternMapHelpers;
import org.hibernate.ogm.datastore.document.options.AssociationStorageType;
import org.hibernate.ogm.datastore.document.options.spi.AssociationStorageOption;
import org.hibernate.ogm.datastore.map.impl.MapHelpers;
import org.hibernate.ogm.datastore.redis.dialect.model.impl.RedisAssociation;
import org.hibernate.ogm.datastore.redis.dialect.model.impl.RedisAssociationSnapshot;
import org.hibernate.ogm.datastore.redis.dialect.model.impl.RedisTupleSnapshot;
import org.hibernate.ogm.datastore.redis.dialect.value.Association;
import org.hibernate.ogm.datastore.redis.dialect.value.Entity;
import org.hibernate.ogm.datastore.redis.impl.RedisDatastoreProvider;
import org.hibernate.ogm.datastore.redis.impl.json.JsonEntityStorageStrategy;
import org.hibernate.ogm.dialect.multiget.spi.MultigetGridDialect;
import org.hibernate.ogm.dialect.spi.AssociationContext;
import org.hibernate.ogm.dialect.spi.AssociationTypeContext;
import org.hibernate.ogm.dialect.spi.ModelConsumer;
import org.hibernate.ogm.dialect.spi.TupleContext;
import org.hibernate.ogm.model.key.spi.AssociationKey;
import org.hibernate.ogm.model.key.spi.AssociationKeyMetadata;
import org.hibernate.ogm.model.key.spi.AssociationKind;
import org.hibernate.ogm.model.key.spi.AssociationType;
import org.hibernate.ogm.model.key.spi.EntityKey;
import org.hibernate.ogm.model.key.spi.EntityKeyMetadata;
import org.hibernate.ogm.model.key.spi.RowKey;
import org.hibernate.ogm.model.spi.Tuple;
import org.hibernate.ogm.model.spi.TupleOperation;
import org.hibernate.ogm.options.spi.OptionsContext;
import org.hibernate.ogm.type.spi.GridType;
import org.hibernate.type.Type;
import com.lambdaworks.redis.KeyScanCursor;
import com.lambdaworks.redis.RedisConnection;
import com.lambdaworks.redis.ScanArgs;
/**
* Stores tuples and associations inside Redis as JSON.
* <p>
* Tuples are stored in Redis as a JSON serialization of an {@link Entity} object. Associations are stored in Redis as a
* JSON serialization of an {@link Association} object, either within the entity or as an external document.
* See {@link org.hibernate.ogm.datastore.document.cfg.DocumentStoreProperties#ASSOCIATIONS_STORE} on how to configure
* entity or external storage.
*
* @author Mark Paluch
*/
public class RedisJsonDialect extends AbstractRedisDialect implements MultigetGridDialect {
protected final RedisConnection<String, String> connection;
protected final JsonEntityStorageStrategy entityStorageStrategy;
public RedisJsonDialect(RedisDatastoreProvider provider) {
super( provider.getConnection() );
connection = provider.getConnection();
this.entityStorageStrategy = new JsonEntityStorageStrategy( strategy, connection );
}
@Override
public GridType overrideType(Type type) {
return strategy.overrideType( type );
}
@Override
public Tuple getTuple(EntityKey key, TupleContext tupleContext) {
Entity entity = entityStorageStrategy.getEntity( entityId( key ) );
if ( entity != null ) {
return new Tuple( new RedisTupleSnapshot( entity.getProperties() ) );
}
else {
return null;
}
}
@Override
public void insertOrUpdateTuple(EntityKey key, Tuple tuple, TupleContext tupleContext) {
Map<String, Object> map = ( (RedisTupleSnapshot) tuple.getSnapshot() ).getMap();
MapHelpers.applyTupleOpsOnMap( tuple, map );
storeEntity( key, map, tupleContext.getOptionsContext(), tuple.getOperations() );
}
@Override
public boolean isStoredInEntityStructure(
AssociationKeyMetadata keyMetadata,
AssociationTypeContext associationTypeContext) {
AssociationStorageType associationStorage = getAssociationStorageType( associationTypeContext );
if ( keyMetadata.getAssociationType() == AssociationType.ONE_TO_ONE || keyMetadata.getAssociationKind() == AssociationKind.EMBEDDED_COLLECTION || associationStorage == AssociationStorageType.IN_ENTITY ) {
return true;
}
return false;
}
private AssociationStorageType getAssociationStorageType(AssociationTypeContext associationTypeContext) {
return associationTypeContext.getOptionsContext().getUnique(
AssociationStorageOption.class
);
}
@Override
public org.hibernate.ogm.model.spi.Association getAssociation(
AssociationKey key,
AssociationContext associationContext) {
RedisAssociation redisAssociation = null;
if ( isStoredInEntityStructure( key.getMetadata(), associationContext.getAssociationTypeContext() ) ) {
Entity owningEntity = getEmbeddingEntity( key );
if ( owningEntity != null && DotPatternMapHelpers.hasField(
owningEntity.getPropertiesAsHierarchy(),
key.getMetadata()
.getCollectionRole()
) ) {
redisAssociation = RedisAssociation.fromEmbeddedAssociation( owningEntity, key.getMetadata() );
}
}
else {
Association association = getAssociation( key );
if ( association != null ) {
redisAssociation = RedisAssociation.fromAssociationDocument( association );
}
}
return redisAssociation != null ? new org.hibernate.ogm.model.spi.Association(
new RedisAssociationSnapshot(
redisAssociation, key
)
) : null;
}
@Override
public org.hibernate.ogm.model.spi.Association createAssociation(
AssociationKey key,
AssociationContext associationContext) {
RedisAssociation redisAssociation;
if ( isStoredInEntityStructure( key.getMetadata(), associationContext.getAssociationTypeContext() ) ) {
Entity owningEntity = getEmbeddingEntity( key );
if ( owningEntity == null ) {
owningEntity = storeEntity( key.getEntityKey(), new Entity(), associationContext );
}
redisAssociation = RedisAssociation.fromEmbeddedAssociation( owningEntity, key.getMetadata() );
}
else {
Association association = new Association();
redisAssociation = RedisAssociation.fromAssociationDocument( association );
}
return new org.hibernate.ogm.model.spi.Association(
new RedisAssociationSnapshot(
redisAssociation,
key
)
);
}
// Retrieve entity that contains the association, do not enhance with entity key
private Entity getEmbeddingEntity(AssociationKey key) {
return entityStorageStrategy.getEntity( entityId( key.getEntityKey() ) );
}
@Override
public void insertOrUpdateAssociation(
AssociationKey associationKey, org.hibernate.ogm.model.spi.Association association,
AssociationContext associationContext) {
Object rows = getAssociationRows( association, associationKey, associationContext );
RedisAssociation redisAssociation = ( (RedisAssociationSnapshot) association.getSnapshot() ).getRedisAssociation();
redisAssociation.setRows( rows );
if ( isStoredInEntityStructure(
associationKey.getMetadata(),
associationContext.getAssociationTypeContext()
) ) {
storeEntity(
associationKey.getEntityKey(),
(Entity) redisAssociation.getOwningDocument(),
associationContext
);
}
else {
Long currentTtl = connection.pttl( entityId( associationKey.getEntityKey() ) );
storeAssociation( associationKey, (Association) redisAssociation.getOwningDocument() );
setAssociationTTL( associationKey, associationContext, currentTtl );
}
}
/**
* Returns the rows of the given association as to be stored in the database. Elements of the returned list are
* either
* <ul>
* <li>plain values such as {@code String}s, {@code int}s etc. in case there is exactly one row key column which is
* not part of the association key (in this case we don't need to persist the key name as it can be restored from
* the association key upon loading) or</li>
* <li>{@code Entity}s with keys/values for all row key columns which are not part of the association key</li>
* </ul>
*/
protected Object getAssociationRows(
org.hibernate.ogm.model.spi.Association association,
AssociationKey key,
AssociationContext associationContext) {
boolean organizeByRowKey = DotPatternMapHelpers.organizeAssociationMapByRowKey(
association,
key,
associationContext
);
// only in-entity maps can be mapped by row key to prevent huge external association maps
if ( isStoredInEntityStructure(
key.getMetadata(),
associationContext.getAssociationTypeContext()
) && organizeByRowKey ) {
String rowKeyColumn = organizeByRowKey ? key.getMetadata().getRowKeyIndexColumnNames()[0] : null;
Map<String, Object> rows = new HashMap<>();
for ( RowKey rowKey : association.getKeys() ) {
Map<String, Object> row = (Map<String, Object>) getAssociationRow( association.get( rowKey ), key );
String rowKeyValue = (String) row.remove( rowKeyColumn );
// if there is a single column on the value side left, unwrap it
if ( row.keySet().size() == 1 ) {
rows.put( rowKeyValue, row.values().iterator().next() );
}
else {
rows.put( rowKeyValue, row );
}
}
return rows;
}
List<Object> rows = new ArrayList<Object>( association.size() );
for ( RowKey rowKey : association.getKeys() ) {
rows.add( getAssociationRow( association.get( rowKey ), key ) );
}
return rows;
}
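// Illustrative shapes of the value returned above (field names are made up):
// - exactly one non-key row column: [ "2015-01-01", "2015-02-01" ] (plain values)
// - several non-key row columns: [ { "amount": 10, "currency": "EUR" }, ... ] (maps)
// - organized by row key (in-entity associations only): a map from row key value to either form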
@Override
public void removeAssociation(AssociationKey key, AssociationContext associationContext) {
if ( isStoredInEntityStructure( key.getMetadata(), associationContext.getAssociationTypeContext() ) ) {
Entity owningEntity = getEmbeddingEntity( key );
if ( owningEntity != null ) {
owningEntity.removeAssociation( key.getMetadata().getCollectionRole() );
storeEntity( key.getEntityKey(), owningEntity, associationContext );
}
}
else {
removeAssociation( key );
}
}
@Override
public void forEachTuple(final ModelConsumer consumer, EntityKeyMetadata... entityKeyMetadatas) {
for ( EntityKeyMetadata entityKeyMetadata : entityKeyMetadatas ) {
KeyScanCursor<String> cursor = null;
String prefix = entityKeyMetadata.getTable() + ":";
String prefixBytes = prefix;
ScanArgs scanArgs = ScanArgs.Builder.matches( prefix + "*" );
do {
if ( cursor != null ) {
cursor = connection.scan( cursor, scanArgs );
}
else {
cursor = connection.scan( scanArgs );
}
for ( String key : cursor.getKeys() ) {
Entity document = entityStorageStrategy.getEntity( key );
addKeyValuesFromKeyName( entityKeyMetadata, prefixBytes, key, document );
consumer.consume( new Tuple( new RedisTupleSnapshot( document.getProperties() ) ) );
}
} while ( !cursor.isFinished() );
}
}
private void storeEntity(
EntityKey key,
Map<String, Object> map,
OptionsContext optionsContext,
Set<TupleOperation> operations) {
Entity entityDocument = new Entity();
for ( Map.Entry<String, Object> entry : map.entrySet() ) {
if ( key.getMetadata().isKeyColumn( entry.getKey() ) ) {
continue;
}
entityDocument.set( entry.getKey(), entry.getValue() );
}
storeEntity( key, entityDocument, optionsContext, operations );
}
private void storeEntity(
EntityKey key,
Entity document,
OptionsContext optionsContext,
Set<TupleOperation> operations) {
Long currentTtl = connection.pttl( entityId( key ) );
entityStorageStrategy.storeEntity( entityId( key ), document, operations );
setEntityTTL( key, currentTtl, getTTL( optionsContext ) );
}
private Association getAssociation(AssociationKey key) {
String associationId = associationId( key );
List<String> lrange = connection.lrange( associationId, 0, -1 );
Association association = new Association();
for ( String bytes : lrange ) {
association.getRows().add( strategy.deserialize( bytes, Object.class ) );
}
return association;
}
private Entity storeEntity(EntityKey key, Entity entity, AssociationContext associationContext) {
Long currentTtl = connection.pttl( entityId( key ) );
entityStorageStrategy.storeEntity(
entityId( key ),
entity,
null
);
setEntityTTL( key, currentTtl, getTTL( associationContext ) );
return entity;
}
private void storeAssociation(AssociationKey key, Association document) {
String associationId = associationId( key );
connection.del( associationId );
for ( Object row : document.getRows() ) {
connection.rpush( associationId, strategy.serialize( row ) );
}
}
public JsonEntityStorageStrategy getEntityStorageStrategy() {
return entityStorageStrategy;
}
// MultigetGridDialect
@Override
public List<Tuple> getTuples(EntityKey[] keys, TupleContext tupleContext) {
if ( keys.length == 0 ) {
return Collections.emptyList();
}
String[] ids = new String[keys.length];
for ( int i = 0; i < keys.length; i++ ) {
ids[i] = entityId( keys[i] );
}
Iterable<Entity> entities = entityStorageStrategy.getEntities( ids );
List<Tuple> tuples = new ArrayList<Tuple>( keys.length );
int i = 0;
for ( Entity entity : entities ) {
if ( entity != null ) {
EntityKey key = keys[i];
addIdToEntity( entity, key.getColumnNames(), key.getColumnValues() );
tuples.add( new Tuple( new RedisTupleSnapshot( entity.getProperties() ) ) );
}
else {
tuples.add( null );
}
i++;
}
return tuples;
}
}
|
package org.robolectric.internal;
import static java.nio.charset.StandardCharsets.UTF_8;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import org.robolectric.annotation.Config;
import org.robolectric.res.Fs;
import org.robolectric.res.FsFile;
import org.robolectric.util.Util;
public class BuckManifestFactory implements ManifestFactory {
private static final String BUCK_ROBOLECTRIC_RES_DIRECTORIES = "buck.robolectric_res_directories";
private static final String BUCK_ROBOLECTRIC_ASSETS_DIRECTORIES = "buck.robolectric_assets_directories";
private static final String BUCK_ROBOLECTRIC_MANIFEST = "buck.robolectric_manifest";
@Override
public ManifestIdentifier identify(Config config) {
String buckManifest = System.getProperty(BUCK_ROBOLECTRIC_MANIFEST);
FsFile manifestFile = Fs.fileFromPath(buckManifest);
String buckResDirs = System.getProperty(BUCK_ROBOLECTRIC_RES_DIRECTORIES);
String buckAssetsDirs = System.getProperty(BUCK_ROBOLECTRIC_ASSETS_DIRECTORIES);
String packageName = config.packageName();
final List<FsFile> buckResources = getDirectoriesFromProperty(buckResDirs);
final List<FsFile> buckAssets = getDirectoriesFromProperty(buckAssetsDirs);
final FsFile resDir = buckResources.size() == 0 ? null : buckResources.get(buckResources.size() - 1);
final FsFile assetsDir = buckAssets.size() == 0 ? null : buckAssets.get(buckAssets.size() - 1);
final List<ManifestIdentifier> libraries;
if (resDir == null && assetsDir == null) {
libraries = null;
} else {
libraries = new ArrayList<>();
for (FsFile buckResource: buckResources) {
libraries.add(new ManifestIdentifier(null, null, buckResource, null, null));
}
for (FsFile buckAsset: buckAssets) {
libraries.add(new ManifestIdentifier(null, null, null, buckAsset, null));
}
}
return new ManifestIdentifier(packageName, manifestFile, resDir, assetsDir, libraries);
}
public static boolean isBuck() {
return System.getProperty(BUCK_ROBOLECTRIC_MANIFEST) != null;
}
@Nonnull
private List<FsFile> getDirectoriesFromProperty(String property) {
if (property == null) {
return Collections.emptyList();
}
List<String> dirs;
if (property.startsWith("@")) {
String filename = property.substring(1);
try {
dirs = Arrays.asList(
new String(Util.readBytes(new FileInputStream(filename)), UTF_8).split("\\n"));
} catch (IOException e) {
throw new RuntimeException("Cannot read file " + filename, e);
}
} else {
dirs = Arrays.asList(property.split(File.pathSeparator));
}
return dirs.stream().map(Fs::fileFromPath).collect(Collectors.toList());
}
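// A short worked example of the two property formats handled above (values are
// illustrative only): a plain value is split on File.pathSeparator, e.g.
//   -Dbuck.robolectric_res_directories=/a/res:/b/res  ->  [/a/res, /b/res]  (':' on Unix)
// while an "@" prefix points at a file listing one directory per line, e.g.
//   -Dbuck.robolectric_res_directories=@/tmp/res_dirs.txt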
}
|
package net.runelite.client.config;
import com.google.common.base.Strings;
import com.google.common.collect.ComparisonChain;
import com.google.common.collect.ImmutableMap;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Point;
import java.awt.Rectangle;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.Proxy;
import java.nio.channels.FileLock;
import java.nio.charset.Charset;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.time.Duration;
import java.time.Instant;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import javax.inject.Inject;
import javax.inject.Singleton;
import lombok.extern.slf4j.Slf4j;
import net.runelite.api.coords.WorldPoint;
import net.runelite.api.events.ConfigChanged;
import net.runelite.client.RuneLite;
import net.runelite.client.account.AccountSession;
import net.runelite.client.eventbus.EventBus;
import net.runelite.client.util.ColorUtil;
import net.runelite.http.api.config.ConfigClient;
import net.runelite.http.api.config.ConfigEntry;
import net.runelite.http.api.config.Configuration;
@Singleton
@Slf4j
public class ConfigManager
{
private static final String SETTINGS_FILE_NAME = "settings.properties";
private static final DateFormat TIME_FORMAT = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss");
@Inject
EventBus eventBus;
private final ScheduledExecutorService executor;
private AccountSession session;
private ConfigClient client;
private File propertiesFile;
private final ConfigInvocationHandler handler = new ConfigInvocationHandler(this);
private final Properties properties = new Properties();
private final Map<String, String> pendingChanges = new HashMap<>();
@Inject
public ConfigManager(ScheduledExecutorService scheduledExecutorService)
{
this.executor = scheduledExecutorService;
this.propertiesFile = getPropertiesFile();
executor.scheduleWithFixedDelay(this::sendConfig, 30, 30, TimeUnit.SECONDS);
}
public final void switchSession(AccountSession session)
{
// Ensure existing config is saved
sendConfig();
if (session == null)
{
this.session = null;
this.client = null;
}
else
{
this.session = session;
this.client = new ConfigClient(session.getUuid());
}
this.propertiesFile = getPropertiesFile();
load(); // load profile specific config
}
private File getLocalPropertiesFile()
{
return new File(RuneLite.RUNELITE_DIR, SETTINGS_FILE_NAME);
}
private File getPropertiesFile()
{
// Sessions that aren't logged in have no username
if (session == null || session.getUsername() == null)
{
return getLocalPropertiesFile();
}
else
{
File profileDir = new File(RuneLite.PROFILES_DIR, session.getUsername().toLowerCase());
return new File(profileDir, SETTINGS_FILE_NAME);
}
}
public void load()
{
if (client == null)
{
loadFromFile();
return;
}
Configuration configuration;
try
{
configuration = client.get();
}
catch (IOException ex)
{
log.debug("Unable to load configuration from client, using saved configuration from disk", ex);
loadFromFile();
return;
}
if (configuration.getConfig() == null || configuration.getConfig().isEmpty())
{
log.debug("No configuration from client, using saved configuration on disk");
loadFromFile();
return;
}
handler.invalidate();
properties.clear();
for (ConfigEntry entry : configuration.getConfig())
{
log.debug("Loading configuration value from client {}: {}", entry.getKey(), entry.getValue());
final String[] split = entry.getKey().split("\\.", 2);
if (split.length != 2)
{
continue;
}
final String groupName = split[0];
final String key = split[1];
final String value = entry.getValue();
final String oldValue = (String) properties.setProperty(entry.getKey(), value);
ConfigChanged configChanged = new ConfigChanged();
configChanged.setGroup(groupName);
configChanged.setKey(key);
configChanged.setOldValue(oldValue);
configChanged.setNewValue(value);
eventBus.post(configChanged);
}
try
{
saveToFile(propertiesFile);
log.debug("Updated configuration on disk with the latest version");
}
catch (IOException ex)
{
log.warn("Unable to update configuration on disk", ex);
}
}
private synchronized void syncPropertiesFromFile(File propertiesFile)
{
final Properties properties = new Properties();
try (FileInputStream in = new FileInputStream(propertiesFile))
{
properties.load(new InputStreamReader(in, Charset.forName("UTF-8")));
}
catch (Exception e)
{
log.debug("Malformed properties, skipping update");
return;
}
final Map<String, String> copy = (Map) ImmutableMap.copyOf(this.properties);
copy.forEach((groupAndKey, value) ->
{
if (!properties.containsKey(groupAndKey))
{
final String[] split = groupAndKey.split("\\.", 2);
if (split.length != 2)
{
return;
}
final String groupName = split[0];
final String key = split[1];
unsetConfiguration(groupName, key);
}
});
properties.forEach((objGroupAndKey, objValue) ->
{
final String groupAndKey = String.valueOf(objGroupAndKey);
final String[] split = groupAndKey.split("\\.", 2);
if (split.length != 2)
{
return;
}
final String groupName = split[0];
final String key = split[1];
final String value = String.valueOf(objValue);
setConfiguration(groupName, key, value);
});
}
public void importLocal()
{
if (session == null)
{
// No session, no import
return;
}
final File file = new File(propertiesFile.getParent(), propertiesFile.getName() + "." + TIME_FORMAT.format(new Date()));
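// e.g. (illustrative) a backup named settings.properties.2019-03-14_09-26-53,
// following the TIME_FORMAT pattern declared above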
try
{
saveToFile(file);
}
catch (IOException e)
{
log.warn("Backup failed, skipping import", e);
return;
}
syncPropertiesFromFile(getLocalPropertiesFile());
}
private synchronized void loadFromFile()
{
handler.invalidate();
properties.clear();
try (FileInputStream in = new FileInputStream(propertiesFile))
{
properties.load(new InputStreamReader(in, Charset.forName("UTF-8")));
}
catch (FileNotFoundException ex)
{
log.debug("Unable to load settings - no such file");
}
catch (IllegalArgumentException | IOException ex)
{
log.warn("Unable to load settings", ex);
}
try
{
Map<String, String> copy = (Map) ImmutableMap.copyOf(properties);
copy.forEach((groupAndKey, value) ->
{
final String[] split = groupAndKey.split("\\.", 2);
if (split.length != 2)
{
log.debug("Properties key malformed!: {}", groupAndKey);
properties.remove(groupAndKey);
return;
}
final String groupName = split[0];
final String key = split[1];
ConfigChanged configChanged = new ConfigChanged();
configChanged.setGroup(groupName);
configChanged.setKey(key);
configChanged.setOldValue(null);
configChanged.setNewValue(value);
eventBus.post(configChanged);
});
}
catch (Exception ex)
{
log.warn("Error posting config events", ex);
}
}
private void saveToFile(final File propertiesFile) throws IOException
{
propertiesFile.getParentFile().mkdirs();
try (FileOutputStream out = new FileOutputStream(propertiesFile))
{
final FileLock lock = out.getChannel().lock();
try
{
properties.store(new OutputStreamWriter(out, Charset.forName("UTF-8")), "RuneLite configuration");
}
finally
{
lock.release();
}
}
}
public <T> T getConfig(Class<T> clazz)
{
if (!Modifier.isPublic(clazz.getModifiers()))
{
throw new RuntimeException("Non-public configuration classes can't have default methods invoked");
}
T t = (T) Proxy.newProxyInstance(clazz.getClassLoader(), new Class<?>[]
{
clazz
}, handler);
return t;
}
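// A minimal sketch of a configuration interface consumed through getConfig.
// The interface name and values below are illustrative, not part of this
// codebase; only the annotation attributes used by getConfigDescriptor and
// setDefaultConfiguration further down are assumed.
//
//   @ConfigGroup("example")
//   public interface ExampleConfig
//   {
//       @ConfigItem(keyName = "greeting", name = "Greeting", position = 0)
//       default String greeting()
//       {
//           return "hello";
//       }
//   }
//
//   ExampleConfig config = configManager.getConfig(ExampleConfig.class);
//   String greeting = config.greeting(); // resolved through ConfigInvocationHandler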
public List<String> getConfigurationKeys(String prefix)
{
return properties.keySet().stream().filter(v -> ((String) v).startsWith(prefix)).map(String.class::cast).collect(Collectors.toList());
}
public String getConfiguration(String groupName, String key)
{
return properties.getProperty(groupName + "." + key);
}
public <T> T getConfiguration(String groupName, String key, Class<T> clazz)
{
String value = getConfiguration(groupName, key);
if (!Strings.isNullOrEmpty(value))
{
try
{
return (T) stringToObject(value, clazz);
}
catch (Exception e)
{
log.warn("Unable to unmarshal {}.{} ", groupName, key, e);
}
}
return null;
}
public void setConfiguration(String groupName, String key, String value)
{
String oldValue = (String) properties.setProperty(groupName + "." + key, value);
if (Objects.equals(oldValue, value))
{
return;
}
log.debug("Setting configuration value for {}.{} to {}", groupName, key, value);
handler.invalidate();
synchronized (pendingChanges)
{
pendingChanges.put(groupName + "." + key, value);
}
ConfigChanged configChanged = new ConfigChanged();
configChanged.setGroup(groupName);
configChanged.setKey(key);
configChanged.setOldValue(oldValue);
configChanged.setNewValue(value);
eventBus.post(configChanged);
}
public void setConfiguration(String groupName, String key, Object value)
{
setConfiguration(groupName, key, objectToString(value));
}
public void unsetConfiguration(String groupName, String key)
{
String oldValue = (String) properties.remove(groupName + "." + key);
if (oldValue == null)
{
return;
}
log.debug("Unsetting configuration value for {}.{}", groupName, key);
handler.invalidate();
synchronized (pendingChanges)
{
pendingChanges.put(groupName + "." + key, null);
}
ConfigChanged configChanged = new ConfigChanged();
configChanged.setGroup(groupName);
configChanged.setKey(key);
configChanged.setOldValue(oldValue);
eventBus.post(configChanged);
}
public ConfigDescriptor getConfigDescriptor(Object configurationProxy)
{
Class<?> inter = configurationProxy.getClass().getInterfaces()[0];
ConfigGroup group = inter.getAnnotation(ConfigGroup.class);
if (group == null)
{
throw new IllegalArgumentException("Not a config group");
}
final List<ConfigItemDescriptor> items = Arrays.stream(inter.getMethods())
.filter(m -> m.getParameterCount() == 0)
.map(m -> new ConfigItemDescriptor(
m.getDeclaredAnnotation(ConfigItem.class),
m.getReturnType(),
m.getDeclaredAnnotation(Range.class),
m.getDeclaredAnnotation(Alpha.class)
))
.sorted((a, b) -> ComparisonChain.start()
.compare(a.getItem().position(), b.getItem().position())
.compare(a.getItem().name(), b.getItem().name())
.result())
.collect(Collectors.toList());
return new ConfigDescriptor(group, items);
}
/**
* Initialize the configuration from the default settings
*
* @param proxy
*/
public void setDefaultConfiguration(Object proxy, boolean override)
{
Class<?> clazz = proxy.getClass().getInterfaces()[0];
ConfigGroup group = clazz.getAnnotation(ConfigGroup.class);
if (group == null)
{
return;
}
for (Method method : clazz.getDeclaredMethods())
{
ConfigItem item = method.getAnnotation(ConfigItem.class);
// only apply default configuration for methods which read configuration (0 args)
if (item == null || method.getParameterCount() != 0)
{
continue;
}
if (!method.isDefault())
{
if (override)
{
String current = getConfiguration(group.value(), item.keyName());
// only unset if already set
if (current != null)
{
unsetConfiguration(group.value(), item.keyName());
}
}
continue;
}
if (!override)
{
// This checks if it is set and is also unmarshallable to the correct type; so
// we will overwrite invalid config values with the default
Object current = getConfiguration(group.value(), item.keyName(), method.getReturnType());
if (current != null)
{
continue; // something else is already set
}
}
Object defaultValue;
try
{
defaultValue = ConfigInvocationHandler.callDefaultMethod(proxy, method, null);
}
catch (Throwable ex)
{
log.warn(null, ex);
continue;
}
String current = getConfiguration(group.value(), item.keyName());
String valueString = objectToString(defaultValue);
// null and the empty string are treated identically in sendConfig and treated as an unset
// If a config value defaults to "" and the current value is null, it will cause an extra
// unset to be sent, so treat them as equal
if (Objects.equals(current, valueString) || (Strings.isNullOrEmpty(current) && Strings.isNullOrEmpty(valueString)))
{
continue; // already set to the default value
}
log.debug("Setting default configuration value for {}.{} to {}", group.value(), item.keyName(), defaultValue);
setConfiguration(group.value(), item.keyName(), valueString);
}
}
static Object stringToObject(String str, Class<?> type)
{
if (type == boolean.class || type == Boolean.class)
{
return Boolean.parseBoolean(str);
}
if (type == int.class)
{
return Integer.parseInt(str);
}
if (type == Color.class)
{
return ColorUtil.fromString(str);
}
if (type == Dimension.class)
{
String[] splitStr = str.split("x");
int width = Integer.parseInt(splitStr[0]);
int height = Integer.parseInt(splitStr[1]);
return new Dimension(width, height);
}
if (type == Point.class)
{
String[] splitStr = str.split(":");
int width = Integer.parseInt(splitStr[0]);
int height = Integer.parseInt(splitStr[1]);
return new Point(width, height);
}
if (type == Rectangle.class)
{
String[] splitStr = str.split(":");
int x = Integer.parseInt(splitStr[0]);
int y = Integer.parseInt(splitStr[1]);
int width = Integer.parseInt(splitStr[2]);
int height = Integer.parseInt(splitStr[3]);
return new Rectangle(x, y, width, height);
}
if (type.isEnum())
{
return Enum.valueOf((Class<? extends Enum>) type, str);
}
if (type == Instant.class)
{
return Instant.parse(str);
}
if (type == Keybind.class || type == ModifierlessKeybind.class)
{
String[] splitStr = str.split(":");
int code = Integer.parseInt(splitStr[0]);
int mods = Integer.parseInt(splitStr[1]);
if (type == ModifierlessKeybind.class)
{
return new ModifierlessKeybind(code, mods);
}
return new Keybind(code, mods);
}
if (type == WorldPoint.class)
{
String[] splitStr = str.split(":");
int x = Integer.parseInt(splitStr[0]);
int y = Integer.parseInt(splitStr[1]);
int plane = Integer.parseInt(splitStr[2]);
return new WorldPoint(x, y, plane);
}
if (type == Duration.class)
{
return Duration.ofMillis(Long.parseLong(str));
}
return str;
}
static String objectToString(Object object)
{
if (object instanceof Color)
{
return String.valueOf(((Color) object).getRGB());
}
if (object instanceof Enum)
{
return ((Enum) object).name();
}
if (object instanceof Dimension)
{
Dimension d = (Dimension) object;
return d.width + "x" + d.height;
}
if (object instanceof Point)
{
Point p = (Point) object;
return p.x + ":" + p.y;
}
if (object instanceof Rectangle)
{
Rectangle r = (Rectangle) object;
return r.x + ":" + r.y + ":" + r.width + ":" + r.height;
}
if (object instanceof Instant)
{
return ((Instant) object).toString();
}
if (object instanceof Keybind)
{
Keybind k = (Keybind) object;
return k.getKeyCode() + ":" + k.getModifiers();
}
if (object instanceof WorldPoint)
{
WorldPoint wp = (WorldPoint) object;
return wp.getX() + ":" + wp.getY() + ":" + wp.getPlane();
}
if (object instanceof Duration)
{
return Long.toString(((Duration) object).toMillis());
}
return object.toString();
}
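// Worked examples of the round-trip encoding implemented by stringToObject and
// objectToString above (values are illustrative):
//   objectToString(new Dimension(800, 600))     -> "800x600"
//   objectToString(new Point(10, 20))           -> "10:20"
//   objectToString(new Rectangle(1, 2, 30, 40)) -> "1:2:30:40"
//   objectToString(Duration.ofSeconds(5))       -> "5000"
//   stringToObject("800x600", Dimension.class)  -> new Dimension(800, 600)
//   stringToObject("5000", Duration.class)      -> Duration.ofMillis(5000)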
public void sendConfig()
{
boolean changed;
synchronized (pendingChanges)
{
if (client != null)
{
for (Map.Entry<String, String> entry : pendingChanges.entrySet())
{
String key = entry.getKey();
String value = entry.getValue();
if (Strings.isNullOrEmpty(value))
{
client.unset(key);
}
else
{
client.set(key, value);
}
}
}
changed = !pendingChanges.isEmpty();
pendingChanges.clear();
}
if (changed)
{
try
{
saveToFile(propertiesFile);
}
catch (IOException ex)
{
log.warn("unable to save configuration file", ex);
}
}
}
}
|
package org.bouncycastle.cms;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.DigestInputStream;
import java.security.InvalidAlgorithmParameterException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.cert.CRLException;
import java.security.cert.CertStore;
import java.security.cert.CertificateException;
import java.security.cert.CertificateFactory;
import java.security.cert.CollectionCertStoreParameters;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.bouncycastle.asn1.*;
import org.bouncycastle.asn1.util.ASN1Dump;
import org.bouncycastle.asn1.cms.SignerInfo;
import org.bouncycastle.asn1.x509.AlgorithmIdentifier;
import org.bouncycastle.sasn1.*;
import org.bouncycastle.sasn1.cms.SignedDataParser;
import org.bouncycastle.util.encoders.Hex;
/**
* Parsing class for a CMS Signed Data object from an input stream.
* <p>
* Note that because we are in a streaming mode only one signer can be tried, and it is important
* that the methods on the parser are called in the appropriate order.
* </p>
* <p>
* A simple example of usage for an encapsulated signature.
* </p>
* <p>
* Two notes: first, in the example below the validity of
* the certificate isn't verified, just the fact that one of the certs
* matches the given signer, and, second, because we are in a streaming
* mode the order of the operations is important.
* </p>
* <pre>
* CMSSignedDataParser sp = new CMSSignedDataParser(encapSigData);
*
* sp.getSignedContent().drain();
*
* CertStore certs = sp.getCertificatesAndCRLs("Collection", "BC");
* SignerInformationStore signers = sp.getSignerInfos();
*
* Collection c = signers.getSigners();
* Iterator it = c.iterator();
*
* while (it.hasNext())
* {
* SignerInformation signer = (SignerInformation)it.next();
* Collection certCollection = certs.getCertificates(signer.getSID());
*
* Iterator certIt = certCollection.iterator();
* X509Certificate cert = (X509Certificate)certIt.next();
*
* System.out.println("verify returns: " + signer.verify(cert, "BC"));
* }
* </pre>
* Note also: this class does not introduce buffering - if you are processing large files you should create
* the parser with:
* <pre>
* CMSSignedDataParser ep = new CMSSignedDataParser(new BufferedInputStream(encapSigData, bufSize));
* </pre>
* where bufSize is a suitably large buffer size.
*/
public class CMSSignedDataParser
extends CMSContentInfoParser
{
private SignedDataParser _signedData;
private CMSTypedStream _signedContent;
private Map _digests;
private CertStore _certStore;
private SignerInformationStore _signerInfoStore;
public CMSSignedDataParser(
byte[] sigBlock)
throws CMSException
{
this(new ByteArrayInputStream(sigBlock));
}
public CMSSignedDataParser(
CMSTypedStream signedContent,
byte[] sigBlock)
throws CMSException
{
this(signedContent, new ByteArrayInputStream(sigBlock));
}
/**
* base constructor - with encapsulated content
*/
public CMSSignedDataParser(
InputStream sigData)
throws CMSException
{
this(null, sigData);
}
/**
* base constructor
*
* @param signedContent the content that was signed.
* @param sigData the signature object stream.
*/
public CMSSignedDataParser(
CMSTypedStream signedContent,
InputStream sigData)
throws CMSException
{
super(sigData);
try
{
this._signedContent = signedContent;
this._signedData = new SignedDataParser((Asn1Sequence)_contentInfo.getContent(BerTag.SEQUENCE));
this._digests = new HashMap();
Asn1Set digAlgs = _signedData.getDigestAlgorithms();
Asn1Object o;
while ((o = digAlgs.readObject()) != null)
{
AlgorithmIdentifier id = AlgorithmIdentifier.getInstance(new ASN1InputStream(((DerSequence)o).getEncoded()).readObject());
try
{
String digestName = CMSSignedHelper.INSTANCE.getDigestAlgName(id.getObjectId().toString());
MessageDigest dig = MessageDigest.getInstance(digestName);
this._digests.put(digestName, dig);
}
catch (NoSuchAlgorithmException e)
{
// ignore
}
}
if (_signedContent == null)
{
// If the message is simply a certificate chain message, getContent() may return null.
Asn1OctetString octs = (Asn1OctetString)_signedData.getEncapContentInfo().getContent(BerTag.OCTET_STRING);
if (octs != null)
{
this._signedContent = new CMSTypedStream(octs.getOctetStream());
}
}
else
{
// content passed in, need to read past empty encapsulated content info object if present
Asn1OctetString octs = (Asn1OctetString)_signedData.getEncapContentInfo().getContent(BerTag.OCTET_STRING);
if (octs != null)
{
InputStream in = octs.getOctetStream();
while (in.read() >= 0)
{
// ignore
}
}
}
}
catch (IOException e)
{
throw new CMSException("io exception: " + e.getMessage(), e);
}
if (_digests.isEmpty())
{
throw new CMSException("no digests could be created for message.");
}
}
/**
* return the collection of signers that are associated with the
* signatures for the message.
* @throws CMSException
*/
public SignerInformationStore getSignerInfos()
throws CMSException
{
if (_signerInfoStore == null)
{
List signerInfos = new ArrayList();
Map hashes = new HashMap();
Iterator it = _digests.keySet().iterator();
while (it.hasNext())
{
Object digestKey = it.next();
hashes.put(digestKey, ((MessageDigest)_digests.get(digestKey)).digest());
}
try
{
Asn1Set s = _signedData.getSignerInfos();
Asn1Object o = null;
while ((o = s.readObject()) != null)
{
SignerInfo info = SignerInfo.getInstance(convertObject(o));
String digestName = CMSSignedHelper.INSTANCE.getDigestAlgName(info.getDigestAlgorithm().getObjectId().getId());
byte[] hash = (byte[])hashes.get(digestName);
signerInfos.add(new SignerInformation(info, new DERObjectIdentifier(_signedContent.getContentType()), null, hash));
}
}
catch (IOException e)
{
throw new CMSException("io exception: " + e.getMessage(), e);
}
_signerInfoStore = new SignerInformationStore(signerInfos);
}
return _signerInfoStore;
}
private DERObject convertObject(Asn1Object obj)
throws IOException
{
if (obj instanceof DerObject)
{
return new ASN1InputStream(((DerObject)obj).getEncoded()).readObject();
}
else
{
if (obj instanceof Asn1Sequence)
{
Asn1Sequence seq = (Asn1Sequence)obj;
ASN1EncodableVector v = new ASN1EncodableVector();
while ((obj = seq.readObject()) != null)
{
v.add(convertObject(obj));
}
return new DERSequence(v);
}
else if (obj instanceof Asn1Set)
{
Asn1Set set = (Asn1Set)obj;
ASN1EncodableVector v = new ASN1EncodableVector();
while ((obj = set.readObject()) != null)
{
v.add(convertObject(obj));
}
return new DERSet(v);
}
else if (obj instanceof Asn1TaggedObject)
{
Asn1TaggedObject tagged = (Asn1TaggedObject)obj;
// TODO find a more general way of handling this
return new DERTaggedObject(false, tagged.getTagNumber(), convertObject(tagged.getObject(BerTag.SET, false)));
}
else
{
throw new IOException("unrecognised object: " + obj);
}
}
}
/**
* return a CertStore containing the certificates and CRLs associated with
* this message.
*
* @exception NoSuchProviderException if the provider requested isn't available.
* @exception NoSuchAlgorithmException if the cert store isn't available.
*/
public CertStore getCertificatesAndCRLs(
String type,
String provider)
throws NoSuchAlgorithmException, NoSuchProviderException, CMSException
{
if (_certStore == null)
{
List certsAndcrls = new ArrayList();
ByteArrayOutputStream bOut = new ByteArrayOutputStream();
CertificateFactory cf;
try
{
cf = CertificateFactory.getInstance("X.509", provider);
}
catch (CertificateException ex)
{
throw new CMSException("can't get certificate factory.", ex);
}
// load the certificates and revocation lists if we have any
try
{
Asn1Set s = _signedData.getCertificates();
if (s != null)
{
DerSequence seq;
while ((seq = (DerSequence)s.readObject()) != null)
{
try
{
certsAndcrls.add(cf.generateCertificate(
new ByteArrayInputStream(seq.getEncoded())));
}
catch (IOException ex)
{
throw new CMSException(
"can't re-encode certificate!", ex);
}
catch (CertificateException ex)
{
throw new CMSException(
"can't re-encode certificate!", ex);
}
bOut.reset();
}
}
s = _signedData.getCrls();
if (s != null)
{
DerSequence seq;
while ((seq = (DerSequence)s.readObject()) != null)
{
try
{
certsAndcrls.add(cf.generateCRL(
new ByteArrayInputStream(seq.getEncoded())));
}
catch (IOException ex)
{
throw new CMSException("can't re-encode CRL!", ex);
}
catch (CRLException ex)
{
throw new CMSException("can't re-encode CRL!", ex);
}
bOut.reset();
}
}
}
catch (IOException e)
{
throw new CMSException("io exception: " + e.getMessage(), e);
}
try
{
_certStore = CertStore.getInstance(type,
new CollectionCertStoreParameters(certsAndcrls), provider);
}
catch (InvalidAlgorithmParameterException e)
{
throw new CMSException("can't setup the CertStore", e);
}
}
return _certStore;
}
public CMSTypedStream getSignedContent()
{
if (_signedContent != null)
{
InputStream digStream = _signedContent.getContentStream();
Iterator it = _digests.values().iterator();
while (it.hasNext())
{
digStream = new DigestInputStream(digStream, (MessageDigest)it.next());
}
return new CMSTypedStream(_signedContent.getContentType(), digStream);
}
else
{
return null;
}
}
}
|
package gov.nrel.apiumbrella;
import java.io.IOException;
import java.math.BigInteger;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.NumberFormat;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Locale;
import java.util.Map;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.apache.parquet.avro.AvroParquetWriter;
import org.apache.parquet.hadoop.ParquetWriter;
import org.apache.parquet.hadoop.metadata.CompressionCodecName;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Period;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import io.searchbox.client.JestClient;
import io.searchbox.client.JestClientFactory;
import io.searchbox.client.JestResult;
import io.searchbox.client.config.HttpClientConfig;
import io.searchbox.core.Search;
import io.searchbox.core.SearchScroll;
import io.searchbox.params.Parameters;
public class DayWorker implements Runnable {
private DateTime date;
private String startDateString;
private String endDateString;
private Schema schema;
private HashSet<String> schemaIntFields;
private HashSet<String> schemaDoubleFields;
private HashSet<String> schemaBooleanFields;
private App app;
private int totalProcessedHits = 0;
private int totalHits;
ParquetWriter<GenericRecord> parquetWriter;
DateTimeFormatter dateTimeParser = ISODateTimeFormat.dateTimeParser();
DateTimeFormatter dateFormatter = ISODateTimeFormat.date();
public DayWorker(App app, DateTime date) {
this.app = app;
this.date = date;
this.schema = app.getSchema();
this.schemaIntFields = app.getSchemaIntFields();
this.schemaDoubleFields = app.getSchemaDoubleFields();
this.schemaBooleanFields = app.getSchemaBooleanFields();
this.startDateString = this.dateFormatter.print(this.date);
DateTime tomorrow = this.date.plus(Period.days(1));
this.endDateString = this.dateFormatter.print(tomorrow);
}
public void run() {
try {
JestClientFactory factory = new JestClientFactory();
factory.setHttpClientConfig(new HttpClientConfig.Builder(App.ELASTICSEARCH_URL)
.multiThreaded(true)
.connTimeout(10000)
.readTimeout(30000)
.build());
JestClient client = factory.getObject();
// Perform a scroll query to fetch the specified day's data from
// elasticsearch.
String query = "{" +
" \"sort\":\"request_at\"," +
" \"query\":{" +
" \"filtered\":{" +
" \"filter\":{" +
" \"range\":{" +
" \"request_at\":{" +
" \"gte\":\"" + this.startDateString + "\"," +
" \"lt\":\"" + this.endDateString + "\"" +
" }" +
" }" +
" }" +
" }" +
" }" +
"}";
String indexName = "api-umbrella-logs-" + ISODateTimeFormat.yearMonth().print(this.date);
Search search = new Search.Builder(query)
.addIndex(indexName)
.setParameter(Parameters.SIZE, App.PAGE_SIZE)
.setParameter(Parameters.SCROLL, "1m")
.build();
JestResult result = client.execute(search);
if(!result.isSucceeded()) {
System.out.println(result.getErrorMessage());
System.exit(1);
}
String scrollId = result.getJsonObject().get("_scroll_id").getAsString();
this.totalHits = result.getJsonObject().getAsJsonObject("hits").get("total").getAsInt();
while(true) {
// Keep looping until the scroll result returns no results.
if(!processResult(result)) {
break;
}
SearchScroll scroll = new SearchScroll.Builder(scrollId, "1m").build();
result = client.execute(scroll);
if(!result.isSucceeded()) {
System.out.println(result.getErrorMessage());
System.exit(1);
}
scrollId = result.getJsonObject().get("_scroll_id").getAsString();
}
// Close the parquet file (but only if it exists, so we skip over days
// with no data).
if(this.parquetWriter != null) {
parquetWriter.close();
}
} catch(Exception e) {
e.printStackTrace();
System.exit(1);
}
}
private ParquetWriter<GenericRecord> getParquetWriter() {
if(this.parquetWriter == null) {
try {
// Create a new file in /dir/YYYY/MM/YYYY-MM-DD.par
Path path = Paths.get(App.DIR,
this.date.toString("YYYY"),
this.date.toString("MM"),
this.startDateString + ".par");
Files.createDirectories(path.getParent());
this.parquetWriter = AvroParquetWriter
.<GenericRecord> builder(new org.apache.hadoop.fs.Path(path.toString()))
.withSchema(schema)
.withCompressionCodec(CompressionCodecName.SNAPPY)
.withDictionaryEncoding(true)
.withValidation(false)
.build();
} catch(IOException e) {
e.printStackTrace();
System.exit(1);
}
}
return this.parquetWriter;
}
private boolean processResult(JestResult result) throws Exception {
JsonArray hits = result.getJsonObject().getAsJsonObject("hits").getAsJsonArray("hits");
int pageHits = hits.size();
if(pageHits == 0) {
return false;
}
this.totalProcessedHits += pageHits;
BigInteger globalHits = this.app.incrementGlobalHits(pageHits);
NumberFormat numberFormatter = NumberFormat.getNumberInstance(Locale.US);
DateTime firstRequestAt = this.parseTimestamp(
hits.get(0).getAsJsonObject().get("_source").getAsJsonObject().get("request_at"));
System.out.println(String.format(
"%s | Thread %2s | Processing %s to %s | %10s / %10s | %12s | %s",
new DateTime(),
Thread.currentThread().getId(),
this.startDateString,
this.endDateString,
numberFormatter.format(this.totalProcessedHits),
numberFormatter.format(this.totalHits),
numberFormatter.format(globalHits),
firstRequestAt));
for(int i = 0; i < pageHits; i++) {
JsonObject hit = hits.get(i).getAsJsonObject();
this.processHit(hit);
}
return true;
}
private void processHit(JsonObject hit) throws Exception {
JsonObject source = hit.get("_source").getAsJsonObject();
try {
// For each hit, create a new Avro record to serialize it into the new
// format for parquet storage.
GenericRecord log = new GenericData.Record(schema);
log.put("id", hit.get("_id"));
// Loop over each attribute in the source data, assigning each value to
// the new data record.
for(Map.Entry<String, JsonElement> entry : source.entrySet()) {
String key = entry.getKey();
// Skip this field if we've explicitly marked it as not migrating.
if(App.SKIP_FIELDS.contains(key)) {
continue;
}
JsonElement value = entry.getValue();
// Skip setting anything if the value is null.
if(value == null || value.isJsonNull()) {
continue;
}
// Handle special processing for certain fields.
switch(key) {
case "request_at":
// Split up the timestamp into several fields for better compatibility
// with the Kylin cubes that will be created (which don't support
// timestamps yet).
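// For example (illustrative), a request_at of 2015-07-04T13:45:30 is stored as
// request_at (epoch millis), request_at_year=2015, request_at_month=7,
// request_at_date="2015-07-04", request_at_hour=13, request_at_minute=45.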
DateTime requestAt = this.parseTimestamp(value);
log.put("request_at", requestAt.getMillis());
log.put("request_at_year", requestAt.getYear());
log.put("request_at_month", requestAt.getMonthOfYear());
log.put("request_at_date", this.dateFormatter.print(requestAt));
log.put("request_at_hour", requestAt.getHourOfDay());
log.put("request_at_minute", requestAt.getMinuteOfHour());
value = null;
break;
case "request_ip_location":
// Flatten the location object into two separate fields.
log.put("request_ip_lat", value.getAsJsonObject().get("lat").getAsDouble());
log.put("request_ip_lon", value.getAsJsonObject().get("lon").getAsDouble());
value = null;
break;
case "request_url":
// Perform various cleanup and sanity checks on storing the URL as
// separate fields (versus the duplicative separate fields plus a full
// URL field). The full URL field sometimes differs in the data versus
// the individual fields, so we want to make sure we're transferring
// the best data possible and not losing anything in the process.
URL url;
try {
url = new URL(value.getAsString());
} catch(MalformedURLException e) {
try {
// Cleanup some oddities in some invalid URLs seen (I think from
// localhost testing).
url = new URL(
value.getAsString().replace(":80:80/", ":80/").replace("://[", "://").replace("]/",
"/"));
} catch(MalformedURLException e2) {
System.out.println("Thread " + Thread.currentThread().getId());
System.out.println(hit);
throw(e2);
}
}
// Store the original request_scheme, since that seems to be more
// accurate than the scheme on the full URL (some requests logged as http
// are actually https).
String requestScheme = source.get("request_scheme").getAsString();
if(!url.getProtocol().equals(requestScheme)) {
System.out.println("WARNING: request_url's scheme (" + url.getProtocol()
+ ") does not match request_scheme (" + requestScheme + ")");
}
log.put("request_url_scheme", requestScheme);
// Store the host extracted from the full URL, since that seems more
// accurate than the separate request_host field (it seems to better
// handle some odd invalid hostnames, which probably don't actually
// matter too much).
String requestHost = source.get("request_host").getAsString().toLowerCase();
String urlHost = url.getHost().toLowerCase();
if(!urlHost.equals(requestHost)) {
System.out.println("WARNING: request_url's host (" + url.getHost()
+ ") does not match request_host (" + requestHost + ")");
}
log.put("request_url_host", urlHost);
// As a new field, store the port used. Most of the time this will be
// the default 80 or 443, depending on the scheme.
int urlPort = url.getPort();
if(log.get("request_url_scheme").equals("https") && urlPort == 80) {
log.put("request_url_port", 443);
} else {
// If the port isn't set, or it's 50090, set it to the default port
// based on the scheme. We're ignoring port 50090, since this is
// present on some of our rather old imported logs, and was an
// internal-only port that was never public, so it isn't an accurate
// value to keep.
if(urlPort == -1 || urlPort == 50090) {
if(log.get("request_url_scheme").equals("https")) {
log.put("request_url_port", 443);
} else {
log.put("request_url_port", 80);
}
} else {
log.put("request_url_port", urlPort);
}
}
// Store the path extracted from the full URL, since it seems to be
// more accurate at dealing with odd URL encoding issues.
String requestPath = source.get("request_path").getAsString();
if(!url.getPath().equals(requestPath)) {
// Before throwing a warning, ignore some semi-common URL encoding
// differences between the full URL and the request_path field
// (where we're comfortable with the encoding of the full URL's
// version).
String encodedUrlPath = URLEncoder.encode(url.getPath(), "UTF-8");
encodedUrlPath = encodedUrlPath.replace("%25", "%");
String encodedRequestPath = requestPath.replaceAll("/(x[0-9])", "\\\\$1");
encodedRequestPath = URLEncoder.encode(encodedRequestPath, "UTF-8");
encodedRequestPath = encodedRequestPath.replace("%25", "%");
if(!encodedUrlPath.equals(encodedRequestPath)) {
System.out.println("WARNING: request_url's path (" + url.getPath() + " - "
+ encodedUrlPath + ") does not match request_path (" + requestPath + " - "
+ encodedRequestPath + ")");
}
}
log.put("request_url_path", url.getPath());
// Store the query string extracted from the full URL.
String requestQuery = url.getQuery();
log.put("request_url_query", requestQuery);
// If a hash fragment is present in the full URL, this is actually a
// flag that something's fishy with the URL encoding, since our
// server-side logs can't possibly contain fragment information. So
// we'll assume this information actually represents something
// following a URL-encoded hash fragment, and append that to the
// appropriate place.
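// For example (illustrative), a logged URL ending in "/api/foo#bar" with no
// query string ends up with request_url_path "/api/foo%23bar", while
// "/api/foo?a=1#bar" ends up with request_url_query "a=1%23bar".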
String urlRef = url.getRef();
if(urlRef != null) {
if(log.get("request_url_query") != null) {
log.put("request_url_query", log.get("request_url_query") + "%23" + urlRef);
} else {
log.put("request_url_path", log.get("request_url_path") + "%23" + urlRef);
}
}
// Re-assemble the URL based on all of our newly stored individual
// components.
String reassmbledUrl = log.get("request_url_scheme") + "://"
+ log.get("request_url_host");
if(log.get("request_url_scheme").equals("http")) {
if((Integer) log.get("request_url_port") != 80) {
reassmbledUrl = reassmbledUrl + ":" + log.get("request_url_port");
}
} else if(log.get("request_url_scheme").equals("https")) {
if((Integer) log.get("request_url_port") != 443) {
reassmbledUrl = reassmbledUrl + ":" + log.get("request_url_port");
}
} else {
reassmbledUrl = reassmbledUrl + ":" + log.get("request_url_port");
}
reassmbledUrl = reassmbledUrl + log.get("request_url_path");
if(requestQuery != null) {
reassmbledUrl = reassmbledUrl + "?" + log.get("request_url_query");
}
// As a last sanity check to make sure we're not throwing away data as
// part of this migration, compare the original full URL string to the
// new URL composed of the various parts.
if(!value.getAsString().equals(reassmbledUrl)) {
// Ignore some of the default ports for comparison.
if(!value.getAsString().replaceFirst(":(80|443|50090)/", "/").equals(reassmbledUrl)) {
// Ignore url encoding of the hash fragment.
if(!value.getAsString().replaceFirst("#", "%23").equals(reassmbledUrl)) {
System.out.println("WARNING: request_url (" + value.getAsString()
+ ") does not match reassembled URL (" + reassmbledUrl + ")");
}
}
}
value = null;
break;
// The following are some renamed fields to better normalize the new
// storage schema.
case "response_time":
key = "timer_response";
break;
case "backend_response_time":
key = "timer_backend_response";
break;
case "internal_gatekeeper_time":
key = "timer_internal";
break;
case "proxy_overhead":
key = "timer_proxy_overhead";
break;
case "gatekeeper_denied_code":
key = "denied_reason";
break;
case "imported":
key = "log_imported";
break;
}
if(value != null) {
// Set the value on the new record, performing type-casting as needed.
try {
if(this.schemaIntFields.contains(key)) {
log.put(key, value.getAsInt());
} else if(this.schemaDoubleFields.contains(key)) {
log.put(key, value.getAsDouble());
} else if(this.schemaBooleanFields.contains(key)) {
log.put(key, value.getAsBoolean());
} else {
try {
log.put(key, value.getAsString());
} catch(IllegalStateException e) {
// Handle some unexpected array types by comma-delimiting the
// values.
if(value.isJsonArray()) {
StringBuffer buffer = new StringBuffer();
Iterator<JsonElement> iter = value.getAsJsonArray().iterator();
while(iter.hasNext()) {
buffer.append(iter.next().getAsString());
if(iter.hasNext()) {
buffer.append(", ");
}
}
log.put(key, buffer.toString());
} else {
throw(e);
}
}
}
} catch(Exception e) {
System.out.println("Error on field: " + key);
System.out.println(e.getMessage());
throw(e);
}
}
}
this.getParquetWriter().write(log);
} catch(Exception e) {
System.out.println("Error on hit: " + hit);
System.out.println(e.getMessage());
System.out.println("Error on thread: " + Thread.currentThread().getId());
throw(e);
}
}
private DateTime parseTimestamp(JsonElement value) {
if(value.getAsJsonPrimitive().isNumber()) {
return new DateTime(value.getAsLong(), DateTimeZone.UTC);
} else {
return this.dateTimeParser.parseDateTime(value.getAsString());
}
}
}
|
package org.bouncycastle.crypto.test;
import java.math.BigInteger;
import java.security.SecureRandom;
import org.bouncycastle.crypto.AsymmetricCipherKeyPair;
import org.bouncycastle.crypto.DataLengthException;
import org.bouncycastle.crypto.engines.ElGamalEngine;
import org.bouncycastle.crypto.generators.ElGamalKeyPairGenerator;
import org.bouncycastle.crypto.generators.ElGamalParametersGenerator;
import org.bouncycastle.crypto.params.ElGamalKeyGenerationParameters;
import org.bouncycastle.crypto.params.ElGamalParameters;
import org.bouncycastle.crypto.params.ElGamalPrivateKeyParameters;
import org.bouncycastle.crypto.params.ElGamalPublicKeyParameters;
import org.bouncycastle.crypto.params.ParametersWithRandom;
import org.bouncycastle.util.test.SimpleTest;
public class ElGamalTest
extends SimpleTest
{
private BigInteger g512 = new BigInteger("153d5d6172adb43045b68ae8e1de1070b6137005686d29d3d73a7749199681ee5b212c9b96bfdcfa5b20cd5e3fd2044895d609cf9b410b7a0f12ca1cb9a428cc", 16);
private BigInteger p512 = new BigInteger("9494fec095f3b85ee286542b3836fc81a5dd0a0349b4c239dd38744d488cf8e31db8bcb7d33b41abb9e5a33cca9144b1cef332c94bf0573bf047a3aca98cdf3b", 16);
private BigInteger g768 = new BigInteger("7c240073c1316c621df461b71ebb0cdcc90a6e5527e5e126633d131f87461c4dc4afc60c2cb0f053b6758871489a69613e2a8b4c8acde23954c08c81cbd36132cfd64d69e4ed9f8e51ed6e516297206672d5c0a69135df0a5dcf010d289a9ca1", 16);
private BigInteger p768 = new BigInteger("8c9dd223debed1b80103b8b309715be009d48860ed5ae9b9d5d8159508efd802e3ad4501a7f7e1cfec78844489148cd72da24b21eddd01aa624291c48393e277cfc529e37075eccef957f3616f962d15b44aeab4039d01b817fde9eaa12fd73f", 16);
private BigInteger g1024 = new BigInteger("1db17639cdf96bc4eabba19454f0b7e5bd4e14862889a725c96eb61048dcd676ceb303d586e30f060dbafd8a571a39c4d823982117da5cc4e0f89c77388b7a08896362429b94a18a327604eb7ff227bffbc83459ade299e57b5f77b50fb045250934938efa145511166e3197373e1b5b1e52de713eb49792bedde722c6717abf", 16);
private BigInteger p1024 = new BigInteger("a00e283b3c624e5b2b4d9fbc2653b5185d99499b00fd1bf244c6f0bb817b4d1c451b2958d62a0f8a38caef059fb5ecd25d75ed9af403f5b5bdab97a642902f824e3c13789fed95fa106ddfe0ff4a707c85e2eb77d49e68f2808bcea18ce128b178cd287c6bc00efa9a1ad2a673fe0dceace53166f75b81d6709d5f8af7c66bb7", 16);
public String getName()
{
return "ElGamal";
}
private void testEnc(
int size,
BigInteger g,
BigInteger p)
{
ElGamalParameters dhParams = new ElGamalParameters(p, g);
ElGamalKeyGenerationParameters params = new ElGamalKeyGenerationParameters(new SecureRandom(), dhParams);
ElGamalKeyPairGenerator kpGen = new ElGamalKeyPairGenerator();
kpGen.init(params);
// generate pair
AsymmetricCipherKeyPair pair = kpGen.generateKeyPair();
ElGamalPublicKeyParameters pu = (ElGamalPublicKeyParameters)pair.getPublic();
ElGamalPrivateKeyParameters pv = (ElGamalPrivateKeyParameters)pair.getPrivate();
ElGamalEngine e = new ElGamalEngine();
e.init(true, pu);
if (e.getOutputBlockSize() != size / 4)
{
fail(size + " getOutputBlockSize() on encryption failed.");
}
String message = "This is a test";
byte[] pText = message.getBytes();
byte[] cText = e.processBlock(pText, 0, pText.length);
e.init(false, pv);
if (e.getOutputBlockSize() != (size / 8) - 1)
{
fail(size + " getOutputBlockSize() on decryption failed.");
}
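// For example, with 512 bit parameters these checks expect a 128 byte output
// block on encryption (512 / 4) and a 63 byte block on decryption (512 / 8 - 1).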
pText = e.processBlock(cText, 0, cText.length);
if (!message.equals(new String(pText)))
{
fail(size + " bit test failed");
}
byte[] bytes = new byte[e.getInputBlockSize() + 2];
try
{
e.processBlock(bytes, 0, bytes.length);
fail("out of range block not detected");
}
catch (DataLengthException ex)
{
// expected
}
try
{
bytes[0] = (byte)0xff;
e.processBlock(bytes, 0, bytes.length - 1);
fail("out of range block not detected");
}
catch (DataLengthException ex)
{
// expected
}
try
{
e.processBlock(bytes, 1, bytes.length - 1);
}
catch (DataLengthException ex)
{
fail("in range block failed");
}
}
/**
* this test can take quite a while
*/
private void testGeneration(
int size)
{
ElGamalParametersGenerator pGen = new ElGamalParametersGenerator();
pGen.init(size, 10, new SecureRandom());
ElGamalParameters elParams = pGen.generateParameters();
ElGamalKeyGenerationParameters params = new ElGamalKeyGenerationParameters(new SecureRandom(), elParams);
ElGamalKeyPairGenerator kpGen = new ElGamalKeyPairGenerator();
kpGen.init(params);
// generate first pair
AsymmetricCipherKeyPair pair = kpGen.generateKeyPair();
ElGamalPublicKeyParameters pu = (ElGamalPublicKeyParameters)pair.getPublic();
ElGamalPrivateKeyParameters pv = (ElGamalPrivateKeyParameters)pair.getPrivate();
ElGamalEngine e = new ElGamalEngine();
e.init(true, new ParametersWithRandom(pu, new SecureRandom()));
String message = "This is a test";
byte[] pText = message.getBytes();
byte[] cText = e.processBlock(pText, 0, pText.length);
e.init(false, pv);
pText = e.processBlock(cText, 0, cText.length);
if (!message.equals(new String(pText)))
{
fail("generation test failed");
}
}
public void performTest()
{
testEnc(512, g512, p512);
testEnc(768, g768, p768);
testEnc(1024, g1024, p1024);
// generation test.
testGeneration(258);
}
public static void main(
String[] args)
{
runTest(new ElGamalTest());
}
}
|
package org.opencms.db.jpa;
import org.opencms.configuration.CmsConfigurationManager;
import org.opencms.configuration.CmsParameterConfiguration;
import org.opencms.db.CmsDbContext;
import org.opencms.db.CmsDbSqlException;
import org.opencms.db.CmsDriverManager;
import org.opencms.db.CmsSubscriptionFilter;
import org.opencms.db.CmsSubscriptionReadMode;
import org.opencms.db.CmsVisitEntry;
import org.opencms.db.CmsVisitEntryFilter;
import org.opencms.db.CmsVisitedByFilter;
import org.opencms.db.I_CmsDriver;
import org.opencms.db.I_CmsSubscriptionDriver;
import org.opencms.db.jpa.persistence.CmsDAOSubscription;
import org.opencms.db.jpa.persistence.CmsDAOSubscriptionVisit;
import org.opencms.db.jpa.utils.CmsQueryLongParameter;
import org.opencms.db.jpa.utils.CmsQueryStringParameter;
import org.opencms.db.jpa.utils.I_CmsQueryParameter;
import org.opencms.file.CmsDataAccessException;
import org.opencms.file.CmsGroup;
import org.opencms.file.CmsResource;
import org.opencms.file.CmsUser;
import org.opencms.file.history.I_CmsHistoryResource;
import org.opencms.main.CmsException;
import org.opencms.main.CmsLog;
import org.opencms.main.OpenCms;
import org.opencms.security.CmsPrincipal;
import org.opencms.util.CmsFileUtil;
import org.opencms.util.CmsPair;
import org.opencms.util.CmsStringUtil;
import org.opencms.util.CmsUUID;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import javax.persistence.PersistenceException;
import javax.persistence.Query;
import org.apache.commons.logging.Log;
/**
* JPA database server implementation of the subscription driver methods.<p>
*
* @since 8.0.0
*/
public class CmsSubscriptionDriver implements I_CmsDriver, I_CmsSubscriptionDriver {
/** Query key. */
private static final String C_RESOURCES_SELECT_BY_PARENT_UUID = "C_RESOURCES_SELECT_BY_PARENT_UUID";
/** Query key. */
private static final String C_RESOURCES_SELECT_BY_PATH_PREFIX = "C_RESOURCES_SELECT_BY_PATH_PREFIX";
/** Query key. */
private static final String C_SUBSCRIPTION_CHECK_2 = "C_SUBSCRIPTION_CHECK_2";
/** Query key. */
private static final String C_SUBSCRIPTION_DELETE = "C_SUBSCRIPTION_DELETE";
/** Query key. */
private static final String C_SUBSCRIPTION_DELETE_FILTER_DATE = "C_SUBSCRIPTION_DELETE_FILTER_DATE";
/** Query key. */
private static final String C_SUBSCRIPTION_DELETE_FILTER_PRINCIPAL = "C_SUBSCRIPTION_DELETE_FILTER_PRINCIPAL";
/** Query key. */
private static final String C_SUBSCRIPTION_DELETE_FILTER_STRUCTURE = "C_SUBSCRIPTION_DELETE_FILTER_STRUCTURE";
/** Query key. */
private static final String C_SUBSCRIPTION_DELETED = "C_SUBSCRIPTION_DELETED";
/** Query key. */
private static final String C_SUBSCRIPTION_DELETED_FILTER_PRINCIPAL_SINGLE = "C_SUBSCRIPTION_DELETED_FILTER_PRINCIPAL_SINGLE";
/** Query key. */
private static final String C_SUBSCRIPTION_DELETED_FILTER_PRINCIPALS = "C_SUBSCRIPTION_DELETED_FILTER_PRINCIPALS";
/** Query key. */
private static final String C_SUBSCRIPTION_FILTER_PRINCIPAL_SINGLE = "C_SUBSCRIPTION_FILTER_PRINCIPAL_SINGLE";
/** Query key. */
private static final String C_SUBSCRIPTION_FILTER_PRINCIPALS = "C_SUBSCRIPTION_FILTER_PRINCIPALS";
/** Query key. */
private static final String C_SUBSCRIPTION_FILTER_PRINCIPALS_END = "C_SUBSCRIPTION_FILTER_PRINCIPALS_END";
/** Query key. */
private static final String C_SUBSCRIPTION_FILTER_READ = "C_SUBSCRIPTION_FILTER_READ";
/** Query key. */
private static final String C_SUBSCRIPTION_FILTER_RESOURCES_DATE_MODIFIED = "C_SUBSCRIPTION_FILTER_RESOURCES_DATE_MODIFIED";
/** Query key. */
private static final String C_SUBSCRIPTION_READ_ALL_1 = "C_SUBSCRIPTION_READ_ALL_1";
/** Query key. */
private static final String C_SUBSCRIPTION_UPDATE_DATE_2 = "C_SUBSCRIPTION_UPDATE_DATE_2";
/** Query key. */
private static final String C_VISIT_DELETE_ENTRIES = "C_VISIT_DELETE_ENTRIES";
/** Query key. */
private static final String C_VISIT_FILTER_DATE_FROM = "C_VISIT_FILTER_DATE_FROM";
/** Query key. */
private static final String C_VISIT_FILTER_DATE_TO = "C_VISIT_FILTER_DATE_TO";
/** Query key. */
private static final String C_VISIT_FILTER_STRUCTURE_ID = "C_VISIT_FILTER_STRUCTURE_ID";
/** Query key. */
private static final String C_VISIT_FILTER_USER_ID = "C_VISIT_FILTER_USER_ID";
/** Query key. */
private static final String C_VISIT_READ_ENTRIES = "C_VISIT_READ_ENTRIES";
/** Query key. */
private static final String C_VISITED_USER_COUNT_1 = "C_VISITED_USER_COUNT_1";
/** Query key. */
private static final String C_VISITED_USER_DELETE_GETDATE_2 = "C_VISITED_USER_DELETE_GETDATE_2";
/** Query key. */
private static final String C_VISITED_USER_READ_4 = "C_VISITED_USER_READ_4";
/** The log object for this class. */
private static final Log LOG = CmsLog.getLog(org.opencms.db.generic.CmsSubscriptionDriver.class);
/** A reference to the driver manager used by this driver. */
protected CmsDriverManager m_driverManager;
/** The SQL manager used by this driver. */
protected CmsSqlManager m_sqlManager;
/**
* @see org.opencms.db.I_CmsSubscriptionDriver#deleteVisits(org.opencms.db.CmsDbContext, java.lang.String, org.opencms.db.CmsVisitEntryFilter)
*/
public void deleteVisits(CmsDbContext dbc, String poolName, CmsVisitEntryFilter filter)
throws CmsDataAccessException {
try {
// compose statement
StringBuffer queryBuf = new StringBuffer(256);
queryBuf.append(m_sqlManager.readQuery(C_VISIT_DELETE_ENTRIES));
CmsPair<String, List<I_CmsQueryParameter>> conditionsAndParams = prepareVisitConditions(filter);
queryBuf.append(conditionsAndParams.getFirst());
if (LOG.isDebugEnabled()) {
LOG.debug(queryBuf.toString());
}
Query q = m_sqlManager.createQueryFromJPQL(dbc, queryBuf.toString());
List<I_CmsQueryParameter> params = conditionsAndParams.getSecond();
for (int i = 0; i < params.size(); i++) {
I_CmsQueryParameter param = conditionsAndParams.getSecond().get(i);
param.insertIntoQuery(q, i + 1);
}
// execute
q.executeUpdate();
} catch (PersistenceException e) {
throw new CmsDbSqlException(Messages.get().container(Messages.ERR_GENERIC_SQL_1, C_VISIT_DELETE_ENTRIES), e);
}
}
/**
* @see org.opencms.db.I_CmsSubscriptionDriver#getDateLastVisitedBy(org.opencms.db.CmsDbContext, java.lang.String, org.opencms.file.CmsUser, org.opencms.file.CmsResource)
*/
public long getDateLastVisitedBy(CmsDbContext dbc, String poolName, CmsUser user, CmsResource resource)
throws CmsException {
CmsVisitEntryFilter filter = CmsVisitEntryFilter.ALL.filterResource(resource.getStructureId()).filterUser(
user.getId());
List<CmsVisitEntry> entries = readVisits(dbc, poolName, filter);
if (!entries.isEmpty()) {
return entries.get(0).getDate();
}
return 0;
}
/**
* @see org.opencms.db.I_CmsSubscriptionDriver#getSqlManager()
*/
public CmsSqlManager getSqlManager() {
return m_sqlManager;
}
/**
* @see org.opencms.db.I_CmsDriver#init(org.opencms.db.CmsDbContext, org.opencms.configuration.CmsConfigurationManager, java.util.List, org.opencms.db.CmsDriverManager)
*/
public void init(
CmsDbContext dbc,
CmsConfigurationManager configurationManager,
List<String> successiveDrivers,
CmsDriverManager driverManager) {
// TODO: Auto-generated method stub
CmsParameterConfiguration config = configurationManager.getConfiguration();
String poolUrl = config.get("db.subscription.pool");
String classname = config.get("db.subscription.sqlmanager");
m_sqlManager = this.initSqlManager(classname);
m_driverManager = driverManager;
if (CmsLog.INIT.isInfoEnabled()) {
CmsLog.INIT.info(Messages.get().getBundle().key(Messages.INIT_ASSIGNED_POOL_1, poolUrl));
}
if ((successiveDrivers != null) && !successiveDrivers.isEmpty()) {
if (LOG.isWarnEnabled()) {
LOG.warn(Messages.get().getBundle().key(
Messages.LOG_SUCCESSIVE_DRIVERS_UNSUPPORTED_1,
getClass().getName()));
}
}
}
/**
* @see org.opencms.db.I_CmsSubscriptionDriver#initSqlManager(java.lang.String)
*/
public CmsSqlManager initSqlManager(String classname) {
return CmsSqlManager.getInstance(classname);
}
/**
* @see org.opencms.db.I_CmsSubscriptionDriver#markResourceAsVisitedBy(org.opencms.db.CmsDbContext, java.lang.String, org.opencms.file.CmsResource, org.opencms.file.CmsUser)
*/
public void markResourceAsVisitedBy(CmsDbContext dbc, String poolName, CmsResource resource, CmsUser user)
throws CmsDataAccessException {
boolean entryExists = false;
CmsVisitEntryFilter filter = CmsVisitEntryFilter.ALL.filterResource(resource.getStructureId()).filterUser(
user.getId());
// delete existing visited entry for the resource
if (readVisits(dbc, OpenCms.getSubscriptionManager().getPoolName(), filter).size() > 0) {
entryExists = true;
deleteVisits(dbc, OpenCms.getSubscriptionManager().getPoolName(), filter);
}
CmsVisitEntry entry = new CmsVisitEntry(user.getId(), System.currentTimeMillis(), resource.getStructureId());
addVisit(dbc, poolName, entry);
if (!entryExists) {
// new entry, check if maximum number of stored visited resources is exceeded
int count = 0;
try {
Query q = m_sqlManager.createQuery(dbc, dbc.currentProject(), C_VISITED_USER_COUNT_1);
q.setParameter(1, user.getId().toString());
count = ((Number)q.getSingleResult()).intValue();
int maxCount = OpenCms.getSubscriptionManager().getMaxVisitedCount();
if (count > maxCount) {
// delete old visited entries
q = m_sqlManager.createQuery(dbc, dbc.currentProject(), C_VISITED_USER_DELETE_GETDATE_2);
q.setParameter(1, user.getId().toString());
q.setMaxResults(count - maxCount);
@SuppressWarnings("unchecked")
List<Number> res = q.getResultList();
long deleteDate = 0;
for (Number n : res) {
// get last date of result set
deleteDate = n.longValue();
}
if (deleteDate > 0) {
filter = CmsVisitEntryFilter.ALL.filterUser(user.getId()).filterTo(deleteDate);
deleteVisits(dbc, OpenCms.getSubscriptionManager().getPoolName(), filter);
}
}
} catch (PersistenceException e) {
throw new CmsDbSqlException(Messages.get().container(
Messages.ERR_GENERIC_SQL_1,
C_VISITED_USER_DELETE_GETDATE_2), e);
}
}
}
/**
* @see org.opencms.db.I_CmsSubscriptionDriver#readAllSubscribedResources(org.opencms.db.CmsDbContext, java.lang.String, org.opencms.security.CmsPrincipal)
*/
public List<CmsResource> readAllSubscribedResources(CmsDbContext dbc, String poolName, CmsPrincipal principal)
throws CmsDataAccessException {
CmsResource currentResource = null;
List<CmsResource> resources = new ArrayList<CmsResource>();
try {
Query q = m_sqlManager.createQuery(dbc, dbc.currentProject(), C_SUBSCRIPTION_READ_ALL_1);
q.setParameter(1, principal.getId().toString());
@SuppressWarnings("unchecked")
List<Object[]> res = q.getResultList();
for (Object[] obj : res) {
currentResource = ((CmsVfsDriver)m_driverManager.getVfsDriver(dbc)).createFile(
obj,
dbc.currentProject().getUuid(),
false);
resources.add(currentResource);
}
} catch (PersistenceException e) {
throw new CmsDbSqlException(
Messages.get().container(Messages.ERR_GENERIC_SQL_1, C_SUBSCRIPTION_READ_ALL_1),
e);
}
return resources;
}
/**
* @see org.opencms.db.I_CmsSubscriptionDriver#readResourcesVisitedBy(org.opencms.db.CmsDbContext, java.lang.String, org.opencms.db.CmsVisitedByFilter)
*/
public List<CmsResource> readResourcesVisitedBy(CmsDbContext dbc, String poolName, CmsVisitedByFilter filter)
throws CmsDataAccessException {
CmsResource currentResource = null;
StringBuffer conditions = new StringBuffer(256);
List<String> params = new ArrayList<String>(1);
List<CmsResource> resources = new ArrayList<CmsResource>();
try {
// path filter
if (CmsStringUtil.isNotEmpty(filter.getParentPath())) {
CmsResource parent = m_driverManager.getVfsDriver(dbc).readResource(
dbc,
dbc.currentProject().getUuid(),
filter.getParentPath(),
false);
conditions.append(BEGIN_INCLUDE_CONDITION);
if (filter.isIncludeSubFolders()) {
conditions.append(m_sqlManager.readQuery(dbc.currentProject(), C_RESOURCES_SELECT_BY_PATH_PREFIX));
params.add(CmsFileUtil.addTrailingSeparator(CmsVfsDriver.escapeDbWildcard(filter.getParentPath()))
+ "%");
} else {
conditions.append(m_sqlManager.readQuery(dbc.currentProject(), C_RESOURCES_SELECT_BY_PARENT_UUID));
params.add(parent.getStructureId().toString());
}
conditions.append(END_CONDITION);
}
String query = m_sqlManager.readQuery(dbc.currentProject(), C_VISITED_USER_READ_4);
query = CmsStringUtil.substitute(query, "%(CONDITIONS)", conditions.toString());
Query q = m_sqlManager.createQueryFromJPQL(dbc, query);
q.setParameter(1, filter.getUser().getId().toString());
q.setParameter(2, Long.valueOf(filter.getFromDate()));
q.setParameter(3, Long.valueOf(filter.getToDate()));
for (int i = 0; i < params.size(); i++) {
q.setParameter(i + 4, params.get(i));
}
@SuppressWarnings("unchecked")
List<Object[]> res = q.getResultList();
for (Object[] obj : res) {
currentResource = ((CmsVfsDriver)m_driverManager.getVfsDriver(dbc)).createFile(
obj,
dbc.currentProject().getUuid(),
false);
resources.add(currentResource);
}
} catch (PersistenceException e) {
throw new CmsDbSqlException(Messages.get().container(Messages.ERR_GENERIC_SQL_1, C_VISITED_USER_READ_4), e);
}
return resources;
}
/**
* @see org.opencms.db.I_CmsSubscriptionDriver#readSubscribedDeletedResources(org.opencms.db.CmsDbContext, java.lang.String, org.opencms.file.CmsUser, java.util.List, org.opencms.file.CmsResource, boolean, long)
*/
public List<I_CmsHistoryResource> readSubscribedDeletedResources(
CmsDbContext dbc,
String poolName,
CmsUser user,
List<CmsGroup> groups,
CmsResource parent,
boolean includeSubFolders,
long deletedFrom) throws CmsDataAccessException {
List<I_CmsHistoryResource> resources = new ArrayList<I_CmsHistoryResource>();
Set<CmsUUID> historyIDs = new HashSet<CmsUUID>();
List<String> principalIds = new ArrayList<String>();
// add user ID
principalIds.add(user.getId().toString());
// add group IDs
if ((groups != null) && !groups.isEmpty()) {
Iterator<CmsGroup> it = groups.iterator();
while (it.hasNext()) {
principalIds.add(it.next().getId().toString());
}
}
StringBuffer conditions = new StringBuffer(256);
List<String> params = new ArrayList<String>();
conditions.append(m_sqlManager.readQuery(C_SUBSCRIPTION_DELETED));
if (principalIds.size() == 1) {
// single principal filter
conditions.append(BEGIN_INCLUDE_CONDITION);
conditions.append(m_sqlManager.readQuery(C_SUBSCRIPTION_DELETED_FILTER_PRINCIPAL_SINGLE));
params.add(principalIds.get(0));
conditions.append(END_CONDITION);
} else {
// multiple principals filter
conditions.append(BEGIN_INCLUDE_CONDITION);
conditions.append(m_sqlManager.readQuery(C_SUBSCRIPTION_DELETED_FILTER_PRINCIPALS));
conditions.append(BEGIN_CONDITION);
Iterator<String> it = principalIds.iterator();
while (it.hasNext()) {
params.add(it.next());
conditions.append("?");
if (it.hasNext()) {
conditions.append(", ");
}
}
conditions.append(END_CONDITION);
conditions.append(END_CONDITION);
}
try {
Query q = m_sqlManager.createQueryFromJPQL(dbc, conditions.toString());
// set parameters
q.setParameter(1, Long.valueOf(deletedFrom));
for (int i = 0; i < params.size(); i++) {
q.setParameter(i + 2, params.get(i));
}
@SuppressWarnings("unchecked")
List<String> result = q.getResultList();
for (String id : result) {
historyIDs.add(new CmsUUID(id));
}
} catch (PersistenceException e) {
throw new CmsDbSqlException(Messages.get().container(Messages.ERR_GENERIC_SQL_1, C_SUBSCRIPTION_DELETED), e);
}
// get the matching history resources from the found structure IDs
String parentFolderPath = "";
if (parent != null) {
parentFolderPath = CmsResource.getFolderPath(parent.getRootPath());
}
for (Iterator<CmsUUID> i = historyIDs.iterator(); i.hasNext();) {
CmsUUID id = i.next();
int version = m_driverManager.getHistoryDriver(dbc).readLastVersion(dbc, id);
if (version > 0) {
I_CmsHistoryResource histRes = m_driverManager.getHistoryDriver(dbc).readResource(dbc, id, version);
if (parent != null) {
if (!includeSubFolders
&& !parentFolderPath.equals(CmsResource.getFolderPath(histRes.getRootPath()))) {
// deleted history resource is not in the specified parent folder, skip it
continue;
} else if (includeSubFolders && !histRes.getRootPath().startsWith(parentFolderPath)) {
// deleted history resource is not in the specified parent folder or sub folder, skip it
continue;
}
}
resources.add(histRes);
}
}
return resources;
}
/**
* @see org.opencms.db.I_CmsSubscriptionDriver#readSubscribedResources(org.opencms.db.CmsDbContext, java.lang.String, org.opencms.db.CmsSubscriptionFilter)
*/
public List<CmsResource> readSubscribedResources(CmsDbContext dbc, String poolName, CmsSubscriptionFilter filter)
throws CmsDataAccessException {
CmsResource currentResource = null;
List<CmsResource> resources = new ArrayList<CmsResource>();
String queryBuf = m_sqlManager.readQuery(dbc.currentProject(), C_SUBSCRIPTION_FILTER_READ);
StringBuffer conditions = new StringBuffer(256);
List<I_CmsQueryParameter> params = new ArrayList<I_CmsQueryParameter>();
boolean userDefined = filter.getUser() != null;
boolean groupsDefined = !filter.getGroups().isEmpty();
if (!groupsDefined && !userDefined) {
filter.setUser(dbc.currentUser());
userDefined = true;
}
// check if a user has been set for the "visited" and "unvisited" mode
if (!filter.getMode().isAll() && (filter.getUser() == null)) {
// change the mode, without user the other modes are not applicable
filter.setMode(CmsSubscriptionReadMode.ALL);
}
List<String> principalIds = new ArrayList<String>();
// add user ID
if (userDefined) {
principalIds.add(filter.getUser().getId().toString());
}
// add group IDs
if (groupsDefined) {
Iterator<CmsGroup> it = filter.getGroups().iterator();
while (it.hasNext()) {
principalIds.add(it.next().getId().toString());
}
}
if (principalIds.size() == 1) {
// single principal filter
conditions.append(BEGIN_CONDITION);
conditions.append(m_sqlManager.readQuery(dbc.currentProject(), C_SUBSCRIPTION_FILTER_PRINCIPAL_SINGLE));
params.add(new CmsQueryStringParameter(principalIds.get(0)));
conditions.append(END_CONDITION);
} else {
// multiple principals filter
conditions.append(BEGIN_CONDITION);
conditions.append(m_sqlManager.readQuery(C_SUBSCRIPTION_FILTER_PRINCIPALS));
conditions.append(BEGIN_CONDITION);
Iterator<String> it = principalIds.iterator();
while (it.hasNext()) {
params.add(new CmsQueryStringParameter(it.next()));
conditions.append("?");
if (it.hasNext()) {
conditions.append(", ");
}
}
conditions.append(END_CONDITION);
conditions.append(m_sqlManager.readQuery(dbc.currentProject(), C_SUBSCRIPTION_FILTER_PRINCIPALS_END));
conditions.append(END_CONDITION);
}
// path filter
if (CmsStringUtil.isNotEmpty(filter.getParentPath())) {
CmsResource parent = m_driverManager.getVfsDriver(dbc).readResource(
dbc,
dbc.currentProject().getUuid(),
filter.getParentPath(),
false);
conditions.append(BEGIN_INCLUDE_CONDITION);
if (filter.isIncludeSubFolders()) {
conditions.append(m_sqlManager.readQuery(dbc.currentProject(), C_RESOURCES_SELECT_BY_PATH_PREFIX));
params.add(new CmsQueryStringParameter(
CmsFileUtil.addTrailingSeparator(CmsVfsDriver.escapeDbWildcard(filter.getParentPath())) + "%"));
} else {
conditions.append(m_sqlManager.readQuery(dbc.currentProject(), C_RESOURCES_SELECT_BY_PARENT_UUID));
params.add(new CmsQueryStringParameter(parent.getStructureId().toString()));
}
conditions.append(END_CONDITION);
}
// check from and to date
if ((filter.getFromDate() > 0) || (filter.getToDate() < Long.MAX_VALUE)) {
conditions.append(BEGIN_INCLUDE_CONDITION);
conditions.append(m_sqlManager.readQuery(
dbc.currentProject(),
C_SUBSCRIPTION_FILTER_RESOURCES_DATE_MODIFIED));
params.add(new CmsQueryLongParameter(filter.getFromDate()));
params.add(new CmsQueryLongParameter(filter.getToDate()));
conditions.append(END_CONDITION);
}
try {
queryBuf = CmsStringUtil.substitute(queryBuf, "%(CONDITIONS)", conditions.toString());
if (LOG.isDebugEnabled()) {
LOG.debug(queryBuf.toString());
}
Query q = m_sqlManager.createQueryFromJPQL(dbc, queryBuf);
// set parameters
for (int i = 0; i < params.size(); i++) {
I_CmsQueryParameter param = params.get(i);
param.insertIntoQuery(q, i + 1);
}
@SuppressWarnings("unchecked")
List<Object[]> res = q.getResultList();
for (Object[] obj : res) {
currentResource = ((CmsVfsDriver)m_driverManager.getVfsDriver(dbc)).createFile(
obj,
dbc.currentProject().getUuid(),
false);
resources.add(currentResource);
}
            // filter the result if in visited/unvisited mode (faster than creating a query with even more joined tables)
if (!filter.getMode().isAll()) {
List<CmsResource> result = new ArrayList<CmsResource>(resources.size());
for (Iterator<CmsResource> i = resources.iterator(); i.hasNext();) {
CmsResource resource = i.next();
long visitedDate = 0;
try {
visitedDate = m_driverManager.getDateLastVisitedBy(dbc, poolName, filter.getUser(), resource);
} catch (CmsException e) {
throw new CmsDbSqlException(Messages.get().container(Messages.ERR_GENERIC_SQL_0), e);
}
if (filter.getMode().isUnVisited() && (visitedDate >= resource.getDateLastModified())) {
// unvisited mode: resource was visited after the last modification, skip it
continue;
}
if (filter.getMode().isVisited() && (resource.getDateLastModified() > visitedDate)) {
// visited mode: resource was not visited after last modification, skip it
continue;
}
// add the current resource to the result
result.add(resource);
}
resources = result;
}
} catch (PersistenceException e) {
throw new CmsDbSqlException(
Messages.get().container(Messages.ERR_GENERIC_SQL_1, C_SUBSCRIPTION_FILTER_READ),
e);
}
return resources;
}
/**
* Reads {@link CmsVisitEntry} objects from the database.<p>
*
* @param dbc the database context to use
* @param poolName the name of the pool which should be used for the database operation
* @param filter a filter for constraining the list of results
*
* @return a list of visit entries
*
* @throws CmsDataAccessException if the database operation fails
*/
public List<CmsVisitEntry> readVisits(CmsDbContext dbc, String poolName, CmsVisitEntryFilter filter)
throws CmsDataAccessException {
List<CmsVisitEntry> entries = new ArrayList<CmsVisitEntry>();
try {
// compose statement
StringBuffer queryBuf = new StringBuffer(256);
queryBuf.append(m_sqlManager.readQuery(C_VISIT_READ_ENTRIES));
CmsPair<String, List<I_CmsQueryParameter>> conditionsAndParameters = prepareVisitConditions(filter);
List<I_CmsQueryParameter> params = conditionsAndParameters.getSecond();
queryBuf.append(conditionsAndParameters.getFirst());
if (LOG.isDebugEnabled()) {
LOG.debug(queryBuf.toString());
}
Query q = m_sqlManager.createQueryFromJPQL(dbc, queryBuf.toString());
for (int i = 0; i < params.size(); i++) {
I_CmsQueryParameter param = params.get(i);
param.insertIntoQuery(q, i + 1);
}
// execute
@SuppressWarnings("unchecked")
List<CmsDAOSubscriptionVisit> res = q.getResultList();
for (CmsDAOSubscriptionVisit sv : res) {
// get results
entries.add(internalReadVisitEntry(sv));
}
} catch (PersistenceException e) {
throw new CmsDbSqlException(Messages.get().container(Messages.ERR_GENERIC_SQL_1, C_VISIT_READ_ENTRIES), e);
}
return entries;
}
/**
* @see org.opencms.db.I_CmsSubscriptionDriver#setSubscribedResourceAsDeleted(org.opencms.db.CmsDbContext, java.lang.String, org.opencms.file.CmsResource)
*/
public void setSubscribedResourceAsDeleted(CmsDbContext dbc, String poolName, CmsResource resource)
throws CmsDataAccessException {
long deletedTime = System.currentTimeMillis();
try {
// set resource as deleted for all users and groups
Query q = m_sqlManager.createQuery(dbc, C_SUBSCRIPTION_UPDATE_DATE_2);
q.setParameter(1, resource.getStructureId().toString());
@SuppressWarnings("unchecked")
List<CmsDAOSubscription> res = q.getResultList();
for (CmsDAOSubscription s : res) {
s.setDateDeleted(deletedTime);
}
} catch (PersistenceException e) {
throw new CmsDbSqlException(Messages.get().container(
Messages.ERR_GENERIC_SQL_1,
C_SUBSCRIPTION_UPDATE_DATE_2), e);
}
}
/**
* @see org.opencms.db.I_CmsSubscriptionDriver#subscribeResourceFor(org.opencms.db.CmsDbContext, java.lang.String, org.opencms.security.CmsPrincipal, org.opencms.file.CmsResource)
*/
public void subscribeResourceFor(CmsDbContext dbc, String poolName, CmsPrincipal principal, CmsResource resource)
throws CmsDataAccessException {
try {
Query q = m_sqlManager.createQuery(dbc, C_SUBSCRIPTION_CHECK_2);
q.setParameter(1, principal.getId().toString());
q.setParameter(2, resource.getStructureId().toString());
@SuppressWarnings("unchecked")
List<CmsDAOSubscription> res = q.getResultList();
            // only create a subscription entry if the principal is not yet subscribed to the resource
            if (res.isEmpty()) {
                // subscribe principal
                CmsDAOSubscription sb = new CmsDAOSubscription();
                sb.setPrincipalId(principal.getId().toString());
                sb.setStructureId(resource.getStructureId().toString());
                sb.setDateDeleted(0);
                m_sqlManager.persist(dbc, sb);
            }
} catch (PersistenceException e) {
throw new CmsDbSqlException(Messages.get().container(Messages.ERR_GENERIC_SQL_1, C_SUBSCRIPTION_CHECK_2), e);
}
}
/**
* @see org.opencms.db.I_CmsSubscriptionDriver#unsubscribeAllDeletedResources(org.opencms.db.CmsDbContext, java.lang.String, long)
*/
public void unsubscribeAllDeletedResources(CmsDbContext dbc, String poolName, long deletedTo)
throws CmsDataAccessException {
try {
StringBuffer conditions = new StringBuffer(256);
// unsubscribe all deleted resources
conditions.append(m_sqlManager.readQuery(C_SUBSCRIPTION_DELETE));
conditions.append(BEGIN_CONDITION);
conditions.append(m_sqlManager.readQuery(C_SUBSCRIPTION_DELETE_FILTER_DATE));
conditions.append(END_CONDITION);
Query q = m_sqlManager.createQueryFromJPQL(dbc, conditions.toString());
q.setParameter(1, Long.valueOf(deletedTo));
@SuppressWarnings("unchecked")
List<CmsDAOSubscription> res = q.getResultList();
for (CmsDAOSubscription sb : res) {
m_sqlManager.remove(dbc, sb);
}
} catch (PersistenceException e) {
throw new CmsDbSqlException(Messages.get().container(Messages.ERR_GENERIC_SQL_1, C_SUBSCRIPTION_DELETE), e);
}
}
/**
* @see org.opencms.db.I_CmsSubscriptionDriver#unsubscribeAllResourcesFor(org.opencms.db.CmsDbContext, java.lang.String, org.opencms.security.CmsPrincipal)
*/
public void unsubscribeAllResourcesFor(CmsDbContext dbc, String poolName, CmsPrincipal principal)
throws CmsDataAccessException {
try {
if (principal != null) {
StringBuffer conditions = new StringBuffer(256);
conditions.append(m_sqlManager.readQuery(C_SUBSCRIPTION_DELETE));
conditions.append(BEGIN_CONDITION);
conditions.append(m_sqlManager.readQuery(C_SUBSCRIPTION_DELETE_FILTER_PRINCIPAL));
conditions.append(END_CONDITION);
Query q = m_sqlManager.createQueryFromJPQL(dbc, conditions.toString());
q.setParameter(1, principal.getId().toString());
@SuppressWarnings("unchecked")
List<CmsDAOSubscription> res = q.getResultList();
for (CmsDAOSubscription sb : res) {
m_sqlManager.remove(dbc, sb);
}
}
} catch (PersistenceException e) {
throw new CmsDbSqlException(Messages.get().container(Messages.ERR_GENERIC_SQL_1, C_SUBSCRIPTION_DELETE), e);
}
}
/**
* @see org.opencms.db.I_CmsSubscriptionDriver#unsubscribeResourceFor(org.opencms.db.CmsDbContext, java.lang.String, org.opencms.security.CmsPrincipal, org.opencms.file.CmsResource)
*/
public void unsubscribeResourceFor(CmsDbContext dbc, String poolName, CmsPrincipal principal, CmsResource resource)
throws CmsDataAccessException {
try {
StringBuffer conditions = new StringBuffer(256);
conditions.append(m_sqlManager.readQuery(C_SUBSCRIPTION_DELETE));
conditions.append(BEGIN_CONDITION);
conditions.append(m_sqlManager.readQuery(C_SUBSCRIPTION_DELETE_FILTER_PRINCIPAL));
conditions.append(END_CONDITION);
conditions.append(BEGIN_INCLUDE_CONDITION);
conditions.append(m_sqlManager.readQuery(C_SUBSCRIPTION_DELETE_FILTER_STRUCTURE));
conditions.append(END_CONDITION);
Query q = m_sqlManager.createQueryFromJPQL(dbc, conditions.toString());
q.setParameter(1, principal.getId().toString());
q.setParameter(2, resource.getStructureId().toString());
@SuppressWarnings("unchecked")
List<CmsDAOSubscription> res = q.getResultList();
for (CmsDAOSubscription sb : res) {
m_sqlManager.remove(dbc, sb);
}
} catch (PersistenceException e) {
throw new CmsDbSqlException(Messages.get().container(Messages.ERR_GENERIC_SQL_1, C_SUBSCRIPTION_DELETE), e);
}
}
/**
* @see org.opencms.db.I_CmsSubscriptionDriver#unsubscribeResourceForAll(org.opencms.db.CmsDbContext, java.lang.String, org.opencms.file.CmsResource)
*/
public void unsubscribeResourceForAll(CmsDbContext dbc, String poolName, CmsResource resource)
throws CmsDataAccessException {
try {
StringBuffer conditions = new StringBuffer(256);
// unsubscribe resource for all principals
conditions.append(m_sqlManager.readQuery(C_SUBSCRIPTION_DELETE));
conditions.append(BEGIN_CONDITION);
conditions.append(m_sqlManager.readQuery(C_SUBSCRIPTION_DELETE_FILTER_STRUCTURE));
conditions.append(END_CONDITION);
Query q = m_sqlManager.createQueryFromJPQL(dbc, conditions.toString());
q.setParameter(1, resource.getStructureId().toString());
@SuppressWarnings("unchecked")
List<CmsDAOSubscription> res = q.getResultList();
for (CmsDAOSubscription sb : res) {
m_sqlManager.remove(dbc, sb);
}
} catch (PersistenceException e) {
throw new CmsDbSqlException(Messages.get().container(Messages.ERR_GENERIC_SQL_1, C_SUBSCRIPTION_DELETE), e);
}
}
/**
* Adds an entry to the table of visits.<p>
*
* @param dbc the database context to use
* @param poolName the name of the database pool to use
* @param visit the visit bean
*
* @throws CmsDbSqlException if the database operation fails
*/
protected void addVisit(CmsDbContext dbc, String poolName, CmsVisitEntry visit) throws CmsDbSqlException {
try {
CmsDAOSubscriptionVisit sv = new CmsDAOSubscriptionVisit();
sv.setUserId(visit.getUserId().toString());
sv.setVisitDate(visit.getDate());
sv.setStructureId(visit.getStructureId() == null ? null : visit.getStructureId().toString());
m_sqlManager.persist(dbc, sv);
} catch (PersistenceException e) {
throw new CmsDbSqlException(Messages.get().container(Messages.ERR_GENERIC_SQL_1, ""), e);
}
}
/**
* Creates a new {@link CmsVisitEntry} object from the given result set entry.<p>
*
* @param sv the result set
*
* @return the new {@link CmsVisitEntry} object
*/
protected CmsVisitEntry internalReadVisitEntry(CmsDAOSubscriptionVisit sv) {
CmsUUID userId = new CmsUUID(sv.getUserId());
long date = sv.getVisitDate();
CmsUUID structureId = new CmsUUID(sv.getStructureId());
return new CmsVisitEntry(userId, date, structureId);
}
/**
* Build the whole WHERE SQL statement part for the given visit entry filter.<p>
*
* @param filter the filter
*
* @return a pair containing both the SQL and the parameters for it
*/
protected CmsPair<String, List<I_CmsQueryParameter>> prepareVisitConditions(CmsVisitEntryFilter filter) {
List<I_CmsQueryParameter> params = new ArrayList<I_CmsQueryParameter>();
StringBuffer conditions = new StringBuffer();
// user id filter
if (filter.getUserId() != null) {
if (conditions.length() == 0) {
conditions.append(BEGIN_CONDITION);
} else {
conditions.append(BEGIN_INCLUDE_CONDITION);
}
conditions.append(m_sqlManager.readQuery(C_VISIT_FILTER_USER_ID));
params.add(new CmsQueryStringParameter(filter.getUserId().toString()));
conditions.append(END_CONDITION);
}
// resource id filter
if (filter.getStructureId() != null) {
if (conditions.length() == 0) {
conditions.append(BEGIN_CONDITION);
} else {
conditions.append(BEGIN_INCLUDE_CONDITION);
}
conditions.append(m_sqlManager.readQuery(C_VISIT_FILTER_STRUCTURE_ID));
params.add(new CmsQueryStringParameter(filter.getStructureId().toString()));
conditions.append(END_CONDITION);
}
// date from filter
if (filter.getDateFrom() != CmsResource.DATE_RELEASED_DEFAULT) {
if (conditions.length() == 0) {
conditions.append(BEGIN_CONDITION);
} else {
conditions.append(BEGIN_INCLUDE_CONDITION);
}
conditions.append(m_sqlManager.readQuery(C_VISIT_FILTER_DATE_FROM));
params.add(new CmsQueryLongParameter(filter.getDateFrom()));
conditions.append(END_CONDITION);
}
// date to filter
if (filter.getDateTo() != CmsResource.DATE_RELEASED_DEFAULT) {
if (conditions.length() == 0) {
conditions.append(BEGIN_CONDITION);
} else {
conditions.append(BEGIN_INCLUDE_CONDITION);
}
conditions.append(m_sqlManager.readQuery(C_VISIT_FILTER_DATE_TO));
params.add(new CmsQueryLongParameter(filter.getDateTo()));
conditions.append(END_CONDITION);
}
return CmsPair.create(conditions.toString(), params);
}
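    /*
     * Sketch of a composed WHERE part (illustrative; the actual SQL fragments come from the
     * configured query properties): for a filter built as
     * CmsVisitEntryFilter.ALL.filterUser(userId).filterTo(someDate), and assuming the "from"
     * date is left at its default, the returned pair contains
     * BEGIN_CONDITION + C_VISIT_FILTER_USER_ID + END_CONDITION
     * + BEGIN_INCLUDE_CONDITION + C_VISIT_FILTER_DATE_TO + END_CONDITION
     * as its first element and, as its second, a CmsQueryStringParameter for the user id
     * followed by a CmsQueryLongParameter for the date, in placeholder order.
     */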
}
|
package org.simpleim.server.server;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandlerAdapter;
import io.netty.channel.ChannelHandlerContext;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.simpleim.common.message.FailureResponse;
import org.simpleim.common.message.LoginRequest;
import org.simpleim.common.message.NewAccountOkResponse;
import org.simpleim.common.message.NewAccountRequest;
import org.simpleim.common.message.Response;
import org.simpleim.server.database.DataBase;
import org.simpleim.server.util.AccountGenerator;
import com.lambdaworks.crypto.SCryptUtil;
public class ServerHandler extends ChannelHandlerAdapter {
private static final Logger logger = Logger.getLogger(ServerHandler.class.getName());
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
boolean closeNow = true;
Response response = null;
String id;
String password;
if(msg instanceof NewAccountRequest) {
closeNow = true;
id=AccountGenerator.nextId();
password=AccountGenerator.generatePassword();
response = new NewAccountOkResponse()
.setId(id)
.setPassword(password);
String hashedPassword = SCryptUtil.scrypt(password, 1 << 15, 8, 1);
DataBase.InsertNumberRow(id, hashedPassword);
} else if(msg instanceof LoginRequest){
closeNow = false;
LoginRequest request = (LoginRequest) msg;
id = request.getId();
			password = request.getPassword();
			// TODO the login credentials are read here but not yet validated or answered;
			// response stays null, so nothing is written for this message type.
		} else {
			closeNow = true;
			response = new FailureResponse();
		}
		if(response != null) {
			ChannelFuture f = ctx.write(response);
			if(closeNow)
				f.addListener(ChannelFutureListener.CLOSE);
		}
}
@Override
public void channelReadComplete(ChannelHandlerContext ctx) throws Exception {
ctx.flush();
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
// Close the connection when an exception is raised.
logger.log(Level.WARNING, "Unexpected exception from downstream.", cause);
ctx.close();
}
}
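/*
 * Minimal wiring sketch (assumptions: the project provides codec handlers that convert bytes
 * to and from the org.simpleim.common.message objects; "MessageDecoder"/"MessageEncoder" below
 * are placeholders for those, not real class names):
 *
 *   EventLoopGroup boss = new NioEventLoopGroup(1);
 *   EventLoopGroup worker = new NioEventLoopGroup();
 *   ServerBootstrap bootstrap = new ServerBootstrap()
 *       .group(boss, worker)
 *       .channel(NioServerSocketChannel.class)
 *       .childHandler(new ChannelInitializer<SocketChannel>() {
 *           @Override
 *           protected void initChannel(SocketChannel ch) {
 *               ch.pipeline().addLast(new MessageDecoder(), new MessageEncoder(), new ServerHandler());
 *           }
 *       });
 *   bootstrap.bind(port).sync();
 */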
|
// This file is part of JavaSMT,
// an API wrapper for a collection of SMT solvers:
package org.sosy_lab.java_smt.api;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.util.List;
import java.util.Map;
import org.sosy_lab.common.Appender;
import org.sosy_lab.java_smt.api.visitors.FormulaTransformationVisitor;
import org.sosy_lab.java_smt.api.visitors.FormulaVisitor;
import org.sosy_lab.java_smt.api.visitors.TraversalProcess;
/** FormulaManager class contains all operations which can be performed on formulas. */
public interface FormulaManager {
/**
* Returns the Integer-Theory. Because most SAT-solvers support automatic casting between Integer-
* and Rational-Theory, the Integer- and the RationalFormulaManager both return the same Formulas
* for numeric operations like ADD, SUBTRACT, TIMES, LESSTHAN, EQUAL and others.
*
* @throws UnsupportedOperationException If the theory is not supported by the solver.
*/
IntegerFormulaManager getIntegerFormulaManager();
/**
* Returns the Rational-Theory. Because most SAT-solvers support automatic casting between
* Integer- and Rational-Theory, the Integer- and the RationalFormulaManager both return the same
* Formulas for numeric operations like ADD, SUBTRACT, TIMES, LESSTHAN, EQUAL, etc.
*
* @throws UnsupportedOperationException If the theory is not supported by the solver.
*/
RationalFormulaManager getRationalFormulaManager();
/** Returns the Boolean-Theory. */
BooleanFormulaManager getBooleanFormulaManager();
/**
* Returns the Array-Theory.
*
* @throws UnsupportedOperationException If the theory is not supported by the solver.
*/
ArrayFormulaManager getArrayFormulaManager();
/**
* Returns the Bitvector-Theory.
*
* @throws UnsupportedOperationException If the theory is not supported by the solver.
*/
BitvectorFormulaManager getBitvectorFormulaManager();
/**
* Returns the Floating-Point-Theory.
*
* @throws UnsupportedOperationException If the theory is not supported by the solver.
*/
FloatingPointFormulaManager getFloatingPointFormulaManager();
/** Returns the function for dealing with uninterpreted functions (UFs). */
UFManager getUFManager();
/**
   * Returns the Separation-Logic-Theory.
*
* @throws UnsupportedOperationException If the theory is not supported by the solver.
*/
SLFormulaManager getSLFormulaManager();
/**
* Returns the interface for handling quantifiers.
*
* @throws UnsupportedOperationException If the theory is not supported by the solver.
*/
QuantifiedFormulaManager getQuantifiedFormulaManager();
/**
   * Create a variable of the type equal to {@code formulaType}.
*
* @param formulaType the type of the variable.
* @param name the name of the variable.
* @return the created variable.
*/
<T extends Formula> T makeVariable(FormulaType<T> formulaType, String name);
/**
* Create a function application to the given list of arguments.
*
* @param declaration Function declaration
* @param args List of arguments
* @return Constructed formula
*/
<T extends Formula> T makeApplication(
FunctionDeclaration<T> declaration, List<? extends Formula> args);
/**
* Create a function application to the given list of arguments.
*
* @param declaration Function declaration
* @param args List of arguments
* @return Constructed formula
*/
<T extends Formula> T makeApplication(FunctionDeclaration<T> declaration, Formula... args);
/** Returns the type of the given Formula. */
<T extends Formula> FormulaType<T> getFormulaType(T formula);
  /**
   * Parse a boolean formula given as a string in SMT-LIB format.
   *
   * @throws IllegalArgumentException If the string cannot be parsed.
   */
  BooleanFormula parse(String s) throws IllegalArgumentException;
/**
* Serialize an input formula to an SMT-LIB format. Very useful when passing formulas between
* different solvers.
*
* <p>To get a String, simply call {@link Object#toString()} on the returned object. This method
* is lazy and does not create an output string until the returned object is actually used.
*
* @return SMT-LIB formula serialization.
*/
Appender dumpFormula(BooleanFormula pT);
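  /*
   * Round-trip sketch (assuming mgrA and mgrB are FormulaManager instances from two solver
   * contexts and f is a BooleanFormula created via mgrA):
   *
   *   String smtlib = mgrA.dumpFormula(f).toString(); // render the lazy serialization
   *   BooleanFormula g = mgrB.parse(smtlib);          // re-create the formula in the other context
   */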
/**
* Apply a tactic which performs formula transformation. The available tactics depend on the used
* solver.
*/
BooleanFormula applyTactic(BooleanFormula input, Tactic tactic) throws InterruptedException;
/**
* Simplify an input formula, while ensuring equivalence.
*
   * <p>For solvers that do not provide a simplification API, the original formula is returned.
*
* @param input The input formula
* @return Simplified version of the formula
*/
<T extends Formula> T simplify(T input) throws InterruptedException;
/** Visit the formula with a given visitor. */
@CanIgnoreReturnValue
<R> R visit(Formula f, FormulaVisitor<R> rFormulaVisitor);
/**
* Visit the formula recursively with a given {@link FormulaVisitor}.
*
* <p>This method guarantees that the traversal is done iteratively, without using Java recursion,
* and thus is not prone to StackOverflowErrors.
*
* <p>Furthermore, this method also guarantees that every equal part of the formula is visited
* only once. Thus it can be used to traverse DAG-like formulas efficiently.
*/
void visitRecursively(Formula f, FormulaVisitor<TraversalProcess> rFormulaVisitor);
/**
* Visit the formula recursively with a given {@link FormulaVisitor}.
*
* <p>This method guarantees that the traversal is done iteratively, without using Java recursion,
* and thus is not prone to StackOverflowErrors.
*
* <p>Furthermore, this method also guarantees that every equal part of the formula is visited
* only once. Thus it can be used to traverse DAG-like formulas efficiently.
*
* @param pFormulaVisitor Transformation described by the user.
*/
<T extends Formula> T transformRecursively(T f, FormulaTransformationVisitor pFormulaVisitor);
/**
   * Extract the names of all free variables in a formula.
*
* @param f The input formula
* @return Map from variable names to the corresponding formulas.
*/
Map<String, Formula> extractVariables(Formula f);
/**
* Extract the names of all free variables and UFs in a formula.
*
* @param f The input formula
   * @return Map from variable names to the corresponding formulas. If a UF occurs multiple times
* in the input formula, an arbitrary instance of an application of this UF is in the map.
*/
Map<String, Formula> extractVariablesAndUFs(Formula f);
/**
* Substitute every occurrence of any item from {@code changeFrom} in formula {@code f} to the
* corresponding occurrence from {@code changeTo}.
*
* <p>E.g. if {@code changeFrom} contains a variable {@code a} and {@code changeTo} contains a
* variable {@code b} all occurrences of {@code a} will be changed to {@code b} in the returned
* formula.
*
* @param f Formula to change.
* @param fromToMapping Mapping of old and new formula parts.
* @return Formula with parts replaced.
*/
<T extends Formula> T substitute(T f, Map<? extends Formula, ? extends Formula> fromToMapping);
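  /*
   * Substitution sketch (fmgr is a FormulaManager, imgr its IntegerFormulaManager; ImmutableMap
   * is Guava's):
   *
   *   IntegerFormula a = imgr.makeVariable("a");
   *   IntegerFormula b = imgr.makeVariable("b");
   *   BooleanFormula constraint = imgr.equal(imgr.add(a, a), imgr.makeNumber(4));
   *   // every occurrence of "a" is replaced by "b" in the returned formula
   *   BooleanFormula renamed = fmgr.substitute(constraint, ImmutableMap.of(a, b));
   */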
/**
* Translates the formula from another context into the context represented by {@code this}.
* Default implementation relies on string serialization ({@link #dumpFormula(BooleanFormula)} and
* {@link #parse(String)}), but each solver may implement more efficient translation between its
* own contexts.
*
* @param formula Formula belonging to {@code otherContext}.
* @param otherContext Formula manager belonging to the other context.
* @return Formula belonging to {@code this} context.
*/
BooleanFormula translateFrom(BooleanFormula formula, FormulaManager otherContext);
/**
* Check whether the given String can be used as symbol/name for variables or undefined functions.
*
* <p>We explicitly state that with further development of SMT solvers and the SMTLib
   * specification, the list of forbidden variable names may change in the future. Users should,
   * if possible, avoid logical and mathematical operators as well as keywords that depend
   * strongly on SMTlib.
   *
   * <p>If a variable name is rejected, a possibility is escaping, e.g. substituting either the
   * whole variable name or just every invalid character with an escaped form. We recommend using
   * an escape sequence based on the token "JAVASMT", because it should be unusual enough not to
   * appear when encoding a user's problem in SMT. Please note that you might also have to handle
   * escaping the escape sequence. Examples:
*
* <ul>
* <li>the invalid variable name <code>"="</code> (logical operator for equality) can be
* replaced with a string <code>"JAVASMT_EQUALS"</code>.
* <li>the invalid SMTlib-escaped variable name <code>"|test|"</code> (the solver SMTInterpol
* does not allow the pipe symbol <code>"|"</code> in names) can be replaced with <code>
* "JAVASMT_PIPEtestJAVASMT_PIPE"</code>.
* </ul>
*/
boolean isValidName(String variableName);
/**
* Get an escaped symbol/name for variables or undefined functions, if necessary.
*
* <p>See {@link #isValidName(String)} for further details.
*/
String escape(String variableName);
/**
* Unescape the symbol/name for variables or undefined functions, if necessary.
*
* <p>The result is undefined for Strings that are not properly escaped.
*
* <p>See {@link #isValidName(String)} for further details.
*/
String unescape(String variableName);
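  /*
   * Typical use of the three symbol-name methods above (sketch):
   *
   *   String name = "=";                                     // not a valid symbol name
   *   String safe = fmgr.isValidName(name) ? name : fmgr.escape(name);
   *   // ... use "safe" when creating variables or UFs ...
   *   String original = fmgr.unescape(safe);                 // recover the user-level name
   */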
}
|
package org.torproject.onionoo;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Scanner;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TimeZone;
import java.util.TreeMap;
import java.util.TreeSet;
import org.torproject.descriptor.Descriptor;
import org.torproject.descriptor.NetworkStatusEntry;
import org.torproject.descriptor.RelayNetworkStatusConsensus;
import org.torproject.descriptor.ServerDescriptor;
public class WeightsDataWriter implements DescriptorListener {
private DescriptorSource descriptorSource;
private DocumentStore documentStore;
private SortedSet<String> currentFingerprints = new TreeSet<String>();
public WeightsDataWriter(DescriptorSource descriptorSource,
DocumentStore documentStore) {
this.descriptorSource = descriptorSource;
this.documentStore = documentStore;
this.registerDescriptorListeners();
}
private void registerDescriptorListeners() {
this.descriptorSource.registerListener(this,
DescriptorType.RELAY_CONSENSUSES);
this.descriptorSource.registerListener(this,
DescriptorType.RELAY_SERVER_DESCRIPTORS);
}
public void processDescriptor(Descriptor descriptor, boolean relay) {
if (descriptor instanceof ServerDescriptor) {
this.processRelayServerDescriptor((ServerDescriptor) descriptor);
} else if (descriptor instanceof RelayNetworkStatusConsensus) {
this.processRelayNetworkConsensus(
(RelayNetworkStatusConsensus) descriptor);
}
}
public void setCurrentNodes(
SortedMap<String, NodeStatus> currentNodes) {
this.currentFingerprints.addAll(currentNodes.keySet());
}
private Map<String, Integer> advertisedBandwidths =
new HashMap<String, Integer>();
private void processRelayServerDescriptor(
ServerDescriptor serverDescriptor) {
/* Read advertised bandwidths of all server descriptors in
* in/relay-descriptors/server-descriptors/ to memory. Ideally, we'd
* skip descriptors that we read before and obtain their advertised
* bandwidths from some temp file. This approach should do for now,
* though. */
String digest = serverDescriptor.getServerDescriptorDigest().
toUpperCase();
int advertisedBandwidth = Math.min(Math.min(
serverDescriptor.getBandwidthBurst(),
serverDescriptor.getBandwidthObserved()),
serverDescriptor.getBandwidthRate());
this.advertisedBandwidths.put(digest, advertisedBandwidth);
}
private void processRelayNetworkConsensus(
RelayNetworkStatusConsensus consensus) {
long validAfterMillis = consensus.getValidAfterMillis(),
freshUntilMillis = consensus.getFreshUntilMillis();
SortedMap<String, double[]> pathSelectionWeights =
this.calculatePathSelectionProbabilities(consensus);
this.updateWeightsHistory(validAfterMillis, freshUntilMillis,
pathSelectionWeights);
}
private static final int HISTORY_UPDATER_WORKERS_NUM = 4;
private void updateWeightsHistory(long validAfterMillis,
long freshUntilMillis,
SortedMap<String, double[]> pathSelectionWeights) {
List<HistoryUpdateWorker> historyUpdateWorkers =
new ArrayList<HistoryUpdateWorker>();
for (int i = 0; i < HISTORY_UPDATER_WORKERS_NUM; i++) {
HistoryUpdateWorker historyUpdateWorker =
new HistoryUpdateWorker(validAfterMillis, freshUntilMillis,
pathSelectionWeights, this);
historyUpdateWorkers.add(historyUpdateWorker);
historyUpdateWorker.setDaemon(true);
historyUpdateWorker.start();
}
for (HistoryUpdateWorker historyUpdateWorker : historyUpdateWorkers) {
try {
historyUpdateWorker.join();
} catch (InterruptedException e) {
/* This is not something that we can take care of. Just leave the
* worker thread alone. */
}
}
}
private class HistoryUpdateWorker extends Thread {
private long validAfterMillis;
private long freshUntilMillis;
private SortedMap<String, double[]> pathSelectionWeights;
private WeightsDataWriter parent;
public HistoryUpdateWorker(long validAfterMillis,
long freshUntilMillis,
SortedMap<String, double[]> pathSelectionWeights,
WeightsDataWriter parent) {
this.validAfterMillis = validAfterMillis;
this.freshUntilMillis = freshUntilMillis;
this.pathSelectionWeights = pathSelectionWeights;
this.parent = parent;
}
public void run() {
String fingerprint = null;
double[] weights = null;
do {
fingerprint = null;
synchronized (pathSelectionWeights) {
if (!pathSelectionWeights.isEmpty()) {
fingerprint = pathSelectionWeights.firstKey();
weights = pathSelectionWeights.remove(fingerprint);
}
}
if (fingerprint != null) {
this.parent.addToHistory(fingerprint, this.validAfterMillis,
this.freshUntilMillis, weights);
}
} while (fingerprint != null);
}
}
private SortedMap<String, double[]> calculatePathSelectionProbabilities(
RelayNetworkStatusConsensus consensus) {
double wgg = 1.0, wgd = 1.0, wmg = 1.0, wmm = 1.0, wme = 1.0,
wmd = 1.0, wee = 1.0, wed = 1.0;
SortedMap<String, Integer> bandwidthWeights =
consensus.getBandwidthWeights();
if (bandwidthWeights != null) {
SortedSet<String> missingWeightKeys = new TreeSet<String>(
Arrays.asList("Wgg,Wgd,Wmg,Wmm,Wme,Wmd,Wee,Wed".split(",")));
missingWeightKeys.removeAll(bandwidthWeights.keySet());
if (missingWeightKeys.isEmpty()) {
wgg = ((double) bandwidthWeights.get("Wgg")) / 10000.0;
wgd = ((double) bandwidthWeights.get("Wgd")) / 10000.0;
wmg = ((double) bandwidthWeights.get("Wmg")) / 10000.0;
wmm = ((double) bandwidthWeights.get("Wmm")) / 10000.0;
wme = ((double) bandwidthWeights.get("Wme")) / 10000.0;
wmd = ((double) bandwidthWeights.get("Wmd")) / 10000.0;
wee = ((double) bandwidthWeights.get("Wee")) / 10000.0;
wed = ((double) bandwidthWeights.get("Wed")) / 10000.0;
}
}
SortedMap<String, Double>
advertisedBandwidths = new TreeMap<String, Double>(),
consensusWeights = new TreeMap<String, Double>(),
guardWeights = new TreeMap<String, Double>(),
middleWeights = new TreeMap<String, Double>(),
exitWeights = new TreeMap<String, Double>();
double totalAdvertisedBandwidth = 0.0;
double totalConsensusWeight = 0.0;
double totalGuardWeight = 0.0;
double totalMiddleWeight = 0.0;
double totalExitWeight = 0.0;
for (NetworkStatusEntry relay :
consensus.getStatusEntries().values()) {
String fingerprint = relay.getFingerprint();
if (!relay.getFlags().contains("Running")) {
continue;
}
boolean isExit = relay.getFlags().contains("Exit") &&
!relay.getFlags().contains("BadExit");
boolean isGuard = relay.getFlags().contains("Guard");
String serverDescriptorDigest = relay.getDescriptor().
toUpperCase();
double advertisedBandwidth = 0.0;
if (this.advertisedBandwidths.containsKey(
serverDescriptorDigest)) {
advertisedBandwidth = (double) this.advertisedBandwidths.get(
serverDescriptorDigest);
}
double consensusWeight = (double) relay.getBandwidth();
double guardWeight = (double) relay.getBandwidth();
double middleWeight = (double) relay.getBandwidth();
double exitWeight = (double) relay.getBandwidth();
if (isGuard && isExit) {
guardWeight *= wgd;
middleWeight *= wmd;
exitWeight *= wed;
} else if (isGuard) {
guardWeight *= wgg;
middleWeight *= wmg;
exitWeight = 0.0;
} else if (isExit) {
guardWeight = 0.0;
middleWeight *= wme;
exitWeight *= wee;
} else {
guardWeight = 0.0;
middleWeight *= wmm;
exitWeight = 0.0;
}
advertisedBandwidths.put(fingerprint, advertisedBandwidth);
consensusWeights.put(fingerprint, consensusWeight);
guardWeights.put(fingerprint, guardWeight);
middleWeights.put(fingerprint, middleWeight);
exitWeights.put(fingerprint, exitWeight);
totalAdvertisedBandwidth += advertisedBandwidth;
totalConsensusWeight += consensusWeight;
totalGuardWeight += guardWeight;
totalMiddleWeight += middleWeight;
totalExitWeight += exitWeight;
}
SortedMap<String, double[]> pathSelectionProbabilities =
new TreeMap<String, double[]>();
for (NetworkStatusEntry relay :
consensus.getStatusEntries().values()) {
String fingerprint = relay.getFingerprint();
double[] probabilities = new double[] {
advertisedBandwidths.get(fingerprint)
/ totalAdvertisedBandwidth,
consensusWeights.get(fingerprint) / totalConsensusWeight,
guardWeights.get(fingerprint) / totalGuardWeight,
middleWeights.get(fingerprint) / totalMiddleWeight,
exitWeights.get(fingerprint) / totalExitWeight };
pathSelectionProbabilities.put(fingerprint, probabilities);
}
return pathSelectionProbabilities;
}
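  /* Worked example for the weighting above (illustrative numbers): a running relay with the
   * Guard and Exit flags (and without BadExit), consensus bandwidth 1000, and consensus
   * bandwidth weights Wgd=3000, Wmd=3000, Wed=4000 gets guard = 1000 * 0.3 = 300,
   * middle = 1000 * 0.3 = 300 and exit = 1000 * 0.4 = 400; each value is then divided by the
   * corresponding network-wide total to obtain the probabilities stored per fingerprint. */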
private void addToHistory(String fingerprint, long validAfterMillis,
long freshUntilMillis, double[] weights) {
SortedMap<long[], double[]> history =
this.readHistoryFromDisk(fingerprint);
long[] interval = new long[] { validAfterMillis, freshUntilMillis };
if ((history.headMap(interval).isEmpty() ||
history.headMap(interval).lastKey()[1] <= validAfterMillis) &&
(history.tailMap(interval).isEmpty() ||
history.tailMap(interval).firstKey()[0] >= freshUntilMillis)) {
history.put(interval, weights);
history = this.compressHistory(history);
this.writeHistoryToDisk(fingerprint, history);
}
}
private SortedMap<long[], double[]> readHistoryFromDisk(
String fingerprint) {
SortedMap<long[], double[]> history =
new TreeMap<long[], double[]>(new Comparator<long[]>() {
public int compare(long[] a, long[] b) {
return a[0] < b[0] ? -1 : a[0] > b[0] ? 1 : 0;
}
});
WeightsStatus weightsStatus = this.documentStore.retrieve(
WeightsStatus.class, false, fingerprint);
if (weightsStatus != null) {
String historyString = weightsStatus.documentString;
SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
"yyyy-MM-dd HH:mm:ss");
dateTimeFormat.setLenient(false);
dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
try {
Scanner s = new Scanner(historyString);
while (s.hasNextLine()) {
String line = s.nextLine();
String[] parts = line.split(" ");
if (parts.length != 9) {
System.err.println("Illegal line '" + line + "' in weights "
+ "history for fingerprint '" + fingerprint + "'. "
+ "Skipping this line.");
continue;
}
if (parts[4].equals("NaN")) {
/* Remove corrupt lines written on 2013-07-07 and the days
* after. */
continue;
}
long validAfterMillis = dateTimeFormat.parse(parts[0]
+ " " + parts[1]).getTime();
long freshUntilMillis = dateTimeFormat.parse(parts[2]
+ " " + parts[3]).getTime();
long[] interval = new long[] { validAfterMillis,
freshUntilMillis };
double[] weights = new double[] {
Double.parseDouble(parts[4]),
Double.parseDouble(parts[5]),
Double.parseDouble(parts[6]),
Double.parseDouble(parts[7]),
Double.parseDouble(parts[8]) };
history.put(interval, weights);
}
s.close();
} catch (ParseException e) {
System.err.println("Could not parse timestamp while reading "
+ "weights history for fingerprint '" + fingerprint + "'. "
+ "Skipping.");
e.printStackTrace();
}
}
return history;
}
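  /* Each line of the history string parsed above has nine space-separated fields: the
   * valid-after and fresh-until timestamps (each as "yyyy-MM-dd HH:mm:ss") followed by the
   * five weight fractions, e.g. (illustrative values):
   *
   *   2013-08-01 12:00:00 2013-08-01 13:00:00 0.001234 0.001500 0.002000 0.001800 0.000900
   */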
private long now = System.currentTimeMillis();
private SortedMap<long[], double[]> compressHistory(
SortedMap<long[], double[]> history) {
SortedMap<long[], double[]> compressedHistory =
new TreeMap<long[], double[]>(history.comparator());
long lastStartMillis = 0L, lastEndMillis = 0L;
double[] lastWeights = null;
for (Map.Entry<long[], double[]> e : history.entrySet()) {
long startMillis = e.getKey()[0], endMillis = e.getKey()[1];
double[] weights = e.getValue();
long intervalLengthMillis;
if (this.now - endMillis <= 7L * 24L * 60L * 60L * 1000L) {
intervalLengthMillis = 60L * 60L * 1000L;
} else if (this.now - endMillis <= 31L * 24L * 60L * 60L * 1000L) {
intervalLengthMillis = 4L * 60L * 60L * 1000L;
} else if (this.now - endMillis <= 92L * 24L * 60L * 60L * 1000L) {
intervalLengthMillis = 12L * 60L * 60L * 1000L;
} else if (this.now - endMillis <= 366L * 24L * 60L * 60L * 1000L) {
intervalLengthMillis = 2L * 24L * 60L * 60L * 1000L;
} else {
intervalLengthMillis = 10L * 24L * 60L * 60L * 1000L;
}
if (lastEndMillis == startMillis &&
(lastEndMillis / intervalLengthMillis) ==
(endMillis / intervalLengthMillis)) {
        double lastIntervalInHours = (lastEndMillis
            - lastStartMillis) / (60.0 * 60.0 * 1000.0);
        double currentIntervalInHours = (endMillis
            - startMillis) / (60.0 * 60.0 * 1000.0);
        double newIntervalInHours = (endMillis
            - lastStartMillis) / (60.0 * 60.0 * 1000.0);
for (int i = 0; i < lastWeights.length; i++) {
lastWeights[i] *= lastIntervalInHours;
lastWeights[i] += weights[i] * currentIntervalInHours;
lastWeights[i] /= newIntervalInHours;
}
lastEndMillis = endMillis;
} else {
if (lastStartMillis > 0L) {
compressedHistory.put(new long[] { lastStartMillis,
lastEndMillis }, lastWeights);
}
lastStartMillis = startMillis;
lastEndMillis = endMillis;
lastWeights = weights;
}
}
if (lastStartMillis > 0L) {
compressedHistory.put(new long[] { lastStartMillis, lastEndMillis },
lastWeights);
}
return compressedHistory;
}
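  /* Compression sketch: adjacent intervals that end in the same data-point bucket are merged,
   * and their weights are combined as a time-weighted average. E.g. (illustrative) a 1-hour
   * interval with guard probability 0.02 followed by an adjacent 2-hour interval with 0.05
   * collapses to a single 3-hour interval with (1 * 0.02 + 2 * 0.05) / 3 = 0.04. */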
private void writeHistoryToDisk(String fingerprint,
SortedMap<long[], double[]> history) {
StringBuilder sb = new StringBuilder();
SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
"yyyy-MM-dd HH:mm:ss");
dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
for (Map.Entry<long[], double[]> e : history.entrySet()) {
long[] fresh = e.getKey();
double[] weights = e.getValue();
sb.append(dateTimeFormat.format(fresh[0]) + " "
+ dateTimeFormat.format(fresh[1]));
for (double weight : weights) {
sb.append(String.format(" %.12f", weight));
}
sb.append("\n");
}
WeightsStatus weightsStatus = new WeightsStatus();
weightsStatus.documentString = sb.toString();
this.documentStore.store(weightsStatus, fingerprint);
}
public void writeWeightsDataFiles() {
for (String fingerprint : this.currentFingerprints) {
SortedMap<long[], double[]> history =
this.readHistoryFromDisk(fingerprint);
if (history.isEmpty() || history.lastKey()[1] < this.now
- 7L * 24L * 60L * 60L * 1000L) {
/* Don't write weights data file to disk. */
continue;
}
WeightsDocument weightsDocument = new WeightsDocument();
weightsDocument.documentString = this.formatHistoryString(
fingerprint, history);
this.documentStore.store(weightsDocument, fingerprint);
}
}
private String[] graphTypes = new String[] {
"advertised_bandwidth_fraction",
"consensus_weight_fraction",
"guard_probability",
"middle_probability",
"exit_probability"
};
private String[] graphNames = new String[] {
"1_week",
"1_month",
"3_months",
"1_year",
"5_years" };
private long[] graphIntervals = new long[] {
7L * 24L * 60L * 60L * 1000L,
31L * 24L * 60L * 60L * 1000L,
92L * 24L * 60L * 60L * 1000L,
366L * 24L * 60L * 60L * 1000L,
5L * 366L * 24L * 60L * 60L * 1000L };
private long[] dataPointIntervals = new long[] {
60L * 60L * 1000L,
4L * 60L * 60L * 1000L,
12L * 60L * 60L * 1000L,
2L * 24L * 60L * 60L * 1000L,
10L * 24L * 60L * 60L * 1000L };
private String formatHistoryString(String fingerprint,
SortedMap<long[], double[]> history) {
StringBuilder sb = new StringBuilder();
sb.append("{\"fingerprint\":\"" + fingerprint + "\"");
for (int graphTypeIndex = 0; graphTypeIndex < this.graphTypes.length;
graphTypeIndex++) {
String graphType = this.graphTypes[graphTypeIndex];
sb.append(",\n\"" + graphType + "\":{");
int graphIntervalsWritten = 0;
for (int graphIntervalIndex = 0; graphIntervalIndex <
this.graphIntervals.length; graphIntervalIndex++) {
String timeline = this.formatTimeline(graphTypeIndex,
graphIntervalIndex, history);
if (timeline != null) {
sb.append((graphIntervalsWritten++ > 0 ? "," : "") + "\n"
+ timeline);
}
}
sb.append("}");
}
sb.append("\n}\n");
return sb.toString();
}
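  /* Shape of the document produced above (abridged, illustrative values):
   *
   *   {"fingerprint":"ABCD...",
   *    "advertised_bandwidth_fraction":{"1_week":{"first":"2013-08-01 12:30:00",
   *        "last":"2013-08-08 11:30:00","interval":3600,"factor":0.000001234,
   *        "count":168,"values":[12,15,null,...]}, "1_month":{...}},
   *    "consensus_weight_fraction":{...},
   *    "guard_probability":{...},
   *    "middle_probability":{...},
   *    "exit_probability":{...}
   *   }
   */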
private String formatTimeline(int graphTypeIndex,
int graphIntervalIndex, SortedMap<long[], double[]> history) {
String graphName = this.graphNames[graphIntervalIndex];
long graphInterval = this.graphIntervals[graphIntervalIndex];
long dataPointInterval =
this.dataPointIntervals[graphIntervalIndex];
List<Double> dataPoints = new ArrayList<Double>();
long intervalStartMillis = ((this.now - graphInterval)
/ dataPointInterval) * dataPointInterval;
long totalMillis = 0L;
double totalWeightTimesMillis = 0.0;
for (Map.Entry<long[], double[]> e : history.entrySet()) {
long startMillis = e.getKey()[0], endMillis = e.getKey()[1];
double weight = e.getValue()[graphTypeIndex];
if (endMillis < intervalStartMillis) {
continue;
}
while ((intervalStartMillis / dataPointInterval) !=
(endMillis / dataPointInterval)) {
dataPoints.add(totalMillis * 5L < dataPointInterval
? -1.0 : totalWeightTimesMillis / (double) totalMillis);
totalWeightTimesMillis = 0.0;
totalMillis = 0L;
intervalStartMillis += dataPointInterval;
}
totalWeightTimesMillis += weight
* ((double) (endMillis - startMillis));
totalMillis += (endMillis - startMillis);
}
dataPoints.add(totalMillis * 5L < dataPointInterval
? -1.0 : totalWeightTimesMillis / (double) totalMillis);
double maxValue = 0.0;
int firstNonNullIndex = -1, lastNonNullIndex = -1;
for (int dataPointIndex = 0; dataPointIndex < dataPoints.size();
dataPointIndex++) {
double dataPoint = dataPoints.get(dataPointIndex);
if (dataPoint >= 0.0) {
if (firstNonNullIndex < 0) {
firstNonNullIndex = dataPointIndex;
}
lastNonNullIndex = dataPointIndex;
if (dataPoint > maxValue) {
maxValue = dataPoint;
}
}
}
if (firstNonNullIndex < 0) {
return null;
}
long firstDataPointMillis = (((this.now - graphInterval)
/ dataPointInterval) + firstNonNullIndex) * dataPointInterval
+ dataPointInterval / 2L;
if (graphIntervalIndex > 0 && firstDataPointMillis >=
this.now - graphIntervals[graphIntervalIndex - 1]) {
/* Skip weights history object, because it doesn't contain
* anything new that wasn't already contained in the last
* weights history object(s). */
return null;
}
long lastDataPointMillis = firstDataPointMillis
+ (lastNonNullIndex - firstNonNullIndex) * dataPointInterval;
double factor = ((double) maxValue) / 999.0;
int count = lastNonNullIndex - firstNonNullIndex + 1;
StringBuilder sb = new StringBuilder();
SimpleDateFormat dateTimeFormat = new SimpleDateFormat(
"yyyy-MM-dd HH:mm:ss");
dateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
sb.append("\"" + graphName + "\":{"
+ "\"first\":\"" + dateTimeFormat.format(firstDataPointMillis)
+ "\",\"last\":\"" + dateTimeFormat.format(lastDataPointMillis)
+ "\",\"interval\":" + String.valueOf(dataPointInterval / 1000L)
+ ",\"factor\":" + String.format(Locale.US, "%.9f", factor)
+ ",\"count\":" + String.valueOf(count) + ",\"values\":[");
int dataPointsWritten = 0, previousNonNullIndex = -2;
boolean foundTwoAdjacentDataPoints = false;
for (int dataPointIndex = firstNonNullIndex; dataPointIndex <=
lastNonNullIndex; dataPointIndex++) {
double dataPoint = dataPoints.get(dataPointIndex);
if (dataPoint >= 0.0) {
if (dataPointIndex - previousNonNullIndex == 1) {
foundTwoAdjacentDataPoints = true;
}
previousNonNullIndex = dataPointIndex;
}
sb.append((dataPointsWritten++ > 0 ? "," : "")
+ (dataPoint < 0.0 ? "null" :
String.valueOf((long) ((dataPoint * 999.0) / maxValue))));
}
sb.append("]}");
if (foundTwoAdjacentDataPoints) {
return sb.toString();
} else {
return null;
}
}
public void deleteObsoleteWeightsDataFiles() {
SortedSet<String> obsoleteWeightsFiles;
obsoleteWeightsFiles = this.documentStore.list(WeightsDocument.class,
false);
for (String fingerprint : this.currentFingerprints) {
if (obsoleteWeightsFiles.contains(fingerprint)) {
obsoleteWeightsFiles.remove(fingerprint);
}
}
for (String fingerprint : obsoleteWeightsFiles) {
this.documentStore.remove(WeightsDocument.class, fingerprint);
}
}
}
|
package com.ame.bus3.common;
/**
* Stores variables important to multiple pieces of code in a single location.
* @author Amelorate
*/
public class Variables {
/**
* Stores if it is running in client or server mode.
*/
public static boolean isServer;
/**
* The IP address of the server being connected to.
*/
public static String serverIP;
/**
* Which port to use when connecting to a client/server.
*/
public static int port = 0;
/**
* What the renderer will render.
* Valid options are "tiles" and "mainmenu" for now.
*/
public static String renderMode = "mainmenu";
/**
* The main map of the game.
*/
public static GameMap map = new GameMap();
}
|
package de.dakror.vloxlands;
import com.badlogic.gdx.Application.ApplicationType;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Input.Buttons;
import com.badlogic.gdx.Input.Keys;
import com.badlogic.gdx.InputMultiplexer;
import com.badlogic.gdx.assets.AssetManager;
import com.badlogic.gdx.graphics.GL20;
import com.badlogic.gdx.graphics.OrthographicCamera;
import com.badlogic.gdx.graphics.PerspectiveCamera;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.g2d.BitmapFont;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.graphics.g3d.Environment;
import com.badlogic.gdx.graphics.g3d.ModelBatch;
import com.badlogic.gdx.graphics.g3d.attributes.ColorAttribute;
import com.badlogic.gdx.graphics.g3d.environment.DirectionalLight;
import com.badlogic.gdx.graphics.g3d.utils.FirstPersonCameraController;
import com.badlogic.gdx.graphics.glutils.ShapeRenderer;
import com.badlogic.gdx.input.GestureDetector;
import com.badlogic.gdx.math.Intersector;
import com.badlogic.gdx.math.MathUtils;
import com.badlogic.gdx.math.Matrix4;
import com.badlogic.gdx.math.Vector3;
import com.badlogic.gdx.math.collision.BoundingBox;
import com.badlogic.gdx.math.collision.Ray;
import com.badlogic.gdx.physics.bullet.Bullet;
import com.badlogic.gdx.scenes.scene2d.Stage;
import com.badlogic.gdx.scenes.scene2d.ui.Skin;
import com.badlogic.gdx.scenes.scene2d.ui.Touchpad;
import com.badlogic.gdx.scenes.scene2d.ui.Touchpad.TouchpadStyle;
import com.badlogic.gdx.scenes.scene2d.utils.Drawable;
import com.badlogic.gdx.utils.viewport.ScreenViewport;
import de.dakror.vloxlands.game.entity.Entity;
import de.dakror.vloxlands.game.entity.creature.Human;
import de.dakror.vloxlands.game.voxel.Voxel;
import de.dakror.vloxlands.game.world.Chunk;
import de.dakror.vloxlands.game.world.Island;
import de.dakror.vloxlands.game.world.World;
import de.dakror.vloxlands.render.MeshingThread;
import de.dakror.vloxlands.screen.LoadingScreen;
import de.dakror.vloxlands.util.Direction;
import de.dakror.vloxlands.util.GameBase;
import de.dakror.vloxlands.util.VoxelSelection;
public class Vloxlands extends GameBase
{
public static final long seed = (long) (Math.random() * Long.MAX_VALUE);
public static final float velocity = 10;
public static final float rotateSpeed = 0.2f;
public static final float pickRayMaxDistance = 30f;
public static Vloxlands currentGame;
public static World world;
public static AssetManager assets;
public static PerspectiveCamera camera;
public static ShapeRenderer shapeRenderer;
public Environment lights;
ModelBatch modelBatch;
FirstPersonCameraController controller;
SpriteBatch spriteBatch;
BitmapFont font;
// -- on screen controls -- //
Stage stage;
OrthographicCamera camera2;
Touchpad moveTouchpad;
TouchpadStyle touchpadStyle;
Skin touchpadSkin;
Drawable touchpadBack;
Drawable touchpadFront;
long last;
int tick;
public static boolean debug;
public static boolean showChunkBorders;
boolean middleDown;
Vector3 worldMiddle;
public Vector3 intersection = new Vector3();
public Vector3 intersection2 = new Vector3();
// -- temp -- //
public final Vector3 tmp = new Vector3();
public final Vector3 tmp1 = new Vector3();
public final Vector3 tmp2 = new Vector3();
public final Vector3 tmp3 = new Vector3();
public final Vector3 tmp4 = new Vector3();
public final Vector3 tmp5 = new Vector3();
public final Vector3 tmp6 = new Vector3();
public final Vector3 tmp7 = new Vector3();
public final Vector3 tmp8 = new Vector3();
public final Matrix4 m4 = new Matrix4();
public final BoundingBox bb = new BoundingBox();
public final BoundingBox bb2 = new BoundingBox();
public final BoundingBox bb3 = new BoundingBox();
@Override
public void create()
{
Bullet.init();
currentGame = this;
Gdx.app.log("Vloxlands.create", "Seed: " + seed + "");
MathUtils.random.setSeed(seed);
Voxel.loadVoxels();
spriteBatch = new SpriteBatch();
font = new BitmapFont();
assets = new AssetManager();
modelBatch = new ModelBatch(Gdx.files.internal("shader/shader.vs"), Gdx.files.internal("shader/shader.fs"));
camera = new PerspectiveCamera(60, Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
camera.near = 0.1f;
camera.far = 100;
controller = new FirstPersonCameraController(camera)
{
@Override
public boolean touchDragged(int screenX, int screenY, int pointer)
{
if (middleDown || Gdx.app.getType() == ApplicationType.Android) super.touchDragged(screenX, screenY, pointer);
return false;
}
};
controller.setDegreesPerPixel(rotateSpeed);
controller.setVelocity(velocity);
shapeRenderer = new ShapeRenderer();
new MeshingThread();
InputMultiplexer multiplexer = new InputMultiplexer();
lights = new Environment();
lights.set(new ColorAttribute(ColorAttribute.AmbientLight, 0.4f, 0.4f, 0.4f, 1.f), new ColorAttribute(ColorAttribute.Fog, 0.5f, 0.8f, 0.85f, 1.f));
lights.add(new DirectionalLight().set(255, 255, 255, 0, -1, 1));
int w = MathUtils.random(1, 1);
int d = MathUtils.random(1, 1);
world = new World(w, d);
Gdx.app.log("Vloxlands.create", "World size: " + w + "x" + d);
// -- stage -- //
if (Gdx.app.getType() == ApplicationType.Android)
{
camera2 = new OrthographicCamera();
touchpadSkin = new Skin();
touchpadSkin.add("touchpadBack", new Texture("img/gui/touchpadBack.png"));
touchpadSkin.add("touchpadFront", new Texture("img/gui/touchpadFront.png"));
touchpadStyle = new TouchpadStyle();
touchpadBack = touchpadSkin.getDrawable("touchpadBack");
touchpadFront = touchpadSkin.getDrawable("touchpadFront");
touchpadStyle.background = touchpadBack;
touchpadStyle.knob = touchpadFront;
int size = (int) (160 * (Gdx.graphics.getHeight() / 720f));
int size2 = (int) (100 * (Gdx.graphics.getHeight() / 720f));
touchpadStyle.knob.setMinWidth(size2);
touchpadStyle.knob.setMinHeight(size2);
int delta = 30;
moveTouchpad = new Touchpad(10, touchpadStyle);
moveTouchpad.setBounds(delta, delta, size, size);
stage = new Stage(new ScreenViewport(camera2));
stage.addActor(moveTouchpad);
multiplexer.addProcessor(stage);
}
multiplexer.addProcessor(new GestureDetector(this));
multiplexer.addProcessor(this);
multiplexer.addProcessor(controller);
Gdx.input.setInputProcessor(multiplexer);
setScreen(new LoadingScreen());
}
public void doneLoading()
{
Vector3 p = world.getIslands()[0].pos;
world.addEntity(new Human(Island.SIZE / 2, Island.SIZE / 4 * 3 + 2 + p.y, Island.SIZE / 2));
worldMiddle = new Vector3(p.x * Island.SIZE + Island.SIZE / 2, p.y + Island.SIZE, p.z * Island.SIZE + Island.SIZE / 2);
camera.position.set(worldMiddle);
camera.position.y -= Island.SIZE / 4;
camera.position.z -= Island.SIZE / 2;
camera.rotate(Vector3.Y, 180);
}
@Override
public void render()
{
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT | GL20.GL_DEPTH_BUFFER_BIT);
if (getScreen() != null)
{
super.render();
}
else
{
Gdx.gl.glClearColor(0.5f, 0.8f, 0.85f, 1);
controller.update();
world.update();
modelBatch.begin(camera);
world.render(modelBatch, lights);
modelBatch.end();
if (Gdx.app.getType() == ApplicationType.Android)
{
camera2.update();
stage.act(Gdx.graphics.getDeltaTime());
stage.draw();
}
if (last == 0) last = System.currentTimeMillis();
if (System.currentTimeMillis() - last >= 16) // roughly 60 ticks per second
{
if (Gdx.app.getType() == ApplicationType.Android)
{
float delta = Gdx.graphics.getDeltaTime();
camera.position.add(camera.direction.cpy().nor().scl(delta * moveTouchpad.getKnobPercentY() * velocity));
camera.position.add(camera.direction.cpy().crs(camera.up).nor().scl(delta * moveTouchpad.getKnobPercentX() * velocity));
}
world.tick(tick++);
last = System.currentTimeMillis();
}
}
if (debug)
{
spriteBatch.begin();
font.draw(spriteBatch, "FPS: " + Gdx.graphics.getFramesPerSecond(), 0, Gdx.graphics.getHeight());
font.draw(spriteBatch, "C: " + world.visibleChunks + " / " + world.chunks, 0, Gdx.graphics.getHeight() - 20);
font.draw(spriteBatch, "E: " + world.visibleEntities + " / " + world.getEntityCount(), 0, Gdx.graphics.getHeight() - 40);
font.draw(spriteBatch, "X: " + camera.position.x, 0, Gdx.graphics.getHeight() - 60);
font.draw(spriteBatch, "Y: " + camera.position.y, 0, Gdx.graphics.getHeight() - 80);
font.draw(spriteBatch, "Z: " + camera.position.z, 0, Gdx.graphics.getHeight() - 100);
font.draw(spriteBatch, "Seed: " + seed, 0, Gdx.graphics.getHeight() - 120);
spriteBatch.end();
}
}
@Override
public void resize(int width, int height)
{
spriteBatch.getProjectionMatrix().setToOrtho2D(0, 0, width, height);
camera.viewportWidth = width;
camera.viewportHeight = height;
camera.update();
if (Gdx.app.getType() == ApplicationType.Android) camera2.update();
}
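// Casts a pick ray from the given screen coordinates. In hover mode the nearest
// intersected entity is marked as hovered; otherwise hovered entities become selected,
// and if none are, the closest voxel within pickRayMaxDistance is picked and the hit
// face is resolved via the adjacent air voxel (see the VoxelSelection created below).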
public void pickRay(boolean hover, boolean lmb, int x, int y)
{
Ray ray = camera.getPickRay(x, y);
if (hover)
{
Entity hovered = null;
float distance = 0;
for (Entity entity : world.getEntities())
{
entity.hovered = false;
if (!entity.inFrustum) continue;
if (Intersector.intersectRayBounds(ray, entity.boundingBox, tmp))
{
float dst = ray.origin.dst(tmp);
if (hovered == null || dst < distance)
{
hovered = entity;
distance = dst;
}
}
}
if (hovered != null) hovered.hovered = true;
}
else
{
boolean entitySelected = false;
for (Entity entity : world.getEntities())
{
entity.selected = false;
if (entity.inFrustum && entity.hovered)
{
entity.selected = true;
entitySelected = true;
}
}
if (entitySelected) return;
for (int i = 0; i < world.getIslands().length; i++)
{
Island island = world.getIslands()[i];
if (island == null) continue;
float distance = 0;
Chunk chunk = null;
Vector3 voxel = null;
for (Chunk c : island.getChunks())
{
if (c.inFrustum && !c.isEmpty())
{
tmp1.set(island.pos.x + c.pos.x, island.pos.y + c.pos.y, island.pos.z + c.pos.z);
tmp2.set(tmp1.cpy().add(Chunk.SIZE, Chunk.SIZE, Chunk.SIZE));
bb.set(tmp1, tmp2);
if (Intersector.intersectRayBounds(ray, bb, null) && c.pickVoxel(ray, tmp5, tmp6))
{
float dist = ray.origin.dst(tmp5);
if ((chunk == null || dist < distance) && dist <= pickRayMaxDistance)
{
intersection.set(tmp5);
distance = dist;
voxel = tmp6.cpy();
chunk = c;
}
}
}
}
if (chunk != null)
{
// -- determine selectedVoxelFace -- //
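// Check all six neighbors of the picked voxel: skip neighbors that are not air,
// intersect the ray with each remaining neighbor's bounding box, and keep the
// direction of the closest hit as the selected face.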
Direction dir = null;
float distanc = 0;
Vector3 is2 = new Vector3();
byte air = Voxel.get("AIR").getId();
for (Direction d : Direction.values())
{
tmp7.set(island.pos.x + chunk.pos.x + voxel.x + d.dir.x, island.pos.y + chunk.pos.y + voxel.y + d.dir.y, island.pos.z + chunk.pos.z + voxel.z + d.dir.z);
tmp8.set(tmp7.cpy().add(1, 1, 1));
bb3.set(tmp7, tmp8);
if (island.get(chunk.pos.x + voxel.x + d.dir.x, chunk.pos.y + voxel.y + d.dir.y, chunk.pos.z + voxel.z + d.dir.z) != air) continue;
if (Intersector.intersectRayBounds(ray, bb3, is2))
{
float dist = ray.origin.dst(is2);
if (dir == null || dist < distanc)
{
intersection2.set(is2);
distanc = dist;
dir = d;
}
}
}
new VoxelSelection(i, Voxel.getForId(chunk.get((int) voxel.x, (int) voxel.y, (int) voxel.z)), voxel.cpy().add(chunk.pos), dir);
// TODO use VoxelSelection
}
}
}
}
@Override
public boolean keyUp(int keycode)
{
if (keycode == Keys.F1) debug = !debug;
if (keycode == Keys.F2) showChunkBorders = !showChunkBorders;
if (keycode == Keys.F11)
{
if (Gdx.graphics.isFullscreen()) Gdx.graphics.setDisplayMode(1280, 720, false);
else Gdx.graphics.setDisplayMode(Gdx.graphics.getDesktopDisplayMode().width, Gdx.graphics.getDesktopDisplayMode().height, true);
}
return false;
}
@Override
public boolean mouseMoved(int screenX, int screenY)
{
pickRay(true, false, screenX, screenY);
return false;
}
@Override
public boolean touchDown(int screenX, int screenY, int pointer, int button)
{
if (button == Buttons.MIDDLE)
{
middleDown = true;
Gdx.input.setCursorCatched(true);
}
else pickRay(false, button == Buttons.LEFT, screenX, screenY);
return false;
}
@Override
public boolean touchUp(int screenX, int screenY, int pointer, int button)
{
if (button == Buttons.MIDDLE)
{
middleDown = false;
Gdx.input.setCursorCatched(false);
}
return false;
}
}
|
package com.jcabi.github;
import java.util.Collections;
import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.junit.Assume;
import org.junit.Test;
/**
* Integration case for {@link Gists}.
* @author Mihai Andronache (amihaiemil@gmail.com)
* @version $Id$
*/
public final class RtGistsITCase {
/**
* RtGists can create a gist.
* @throws Exception If some problem inside
*/
@Test
public void createGist() throws Exception {
final String filename = "filename.txt";
final String content = "content of file";
final Gists gists = gists();
final Gist gist = gists.create(
Collections.singletonMap(filename, content)
);
MatcherAssert.assertThat(
new Gist.Smart(gist).read(filename),
Matchers.equalTo(content)
);
gists.remove(gist.name());
}
/**
* RtGists can iterate all gists.
* @throws Exception If some problem inside
*/
@Test
public void iterateGists() throws Exception {
final Gists gists = gists();
final Gist gist = gists.create(
Collections.singletonMap("test.txt", "content")
);
MatcherAssert.assertThat(
gists.iterate(),
Matchers.hasItem(gist)
);
gists.remove(gist.name());
}
/**
* RtGists can get a single gist.
* @throws Exception If some problem inside
*/
@Test
public void singleGist() throws Exception {
final String filename = "single-name.txt";
final Gists gists = gists();
final Gist gist = gists.create(
Collections.singletonMap(filename, "body")
);
MatcherAssert.assertThat(
gists.get(gist.name()),
Matchers.sameInstance(gist)
);
gists.remove(gist.name());
}
/**
* This tests that RtGists can remove a gist by name.
* @throws Exception - if something goes wrong.
*/
@Test
public void removesGistByName() throws Exception {
final Gists gists = gists();
final Gist gist = gists.create(
Collections.singletonMap("fileName.txt", "content of test file")
);
MatcherAssert.assertThat(
gists.iterate(),
Matchers.notNullValue()
);
gists.remove(gist.json().getString("id"));
MatcherAssert.assertThat(
gists.iterate(),
Matchers.not(Matchers.hasItem(gist))
);
}
/**
* Return gists to test.
* @return Gists
* @throws Exception If some problem inside
*/
private static Gists gists() throws Exception {
final String key = System.getProperty("failsafe.github.key");
Assume.assumeThat(key, Matchers.notNullValue());
return new RtGithub(key).gists();
}
}
|
package guitests;
import javafx.scene.control.ComboBox;
import javafx.scene.input.KeyCode;
import org.junit.Test;
import ui.UI;
import ui.components.KeyboardShortcuts;
import ui.listpanel.ListPanel;
import util.PlatformEx;
import util.events.IssueSelectedEventHandler;
import util.events.PanelClickedEventHandler;
import util.events.testevents.UIComponentFocusEvent;
import util.events.testevents.UIComponentFocusEventHandler;
import static org.junit.Assert.assertEquals;
import static ui.components.KeyboardShortcuts.*;
public class KeyboardShortcutsTest extends UITest {
private UIComponentFocusEvent.EventType uiComponentFocusEventType;
private int selectedIssueId;
private int panelIndex;
@Test
public void keyboardShortcutsTest() {
UI.events.registerEvent((IssueSelectedEventHandler) e -> selectedIssueId = e.id);
UI.events.registerEvent((UIComponentFocusEventHandler) e -> uiComponentFocusEventType = e.eventType);
UI.events.registerEvent((PanelClickedEventHandler) e -> panelIndex = e.panelIndex);
clearSelectedIssueId();
clearUiComponentFocusEventType();
clearPanelIndex();
// maximize
assertEquals(false, stage.getWidth() > 500);
press(MAXIMIZE_WINDOW);
assertEquals(true, stage.getWidth() > 500);
// mid-sized window
press(DEFAULT_SIZE_WINDOW);
assertEquals(false, stage.getWidth() > 500);
// jump from panel focus to first issue
// - This is because on startup focus is on panel and not on filter box
press(JUMP_TO_FIRST_ISSUE);
assertEquals(10, selectedIssueId);
clearSelectedIssueId();
// jump from issue list to filter box
press(JUMP_TO_FILTER_BOX);
assertEquals(UIComponentFocusEvent.EventType.FILTER_BOX, uiComponentFocusEventType);
clearUiComponentFocusEventType();
// jump from filter box to first issue
// - To ensure shortcut works from filter box, too
press(JUMP_TO_FIRST_ISSUE);
press(JUMP_TO_FILTER_BOX);
assertEquals(10, selectedIssueId);
clearSelectedIssueId();
// jump to first issue using number key(1) or ENTER
push(KeyCode.ESCAPE);
press(JUMP_TO_NTH_ISSUE_KEYS.get(1));
PlatformEx.waitOnFxThread();
assertEquals(10, selectedIssueId);
clearSelectedIssueId();
press(JUMP_TO_FILTER_BOX);
assertEquals(UIComponentFocusEvent.EventType.FILTER_BOX, uiComponentFocusEventType);
clearUiComponentFocusEventType();
push(KeyCode.ESCAPE);
press(JUMP_TO_NTH_ISSUE_KEYS.get(2));
PlatformEx.waitOnFxThread();
assertEquals(9, selectedIssueId);
clearSelectedIssueId();
push(KeyCode.ESCAPE);
press(JUMP_TO_NTH_ISSUE_KEYS.get(3));
PlatformEx.waitOnFxThread();
assertEquals(8, selectedIssueId);
clearSelectedIssueId();
push(KeyCode.ESCAPE);
press(JUMP_TO_NTH_ISSUE_KEYS.get(4));
PlatformEx.waitOnFxThread();
assertEquals(7, selectedIssueId);
clearSelectedIssueId();
push(KeyCode.ESCAPE);
press(JUMP_TO_NTH_ISSUE_KEYS.get(5));
PlatformEx.waitOnFxThread();
assertEquals(6, selectedIssueId);
clearSelectedIssueId();
push(KeyCode.ESCAPE);
press(JUMP_TO_NTH_ISSUE_KEYS.get(6));
PlatformEx.waitOnFxThread();
assertEquals(5, selectedIssueId);
clearSelectedIssueId();
push(KeyCode.ESCAPE);
press(JUMP_TO_NTH_ISSUE_KEYS.get(7));
PlatformEx.waitOnFxThread();
assertEquals(4, selectedIssueId);
clearSelectedIssueId();
push(KeyCode.ESCAPE);
press(JUMP_TO_NTH_ISSUE_KEYS.get(8));
PlatformEx.waitOnFxThread();
assertEquals(3, selectedIssueId);
clearSelectedIssueId();
push(KeyCode.ESCAPE);
press(JUMP_TO_NTH_ISSUE_KEYS.get(9));
PlatformEx.waitOnFxThread();
assertEquals(2, selectedIssueId);
clearSelectedIssueId();
// jump to last issue
push(KeyCode.END);
assertEquals(1, selectedIssueId);
clearSelectedIssueId();
// jump to first issue
push(KeyCode.HOME);
sleep(1000);
assertEquals(10, selectedIssueId);
clearSelectedIssueId();
push(getKeyCode("DOWN_ISSUE"));
assertEquals(9, selectedIssueId);
clearSelectedIssueId();
push(getKeyCode("DOWN_ISSUE"));
assertEquals(8, selectedIssueId);
clearSelectedIssueId();
push(getKeyCode("UP_ISSUE"));
assertEquals(9, selectedIssueId);
clearSelectedIssueId();
press(CREATE_RIGHT_PANEL);
press(JUMP_TO_FIRST_ISSUE);
push(getKeyCode("RIGHT_PANEL"));
assertEquals(0, panelIndex);
clearPanelIndex();
push(getKeyCode("LEFT_PANEL"));
assertEquals(1, panelIndex);
clearPanelIndex();
push(getKeyCode("RIGHT_PANEL"));
assertEquals(0, panelIndex);
clearPanelIndex();
push(getKeyCode("LEFT_PANEL"));
assertEquals(1, panelIndex);
clearPanelIndex();
// remove focus from repo selector
ComboBox<String> repoSelectorComboBox = find("#repositorySelector");
click(repoSelectorComboBox);
assertEquals(true, repoSelectorComboBox.isFocused());
press(KeyCode.ESCAPE).release(KeyCode.ESCAPE);
assertEquals(false, repoSelectorComboBox.isFocused());
clearUiComponentFocusEventType();
// switch default repo tests
assertEquals(1, repoSelectorComboBox.getItems().size());
// setup - add a new repo
click(repoSelectorComboBox);
selectAll();
type("dummy1/dummy1");
push(KeyCode.ENTER);
PlatformEx.waitOnFxThread();
assertEquals(2, repoSelectorComboBox.getItems().size());
assertEquals(repoSelectorComboBox.getValue(), "dummy1/dummy1");
// test shortcut on repo dropdown
doubleClick(repoSelectorComboBox);
pushKeys(SWITCH_DEFAULT_REPO);
// wait for issue 9 to appear then click on it
// issue 9 is chosen instead of issue 10 due to a problem with finding issue 10's node
waitUntilNodeAppears("#dummy/dummy_col1_9");
assertEquals(repoSelectorComboBox.getValue(), "dummy/dummy");
// test shortcut when focus is on panel
click("#dummy/dummy_col1_9");
press(SWITCH_DEFAULT_REPO);
PlatformEx.waitOnFxThread();
assertEquals(repoSelectorComboBox.getValue(), "dummy1/dummy1");
// test shortcut when focus is on issue list
press(JUMP_TO_NTH_ISSUE_KEYS.get(1));
press(SWITCH_DEFAULT_REPO);
PlatformEx.waitOnFxThread();
assertEquals(repoSelectorComboBox.getValue(), "dummy/dummy");
// mark as read
ListPanel issuePanel = find("#dummy/dummy_col1");
// mark as read an issue that has another issue below it
push(KeyCode.HOME);
// focus should change to the issue below
int issueIdBeforeMark = selectedIssueId;
int issueIdExpected = issueIdBeforeMark - 1;
push(getKeyCode("MARK_AS_READ"));
PlatformEx.waitOnFxThread();
assertEquals(issueIdExpected, selectedIssueId);
push(getKeyCode("UP_ISSUE")); // required since focus has changed to next issue
assertEquals(true, issuePanel.getSelectedElement().isPresent());
assertEquals(true, issuePanel.getSelectedElement().get().getIssue().isCurrentlyRead());
// mark as read an issue at the bottom
push(KeyCode.END);
push(getKeyCode("MARK_AS_READ"));
// focus should remain at bottom issue
assertEquals(1, selectedIssueId);
assertEquals(true, issuePanel.getSelectedElement().isPresent());
assertEquals(true, issuePanel.getSelectedElement().get().getIssue().isCurrentlyRead());
// mark as unread
push(getKeyCode("MARK_AS_UNREAD"));
assertEquals(true, issuePanel.getSelectedElement().isPresent());
assertEquals(false, issuePanel.getSelectedElement().get().getIssue().isCurrentlyRead());
clearSelectedIssueId();
// testing corner case for mark as read where there is only one issue displayed
click("#dummy/dummy_col1_filterTextField");
type("id:5");
push(KeyCode.ENTER);
press(JUMP_TO_FIRST_ISSUE);
push(getKeyCode("MARK_AS_READ"));
// focus should remain at the only issue shown
assertEquals(5, selectedIssueId);
// minimize window
press(MINIMIZE_WINDOW);
assertEquals(true, stage.isIconified());
}
public KeyCode getKeyCode(String shortcut) {
return KeyCode.getKeyCode(KeyboardShortcuts.getDefaultKeyboardShortcuts().get(shortcut));
}
public void clearSelectedIssueId() {
selectedIssueId = 0;
}
public void clearPanelIndex() {
panelIndex = -1;
}
public void clearUiComponentFocusEventType() {
uiComponentFocusEventType = UIComponentFocusEvent.EventType.NONE;
}
}
|
package org.mahjong4j.hands;
import org.junit.Before;
import org.junit.Test;
import org.mahjong4j.MahjongTileOverFlowException;
import org.mahjong4j.tile.MahjongTile;
import java.util.ArrayList;
import java.util.List;
import static org.junit.Assert.*;
/**
* @author yu1ro
*/
public class ToitsuTest {
Toitsu toitsu1;
Toitsu toitsu2;
Toitsu toitsuF;
@Before
public void setUp() throws Exception {
toitsu1 = new Toitsu(MahjongTile.M1);
toitsu2 = new Toitsu(MahjongTile.M1, MahjongTile.M1);
toitsuF = new Toitsu(MahjongTile.M1, MahjongTile.M2);
}
@Test
public void testCheck() throws Exception {
assertTrue(Toitsu.check(MahjongTile.P1, MahjongTile.P1));
assertFalse(Toitsu.check(MahjongTile.P1, MahjongTile.P4));
}
@Test
public void testGetTile() throws Exception {
assertEquals(MahjongTile.M1, toitsu1.getTile());
assertEquals(MahjongTile.M1, toitsu2.getTile());
assertEquals(null, toitsuF.getTile());
}
@Test
public void testGetIsMentsu() throws Exception {
assertTrue(toitsu1.getIsMentsu());
assertTrue(toitsu2.getIsMentsu());
assertFalse(toitsuF.getIsMentsu());
}
@Test
public void testGetIsOpen() throws Exception {
assertFalse(toitsu1.getIsOpen());
assertFalse(toitsu2.getIsOpen());
assertFalse(toitsuF.getIsOpen());
}
@Test
public void testFindJantoCandidate() throws Exception {
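// 34-slot tile count array (presumably man 1-9, pin 1-9, sou 1-9, the four winds,
// then the three dragons); the single slot with count 2 is the expected janto (pair) candidate.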
int[] tiles = {
1, 1, 1, 1, 1, 1, 1, 1, 1,
0, 1, 1, 1, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0,
0, 2, 0
};
List<Toitsu> expected;
List<Toitsu> actual = Toitsu.findJantoCandidate(tiles);
assertEquals(1, actual.size());
expected = new ArrayList<>(7);
expected.add(new Toitsu(MahjongTile.HAK));
assertEquals(MahjongTile.HAK, expected.get(0).getTile());
}
@Test(expected = MahjongTileOverFlowException.class)
public void testThrow() throws Exception {
int[] tiles = {
0, 2, 3, 4, 5, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0,
0, 0, 0
};
Toitsu.findJantoCandidate(tiles);
}
}
|
//@@author A0093896H
package seedu.todo.logic;
import static seedu.todo.commons.core.Messages.MESSAGE_INVALID_COMMAND_FORMAT;
import org.junit.Test;
import seedu.todo.commons.exceptions.IllegalValueException;
import seedu.todo.logic.commands.MarkCommand;
import seedu.todo.model.DoDoBird;
import seedu.todo.model.task.Completion;
import seedu.todo.model.task.Task;
/**
* Test class for the mark command's logic
*/
public class MarkLogicTest extends CommandLogicTest {
@Test
public void execute_mark_successful_tmr() throws IllegalValueException {
Task toBeMarked = helper.generateFullTaskTmr(0);
expectedTDL.addTask(toBeMarked);
toBeMarked.setCompletion(new Completion(true));
model.addTask(helper.generateFullTaskTmr(0));
assertCommandBehavior("mark 1",
String.format(MarkCommand.MESSAGE_SUCCESS, 1, toBeMarked),
expectedTDL,
(new DoDoBird()).getTaskList());
}
//@@author A0138967J
@Test
public void execute_mark_successful_today() throws IllegalValueException {
Task toBeMarkedToday = helper.generateFullTaskToday(0);
expectedTDL.addTask(toBeMarkedToday);
toBeMarkedToday.setCompletion(new Completion(true));
model.addTask(helper.generateFullTask(0));
assertCommandBehavior("mark 1",
String.format(MarkCommand.MESSAGE_SUCCESS, 1, toBeMarkedToday),
expectedTDL,
(new DoDoBird()).getTaskList());
}
//@@author
@Test
public void execute_markInvalidArgsFormat_errorMessageShown() throws IllegalValueException {
String expectedMessage = String.format(MESSAGE_INVALID_COMMAND_FORMAT, MarkCommand.MESSAGE_USAGE);
assertIncorrectIndexFormatBehaviorForCommand("mark", expectedMessage);
}
@Test
public void execute_markIndexNotFound_errorMessageShown() throws IllegalValueException {
assertIndexNotFoundBehaviorForCommand("mark");
}
}
|
package org.jboss.as.server.manager;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.xml.stream.XMLInputFactory;
import org.jboss.as.domain.controller.DomainController;
import org.jboss.as.model.AbstractHostModelUpdate;
import org.jboss.as.model.DomainModel;
import org.jboss.as.model.Element;
import org.jboss.as.model.HostModel;
import org.jboss.as.model.JvmElement;
import org.jboss.as.model.ManagementElement;
import org.jboss.as.model.RemoteDomainControllerElement;
import org.jboss.as.model.ServerElement;
import org.jboss.as.model.ServerGroupDeploymentElement;
import org.jboss.as.model.ServerGroupElement;
import org.jboss.as.model.ServerModel;
import org.jboss.as.model.socket.InterfaceElement;
import org.jboss.as.process.ProcessManagerProtocol.OutgoingPmCommand;
import org.jboss.as.process.ProcessManagerProtocol.OutgoingPmCommandHandler;
import org.jboss.as.process.RespawnPolicy;
import org.jboss.as.server.manager.DirectServerManagerCommunicationHandler.ShutdownListener;
import org.jboss.as.server.manager.ServerManagerProtocol.Command;
import org.jboss.as.server.manager.ServerManagerProtocol.ServerToServerManagerCommandHandler;
import org.jboss.as.server.manager.ServerManagerProtocol.ServerToServerManagerProtocolCommand;
import org.jboss.as.server.manager.management.DomainControllerOperationHandler;
import org.jboss.as.server.manager.management.ManagementCommunicationService;
import org.jboss.as.server.manager.management.ManagementCommunicationServiceInjector;
import org.jboss.as.server.manager.management.ManagementOperationHandlerService;
import org.jboss.as.server.manager.management.ServerManagerOperationHandler;
import org.jboss.as.services.net.NetworkInterfaceBinding;
import org.jboss.as.services.net.NetworkInterfaceService;
import org.jboss.as.threads.ThreadFactoryService;
import org.jboss.logging.Logger;
import org.jboss.msc.inject.InjectionException;
import org.jboss.msc.inject.Injector;
import org.jboss.msc.service.AbstractServiceListener;
import org.jboss.msc.service.BatchBuilder;
import org.jboss.msc.service.BatchServiceBuilder;
import org.jboss.msc.service.Service;
import org.jboss.msc.service.ServiceActivatorContext;
import org.jboss.msc.service.ServiceActivatorContextImpl;
import org.jboss.msc.service.ServiceContainer;
import org.jboss.msc.service.ServiceController;
import org.jboss.msc.service.ServiceName;
import org.jboss.msc.service.ServiceRegistryException;
import org.jboss.msc.service.StartContext;
import org.jboss.msc.service.StartException;
import org.jboss.msc.service.StopContext;
import org.jboss.msc.value.InjectedValue;
import org.jboss.staxmapper.XMLMapper;
/**
* A ServerManager.
*
* @author Brian Stansberry
* @author Kabir Khan
*/
public class ServerManager implements ShutdownListener {
private static final Logger log = Logger.getLogger("org.jboss.server.manager");
static final ServiceName SERVICE_NAME_BASE = ServiceName.JBOSS.append("server", "manager");
private final ServerManagerEnvironment environment;
private final StandardElementReaderRegistrar extensionRegistrar;
private final File hostXML;
private final ProcessManagerCommandHandler processManagerCommmandHandler;
private final FileRepository fileRepository;
private volatile ProcessManagerSlave processManagerSlave;
private final ServerToServerManagerCommandHandler serverCommandHandler = new ServerCommandHandler();
private volatile DirectServerCommunicationListener directServerCommunicationListener;
private HostModel hostConfig;
private DomainModel domainConfig;
private DomainControllerConnection domainControllerConnection;
private ServerMaker serverMaker;
private final ServiceContainer serviceContainer = ServiceContainer.Factory.create();
private final AtomicBoolean serversStarted = new AtomicBoolean();
private final AtomicBoolean stopping = new AtomicBoolean();
// TODO figure out concurrency controls
// private final Lock hostLock = new ReentrantLock();
// private final Lock domainLock = new ReentrantLock();
private final Map<String, Server> servers = Collections.synchronizedMap(new HashMap<String, Server>());
public ServerManager(ServerManagerEnvironment environment) {
if (environment == null) {
throw new IllegalArgumentException("bootstrapConfig is null");
}
this.environment = environment;
this.hostXML = new File(environment.getDomainConfigurationDir(), "host.xml");
this.extensionRegistrar = StandardElementReaderRegistrar.Factory.getRegistrar();
this.processManagerCommmandHandler = new ProcessManagerCommandHandler();
this.fileRepository = new LocalFileRepository(environment);
}
public String getName() {
return hostConfig.getName();
}
/**
* Starts the ServerManager. This brings this ServerManager to the point where
* it has processed its own configuration file, registered with the DomainController
* (including starting one if the host configuration specifies that),
* obtained the domain configuration, and launched any systems needed to make
* this process manageable by remote clients.
*/
public void start() {
this.hostConfig = parseHost();
// TODO set up logging for this process based on config in Host
//Start listening for server communication on our socket
launchDirectServerCommunicationHandler();
// Start communication with the ProcessManager. This also
// creates a daemon thread to keep this process alive
launchProcessManagerSlave();
initializeServerMaker();
final BatchBuilder batchBuilder = serviceContainer.batchBuilder();
batchBuilder.addListener(new AbstractServiceListener<Object>() {
@Override
public void serviceFailed(ServiceController<?> serviceController, StartException reason) {
log.errorf(reason, "Service [%s] failed.", serviceController.getName());
}
});
final ServiceActivatorContext serviceActivatorContext = new ServiceActivatorContextImpl(batchBuilder);
// Always activate the management port
activateManagementCommunication(serviceActivatorContext);
if (hostConfig.getLocalDomainControllerElement() != null) {
activateLocalDomainController(serviceActivatorContext);
} else {
activateRemoteDomainControllerConnection(serviceActivatorContext);
}
try {
batchBuilder.install();
} catch (ServiceRegistryException e) {
throw new RuntimeException(e);
}
}
private void initializeServerMaker() {
CommunicationVariables variables = new CommunicationVariables(environment, this);
this.serverMaker = new ServerMaker(environment, processManagerSlave, processManagerCommmandHandler, variables);
}
private String generateDeploymentPath(final ServerGroupDeploymentElement deployment) {
final String id = deployment.getSha1HashAsHexString();
return id.substring(0, 2) + "/" + id.substring(2);
}
/**
* The connection from a server to SM was closed
*/
@Override
public void connectionClosed(String processName) {
if (stopping.get())
return;
Server server = servers.get(processName);
if (server == null) {
log.errorf("No server called %s with a closed connection", processName);
return;
}
ServerState state = server.getState();
if (state == ServerState.STOPPED || state == ServerState.STOPPING || state == ServerState.MAX_FAILED) {
log.debugf("Ignoring closed connection for server %s in the %s state", processName, state);
return;
}
log.infof("Server %s connection was closed, tell it to reconnect", processName);
try {
processManagerSlave.reconnectServer(processName, directServerCommunicationListener.getSmAddress(), directServerCommunicationListener.getSmPort());
} catch (IOException e) {
if (stopping.get())
return;
log.error("Failed to send RECONNECT_SERVER", e);
}
}
/**
* Callback for when we receive the SERVER_AVAILABLE message from a Server
*
* @param serverName the name of the server
*/
void availableServer(String serverName) {
try {
Server server = servers.get(serverName);
if (server == null) {
log.errorf("No server called %s available", serverName);
return;
}
checkState(server, ServerState.BOOTING);
server.setState(ServerState.AVAILABLE);
log.infof("Sending config to server %s", serverName);
server.start();
server.setState(ServerState.STARTING);
} catch (IOException e) {
log.errorf(e, "Could not start server %s", serverName);
}
}
/**
* Callback for when we receive the SHUTDOWN message from PM
*/
public void stop() {
if (stopping.getAndSet(true)) {
return;
}
log.info("Stopping ServerManager");
directServerCommunicationListener.shutdown();
if(domainControllerConnection != null) {
domainControllerConnection.unregister();
}
serviceContainer.shutdown();
// FIXME stop any local DomainController, stop other internal SM services
}
/**
* Callback for when we receive the SERVER_STOPPED message from a Server
*
* @param serverName the name of the server
*/
void stoppedServer(String serverName) {
if (stopping.get())
return;
Server server = servers.get(serverName);
if (server == null) {
log.errorf("No server called %s exists for stop", serverName);
return;
}
checkState(server, ServerState.STOPPING);
try {
processManagerSlave.stopProcess(serverName);
} catch (IOException e) {
if (stopping.get())
return;
log.errorf(e, "Could not stop server %s in PM", serverName);
}
try {
processManagerSlave.removeProcess(serverName);
} catch (IOException e) {
if (stopping.get())
return;
log.errorf(e, "Could not stop server %s", serverName);
}
}
/**
* Callback for when we receive the SERVER_STARTED message from a Server
*
* @param serverName the name of the server
*/
void startedServer(String serverName) {
Server server = servers.get(serverName);
if (server == null) {
log.errorf("No server called %s exists for start", serverName);
return;
}
checkState(server, ServerState.STARTING);
server.setState(ServerState.STARTED);
}
/**
* Callback for when we receive the SERVER_START_FAILED message from a Server
*
* @param serverName the name of the server
*/
void failedStartServer(String serverName) {
Server server = servers.get(serverName);
if (server == null) {
log.errorf("No server called %s exists", serverName);
return;
}
checkState(server, ServerState.STARTING);
server.setState(ServerState.FAILED);
respawn(server);
}
void reconnectedServer(String serverName, ServerState state) {
Server server = servers.get(serverName);
if (server == null) {
log.errorf("No server found for reconnected server %s", serverName);
return;
}
server.setState(state);
if (state.isRestartOnReconnect()) {
try {
server.start();
} catch (IOException e) {
log.errorf(e, "Could not start reconnected server %s", server.getServerProcessName());
}
server.setState(ServerState.STARTING);
}
}
private void respawn(Server server){
try {
processManagerSlave.stopProcess(server.getServerProcessName());
} catch (IOException e) {
log.errorf(e, "Error respawning server % s", server.getServerProcessName());
}
RespawnPolicy respawnPolicy = server.getRespawnPolicy();
long timeout = respawnPolicy.getTimeOutMs(server.incrementAndGetRespawnCount());
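// A negative timeout from the respawn policy indicates the maximum number of restart
// attempts has been exceeded: mark the server MAX_FAILED and remove its process.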
if (timeout < 0 ) {
server.setState(ServerState.MAX_FAILED);
try {
processManagerSlave.removeProcess(server.getServerProcessName());
} catch (IOException e) {
log.errorf(e, "Error stopping respawned server % s", server.getServerProcessName());
}
return;
}
//TODO JBAS-8390 Put in actual sleep
//Thread.sleep(timeout);
try {
server.setState(ServerState.BOOTING);
processManagerSlave.startProcess(server.getServerProcessName());
} catch (IOException e) {
log.errorf(e, "Error respawning server % s", server.getServerProcessName());
}
}
public void downServer(String downServerName) {
Server server = servers.get(downServerName);
if (server == null) {
log.errorf("No server called %s exists", downServerName);
return;
}
if (environment.isRestart() && server.getState() == ServerState.BOOTING && environment.getServerManagerPort() == 0) {
//If this was a restarted SM and a server went down while we were down, PM will send the DOWN message. If the port
//is 0, it will be different following a restart so remove and re-add the server with the new port here
JvmElement jvmElement = getServerJvmElement(domainConfig, hostConfig, server.getServerConfig().getServerName());
try {
serverMaker.removeAndAddProcess(server, jvmElement);
} catch (IOException e) {
log.errorf("Error removing and adding process %s", downServerName);
return;
}
try {
processManagerSlave.startProcess(downServerName);
} catch (IOException e) {
// AutoGenerated
throw new RuntimeException(e);
}
} else {
server.setState(ServerState.FAILED);
respawn(server);
}
}
private void launchProcessManagerSlave() {
this.processManagerSlave = ProcessManagerSlaveFactory.getInstance().getProcessManagerSlave(environment, hostConfig, processManagerCommmandHandler);
Thread t = new Thread(this.processManagerSlave.getController(), "Server Manager Process");
t.start();
}
private void launchDirectServerCommunicationHandler() {
try {
this.directServerCommunicationListener = DirectServerCommunicationListener.create(serverCommandHandler, this, environment.getServerManagerAddress(), environment.getServerManagerPort(), 20);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private void activateLocalDomainController(final ServiceActivatorContext serviceActivatorContext) {
try {
final BatchBuilder batchBuilder = serviceActivatorContext.getBatchBuilder();
final XMLMapper mapper = XMLMapper.Factory.create();
extensionRegistrar.registerStandardDomainReaders(mapper);
final DomainController domainController = new DomainController();
batchBuilder.addService(DomainController.SERVICE_NAME, domainController)
.addInjection(domainController.getXmlMapperInjector(), mapper)
.addInjection(domainController.getDomainConfigDirInjector(), environment.getDomainConfigurationDir())
.addDependency(SERVICE_NAME_BASE.append("executor"), ScheduledExecutorService.class, domainController.getScheduledExecutorServiceInjector());
final DomainControllerOperationHandler domainControllerOperationHandler = new DomainControllerOperationHandler();
batchBuilder.addService(DomainControllerOperationHandler.SERVICE_NAME, domainControllerOperationHandler)
.addDependency(DomainController.SERVICE_NAME, DomainController.class, domainControllerOperationHandler.getDomainControllerInjector())
.addDependency(SERVICE_NAME_BASE.append("executor"), ScheduledExecutorService.class, domainControllerOperationHandler.getExecutorServiceInjector())
.addInjection(domainControllerOperationHandler.getLocalFileRepositoryInjector(), fileRepository)
.addDependency(ManagementCommunicationService.SERVICE_NAME, ManagementCommunicationService.class, new ManagementCommunicationServiceInjector(domainControllerOperationHandler));
batchBuilder.addService(SERVICE_NAME_BASE.append("local", "dc", "connection"), Service.NULL)
.addDependency(DomainController.SERVICE_NAME, DomainController.class, new Injector<DomainController>(){
public void inject(DomainController value) throws InjectionException {
setDomainControllerConnection(new LocalDomainControllerConnection(ServerManager.this, domainController, fileRepository));
}
public void uninject() {
setDomainControllerConnection(null);
}
});
} catch (Exception e) {
throw new RuntimeException("Exception starting local domain controller", e);
}
}
private void activateRemoteDomainControllerConnection(final ServiceActivatorContext serviceActivatorContext) {
final BatchBuilder batchBuilder = serviceActivatorContext.getBatchBuilder();
final DomainControllerConnectionService domainControllerClientService = new DomainControllerConnectionService(this, fileRepository, 20, 15L, 10L);
final BatchServiceBuilder<Void> serviceBuilder = batchBuilder.addService(DomainControllerConnectionService.SERVICE_NAME, domainControllerClientService)
.addListener(new AbstractServiceListener<Void>() {
@Override
public void serviceFailed(ServiceController<? extends Void> serviceController, StartException reason) {
log.error("Failed to register with domain controller.", reason);
}
})
.setInitialMode(ServiceController.Mode.IMMEDIATE);
final RemoteDomainControllerElement remoteDomainControllerElement = hostConfig.getRemoteDomainControllerElement();
final InetAddress hostAddress;
try {
hostAddress = InetAddress.getByName(remoteDomainControllerElement.getHost());
} catch (UnknownHostException e) {
throw new RuntimeException("Failed to get remote domain controller address", e);
}
serviceBuilder.addInjection(domainControllerClientService.getDomainControllerAddressInjector(), hostAddress);
serviceBuilder.addInjection(domainControllerClientService.getDomainControllerPortInjector(), remoteDomainControllerElement.getPort());
final ManagementElement managementElement = hostConfig.getManagementElement();
serviceBuilder.addDependency(NetworkInterfaceService.JBOSS_NETWORK_INTERFACE.append(managementElement.getInterfaceName()), NetworkInterfaceBinding.class, domainControllerClientService.getLocalManagementInterfaceInjector());
serviceBuilder.addInjection(domainControllerClientService.getLocalManagementPortInjector(), managementElement.getPort());
serviceBuilder.addDependency(SERVICE_NAME_BASE.append("executor"), ScheduledExecutorService.class, domainControllerClientService.getExecutorServiceInjector());
}
private void activateManagementCommunication(final ServiceActivatorContext serviceActivatorContext) {
final BatchBuilder batchBuilder = serviceActivatorContext.getBatchBuilder();
final ManagementElement managementElement = hostConfig.getManagementElement();
final Set<InterfaceElement> hostInterfaces = hostConfig.getInterfaces();
if(hostInterfaces != null) {
for(InterfaceElement interfaceElement : hostInterfaces) {
if(interfaceElement.getName().equals(managementElement.getInterfaceName())) {
interfaceElement.activate(serviceActivatorContext);
break;
}
}
}
// Add the executor
final ServiceName threadFactoryServiceName = SERVICE_NAME_BASE.append("thread-factory");
batchBuilder.addService(threadFactoryServiceName, new ThreadFactoryService());
final ServiceName executorServiceName = SERVICE_NAME_BASE.append("executor");
// TODO: replace below with a fixed ScheduledThreadPoolService
final InjectedValue<ThreadFactory> threadFactoryValue = new InjectedValue<ThreadFactory>();
batchBuilder.addService(executorServiceName, new Service<ScheduledExecutorService>() {
private ScheduledExecutorService executorService;
public synchronized void start(StartContext context) throws StartException {
executorService = Executors.newScheduledThreadPool(20, threadFactoryValue.getValue());
}
public synchronized void stop(StopContext context) {
executorService.shutdown();
}
public synchronized ScheduledExecutorService getValue() throws IllegalStateException {
return executorService;
}
}).addDependency(threadFactoryServiceName, ThreadFactory.class, threadFactoryValue);
// final ScheduledThreadPoolService executorService = new ScheduledThreadPoolService(20, TimeSpec.DEFAULT_KEEPALIVE);
// batchBuilder.addService(executorServiceName, executorService)
// .addDependency(threadFactoryServiceName, ThreadFactory.class, executorService.getThreadFactoryInjector());
// Add the management communication service
final ManagementCommunicationService managementCommunicationService = new ManagementCommunicationService();
batchBuilder.addService(ManagementCommunicationService.SERVICE_NAME, managementCommunicationService)
.addDependency(NetworkInterfaceService.JBOSS_NETWORK_INTERFACE.append(managementElement.getInterfaceName()), NetworkInterfaceBinding.class, managementCommunicationService.getInterfaceInjector())
.addInjection(managementCommunicationService.getPortInjector(), managementElement.getPort())
.addDependency(executorServiceName, ExecutorService.class, managementCommunicationService.getExecutorServiceInjector())
.setInitialMode(ServiceController.Mode.IMMEDIATE);
// Add the server manager operation handler
final ManagementOperationHandlerService<ServerManagerOperationHandler> operationHandlerService
= new ManagementOperationHandlerService<ServerManagerOperationHandler>(new ServerManagerOperationHandler(this));
batchBuilder.addService(ManagementCommunicationService.SERVICE_NAME.append("server", "manager"), operationHandlerService)
.addDependency(ManagementCommunicationService.SERVICE_NAME, ManagementCommunicationService.class, new ManagementCommunicationServiceInjector(operationHandlerService));
}
void setDomainControllerConnection(final DomainControllerConnection domainControllerConnection) {
this.domainControllerConnection = domainControllerConnection;
if(domainControllerConnection == null) {
return;
}
final DomainModel domainModel = domainControllerConnection.register();
setDomain(domainModel);
}
HostModel getHostConfig() {
return hostConfig;
}
protected HostModel parseHost() {
if (!hostXML.exists()) {
throw new IllegalStateException("File " + hostXML.getAbsolutePath() + " does not exist.");
}
else if (! hostXML.canWrite()) {
throw new IllegalStateException("File " + hostXML.getAbsolutePath() + " is not writeable.");
}
try {
final List<AbstractHostModelUpdate<?>> hostUpdates = new ArrayList<AbstractHostModelUpdate<?>>();
final XMLMapper mapper = XMLMapper.Factory.create();
extensionRegistrar.registerStandardHostReaders(mapper);
mapper.parseDocument(hostUpdates, XMLInputFactory.newInstance().createXMLStreamReader(new BufferedReader(new FileReader(this.hostXML))));
final HostModel hostModel = new HostModel();
for(final AbstractHostModelUpdate<?> update : hostUpdates) {
hostModel.update(update);
}
return hostModel;
} catch (RuntimeException e) {
throw e;
} catch (Exception e) {
throw new RuntimeException("Caught exception during processing of host.xml", e);
}
}
/**
* Set the domain for the server manager. If this is the first time the domain has been set on this instance it will
* also invoke the server launch process.
*
* @param domain The domain configuration
*/
public void setDomain(final DomainModel domain) {
this.domainConfig = domain;
if(serversStarted.compareAndSet(false, true)) {
if (!environment.isRestart()) {
// startServers();
} else {
// reconnectServers();
}
}
}
/**
* Combines information from the domain, server group, host and server levels
* to come up with an overall JVM configuration for a server.
*
* @param domain the domain configuration object
* @param host the host configuration object
* @param serverName the name of the server
* @return the JVM configuration object
*/
private JvmElement getServerJvmElement(DomainModel domain, HostModel host, String serverName) {
ServerElement server = host.getServer(serverName);
if (server == null)
throw new IllegalStateException("Server " + serverName + " is not listed in Host");
String serverGroupName = server.getServerGroup();
ServerGroupElement serverGroup = domain.getServerGroup(serverGroupName);
if (serverGroup == null)
throw new IllegalStateException("Server group" + serverGroupName + " is not listed in Domain");
JvmElement serverVM = server.getJvm();
String serverVMName = serverVM != null ? serverVM.getName() : null;
JvmElement groupVM = serverGroup.getJvm();
String groupVMName = groupVM != null ? groupVM.getName() : null;
String ourVMName = serverVMName != null ? serverVMName : groupVMName;
if (ourVMName == null) {
throw new IllegalStateException("Neither " + Element.SERVER_GROUP.getLocalName() +
" nor " + Element.SERVER.getLocalName() + " has declared a JVM configuration; one or the other must");
}
if (!ourVMName.equals(groupVMName)) {
// the server setting replaced the group, so ignore group
groupVM = null;
}
JvmElement hostVM = host.getJvm(ourVMName);
return new JvmElement(groupVM, hostVM, serverVM);
}
Server getServer(String name) {
return servers.get(name);
}
static String getServerProcessName(ServerModel serverConfig) {
return getServerProcessName(serverConfig.getServerName());
}
static String getServerProcessName(String name) {
return ServerMaker.SERVER_PROCESS_NAME_PREFIX + name;
}
private void checkState(Server server, ServerState expected) {
ServerState state = server.getState();
if (state != expected) {
log.warnf("Server %s is not in the expected %s state: %s" , server.getServerProcessName(), expected, state);
}
}
public Map<String, Server> getServers() {
synchronized (servers) {
return Collections.unmodifiableMap(servers);
}
}
DirectServerCommunicationListener getDirectServerCommunicationListener() {
return directServerCommunicationListener;
}
/**
* Callback for the {@link ServerToServerManagerProtocolCommand#handleCommand(String, ServerToServerManagerCommandHandler, Command)} calls
*/
private class ServerCommandHandler extends ServerToServerManagerCommandHandler{
@Override
public void handleServerAvailable(String sourceProcessName) {
ServerManager.this.availableServer(sourceProcessName);
}
@Override
public void handleServerReconnectStatus(String sourceProcessName, ServerState state) {
ServerManager.this.reconnectedServer(sourceProcessName, state);
}
@Override
public void handleServerStartFailed(String sourceProcessName) {
ServerManager.this.failedStartServer(sourceProcessName);
}
@Override
public void handleServerStarted(String sourceProcessName) {
ServerManager.this.startedServer(sourceProcessName);
}
@Override
public void handleServerStopped(String sourceProcessName) {
ServerManager.this.stoppedServer(sourceProcessName);
}
}
private class ProcessManagerCommandHandler implements OutgoingPmCommandHandler {
@Override
public void handleShutdown() {
ServerManager.this.stop();
Thread t = new Thread(new Runnable() {
@Override
public void run() {
SystemExiter.exit(0);
}
}, "Server Manager Exit Thread");
t.start();
}
@Override
public void handleReconnectServerManager(String addr, String port) {
log.warn("Wrong command received " + OutgoingPmCommand.RECONNECT_SERVER_MANAGER + " for server manager");
}
@Override
public void handleDown(String serverName) {
ServerManager.this.downServer(serverName);
}
// public void registerServer(String serverName, Server server) {
//     if (serverName == null) {
//         throw new IllegalArgumentException("serverName is null");
//     }
//     if (server == null) {
//         throw new IllegalArgumentException("server is null");
//     }
//     servers.put(serverName, server);
// }
// public void unregisterServer(String serverName) {
//     if (serverName == null) {
//         throw new IllegalArgumentException("serverName is null");
//     }
//     servers.remove(serverName);
// }
}
}
|
package com.exalttech.trex;
import com.exalttech.trex.application.TrexApp;
import com.exalttech.trex.core.ConnectionManager;
import javafx.scene.Node;
import javafx.scene.control.Button;
import javafx.scene.control.ComboBox;
import javafx.scene.control.Label;
import javafx.scene.control.TextField;
import javafx.scene.input.KeyCode;
import javafx.stage.Stage;
import org.junit.Assert;
import org.testfx.framework.junit.ApplicationTest;
import java.util.Optional;
import java.util.concurrent.*;
public class TestBase extends ApplicationTest {
private ExecutorService executor = Executors.newSingleThreadExecutor();
enum MenuType {
MENU,
SHORTCUT,
TOOLBAR
}
static private final long TIMEOUT_S = 60;
static private final long POLLING_INTERVAL_MS = 100;
static private final long RENDER_DELAY_MS = 2500;
private TrexApp app;
@Override
public void start(Stage stage) throws Exception {
TrexApp.setPrimaryStage(stage);
app = new TrexApp();
app.start(stage);
}
@Override
public void stop() throws Exception {
app.stop();
}
protected void setText(final String query, final String text) {
final Node node = lookup(query).query();
if (node instanceof TextField) {
((TextField) node).setText(text);
} else if (node instanceof ComboBox) {
((ComboBox) node).getEditor().setText(text);
} else {
Assert.fail("Unsupported node type");
}
}
String getText(final String query) {
final Node node = lookup(query).query();
if (node instanceof Label) {
return ((Label) node).getText();
} else {
Assert.fail("Unsupported node type");
}
return null;
}
protected String getTRexServerIP() {
return "trex-host";
}
protected String getTRexSyncPort() {
return "4501";
}
protected String getTRexAsyncPort() {
return "4500";
}
protected String getTRexScapyPort() {
return "4507";
}
protected String getTRexDefaultUser() {
return "Test";
}
boolean connect(final MenuType menuType, final String user) {
connectOrDisconnectAction(menuType);
sleep(RENDER_DELAY_MS);
setText("#connection-dialog-ip", getTRexServerIP());
clickOn("#connection-dialog-advance");
setText("#connection-dialog-sync-port", getTRexSyncPort());
setText("#connection-dialog-async-port", getTRexAsyncPort());
setText("#connection-dialog-scapy-port", getTRexScapyPort());
setText("#connection-dialog-user", user);
clickOn("#connection-dialog-connect");
final Future<Boolean> future = executor.submit(() -> {
while (!isConnected()) {
sleep(POLLING_INTERVAL_MS);
}
return true;
});
boolean connected = false;
try {
connected = future.get(TIMEOUT_S, TimeUnit.SECONDS);
} catch (Exception e) {
future.cancel(true);
}
if (connected) {
sleep(RENDER_DELAY_MS);
}
return connected;
}
private boolean isConnected() {
return !isDisconnected();
}
protected Boolean isDisconnected() {
Label connection = lookup("#main-server-status").query();
return "disconnected".equalsIgnoreCase(connection.getText());
}
boolean connect(final MenuType menuType) {
return connect(menuType, getTRexDefaultUser());
}
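// Runs the given action, then polls resultValidator until it returns true or the timeout
// expires: returns true on success, false on timeout or validator error, and null if the
// action itself threw an exception.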
protected Boolean tryCall(Runnable action, Callable<Boolean> resultValidator) {
final Future<Boolean> future = executor.submit(() -> {
try {
action.run();
} catch (Exception e) {
return null;
}
while (!resultValidator.call()) {
sleep(POLLING_INTERVAL_MS);
}
return true;
});
try {
return future.get(TIMEOUT_S, TimeUnit.SECONDS);
} catch (Exception e) {
future.cancel(true);
return false;
}
}
boolean disconnect(final MenuType menuType) {
connectOrDisconnectAction(menuType);
sleep(RENDER_DELAY_MS);
final Future<Boolean> future = executor.submit(() -> {
while (ConnectionManager.getInstance().isConnected()) {
sleep(POLLING_INTERVAL_MS);
}
return true;
});
boolean disconnected = false;
try {
disconnected = future.get(TIMEOUT_S, TimeUnit.SECONDS);
} catch (Exception e) {
future.cancel(true);
}
if (disconnected) {
sleep(RENDER_DELAY_MS);
}
return disconnected;
}
private void connectOrDisconnectAction(MenuType menuType) {
switch (menuType) {
case MENU:
clickOn("#main-menu");
clickOn("#main-menu-connect");
break;
case SHORTCUT:
push(KeyCode.CONTROL, KeyCode.C);
break;
case TOOLBAR:
clickOn("#main-toolbar-connect");
break;
}
}
void resetAllPorts() {
resetPort("Port 0");
resetPort("Port 1");
}
private void resetPort(String port) {
clickOn(port);
tryCall(
() -> clickOn("#resetBtn"),
() -> {
Button resetBtn = lookup("#resetBtn").query();
return resetBtn.getText().equalsIgnoreCase("Reset");
}
);
}
public boolean isVisible(String query) {
Optional<Node> result = lookup(query).tryQuery();
return result.isPresent() && result.get().isVisible();
}
}
|
// URLFixer.java
package ed.appserver;
import java.io.*;
import ed.util.*;
import ed.net.httpserver.*;
public class URLFixer {
public static final boolean NOCDN = Config.get().getBoolean( "NO-CDN" );
public URLFixer( HttpRequest request , AppRequest ar ){
this( getStaticPrefix( request , ar ) , getStaticSuffix( request , ar ) , ar.getContext() );
_ar = ar;
}
public URLFixer( String cdnPrefix , String cdnSuffix , AppContext context ){
_cdnPrefix = cdnPrefix;
_cdnSuffix = cdnSuffix;
_context = context;
}
public String fix( String url ){
StringBuilder buf = new StringBuilder();
fix( url , buf );
return buf.toString();
}
public void fix( String url , Appendable a ){
// don't rewrite w/in js files
if (_ar != null && _ar.getResponse() != null && _ar.getResponse().getContentType() != null) {
String content_type = _ar.getResponse().getContentType();
if (content_type.indexOf("javascript") != -1 || content_type.indexOf("ecmascript") != -1) {
try {
a.append(url);
}
catch (IOException ioe) {
throw new RuntimeException("couldn't append", ioe);
}
return;
}
}
if ( url == null )
return;
if ( url.length() == 0 )
return;
// parse out options
boolean nocdn = false;
boolean forcecdn = false;
if ( url.startsWith( "NOCDN" ) ){
nocdn = true;
url = url.substring( 5 );
}
else if ( url.startsWith( "CDN/" ) ){
forcecdn = true;
url = url.substring( 3 );
}
boolean doVersioning = true;
// weird special cases
if ( ! url.startsWith( "/" ) ){
if ( _ar == null || url.startsWith( "http:
nocdn = true;
doVersioning = false;
}
else {
url = _ar.getDirectory() + url;
}
}
if ( url.startsWith( "
nocdn = true;
doVersioning = false;
}
// setup
String uri = url;
int questionIndex = url.indexOf( "?" );
if ( questionIndex >= 0 )
uri = uri.substring( 0 , questionIndex );
String cdnTags = null;
if ( uri.equals( "/~f" ) || uri.equals( "/~~/f" ) ){
cdnTags = ""; // TODO: should i put a version or timestamp here?
}
else {
cdnTags = _cdnSuffix;
if ( url.contains( "ctxt=" ) || cdnTags == null )
cdnTags = "";
if ( doVersioning && _context != null && ! url.contains( "lm=" ) ){
File f = _context.getFileSafe( uri );
if ( f == null )
cdnTags = _urlAppendNameValue( cdnTags , "lm=cantfind" );
else if ( ! f.exists() )
cdnTags = _urlAppendNameValue( cdnTags , "lm=doesntexist" );
else
cdnTags = _urlAppendNameValue( cdnTags , "lm=" + f.lastModified() );
}
}
// print
try {
if ( forcecdn || ( ! nocdn && cdnTags != null ) )
a.append( cdnPrefix() );
a.append( url );
if ( cdnTags != null && cdnTags.length() > 0 ){
if ( questionIndex < 0 )
a.append( "?" );
else
a.append( "&" );
a.append( cdnTags );
}
}
catch ( IOException ioe ){
throw new RuntimeException( "couldn't append" , ioe );
}
}
public String getCDNPrefix(){
return cdnPrefix();
}
public String getCDNSuffix(){
return _cdnSuffix;
}
public String setCDNPrefix( String s ){
_cdnPrefix = s;
return cdnPrefix();
}
public String setCDNSuffix( String s ){
_cdnSuffix = s;
return _cdnSuffix;
}
String cdnPrefix(){
if ( _ar != null && _ar.isScopeInited() ){
Object foo = _ar.getScope().get( "CDN" );
if ( foo != null )
return foo.toString();
}
return _cdnPrefix;
}
static String getStaticPrefix( HttpRequest request , AppRequest ar ){
if ( NOCDN )
return "";
String host = ar.getHost();
if ( host == null )
return "";
if ( request.getPort() > 0 )
return "";
if ( request.getHeader( "X-SSL" ) != null )
return "";
String prefix= "http://static";
if ( host.indexOf( "local." ) >= 0 )
prefix += "-local";
prefix += "." + Config.getExternalDomain() + "/" + host;
return prefix;
}
static String _urlAppendNameValue( String base , String extra ){
if ( base == null || base.length() == 0 )
return extra;
if ( base.endsWith( "&" ) )
return base + extra;
return base + "&" + extra;
}
static String getStaticSuffix( HttpRequest request , AppRequest ar ){
final AppContext ctxt = ar.getContext();
String suffix = "ctxt=" + ctxt.getEnvironmentName() + "-" + ctxt.getGitBranch();
if ( ctxt.getGitHash() != null )
suffix += "-" + ctxt.getGitHash();
return suffix;
}
private final AppContext _context;
private AppRequest _ar;
private String _cdnPrefix;
private String _cdnSuffix;
}
|
package groovy.lang;
import java.beans.BeanInfo;
import java.beans.EventSetDescriptor;
import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.io.InputStream;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.Proxy;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;
import org.codehaus.groovy.ast.ClassNode;
import org.codehaus.groovy.ast.CompileUnit;
import org.codehaus.groovy.classgen.CompilerFacade;
import org.codehaus.groovy.classgen.ReflectorGenerator;
import org.codehaus.groovy.runtime.ClosureListener;
import org.codehaus.groovy.runtime.DefaultGroovyMethods;
import org.codehaus.groovy.runtime.InvokerHelper;
import org.codehaus.groovy.runtime.InvokerInvocationException;
import org.codehaus.groovy.runtime.MethodClosure;
import org.codehaus.groovy.runtime.MethodHelper;
import org.codehaus.groovy.runtime.MethodKey;
import org.codehaus.groovy.runtime.NewStaticMetaMethod;
import org.codehaus.groovy.runtime.ReflectionMetaMethod;
import org.codehaus.groovy.runtime.Reflector;
import org.codehaus.groovy.runtime.TemporaryMethodKey;
import org.codehaus.groovy.runtime.TransformMetaMethod;
import org.objectweb.asm.ClassWriter;
/**
* Allows methods to be dynamically added to existing classes at runtime
*
* @author <a href="mailto:james@coredevelopers.net">James Strachan</a>
* @version $Revision$
*/
public class MetaClass {
private static final Logger log = Logger.getLogger(MetaClass.class.getName());
public static final Object[] EMPTY_ARRAY = {
};
public static Class[] EMPTY_TYPE_ARRAY = {
};
protected static final Object[] ARRAY_WITH_NULL = { null };
private static boolean useReflection = false;
private MetaClassRegistry registry;
private Class theClass;
private ClassNode classNode;
private Map methodIndex = new HashMap();
private Map staticMethodIndex = new HashMap();
private List newGroovyMethodsList = new ArrayList();
private Map propertyDescriptors = Collections.synchronizedMap(new HashMap());
private Map listeners = new HashMap();
private Map methodCache = Collections.synchronizedMap(new HashMap());
private Map staticMethodCache = Collections.synchronizedMap(new HashMap());
private MetaMethod genericGetMethod;
private MetaMethod genericSetMethod;
private List constructors;
private List allMethods = new ArrayList();
private List interfaceMethods;
private Reflector reflector;
private boolean initialised;
public MetaClass(MetaClassRegistry registry, Class theClass) throws IntrospectionException {
this.registry = registry;
this.theClass = theClass;
constructors = Arrays.asList(theClass.getDeclaredConstructors());
addMethods(theClass);
// introspect
BeanInfo info = Introspector.getBeanInfo(theClass);
PropertyDescriptor[] descriptors = info.getPropertyDescriptors();
for (int i = 0; i < descriptors.length; i++) {
PropertyDescriptor descriptor = descriptors[i];
propertyDescriptors.put(descriptor.getName(), descriptor);
}
EventSetDescriptor[] eventDescriptors = info.getEventSetDescriptors();
for (int i = 0; i < eventDescriptors.length; i++) {
EventSetDescriptor descriptor = eventDescriptors[i];
Method[] listenerMethods = descriptor.getListenerMethods();
for (int j = 0; j < listenerMethods.length; j++) {
Method listenerMethod = listenerMethods[j];
MetaMethod metaMethod = createMetaMethod(descriptor.getAddListenerMethod());
listeners.put(listenerMethod.getName(), metaMethod);
}
}
}
public static boolean isUseReflection() {
return useReflection;
}
/**
* Allows reflection to be enabled in situations where bytecode generation
* of method invocations causes issues.
*
* @param useReflection
*/
public static void setUseReflection(boolean useReflection) {
MetaClass.useReflection = useReflection;
}
private void addInheritedMethods(Class theClass) {
// lets add all the base class methods
Class c = theClass;
if (c != Object.class) {
while (true) {
c = c.getSuperclass();
if (c == Object.class || c == null) {
break;
}
addMethods(c);
addNewStaticMethodsFrom(c);
}
}
// now lets see if there are any methods on one of my interfaces
Class[] interfaces = theClass.getInterfaces();
for (int i = 0; i < interfaces.length; i++) {
addNewStaticMethodsFrom(interfaces[i]);
}
// lets add Object methods after interfaces, as all interfaces derive from Object.
// this ensures List and Collection methods come before Object etc
if (theClass != Object.class) {
addMethods(Object.class);
addNewStaticMethodsFrom(Object.class);
}
if (theClass.isArray() && !theClass.equals(Object[].class)) {
addNewStaticMethodsFrom(Object[].class);
}
}
/**
* @return all the normal instance methods available on this class for the
* given name
*/
public List getMethods(String name) {
List answer = (List) methodIndex.get(name);
if (answer == null) {
return Collections.EMPTY_LIST;
}
return answer;
}
/**
* @return all the normal static methods available on this class for the
* given name
*/
public List getStaticMethods(String name) {
List answer = (List) staticMethodIndex.get(name);
if (answer == null) {
return Collections.EMPTY_LIST;
}
return answer;
}
/**
* Allows static method definitions to be added to a meta class as if it
* was an instance method
*
* @param method
*/
protected void addNewStaticInstanceMethod(Method method) {
if (initialised) {
throw new RuntimeException("Already initialized, cannot add new method: " + method);
}
else {
NewStaticMetaMethod newMethod = new NewStaticMetaMethod(createMetaMethod(method));
addMethod(newMethod);
addNewStaticInstanceMethod(newMethod);
}
}
protected void addNewStaticInstanceMethod(MetaMethod method) {
newGroovyMethodsList.add(method);
}
public Object invokeMethod(Object object, String methodName, Object arguments) {
return invokeMethod(object, methodName, asArray(arguments));
}
/**
* Invokes the given method on the object.
*
*/
public Object invokeMethod(Object object, String methodName, Object[] arguments) {
if (object == null) {
throw new NullPointerException("Cannot invoke method: " + methodName + " on null object");
}
// lets try use the cache to find the method
MethodKey methodKey = new TemporaryMethodKey(methodName, arguments);
MetaMethod method = (MetaMethod) methodCache.get(methodKey);
if (method == null) {
method = pickMethod(object, methodName, arguments);
if (method != null) {
methodCache.put(methodKey.createCopy(), method);
}
}
if (method != null) {
return doMethodInvoke(object, method, arguments);
}
throw new MissingMethodException(methodName, theClass, arguments);
}
/**
* Picks which method to invoke for the given object, method name and arguments
*/
protected MetaMethod pickMethod(Object object, String methodName, Object[] arguments) {
MetaMethod method = null;
List methods = getMethods(methodName);
if (!methods.isEmpty()) {
method = (MetaMethod) chooseMethod(methodName, methods, arguments, false);
if (method == null) {
method = (MetaMethod) chooseMethod(methodName, methods, arguments, true);
if (method == null) {
int size = (arguments != null) ? arguments.length : 0;
if (size == 1) {
Object firstArgument = arguments[0];
if (firstArgument instanceof List) {
// lets coerce the list arguments into an array of
// arguments
// e.g. calling JFrame.setLocation( [100, 100] )
List list = (List) firstArgument;
arguments = list.toArray();
method = (MetaMethod) chooseMethod(methodName, methods, arguments, true);
// guard against a null match so we fall through to a MissingMethodException
// instead of wrapping null in a TransformMetaMethod
if (method != null) {
return new TransformMetaMethod(method) {
public Object invoke(Object object, Object[] arguments) throws Exception {
Object firstArgument = arguments[0];
List list = (List) firstArgument;
arguments = list.toArray();
return super.invoke(object, arguments);
}
};
}
}
}
}
}
}
return method;
}
public Object invokeStaticMethod(Object object, String methodName, Object[] arguments) {
// System.out.println("Calling static method: " + methodName + " on args: " + InvokerHelper.toString(arguments));
// Class type = arguments == null ? null : arguments.getClass();
// System.out.println("Argument type: " + type);
// System.out.println("Type of first arg: " + arguments[0] + " type: " + arguments[0].getClass());
// lets try use the cache to find the method
MethodKey methodKey = new TemporaryMethodKey(methodName, arguments);
MetaMethod method = (MetaMethod) staticMethodCache.get(methodKey);
if (method == null) {
method = pickStaticMethod(object, methodName, arguments);
if (method != null) {
staticMethodCache.put(methodKey.createCopy(), method);
}
}
if (method != null) {
return doMethodInvoke(object, method, arguments);
}
/*
List methods = getStaticMethods(methodName);
if (!methods.isEmpty()) {
MetaMethod method = (MetaMethod) chooseMethod(methodName, methods, arguments, false);
if (method != null) {
return doMethodInvoke(theClass, method, arguments);
}
}
if (theClass != Class.class) {
try {
return registry.getMetaClass(Class.class).invokeMethod(object, methodName, arguments);
}
catch (GroovyRuntimeException e) {
// throw our own exception
}
}
*/
throw new MissingMethodException(methodName, theClass, arguments);
}
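/**
* Picks a static method matching the given name and arguments, falling back to
* the metaclass of java.lang.Class so methods of Class itself remain callable.
*/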
protected MetaMethod pickStaticMethod(Object object, String methodName, Object[] arguments) {
MetaMethod method = null;
List methods = getStaticMethods(methodName);
if (!methods.isEmpty()) {
method = (MetaMethod) chooseMethod(methodName, methods, arguments, false);
}
if (method == null && theClass != Class.class) {
MetaClass classMetaClass = registry.getMetaClass(Class.class);
method = classMetaClass.pickMethod(object, methodName, arguments);
}
return method;
}
public Object invokeConstructor(Object[] arguments) {
Constructor constructor = (Constructor) chooseMethod("<init>", constructors, arguments, false);
if (constructor != null) {
return doConstructorInvoke(constructor, arguments);
}
else {
constructor = (Constructor) chooseMethod("<init>", constructors, arguments, true);
if (constructor != null) {
return doConstructorInvoke(constructor, arguments);
}
}
if (arguments.length == 1) {
Object firstArgument = arguments[0];
if (firstArgument instanceof Map) {
constructor = (Constructor) chooseMethod("<init>", constructors, EMPTY_ARRAY, false);
if (constructor != null) {
Object bean = doConstructorInvoke(constructor, EMPTY_ARRAY);
setProperties(bean, ((Map) firstArgument));
return bean;
}
}
}
throw new GroovyRuntimeException("Could not find matching constructor for class: " + theClass.getName());
}
/**
* Sets a number of bean properties from the given Map where the keys are
* the String names of properties and the values are the values of the
* properties to set
*/
public void setProperties(Object bean, Map map) {
for (Iterator iter = map.entrySet().iterator(); iter.hasNext();) {
Map.Entry entry = (Map.Entry) iter.next();
String key = entry.getKey().toString();
Object value = entry.getValue();
setProperty(bean, key, value);
}
}
/**
* @return the given property's value on the object
*/
public Object getProperty(final Object object, final String property) {
MetaMethod metaMethod = null;
PropertyDescriptor descriptor = (PropertyDescriptor) propertyDescriptors.get(property);
if (descriptor != null) {
Method method = descriptor.getReadMethod();
if (method == null) {
throw new GroovyRuntimeException("Cannot read property: " + property);
}
metaMethod = findMethod(method);
if (metaMethod == null) {
// lets try find a plain no-argument getter method
metaMethod = findGetter(object, "get" + capitalize(property));
}
}
if (metaMethod != null) {
return doMethodInvoke(object, metaMethod, EMPTY_ARRAY);
}
if (genericGetMethod != null) {
Object[] arguments = { property };
Object answer = doMethodInvoke(object, genericGetMethod, arguments);
if (answer != null) {
return answer;
}
}
// is the property the name of a method - in which case return a
// closure
List methods = getMethods(property);
if (!methods.isEmpty()) {
return new MethodClosure(object, property);
}
// lets try find a plain no-argument getter method
Exception lastException = null;
try {
MetaMethod method = findGetter(object, "get" + capitalize(property));
if (method != null) {
return doMethodInvoke(object, method, EMPTY_ARRAY);
}
}
catch (GroovyRuntimeException e) {
lastException = e;
}
/** @todo or are we an extensible groovy class? */
if (genericGetMethod != null) {
return null;
}
else {
/** @todo these special cases should be special MetaClasses maybe */
if (object instanceof Class) {
// lets try a static field
return getStaticProperty((Class) object, property);
}
if (object instanceof Collection) {
return DefaultGroovyMethods.getAt((Collection) object, property);
}
if (object instanceof Object[]) {
return DefaultGroovyMethods.getAt(Arrays.asList((Object[]) object), property);
}
MetaMethod addListenerMethod = (MetaMethod) listeners.get(property);
if (addListenerMethod != null) {
/* @todo one day we could try return the previously registered Closure listener for easy removal */
return null;
}
// lets try the getter method
if (lastException == null) {
throw new MissingPropertyException(property, theClass);
}
else {
throw new MissingPropertyException(property, theClass, lastException);
}
}
}
/**
* Sets the property value on an object
*/
public void setProperty(Object object, String property, Object newValue) {
PropertyDescriptor descriptor = (PropertyDescriptor) propertyDescriptors.get(property);
if (descriptor != null) {
Method method = descriptor.getWriteMethod();
if (method == null) {
throw new ReadOnlyPropertyException(property, theClass);
}
MetaMethod metaMethod = findMethod(method);
Object[] arguments = { newValue };
try {
doMethodInvoke(object, metaMethod, arguments);
}
catch (GroovyRuntimeException e) {
// if the value is a List see if we can construct the value
// from a constructor
if (newValue instanceof List) {
List list = (List) newValue;
int params = list.size();
Constructor[] constructors = descriptor.getPropertyType().getConstructors();
for (int i = 0; i < constructors.length; i++) {
Constructor constructor = constructors[i];
if (constructor.getParameterTypes().length == params) {
Object value = doConstructorInvoke(constructor, list.toArray());
doMethodInvoke(object, metaMethod, new Object[] { value });
return;
}
}
}
throw new MissingPropertyException(property, theClass, e);
}
return;
}
try {
MetaMethod addListenerMethod = (MetaMethod) listeners.get(property);
if (addListenerMethod != null && newValue instanceof Closure) {
// lets create a dynamic proxy
Object proxy =
createListenerProxy(addListenerMethod.getParameterTypes()[0], property, (Closure) newValue);
doMethodInvoke(object, addListenerMethod, new Object[] { proxy });
return;
}
if (genericSetMethod != null) {
Object[] arguments = { property, newValue };
doMethodInvoke(object, genericSetMethod, arguments);
return;
}
/** @todo or are we an extensible class? */
// lets try invoke the set method
String method = "set" + capitalize(property);
invokeMethod(object, method, new Object[] { newValue });
}
catch (GroovyRuntimeException e) {
throw new MissingPropertyException(property, theClass, e);
}
}
public ClassNode getClassNode() {
if (classNode == null && GroovyObject.class.isAssignableFrom(theClass)) {
// lets try load it from the classpath
String className = theClass.getName();
String groovyFile = className;
int idx = groovyFile.indexOf('$');
if (idx > 0) {
groovyFile = groovyFile.substring(0, idx);
}
groovyFile = groovyFile.replace('.', '/') + ".groovy";
//System.out.println("Attempting to load: " + groovyFile);
URL url = theClass.getClassLoader().getResource(groovyFile);
if (url == null) {
url = Thread.currentThread().getContextClassLoader().getResource(groovyFile);
}
if (url != null) {
try {
InputStream in = url.openStream();
/**
* @todo there is no CompileUnit in scope so class name
* checking won't work but that mostly affects the bytecode
* generation rather than viewing the AST
*/
CompilerFacade compiler = new CompilerFacade(theClass.getClassLoader(), new CompileUnit()) {
protected void onClass(ClassWriter classWriter, ClassNode classNode) {
if (classNode.getName().equals(theClass.getName())) {
//System.out.println("Found: " +
// classNode.getName());
MetaClass.this.classNode = classNode;
}
}
};
compiler.parseClass(in, groovyFile);
}
catch (Exception e) {
throw new GroovyRuntimeException("Exception thrown parsing: " + groovyFile + ". Reason: " + e, e);
}
}
}
return classNode;
}
public String toString() {
return super.toString() + "[" + theClass + "]";
}
// Implementation methods
/**
* Converts the given object into an array; if its an array then just cast
* otherwise wrap it in an array
*/
protected Object[] asArray(Object arguments) {
if (arguments == null) {
return EMPTY_ARRAY;
}
if (arguments instanceof Tuple) {
Tuple tuple = (Tuple) arguments;
return tuple.toArray();
}
if (arguments instanceof Object[]) {
return (Object[]) arguments;
}
else {
return new Object[] { arguments };
}
}
/**
* @param listenerType
* the interface of the listener to proxy
* @param listenerMethodName
* the name of the method in the listener API to call the
* closure on
* @param closure
* the closure to invoke on the listenerMethodName method
* invocation
* @return a dynamic proxy which calls the given closure on the given
* method name
*/
protected Object createListenerProxy(Class listenerType, final String listenerMethodName, final Closure closure) {
InvocationHandler handler = new ClosureListener(listenerMethodName, closure);
return Proxy.newProxyInstance(listenerType.getClassLoader(), new Class[] { listenerType }, handler);
}
/**
* Adds all the methods declared in the given class to the metaclass
* ignoring any matching methods already defined by a derived class
*
* @param theClass
*/
protected void addMethods(Class theClass) {
Method[] methodArray = theClass.getDeclaredMethods();
for (int i = 0; i < methodArray.length; i++) {
MetaMethod method = createMetaMethod(methodArray[i]);
addMethod(method);
}
}
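/**
* Indexes the method by name in the instance method index (and additionally in
* the static method index when it is static), skipping signatures already
* present, and records generic get/set dispatch methods when first seen.
*/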
protected void addMethod(MetaMethod method) {
String name = method.getName();
//System.out.println(theClass.getName() + " == " + name + Arrays.asList(method.getParameterTypes()));
if (isGenericGetMethod(method) && genericGetMethod == null) {
genericGetMethod = method;
}
else if (isGenericSetMethod(method) && genericSetMethod == null) {
genericSetMethod = method;
}
if (method.isStatic()) {
List list = (List) staticMethodIndex.get(name);
if (list == null) {
list = new ArrayList();
staticMethodIndex.put(name, list);
list.add(method);
}
else {
if (!containsMatchingMethod(list, method)) {
list.add(method);
}
}
}
List list = (List) methodIndex.get(name);
if (list == null) {
list = new ArrayList();
methodIndex.put(name, list);
list.add(method);
}
else {
if (!containsMatchingMethod(list, method)) {
list.add(method);
}
}
}
/**
* @return true if a method of the same matching prototype was found in the
* list
*/
protected boolean containsMatchingMethod(List list, MetaMethod method) {
for (Iterator iter = list.iterator(); iter.hasNext();) {
MetaMethod aMethod = (MetaMethod) iter.next();
Class[] params1 = aMethod.getParameterTypes();
Class[] params2 = method.getParameterTypes();
if (params1.length == params2.length) {
boolean matches = true;
for (int i = 0; i < params1.length; i++) {
if (params1[i] != params2[i]) {
matches = false;
break;
}
}
if (matches) {
return true;
}
}
}
return false;
}
/**
* Adds all of the newly defined methods from the given class to this
* metaclass
*
* @param theClass
*/
protected void addNewStaticMethodsFrom(Class theClass) {
MetaClass interfaceMetaClass = registry.getMetaClass(theClass);
Iterator iter = interfaceMetaClass.newGroovyMethodsList.iterator();
while (iter.hasNext()) {
MetaMethod method = (MetaMethod) iter.next();
addMethod(method);
newGroovyMethodsList.add(method);
}
}
/**
* @return the value of the static property of the given class
*/
protected Object getStaticProperty(Class aClass, String property) {
//System.out.println("Invoking property: " + property + " on class: "
// + aClass);
Exception lastException = null;
try {
Field field = aClass.getField(property);
if (field != null) {
if ((field.getModifiers() & Modifier.STATIC) != 0) {
return field.get(null);
}
}
}
catch (Exception e) {
lastException = e;
}
// lets try invoke a static getter method
try {
MetaMethod method = findStaticGetter(aClass, "get" + capitalize(property));
if (method != null) {
return doMethodInvoke(aClass, method, EMPTY_ARRAY);
}
}
catch (GroovyRuntimeException e) {
throw new MissingPropertyException(property, aClass, e);
}
if (lastException == null) {
throw new MissingPropertyException(property, aClass);
}
else {
throw new MissingPropertyException(property, aClass, lastException);
}
}
/**
* @return the matching method which should be found
*/
protected MetaMethod findMethod(Method aMethod) {
List methods = getMethods(aMethod.getName());
for (Iterator iter = methods.iterator(); iter.hasNext();) {
MetaMethod method = (MetaMethod) iter.next();
if (method.isMethod(aMethod)) {
return method;
}
}
log.warning("Creating reflection based dispatcher for: " + aMethod);
return new ReflectionMetaMethod(aMethod);
}
/**
* @return the getter method for the given object
*/
protected MetaMethod findGetter(Object object, String name) {
List methods = getMethods(name);
for (Iterator iter = methods.iterator(); iter.hasNext();) {
MetaMethod method = (MetaMethod) iter.next();
if (method.getParameterTypes().length == 0) {
return method;
}
}
return null;
}
/**
* @return the Method of the given name with no parameters or null
*/
protected MetaMethod findStaticGetter(Class type, String name) {
List methods = getStaticMethods(name);
for (Iterator iter = methods.iterator(); iter.hasNext();) {
MetaMethod method = (MetaMethod) iter.next();
if (method.getParameterTypes().length == 0) {
return method;
}
}
/** @todo dirty hack - don't understand why this code is necessary - all methods should be in the allMethods list! */
try {
Method method = type.getMethod(name, EMPTY_TYPE_ARRAY);
if ((method.getModifiers() & Modifier.STATIC) != 0) {
return findMethod(method);
}
else {
return null;
}
}
catch (Exception e) {
return null;
}
}
protected Object doMethodInvoke(Object object, MetaMethod method, Object[] argumentArray) {
//System.out.println("Evaluating method: " + method);
//System.out.println("on object: " + object + " with arguments: " +
// InvokerHelper.toString(argumentArray));
//System.out.println(this.theClass);
try {
if (argumentArray == null) {
argumentArray = EMPTY_ARRAY;
}
else if (method.getParameterTypes().length == 1 && argumentArray.length == 0) {
argumentArray = ARRAY_WITH_NULL;
}
return method.invoke(object, argumentArray);
}
catch (ClassCastException e) {
if (coerceGStrings(argumentArray)) {
try {
return doMethodInvoke(object, method, argumentArray);
}
catch (Exception e2) {
// allow fall through
}
}
throw new GroovyRuntimeException(
"failed to invoke method: "
+ method
+ " on: "
+ object
+ " with arguments: "
+ InvokerHelper.toString(argumentArray)
+ " reason: "
+ e,
e);
}
catch (InvocationTargetException e) {
Throwable t = e.getTargetException();
if (t instanceof Error) {
Error error = (Error) t;
throw error;
}
if (t instanceof RuntimeException) {
RuntimeException runtimeEx = (RuntimeException) t;
throw runtimeEx;
}
throw new InvokerInvocationException(e);
}
catch (IllegalAccessException e) {
throw new GroovyRuntimeException(
"could not access method: "
+ method
+ " on: "
+ object
+ " with arguments: "
+ InvokerHelper.toString(argumentArray)
+ " reason: "
+ e,
e);
}
catch (IllegalArgumentException e) {
e.printStackTrace();
if (coerceGStrings(argumentArray)) {
try {
return doMethodInvoke(object, method, argumentArray);
}
catch (Exception e2) {
// allow fall through
}
}
throw new GroovyRuntimeException(
"failed to invoke method: "
+ method
+ " on: "
+ object
+ " with arguments: "
+ InvokerHelper.toString(argumentArray)
+ " reason: "
+ e,
e);
}
catch (RuntimeException e) {
throw e;
}
catch (Exception e) {
throw new GroovyRuntimeException(
"failed to invoke method: "
+ method
+ " on: "
+ object
+ " with arguments: "
+ InvokerHelper.toString(argumentArray)
+ " reason: "
+ e,
e);
}
}
protected Object doConstructorInvoke(Constructor constructor, Object[] argumentArray) {
//System.out.println("Evaluating constructor: " + constructor + " with
// arguments: " + InvokerHelper.toString(argumentArray));
//System.out.println(this.theClass);
try {
return constructor.newInstance(argumentArray);
}
catch (InvocationTargetException e) {
Throwable t = e.getTargetException();
if (t instanceof Error) {
Error error = (Error) t;
throw error;
}
if (t instanceof RuntimeException) {
RuntimeException runtimeEx = (RuntimeException) t;
throw runtimeEx;
}
throw new InvokerInvocationException(e);
}
catch (IllegalArgumentException e) {
if (coerceGStrings(argumentArray)) {
try {
return constructor.newInstance(argumentArray);
}
catch (Exception e2) {
// allow fall through
}
}
throw new GroovyRuntimeException(
"failed to invoke constructor: "
+ constructor
+ " with arguments: "
+ InvokerHelper.toString(argumentArray)
+ " reason: "
+ e,
e);
}
catch (IllegalAccessException e) {
throw new GroovyRuntimeException(
"could not access constructor: "
+ constructor
+ " with arguments: "
+ InvokerHelper.toString(argumentArray)
+ " reason: "
+ e,
e);
}
catch (Exception e) {
throw new GroovyRuntimeException(
"failed to invoke constructor: "
+ constructor
+ " with arguments: "
+ InvokerHelper.toString(argumentArray)
+ " reason: "
+ e,
e);
}
}
/**
* Chooses the correct method to use from a list of methods which match by
* name.
*
* @param methodName
* the name of the method (used for error reporting)
* @param methods
* the possible methods to choose from
* @param arguments
* the arguments to invoke the method with
* @param coerce
* whether type coercion may be used when matching parameters
* @return the matching method or constructor, or null if none is applicable
*/
protected Object chooseMethod(String methodName, List methods, Object[] arguments, boolean coerce) {
int methodCount = methods.size();
if (methodCount <= 0) {
return null;
}
else if (methodCount == 1) {
Object method = methods.get(0);
if (isValidMethod(method, arguments, coerce)) {
return method;
}
return null;
}
Object answer = null;
if (arguments == null || arguments.length == 0) {
answer = chooseEmptyMethodParams(methods);
}
else if (arguments.length == 1 && arguments[0] == null) {
answer = chooseMostGeneralMethodWith1Param(methods);
}
else {
List matchingMethods = new ArrayList();
for (Iterator iter = methods.iterator(); iter.hasNext();) {
Object method = iter.next();
// making this false helps find matches
if (isValidMethod(method, arguments, coerce)) {
matchingMethods.add(method);
}
}
if (matchingMethods.isEmpty()) {
return null;
}
else if (matchingMethods.size() == 1) {
return matchingMethods.get(0);
}
return chooseMostSpecificParams(methodName, matchingMethods, arguments);
}
if (answer != null) {
return answer;
}
throw new GroovyRuntimeException(
"Could not find which method to invoke from this list: "
+ methods
+ " for arguments: "
+ InvokerHelper.toString(arguments));
}
protected boolean isValidMethod(Object method, Object[] arguments, boolean includeCoerce) {
Class[] paramTypes = getParameterTypes(method);
return isValidMethod(paramTypes, arguments, includeCoerce);
}
protected static boolean isValidMethod(Class[] paramTypes, Object[] arguments, boolean includeCoerce) {
if (arguments == null) {
return true;
}
int size = arguments.length;
boolean validMethod = false;
if (paramTypes.length == size) {
// lets check the parameter types match
validMethod = true;
for (int i = 0; i < size; i++) {
Object value = arguments[i];
if (!isCompatibleInstance(paramTypes[i], value, includeCoerce)) {
validMethod = false;
}
}
}
else {
if (paramTypes.length == 1 && size == 0) {
return true;
}
}
return validMethod;
}
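/**
* Of the methods whose signatures already accept the arguments, picks the one
* with the most specific parameter types, flagging ambiguous overloads via
* checkForInvalidOverloading.
*/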
protected Object chooseMostSpecificParams(String name, List matchingMethods, Object[] arguments) {
Object answer = null;
int size = arguments.length;
Class[] mostSpecificTypes = null;
for (Iterator iter = matchingMethods.iterator(); iter.hasNext();) {
Object method = iter.next();
Class[] paramTypes = getParameterTypes(method);
if (answer == null) {
answer = method;
mostSpecificTypes = paramTypes;
}
else {
boolean useThisMethod = false;
for (int i = 0; i < size; i++) {
Class mostSpecificType = mostSpecificTypes[i];
Class type = paramTypes[i];
if (!isAssignableFrom(mostSpecificType, type)) {
useThisMethod = true;
break;
}
}
if (useThisMethod) {
if (size > 1) {
checkForInvalidOverloading(name, mostSpecificTypes, paramTypes);
}
answer = method;
mostSpecificTypes = paramTypes;
}
}
}
return answer;
}
/**
* Checks that one of the parameter types is a superset of the other and
* that the two lists of types don't conflict. e.g. foo(String, Object) and
* foo(Object, String) would conflict if called with foo("a", "b").
*
* Note that this method is only called with 2 possible signatures. i.e.
* possible invalid combinations will already have been filtered out. So if
* there were methods foo(String, Object) and foo(Object, String) then one
* of these would be already filtered out if foo was called as foo(12, "a")
*/
protected void checkForInvalidOverloading(String name, Class[] baseTypes, Class[] derivedTypes) {
for (int i = 0, size = baseTypes.length; i < size; i++) {
Class baseType = baseTypes[i];
Class derivedType = derivedTypes[i];
if (!isAssignableFrom(derivedType, baseType)) {
throw new GroovyRuntimeException(
"Ambiguous method overloading for method: "
+ name
+ ". Cannot resolve which method to invoke due to overlapping prototypes between: "
+ InvokerHelper.toString(baseTypes)
+ " and: "
+ InvokerHelper.toString(derivedTypes));
}
}
}
protected Class[] getParameterTypes(Object methodOrConstructor) {
if (methodOrConstructor instanceof MetaMethod) {
MetaMethod method = (MetaMethod) methodOrConstructor;
return method.getParameterTypes();
}
if (methodOrConstructor instanceof Method) {
Method method = (Method) methodOrConstructor;
return method.getParameterTypes();
}
if (methodOrConstructor instanceof Constructor) {
Constructor constructor = (Constructor) methodOrConstructor;
return constructor.getParameterTypes();
}
throw new IllegalArgumentException("Must be a Method or Constructor");
}
/**
* @return the method with 1 parameter which takes the most general type of
* object (e.g. Object)
*/
protected Object chooseMostGeneralMethodWith1Param(List methods) {
// lets look for methods with 1 argument which matches the type of the
// arguments
Class closestClass = null;
Object answer = null;
for (Iterator iter = methods.iterator(); iter.hasNext();) {
Object method = iter.next();
Class[] paramTypes = getParameterTypes(method);
int paramLength = paramTypes.length;
if (paramLength == 1) {
Class theType = paramTypes[0];
if (closestClass == null || isAssignableFrom(closestClass, theType)) {
closestClass = theType;
answer = method;
}
}
}
return answer;
}
/**
* @return the method which takes no parameters, or null if none exists
*/
protected Object chooseEmptyMethodParams(List methods) {
for (Iterator iter = methods.iterator(); iter.hasNext();) {
Object method = iter.next();
Class[] paramTypes = getParameterTypes(method);
int paramLength = paramTypes.length;
if (paramLength == 0) {
return method;
}
}
return null;
}
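/**
* @return true if the value may be passed where the given type is expected,
* accepting nulls, boxed primitives and (when includeCoerce is set)
* GString-to-String and Number-to-Number coercions
*/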
protected static boolean isCompatibleInstance(Class type, Object value, boolean includeCoerce) {
boolean answer = value == null || type.isInstance(value);
if (!answer) {
if (type.isPrimitive()) {
if (type == int.class) {
return value instanceof Integer;
}
else if (type == double.class) {
return value instanceof Double || value instanceof Float || value instanceof Integer;
}
else if (type == boolean.class) {
return value instanceof Boolean;
}
else if (type == long.class) {
return value instanceof Long || value instanceof Integer;
}
else if (type == float.class) {
return value instanceof Float || value instanceof Integer;
}
else if (type == char.class) {
return value instanceof Character;
}
else if (type == byte.class) {
return value instanceof Byte;
}
else if (type == short.class) {
return value instanceof Short;
}
}
else if (includeCoerce) {
if (type == String.class && value instanceof GString) {
return true;
}
else if (value instanceof Number) {
// lets allow numbers to be coerced downwards?
return Number.class.isAssignableFrom(type);
}
}
}
return answer;
}
protected boolean isAssignableFrom(Class mostSpecificType, Class type) {
boolean answer = type.isAssignableFrom(mostSpecificType);
if (!answer) {
answer = autoboxType(type).isAssignableFrom(autoboxType(mostSpecificType));
}
return answer;
}
private Class autoboxType(Class type) {
if (type.isPrimitive()) {
if (type == int.class) {
return Integer.class;
}
else if (type == double.class) {
return Double.class;
}
else if (type == long.class) {
return Long.class;
}
else if (type == boolean.class) {
return Boolean.class;
}
else if (type == float.class) {
return Float.class;
}
else if (type == char.class) {
return Character.class;
}
else if (type == byte.class) {
return Byte.class;
}
else if (type == short.class) {
return Short.class;
}
}
return type;
}
/**
* Coerces any GString instances into Strings
*
* @return true if some coercion was done.
*/
protected boolean coerceGStrings(Object[] arguments) {
boolean coerced = false;
for (int i = 0, size = arguments.length; i < size; i++) {
Object argument = arguments[i];
if (argument instanceof GString) {
arguments[i] = argument.toString();
coerced = true;
}
}
return coerced;
}
protected boolean isGenericSetMethod(MetaMethod method) {
return (method.getName().equals("set") || method.getName().equals("setAttribute"))
&& method.getParameterTypes().length == 2;
}
protected boolean isGenericGetMethod(MetaMethod method) {
if (method.getName().equals("get") || method.getName().equals("getAttribute")) {
Class[] parameterTypes = method.getParameterTypes();
return parameterTypes.length == 1 && parameterTypes[0] == String.class;
}
return false;
}
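/**
* Registers each public static method of this class as a new instance method
* on the metaclass of its first parameter type.
*/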
protected void registerStaticMethods() {
Method[] methods = theClass.getMethods();
for (int i = 0; i < methods.length; i++) {
Method method = methods[i];
if (MethodHelper.isStatic(method)) {
Class[] paramTypes = method.getParameterTypes();
if (paramTypes.length > 0) {
Class owner = paramTypes[0];
registry.lookup(owner).addNewStaticInstanceMethod(method);
}
}
}
}
protected String capitalize(String property) {
return property.substring(0, 1).toUpperCase() + property.substring(1);
}
protected synchronized void checkInitialised() {
if (!initialised) {
initialised = true;
addInheritedMethods(theClass);
generateReflector();
}
}
protected MetaMethod createMetaMethod(Method method) {
if (registry.useAccessible()) {
method.setAccessible(true);
}
if (useReflection) {
//log.warning("Creating reflection based dispatcher for: " + method);
return new ReflectionMetaMethod(method);
}
MetaMethod answer = new MetaMethod(method);
if (isValidReflectorMethod(answer)) {
allMethods.add(answer);
answer.setMethodIndex(allMethods.size());
}
else {
//log.warning("Creating reflection based dispatcher for: " + method);
answer = new ReflectionMetaMethod(method);
}
return answer;
}
protected boolean isValidReflectorMethod(MetaMethod method) {
if (method.isPrivate() || method.isProtected()) {
return false;
}
Class declaringClass = method.getDeclaringClass();
if (!Modifier.isPublic(declaringClass.getModifiers())) {
// lets see if this method is implemented on an interface
List list = getInterfaceMethods();
for (Iterator iter = list.iterator(); iter.hasNext();) {
MetaMethod aMethod = (MetaMethod) iter.next();
if (method.isSame(aMethod)) {
method.setInterfaceClass(aMethod.getDeclaringClass());
return true;
}
}
/** @todo */
//log.warning("Cannot invoke method on protected/private class which isn't visible on an interface so must use reflection instead: " + method);
return false;
}
return true;
}
protected void generateReflector() {
reflector = loadReflector(allMethods);
if (reflector == null) {
throw new RuntimeException("Should have a reflector!");
}
// lets set the reflector on all the methods
for (Iterator iter = allMethods.iterator(); iter.hasNext();) {
MetaMethod metaMethod = (MetaMethod) iter.next();
//System.out.println("Setting reflector for method: " + metaMethod + " with index: " + metaMethod.getMethodIndex());
metaMethod.setReflector(reflector);
}
}
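/**
* Loads, or generates and then loads, the bytecode-based Reflector helper class
* (named gjdk.<ClassName>_GroovyReflector) used for fast method dispatch.
*/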
protected Reflector loadReflector(List methods) {
ReflectorGenerator generator = new ReflectorGenerator(methods);
String className = theClass.getName();
String packagePrefix = "gjdk.";
/*
if (className.startsWith("java.")) {
packagePrefix = "gjdk.";
}
*/
String name = packagePrefix + className + "_GroovyReflector";
if (theClass.isArray()) {
String componentName = theClass.getComponentType().getName();
/*
if (componentName.startsWith("java.")) {
packagePrefix = "gjdk.";
}
*/
name = packagePrefix + componentName + "_GroovyReflectorArray";
}
// lets see if its already loaded
try {
Class type = loadReflectorClass(name);
return (Reflector) type.newInstance();
}
catch (Exception e) {
// lets ignore, lets generate it && load it
}
ClassWriter cw = new ClassWriter(true);
generator.generate(cw, name);
byte[] bytecode = cw.toByteArray();
try {
Class type = loadReflectorClass(name, bytecode);
return (Reflector) type.newInstance();
}
catch (Exception e) {
throw new GroovyRuntimeException("Could not load the reflector for class: " + name + ". Reason: " + e, e);
}
}
protected Class loadReflectorClass(String name, byte[] bytecode) throws ClassNotFoundException {
ClassLoader loader = theClass.getClassLoader();
if (loader instanceof GroovyClassLoader) {
GroovyClassLoader gloader = (GroovyClassLoader) loader;
return gloader.loadClass(name, bytecode);
}
return registry.loadClass(name, bytecode);
}
protected Class loadReflectorClass(String name) throws ClassNotFoundException {
ClassLoader loader = theClass.getClassLoader();
if (loader instanceof GroovyClassLoader) {
GroovyClassLoader gloader = (GroovyClassLoader) loader;
return gloader.loadClass(name);
}
return registry.loadClass(name);
}
public List getMethods() {
return allMethods;
}
protected synchronized List getInterfaceMethods() {
if (interfaceMethods == null) {
interfaceMethods = new ArrayList();
Class type = theClass;
while (type != null) {
Class[] interfaces = type.getInterfaces();
for (int i = 0; i < interfaces.length; i++) {
Class iface = interfaces[i];
Method[] methods = iface.getMethods();
addInterfaceMethods(interfaceMethods, methods);
}
type = type.getSuperclass();
}
}
return interfaceMethods;
}
private void addInterfaceMethods(List list, Method[] methods) {
for (int i = 0; i < methods.length; i++) {
list.add(createMetaMethod(methods[i]));
}
}
}
|
package com.jsoniter;
import com.jsoniter.spi.TypeLiteral;
import java.io.IOException;
import java.util.*;
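/**
* A lazily parsed JSON value backed by a byte slice: object fields and array
* elements are decoded on first access (get, getValue, size or iteration) and
* cached in the internal object/array structures.
*
* A minimal usage sketch, assuming "any" wraps the document {"user":{"id":42}}:
* <pre>
* int id = any.toInt("user", "id"); // 42
* </pre>
*/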
public class Any extends Slice implements Iterable<Any> {
private final static ThreadLocal<JsonIterator> tlsIter = new ThreadLocal<JsonIterator>() {
@Override
protected JsonIterator initialValue() {
return new JsonIterator();
}
};
private ValueType valueType;
private List<Any> array;
private Map<Object, Any> object;
private int objectParsedHead;
public Any(ValueType valueType, byte[] data, int head, int tail) {
super(data, head, tail);
this.valueType = valueType;
objectParsedHead = head;
}
public final ValueType valueType() {
return valueType;
}
public <T> T bindTo(T obj, Object... keys) {
Any found = get(keys);
if (found == null) {
return null;
}
return found.bindTo(obj);
}
public <T> T bindTo(T obj) {
try {
return createIterator().read(obj);
} catch (IOException e) {
throw new JsonException(e);
}
}
public <T> T bindTo(TypeLiteral<T> typeLiteral, T obj, Object... keys) {
Any found = get(keys);
if (found == null) {
return null;
}
return found.bindTo(typeLiteral, obj);
}
public <T> T bindTo(TypeLiteral<T> typeLiteral, T obj) {
try {
return createIterator().read(typeLiteral, obj);
} catch (IOException e) {
throw new JsonException(e);
}
}
public final <T> T to(Class<T> clazz, Object... keys) {
Any found = get(keys);
if (found == null) {
return null;
}
return found.to(clazz);
}
private <T> T to(Class<T> clazz) {
try {
return createIterator().read(clazz);
} catch (IOException e) {
throw new JsonException(e);
}
}
public final <T> T to(TypeLiteral<T> typeLiteral, Object... keys) {
Any found = get(keys);
if (found == null) {
return null;
}
return found.to(typeLiteral);
}
private <T> T to(TypeLiteral<T> typeLiteral) {
try {
return createIterator().read(typeLiteral);
} catch (IOException e) {
throw new JsonException(e);
}
}
public final int toInt(Object... keys) {
Any found = get(keys);
if (found == null) {
return 0;
}
return found.toInt();
}
public final int toInt() {
try {
return toInt_();
} catch (IOException e) {
throw new JsonException(e);
}
}
private int toInt_() throws IOException {
if (ValueType.NUMBER == valueType) {
return createIterator().readInt();
}
if (ValueType.STRING == valueType) {
JsonIterator iter = createIterator();
iter.nextToken();
return iter.readInt();
}
if (ValueType.NULL == valueType) {
return 0;
}
throw unexpectedValueType(ValueType.NUMBER);
}
public final long toLong(Object... keys) {
Any found = get(keys);
if (found == null) {
return 0;
}
return found.toLong();
}
public final long toLong() {
try {
return toLong_();
} catch (IOException e) {
throw new JsonException(e);
}
}
private long toLong_() throws IOException {
if (ValueType.NUMBER == valueType) {
return createIterator().readLong();
}
if (ValueType.STRING == valueType) {
JsonIterator iter = createIterator();
iter.nextToken();
return iter.readLong();
}
if (ValueType.NULL == valueType) {
return 0;
}
throw unexpectedValueType(ValueType.NUMBER);
}
public final float toFloat(Object... keys) {
Any found = get(keys);
if (found == null) {
return 0;
}
return found.toFloat();
}
public final float toFloat() {
try {
return toFloat_();
} catch (IOException e) {
throw new JsonException(e);
}
}
private float toFloat_() throws IOException {
if (ValueType.NUMBER == valueType) {
return createIterator().readFloat();
}
if (ValueType.STRING == valueType) {
JsonIterator iter = createIterator();
iter.nextToken();
return iter.readFloat();
}
if (ValueType.NULL == valueType) {
return 0;
}
throw unexpectedValueType(ValueType.NUMBER);
}
public final double toDouble(Object... keys) {
Any found = get(keys);
if (found == null) {
return 0;
}
return found.toDouble();
}
public final double toDouble() {
try {
return toDouble_();
} catch (IOException e) {
throw new JsonException(e);
}
}
private double toDouble_() throws IOException {
if (ValueType.NUMBER == valueType) {
return createIterator().readDouble();
}
if (ValueType.STRING == valueType) {
JsonIterator iter = createIterator();
iter.nextToken();
return iter.readDouble();
}
if (ValueType.NULL == valueType) {
return 0;
}
throw unexpectedValueType(ValueType.NUMBER);
}
public final String toString(Object... keys) {
Any found = get(keys);
if (found == null) {
return null;
}
return found.toString();
}
@Override
public final String toString() {
try {
return toString_();
} catch (IOException e) {
throw new JsonException(e);
}
}
private String toString_() throws IOException {
if (ValueType.STRING == valueType) {
return createIterator().readString();
}
if (ValueType.NULL == valueType) {
return null;
}
if (ValueType.NUMBER == valueType) {
char[] chars = new char[tail() - head()];
for (int i = head(), j = 0; i < tail(); i++, j++) {
chars[j] = (char) data()[i];
}
return new String(chars);
}
return super.toString();
}
public int size() {
try {
if (ValueType.ARRAY == valueType) {
fillArray();
return array.size();
}
if (ValueType.OBJECT == valueType) {
fillObject();
return object.size();
}
} catch (IOException e) {
throw new JsonException(e);
}
throw unexpectedValueType(ValueType.OBJECT);
}
public Set<Object> keys() {
try {
if (ValueType.ARRAY == valueType) {
fillArray();
Set<Object> keys = new HashSet<Object>(array.size());
for (int i = 0; i < array.size(); i++) {
keys.add(i);
}
return keys;
}
if (ValueType.OBJECT == valueType) {
fillObject();
return object.keySet();
}
} catch (IOException e) {
throw new JsonException(e);
}
throw unexpectedValueType(ValueType.OBJECT);
}
public final Any getValue(int index) {
try {
fillArray();
return array.get(index);
} catch (IndexOutOfBoundsException e) {
return null;
} catch (ClassCastException e) {
return null;
} catch (IOException e) {
throw new JsonException(e);
}
}
public final Any getValue(Object key) {
try {
return fillObject(key);
} catch (IndexOutOfBoundsException e) {
return null;
} catch (ClassCastException e) {
return null;
} catch (IOException e) {
throw new JsonException(e);
}
}
public final Any get(Object... keys) {
try {
return get_(keys, 0);
} catch (IndexOutOfBoundsException e) {
return null;
} catch (ClassCastException e) {
return null;
} catch (IOException e) {
throw new JsonException(e);
}
}
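// Recursively resolves the key path: keys index into object fields, Integer keys
// index into array elements; returns null as soon as any key is missing.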
private Any get_(Object[] keys, int idx) throws IOException {
if (idx == keys.length) {
return this;
}
Any result;
if (ValueType.OBJECT == valueType) {
result = fillObject(keys[idx]);
} else if (ValueType.ARRAY == valueType) {
fillArray();
result = array.get((Integer) keys[idx]);
} else {
result = null;
}
Any found = result;
if (found == null) {
return null;
}
return found.get_(keys, idx + 1);
}
public final Any require(Object... keys) {
try {
return require_(keys, 0);
} catch (IOException e) {
throw new JsonException(e);
}
}
private Any require_(Object[] keys, int idx) throws IOException {
if (idx == keys.length) {
return this;
}
Any result = null;
if (ValueType.OBJECT == valueType) {
result = fillObject(keys[idx]);
} else if (ValueType.ARRAY == valueType) {
fillArray();
result = array.get((Integer) keys[idx]);
}
if (result == null) {
throw new JsonException(String.format("failed to get path %s, because %s not found in %s",
Arrays.toString(keys), keys[idx], object));
}
return result.get_(keys, idx + 1);
}
private JsonException unexpectedValueType(ValueType expectedType) {
return new JsonException("unexpected value type: " + valueType + ", expected: " + expectedType);
}
private JsonIterator createIterator() {
JsonIterator iter = tlsIter.get();
iter.reset(this);
return iter;
}
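// Incrementally parses object fields from objectParsedHead, caching each field as
// a child Any and returning as soon as the requested key is found; when the whole
// object has been parsed without a match, a null entry is cached for the key.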
private Any fillObject(Object target) throws IOException {
if (objectParsedHead == tail() || (object != null && object.containsKey(target))) {
return object == null ? null : object.get(target);
}
JsonIterator iter = tlsIter.get();
iter.reset(data(), objectParsedHead, tail());
if (object == null) {
object = new HashMap<Object, Any>(4);
}
if (objectParsedHead == head()) {
if (!CodegenAccess.readObjectStart(iter)) {
objectParsedHead = tail();
return null;
}
String field = CodegenAccess.readObjectFieldAsString(iter);
int start = iter.head;
ValueType elementType = iter.skip();
int end = iter.head;
if (!object.containsKey(field)) {
Any value = new Any(elementType, data(), start, end);
object.put(field, value);
if (field.hashCode() == target.hashCode() && field.equals(target)) {
objectParsedHead = iter.head;
return value;
}
}
}
while (iter.nextToken() == ',') {
String field = CodegenAccess.readObjectFieldAsString(iter);
int start = iter.head;
ValueType elementType = iter.skip();
int end = iter.head;
if (!object.containsKey(field)) {
Any value = new Any(elementType, data(), start, end);
object.put(field, value);
if (field.hashCode() == target.hashCode() && field.equals(target)) {
objectParsedHead = iter.head;
return value;
}
}
}
objectParsedHead = tail();
object.put(target, null);
return null;
}
private void fillObject() throws IOException {
if (objectParsedHead == tail()) {
return;
}
JsonIterator iter = tlsIter.get();
iter.reset(data(), objectParsedHead, tail());
if (object == null) {
object = new HashMap<Object, Any>(4);
}
if (!CodegenAccess.readObjectStart(iter)) {
objectParsedHead = tail();
return;
}
String field = CodegenAccess.readObjectFieldAsString(iter);
int start = iter.head;
ValueType elementType = iter.skip();
int end = iter.head;
if (!object.containsKey(field)) {
Any value = new Any(elementType, data(), start, end);
object.put(field, value);
}
while (iter.nextToken() == ',') {
field = CodegenAccess.readObjectFieldAsString(iter);
start = iter.head;
elementType = iter.skip();
end = iter.head;
if (!object.containsKey(field)) {
Any value = new Any(elementType, data(), start, end);
object.put(field, value);
}
}
objectParsedHead = tail();
}
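// Parses all array elements in one pass, wrapping each element's byte range in a
// child Any; once the array list exists, subsequent calls are no-ops.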
private void fillArray() throws IOException {
if (array != null) {
return;
}
JsonIterator iter = createIterator();
array = new ArrayList<Any>(4);
if (!CodegenAccess.readArrayStart(iter)) {
return;
}
int start = iter.head;
ValueType elementType = iter.skip();
int end = iter.head;
array.add(new Any(elementType, data(), start, end));
while (iter.nextToken() == ',') {
start = iter.head;
elementType = iter.skip();
end = iter.head;
array.add(new Any(elementType, data(), start, end));
}
}
@Override
public final Iterator<Any> iterator() {
if (ValueType.ARRAY != valueType()) {
throw unexpectedValueType(ValueType.ARRAY);
}
return new ArrayIterator();
}
private class ArrayIterator implements Iterator<Any> {
private final int size;
private int idx;
public ArrayIterator() {
size = size();
idx = 0;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
@Override
public boolean hasNext() {
return idx < size;
}
@Override
public Any next() {
return array.get(idx++);
}
}
}
|
package sg.ncl.service.team.web;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.jsonwebtoken.Claims;
import org.apache.commons.lang3.RandomStringUtils;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.http.MediaType;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContext;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.context.WebApplicationContext;
import sg.ncl.common.authentication.Role;
import sg.ncl.common.exception.base.ForbiddenException;
import sg.ncl.common.exception.base.UnauthorizedException;
import sg.ncl.common.jwt.JwtToken;
import sg.ncl.service.analytics.domain.AnalyticsService;
import sg.ncl.service.team.data.jpa.TeamEntity;
import sg.ncl.service.team.data.jpa.TeamQuotaEntity;
import sg.ncl.service.team.data.jpa.TeamRepository;
import sg.ncl.service.team.domain.*;
import sg.ncl.service.team.exceptions.TeamNotFoundException;
import sg.ncl.service.team.exceptions.TeamQuotaOutOfRangeException;
import javax.inject.Inject;
import java.math.BigDecimal;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.Matchers.*;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mockingDetails;
import static org.mockito.Mockito.when;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
import static sg.ncl.service.team.util.TestUtil.*;
/**
* @author dcsyeoty, Tran Ly Vu
*/
@RunWith(SpringRunner.class)
@WebMvcTest(controllers = TeamsController.class, secure = true)
@ContextConfiguration(classes = {TeamsController.class})
public class TeamsControllerTest {
@Rule
public ExpectedException exception = ExpectedException.none();
@Inject
private ObjectMapper mapper;
@Inject
private WebApplicationContext webApplicationContext;
@Mock
private Claims claims;
@Mock
private Authentication authentication;
@Mock
private SecurityContext securityContext;
private MockMvc mockMvc;
@MockBean
private TeamService teamService;
@MockBean
private AnalyticsService analyticsService;
@MockBean
private TeamRepository teamRepository;
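// Installs a mocked security context whose authenticated principal returns the
// mocked JWT claims, so controller endpoints that inspect the caller can be exercised.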
@Before
public void before() {
assertThat(mockingDetails(claims).isMock()).isTrue();
assertThat(mockingDetails(securityContext).isMock()).isTrue();
assertThat(mockingDetails(authentication).isMock()).isTrue();
assertThat(mockingDetails(teamService).isMock()).isTrue();
assertThat(mockingDetails(analyticsService).isMock()).isTrue();
assertThat(mockingDetails(teamRepository).isMock()).isTrue();
when(securityContext.getAuthentication()).thenReturn(authentication);
SecurityContextHolder.setContext(securityContext);
when(authentication.getPrincipal()).thenReturn(claims);
mockMvc = MockMvcBuilders.webAppContextSetup(webApplicationContext).build();
}
@Test
public void testGetAllTeamsWithNoUserInDb() throws Exception {
mockMvc.perform(get(TeamsController.PATH))
.andExpect(status().isOk())
.andExpect(content().contentTypeCompatibleWith(MediaType.APPLICATION_JSON));
}
@Test
public void testGetAllTeamsForbiddenException() throws Exception {
when(securityContext.getAuthentication()).thenReturn(null);
mockMvc.perform(get(TeamsController.PATH))
.andExpect(status().isForbidden());
}
@Test
public void testGetAll() throws Exception {
final List<Team> list = new ArrayList<>();
final TeamEntity entity1 = getTeamEntityWithId();
final TeamEntity entity2 = getTeamEntityWithId();
list.add(entity1);
list.add(entity2);
when(teamService.getAllTeams()).thenReturn(list);
mockMvc.perform(get(TeamsController.PATH))
.andExpect(status().isOk())
.andExpect(content().contentTypeCompatibleWith(MediaType.APPLICATION_JSON))
.andExpect(jsonPath("$", hasSize(2)))
.andExpect(jsonPath("$[0].id", is(equalTo(entity1.getId()))))
.andExpect(jsonPath("$[0].name", is(equalTo(entity1.getName()))))
.andExpect(jsonPath("$[0].description", is(equalTo(entity1.getDescription()))))
.andExpect(jsonPath("$[0].website", is(equalTo(entity1.getWebsite()))))
.andExpect(jsonPath("$[0].organisationType", is(equalTo(entity1.getOrganisationType()))))
.andExpect(jsonPath("$[0].visibility", is(equalTo(entity1.getVisibility().name()))))
.andExpect(jsonPath("$[0].privacy", is(equalTo(entity1.getPrivacy().name()))))
.andExpect(jsonPath("$[0].status", is(equalTo(entity1.getStatus().name()))))
.andExpect(jsonPath("$[1].id", is(equalTo(entity2.getId()))))
.andExpect(jsonPath("$[1].name", is(equalTo(entity2.getName()))))
.andExpect(jsonPath("$[1].description", is(equalTo(entity2.getDescription()))))
.andExpect(jsonPath("$[1].website", is(equalTo(entity2.getWebsite()))))
.andExpect(jsonPath("$[1].organisationType", is(equalTo(entity2.getOrganisationType()))))
.andExpect(jsonPath("$[1].visibility", is(equalTo(entity2.getVisibility().name()))))
.andExpect(jsonPath("$[1].privacy", is(equalTo(entity2.getPrivacy().name()))))
.andExpect(jsonPath("$[1].status", is(equalTo(entity2.getStatus().name()))));
}
@Test
public void testGetTeamByVisibilityPrivateWithNoAuthentication() throws Exception {
when(securityContext.getAuthentication()).thenReturn(null);
mockMvc.perform(get(TeamsController.PATH + "?visibility=" + TeamVisibility.PRIVATE))
.andExpect(status().isForbidden());
}
@Test
public void testGetTeamByVisibilityPublicWithNoAuthentication() throws Exception {
final List<Team> list = new ArrayList<>();
final TeamEntity entity = getTeamEntityWithId();
list.add(entity);
when(securityContext.getAuthentication()).thenReturn(null);
when(teamService.getTeamsByVisibility(any(TeamVisibility.class))).thenReturn(list);
mockMvc.perform(get(TeamsController.PATH + "?visibility=" + TeamVisibility.PUBLIC))
.andExpect(status().isOk())
.andExpect(content().contentTypeCompatibleWith(MediaType.APPLICATION_JSON))
.andExpect(jsonPath("$", hasSize(1)))
.andExpect(jsonPath("$[0].id", is(equalTo(entity.getId()))))
.andExpect(jsonPath("$[0].name", is(equalTo(entity.getName()))))
.andExpect(jsonPath("$[0].description", is(equalTo(entity.getDescription()))))
.andExpect(jsonPath("$[0].website", is(equalTo(entity.getWebsite()))))
.andExpect(jsonPath("$[0].organisationType", is(equalTo(entity.getOrganisationType()))))
.andExpect(jsonPath("$[0].visibility", is(equalTo(TeamVisibility.PUBLIC.name()))))
.andExpect(jsonPath("$[0].privacy", is(equalTo(entity.getPrivacy().name()))))
.andExpect(jsonPath("$[0].status", is(equalTo(entity.getStatus().name()))));
}
@Test
public void testGetTeamByVisibilityPrivateWithAuthentication() throws Exception {
final List<Team> list = new ArrayList<>();
final TeamEntity entity = getTeamEntityWithId();
entity.setVisibility(TeamVisibility.PRIVATE);
list.add(entity);
when(teamService.getTeamsByVisibility(any(TeamVisibility.class))).thenReturn(list);
mockMvc.perform(get(TeamsController.PATH + "?visibility=" + TeamVisibility.PUBLIC))
.andExpect(status().isOk())
.andExpect(content().contentTypeCompatibleWith(MediaType.APPLICATION_JSON))
.andExpect(jsonPath("$", hasSize(1)))
.andExpect(jsonPath("$[0].id", is(equalTo(entity.getId()))))
.andExpect(jsonPath("$[0].name", is(equalTo(entity.getName()))))
.andExpect(jsonPath("$[0].description", is(equalTo(entity.getDescription()))))
.andExpect(jsonPath("$[0].website", is(equalTo(entity.getWebsite()))))
.andExpect(jsonPath("$[0].organisationType", is(equalTo(entity.getOrganisationType()))))
.andExpect(jsonPath("$[0].visibility", is(equalTo(TeamVisibility.PRIVATE.name()))))
.andExpect(jsonPath("$[0].privacy", is(equalTo(entity.getPrivacy().name()))))
.andExpect(jsonPath("$[0].status", is(equalTo(entity.getStatus().name()))));
}
@Test
public void testGetTeamByNameTeamNotFoundException() throws Exception {
String name = RandomStringUtils.randomAlphanumeric(20);
mockMvc.perform(get(TeamsController.PATH + "?name=" + name))
.andExpect(status().isNotFound())
.andExpect(status().reason("Team not found"));
}
@Test
public void testGetTeamByName() throws Exception {
TeamInfo teamInfo = new TeamInfo(getTeamEntityWithId());
String name = RandomStringUtils.randomAlphanumeric(20);
when(teamService.getTeamByName(anyString())).thenReturn(teamInfo);
mockMvc.perform(get(TeamsController.PATH + "?name=" + name))
.andExpect(status().isOk())
.andExpect(content().contentTypeCompatibleWith(MediaType.APPLICATION_JSON))
.andExpect(jsonPath("$.id", is(equalTo(teamInfo.getId()))))
.andExpect(jsonPath("$.name", is(equalTo(teamInfo.getName()))))
.andExpect(jsonPath("$.description", is(equalTo(teamInfo.getDescription()))))
.andExpect(jsonPath("$.website", is(equalTo(teamInfo.getWebsite()))))
.andExpect(jsonPath("$.organisationType", is(equalTo(teamInfo.getOrganisationType()))))
.andExpect(jsonPath("$.visibility", is(equalTo(teamInfo.getVisibility().name()))))
.andExpect(jsonPath("$.privacy", is(equalTo(teamInfo.getPrivacy().name()))))
.andExpect(jsonPath("$.status", is(equalTo(teamInfo.getStatus().name()))));
}
@Test
public void testGetTeamByIdForbiddenException() throws Exception {
when(securityContext.getAuthentication()).thenReturn(null);
mockMvc.perform(get(TeamsController.PATH + "/" + "id"))
.andExpect(status().isForbidden());
}
@Test
public void testGetTeamByIdTeamNotFoundException() throws Exception {
when(securityContext.getAuthentication()).thenReturn(authentication);
mockMvc.perform(get(TeamsController.PATH + "/id"))
.andExpect(status().isNotFound())
.andExpect(status().reason("Team not found"));
}
@Test
public void testGetTeamById() throws Exception {
TeamInfo teamInfo = new TeamInfo(getTeamEntityWithId());
String id = RandomStringUtils.randomAlphanumeric(20);
when(teamService.getTeamById(anyString())).thenReturn(teamInfo);
mockMvc.perform(get(TeamsController.PATH + "/" + id))
.andExpect(status().isOk())
.andExpect(content().contentTypeCompatibleWith(MediaType.APPLICATION_JSON))
.andExpect(jsonPath("$.id", is(equalTo(teamInfo.getId()))))
.andExpect(jsonPath("$.name", is(equalTo(teamInfo.getName()))))
.andExpect(jsonPath("$.description", is(equalTo(teamInfo.getDescription()))))
.andExpect(jsonPath("$.website", is(equalTo(teamInfo.getWebsite()))))
.andExpect(jsonPath("$.organisationType", is(equalTo(teamInfo.getOrganisationType()))))
.andExpect(jsonPath("$.visibility", is(equalTo(teamInfo.getVisibility().name()))))
.andExpect(jsonPath("$.privacy", is(equalTo(teamInfo.getPrivacy().name()))))
.andExpect(jsonPath("$.status", is(equalTo(teamInfo.getStatus().name()))));
}
@Test
public void testUpdateTeamForbiddenException() throws Exception {
final byte[] content = mapper.writeValueAsBytes(new TeamInfo(getTeamEntityWithId()));
when(securityContext.getAuthentication()).thenReturn(null);
mockMvc.perform(put(TeamsController.PATH + "/id").contentType(MediaType.APPLICATION_JSON).content(content))
.andExpect(status().isForbidden());
}
@Test
public void testUpdateTeam() throws Exception {
TeamInfo teamInfo = new TeamInfo(getTeamEntityWithId());
final byte[] content = mapper.writeValueAsBytes(teamInfo);
when(securityContext.getAuthentication()).thenReturn(authentication);
when(teamService.updateTeam(anyString(), any(Team.class))).thenReturn(teamInfo);
mockMvc.perform(put(TeamsController.PATH + "/id").contentType(MediaType.APPLICATION_JSON).content(content))
.andExpect(status().isOk())
.andExpect(jsonPath("$.id", is(equalTo(teamInfo.getId()))))
.andExpect(jsonPath("$.name", is(equalTo(teamInfo.getName()))))
.andExpect(jsonPath("$.description", is(equalTo(teamInfo.getDescription()))))
.andExpect(jsonPath("$.website", is(equalTo(teamInfo.getWebsite()))))
.andExpect(jsonPath("$.organisationType", is(equalTo(teamInfo.getOrganisationType()))))
.andExpect(jsonPath("$.visibility", is(equalTo(teamInfo.getVisibility().name()))))
.andExpect(jsonPath("$.privacy", is(equalTo(teamInfo.getPrivacy().name()))))
.andExpect(jsonPath("$.status", is(equalTo(teamInfo.getStatus().name()))));
}
@Test
public void testGetTeamQuotaByTeamId() throws Exception {
final String randomUsage = RandomStringUtils.randomNumeric(10);
TeamQuota teamQuota = getTeamQuotaEntity();
TeamQuotaInfo teamQuotaInfo = new TeamQuotaInfo(teamQuota,randomUsage);
final byte[] content = mapper.writeValueAsBytes(teamQuotaInfo);
TeamEntity team = getTeamEntityWithId();
when(teamService.getTeamQuotaByTeamId(anyString())).thenReturn(teamQuota);
when(teamRepository.findOne(anyString())).thenReturn(team);
when(analyticsService.getUsageStatistics(anyString(), any(ZonedDateTime.class), any(ZonedDateTime.class))).thenReturn(randomUsage);
mockMvc.perform(get(TeamsController.PATH + "/teamId/quota").contentType(MediaType.APPLICATION_JSON).content(content))
.andExpect(status().isOk())
.andExpect(jsonPath("$.id", is(equalTo(teamQuotaInfo.getId()))))
.andExpect(jsonPath("$.teamId", is(equalTo(teamQuotaInfo.getTeamId()))))
.andExpect(jsonPath("$.quota", is(equalTo(teamQuotaInfo.getQuota().intValue()))))
.andExpect(jsonPath("$.usage", is(equalTo(randomUsage))));
}
@Test
public void testUpdateTeamQuotaUnauthorizedException() throws Exception{
final String randomUsage = RandomStringUtils.randomNumeric(10);
TeamQuotaInfo teamQuotaInfo = new TeamQuotaInfo(getTeamQuotaEntity(),randomUsage);
final byte[] content = mapper.writeValueAsBytes(teamQuotaInfo);
try {
mockMvc.perform(put(TeamsController.PATH + "/teamId/quota").contentType(MediaType.APPLICATION_JSON).content(content));
} catch (Exception e) {
assertThat(e.getCause().getClass()).isEqualTo(UnauthorizedException.class);
}
}
@Test
public void testUpdateTeamQuotaForbiddenException() throws Exception {
final String randomUsage = RandomStringUtils.randomNumeric(10);
TeamQuotaInfo teamQuotaInfo = new TeamQuotaInfo(getTeamQuotaEntity(),randomUsage);
final byte[] content = mapper.writeValueAsBytes(teamQuotaInfo);
when(teamService.isOwner(anyString(),anyString())).thenReturn(false);
try {
mockMvc.perform(put(TeamsController.PATH + "/teamId/quota").contentType(MediaType.APPLICATION_JSON).content(content));
} catch (Exception e) {
assertThat(e.getCause().getClass()).isEqualTo(ForbiddenException.class);
}
}
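// A quota below the allowed range (here zero) is expected to be rejected with TeamQuotaOutOfRangeException.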
@Test
public void testUpdateTeamQuotaTeamQuotaOutOfRangeException1() throws Exception {
final String randomUsage = RandomStringUtils.randomNumeric(10);
TeamQuotaEntity teamQuotaEntity = getTeamQuotaEntity();
teamQuotaEntity.setQuota(BigDecimal.valueOf(0));
TeamQuotaInfo teamQuotaInfo = new TeamQuotaInfo(teamQuotaEntity,randomUsage);
final byte[] content = mapper.writeValueAsBytes(teamQuotaInfo);
TeamEntity team = getTeamEntityWithId();
when(teamService.isOwner(anyString(),anyString())).thenReturn(true);
when(teamService.updateTeamQuota(anyString(), any(TeamQuota.class))).thenReturn(teamQuotaEntity);
when(teamRepository.findOne(anyString())).thenReturn(team);
when(analyticsService.getUsageStatistics(anyString(), any(ZonedDateTime.class), any(ZonedDateTime.class))).thenReturn(randomUsage);
try {
mockMvc.perform(put(TeamsController.PATH + "/teamId/quota").contentType(MediaType.APPLICATION_JSON).content(content));
} catch (Exception e) {
assertThat(e.getCause().getClass()).isEqualTo(TeamQuotaOutOfRangeException.class);
}
}
@Test
public void testUpdateTeamQuotaTeamQuotaOutOfRangeException2() throws Exception {
final String randomUsage = RandomStringUtils.randomNumeric(10);
TeamQuotaEntity teamQuotaEntity = getTeamQuotaEntity();
teamQuotaEntity.setQuota(BigDecimal.valueOf(100000000));
TeamQuotaInfo teamQuotaInfo = new TeamQuotaInfo(teamQuotaEntity, randomUsage);
final byte[] content = mapper.writeValueAsBytes(teamQuotaInfo);
TeamEntity team = getTeamEntityWithId();
when(teamService.isOwner(anyString(),anyString())).thenReturn(true);
when(teamRepository.findOne(anyString())).thenReturn(team);
when(analyticsService.getUsageStatistics(anyString(), any(ZonedDateTime.class), any(ZonedDateTime.class))).thenReturn(randomUsage);
when(teamService.updateTeamQuota(anyString(), any(TeamQuota.class))).thenReturn(teamQuotaEntity);
try {
mockMvc.perform(put(TeamsController.PATH + "/teamId/quota" ).contentType(MediaType.APPLICATION_JSON).content(content));
} catch (Exception e) {
assertThat(e.getCause().getClass()).isEqualTo(TeamQuotaOutOfRangeException.class);
}
}
@Test
public void testUpdateTeamQuota() throws Exception {
final String randomUsage = RandomStringUtils.randomNumeric(10);
final TeamQuotaEntity teamQuotaEntity = getTeamQuotaEntity();
final TeamQuotaInfo teamQuotaInfo = new TeamQuotaInfo(teamQuotaEntity,randomUsage);
final byte[] content = mapper.writeValueAsBytes(teamQuotaInfo);
TeamEntity team = getTeamEntityWithId();
when(teamService.isOwner(anyString(),anyString())).thenReturn(true);
when(teamService.getTeamById(anyString())).thenReturn(team);
when(teamRepository.findOne(anyString())).thenReturn(team);
when(analyticsService.getUsageStatistics(anyString(), any(ZonedDateTime.class), any(ZonedDateTime.class))).thenReturn(randomUsage);
when(teamService.updateTeamQuota(anyString(), any(TeamQuota.class))).thenReturn(teamQuotaEntity);
mockMvc.perform(put(TeamsController.PATH + "/teamId/quota").contentType(MediaType.APPLICATION_JSON).content(content))
.andExpect(status().isOk())
.andExpect(jsonPath("$.id", is(equalTo(teamQuotaInfo.getId()))))
.andExpect(jsonPath("$.teamId", is(equalTo(teamQuotaInfo.getTeamId()))))
.andExpect(jsonPath("$.quota", is(equalTo(teamQuotaInfo.getQuota().intValue()))));
}
@Test
public void testUpdateTeamStatusUnauthorizedException() throws Exception {
when(securityContext.getAuthentication()).thenReturn(null);
try {
mockMvc.perform(put(TeamsController.PATH + "/id/status/" + TeamStatus.RESTRICTED));
} catch (Exception e) {
assertThat(e.getCause().getClass()).isEqualTo(UnauthorizedException.class);
}
}
@Test
public void testUpdateTeamStatusNotAdmin() throws Exception {
final List<String> roles = new ArrayList<>();
roles.add(Role.USER.getAuthority());
when(securityContext.getAuthentication()).thenReturn(authentication);
when(claims.get(JwtToken.KEY)).thenReturn(roles);
mockMvc.perform(put(TeamsController.PATH + "/id/status/" + TeamStatus.RESTRICTED))
.andExpect(status().isForbidden());
}
@Test
public void testUpdateTeamStatus() throws Exception {
TeamEntity entity = getTeamEntityWithId();
entity.setStatus(TeamStatus.RESTRICTED);
TeamInfo teamInfo = new TeamInfo(entity);
final byte[] content = mapper.writeValueAsBytes(teamInfo);
final List<String> roles = new ArrayList<>();
roles.add(Role.ADMIN.getAuthority());
when(securityContext.getAuthentication()).thenReturn(authentication);
when(claims.get(JwtToken.KEY)).thenReturn(roles);
when(teamService.updateTeamStatus(anyString(), any(TeamStatus.class))).thenReturn(teamInfo);
mockMvc.perform(put(TeamsController.PATH + "/id/status/" + TeamStatus.RESTRICTED).contentType(MediaType.APPLICATION_JSON).content(content))
.andExpect(status().isOk())
.andExpect(jsonPath("$.id", is(equalTo(teamInfo.getId()))))
.andExpect(jsonPath("$.status", is(equalTo(TeamStatus.RESTRICTED.name()))));;
}
@Test
public void testAddTeamMemberForbiddenException() throws Exception {
final byte[] content = mapper.writeValueAsBytes(getTeamMemberInfo(MemberType.MEMBER, MemberStatus.APPROVED));
when(securityContext.getAuthentication()).thenReturn(null);
mockMvc.perform(post(TeamsController.PATH + "/id/members").contentType(MediaType.APPLICATION_JSON).content(content))
.andExpect(status().isForbidden());
}
@Test
public void testAddTeamMember() throws Exception {
Team team = getTeamEntityWithId();
final byte[] content = mapper.writeValueAsBytes(getTeamMemberInfo(MemberType.MEMBER, MemberStatus.APPROVED));
when(securityContext.getAuthentication()).thenReturn(authentication);
when(teamService.addMember(anyString(), any(TeamMember.class))).thenReturn(team);
mockMvc.perform(post(TeamsController.PATH + "/id/members").contentType(MediaType.APPLICATION_JSON).content(content))
.andExpect(status().isCreated())
.andExpect(jsonPath("$.id", is(equalTo(team.getId()))))
.andExpect(jsonPath("$.name", is(equalTo(team.getName()))))
.andExpect(jsonPath("$.description", is(equalTo(team.getDescription()))))
.andExpect(jsonPath("$.website", is(equalTo(team.getWebsite()))))
.andExpect(jsonPath("$.organisationType", is(equalTo(team.getOrganisationType()))))
.andExpect(jsonPath("$.visibility", is(equalTo(team.getVisibility().name()))))
.andExpect(jsonPath("$.privacy", is(equalTo(team.getPrivacy().name()))))
.andExpect(jsonPath("$.status", is(equalTo(team.getStatus().name()))))
.andExpect(jsonPath("$.members", is(equalTo(team.getMembers()))));
}
@Test
public void testRemoveTeamMemberForbiddenException() throws Exception {
final byte[] content = mapper.writeValueAsBytes(getTeamMemberInfo(MemberType.MEMBER, MemberStatus.APPROVED));
when(securityContext.getAuthentication()).thenReturn(null);
mockMvc.perform(delete(TeamsController.PATH + "/id/members").contentType(MediaType.APPLICATION_JSON).content(content))
.andExpect(status().isForbidden());
}
@Test
public void testRemoveTeamMember() throws Exception {
Team team = getTeamEntityWithId();
final byte[] content = mapper.writeValueAsBytes(getTeamMemberInfo(MemberType.MEMBER, MemberStatus.APPROVED));
when(securityContext.getAuthentication()).thenReturn(authentication);
when(claims.getSubject()).thenReturn("ownerId");
when(teamService.removeMember(anyString(), any(TeamMember.class), anyString())).thenReturn(team);
mockMvc.perform(delete(TeamsController.PATH + "/id/members").contentType(MediaType.APPLICATION_JSON).content(content))
.andExpect(status().isOk())
.andExpect(jsonPath("$.id", is(equalTo(team.getId()))))
.andExpect(jsonPath("$.name", is(equalTo(team.getName()))))
.andExpect(jsonPath("$.description", is(equalTo(team.getDescription()))))
.andExpect(jsonPath("$.website", is(equalTo(team.getWebsite()))))
.andExpect(jsonPath("$.organisationType", is(equalTo(team.getOrganisationType()))))
.andExpect(jsonPath("$.visibility", is(equalTo(team.getVisibility().name()))))
.andExpect(jsonPath("$.privacy", is(equalTo(team.getPrivacy().name()))))
.andExpect(jsonPath("$.status", is(equalTo(team.getStatus().name()))))
.andExpect(jsonPath("$.members", is(equalTo(team.getMembers()))));
}
}
|
package cz.req.ax;
import com.google.common.base.CaseFormat;
import com.vaadin.data.util.converter.StringToIntegerConverter;
import com.vaadin.navigator.View;
import com.vaadin.navigator.ViewChangeListener;
import com.vaadin.server.FontAwesome;
import com.vaadin.ui.Component;
import com.vaadin.ui.TabSheet;
import java.util.Locale;
import java.util.stream.Stream;
public abstract class AxView extends RootLayout implements View, Navigation, Components, Push {
TabSheet tabSheet;
String parameters;
protected AxView() {
String name = getClass().getSimpleName();
setStyleName("page-root");
addStyleName(CaseFormat.UPPER_CAMEL.to(CaseFormat.LOWER_HYPHEN, name));
addStyleName(name.replaceAll("View", "-view").toLowerCase()); // Pouze zpetna kompatibilita
}
public String getParameters() {
return parameters;
}
public Integer getParameterInteger() {
try {
return new StringToIntegerConverter().convertToModel(parameters, Integer.class, Locale.getDefault());
} catch (Exception ex) {
return null;
}
}
public Integer[] getParameterIntegers() {
try {
return Stream.of(getParameterStrings()).map(Integer::parseInt).toArray(Integer[]::new);
} catch (Exception ex) {
return null;
}
}
public String[] getParameterStrings() {
return parameters.split("/");
}
@Override
public void enter(ViewChangeListener.ViewChangeEvent event) {
parameters = event.getParameters();
}
//TODO Refactorize
public AxView actions(AxAction... actions) {
menuActions(actions);
return this;
}
//TODO Refactorize
public AxView components(Component... components) {
mainComponents(components);
return this;
}
//TODO Refactorize
public AxView components(String layoutName, Component... components) {
bodyLayout(layoutName).addComponents(components);
return this;
}
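// Lazily creates a single full-width TabSheet, wires a refresh of the newly selected tab,
// and adds it to the view's main components on first use.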
public TabSheet tabSheet() {
if (tabSheet == null) {
tabSheet = new TabSheet();
tabSheet.setWidth(100, Unit.PERCENTAGE);
tabSheet.setHeightUndefined();
tabSheet.addSelectedTabChangeListener(event -> Refresh.tryRefresh(tabSheet.getSelectedTab()));
components(tabSheet);
}
return tabSheet;
}
public TabSheet.Tab addTabSheet(String caption, FontAwesome awesome, final ComponentWrapper component) {
tabSheet().addSelectedTabChangeListener(event -> {
if (tabSheet().getSelectedTab().equals(component.getComponent())) {
Refresh.tryRefresh(component);
}
});
return tabSheet().addTab(component.getComponent(), caption, awesome);
}
public TabSheet.Tab addTabSheet(String caption, FontAwesome awesome, Component component) {
return tabSheet().addTab(component, caption, awesome);
}
public void removeAllComponents() {
mainPanel().removeAllComponents();
}
}
|
package gui.game;
import common.BaseLogger;
import common.data.GameMap;
import engine.core.Engine;
import javafx.application.Application;
import javafx.collections.ObservableMap;
import javafx.fxml.FXMLLoader;
import javafx.scene.Group;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.layout.Pane;
import javafx.scene.layout.StackPane;
import javafx.stage.Stage;
import org.mapeditor.core.Map;
import org.mapeditor.core.MapLayer;
import org.mapeditor.core.Tile;
import org.mapeditor.core.TileLayer;
import org.mapeditor.io.TMXMapReader;
import javax.imageio.ImageIO;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.awt.image.RenderedImage;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
public class GameGUI extends Application {
private static final BaseLogger LOGGER = new BaseLogger("GameGUI");
private Engine _engine = null;
TMXMapReader mapReader = new TMXMapReader();
Map gameMap = null;
Pane _imagePane;
public GameGUI() throws Exception {
//Create Engine
_engine = new Engine();
LOGGER.info("Beginning core game battle...");
this._engine.startGame();
start(new Stage());
}
public GameGUI(String[] args) {
new Thread(
() -> launch(args)
).start();
}
@Override
public void start(Stage primaryStage) throws Exception {
FXMLLoader loader = new FXMLLoader(getClass().getResource("game.fxml"));
Parent root = loader.load();
ObservableMap namespace = loader.getNamespace();
StackPane pane = (StackPane) namespace.get("pane");
Group group = new Group();
_imagePane = new Pane();
_imagePane.setPrefSize(1600, 900);
pane.getChildren().add(group);
group.getChildren().add(_imagePane);
primaryStage.setTitle("Code Carnage");
primaryStage.setScene(new Scene(root));
primaryStage.show();
try {
gameMap = mapReader.readMap("./src/main/resources/game-map.tmx");
} catch (Exception ex) {
ex.printStackTrace();
}
updateGameGUI(gameMap);
}
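/**
 * Converts an AWT image to a JavaFX Image by drawing it onto an ARGB buffer
 * (when it is not already a RenderedImage) and re-encoding it as PNG in memory.
 */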
private Image createImage(BufferedImage image) throws IOException {
if (!(image instanceof RenderedImage)) {
BufferedImage bufferedImage = new BufferedImage(image.getWidth(null),
image.getHeight(null), BufferedImage.TYPE_INT_ARGB);
Graphics g = bufferedImage.createGraphics();
g.drawImage(image, 0, 0, null);
g.dispose();
image = bufferedImage;
}
ByteArrayOutputStream out = new ByteArrayOutputStream();
ImageIO.write((RenderedImage) image, "png", out);
out.flush();
ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
return new javafx.scene.image.Image(in);
}
public void updateGameGUI(Map gameMap){
ArrayList<MapLayer> layerList = new ArrayList<>(gameMap.getLayers());
for (MapLayer layer : layerList) {
TileLayer tileLayer = (TileLayer) layer;
if (tileLayer == null) {
System.out.println("can't get map layer");
System.exit(-1);
}
int width = tileLayer.getBounds().width;
int height = tileLayer.getBounds().height;
Tile tile = null;
int tileID;
HashMap<Integer, Image> tileHash = new HashMap<Integer, Image>();
Image tileImage = null;
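// Convert each tile's image to a JavaFX Image once per tile id (cached in tileHash)
// and place it at its 32x32 pixel grid position.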
for (int y = 0; y < height; y++) {
for (int x = 0; x < width; x++) {
tile = tileLayer.getTileAt(x, y);
if (tile == null) {
continue;
}
tileID = tile.getId();
if (tileHash.containsKey(tileID)) {
tileImage = tileHash.get(tileID);
} else {
try {
tileImage = createImage(tile.getImage());
} catch (Exception ex) {
ex.printStackTrace();
}
tileHash.put(tileID, tileImage);
}
ImageView i = new ImageView(tileImage);
i.setTranslateX(x * 32);
i.setTranslateY(y * 32);
_imagePane.getChildren().add(i);
}
}
tileHash = null;
gameMap = null;
}
}
}
|
package jdbm;
import java.io.*;
import java.nio.channels.OverlappingFileLockException;
import java.util.ArrayList;
/**
* Storage which used files on disk to store data
*/
class StorageDisk implements Storage{
/** maximal file size not rounded to block size */
private final static long _FILESIZE = 1000000000L;
/** maximal file size rounded to block size */
private final long MAX_FILE_SIZE = _FILESIZE - _FILESIZE % BLOCK_SIZE;
private ArrayList<RandomAccessFile> rafs = new ArrayList<RandomAccessFile>();
private String fileName;
public StorageDisk(String fileName) throws IOException {
this.fileName = fileName;
//make sure first file can be opened
//lock it
try{
getRaf(0).getChannel().tryLock();
}catch(IOException e){
throw new IOException("Could not lock DB file: "+fileName,e);
}catch(OverlappingFileLockException e){
throw new IOException("Could not lock DB file: "+fileName,e);
}
}
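/**
 * Returns the RandomAccessFile backing the given byte offset. Data is striped across
 * files of at most MAX_FILE_SIZE bytes named fileName.0, fileName.1, ...; each file
 * is opened lazily on first access.
 */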
RandomAccessFile getRaf(long offset) throws IOException {
int fileNumber = (int) (offset/MAX_FILE_SIZE);
//increase capacity of array lists if needed
for(int i = rafs.size();i<=fileNumber;i++){
rafs.add(null);
}
RandomAccessFile ret = rafs.get(fileNumber);
if(ret == null){
String name = fileName+"."+fileNumber;
ret = new RandomAccessFile(name, "rw");
rafs.set(fileNumber, ret);
}
return ret;
}
/**
* Synchronizes the file.
*/
public void sync() throws IOException {
for(RandomAccessFile file:rafs)
if(file!=null)
file.getFD().sync();
}
public void write(long pageNumber, byte[] data) throws IOException {
if(data.length!= BLOCK_SIZE) throw new IllegalArgumentException();
long offset = pageNumber * BLOCK_SIZE;
RandomAccessFile file = getRaf(offset);
file.seek(offset % MAX_FILE_SIZE);
file.write(data);
}
public void forceClose() throws IOException {
for(RandomAccessFile f :rafs){
if(f!=null)
f.close();
}
rafs = null;
}
public void read(long pageNumber, byte[] buffer) throws IOException {
if(buffer.length!= BLOCK_SIZE) throw new IllegalArgumentException();
long offset = pageNumber * BLOCK_SIZE;
RandomAccessFile file = getRaf(offset);
file.seek(offset%MAX_FILE_SIZE);
int remaining = buffer.length;
int pos = 0;
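//read until the buffer is full; on end-of-file, pad the remainder with CLEAN_DATA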
while (remaining > 0) {
int read = file.read(buffer, pos, remaining);
if (read == -1) {
System.arraycopy(RecordFile.CLEAN_DATA, 0, buffer, pos, remaining);
break;
}
remaining -= read;
pos += read;
}
}
static final String transaction_log_file_extension = ".t";
public DataOutputStream openTransactionLog() throws IOException {
String logName = fileName + transaction_log_file_extension;
final FileOutputStream fileOut = new FileOutputStream(logName);
return new DataOutputStream(new BufferedOutputStream(fileOut)){
//default implementation of flush on FileOutputStream does nothing,
//so we use little workaround to make sure that data were really flushed
public void flush() throws IOException {
super.flush();
fileOut.flush();
fileOut.getFD().sync();
}
};
}
public DataInputStream readTransactionLog() {
File logFile = new File(fileName + transaction_log_file_extension);
if (!logFile.exists())
return null;
if (logFile.length() == 0) {
logFile.delete();
return null;
}
DataInputStream ois = null;
try {
ois = new DataInputStream(new BufferedInputStream(new FileInputStream(logFile)));
} catch (FileNotFoundException e) {
//file should exist, we checked for its presence just milliseconds earlier; anyway, move on
return null;
}
try {
if (ois.readShort() != Magic.LOGFILE_HEADER)
throw new Error("Bad magic on log file");
} catch (IOException e) {
// corrupted/empty logfile
logFile.delete();
return null;
}
return ois;
}
public void deleteTransactionLog() {
File logFile = new File(fileName + transaction_log_file_extension);
if(logFile.exists())
logFile.delete();
}
public boolean isReadonly(){return false;}
}
|
package bankersalgorithm;
import java.io.IOException;
import java.util.Scanner;
public class BankersAlgorithm {
//np = no. of processes, nr = no. of resources
private int need[][],allocate[][],max[][],avail[][],np,nr;
private void input(){
Scanner sc = new Scanner(System.in);
System.out.print("Enter no. of processes and resources: ");
np=sc.nextInt(); //no. of processes
nr=sc.nextInt(); //no. of resources
need=new int[np][nr]; //initializing arrays
max=new int[np][nr];
allocate=new int[np][nr];
avail=new int[1][nr];
System.out.println("Enter allocation matrix: ");
for(int i=0;i<np;i++)
for(int j=0;j<nr;j++)
allocate[i][j]=sc.nextInt(); //allocation matrix
System.out.println("Enter max. matrix: ");
for(int i=0;i<np;i++)
for(int j=0;j<nr;j++)
max[i][j]=sc.nextInt(); //max matrix
System.out.println("Enter available matrix: ");
for(int j=0;j<nr;j++)
avail[0][j]=sc.nextInt(); //available matrix
sc.close();
}
//calculate the need matrix
private int[][] calc_need(){
for(int i=0;i<np;i++){
for(int j=0;j<nr;j++){ //calculating need matrix
need[i][j]=max[i][j]-allocate[i][j];
}
}
return need;
}
//Check if the requested resource is available or not
private boolean check(int i){
//checking if all resources needed by the i-th process can be allocated
for(int j=0;j<nr;j++) {
if(avail[0][j]<need[i][j]) {
return false;
}
}
return true;
}
// Check if by fulfilling the resource request the system remains in safe state
public void isSafe(){
input(); //collecting data from the user
calc_need(); //compute the need matrix
boolean done[]=new boolean[np];
int j=0;
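//greedily pick any unfinished process whose need fits in the available resources,
//simulate it running to completion (releasing its allocation), and repeat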
while(j<np){ //loop until all process allocated
boolean allocated=false;
for(int i=0;i<np;i++)
if(!done[i] && check(i)){ //trying to allocate
for(int k=0;k<nr;k++)
avail[0][k]=avail[0][k]-need[i][k]+max[i][k];
System.out.println("Allocated process : "+i);
allocated=done[i]=true;
j++;
}
if(!allocated) break; //if no allocation
}
if(j==np) //if all processes are allocated
System.out.println("\nSafely allocated!");
else
System.err.println("\nAll proceess can't be allocated safely!");
}
public static void main(String[] args) throws IOException {
new BankersAlgorithm().isSafe();
}
}
|
package netty;
import java.net.InetSocketAddress;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueue;
import servers.*;
import util.FileOps;
/**
* Handles a server-side channel.
*/
public class InHandler2 extends ChannelInboundHandlerAdapter {
private static final Logger LOG = LogManager.getLogger(InHandler2.class);
private NodeServerProperties1 properties;
private NettyClient1 nettyClientInhandler;
public InHandler2(NodeServerProperties1 nsProperties) {
this.properties = nsProperties;
this.nettyClientInhandler = new NettyClient1(this.properties);
}
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) {
ByteBuf in = (ByteBuf) msg;
String requestMsg = in.toString(StandardCharsets.UTF_8 );
LOG.debug(">>>Channel Read:" + requestMsg);
String response = handleClientRequest(requestMsg);
LOG.debug("<<<Response:" + response);
if(response.length()>0){
ctx.write(Unpooled.copiedBuffer(response+"\r\n", StandardCharsets.UTF_8));
ctx.flush();
}
ctx.close();
}
@Override
public void channelRegistered(ChannelHandlerContext ctx) throws Exception {
super.channelRegistered(ctx);
LOG.debug("Channel Registered: "+ ctx.channel().localAddress() + ":" + ctx.channel().remoteAddress());
}
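/**
 * Dispatches on the message prefix (READ, WRITE, PROPOSE, ACK_PROPOSAL, COMMIT, JOIN_GROUP,
 * election notifications, FOLLOWERINFO, NEWEPOCH, ACKEPOCH, DIFF, SNAP, READY) and returns
 * the response to send back, or an empty string when no reply is needed.
 */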
private String handleClientRequest(String requestMsg) {
// LOG.debug("handleClientRequest:"+requestMsg);
if(requestMsg.contains("READ:")){
LOG.info("Node ID:" + this.properties.getNodeId() + "received Read() request from client");
String[] arr = requestMsg.split(":");
String key = arr[1].trim();
Properties properties = this.properties.getDataMap();
if(properties.containsKey(key)){
String value = properties.getProperty(key);
LOG.info("Returning from local replica: Reply:" + key + value);
return value;
}
else{
return "READ ERROR: No result for key:" + key;
}
}
if(requestMsg.contains("WRITE:")){
if(!properties.isLeader()){ //follower
//Forward write request to the leader
LOG.info("Follower received WRITE request from client, forwarding to the leader..!!");
this.nettyClientInhandler.sendMessage(properties.getLeaderAddress().getHostName(), properties.getLeaderAddress().getPort(), requestMsg);
return "OK";
}
else{ //leader
//"WRITE:KEY:VALUE"
String[] arr = requestMsg.split(":");
//Key-value pair to be proposed
String key = arr[1].trim();
String value = arr[2].trim();
long epoch = this.properties.getAcceptedEpoch();
synchronized(this.properties){
long counter = this.properties.getCounter();
counter++;
this.properties.setCounter(counter);
}
//Form a proposal
ZxId z = new ZxId(epoch, this.properties.getCounter());
Proposal p = new Proposal(z, key, value);
String proposal = "PROPOSE:" + p.toString();
//enqueue this proposal to proposed transactions to keep the count of Acknowledgements
ConcurrentHashMap<Proposal, AtomicInteger> proposedtransactions = properties.getSynData().getProposedTransactions();
proposedtransactions.put(p, new AtomicInteger(1));
//checking if the entry is enqueued in the proposed transaction map
LOG.debug("Checking the counter right after enqueueing the entry: " + proposedtransactions.get(p));
//send proposal to quorum
LOG.info("Leader:" + "Sending proposal to everyone:" + proposal);
LOG.debug("Number of members:" + properties.getMemberList().size());
for (Entry<Long, InetSocketAddress> member : properties.getMemberList().entrySet()) {
LOG.debug("Sending "+proposal+" to: "+ member.getValue().getHostName() + ":"+ member.getValue().getPort());
this.nettyClientInhandler.sendMessage(member.getValue().getHostName(), member.getValue().getPort(), proposal);
}
}
return "OK";
}
if(requestMsg.contains("PROPOSE")){
if(properties.isLeader()){ // Leader will not accept this message
LOG.debug("I am the Leader, I do not accept proposals");
return "ERROR: I am the eader, I send proposals, not accept!";
}
else{ ///Follower
//enqueue this message to proposal queue
String[] arr = requestMsg.split(":");
Long epoch = Long.parseLong(arr[1].trim());
Long counter = Long.parseLong(arr[2].trim());
ZxId z = new ZxId(epoch, counter);
String key = arr[3].trim();
String value = arr[4].trim();
Proposal proposal = new Proposal(z,key,value);
properties.getSynData().getProposedTransactions().put(proposal, new AtomicInteger(0));
LOG.info("Enqueing proposal in Proposal Queue:" + proposal);
LOG.info("Sending Acknowledgement to the leader");
return "ACK_PROPOSAL:" + proposal.toString();
}
}
if(requestMsg.contains("ACK_PROPOSAL")){
if(!properties.isLeader()){//follower
//follower should disregard this message
LOG.debug("Follower got ACK_PROPOSAL, shouldn't happen!");
return "ERROR:Follower got ACK_PROPOSAL";
}
else{//Leader
String[] arr = requestMsg.split(":");
//Parsing proposal for which acknowledgement was received
Long epoch = Long.parseLong(arr[1].trim());
Long counter = Long.parseLong(arr[2].trim());
ZxId z = new ZxId(epoch, counter);
String key = arr[3].trim();
String value = arr[4].trim();
Proposal p = new Proposal(z,key,value);
//we have to increment the ack count for this zxid
LOG.debug("Leader: Got ACK_PROPOSAL, incrementing count for zxid" + z);
//checking the ack count for the proposal (counter value)
ConcurrentHashMap<Proposal, AtomicInteger> proposedtransactions = properties.getSynData().getProposedTransactions();
synchronized (proposedtransactions) {
proposedtransactions.get(p).incrementAndGet();
}
return "OK";
}
}
if(requestMsg.contains("COMMIT:")){
if(properties.isLeader()){ // leader will not accept this message
LOG.debug("I am the Leader, I do not accept commit messages");
return "ERROR: I am the eader, I send proposals, not accept!";
}
else{//follower
LOG.debug ("Follower received COMMIT message");
LOG.info ("COMMIT message is:" + requestMsg);
String[] arr = requestMsg.split(":");
//Parsing proposal for which acknowledgement was received
Long epoch = Long.parseLong(arr[1].trim());
Long counter = Long.parseLong(arr[2].trim());
ZxId z = new ZxId(epoch, counter);
String key = arr[3].trim();
String value = arr[4].trim();
Proposal p = new Proposal(z,key,value);
ConcurrentHashMap<Proposal, AtomicInteger> proposalMap = properties.getSynData().getProposedTransactions();
LOG.debug("Map Size when Commit received: "+proposalMap.size());
LOG.debug("Map when Commit received: "+ proposalMap);
if(proposalMap.containsKey(p)){
LOG.debug("Commit Queue contains the transaction to be removed:" + p);
//String fileName = "CommitedHistory_" + properties.getNodePort() + ".log";
//FileOps.appendTransaction(fileName, p.toString());
synchronized (properties.getSynData().getProposedTransactions()) {
//remove from proposedtransactions map
LOG.debug("Inside synchronized block....!!!");
properties.getSynData().getProposedTransactions().remove(p);
//enqueue in commitQueue
properties.getSynData().getCommittedTransactions().add(p);
}
}
return "OK";
}
}
if(requestMsg.contains("JOIN_GROUP:")){
//add the ip:port to the group member list;
String[] arr = requestMsg.split(":");
long nodeId = Long.parseLong(arr[1].trim());
InetSocketAddress addr = new InetSocketAddress(arr[2].trim(), Integer.parseInt(arr[3].trim()));
properties.addMemberToList(nodeId, addr);
LOG.debug(properties.getMemberList());
return "OK";
}
if(requestMsg.contains("CNOTIFICATION:")){
//handle an election notification received from another node
String[] arr = requestMsg.split(":");
Notification responseNotification = new Notification(arr[1].trim());
if(properties.getNodestate() == NodeServerProperties1.State.ELECTION){
MpscArrayQueue<Notification> currentElectionQueue = properties.getSynData().getElectionQueue();
LOG.debug("Before:"+currentElectionQueue.currentProducerIndex());
LOG.debug("adding notification to the queue"+ responseNotification.toString());
currentElectionQueue.offer(responseNotification);
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
LOG.debug("After:"+currentElectionQueue.currentProducerIndex());
LOG.debug("NODE is in STATE: "+ properties.getNodestate());
LOG.debug("My Election ROUND: "+ properties.getElectionRound());
LOG.debug("his Election ROUND: "+ responseNotification.getSenderRound());
if(responseNotification.getSenderState() == NodeServerProperties1.State.ELECTION
&& responseNotification.getSenderRound() <= properties.getElectionRound()){
// get my current vote from FLE or when FLE is underway
Vote myVote = properties.getMyVote();
//public Notification(Vote vote, long id, servers.NodeServerProperties1.State state, long round)
Notification myNotification = new Notification(myVote, properties.getNodeId(), properties.getNodestate(), properties.getElectionRound());
return("SNOTIFICATION:"+myNotification.toString());
}
}
else if(responseNotification.getSenderState() == NodeServerProperties1.State.ELECTION){
// get my current vote from FLE or when FLE is underway
Vote myVote = properties.getMyVote();
Notification myNotification = new Notification(myVote, properties.getNodeId(), properties.getNodestate(), properties.getElectionRound());
LOG.debug("myNotification:"+myNotification);
return("SNOTIFICATION:"+myNotification.toString());
}
return("");
}
if(requestMsg.contains("SNOTIFICATION:")){
//enqueue the election notification response if this node is still in the ELECTION state
String[] arr = requestMsg.split(":");
Notification responseNotification = new Notification(arr[1].trim());
if(properties.getNodestate() == NodeServerProperties1.State.ELECTION){
MpscArrayQueue<Notification> currentElectionQueue = properties.getSynData().getElectionQueue();
LOG.debug("Before:"+currentElectionQueue.currentProducerIndex());
currentElectionQueue.offer(responseNotification);
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
LOG.debug("After:"+currentElectionQueue.currentProducerIndex());
}
LOG.debug(properties.getMemberList());
return("ERROR");
}
if(requestMsg.contains("OK")){
//plain acknowledgement from a peer; nothing further to do
// String[] arr = requestMsg.split(":");
// InetSocketAddress addr = new InetSocketAddress(arr[1].trim(), Integer.parseInt(arr[2].trim()));
// server.addMemberToList(addr);
LOG.debug("Client received OK!!");
LOG.debug(properties.getMemberList());
return "";
}
if (requestMsg.contains("FOLLOWERINFO")){
LOG.debug("Request msg is = " + requestMsg);
String[] accEpoch = requestMsg.split(":");
long nodeId = Long.parseLong(accEpoch[1].trim());
long acceptedEpoch = Long.parseLong(accEpoch[2].trim());
// long currentEpoch = Long.parseLong(accEpoch[3].trim());
// long currentCounter = Long.parseLong(accEpoch[4].trim());
//ZxId followerLastCommittedZxid = new ZxId(currentEpoch, currentCounter);
ConcurrentHashMap<Long, Long> acceptedEpochMap = this.properties.getSynData().getAcceptedEpochMap();
//ConcurrentHashMap<Long, ZxId> currentEpochMap = properties.getSynData().getCurrentEpochMap();
synchronized (acceptedEpochMap){
acceptedEpochMap.put(nodeId, acceptedEpoch);
}
//currentEpochMap.put(nodeId, followerLastCommittedZxid);
if (acceptedEpochMap.size() < (this.properties.getMemberList().size() / 2) ){
synchronized (acceptedEpochMap){
try {
acceptedEpochMap.wait(100);
} catch (InterruptedException e){
e.printStackTrace();
}
}
}
while (!this.properties.getSynData().isNewEpochFlag()){
try {
Thread.sleep(100);
} catch (InterruptedException e){
e.printStackTrace();
}
}
return "NEWEPOCH:" + properties.getNewEpoch() + ":" + this.properties.getNodeId();
}
if (requestMsg.contains("NEWEPOCH")){
Map<Long, InetSocketAddress> memberList = this.properties.getMemberList();
LOG.debug("Member List is = " + memberList);
String ackepochmsg = "";
String[] newEpocharr = requestMsg.split(":");
long newEpoch = Long.parseLong(newEpocharr[1].trim());
long nodeId = Long.parseLong(newEpocharr[2].trim());
properties.getSynData().setNewEpoch(newEpoch);
LOG.debug("New Epoch received is = " + newEpoch);
this.properties.setNewEpoch(newEpoch);
long acceptedEpoch = this.properties.getAcceptedEpoch();
if (newEpoch > acceptedEpoch){
this.properties.setAcceptedEpoch(newEpoch);
this.properties.setCounter(0);
String myLastLog = FileOps.readLastLog(properties);
String[] arr = myLastLog.split(",");
long currentEpoch = Long.parseLong(arr[0].trim());
long currentCounter = Long.parseLong(arr[1].trim());
ackepochmsg = "ACKEPOCH:" + this.properties.getNodeId()
+ ":" + currentEpoch + ":" + currentCounter;
this.nettyClientInhandler.sendMessage(memberList.get(nodeId).getHostName(), memberList.get(nodeId).getPort(), ackepochmsg);
return "";
} else {
this.properties.setNodestate(NodeServerProperties1.State.ELECTION);
LOG.debug("Going to Leader Election");
}
}
if (requestMsg.contains("ACKEPOCH")){
LOG.debug("Ack new epoch message received = " + requestMsg);
Map<Long, InetSocketAddress> memberList = this.properties.getMemberList();
String[] currEpochArr = requestMsg.split(":");
long nodeId = Long.parseLong(currEpochArr[1].trim());
long currentEpoch = Long.parseLong(currEpochArr[2].trim());
long currentCounter = Long.parseLong(currEpochArr[3].trim());
ZxId followerLastCommittedZxid = new ZxId(currentEpoch, currentCounter);
LOG.debug("follower last committed zxid = " + followerLastCommittedZxid.getEpoch()
+ " " + followerLastCommittedZxid.getCounter());
// ConcurrentHashMap<Long, ZxId> currentEpochMap = properties.getSynData().getCurrentEpochMap();
// currentEpochMap.put(nodeId, followerLastCommittedZxid);
String leaderLastLog = FileOps.readLastLog(properties);
LOG.debug("Leader last log = " + leaderLastLog);
String[] arr1 = leaderLastLog.split(",");
long epoch = Long.parseLong(arr1[0].trim());
long counter = Long.parseLong(arr1[1].trim());
ZxId leaderLastCommittedZxid = new ZxId(epoch, counter);
LOG.debug("leader last committed zxid = " + leaderLastCommittedZxid.getEpoch()
+ " " + leaderLastCommittedZxid.getCounter());
if (leaderLastCommittedZxid.getEpoch()==followerLastCommittedZxid.getEpoch() ){
if (followerLastCommittedZxid.getCounter() < leaderLastCommittedZxid.getCounter()){
// TODO: Send DIFF message
String diffMsg = "";
List<String> logList= FileOps.getDiffResponse(properties, followerLastCommittedZxid);
diffMsg = "DIFF:" + logList;
this.nettyClientInhandler.sendMessage(memberList.get(nodeId).getHostName(),
memberList.get(nodeId).getPort(), diffMsg);
// TODO: Iterate through CommitHistory (refer readHistory()), stringify and send
} else if (followerLastCommittedZxid.getCounter() == leaderLastCommittedZxid.getCounter()){
// Do nothing
} else if (followerLastCommittedZxid.getCounter() > leaderLastCommittedZxid.getCounter()){
// Go to Leader Election. Ideally, shouldn't happen
this.properties.setNodestate(NodeServerProperties1.State.ELECTION);
LOG.debug("Going to Leader Election");
// change phase
}
}
else if (leaderLastCommittedZxid.getEpoch()-followerLastCommittedZxid.getEpoch() < 4 ){
// TODO: Send DIFF message
String diffMsg = "";
List<String> logList= FileOps.getDiffResponse(properties, followerLastCommittedZxid);
diffMsg = "DIFF:" + logList;
this.nettyClientInhandler.sendMessage(memberList.get(nodeId).getHostName(),
memberList.get(nodeId).getPort(), diffMsg);
// TODO: Iterate through CommitHistory (refer readHistory()), stringify and send
}
else if (followerLastCommittedZxid.getEpoch() > leaderLastCommittedZxid.getEpoch()){
// Go to Leader Election. Ideally, shouldn't happen
this.properties.setNodestate(NodeServerProperties1.State.ELECTION);
LOG.debug("Going to Leader Election");
//changePhase();
}
else {
// TODO: Send SNAP message
String snapmsg = "SNAP:" + properties.getDataMap();
this.nettyClientInhandler.sendMessage(memberList.get(nodeId).getHostName(), memberList.get(nodeId).getPort(), snapmsg);
// TODO: Iterate through the Map, stringify each entry and then send
}
return "";
}
if (requestMsg.contains("DIFF")){
LOG.debug("DIFF message received");
String[] decodedDiff = requestMsg.split(":");
LOG.debug("Diff decoded is = " + decodedDiff[0] + " " + decodedDiff[1]);
//Remove all brackets and split the entries on ", "
String[] diff = decodedDiff[1].replaceAll("\\[", "").replaceAll("\\]", "").split(", ");
for (int i = 0; i < diff.length; i++){
//Remove last comma
diff[i] = diff[i].trim();
if (diff[i].length() == 0) break;
LOG.debug("Diff i = " + i + " Diff[i] = " + diff[i]);
String[] proposalArr = diff[i].split(",");
long epoch = Long.parseLong(proposalArr[0].trim());
long counter = Long.parseLong(proposalArr[1].trim());
String key = proposalArr[2].trim();
String value = proposalArr[3].trim();
ZxId zxid = new ZxId(epoch, counter);
Proposal pro = new Proposal(zxid, key, value);
//If it doesn't exist, create one.
String resp = FileOps.appendTransaction("CommitedHistory_" + properties.getNodePort() + ".log", pro.toString());
Properties dataMap = properties.getDataMap();
//this.properties.getSynData().getCommitQueue().offer(pro);
dataMap.put(key, value);
}
FileOps.writeDataMap(properties);
LOG.debug("Follower ready for Broadcast");
return "READY:" + this.properties.getNodeId();
}
if (requestMsg.contains("SNAP")){
Properties datamap = properties.getDataMap();
datamap.clear();
LOG.debug("SNAP message received = " + requestMsg);
String[] decodedSnap = requestMsg.split(":");
LOG.debug("Snap decoded is = " + decodedSnap[0] + " " + decodedSnap[1]);
String[] snap = decodedSnap[1].replaceAll("\\{", "").replaceAll("\\}", "").split(", ");
for (int i = 0; i < snap.length; i++){
snap[i] = snap[i].trim();
if (snap[i].length() == 0) break;
LOG.debug("Snap i = " + i + " Snap[i] = " + snap[i]);
String[] datamapEntryArr = snap[i].split("=");
String key = datamapEntryArr[0].trim();
String value = datamapEntryArr[1].trim();
LOG.debug("Key is = " + key);
LOG.debug("Value is = " + value);
datamap.put(key, value);
}
LOG.debug("Updated datamap = " + properties.getDataMap());
LOG.debug("Follower ready for Broadcast");
return "READY:" + this.properties.getNodeId();
}
if (requestMsg.contains("READY")){
properties.getSynData().incrementQuorumCount();
}
return "";
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
// Close the connection when an exception is raised.
// cause.printStackTrace();
LOG.error("Context Name:"+ ctx.name());
LOG.error("Context :"+ ctx.toString());
LOG.error(cause.getMessage());
ctx.close();
}
}
|
package org.junit;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* <p>When writing tests, it is common to find that several tests need similar
* objects created before they can run. Annotating a <code>public void</code> method
* with <code>@Before</code> causes that method to be run before the {@link org.junit.Test} method.
* The <code>@Before</code> methods of superclasses will be run before those of the current class.
* No other ordering is defined.
* </p>
*
* Here is a simple example:
* <pre>
* public class Example {
* List empty;
* @Before public void initialize() {
* empty= new ArrayList();
* }
* @Test public void size() {
* ...
* }
* @Test public void remove() {
* ...
* }
* }
* </pre>
*
* @see org.junit.BeforeClass
* @see org.junit.After
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface Before {
}
|
package parser;
import gui.CustomProperties;
import org.mapdb.DB;
import org.mapdb.DBMaker;
import org.mapdb.HTreeMap;
import org.mapdb.Serializer;
import java.io.*;
import java.util.ArrayList;
import java.util.Observable;
import java.util.regex.Pattern;
/**
* This class contains a parser to parse a .gfa file into our data structure.
*/
public class GfaParser extends Observable implements Runnable {
private String header1;
private String header2;
private HTreeMap<Long, String> sequenceMap;
private String filePath;
private String partPath;
private CustomProperties properties = new CustomProperties();
private DB db;
/**
* Constructor.
* @param absolutePath The path location of the file.
*/
public GfaParser(String absolutePath) {
filePath = absolutePath;
}
@Override
public void run() {
try {
parseGraph(filePath);
} catch (IOException e) {
e.printStackTrace();
}
}
/**
* getter for db of the sequencemap.
* @return db.
*/
public DB getDb() {
return db;
}
/**
* This method parses the file specified in filepath into a sequence graph.
* @param filePath A string specifying where the file is stored.
* @throws IOException For instance when the file is not found
*/
@SuppressWarnings("Since15")
private synchronized void parseGraph(String filePath) throws IOException {
properties.updateProperties();
String pattern = Pattern.quote(System.getProperty("file.separator"));
String[] partPaths = filePath.split(pattern);
partPath = partPaths[partPaths.length - 1];
db = DBMaker.fileDB(partPath + ".sequence.db").fileMmapEnable().
fileMmapPreclearDisable().cleanerHackEnable().
closeOnJvmShutdown().checksumHeaderBypass().make();
if (db.get(partPath + ".sequence.db") != null) {
sequenceMap = db.hashMap(partPath + ".sequence.db").
keySerializer(Serializer.LONG).
valueSerializer(Serializer.STRING).createOrOpen();
} else {
properties.setProperty(partPath, "false");
properties.saveProperties();
sequenceMap = db.hashMap(partPath + ".sequence.db").
keySerializer(Serializer.LONG).
valueSerializer(Serializer.STRING).createOrOpen();
parseSpecific(filePath);
}
this.setChanged();
this.notifyObservers(1);
this.setChanged();
this.notifyObservers(filePath);
}
/**
* Getter for the sequenceHashMap.
* @return The HashMap.
*/
public synchronized HTreeMap<Long, String> getSequenceHashMap() {
return sequenceMap;
}
/**
* Parses the segments and links of the file, storing sequences in the sequence map and
* writing the parent and child edge arrays to disk.
* @param filePath The file to parse.
* @throws IOException if the file cannot be read or the auxiliary array files cannot be written.
*/
@SuppressWarnings("Since15")
private synchronized void parseSpecific(String filePath) throws IOException {
BufferedWriter parentWriter =
new BufferedWriter(new FileWriter(partPath + "parentArray.txt"));
BufferedWriter childWriter =
new BufferedWriter(new FileWriter(partPath + "childArray.txt"));
InputStream in = new FileInputStream(filePath);
BufferedReader br = new BufferedReader(new InputStreamReader(in, "UTF-8"));
String line = br.readLine();
if (line == null) {
in.close();
br.close();
return;
}
header1 = line.split("H")[1];
line = br.readLine();
if (line == null) {
in.close();
br.close();
return;
}
header2 = line.split("H")[1];
int sizeOfFile = 0;
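// 'S' lines define segments (id -> sequence); 'L' lines define links, whose parent and
// child ids are appended as comma-separated values to the two array files.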
while ((line = br.readLine()) != null) {
if (line.startsWith("S")) {
String[] data = line.split(("\t"));
int id = Integer.parseInt(data[1]);
sequenceMap.put((long) (id), data[2]);
} else if (line.startsWith("L")) {
String[] edgeDataString = line.split("\t");
int parentId = (Integer.parseInt(edgeDataString[1]));
int childId = Integer.parseInt(edgeDataString[3]);
parentWriter.write(parentId + ",");
childWriter.write(childId + ",");
sizeOfFile++;
}
}
in.close();
br.close();
parentWriter.flush();
parentWriter.close();
childWriter.flush();
childWriter.close();
db.commit();
properties.updateProperties();
properties.setProperty(partPath + "childArray.txtsize", Integer.toString(sizeOfFile));
properties.setProperty(partPath, "true");
properties.saveProperties();
}
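/**
 * Reads the comma-separated node ids written by parseSpecific back into an int array,
 * sized from the stored "childArray.txtsize" property.
 * @param isParent true to read the parent array, false to read the child array.
 * @return the node id array.
 * @throws IOException if the array file cannot be read.
 */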
private int[] read(boolean isParent) throws IOException {
String additionToPath;
if (isParent) {
additionToPath = "parentArray.txt";
} else {
additionToPath = "childArray.txt";
}
InputStream in = new FileInputStream(System.getProperty("user.dir")
+ System.getProperty("file.separator") + partPath + additionToPath);
BufferedReader br = new BufferedReader(new InputStreamReader(in, "UTF-8"));
String []strNums = br.readLine().split(",");
int size = Integer.parseInt(properties.getProperty(partPath + "childArray.txtsize", "-1"));
if (size == -1) {
throw new java.lang.RuntimeException("Size not in preferences file");
}
int [] nodeArray = new int[size];
for (int i = 0; i < strNums.length; i++) {
nodeArray[i] = Integer.parseInt(strNums[i]);
}
return nodeArray;
}
public int[] getParentArray() throws IOException {
return read(true);
}
public int[] getChildArray() throws IOException {
return read(false);
}
/**
* Creates an ArrayList of Strings specifying headers.
* @return an arrayList containing all headers
*/
public ArrayList<String> getHeaders() {
ArrayList<String> headers = new ArrayList<String>();
headers.add(header1);
headers.add(header2);
return headers;
}
}
|
package gov.nih.nci.nbia.dbadapter;
import gov.nih.nci.nbia.annotations.AnnotationStorage;
import gov.nih.nci.nbia.query.DicomSOPInstanceUIDQueryInterface;
import gov.nih.nci.nbia.util.AdapterUtil;
import gov.nih.nci.nbia.util.DicomConstants;
import java.io.File;
import java.io.InputStream;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import org.apache.log4j.Logger;
import org.dcm4che.data.Dataset;
import org.dcm4che.data.DcmElement;
import org.dcm4che.dict.DictionaryFactory;
import org.dcm4che.dict.TagDictionary;
import org.rsna.ctp.objects.DicomObject;
import org.rsna.ctp.objects.FileObject;
import org.rsna.ctp.objects.XmlObject;
import org.rsna.ctp.objects.ZipObject;
import org.rsna.ctp.pipeline.Status;
import org.rsna.ctp.stdstages.database.UIDResult;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
public class NCIADatabaseDelegator {
@Transactional (propagation=Propagation.REQUIRED)
public void process(DicomObject file, File storedFile,String url) throws RuntimeException {
if (storedFile == null)
{
log.error("Unable to obtain the stored DICOM file");
failedSubmission("Unable to obtain the stored DICOM file");
}
String filename = storedFile.getAbsolutePath();
//long filesize = storedFile.length();
boolean visibility=false;
try {
numbers = new HashMap();
numbers.put("current_timestamp", new java.util.Date());
//Dataset set = DcmUtil.parse(storedFile, 0x5FFFffff);
//Based on what John Perry's request
Dataset set = file.getDataset();
parseDICOMPropertiesFile(set);
//enhancement of storage service
if (!preProcess()) {
log.error("Storage Service - Preprocess: Error occurs when trying to find project, site in preprocess() for file " + file.getFile().getAbsolutePath());
failedSubmission("Storage Service - Preprocess: Error occurs when trying to find project, site in preprocess() for file " + file.getFile().getAbsolutePath());
}
String temp = (String) numbers.get(DicomConstants.TRIAL_VISIBILITY);
if ((temp != null) &&
temp.equals(DicomConstants.SPECIFIC_CHARACTER_SET)) {
visibility = true;
} else {
visibility = false;
}
// get the checksum with the file unannoymized for DB verifier to work
String md5 = file.getDigest()== null? " " : file.getDigest();
imageStorage.setMd5(md5);
Status status = imageStorage.storeDicomObject(numbers,filename,visibility);
if(status.equals(Status.FAIL)) {
log.error("Rollback in process(DicomObject,String) for file " + file.getFile().getAbsolutePath());
failedSubmission("Rollback in process(DicomObject,String) for file " + file.getFile().getAbsolutePath());
}
}
catch (Exception e) {
failedSubmission("Rollback in process(DicomObject,String) for file " + file.getFile().getAbsolutePath());
}
}
/* (non-Javadoc)
* @see org.rsna.mircsite.util.DatabaseAdapter#process(org.rsna.mircsite.util.XmlObject, java.lang.String)
*/
@Transactional(propagation=Propagation.REQUIRED)
public Status process(XmlObject file,
File storedFile,
String url) {
return annotationStorage.process(file, storedFile);
}
/* (non-Javadoc)
* @see org.rsna.mircsite.util.DatabaseAdapter#process(org.rsna.mircsite.util.ZipObject, java.lang.String)
*/
@Transactional (propagation=Propagation.REQUIRED)
public Status process(ZipObject file,
File storedFile,
String url) {
return annotationStorage.process(file, storedFile);
}
@Transactional (propagation=Propagation.REQUIRED)
public Status process(FileObject file, File storedFile, String url) {
String fileExtension = file.getExtension();
log.info("file extension: " + fileExtension);
log.error("FileObject is not supported yet" + storedFile.getAbsolutePath() + "\tfile extension is " + fileExtension);
return Status.FAIL;
}
public Map<String, UIDResult> uidQuery(Set<String> uidSet) {
Map<String, UIDResult> result = null;
try
{
sopQuery.setDicomSOPInstanceUIDQuery(uidSet);
result = sopQuery.getUIDResult();
}catch(Exception e)
{
log.error("In NCIA database uidQuery method, " + e.getMessage());
}
return result;
}
////////////////////////////////////////PACKAGE/////////////////////////////////////////////
static String handleSQField(Dataset dicomSet, int pName) throws Exception {
String elementHeader = "";
DcmElement dcm_Element = dicomSet.get(pName);
if (dcm_Element != null){
for (int i = 0; i < dcm_Element.countItems(); i++)
{
Dataset ds = dcm_Element.getItem(i);
Iterator iterator = ds.iterator();
while(iterator.hasNext())
{
DcmElement dcmElement = (DcmElement)iterator.next();
String tagIdentifier = getTagIdentifierByTagId(dcmElement.tag());
String elementValue = dcmElement.getString(null);
elementHeader += tagIdentifier + "=" + elementValue + "/";
}
}
elementHeader = elementHeader.substring(0,elementHeader.lastIndexOf('/'));
}
return elementHeader;
}
/////////////////////////////////////////PRIVATE///////////////////////////////////////////
Logger log = Logger.getLogger(NCIADatabaseDelegator.class);
@Autowired
private AdapterUtil adapterUtil;
@Autowired
private ImageStorage imageStorage;
@Autowired
private AnnotationStorage annotationStorage;
@Autowired
private DicomSOPInstanceUIDQueryInterface sopQuery;
Map numbers = new HashMap();
Properties dicomProp = new Properties();
static final String DICOM_PROPERITIES = "dicom.properties";
ClassPathXmlApplicationContext ctx;
NCIADatabaseDelegator nciaDelegator;
/**
* If ALL of the group 13 tags aren't found in the number map,
* then reject this submission.
*
* <p>Historically this method tried to piece together the missing provenance
* information if some of the provenance information wasn't found.
* Too complicated for to little gain... so submitters must include
* all provenance information.
*/
private boolean preProcess() throws Exception {
boolean ok = false;
// Pass the check if none of the project name, site id, site name and trial name is null.
if ((numbers.get(DicomConstants.PROJECT_NAME) != null) &&
(numbers.get(DicomConstants.SITE_ID) != null) &&
(numbers.get(DicomConstants.SITE_NAME) != null) &&
(numbers.get(DicomConstants.TRIAL_NAME) != null)) {
ok = adapterUtil.checkSeriesStatus(numbers);
}
return ok;
}
private static boolean isSQFieldThatWeCareAbout(String propname) {
return propname.equals("00081084") || propname.equals("00082218");
}
private static boolean isMultiStringFieldThatWeCareAbout(String propname) {
return propname.equals("00200037") || propname.equals("00200032")||propname.equals("00080008")||propname.equals("00180021");
}
private static String getTagIdentifierByTagId(int tag)
{
TagDictionary dict = DictionaryFactory.getInstance().getDefaultTagDictionary();
String tagIdentifier = dict.toString(tag) ;
int beginIndex = tagIdentifier.indexOf('(');
int endIndex = tagIdentifier.indexOf(')');
tagIdentifier = tagIdentifier.substring(beginIndex, endIndex+1);
return tagIdentifier;
}
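/**
 * Walks every tag listed in dicom.properties, extracts its value from the dataset
 * (multi-valued and SQ tags are handled specially) and stores it in the numbers map
 * under the logical name configured for that tag.
 */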
private void parseDICOMPropertiesFile(Dataset dicomSet)
throws Exception {
InputStream in = Thread.currentThread().getContextClassLoader()
.getResourceAsStream(DICOM_PROPERITIES);
dicomProp.load(in);
Enumeration enum1 = dicomProp.propertyNames();
while (enum1.hasMoreElements()) {
String propname = enum1.nextElement().toString();
int pName = Integer.parseInt(propname, 16);
String elementheader = null;
if (isMultiStringFieldThatWeCareAbout(propname)) {
String[] temp = dicomSet.getStrings(pName);
if ((temp != null) && (temp.length > 0)) {
elementheader = temp[0];
for (int i = 1; i < temp.length; i++) {
elementheader += ("\\" + temp[i]);
}
}
}
else if (isSQFieldThatWeCareAbout(propname))
{
elementheader = handleSQField(dicomSet, pName);
}
else {
try {
elementheader = getElementValue(pName, dicomSet);
} catch (UnsupportedOperationException uoe) {
elementheader = null;
}
}
if (elementheader != null) {
elementheader = elementheader.replaceAll("'", "");
String[] temp = dicomProp.getProperty(propname).split("\t");
numbers.put(temp[0], elementheader);
if(log.isDebugEnabled()) {
log.debug("Parsing:"+propname+"/"+temp[0]+" as "+elementheader);
}
}
} //while
}
/**
* Get the contents of a DICOM element in the dataset.
* If the element is part of a private group owned by CTP, the raw bytes of the
* element are returned as text. Returns null if the element does not exist or
* cannot be read.
* @param tag the tag specifying the element (in the form 0xggggeeee).
* @param dataset the DICOM dataset.
* @return the trimmed text of the element, or null if the element does not exist.
*/
private String getElementValue(int tag, Dataset dataset) {
boolean ctp = false;
//if the group is odd (private) and the element is >= 0x0100 (a data element rather than the private-creator header)
if (((tag & 0x00010000) != 0) && ((tag & 0x0000ff00) != 0)) {
//each block of 0x100 in the element address space maps back to a slot in the
//first 0x100, which holds the private-creator header. From the tag address compute
//the header address, e.g. (0013,1010) -> (0013,0010): keep the group and move the
//element's high byte down into the low byte.
int blk = (tag & 0xffff0000) | ((tag & 0x0000ff00) >> 8);
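// Worked example with the hypothetical private tag 0x00131010:
//   group check:   0x00131010 & 0x00010000 != 0, so group 0013 is odd (private)
//   blk = (0x00131010 & 0xffff0000) | ((0x00131010 & 0x0000ff00) >> 8) = 0x00130010
//   i.e. the private-creator string is read from element (0013,0010) and compared to "CTP".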
try {
ctp = dataset.getString(blk).equals("CTP");
}
catch (Exception notCTP) {
log.warn("Is [0013,0010] missing, or it doesnt equal CTP?");
notCTP.printStackTrace();
}
}
String value = null;
try {
if (ctp) {
value = new String(dataset.getByteBuffer(tag).array());
}
else {
value = dataset.getString(tag);
}
}
catch (Exception notAvailable) {
//notAvailable.printStackTrace();
log.warn("in NICADatabase class, cannot get element value"+Integer.toHexString(tag));
}
if (value != null) {
value = value.trim();
}
return value;
}
private void failedSubmission(String message) throws RuntimeException
{
throw new RuntimeException(message);
}
public void setCorrectFileSize(File file) {
// Temporary fix until new CTP release provides a better solution
long fileSize = file.length();
imageStorage.setFileSize(fileSize);
/*JP needs the digest with file not being anonymized for DB Verifier to work
try {
DicomObject tempFile = new DicomObject(file);
String md5 = tempFile.getDigest()== null? " " : tempFile.getDigest();
imageStorage.setMd5(md5);
file.delete();
}
catch (Exception ex) {
log.warn("Bad DICOM file:"+file.getAbsolutePath());
}
*/
}
}
|
package water.exec;
import java.util.*;
import water.*;
import water.fvec.*;
import water.util.Log;
import water.util.Utils;
/** Parse a generic R string and build an AST, in the context of an H2O Cloud
* @author cliffc@0xdata.com
*/
public abstract class ASTOp extends AST {
// The order of operator precedence follows R rules.
// Highest the first
static final public int OPP_PREFIX = 100; /* abc() */
static final public int OPP_POWER = 13;
static final public int OPP_UPLUS = 12;
static final public int OPP_UMINUS = 12;
static final public int OPP_MOD = 11; /* %xyz% */
static final public int OPP_MUL = 10;
static final public int OPP_DIV = 10;
static final public int OPP_PLUS = 9;
static final public int OPP_MINUS = 9;
static final public int OPP_GT = 8;
static final public int OPP_GE = 8;
static final public int OPP_LT = 8;
static final public int OPP_LE = 8;
static final public int OPP_EQ = 8;
static final public int OPP_NE = 8;
static final public int OPP_NOT = 7;
static final public int OPP_AND = 6;
static final public int OPP_OR = 5;
static final public int OPP_DILDA = 4;
static final public int OPP_RARROW = 3;
static final public int OPP_ASSN = 2;
static final public int OPP_LARROW = 1;
// Operator association order
static final public int OPA_LEFT = 0;
static final public int OPA_RIGHT = 1;
// Operation formula notations
static final public int OPF_INFIX = 0;
static final public int OPF_PREFIX = 1;
// Tables of operators by arity
static final public HashMap<String,ASTOp> UNI_INFIX_OPS = new HashMap<String,ASTOp>();
static final public HashMap<String,ASTOp> BIN_INFIX_OPS = new HashMap<String,ASTOp>();
static final public HashMap<String,ASTOp> PREFIX_OPS = new HashMap<String,ASTOp>();
static {
// Unary infix ops
putUniInfix(new ASTUniPlus());
putUniInfix(new ASTUniMinus());
putUniInfix(new ASTNot());
// Binary infix ops
putBinInfix(new ASTPlus());
putBinInfix(new ASTSub());
putBinInfix(new ASTMul());
putBinInfix(new ASTDiv());
putBinInfix(new ASTPow());
putBinInfix(new ASTPow2());
putBinInfix(new ASTMod());
putBinInfix(new ASTAND());
putBinInfix(new ASTOR());
putBinInfix(new ASTLT());
putBinInfix(new ASTLE());
putBinInfix(new ASTGT());
putBinInfix(new ASTGE());
putBinInfix(new ASTEQ());
putBinInfix(new ASTNE());
putBinInfix(new ASTLA());
putBinInfix(new ASTLO());
// Unary prefix ops
putPrefix(new ASTIsNA());
putPrefix(new ASTNrow());
putPrefix(new ASTNcol());
putPrefix(new ASTAbs ());
putPrefix(new ASTSgn ());
putPrefix(new ASTSqrt());
putPrefix(new ASTCeil());
putPrefix(new ASTFlr ());
putPrefix(new ASTLog ());
putPrefix(new ASTExp ());
putPrefix(new ASTScale());
putPrefix(new ASTFactor());
putPrefix(new ASTIsFactor());
putPrefix(new ASTAnyFactor()); // For Runit testing
putPrefix(new ASTAnyNA());
putPrefix(new ASTIsTRUE());
putPrefix(new ASTCos()); // Trigonometric functions
putPrefix(new ASTSin());
putPrefix(new ASTTan());
putPrefix(new ASTACos());
putPrefix(new ASTASin());
putPrefix(new ASTATan());
putPrefix(new ASTCosh());
putPrefix(new ASTSinh());
putPrefix(new ASTTanh());
// More generic reducers
putPrefix(new ASTMin ());
putPrefix(new ASTMax ());
putPrefix(new ASTSum ());
putPrefix(new ASTSdev());
putPrefix(new ASTMean());
putPrefix(new ASTMinNaRm());
putPrefix(new ASTMaxNaRm());
putPrefix(new ASTSumNaRm());
// Misc
putPrefix(new ASTSeq ());
putPrefix(new ASTCat ());
putPrefix(new ASTCbind ());
putPrefix(new ASTTable ());
putPrefix(new ASTReduce());
putPrefix(new ASTIfElse());
putPrefix(new ASTRApply());
putPrefix(new ASTRunif ());
putPrefix(new ASTCut ());
putPrefix(new ASTPrint ());
putPrefix(new ASTLs ());
}
static private void putUniInfix(ASTOp ast) { UNI_INFIX_OPS.put(ast.opStr(),ast); }
static private void putBinInfix(ASTOp ast) { BIN_INFIX_OPS.put(ast.opStr(),ast); }
static private void putPrefix (ASTOp ast) { PREFIX_OPS.put(ast.opStr(),ast); }
static public boolean isOp(String id) {
return UNI_INFIX_OPS.containsKey(id)
|| BIN_INFIX_OPS.containsKey(id)
|| PREFIX_OPS .containsKey(id);
}
static public Set<String> opStrs() {
// Copy into a new set: HashMap.keySet() is a view that does not support addAll().
Set<String> all = new HashSet<String>(UNI_INFIX_OPS.keySet());
all.addAll(BIN_INFIX_OPS.keySet());
all.addAll(PREFIX_OPS.keySet());
return all;
}
final int _form; // formula notation, 0 - infix, 1 - prefix
final int _precedence; // operator precedence number
final int _association; // 0 - left associated, 1 - right associated
// All fields are final, because functions are immutable
final String _vars[]; // Variable names
ASTOp( String vars[], Type ts[], int form, int prec, int asso) {
super(Type.fcn(ts));
_form = form;
_precedence = prec;
_association = asso;
_vars = vars;
}
abstract String opStr();
abstract ASTOp make();
public boolean leftAssociate( ) {
return _association == OPA_LEFT;
}
@Override public String toString() {
String s = _t._ts[0]+" "+opStr()+"(";
int len=_vars.length;
for( int i=1; i<len-1; i++ )
s += _t._ts[i]+" "+_vars[i]+", ";
return s + (len > 1 ? _t._ts[len-1]+" "+_vars[len-1] : "")+")";
}
public String toString(boolean verbose) {
if( !verbose ) return toString(); // Just the fcn name & arg names
return toString();
}
static ASTOp parse(Exec2 E) {
int x = E._x;
String id = E.isID();
if( id == null ) return null;
ASTOp op;
// This order matters. If used as a prefix OP, `+` and `-` are binary only.
if( (op = PREFIX_OPS.get(id)) != null
|| (op = BIN_INFIX_OPS.get(id)) != null
|| (op = UNI_INFIX_OPS.get(id)) != null)
return op.make();
E._x = x;
return ASTFunc.parseFcn(E);
}
// Parse a prefix OP or return null.
static ASTOp parsePrefixOp(Exec2 E) {
int x = E._x;
String id = E.isID();
if( id == null ) return null;
ASTOp op = PREFIX_OPS.get(id);
if( op != null ) return op.make();
E._x = x; // Roll back, no parse happened
// Attempt a user-mode function parse
return ASTFunc.parseFcn(E);
}
// Parse a unary infix OP or return null.
static ASTOp parseUniInfixOp(Exec2 E) {
int x = E._x;
String id = E.isID();
if( id == null ) return null;
ASTOp op = UNI_INFIX_OPS.get(id);
if( op != null) return op.make();
E._x = x; // Roll back, no parse happened
return null;
}
// Parse a binary infix OP or return null.
static ASTOp parseBinInfixOp(Exec2 E) {
int x = E._x;
String id = E.isID();
if( id == null ) return null;
ASTOp op = BIN_INFIX_OPS.get(id);
if( op != null) return op.make();
E._x = x; // Roll back, no parse happened
return null;
}
@Override void exec(Env env) { env.push(this); }
abstract void apply(Env env, int argcnt);
}
abstract class ASTUniOp extends ASTOp {
static final String VARS[] = new String[]{ "", "x"};
static Type[] newsig() {
Type t1 = Type.dblary();
return new Type[]{Type.anyary(new Type[]{t1}),t1};
}
ASTUniOp( int form, int precedence, int association ) {
super(VARS,newsig(),form,precedence,association);
}
double op( double d ) { throw H2O.fail(); }
protected ASTUniOp( String[] vars, Type[] types, int form, int precedence, int association ) {
super(vars,types,form,precedence,association);
}
@Override void apply(Env env, int argcnt) {
// Expect we can broadcast across all functions as needed.
if( !env.isAry() ) { env.poppush(op(env.popDbl())); return; }
Frame fr = env.popAry();
String skey = env.key();
final ASTUniOp uni = this; // Final 'this' so can use in closure
Frame fr2 = new MRTask2() {
@Override public void map( Chunk chks[], NewChunk nchks[] ) {
for( int i=0; i<nchks.length; i++ ) {
NewChunk n =nchks[i];
Chunk c = chks[i];
int rlen = c._len;
for( int r=0; r<rlen; r++ )
n.addNum(uni.op(c.at0(r)));
}
}
}.doAll(fr.numCols(),fr).outputFrame(fr._names, fr.domains());
env.subRef(fr,skey);
env.pop(); // Pop self
env.push(fr2);
}
}
abstract class ASTUniPrefixOp extends ASTUniOp {
ASTUniPrefixOp( ) { super(OPF_PREFIX,OPP_PREFIX,OPA_RIGHT); }
ASTUniPrefixOp( String[] vars, Type[] types ) { super(vars,types,OPF_PREFIX,OPP_PREFIX,OPA_RIGHT); }
}
class ASTCos extends ASTUniPrefixOp { String opStr(){ return "cos"; } ASTOp make() {return new ASTCos ();} double op(double d) { return Math.cos(d);}}
class ASTSin extends ASTUniPrefixOp { String opStr(){ return "sin"; } ASTOp make() {return new ASTSin ();} double op(double d) { return Math.sin(d);}}
class ASTTan extends ASTUniPrefixOp { String opStr(){ return "tan"; } ASTOp make() {return new ASTTan ();} double op(double d) { return Math.tan(d);}}
class ASTACos extends ASTUniPrefixOp { String opStr(){ return "acos"; } ASTOp make() {return new ASTACos();} double op(double d) { return Math.acos(d);}}
class ASTASin extends ASTUniPrefixOp { String opStr(){ return "asin"; } ASTOp make() {return new ASTASin();} double op(double d) { return Math.asin(d);}}
class ASTATan extends ASTUniPrefixOp { String opStr(){ return "atan"; } ASTOp make() {return new ASTATan();} double op(double d) { return Math.atan(d);}}
class ASTCosh extends ASTUniPrefixOp { String opStr(){ return "cosh"; } ASTOp make() {return new ASTCosh ();} double op(double d) { return Math.cosh(d);}}
class ASTSinh extends ASTUniPrefixOp { String opStr(){ return "sinh"; } ASTOp make() {return new ASTSinh ();} double op(double d) { return Math.sinh(d);}}
class ASTTanh extends ASTUniPrefixOp { String opStr(){ return "tanh"; } ASTOp make() {return new ASTTanh ();} double op(double d) { return Math.tanh(d);}}
class ASTAbs extends ASTUniPrefixOp { String opStr(){ return "abs"; } ASTOp make() {return new ASTAbs ();} double op(double d) { return Math.abs(d);}}
class ASTSgn extends ASTUniPrefixOp { String opStr(){ return "sgn" ; } ASTOp make() {return new ASTSgn ();} double op(double d) { return Math.signum(d);}}
class ASTSqrt extends ASTUniPrefixOp { String opStr(){ return "sqrt"; } ASTOp make() {return new ASTSqrt();} double op(double d) { return Math.sqrt(d);}}
class ASTCeil extends ASTUniPrefixOp { String opStr(){ return "ceil"; } ASTOp make() {return new ASTCeil();} double op(double d) { return Math.ceil(d);}}
class ASTFlr extends ASTUniPrefixOp { String opStr(){ return "floor"; } ASTOp make() {return new ASTFlr ();} double op(double d) { return Math.floor(d);}}
class ASTLog extends ASTUniPrefixOp { String opStr(){ return "log"; } ASTOp make() {return new ASTLog ();} double op(double d) { return Math.log(d);}}
class ASTExp extends ASTUniPrefixOp { String opStr(){ return "exp"; } ASTOp make() {return new ASTExp ();} double op(double d) { return Math.exp(d);}}
class ASTIsNA extends ASTUniPrefixOp { String opStr(){ return "is.na"; } ASTOp make() {return new ASTIsNA();} double op(double d) { return Double.isNaN(d)?1:0;}}
class ASTNrow extends ASTUniPrefixOp {
ASTNrow() { super(VARS,new Type[]{Type.DBL,Type.ARY}); }
@Override String opStr() { return "nrow"; }
@Override ASTOp make() {return this;}
@Override void apply(Env env, int argcnt) {
Frame fr = env.popAry();
String skey = env.key();
double d = fr.numRows();
env.subRef(fr,skey);
env.poppush(d);
}
}
class ASTNcol extends ASTUniPrefixOp {
ASTNcol() { super(VARS,new Type[]{Type.DBL,Type.ARY}); }
@Override String opStr() { return "ncol"; }
@Override ASTOp make() {return this;}
@Override void apply(Env env, int argcnt) {
Frame fr = env.popAry();
String skey = env.key();
double d = fr.numCols();
env.subRef(fr,skey);
env.poppush(d);
}
}
class ASTIsFactor extends ASTUniPrefixOp {
ASTIsFactor() { super(VARS,new Type[]{Type.DBL,Type.ARY}); }
@Override String opStr() { return "is.factor"; }
@Override ASTOp make() {return this;}
@Override void apply(Env env, int argcnt) {
if(!env.isAry()) { env.poppush(0); return; }
Frame fr = env.popAry();
String skey = env.key();
double d = 1;
Vec[] v = fr.vecs();
for(int i = 0; i < v.length; i++) {
if(!v[i].isEnum()) { d = 0; break; }
}
env.subRef(fr,skey);
env.poppush(d);
}
}
// Added to facilitate Runit testing
class ASTAnyFactor extends ASTUniPrefixOp {
ASTAnyFactor() { super(VARS,new Type[]{Type.DBL,Type.ARY}); }
@Override String opStr() { return "any.factor"; }
@Override ASTOp make() {return this;}
@Override void apply(Env env, int argcnt) {
if(!env.isAry()) { env.poppush(0); return; }
Frame fr = env.popAry();
String skey = env.key();
double d = 0;
Vec[] v = fr.vecs();
for(int i = 0; i < v.length; i++) {
if(v[i].isEnum()) { d = 1; break; }
}
env.subRef(fr,skey);
env.poppush(d);
}
}
class ASTAnyNA extends ASTUniPrefixOp {
ASTAnyNA() { super(VARS,new Type[]{Type.DBL,Type.ARY}); }
@Override String opStr() { return "any.na"; }
@Override ASTOp make() {return this;}
@Override void apply(Env env, int argcnt) {
if(!env.isAry()) { env.poppush(0); return; }
Frame fr = env.popAry();
String skey = env.key();
double d = 0;
Vec[] v = fr.vecs();
for(int i = 0; i < v.length; i++) {
if(v[i].naCnt() > 0) { d = 1; break; }
}
env.subRef(fr, skey);
env.poppush(d);
}
}
class ASTIsTRUE extends ASTUniPrefixOp {
ASTIsTRUE() {super(VARS,new Type[]{Type.DBL,Type.unbound()});}
@Override String opStr() { return "isTRUE"; }
@Override ASTOp make() {return new ASTIsTRUE();} // to make sure fcn get bound at each new context
@Override void apply(Env env, int argcnt) {
double res = env.isDbl() && env.popDbl()==1.0 ? 1:0;
env.pop();
env.poppush(res);
}
}
class ASTScale extends ASTUniPrefixOp {
ASTScale() { super(VARS,new Type[]{Type.ARY,Type.ARY}); }
@Override String opStr() { return "scale"; }
@Override ASTOp make() {return this;}
@Override void apply(Env env, int argcnt) {
if(!env.isAry()) { env.poppush(Double.NaN); return; }
Frame fr = env.popAry();
String skey = env.key();
Frame fr2 = new Scale().doIt(fr.numCols(), fr).outputFrame(fr._names, fr.domains());
env.subRef(fr,skey);
env.pop(); // Pop self
env.push(fr2);
}
private static class Scale extends MRTask2<Scale> {
protected int _nums = 0;
protected int[] _ind; // Saves indices of numeric cols first, followed by enums
protected double[] _normSub;
protected double[] _normMul;
@Override public void map(Chunk chks[], NewChunk nchks[]) {
// Normalize numeric cols only
for(int k = 0; k < _nums; k++) {
int i = _ind[k];
NewChunk n = nchks[i];
Chunk c = chks[i];
int rlen = c._len;
for(int r = 0; r < rlen; r++)
n.addNum((c.at0(r)-_normSub[i])*_normMul[i]);
}
for(int k = _nums; k < chks.length; k++) {
int i = _ind[k];
NewChunk n = nchks[i];
Chunk c = chks[i];
int rlen = c._len;
for(int r = 0; r < rlen; r++)
n.addNum(c.at0(r));
}
}
public Scale doIt(int outputs, Frame fr) { return dfork2(outputs, fr).getResult(); }
public Scale dfork2(int outputs, Frame fr) {
final Vec [] vecs = fr.vecs();
for(int i = 0; i < vecs.length; i++) {
if(!vecs[i].isEnum()) _nums++;
}
if(_normSub == null) _normSub = MemoryManager.malloc8d(_nums);
if(_normMul == null) { _normMul = MemoryManager.malloc8d(_nums); Arrays.fill(_normMul,1); }
if(_ind == null) _ind = MemoryManager.malloc4(vecs.length);
int ncnt = 0; int ccnt = 0;
for(int i = 0; i < vecs.length; i++){
if(!vecs[i].isEnum()) {
_normSub[ncnt] = vecs[i].mean();
_normMul[ncnt] = 1.0/vecs[i].sigma();
_ind[ncnt++] = i;
} else
_ind[_nums+(ccnt++)] = i;
}
assert ncnt == _nums && (ncnt + ccnt == vecs.length);
return dfork(outputs, fr, false);
}
}
}
// Class of operations that auto-expand across arrays in a 2-to-1 way:
// they apply to 2 operands (each an array or a scalar) and produce an
// array or scalar result, broadcasting scalars over arrays as needed.
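// Hypothetical examples of the broadcast combinations handled in apply():
//   scalar op scalar -> scalar       (3 + 4       -> 7)
//   frame  op scalar -> frame        (ary + 1     -> 1 added to every cell)
//   frame  op frame  -> frame        (ary1 + ary2 -> element-wise; dimensions must match)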
abstract class ASTBinOp extends ASTOp {
static final String VARS[] = new String[]{ "", "x","y"};
static Type[] newsig() {
Type t1 = Type.dblary(), t2 = Type.dblary();
return new Type[]{Type.anyary(new Type[]{t1,t2}),t1,t2};
}
ASTBinOp( int form, int precedence, int association ) {
super(VARS, newsig(), form, precedence, association); // binary ops are infix ops
}
abstract double op( double d0, double d1 );
@Override void apply(Env env, int argcnt) {
// Expect we can broadcast across all functions as needed.
Frame fr0 = null, fr1 = null;
double d0=0, d1=0;
if( env.isAry() ) fr1 = env.popAry(); else d1 = env.popDbl(); String k0 = env.key();
if( env.isAry() ) fr0 = env.popAry(); else d0 = env.popDbl(); String k1 = env.key();
if( fr0==null && fr1==null ) {
env.poppush(op(d0,d1));
return;
}
final boolean lf = fr0 != null;
final boolean rf = fr1 != null;
final double fd0 = d0;
final double fd1 = d1;
Frame fr = null; // Do-All frame
int ncols = 0; // Result column count
if( fr0 !=null ) { // Left?
ncols = fr0.numCols();
if( fr1 != null ) {
if( fr0.numCols() != fr1.numCols() ||
fr0.numRows() != fr1.numRows() )
throw new IllegalArgumentException("Arrays must be same size: "+fr0+" vs "+fr1);
fr = new Frame(fr0).add(fr1);
} else {
fr = fr0;
}
} else {
ncols = fr1.numCols();
fr = fr1;
}
final ASTBinOp bin = this; // Final 'this' so can use in closure
// Run an arbitrary binary op on one or two frames & scalars
Frame fr2 = new MRTask2() {
@Override public void map( Chunk chks[], NewChunk nchks[] ) {
for( int i=0; i<nchks.length; i++ ) {
NewChunk n =nchks[i];
Chunk c0= !lf ? null : chks[i];
Chunk c1= !rf ? null : chks[i+(lf?nchks.length:0)];
int rlen = (lf ? c0 : c1)._len;
for( int r=0; r<rlen; r++ ) {
if(chks[i]._vec.isEnum())
n.addNA(); //slam in NA if op on enum; same as R
else
n.addNum(bin.op(lf ? c0.at0(r) : fd0, rf ? c1.at0(r) : fd1));
}
}
}
}.doAll(ncols,fr).outputFrame((lf ? fr0 : fr1)._names,(lf ? fr0 : fr1).domains());
if( fr0 != null ) env.subRef(fr0,k0);
if( fr1 != null ) env.subRef(fr1,k1);
env.pop();
env.push(fr2);
}
}
class ASTUniPlus extends ASTUniOp { ASTUniPlus() { super(OPF_INFIX, OPP_UPLUS, OPA_RIGHT); } String opStr(){ return "+" ;} ASTOp make() {return new ASTUniPlus(); } double op(double d) { return d;}}
class ASTUniMinus extends ASTUniOp { ASTUniMinus() { super(OPF_INFIX, OPP_UMINUS, OPA_RIGHT); } String opStr(){ return "-" ;} ASTOp make() {return new ASTUniMinus();} double op(double d) { return -d;}}
class ASTNot extends ASTUniOp { ASTNot() { super(OPF_INFIX, OPP_NOT, OPA_RIGHT); } String opStr(){ return "!" ;} ASTOp make() {return new ASTNot(); } double op(double d) { return d==0?1:0; }}
class ASTPlus extends ASTBinOp { ASTPlus() { super(OPF_INFIX, OPP_PLUS, OPA_LEFT ); } String opStr(){ return "+" ;} ASTOp make() {return new ASTPlus();} double op(double d0, double d1) { return d0+d1;}}
class ASTSub extends ASTBinOp { ASTSub() { super(OPF_INFIX, OPP_MINUS, OPA_LEFT); } String opStr(){ return "-" ;} ASTOp make() {return new ASTSub ();} double op(double d0, double d1) { return d0-d1;}}
class ASTMul extends ASTBinOp { ASTMul() { super(OPF_INFIX, OPP_MUL, OPA_LEFT); } String opStr(){ return "*" ;} ASTOp make() {return new ASTMul ();} double op(double d0, double d1) { return d0*d1;}}
class ASTDiv extends ASTBinOp { ASTDiv() { super(OPF_INFIX, OPP_DIV, OPA_LEFT); } String opStr(){ return "/" ;} ASTOp make() {return new ASTDiv ();} double op(double d0, double d1) { return d0/d1;}}
class ASTPow extends ASTBinOp { ASTPow() { super(OPF_INFIX, OPP_POWER, OPA_RIGHT);} String opStr(){ return "^" ;} ASTOp make() {return new ASTPow ();} double op(double d0, double d1) { return Math.pow(d0,d1);}}
class ASTPow2 extends ASTBinOp { ASTPow2() { super(OPF_INFIX, OPP_POWER, OPA_RIGHT);} String opStr(){ return "**" ;} ASTOp make() {return new ASTPow2();} double op(double d0, double d1) { return Math.pow(d0,d1);}}
class ASTMod extends ASTBinOp { ASTMod() { super(OPF_INFIX, OPP_MOD, OPA_LEFT); } String opStr(){ return "%" ;} ASTOp make() {return new ASTMod ();} double op(double d0, double d1) { return d0%d1;}}
class ASTLT extends ASTBinOp { ASTLT() { super(OPF_INFIX, OPP_LT, OPA_LEFT); } String opStr(){ return "<" ;} ASTOp make() {return new ASTLT ();} double op(double d0, double d1) { return d0<d1 && !Utils.equalsWithinOneSmallUlp(d0,d1)?1:0;}}
class ASTLE extends ASTBinOp { ASTLE() { super(OPF_INFIX, OPP_LE, OPA_LEFT); } String opStr(){ return "<=" ;} ASTOp make() {return new ASTLE ();} double op(double d0, double d1) { return d0<d1 || Utils.equalsWithinOneSmallUlp(d0,d1)?1:0;}}
class ASTGT extends ASTBinOp { ASTGT() { super(OPF_INFIX, OPP_GT, OPA_LEFT); } String opStr(){ return ">" ;} ASTOp make() {return new ASTGT ();} double op(double d0, double d1) { return d0>d1 && !Utils.equalsWithinOneSmallUlp(d0,d1)?1:0;}}
class ASTGE extends ASTBinOp { ASTGE() { super(OPF_INFIX, OPP_GE, OPA_LEFT); } String opStr(){ return ">=" ;} ASTOp make() {return new ASTGE ();} double op(double d0, double d1) { return d0>d1 || Utils.equalsWithinOneSmallUlp(d0,d1)?1:0;}}
class ASTEQ extends ASTBinOp { ASTEQ() { super(OPF_INFIX, OPP_EQ, OPA_LEFT); } String opStr(){ return "==" ;} ASTOp make() {return new ASTEQ ();} double op(double d0, double d1) { return Utils.equalsWithinOneSmallUlp(d0,d1)?1:0;}}
class ASTNE extends ASTBinOp { ASTNE() { super(OPF_INFIX, OPP_NE, OPA_LEFT); } String opStr(){ return "!=" ;} ASTOp make() {return new ASTNE ();} double op(double d0, double d1) { return Utils.equalsWithinOneSmallUlp(d0,d1)?0:1;}}
class ASTLA extends ASTBinOp { ASTLA() { super(OPF_INFIX, OPP_AND, OPA_LEFT); } String opStr(){ return "&" ;} ASTOp make() {return new ASTLA ();} double op(double d0, double d1) { return (d0!=0 && d1!=0)?1:0;}}
class ASTLO extends ASTBinOp { ASTLO() { super(OPF_INFIX, OPP_OR, OPA_LEFT); } String opStr(){ return "|" ;} ASTOp make() {return new ASTLO ();} double op(double d0, double d1) { return (d0==0 && d1==0)?0:1;}}
// Variable length; instances will be created of the required length
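// Hypothetical example: sum(ary, 3, 4) reduces every chunk of ary with op() and then
// folds the scalars 3 and 4 into the same running total.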
abstract class ASTReducerOp extends ASTOp {
final double _init;
final boolean _narm;
ASTReducerOp( double init, boolean narm ) {
super(new String[]{"","dbls"},
new Type[]{Type.DBL,Type.varargs(Type.dblary())},
OPF_PREFIX,
OPP_PREFIX,
OPA_RIGHT);
_init = init;
_narm = narm;
}
abstract double op( double d0, double d1 );
@Override void apply(Env env, int argcnt) {
double sum=_init;
for( int i=0; i<argcnt-1; i++ )
if( env.isDbl() ) sum = op(sum,env.popDbl());
else {
Frame fr = env.popAry();
String skey = env.key();
sum = op(sum,_narm?new NaRmRedOp(this).doAll(fr)._d:new RedOp(this).doAll(fr)._d);
env.subRef(fr,skey);
}
env.poppush(sum);
}
private static class RedOp extends MRTask2<RedOp> {
final ASTReducerOp _bin;
RedOp( ASTReducerOp bin ) { _bin = bin; _d = bin._init; }
double _d;
@Override public void map( Chunk chks[] ) {
for( int i=0; i<chks.length; i++ ) {
Chunk C = chks[i];
for( int r=0; r<C._len; r++ )
_d = _bin.op(_d,C.at0(r));
if( Double.isNaN(_d) ) break;
}
}
@Override public void reduce( RedOp s ) { _d = _bin.op(_d,s._d); }
}
private static class NaRmRedOp extends MRTask2<NaRmRedOp> {
final ASTReducerOp _bin;
NaRmRedOp( ASTReducerOp bin ) { _bin = bin; _d = bin._init; }
double _d;
@Override public void map( Chunk chks[] ) {
for( int i=0; i<chks.length; i++ ) {
Chunk C = chks[i];
for( int r=0; r<C._len; r++ )
if (!Double.isNaN(C.at0(r)))
_d = _bin.op(_d,C.at0(r));
if( Double.isNaN(_d) ) break;
}
}
@Override public void reduce( NaRmRedOp s ) { _d = _bin.op(_d,s._d); }
}
}
class ASTSum extends ASTReducerOp { ASTSum( ) {super(0,false);} String opStr(){ return "sum" ;} ASTOp make() {return new ASTSum();} double op(double d0, double d1) { return d0+d1;}}
class ASTSumNaRm extends ASTReducerOp { ASTSumNaRm( ) {super(0,true) ;} String opStr(){ return "sum.na.rm";} ASTOp make() {return new ASTSumNaRm();} double op(double d0, double d1) { return d0+d1;}}
class ASTReduce extends ASTOp {
static final String VARS[] = new String[]{ "", "op2", "ary"};
static final Type TYPES[]= new Type []{ Type.ARY, Type.fcn(new Type[]{Type.DBL,Type.DBL,Type.DBL}), Type.ARY };
ASTReduce( ) { super(VARS,TYPES,OPF_PREFIX,OPP_PREFIX,OPA_RIGHT); }
@Override String opStr(){ return "Reduce";}
@Override ASTOp make() {return this;}
@Override void apply(Env env, int argcnt) { throw H2O.unimpl(); }
}
// TODO: Check refcnt mismatch issue: tmp = cbind(h.hex,3.5) results in different refcnts per col
class ASTCbind extends ASTOp {
@Override String opStr() { return "cbind"; }
ASTCbind( ) { super(new String[]{"cbind","ary"},
new Type[]{Type.ARY,Type.varargs(Type.dblary())},
OPF_PREFIX,
OPP_PREFIX,OPA_RIGHT); }
@Override ASTOp make() {return this;}
@Override void apply(Env env, int argcnt) {
Vec vmax = null;
for(int i = 0; i < argcnt-1; i++) {
if(env.isAry(-argcnt+1+i)) {
Frame tmp = env.ary(-argcnt+1+i);
if(vmax == null) vmax = tmp.vecs()[0];
else if(tmp.numRows() != vmax.length())
// R pads shorter cols to match max rows by cycling/repeating, but we won't support that
throw new IllegalArgumentException("Row mismatch! Expected " + String.valueOf(vmax.length()) + " but frame has " + String.valueOf(tmp.numRows()));
}
}
Frame fr = null;
if(env.isAry(-argcnt+1))
fr = new Frame(env.ary(-argcnt+1));
else {
// Vec v = new Vec(Key.make(), env.dbl(-argcnt+1));
double d = env.dbl(-argcnt+1);
Vec v = vmax == null ? new Vec(Key.make(), d) : vmax.makeCon(d);
fr = new Frame(new String[] {"c0"}, new Vec[] {v});
env.addRef(v);
}
for(int i = 1; i < argcnt-1; i++) {
if(env.isAry(-argcnt+1+i))
fr.add(env.ary(-argcnt+1+i));
else {
double d = env.dbl(-argcnt+1+i);
// Vec v = fr.vecs()[0].makeCon(d);
Vec v = vmax == null ? new Vec(Key.make(), d) : vmax.makeCon(d);
fr.add("c" + String.valueOf(i), v);
env.addRef(v);
}
}
env._ary[env._sp-argcnt] = fr;
env._sp -= argcnt-1;
assert env.check_refcnt(fr.anyVec());
}
}
class ASTMinNaRm extends ASTOp {
ASTMinNaRm( ) {
super(new String[]{"","dbls"},
new Type[]{Type.DBL,Type.varargs(Type.dblary())},
OPF_PREFIX,
OPP_PREFIX,
OPA_RIGHT);
}
String opStr(){ return "min.na.rm";}
ASTOp make() {return new ASTMinNaRm();}
@Override void apply(Env env, int argcnt) {
double min = Double.POSITIVE_INFINITY;
int nacnt = 0;
for( int i=0; i<argcnt-1; i++ )
if( env.isDbl() ) {
double a = env.popDbl();
if (Double.isNaN(a)) nacnt++;
else min = Math.min(min, a);
}
else {
Frame fr = env.peekAry();
for (Vec v : fr.vecs())
min = Math.min(min, v.min());
env.pop();
}
if (nacnt > 0 && min == Double.POSITIVE_INFINITY)
min = Double.NaN;
env.poppush(min);
}
}
class ASTMaxNaRm extends ASTOp {
ASTMaxNaRm( ) {
super(new String[]{"","dbls"},
new Type[]{Type.DBL,Type.varargs(Type.dblary())},
OPF_PREFIX,
OPP_PREFIX,
OPA_RIGHT);
}
String opStr(){ return "max.na.rm";}
ASTOp make() {return new ASTMaxNaRm();}
@Override void apply(Env env, int argcnt) {
double max = Double.NEGATIVE_INFINITY;
int nacnt = 0;
for( int i=0; i<argcnt-1; i++ )
if( env.isDbl() ) {
double a = env.popDbl();
if (Double.isNaN(a)) nacnt++;
else max = Math.max(max, a);
}
else {
Frame fr = env.peekAry();
for (Vec v : fr.vecs())
max = Math.max(max, v.max());
env.pop();
}
if (nacnt > 0 && max == Double.NEGATIVE_INFINITY)
max = Double.NaN;
env.poppush(max);
}
}
class ASTMin extends ASTOp {
ASTMin( ) {
super(new String[]{"","dbls"},
new Type[]{Type.DBL,Type.varargs(Type.dblary())},
OPF_PREFIX,
OPP_PREFIX,
OPA_RIGHT);
}
String opStr(){ return "min";}
ASTOp make() {return new ASTMin();}
@Override void apply(Env env, int argcnt) {
double min = Double.POSITIVE_INFINITY;
for( int i=0; i<argcnt-1; i++ )
if( env.isDbl() ) min = Math.min(min, env.popDbl());
else {
Frame fr = env.peekAry();
for (Vec v : fr.vecs())
if (v.naCnt() > 0) { min = Double.NaN; break; }
else min = Math.min(min, v.min());
env.pop();
}
env.poppush(min);
}
}
class ASTMax extends ASTOp {
ASTMax( ) {
super(new String[]{"","dbls"},
new Type[]{Type.DBL,Type.varargs(Type.dblary())},
OPF_PREFIX,
OPP_PREFIX,
OPA_RIGHT);
}
String opStr(){ return "max";}
ASTOp make() {return new ASTMax();}
@Override void apply(Env env, int argcnt) {
double max = Double.NEGATIVE_INFINITY;
for( int i=0; i<argcnt-1; i++ )
if( env.isDbl() ) max = Math.max(max, env.popDbl());
else {
Frame fr = env.peekAry();
for (Vec v : fr.vecs())
if (v.naCnt() > 0) { max = Double.NaN; break; }
else max = Math.max(max, v.max());
env.pop();
}
env.poppush(max);
}
}
// R-like binary operator &&
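// Hypothetical example: (ary && 0) inspects only the first element of ary, mirroring how
// R's scalar && looks at a single value and short-circuits on 0 or NA.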
class ASTAND extends ASTOp {
@Override String opStr() { return "&&"; }
ASTAND( ) {
super(new String[]{"", "x", "y"},
new Type[]{Type.DBL,Type.dblary(),Type.dblary()},
OPF_PREFIX,
OPP_PREFIX,
OPA_RIGHT);
}
@Override ASTOp make() { return new ASTAND(); }
@Override void apply(Env env, int argcnt) {
double op1 = env.isAry(-2) ? env.ary(-2).vecs()[0].at(0) : env.dbl(-2);
double op2 = op1==0 ? 0 :
Double.isNaN(op1) ? Double.NaN :
env.isAry(-1) ? env.ary(-1).vecs()[0].at(0) : env.dbl(-1);
env.pop(3);
if (!Double.isNaN(op2)) op2 = op2==0?0:1;
env.push(op2);
}
}
// R-like binary operator ||
class ASTOR extends ASTOp {
@Override String opStr() { return "||"; }
ASTOR( ) {
super(new String[]{"", "x", "y"},
new Type[]{Type.DBL,Type.dblary(),Type.dblary()},
OPF_PREFIX,
OPP_PREFIX,
OPA_RIGHT);
}
@Override ASTOp make() { return new ASTOR(); }
@Override void apply(Env env, int argcnt) {
double op1 = env.isAry(-2) ? env.ary(-2).vecs()[0].at(0) : env.dbl(-2);
double op2 = !Double.isNaN(op1) && op1!=0 ? 1 :
env.isAry(-1) ? env.ary(-1).vecs()[0].at(0) : env.dbl(-1);
if (!Double.isNaN(op2) && op2 != 0)
op2 = 1;
else if (op2 == 0 && Double.isNaN(op1))
op2 = Double.NaN;
env.push(op2);
}
}
// Similar to R's seq_len
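// Hypothetical example: seq_len(3) returns a one-column frame holding 1, 2, 3.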
class ASTSeq extends ASTOp {
@Override String opStr() { return "seq_len"; }
ASTSeq( ) {
super(new String[]{"seq_len", "n"},
new Type[]{Type.ARY,Type.DBL},
OPF_PREFIX,
OPP_PREFIX,
OPA_RIGHT);
}
@Override ASTOp make() { return this; }
@Override void apply(Env env, int argcnt) {
int len = (int)env.popDbl();
if (len <= 0)
throw new IllegalArgumentException("Error in seq_len(" +len+"): argument must be coercible to positive integer");
Key key = Vec.VectorGroup.VG_LEN1.addVecs(1)[0];
AppendableVec av = new AppendableVec(key);
NewChunk nc = new NewChunk(av,0);
for (int r = 0; r < len; r++) nc.addNum(r+1);
nc.close(0,null);
Vec v = av.close(null);
env.pop();
env.push(new Frame(new String[]{"c"}, new Vec[]{v}));
}
}
// Variable length; flatten all the component arys
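// Hypothetical example: c(ary, 5) emits every value of every column of ary followed by 5,
// all flattened into a single column (only single-chunk vecs are supported here).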
class ASTCat extends ASTOp {
@Override String opStr() { return "c"; }
ASTCat( ) { super(new String[]{"cat","dbls"},
new Type[]{Type.ARY,Type.varargs(Type.dblary())},
OPF_PREFIX,
OPP_PREFIX,
OPA_RIGHT); }
@Override ASTOp make() {return new ASTCat();}
@Override void apply(Env env, int argcnt) {
Key key = Vec.VectorGroup.VG_LEN1.addVecs(1)[0];
AppendableVec av = new AppendableVec(key);
NewChunk nc = new NewChunk(av,0);
for( int i=0; i<argcnt-1; i++ ) {
if (env.isAry(i-argcnt+1)) for (Vec vec : env.ary(i-argcnt+1).vecs()) {
if (vec.nChunks() > 1) H2O.unimpl();
for (int r = 0; r < vec.length(); r++) nc.addNum(vec.at(r));
}
else nc.addNum(env.dbl(i-argcnt+1));
}
nc.close(0,null);
Vec v = av.close(null);
env.pop(argcnt);
env.push(new Frame(new String[]{"c"}, new Vec[]{v}));
}
}
class ASTRunif extends ASTOp {
@Override String opStr() { return "runif"; }
ASTRunif() { super(new String[]{"runif","dbls"},
new Type[]{Type.ARY,Type.ARY},
OPF_PREFIX,
OPP_PREFIX,
OPA_RIGHT); }
@Override ASTOp make() {return new ASTRunif();}
@Override void apply(Env env, int argcnt) {
Frame fr = env.popAry();
String skey = env.key();
long [] espc = fr.anyVec()._espc;
long rem = fr.numRows();
if(rem > espc[espc.length-1])throw H2O.unimpl();
for(int i = 0; i < espc.length; ++i){
if(rem <= espc[i]){
espc = Arrays.copyOf(espc, i+1);
break;
}
}
espc[espc.length-1] = rem;
Vec randVec = new Vec(fr.anyVec().group().addVecs(1)[0],espc);
Futures fs = new Futures();
DKV.put(randVec._key,randVec, fs);
for(int i = 0; i < espc.length-1; ++i)
DKV.put(randVec.chunkKey(i),new C0DChunk(0,(int)(espc[i+1]-espc[i])),fs);
fs.blockForPending();
final long seed = System.currentTimeMillis();
new MRTask2() {
@Override public void map(Chunk c){
Random rng = new Random(seed*c.cidx());
for(int i = 0; i < c._len; ++i)
c.set0(i, (float)rng.nextDouble());
}
}.doAll(randVec);
env.subRef(fr,skey);
env.pop();
env.push(new Frame(new String[]{"rnd"},new Vec[]{randVec}));
}
}
class ASTSdev extends ASTOp {
ASTSdev() { super(new String[]{"sd", "ary"}, new Type[]{Type.DBL,Type.ARY},
OPF_PREFIX,
OPP_PREFIX,
OPA_RIGHT); }
@Override String opStr() { return "sd"; }
@Override ASTOp make() { return new ASTSdev(); }
@Override void apply(Env env, int argcnt) {
Frame fr = env.peekAry();
if (fr.vecs().length > 1)
throw new IllegalArgumentException("sd does not apply to multiple cols.");
if (fr.vecs()[0].isEnum())
throw new IllegalArgumentException("sd only applies to numeric vector.");
double sig = fr.vecs()[0].sigma();
env.pop();
env.poppush(sig);
}
}
class ASTMean extends ASTOp {
ASTMean() { super(new String[]{"mean", "ary"}, new Type[]{Type.DBL,Type.ARY},
OPF_PREFIX,
OPP_PREFIX,
OPA_RIGHT); }
@Override String opStr() { return "mean"; }
@Override ASTOp make() { return new ASTMean(); }
@Override void apply(Env env, int argcnt) {
Frame fr = env.peekAry();
if (fr.vecs().length > 1)
throw new IllegalArgumentException("sd does not apply to multiple cols.");
if (fr.vecs()[0].isEnum())
throw new IllegalArgumentException("sd only applies to numeric vector.");
double ave = fr.vecs()[0].mean();
env.pop();
env.poppush(ave);
}
}
class ASTTable extends ASTOp {
ASTTable() { super(new String[]{"table", "ary"}, new Type[]{Type.ARY,Type.ARY},
OPF_PREFIX,
OPP_PREFIX,
OPA_RIGHT); }
@Override String opStr() { return "table"; }
@Override ASTOp make() { return new ASTTable(); }
@Override void apply(Env env, int argcnt) {
Frame fr = env.ary(-1);
if (fr.vecs().length > 1)
throw new IllegalArgumentException("table does not apply to multiple cols.");
if (! fr.vecs()[0].isInt())
throw new IllegalArgumentException("table only applies to integer vector.");
int[] domain = new Vec.CollectDomain(fr.vecs()[0]).doAll(fr).domain();
long[] counts = new Tabularize(domain).doAll(fr)._counts;
// Build output vecs
Key keys[] = Vec.VectorGroup.VG_LEN1.addVecs(2);
AppendableVec v0 = new AppendableVec(keys[0]);
v0._domain = fr.vecs()[0].domain() == null ? null : fr.vecs()[0].domain().clone();
NewChunk c0 = new NewChunk(v0,0);
for( int i=0; i<domain.length; i++ ) c0.addNum((double) domain[i]);
c0.close(0,null);
AppendableVec v1 = new AppendableVec(keys[1]);
NewChunk c1 = new NewChunk(v1,0);
for( int i=0; i<domain.length; i++ ) c1.addNum((double) counts[i]);
c1.close(0,null);
env.pop(2);
env.push(new Frame(new String[]{fr._names[0],"count"}, new Vec[]{v0.close(null), v1.close(null)}));
}
private static class Tabularize extends MRTask2<Tabularize> {
public final int[] _domain;
public long[] _counts;
public Tabularize(int[] dom) { super(); _domain=dom; }
@Override public void map(Chunk chk) {
_counts=new long[_domain.length];
for (int i = 0; i < chk._len; i++)
if (! chk.isNA0(i)) {
int cls = Arrays.binarySearch(_domain,(int)chk.at80(i));
assert 0 <= cls && cls < _domain.length;
_counts[cls] ++;
}
}
@Override public void reduce(Tabularize that) { Utils.add(_counts,that._counts); }
}
}
// Selective return. If the selector is a double, just eval both args and
// return the selected one. If the selector is an array, then it must be
// compatible with argument arrays (if any), and the selection is done
// element-by-element.
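// Hypothetical examples: ifelse(1, a, b) picks a outright, while ifelse(tst, a, 0) walks tst
// element-by-element, taking a's value where tst is non-zero and 0 where tst is zero
// (NAs in tst produce NAs in the result).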
class ASTIfElse extends ASTOp {
static final String VARS[] = new String[]{"ifelse","tst","true","false"};
static Type[] newsig() {
Type t1 = Type.unbound(), t2 = Type.unbound(), t3=Type.unbound();
return new Type[]{Type.anyary(new Type[]{t1,t2,t3}),t1,t2,t3};
}
ASTIfElse( ) { super(VARS, newsig(),OPF_INFIX,OPP_PREFIX,OPA_RIGHT); }
@Override ASTOp make() {return new ASTIfElse();}
@Override String opStr() { return "ifelse"; }
// Parse an infix trinary ?: operator
static AST parse(Exec2 E, AST tst) {
if( !E.peek('?') ) return null;
int x=E._x;
AST tru=E.xpeek(':',E._x,parseCXExpr(E));
if( tru == null ) E.throwErr("Missing expression in trinary",x);
x = E._x;
AST fal=parseCXExpr(E);
if( fal == null ) E.throwErr("Missing expression in trinary",x);
return ASTApply.make(new AST[]{new ASTIfElse(),tst,tru,fal},E,x);
}
@Override void apply(Env env, int argcnt) {
// All or none are functions
assert ( env.isFcn(-1) && env.isFcn(-2) && _t.ret().isFcn())
|| (!env.isFcn(-1) && !env.isFcn(-2) && !_t.ret().isFcn());
// If the result is an array, then one or the other of the two must be an
// array, and this is a broadcast op.
assert !_t.isAry() || env.isAry(-1) || env.isAry(-2);
// Single selection? Then just pick slots
if( !env.isAry(-3) ) {
if( env.dbl(-3)==0 ) env.pop_into_stk(-4);
else { env.pop(); env.pop_into_stk(-3); }
return;
}
Frame frtst=null, frtru= null, frfal= null;
double dtst= 0 , dtru= 0 , dfal= 0 ;
if( env.isAry() ) frfal= env.popAry(); else dfal = env.popDbl(); String kf = env.key();
if( env.isAry() ) frtru= env.popAry(); else dtru = env.popDbl(); String kt = env.key();
if( env.isAry() ) frtst= env.popAry(); else dtst = env.popDbl(); String kq = env.key();
// Multi-selection
// Build a doAll frame
Frame fr = new Frame(frtst); // Do-All frame
final int ncols = frtst.numCols(); // Result column count
final long nrows = frtst.numRows(); // Result row count
if( frtru !=null ) { // True is a Frame?
if( frtru.numCols() != ncols || frtru.numRows() != nrows )
throw new IllegalArgumentException("Arrays must be same size: "+frtst+" vs "+frtru);
fr.add(frtru);
}
if( frfal !=null ) { // False is a Frame?
if( frfal.numCols() != ncols || frfal.numRows() != nrows )
throw new IllegalArgumentException("Arrays must be same size: "+frtst+" vs "+frfal);
fr.add(frfal);
}
final boolean t = frtru != null;
final boolean f = frfal != null;
final double fdtru = dtru;
final double fdfal = dfal;
// Run a selection picking true/false across the frame
Frame fr2 = new MRTask2() {
@Override public void map( Chunk chks[], NewChunk nchks[] ) {
for( int i=0; i<nchks.length; i++ ) {
NewChunk n =nchks[i];
int off=i;
Chunk ctst= chks[off];
Chunk ctru= t ? chks[off+=ncols] : null;
Chunk cfal= f ? chks[off+=ncols] : null;
int rlen = ctst._len;
for( int r=0; r<rlen; r++ )
if( ctst.isNA0(r) ) n.addNA();
else n.addNum(ctst.at0(r)!=0 ? (t ? ctru.at0(r) : fdtru) : (f ? cfal.at0(r) : fdfal));
}
}
}.doAll(ncols,fr).outputFrame(fr._names,fr.domains());
env.subRef(frtst,kq);
if( frtru != null ) env.subRef(frtru,kt);
if( frfal != null ) env.subRef(frfal,kf);
env.pop();
env.push(fr2);
}
}
// R's Apply. The function is limited to taking a single column and returning
// a single column. The MARGIN double is limited to 1 or 2, statically determined.
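// Hypothetical example: apply(ary, 2, mean) invokes mean on each single-column frame of ary
// in turn and collects the per-column results into the output frame.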
class ASTRApply extends ASTOp {
static final String VARS[] = new String[]{ "", "ary", "dbl1.2", "fcn"};
ASTRApply( ) { super(VARS,
new Type[]{ Type.ARY, Type.ARY, Type.DBL, Type.fcn(new Type[]{Type.dblary(),Type.ARY}) },
OPF_PREFIX,
OPP_PREFIX,
OPA_RIGHT); }
@Override String opStr(){ return "apply";}
@Override ASTOp make() {return new ASTRApply();}
@Override void apply(Env env, int argcnt) {
int oldsp = env._sp;
// Peek everything from the stack
ASTOp op = env.fcn(-1); // ary->dblary but better be ary[,1]->dblary[,1]
double d = env.dbl(-2); // MARGIN: ROW=1, COLUMN=2 selector
Frame fr = env.ary(-3); // The Frame to work on
if( d==2 || d== -1 ) { // Work on columns?
int ncols = fr.numCols();
// If results are doubles, make vectors-of-length-1 for them all
Key keys[] = null;
if( op._t.ret().isDbl() ) {
keys = Vec.VectorGroup.VG_LEN1.addVecs(ncols);
} else assert op._t.ret().isAry();
// Apply the function across columns
Frame fr2 = new Frame(new String[0],new Vec[0]);
Vec vecs[] = fr.vecs();
for( int i=0; i<ncols; i++ ) {
env.push(op);
env.push(new Frame(new String[]{fr._names[i]},new Vec[]{vecs[i]}));
env.fcn(-2).apply(env, 2);
Vec v;
if( keys != null ) { // Doubles or Frame results?
// Jam the double into a Vec of its own
AppendableVec av = new AppendableVec(keys[i]);
NewChunk nc = new NewChunk(av,0);
nc.addNum(env.popDbl());
nc.close(0, null);
env.push(new Frame(v = av.close(null)));
} else { // Frame results
if( env.ary(-1).numCols() != 1 )
throw new IllegalArgumentException("apply requires that "+op+" return 1 column");
// Leave the ary on stack
//v = env.popAry().anyVec();// Remove without lowering refcnt
}
//fr2.add(fr._names[i],v); // Add, with refcnt already +1
}
for( int i=0; i<ncols; i++ )
fr2.add(fr._names[i], env.ary(-ncols+i).anyVec());
int narg = env._sp - oldsp + 4;
env.poppush(narg, fr2, null);
assert env.isAry();
assert env._sp == oldsp-4+1;
return;
}
if( d==1 || d == -2 ) // Work on rows
throw H2O.unimpl();
throw new IllegalArgumentException("MARGIN limited to 1 (rows) or 2 (cols)");
}
}
class ASTCut extends ASTOp {
ASTCut() { super(new String[]{"cut", "ary", "dbls"},
new Type[]{Type.ARY, Type.ARY, Type.dblary()},
OPF_PREFIX,
OPP_PREFIX,
OPA_RIGHT); }
@Override String opStr() { return "cut"; }
@Override ASTOp make() {return new ASTCut();}
@Override void apply(Env env, int argcnt) {
if(env.isDbl()) {
final int nbins = (int) Math.floor(env.popDbl());
if(nbins < 2)
throw new IllegalArgumentException("Number of intervals must be at least 2");
Frame fr = env.popAry();
String skey = env.key();
if(fr.vecs().length != 1 || fr.vecs()[0].isEnum())
throw new IllegalArgumentException("First argument must be a numeric column vector");
final double fmax = fr.vecs()[0].max();
final double fmin = fr.vecs()[0].min();
final double width = (fmax - fmin)/nbins;
if(width == 0) throw new IllegalArgumentException("Data vector is constant!");
// Note: I think R perturbs constant vecs slightly so it can still bin values
// Construct domain names from bins intervals
String[][] domains = new String[1][nbins];
domains[0][0] = "(" + String.valueOf(fmin - 0.001*(fmax-fmin)) + "," + String.valueOf(fmin + width) + "]";
for(int i = 1; i < nbins; i++)
domains[0][i] = "(" + String.valueOf(fmin + i*width) + "," + String.valueOf(fmin + (i+1)*width) + "]";
Frame fr2 = new MRTask2() {
@Override public void map(Chunk chk, NewChunk nchk) {
for(int r = 0; r < chk._len; r++) {
double x = chk.at0(r);
double n = x == fmax ? nbins-1 : Math.floor((x - fmin)/width);
nchk.addNum(n);
}
}
}.doAll(1,fr).outputFrame(fr._names, domains);
env.subRef(fr, skey);
env.pop();
env.push(fr2);
} else if(env.isAry()) {
Frame ary = env.popAry();
String skey1 = env.key();
if(ary.vecs().length != 1 || ary.vecs()[0].isEnum())
throw new IllegalArgumentException("Second argument must be a numeric column vector");
Vec brks = ary.vecs()[0];
// TODO: Check that num rows below some cutoff, else this will likely crash
// Remove duplicates and sort vector of breaks in ascending order
SortedSet<Double> temp = new TreeSet<Double>();
for(int i = 0; i < brks.length(); i++) temp.add(brks.at(i));
int cnt = 0; final double[] cutoffs = new double[temp.size()];
for(Double x : temp) { cutoffs[cnt] = x; cnt++; }
if(cutoffs.length < 2)
throw new IllegalArgumentException("Vector of breaks must have at least 2 unique values");
Frame fr = env.popAry();
String skey2 = env.key();
if(fr.vecs().length != 1 || fr.vecs()[0].isEnum())
throw new IllegalArgumentException("First argument must be a numeric column vector");
// Construct domain names from bin intervals
final int nbins = cutoffs.length-1;
String[][] domains = new String[1][nbins];
for(int i = 0; i < nbins; i++)
domains[0][i] = "(" + cutoffs[i] + "," + cutoffs[i+1] + "]";
Frame fr2 = new MRTask2() {
@Override public void map(Chunk chk, NewChunk nchk) {
for(int r = 0; r < chk._len; r++) {
double x = chk.at0(r);
if(x <= cutoffs[0] || x > cutoffs[cutoffs.length-1])
nchk.addNum(Double.NaN);
else {
for(int i = 1; i < cutoffs.length; i++) {
if(x <= cutoffs[i]) { nchk.addNum(i-1); break; }
}
}
}
}
}.doAll(1,fr).outputFrame(fr._names, domains);
env.subRef(ary, skey1);
env.subRef(fr, skey2);
env.pop();
env.push(fr2);
} else throw H2O.unimpl();
}
}
class ASTFactor extends ASTOp {
ASTFactor() { super(new String[]{"factor", "ary"},
new Type[]{Type.ARY, Type.ARY},
OPF_PREFIX,
OPP_PREFIX,OPA_RIGHT); }
@Override String opStr() { return "factor"; }
@Override ASTOp make() {return new ASTFactor();}
@Override void apply(Env env, int argcnt) {
Frame ary = env.peekAry(); // Ary on top of stack, keeps +1 refcnt
String skey = env.peekKey();
if( ary.numCols() != 1 )
throw new IllegalArgumentException("factor requires a single column");
Vec v0 = ary.vecs()[0];
Vec v1 = v0.isEnum() ? null : v0.toEnum();
if (v1 != null) {
ary = new Frame(ary._names,new Vec[]{v1});
skey = null;
}
env.poppush(2, ary, skey);
}
}
class ASTPrint extends ASTOp {
static Type[] newsig() {
Type t1 = Type.unbound();
return new Type[]{t1, t1, Type.varargs(Type.unbound())};
}
ASTPrint() { super(new String[]{"print", "x", "y..."},
newsig(),
OPF_PREFIX,
OPP_PREFIX,OPA_RIGHT); }
@Override String opStr() { return "print"; }
@Override ASTOp make() {return new ASTPrint();}
@Override void apply(Env env, int argcnt) {
for( int i=1; i<argcnt; i++ ) {
if( env.isAry(i-argcnt) ) {
env._sb.append(env.ary(i-argcnt).toStringAll());
} else {
env._sb.append(env.toString(env._sp+i-argcnt,true));
}
}
env.pop(argcnt-2); // Pop most args
env.pop_into_stk(-2); // Pop off fcn, returning 1st arg
}
}
/**
* R 'ls' command.
*
* This method is purely for the console right now: it prints its output into the
* string buffer. The JSON response is not configured at all.
*/
class ASTLs extends ASTOp {
ASTLs() { super(new String[]{"ls"},
new Type[]{Type.DBL},
OPF_PREFIX,
OPP_PREFIX,
OPA_RIGHT); }
@Override String opStr() { return "ls"; }
@Override ASTOp make() {return new ASTLs();}
@Override void apply(Env env, int argcnt) {
for( Key key : H2O.keySet() )
if( key.user_allowed() && H2O.get(key) != null )
env._sb.append(key.toString());
// Pop the self-function and push a zero.
env.pop();
env.push(0.0);
}
}
|
package com.intellij.codeInsight.generation;
import com.intellij.codeInsight.CodeInsightActionHandler;
import com.intellij.codeInsight.CodeInsightUtil;
import com.intellij.featureStatistics.FeatureUsageTracker;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.FoldRegion;
import com.intellij.openapi.editor.SelectionModel;
import com.intellij.openapi.editor.ex.DocumentEx;
import com.intellij.openapi.editor.impl.FoldingModelImpl;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.psi.codeStyle.Indent;
import com.intellij.psi.jsp.JspFile;
import com.intellij.psi.jsp.JspToken;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.psi.xml.XmlFile;
import com.intellij.psi.xml.XmlTag;
import com.intellij.psi.xml.XmlToken;
import com.intellij.psi.xml.XmlTokenType;
import com.intellij.util.text.CharArrayUtil;
import java.util.HashMap;
import java.util.Map;
public class CommentByLineCommentHandler implements CodeInsightActionHandler, LineCommenter.LineCommenterContext {
private Project myProject;
private PsiFile myFile;
private Editor myEditor;
private Document myDocument;
private int myStartOffset;
private int myEndOffset;
private int myLine1;
private int myLine2;
private int[] myStartOffsets;
private int[] myEndOffsets;
private boolean myAllLineComments;
private CodeStyleManager myCodeStyleManager;
private static final Map<FileType,LineCommenter> customCommenters = new HashMap<FileType, LineCommenter>(4);
public static final void registerCommenter(FileType fileType, LineCommenter commenter) {
customCommenters.put(fileType, commenter);
}
static {
registerCommenter(StdFileTypes.XML,new XmlLineCommenter());
HtmlLineCommenter commenter = new HtmlLineCommenter();
registerCommenter(StdFileTypes.HTML,commenter);
registerCommenter(StdFileTypes.XHTML,commenter);
registerCommenter(StdFileTypes.JAVA, new JavaLineCommenter());
registerCommenter(StdFileTypes.JSP, new JspLineCommenter());
registerCommenter(StdFileTypes.JSPX, new JspxLineCommenter());
}
public static LineCommenter getCustomCommenter(FileType fileType) {
return customCommenters.get(fileType);
}
public static LineCommenter getCommenter(PsiFile file) {
LineCommenter customCommenter = customCommenters.get(file.getFileType());
if (customCommenter!=null) {
return customCommenter;
} else if (isJavaFile(file)) {
return new JavaLineCommenter();
}
else if (file instanceof XmlFile) {
return new XmlLineCommenter();
}
else if (file instanceof JspFile) {
return new JspLineCommenter();
} else {
return null;
}
}
public void invoke(Project project, Editor editor, PsiFile file) {
myProject = project;
myFile = file;
myEditor = editor;
myDocument = myEditor.getDocument();
if (!myFile.isWritable()) {
if (!FileDocumentManager.fileForDocumentCheckedOutSuccessfully(getDocument(), project)){
return;
}
}
FeatureUsageTracker.getInstance().triggerFeatureUsed("codeassists.comment.line");
//myCodeInsightSettings = (CodeInsightSettings)ApplicationManager.getApplication().getComponent(CodeInsightSettings.class);
myCodeStyleManager = CodeStyleManager.getInstance(myProject);
final SelectionModel selectionModel = myEditor.getSelectionModel();
boolean hasSelection = selectionModel.hasSelection();
myStartOffset = selectionModel.getSelectionStart();
myEndOffset = selectionModel.getSelectionEnd();
if (myDocument.getTextLength() == 0) return;
int lastLineEnd = myDocument.getLineEndOffset(myDocument.getLineNumber(myEndOffset));
FoldRegion collapsedAt = ((FoldingModelImpl) myEditor.getFoldingModel()).getCollapsedRegionAtOffset(lastLineEnd);
if (collapsedAt != null) {
myEndOffset = Math.max(myEndOffset, collapsedAt.getEndOffset());
}
boolean wholeLinesSelected = !hasSelection ||
(myStartOffset == myDocument.getLineStartOffset(myDocument.getLineNumber(myStartOffset)) &&
myEndOffset == myDocument.getLineEndOffset(myDocument.getLineNumber(myEndOffset - 1)) + 1);
doComment();
if (!hasSelection) {
editor.getCaretModel().moveCaretRelatively(0, 1, false, false, true);
}
else {
if (wholeLinesSelected) {
selectionModel.setSelection(myStartOffset, selectionModel.getSelectionEnd());
}
}
}
public boolean startInWriteAction() {
return true;
}
private void doComment() {
myLine1 = myDocument.getLineNumber(myStartOffset);
myLine2 = myDocument.getLineNumber(myEndOffset);
if (myLine2 > myLine1 && myDocument.getLineStartOffset(myLine2) == myEndOffset) {
myLine2--;
}
myStartOffsets = new int[myLine2 - myLine1 + 1];
myEndOffsets = new int[myLine2 - myLine1 + 1];
myAllLineComments = true;
CharSequence chars = myDocument.getCharsSequence();
LineCommenter lineCommenter = customCommenters.get(myFile.getFileType());
if (lineCommenter==null) return;
LineCommenter customCommenter = (LineCommenter)lineCommenter.clone();
for (int line = myLine1; line <= myLine2; line++) {
int offset1 = getCommentStart(line,customCommenter);
myStartOffsets[line - myLine1] = offset1;
if (offset1 == -1) {
myAllLineComments = false;
break;
}
int offset = myDocument.getLineEndOffset(line) + ((DocumentEx) myDocument).getLineSeparatorLength(line);
offset = CharArrayUtil.shiftBackward(chars, offset - 1, "\n\r") + 1;
int offset2 = customCommenter.getCommentEnd(offset,this);
myEndOffsets[line - myLine1] = offset2;
if (offset2 == -1) {
myAllLineComments = false;
break;
}
}
if (!myAllLineComments) {
new CommentPerformer(customCommenter).perform();
}
else {
for (int line = myLine2; line >= myLine1; line--) {
int offset1 = myStartOffsets[line - myLine1];
int offset2 = myEndOffsets[line - myLine1];
customCommenter.doUncomment(offset1,offset2,this);
}
}
}
private Indent computeMinIndent(int line1, int line2, CharSequence chars, CodeStyleManager codeStyleManager, FileType fileType, LineCommenter commenter) {
Indent minIndent = CodeInsightUtil.getMinLineIndent(myProject, myDocument, line1, line2, fileType);
if (line1 > 0) {
int commentOffset = getCommentStart(line1 - 1,commenter);
if (commentOffset >= 0) {
int lineStart = myDocument.getLineStartOffset(line1 - 1);
String space = chars.subSequence(lineStart, commentOffset).toString();
Indent indent = codeStyleManager.getIndent(space, fileType);
minIndent = minIndent != null ? indent.min(minIndent) : indent;
}
}
if (minIndent == null) {
minIndent = codeStyleManager.zeroIndent();
}
return minIndent;
}
private int getCommentStart(int line, LineCommenter commenter ) {
int offset = myDocument.getLineStartOffset(line);
CharSequence chars = myDocument.getCharsSequence();
offset = CharArrayUtil.shiftForward(chars, offset, " \t");
return commenter.getCommentStart(offset,this);
}
private static boolean isJavaFile(PsiFile file) {
return file instanceof PsiJavaFile || file instanceof PsiCodeFragment;
}
public CharSequence getChars() {
return myDocument.getCharsSequence();
}
public Document getDocument() {
return myDocument;
}
public Project getProject() {
return myProject;
}
public PsiFile getFile() {
return myFile;
}
public int getStartLine() {
return myLine1;
}
public int getEndLine() {
return myLine2;
}
private class CommentPerformer {
LineCommenter myCommenter;
public CommentPerformer(LineCommenter commenter) {
myCommenter = commenter;
}
public void perform() {
if (CodeStyleSettingsManager.getSettings(myProject).LINE_COMMENT_AT_FIRST_COLUMN) {
new DefaultCommentPerformer(myCommenter).perform();
}
else {
new IndentCommentPerformer(myCommenter).perform();
}
}
}
private class DefaultCommentPerformer {
LineCommenter myCommenter;
public DefaultCommentPerformer(LineCommenter commenter) {
myCommenter = commenter;
}
public void perform() {
for (int line = myLine2; line >= myLine1; line--) {
int offset = myDocument.getLineStartOffset(line);
myCommenter.doComment(offset, line, CommentByLineCommentHandler.this);
}
}
}
private class IndentCommentPerformer {
LineCommenter myCommenter;
public IndentCommentPerformer(LineCommenter commenter) {
myCommenter = commenter;
}
public void perform() {
CharSequence chars = myDocument.getCharsSequence();
final FileType fileType = myFile.getFileType();
Indent minIndent = computeMinIndent(myLine1, myLine2, chars, myCodeStyleManager, fileType,myCommenter);
for (int line = myLine2; line >= myLine1; line--) {
int lineStart = myDocument.getLineStartOffset(line);
int offset = lineStart;
StringBuffer buffer = new StringBuffer();
while (true) {
String space = buffer.toString();
Indent indent = myCodeStyleManager.getIndent(space, fileType);
if (indent.isGreaterThan(minIndent) || indent.equals(minIndent)) break;
char c = chars.charAt(offset);
if (c != ' ' && c != '\t') {
String newSpace = myCodeStyleManager.fillIndent(minIndent, fileType);
myDocument.replaceString(lineStart, offset, newSpace);
offset = lineStart + newSpace.length();
break;
}
buffer.append(c);
offset++;
}
myCommenter.doComment(offset, line, CommentByLineCommentHandler.this);
}
}
}
public static class HtmlLineCommenter implements LineCommenter {
private static LineCommenter ourStyleCommenter;
private boolean myInitialized;
private LineCommenter myCommenterToUse;
private static LineCommenter ourScriptCommenter;
private void initialize(LineCommenterContext context) {
if (!myInitialized) {
PsiElement elementInclusiveRange = PsiUtil.getElementInclusiveRange(
context.getFile(),
new TextRange(
context.getDocument().getLineStartOffset(context.getStartLine()),
context.getDocument().getLineEndOffset(context.getEndLine())
)
);
elementInclusiveRange = PsiTreeUtil.getParentOfType(elementInclusiveRange,XmlTag.class,false);
if (elementInclusiveRange instanceof XmlTag) {
String tagName = ((XmlTag)elementInclusiveRange).getName();
if (tagName.equalsIgnoreCase("style") &&
ourStyleCommenter!=null
) {
myCommenterToUse = (LineCommenter)ourStyleCommenter.clone();
} else if (tagName.equalsIgnoreCase("script") &&
ourScriptCommenter!=null
) {
myCommenterToUse = (LineCommenter)ourScriptCommenter.clone();
}
}
if (myCommenterToUse == null) {
myCommenterToUse = new XmlLineCommenter();
}
myInitialized = true;
}
}
public static final void setStyleCommenter(LineCommenter _styleCommenter) {
ourStyleCommenter = _styleCommenter;
}
public void doComment(int offset, int line, LineCommenterContext context) {
initialize(context);
myCommenterToUse.doComment(offset, line, context);
}
public int getCommentEnd(int offset, LineCommenterContext context) {
initialize(context);
return myCommenterToUse.getCommentEnd(offset, context);
}
public int getCommentStart(int offset, LineCommenterContext context) {
initialize(context);
return myCommenterToUse.getCommentStart(offset, context);
}
public void doUncomment(int offset1, int offset2, LineCommenterContext context) {
initialize(context);
myCommenterToUse.doUncomment(offset1, offset2, context);
}
public Object clone() {
try {
return super.clone();
}
catch (CloneNotSupportedException e) {
e.printStackTrace();
return null;
}
}
public static void setScriptCommenter(LineCommenter scriptCommenter) {
ourScriptCommenter = scriptCommenter;
}
}
private static class XmlLineCommenter implements LineCommenter {
public void doComment(int offset, int line, LineCommenterContext context) {
final Document myDocument = context.getDocument();
myDocument.insertString(offset, "<!
myDocument.insertString(myDocument.getLineEndOffset(line), "
}
public int getCommentEnd(int offset, LineCommenterContext context) {
offset -= "-->".length();
if (offset < 0) return -1;
if (!CharArrayUtil.regionMatches(context.getChars(), offset, "-->")) return -1;
PsiDocumentManager.getInstance(context.getProject()).commitDocument(context.getDocument());
PsiElement element = context.getFile().findElementAt(offset);
if (element instanceof XmlToken && element.getTextRange().getStartOffset() == offset) {
XmlToken token = (XmlToken)element;
if (token.getTokenType() == XmlTokenType.XML_COMMENT_END) {
return offset;
}
}
return -1;
}
public int getCommentStart(int offset, LineCommenterContext context) {
if (offset > context.getDocument().getTextLength() - "<!--".length()) return -1;
if (!CharArrayUtil.regionMatches(context.getChars(), offset, "<!--")) return -1;
PsiDocumentManager.getInstance(context.getProject()).commitDocument(context.getDocument());
PsiElement element = context.getFile().findElementAt(offset);
if (element instanceof XmlToken && element.getTextRange().getStartOffset() == offset) {
XmlToken token = (XmlToken)element;
if (token.getTokenType() == XmlTokenType.XML_COMMENT_START) {
return offset;
}
}
return offset;
}
public void doUncomment(int offset1, int offset2, LineCommenterContext context) {
context.getDocument().deleteString(offset2, offset2 + 3);
context.getDocument().deleteString(offset1, offset1 + 4);
}
public Object clone() {
try {
return super.clone();
} catch(CloneNotSupportedException ex) {
return null;
}
}
}
private static class JavaLineCommenter implements LineCommenter {
public void doComment(int offset, int line, LineCommenterContext context) {
context.getDocument().insertString(offset, "//");
}
public int getCommentEnd(int offset, LineCommenterContext context) {
return offset;
}
public void doUncomment(int offset, int offset2, LineCommenterContext context) {
context.getDocument().deleteString(offset, offset + 2);
}
public Object clone() {
try {
return super.clone();
} catch(CloneNotSupportedException ex) {
return null;
}
}
public int getCommentStart(int offset, LineCommenterContext context) {
if (offset > context.getDocument().getTextLength() - 2) return -1;
if (!CharArrayUtil.regionMatches(context.getChars(), offset, "//")) return -1;
PsiDocumentManager.getInstance(context.getProject()).commitDocument(context.getDocument());
PsiElement element = context.getFile().findElementAt(offset);
if (element instanceof PsiComment && element.getTextRange().getStartOffset() == offset) {
return offset;
}
return -1;
}
}
private static class JspLineCommenter implements LineCommenter {
private boolean myJavaComment;
private boolean initialized;
public void doComment(int offset, int line, LineCommenterContext context) {
final Document myDocument = context.getDocument();
if (!initialized) {
PsiDocumentManager.getInstance(context.getProject()).commitAllDocuments();
myJavaComment = true;
for (int line1 = context.getStartLine(); line1 <= context.getEndLine(); line1++) {
int offset1 = myDocument.getLineStartOffset(line1);
final PsiElement elementAt = context.getFile().findElementAt(offset1);
if (elementAt instanceof JspToken) {
myJavaComment = false;
}
}
initialized = true;
}
if (myJavaComment) {
myDocument.insertString(offset, "
}
else {
myDocument.insertString(offset, "<%
myDocument.insertString(myDocument.getLineEndOffset(line), "
}
}
public void doUncomment(int offset1, int offset2, LineCommenterContext context) {
final Document myDocument = context.getDocument();
if (CharArrayUtil.regionMatches(myDocument.getCharsSequence(), offset1, "//")) {
myDocument.deleteString(offset1, offset1 + "//".length());
}
else {
myDocument.deleteString(offset2, offset2 + "--%>".length());
myDocument.deleteString(offset1, offset1 + "<%--".length());
}
}
public int getCommentStart(int offset, LineCommenterContext context) {
final Document myDocument = context.getDocument();
if (offset > myDocument.getTextLength() - "//".length()) return -1;
if (CharArrayUtil.regionMatches(myDocument.getCharsSequence(), offset, "//")) {
PsiDocumentManager.getInstance(context.getProject()).commitDocument(myDocument);
PsiElement element = context.getFile().findElementAt(offset);
if (element instanceof PsiComment && element.getTextRange().getStartOffset() == offset) {
return offset;
}
}
else {
if (offset > myDocument.getTextLength() - "<%--".length()) return -1;
if (CharArrayUtil.regionMatches(myDocument.getCharsSequence(), offset, "<%--")) return offset;
}
return -1;
}
public int getCommentEnd(int offset, LineCommenterContext context) {
if (offset < 0) return -1;
if (!CharArrayUtil.regionMatches(context.getChars(), offset - "--%>".length(), "--%>")) return offset;
return offset - "--%>".length();
}
public Object clone() {
try {
return super.clone();
} catch(CloneNotSupportedException ex) {
return null;
}
}
}
private static class JspxLineCommenter extends HtmlLineCommenter {
private boolean myJavaComment;
private boolean initialized;
public void doComment(int offset, int line, LineCommenter.LineCommenterContext context) {
final Document myDocument = context.getDocument();
initialize(context);
if (myJavaComment) {
myDocument.insertString(offset, "
}
else {
super.doComment(offset, line, context);
}
}
private void initialize(final LineCommenterContext context) {
if (initialized) return;
PsiDocumentManager.getInstance(context.getProject()).commitAllDocuments();
Document myDocument = context.getDocument();
myJavaComment = false;
for (int line1 = context.getStartLine(); line1 <= context.getEndLine(); line1++) {
int offset1 = myDocument.getLineStartOffset(line1);
final PsiElement elementAt = context.getFile().findElementAt(offset1);
final XmlTag tag = PsiTreeUtil.getParentOfType(elementAt, XmlTag.class, false);
if (tag == null || tag.getName().equals("jsp:scriptlet") || tag.getName().equals("jsp:declaration")) {
myJavaComment = true;
}
}
initialized = true;
}
public void doUncomment(int offset1, int offset2, LineCommenterContext context) {
final Document myDocument = context.getDocument();
initialize(context);
if (myJavaComment && CharArrayUtil.regionMatches(myDocument.getCharsSequence(), offset1, "//")) {
myDocument.deleteString(offset1, offset1 + "//".length());
}
else {
super.doUncomment(offset1, offset2, context);
}
}
public int getCommentStart(int offset, LineCommenterContext context) {
final Document myDocument = context.getDocument();
if (offset > myDocument.getTextLength() - "//".length()) return -1;
initialize(context);
if (myJavaComment && CharArrayUtil.regionMatches(myDocument.getCharsSequence(), offset, "//")) {
PsiDocumentManager.getInstance(context.getProject()).commitDocument(myDocument);
PsiElement element = context.getFile().findElementAt(offset);
if (element instanceof PsiComment && element.getTextRange().getStartOffset() == offset) {
return offset;
}
}
else {
return super.getCommentStart(offset, context);
}
return -1;
}
public int getCommentEnd(int offset, LineCommenterContext context) {
if (offset < 0) return -1;
initialize(context);
if (myJavaComment) return offset;
return super.getCommentEnd(offset, context);
}
}
}
|
package water.exec;
import hex.la.Matrix;
import java.util.*;
import org.joda.time.DateTime;
import org.joda.time.MutableDateTime;
import water.*;
import water.fvec.*;
import water.util.Utils;
/** Parse a generic R string and build an AST, in the context of an H2O Cloud
* @author cliffc@0xdata.com
*/
public abstract class ASTOp extends AST {
// The order of operator precedence follows R rules.
// Highest the first
static final public int OPP_PREFIX = 100; /* abc() */
static final public int OPP_POWER = 13;
static final public int OPP_UPLUS = 12;
static final public int OPP_UMINUS = 12;
static final public int OPP_MOD = 11; /* %xyz% */
static final public int OPP_MUL = 10;
static final public int OPP_DIV = 10;
static final public int OPP_PLUS = 9;
static final public int OPP_MINUS = 9;
static final public int OPP_GT = 8;
static final public int OPP_GE = 8;
static final public int OPP_LT = 8;
static final public int OPP_LE = 8;
static final public int OPP_EQ = 8;
static final public int OPP_NE = 8;
static final public int OPP_NOT = 7;
static final public int OPP_AND = 6;
static final public int OPP_OR = 5;
static final public int OPP_DILDA = 4;
static final public int OPP_RARROW = 3;
static final public int OPP_ASSN = 2;
static final public int OPP_LARROW = 1;
// Operator association order
static final public int OPA_LEFT = 0;
static final public int OPA_RIGHT = 1;
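// Illustrative example (not from the original source): with these numbers, 2 + 3 * 4 binds as
// 2 + (3 * 4) since OPP_MUL(10) > OPP_PLUS(9), and ^ is declared right-associated below, so
// 2^3^2 parses as 2^(3^2).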
// Operation formula notations
static final public int OPF_INFIX = 0;
static final public int OPF_PREFIX = 1;
// Tables of operators by arity
static final public HashMap<String,ASTOp> UNI_INFIX_OPS = new HashMap();
static final public HashMap<String,ASTOp> BIN_INFIX_OPS = new HashMap();
static final public HashMap<String,ASTOp> PREFIX_OPS = new HashMap();
static final public HashMap<String,ASTOp> UDF_OPS = new HashMap();
// To avoid a cyclic class-loading dependency, these are init'd before subclasses.
static final String VARS1[] = new String[]{ "", "x"};
static final String VARS2[] = new String[]{ "", "x","y"};
static {
// Unary infix ops
putUniInfix(new ASTUniPlus());
putUniInfix(new ASTUniMinus());
putUniInfix(new ASTNot());
// Binary infix ops
putBinInfix(new ASTPlus());
putBinInfix(new ASTSub());
putBinInfix(new ASTMul());
putBinInfix(new ASTDiv());
putBinInfix(new ASTPow());
putBinInfix(new ASTPow2());
putBinInfix(new ASTMod());
putBinInfix(new ASTAND());
putBinInfix(new ASTOR());
putBinInfix(new ASTLT());
putBinInfix(new ASTLE());
putBinInfix(new ASTGT());
putBinInfix(new ASTGE());
putBinInfix(new ASTEQ());
putBinInfix(new ASTNE());
putBinInfix(new ASTLA());
putBinInfix(new ASTLO());
putBinInfix(new ASTMMult());
// Unary prefix ops
putPrefix(new ASTIsNA());
putPrefix(new ASTNrow());
putPrefix(new ASTNcol());
putPrefix(new ASTAbs ());
putPrefix(new ASTSgn ());
putPrefix(new ASTSqrt());
putPrefix(new ASTCeil());
putPrefix(new ASTFlr ());
putPrefix(new ASTLog ());
putPrefix(new ASTExp ());
putPrefix(new ASTScale());
putPrefix(new ASTFactor());
putPrefix(new ASTIsFactor());
putPrefix(new ASTAnyFactor()); // For Runit testing
putPrefix(new ASTAnyNA());
putPrefix(new ASTIsTRUE());
putPrefix(new ASTCos()); // Trigonometric functions
putPrefix(new ASTSin());
putPrefix(new ASTTan());
putPrefix(new ASTACos());
putPrefix(new ASTASin());
putPrefix(new ASTATan());
putPrefix(new ASTCosh());
putPrefix(new ASTSinh());
putPrefix(new ASTTanh());
// Time extractions, to and from msec since the Unix Epoch
putPrefix(new ASTYear ());
putPrefix(new ASTMonth ());
putPrefix(new ASTDay ());
putPrefix(new ASTHour ());
putPrefix(new ASTMinute());
putPrefix(new ASTSecond());
putPrefix(new ASTMillis());
// More generic reducers
putPrefix(new ASTMin ());
putPrefix(new ASTMax ());
putPrefix(new ASTSum ());
putPrefix(new ASTSdev());
putPrefix(new ASTMean());
putPrefix(new ASTMinNaRm());
putPrefix(new ASTMaxNaRm());
putPrefix(new ASTSumNaRm());
// Misc
putPrefix(new ASTSeq ());
putPrefix(new ASTQtile ());
putPrefix(new ASTCat ());
putPrefix(new ASTCbind ());
putPrefix(new ASTTable ());
putPrefix(new ASTReduce());
putPrefix(new ASTIfElse());
putPrefix(new ASTRApply());
putPrefix(new ASTSApply());
putPrefix(new ASTddply ());
putPrefix(new ASTUnique());
putPrefix(new ASTRunif ());
putPrefix(new ASTCut ());
putPrefix(new ASTPrint ());
putPrefix(new ASTLs ());
}
static private void putUniInfix(ASTOp ast) { UNI_INFIX_OPS.put(ast.opStr(),ast); }
static private void putBinInfix(ASTOp ast) { BIN_INFIX_OPS.put(ast.opStr(),ast); }
static private void putPrefix (ASTOp ast) { PREFIX_OPS.put(ast.opStr(),ast); }
static void putUDF (ASTOp ast, String fn) { UDF_OPS.put(fn,ast); }
static void removeUDF (String fn) { UDF_OPS.remove(fn); }
static public ASTOp isOp(String id) {
// This order matters. If used as a prefix OP, `+` and `-` are binary only.
ASTOp op4 = UDF_OPS.get(id); if( op4 != null ) return op4;
return isBuiltinOp(id);
}
static public ASTOp isBuiltinOp(String id) {
ASTOp op3 = PREFIX_OPS.get(id); if( op3 != null ) return op3;
ASTOp op2 = BIN_INFIX_OPS.get(id); if( op2 != null ) return op2;
ASTOp op1 = UNI_INFIX_OPS.get(id); return op1;
}
static public boolean isInfixOp(String id) {
return BIN_INFIX_OPS.containsKey(id) || UNI_INFIX_OPS.containsKey(id);
}
static public boolean isUDF(String id) {
return UDF_OPS.containsKey(id);
}
static public boolean isUDF(ASTOp op) { return isUDF(op.opStr()); }
static public Set<String> opStrs() {
Set<String> all = UNI_INFIX_OPS.keySet();
all.addAll(BIN_INFIX_OPS.keySet());
all.addAll(PREFIX_OPS.keySet());
all.addAll(UDF_OPS.keySet());
return all;
}
final int _form; // formula notation, 0 - infix, 1 - prefix
final int _precedence; // operator precedence number
final int _association; // 0 - left associated, 1 - right associated
// All fields are final, because functions are immutable
final String _vars[]; // Variable names
ASTOp( String vars[], Type ts[], int form, int prec, int asso) {
super(Type.fcn(ts));
_form = form;
_precedence = prec;
_association = asso;
_vars = vars;
assert ts.length==vars.length : "No vars?" + this;
}
ASTOp( String vars[], Type t, int form, int prec, int asso) {
super(t);
_form = form;
_precedence = prec;
_association = asso;
_vars = vars;
assert t._ts.length==vars.length : "No vars?" + this;
}
abstract String opStr();
abstract ASTOp make();
public boolean leftAssociate( ) {
return _association == OPA_LEFT;
}
@Override public String toString() {
String s = _t._ts[0]+" "+opStr()+"(";
int len=_t._ts.length;
for( int i=1; i<len-1; i++ )
s += _t._ts[i]+" "+(_vars==null?"":_vars[i])+", ";
return s + (len > 1 ? _t._ts[len-1]+" "+(_vars==null?"":_vars[len-1]) : "")+")";
}
public String toString(boolean verbose) {
if( !verbose ) return toString(); // Just the function name & arg names
return toString();
}
static ASTOp parse(Exec2 E) {
int x = E._x;
String id = E.isID();
if( id == null ) return null;
ASTOp op = isOp(id); // The order matters. If used as a prefix OP, `+` and `-` are binary only.
// Also, if assigning to a built-in function then do not parse-as-a-fcn.
// Instead it will default to parsing as an ID in ASTAssign.parse
if( op != null ) {
int x1 = E._x;
if (!E.peek('=') && !(E.peek('<') && E.peek('-'))) {
E._x = x1; return op.make();
}
}
E._x = x;
return ASTFunc.parseFcn(E);
}
// Parse a unary infix OP or return null.
static ASTOp parseUniInfixOp(Exec2 E) {
int x = E._x;
String id = E.isID();
if( id == null ) return null;
ASTOp op = UNI_INFIX_OPS.get(id);
if( op != null) return op.make();
E._x = x; // Roll back, no parse happened
return null;
}
// Parse a binary infix OP or return null.
static ASTOp parseBinInfixOp(Exec2 E) {
int x = E._x;
String id = E.isID();
if( id == null ) return null;
ASTOp op = BIN_INFIX_OPS.get(id);
if( op != null) return op.make();
E._x = x; // Roll back, no parse happened
return null;
}
@Override void exec(Env env) { env.push(this); }
// Standard column-wise function application
abstract void apply(Env env, int argcnt);
// Special row-wise 'apply'
double[] map(Env env, double[] in, double[] out) { throw H2O.unimpl(); }
}
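// Unary ops apply either to a scalar popped off the environment stack or, for a frame, are
// broadcast column-wise over every chunk via MRTask2; e.g. sqrt over a frame yields a new frame
// of the same shape (illustrative reading of apply() below).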
abstract class ASTUniOp extends ASTOp {
static Type[] newsig() {
Type t1 = Type.dblary();
return new Type[]{t1,t1};
}
ASTUniOp( int form, int precedence, int association ) {
super(VARS1,newsig(),form,precedence,association);
}
double op( double d ) { throw H2O.fail(); }
protected ASTUniOp( String[] vars, Type[] types, int form, int precedence, int association ) {
super(vars,types,form,precedence,association);
}
@Override void apply(Env env, int argcnt) {
// Expect we can broadcast across all functions as needed.
if( !env.isAry() ) { env.poppush(op(env.popDbl())); return; }
Frame fr = env.popAry();
String skey = env.key();
final ASTUniOp uni = this; // Final 'this' so can use in closure
Frame fr2 = new MRTask2() {
@Override public void map( Chunk chks[], NewChunk nchks[] ) {
for( int i=0; i<nchks.length; i++ ) {
NewChunk n =nchks[i];
Chunk c = chks[i];
int rlen = c._len;
for( int r=0; r<rlen; r++ )
n.addNum(uni.op(c.at0(r)));
}
}
}.doAll(fr.numCols(),fr).outputFrame(fr._names, null);
env.subRef(fr,skey);
env.pop(); // Pop self
env.push(fr2);
}
}
abstract class ASTUniPrefixOp extends ASTUniOp {
ASTUniPrefixOp( ) { super(OPF_PREFIX,OPP_PREFIX,OPA_RIGHT); }
ASTUniPrefixOp( String[] vars, Type[] types ) { super(vars,types,OPF_PREFIX,OPP_PREFIX,OPA_RIGHT); }
}
class ASTCos extends ASTUniPrefixOp { @Override String opStr(){ return "cos"; } @Override ASTOp make() {return new ASTCos ();} @Override double op(double d) { return Math.cos(d);}}
class ASTSin extends ASTUniPrefixOp { @Override String opStr(){ return "sin"; } @Override ASTOp make() {return new ASTSin ();} @Override double op(double d) { return Math.sin(d);}}
class ASTTan extends ASTUniPrefixOp { @Override String opStr(){ return "tan"; } @Override ASTOp make() {return new ASTTan ();} @Override double op(double d) { return Math.tan(d);}}
class ASTACos extends ASTUniPrefixOp { @Override String opStr(){ return "acos"; } @Override ASTOp make() {return new ASTACos();} @Override double op(double d) { return Math.acos(d);}}
class ASTASin extends ASTUniPrefixOp { @Override String opStr(){ return "asin"; } @Override ASTOp make() {return new ASTASin();} @Override double op(double d) { return Math.asin(d);}}
class ASTATan extends ASTUniPrefixOp { @Override String opStr(){ return "atan"; } @Override ASTOp make() {return new ASTATan();} @Override double op(double d) { return Math.atan(d);}}
class ASTCosh extends ASTUniPrefixOp { @Override String opStr(){ return "cosh"; } @Override ASTOp make() {return new ASTCosh ();} @Override double op(double d) { return Math.cosh(d);}}
class ASTSinh extends ASTUniPrefixOp { @Override String opStr(){ return "sinh"; } @Override ASTOp make() {return new ASTSinh ();} @Override double op(double d) { return Math.sinh(d);}}
class ASTTanh extends ASTUniPrefixOp { @Override String opStr(){ return "tanh"; } @Override ASTOp make() {return new ASTTanh ();} @Override double op(double d) { return Math.tanh(d);}}
class ASTAbs extends ASTUniPrefixOp { @Override String opStr(){ return "abs"; } @Override ASTOp make() {return new ASTAbs ();} @Override double op(double d) { return Math.abs(d);}}
class ASTSgn extends ASTUniPrefixOp { @Override String opStr(){ return "sgn" ; } @Override ASTOp make() {return new ASTSgn ();} @Override double op(double d) { return Math.signum(d);}}
class ASTSqrt extends ASTUniPrefixOp { @Override String opStr(){ return "sqrt"; } @Override ASTOp make() {return new ASTSqrt();} @Override double op(double d) { return Math.sqrt(d);}}
class ASTCeil extends ASTUniPrefixOp { @Override String opStr(){ return "ceil"; } @Override ASTOp make() {return new ASTCeil();} @Override double op(double d) { return Math.ceil(d);}}
class ASTFlr extends ASTUniPrefixOp { @Override String opStr(){ return "floor"; } @Override ASTOp make() {return new ASTFlr ();} @Override double op(double d) { return Math.floor(d);}}
class ASTLog extends ASTUniPrefixOp { @Override String opStr(){ return "log"; } @Override ASTOp make() {return new ASTLog ();} @Override double op(double d) { return Math.log(d);}}
class ASTExp extends ASTUniPrefixOp { @Override String opStr(){ return "exp"; } @Override ASTOp make() {return new ASTExp ();} @Override double op(double d) { return Math.exp(d);}}
class ASTIsNA extends ASTUniPrefixOp { @Override String opStr(){ return "is.na"; } @Override ASTOp make() {return new ASTIsNA();} @Override double op(double d) { return Double.isNaN(d)?1:0;}}
class ASTNrow extends ASTUniPrefixOp {
ASTNrow() { super(VARS1,new Type[]{Type.DBL,Type.ARY}); }
@Override String opStr() { return "nrow"; }
@Override ASTOp make() {return this;}
@Override void apply(Env env, int argcnt) {
Frame fr = env.popAry();
String skey = env.key();
double d = fr.numRows();
env.subRef(fr,skey);
env.poppush(d);
}
}
class ASTNcol extends ASTUniPrefixOp {
ASTNcol() { super(VARS1,new Type[]{Type.DBL,Type.ARY}); }
@Override String opStr() { return "ncol"; }
@Override ASTOp make() {return this;}
@Override void apply(Env env, int argcnt) {
Frame fr = env.popAry();
String skey = env.key();
double d = fr.numCols();
env.subRef(fr,skey);
env.poppush(d);
}
}
class ASTIsFactor extends ASTUniPrefixOp {
ASTIsFactor() { super(VARS1,new Type[]{Type.DBL,Type.ARY}); }
@Override String opStr() { return "is.factor"; }
@Override ASTOp make() {return this;}
@Override void apply(Env env, int argcnt) {
if(!env.isAry()) { env.poppush(0); return; }
Frame fr = env.popAry();
String skey = env.key();
double d = 1;
Vec[] v = fr.vecs();
for(int i = 0; i < v.length; i++) {
if(!v[i].isEnum()) { d = 0; break; }
}
env.subRef(fr,skey);
env.poppush(d);
}
}
// Added to facilitate Runit testing
class ASTAnyFactor extends ASTUniPrefixOp {
ASTAnyFactor() { super(VARS1,new Type[]{Type.DBL,Type.ARY}); }
@Override String opStr() { return "any.factor"; }
@Override ASTOp make() {return this;}
@Override void apply(Env env, int argcnt) {
if(!env.isAry()) { env.poppush(0); return; }
Frame fr = env.popAry();
String skey = env.key();
double d = 0;
Vec[] v = fr.vecs();
for(int i = 0; i < v.length; i++) {
if(v[i].isEnum()) { d = 1; break; }
}
env.subRef(fr,skey);
env.poppush(d);
}
}
class ASTAnyNA extends ASTUniPrefixOp {
ASTAnyNA() { super(VARS1,new Type[]{Type.DBL,Type.ARY}); }
@Override String opStr() { return "any.na"; }
@Override ASTOp make() {return this;}
@Override void apply(Env env, int argcnt) {
if(!env.isAry()) { env.poppush(0); return; }
Frame fr = env.popAry();
String skey = env.key();
double d = 0;
Vec[] v = fr.vecs();
for(int i = 0; i < v.length; i++) {
if(v[i].naCnt() > 0) { d = 1; break; }
}
env.subRef(fr, skey);
env.poppush(d);
}
}
class ASTIsTRUE extends ASTUniPrefixOp {
ASTIsTRUE() {super(VARS1,new Type[]{Type.DBL,Type.unbound()});}
@Override String opStr() { return "isTRUE"; }
@Override ASTOp make() {return new ASTIsTRUE();} // to make sure fcn get bound at each new context
@Override void apply(Env env, int argcnt) {
double res = env.isDbl() && env.popDbl()==1.0 ? 1:0;
env.pop();
env.poppush(res);
}
}
class ASTScale extends ASTUniPrefixOp {
ASTScale() { super(VARS1,new Type[]{Type.ARY,Type.ARY}); }
@Override String opStr() { return "scale"; }
@Override ASTOp make() {return this;}
@Override void apply(Env env, int argcnt) {
if(!env.isAry()) { env.poppush(Double.NaN); return; }
Frame fr = env.popAry();
String skey = env.key();
Frame fr2 = new Scale().doIt(fr.numCols(), fr).outputFrame(fr._names, fr.domains());
env.subRef(fr,skey);
env.pop(); // Pop self
env.push(fr2);
}
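// Scale centers each numeric column by its mean and divides by its sigma; enum columns are
// passed through unchanged, with the numeric/enum column split tracked in _ind.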
private static class Scale extends MRTask2<Scale> {
protected int _nums = 0;
protected int[] _ind; // Saves indices of numeric cols first, followed by enums
protected double[] _normSub;
protected double[] _normMul;
@Override public void map(Chunk chks[], NewChunk nchks[]) {
// Normalize numeric cols only
for(int k = 0; k < _nums; k++) {
int i = _ind[k];
NewChunk n = nchks[i];
Chunk c = chks[i];
int rlen = c._len;
for(int r = 0; r < rlen; r++)
n.addNum((c.at0(r)-_normSub[i])*_normMul[i]);
}
for(int k = _nums; k < chks.length; k++) {
int i = _ind[k];
NewChunk n = nchks[i];
Chunk c = chks[i];
int rlen = c._len;
for(int r = 0; r < rlen; r++)
n.addNum(c.at0(r));
}
}
public Scale doIt(int outputs, Frame fr) { return dfork2(outputs, fr).getResult(); }
public Scale dfork2(int outputs, Frame fr) {
final Vec [] vecs = fr.vecs();
for(int i = 0; i < vecs.length; i++) {
if(!vecs[i].isEnum()) _nums++;
}
if(_normSub == null) _normSub = MemoryManager.malloc8d(_nums);
if(_normMul == null) { _normMul = MemoryManager.malloc8d(_nums); Arrays.fill(_normMul,1); }
if(_ind == null) _ind = MemoryManager.malloc4(vecs.length);
int ncnt = 0; int ccnt = 0;
for(int i = 0; i < vecs.length; i++){
if(!vecs[i].isEnum()) {
_normSub[ncnt] = vecs[i].mean();
_normMul[ncnt] = 1.0/vecs[i].sigma();
_ind[ncnt++] = i;
} else
_ind[_nums+(ccnt++)] = i;
}
assert ncnt == _nums && (ncnt + ccnt == vecs.length);
return dfork(outputs, fr, false);
}
}
}
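// Time ops interpret numeric values as milliseconds since the Unix epoch and apply the field
// extraction element-wise, reusing a single MutableDateTime per chunk; NAs are passed through.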
abstract class ASTTimeOp extends ASTOp {
static Type[] newsig() {
Type t1 = Type.dblary();
return new Type[]{t1,t1};
}
ASTTimeOp() { super(VARS1,newsig(),OPF_PREFIX,OPP_PREFIX,OPA_RIGHT); }
abstract long op( MutableDateTime dt );
@Override void apply(Env env, int argcnt) {
// Single instance of MDT for the single call
if( !env.isAry() ) { // Single point
double d = env.popDbl();
if( !Double.isNaN(d) ) d = op(new MutableDateTime((long)d));
env.poppush(d);
return;
}
// Whole column call
Frame fr = env.popAry();
String skey = env.key();
final ASTTimeOp uni = this; // Final 'this' so can use in closure
Frame fr2 = new MRTask2() {
@Override public void map( Chunk chks[], NewChunk nchks[] ) {
MutableDateTime dt = new MutableDateTime(0);
for( int i=0; i<nchks.length; i++ ) {
NewChunk n =nchks[i];
Chunk c = chks[i];
int rlen = c._len;
for( int r=0; r<rlen; r++ ) {
double d = c.at0(r);
if( !Double.isNaN(d) ) {
dt.setMillis((long)d);
d = uni.op(dt);
}
n.addNum(d);
}
}
}
}.doAll(fr.numCols(),fr).outputFrame(fr._names, null);
env.subRef(fr,skey);
env.pop(); // Pop self
env.push(fr2);
}
}
class ASTYear extends ASTTimeOp { @Override String opStr(){ return "year" ; } @Override ASTOp make() {return new ASTYear ();} @Override long op(MutableDateTime dt) { return dt.getYear();}}
class ASTMonth extends ASTTimeOp { @Override String opStr(){ return "month"; } @Override ASTOp make() {return new ASTMonth ();} @Override long op(MutableDateTime dt) { return dt.getMonthOfYear()-1;}}
class ASTDay extends ASTTimeOp { @Override String opStr(){ return "day" ; } @Override ASTOp make() {return new ASTDay ();} @Override long op(MutableDateTime dt) { return dt.getDayOfMonth();}}
class ASTHour extends ASTTimeOp { @Override String opStr(){ return "hour" ; } @Override ASTOp make() {return new ASTHour ();} @Override long op(MutableDateTime dt) { return dt.getHourOfDay();}}
class ASTMinute extends ASTTimeOp { @Override String opStr(){return "minute";} @Override ASTOp make() {return new ASTMinute();} @Override long op(MutableDateTime dt) { return dt.getMinuteOfHour();}}
class ASTSecond extends ASTTimeOp { @Override String opStr(){return "second";} @Override ASTOp make() {return new ASTSecond();} @Override long op(MutableDateTime dt) { return dt.getSecondOfMinute();}}
class ASTMillis extends ASTTimeOp { @Override String opStr(){return "millis";} @Override ASTOp make() {return new ASTMillis();} @Override long op(MutableDateTime dt) { return dt.getMillisOfSecond();}}
// Class of things that will auto-expand across arrays in a 2-to-1 way:
// applying 2 things (from an array or scalar to array or scalar) producing an
// array or scalar result.
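// E.g. fr + 2 adds 2 to every element, while fr1 + fr2 requires both frames to have the same
// number of rows and columns (illustrative example, not from the original source).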
abstract class ASTBinOp extends ASTOp {
static Type[] newsig() {
Type t1 = Type.dblary(), t2 = Type.dblary();
return new Type[]{Type.anyary(new Type[]{t1,t2}),t1,t2};
}
ASTBinOp( int form, int precedence, int association ) {
super(VARS2, newsig(), form, precedence, association); // binary ops are infix ops
}
abstract double op( double d0, double d1 );
@Override void apply(Env env, int argcnt) {
// Expect we can broadcast across all functions as needed.
Frame fr0 = null, fr1 = null;
double d0=0, d1=0;
if( env.isAry() ) fr1 = env.popAry(); else d1 = env.popDbl(); String k0 = env.key();
if( env.isAry() ) fr0 = env.popAry(); else d0 = env.popDbl(); String k1 = env.key();
if( fr0==null && fr1==null ) {
env.poppush(op(d0,d1));
return;
}
final boolean lf = fr0 != null;
final boolean rf = fr1 != null;
final double df0 = d0, df1 = d1;
Frame fr = null; // Do-All frame
int ncols = 0; // Result column count
if( fr0 !=null ) { // Left?
ncols = fr0.numCols();
if( fr1 != null ) {
if( fr0.numCols() != fr1.numCols() ||
fr0.numRows() != fr1.numRows() )
throw new IllegalArgumentException("Arrays must be same size: "+fr0+" vs "+fr1);
fr = new Frame(fr0).add(fr1,true);
} else {
fr = fr0;
}
} else {
ncols = fr1.numCols();
fr = fr1;
}
final ASTBinOp bin = this; // Final 'this' so can use in closure
// Run an arbitrary binary op on one or two frames & scalars
Frame fr2 = new MRTask2() {
@Override public void map( Chunk chks[], NewChunk nchks[] ) {
for( int i=0; i<nchks.length; i++ ) {
NewChunk n =nchks[i];
int rlen = chks[0]._len;
Chunk c0 = chks[i];
if( (!c0._vec.isEnum() &&
!(lf && rf && chks[i+nchks.length]._vec.isEnum())) ||
bin instanceof ASTEQ ||
bin instanceof ASTNE ) {
for( int r=0; r<rlen; r++ )
n.addNum(bin.op(lf ? chks[i ].at0(r) : df0,
rf ? chks[i+(lf ? nchks.length:0)].at0(r) : df1));
} else {
for( int r=0; r<rlen; r++ ) n.addNA();
}
}
}
}.doAll(ncols,fr).outputFrame((lf ? fr0 : fr1)._names,null);
if( fr0 != null ) env.subRef(fr0,k0);
if( fr1 != null ) env.subRef(fr1,k1);
env.pop();
env.push(fr2);
}
}
class ASTUniPlus extends ASTUniOp { ASTUniPlus() { super(OPF_INFIX, OPP_UPLUS, OPA_RIGHT); } @Override String opStr(){ return "+" ;} @Override ASTOp make() {return new ASTUniPlus(); } @Override double op(double d) { return d;}}
class ASTUniMinus extends ASTUniOp { ASTUniMinus() { super(OPF_INFIX, OPP_UMINUS, OPA_RIGHT); } @Override String opStr(){ return "-" ;} @Override ASTOp make() {return new ASTUniMinus();} @Override double op(double d) { return -d;}}
class ASTNot extends ASTUniOp { ASTNot() { super(OPF_INFIX, OPP_NOT, OPA_RIGHT); } @Override String opStr(){ return "!" ;} @Override ASTOp make() {return new ASTNot(); } @Override double op(double d) { return d==0?1:0; }}
class ASTPlus extends ASTBinOp { ASTPlus() { super(OPF_INFIX, OPP_PLUS, OPA_LEFT ); } @Override String opStr(){ return "+" ;} @Override ASTOp make() {return new ASTPlus();} @Override double op(double d0, double d1) { return d0+d1;}}
class ASTSub extends ASTBinOp { ASTSub() { super(OPF_INFIX, OPP_MINUS, OPA_LEFT); } @Override String opStr(){ return "-" ;} @Override ASTOp make() {return new ASTSub ();} @Override double op(double d0, double d1) { return d0-d1;}}
class ASTMul extends ASTBinOp { ASTMul() { super(OPF_INFIX, OPP_MUL, OPA_LEFT); } @Override String opStr(){ return "*" ;} @Override ASTOp make() {return new ASTMul ();} @Override double op(double d0, double d1) { return d0*d1;}}
class ASTDiv extends ASTBinOp { ASTDiv() { super(OPF_INFIX, OPP_DIV, OPA_LEFT); } @Override String opStr(){ return "/" ;} @Override ASTOp make() {return new ASTDiv ();} @Override double op(double d0, double d1) { return d0/d1;}}
class ASTPow extends ASTBinOp { ASTPow() { super(OPF_INFIX, OPP_POWER, OPA_RIGHT);} @Override String opStr(){ return "^" ;} @Override ASTOp make() {return new ASTPow ();} @Override double op(double d0, double d1) { return Math.pow(d0,d1);}}
class ASTPow2 extends ASTBinOp { ASTPow2() { super(OPF_INFIX, OPP_POWER, OPA_RIGHT);} @Override String opStr(){ return "**" ;} @Override ASTOp make() {return new ASTPow2();} @Override double op(double d0, double d1) { return Math.pow(d0,d1);}}
class ASTMod extends ASTBinOp { ASTMod() { super(OPF_INFIX, OPP_MOD, OPA_LEFT); } @Override String opStr(){ return "%" ;} @Override ASTOp make() {return new ASTMod ();} @Override double op(double d0, double d1) { return d0%d1;}}
class ASTLT extends ASTBinOp { ASTLT() { super(OPF_INFIX, OPP_LT, OPA_LEFT); } @Override String opStr(){ return "<" ;} @Override ASTOp make() {return new ASTLT ();} @Override double op(double d0, double d1) { return d0<d1 && !Utils.equalsWithinOneSmallUlp(d0,d1)?1:0;}}
class ASTLE extends ASTBinOp { ASTLE() { super(OPF_INFIX, OPP_LE, OPA_LEFT); } @Override String opStr(){ return "<=" ;} @Override ASTOp make() {return new ASTLE ();} @Override double op(double d0, double d1) { return d0<d1 || Utils.equalsWithinOneSmallUlp(d0,d1)?1:0;}}
class ASTGT extends ASTBinOp { ASTGT() { super(OPF_INFIX, OPP_GT, OPA_LEFT); } @Override String opStr(){ return ">" ;} @Override ASTOp make() {return new ASTGT ();} @Override double op(double d0, double d1) { return d0>d1 && !Utils.equalsWithinOneSmallUlp(d0,d1)?1:0;}}
class ASTGE extends ASTBinOp { ASTGE() { super(OPF_INFIX, OPP_GE, OPA_LEFT); } @Override String opStr(){ return ">=" ;} @Override ASTOp make() {return new ASTGE ();} @Override double op(double d0, double d1) { return d0>d1 || Utils.equalsWithinOneSmallUlp(d0,d1)?1:0;}}
class ASTEQ extends ASTBinOp { ASTEQ() { super(OPF_INFIX, OPP_EQ, OPA_LEFT); } @Override String opStr(){ return "==" ;} @Override ASTOp make() {return new ASTEQ ();} @Override double op(double d0, double d1) { return Utils.equalsWithinOneSmallUlp(d0,d1)?1:0;}}
class ASTNE extends ASTBinOp { ASTNE() { super(OPF_INFIX, OPP_NE, OPA_LEFT); } @Override String opStr(){ return "!=" ;} @Override ASTOp make() {return new ASTNE ();} @Override double op(double d0, double d1) { return Utils.equalsWithinOneSmallUlp(d0,d1)?0:1;}}
class ASTLA extends ASTBinOp { ASTLA() { super(OPF_INFIX, OPP_AND, OPA_LEFT); } @Override String opStr(){ return "&" ;} @Override ASTOp make() {return new ASTLA ();} @Override double op(double d0, double d1) { return (d0!=0 && d1!=0) ? (Double.isNaN(d0) || Double.isNaN(d1)?Double.NaN:1) :0;}}
class ASTLO extends ASTBinOp { ASTLO() { super(OPF_INFIX, OPP_OR, OPA_LEFT); } @Override String opStr(){ return "|" ;} @Override ASTOp make() {return new ASTLO ();} @Override double op(double d0, double d1) { return (d0==0 && d1==0) ? (Double.isNaN(d0) || Double.isNaN(d1)?Double.NaN:0) :1;}}
// Variable length; instances will be created of required length
abstract class ASTReducerOp extends ASTOp {
final double _init;
final boolean _narm; // na.rm in R
ASTReducerOp( double init, boolean narm ) {
super(new String[]{"","dbls"},
new Type[]{Type.DBL,Type.varargs(Type.dblary())},
OPF_PREFIX,
OPP_PREFIX,
OPA_RIGHT);
_init = init;
_narm = narm;
}
@Override double[] map(Env env, double[] in, double[] out) {
double s = _init;
for (double v : in) if (!_narm || !Double.isNaN(v)) s = op(s,v);
if (out == null || out.length < 1) out = new double[1];
out[0] = s;
return out;
}
abstract double op( double d0, double d1 );
@Override void apply(Env env, int argcnt) {
double sum=_init;
for( int i=0; i<argcnt-1; i++ )
if( env.isDbl() ) sum = op(sum,env.popDbl());
else {
Frame fr = env.popAry();
String skey = env.key();
sum = op(sum,_narm?new NaRmRedOp(this).doAll(fr)._d:new RedOp(this).doAll(fr)._d);
env.subRef(fr,skey);
}
env.poppush(sum);
}
private static class RedOp extends MRTask2<RedOp> {
final ASTReducerOp _bin;
RedOp( ASTReducerOp bin ) { _bin = bin; _d = bin._init; }
double _d;
@Override public void map( Chunk chks[] ) {
for( int i=0; i<chks.length; i++ ) {
Chunk C = chks[i];
for( int r=0; r<C._len; r++ )
_d = _bin.op(_d,C.at0(r));
if( Double.isNaN(_d) ) break;
}
}
@Override public void reduce( RedOp s ) { _d = _bin.op(_d,s._d); }
}
private static class NaRmRedOp extends MRTask2<NaRmRedOp> {
final ASTReducerOp _bin;
NaRmRedOp( ASTReducerOp bin ) { _bin = bin; _d = bin._init; }
double _d;
@Override public void map( Chunk chks[] ) {
for( int i=0; i<chks.length; i++ ) {
Chunk C = chks[i];
for( int r=0; r<C._len; r++ )
if (!Double.isNaN(C.at0(r)))
_d = _bin.op(_d,C.at0(r));
if( Double.isNaN(_d) ) break;
}
}
@Override public void reduce( NaRmRedOp s ) { _d = _bin.op(_d,s._d); }
}
}
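// The *.na.rm variants mirror R's na.rm=TRUE: e.g. sum over {1, NA, 2} is NA for "sum" but 3
// for "sum.na.rm" (illustrative example based on the _narm flag above).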
class ASTSum extends ASTReducerOp { ASTSum( ) {super(0,false);} @Override String opStr(){ return "sum" ;} @Override ASTOp make() {return new ASTSum(); } @Override double op(double d0, double d1) { return d0+d1;}}
class ASTSumNaRm extends ASTReducerOp { ASTSumNaRm( ) {super(0,true) ;} @Override String opStr(){ return "sum.na.rm";} @Override ASTOp make() {return new ASTSumNaRm();} @Override double op(double d0, double d1) { return d0+d1;}}
class ASTReduce extends ASTOp {
static final String VARS[] = new String[]{ "", "op2", "ary"};
static final Type TYPES[]= new Type []{ Type.ARY, Type.fcn(new Type[]{Type.DBL,Type.DBL,Type.DBL}), Type.ARY };
ASTReduce( ) { super(VARS,TYPES,OPF_PREFIX,OPP_PREFIX,OPA_RIGHT); }
@Override String opStr(){ return "Reduce";}
@Override ASTOp make() {return this;}
@Override void apply(Env env, int argcnt) { throw H2O.unimpl(); }
}
// TODO: Check refcnt mismatch issue: tmp = cbind(h.hex,3.5) results in different refcnts per col
class ASTCbind extends ASTOp {
@Override String opStr() { return "cbind"; }
ASTCbind( ) { super(new String[]{"cbind","ary"},
new Type[]{Type.ARY,Type.varargs(Type.dblary())},
OPF_PREFIX,
OPP_PREFIX,OPA_RIGHT); }
@Override ASTOp make() {return this;}
@Override void apply(Env env, int argcnt) {
Vec vmax = null;
for(int i = 0; i < argcnt-1; i++) {
if(env.isAry(-argcnt+1+i)) {
Frame tmp = env.ary(-argcnt+1+i);
if(vmax == null) vmax = tmp.vecs()[0];
else if(tmp.numRows() != vmax.length())
// R pads shorter cols to match max rows by cycling/repeating, but we won't support that
throw new IllegalArgumentException("Row mismatch! Expected " + String.valueOf(vmax.length()) + " but frame has " + String.valueOf(tmp.numRows()));
}
}
Frame fr = null;
if(env.isAry(-argcnt+1))
fr = new Frame(env.ary(-argcnt+1));
else {
// Vec v = new Vec(Key.make(), env.dbl(-argcnt+1));
double d = env.dbl(-argcnt+1);
Vec v = vmax == null ? new Vec(Key.make(), d) : vmax.makeCon(d);
fr = new Frame(new String[] {"c0"}, new Vec[] {v});
env.addRef(v);
}
for(int i = 1; i < argcnt-1; i++) {
if(env.isAry(-argcnt+1+i))
fr.add(env.ary(-argcnt+1+i),true);
else {
double d = env.dbl(-argcnt+1+i);
// Vec v = fr.vecs()[0].makeCon(d);
Vec v = vmax == null ? new Vec(Key.make(), d) : vmax.makeCon(d);
fr.add("c" + String.valueOf(i), v);
env.addRef(v);
}
}
env._ary[env._sp-argcnt] = fr;
env._sp -= argcnt-1;
assert env.check_refcnt(fr.anyVec());
}
}
class ASTMinNaRm extends ASTReducerOp {
ASTMinNaRm( ) { super( Double.POSITIVE_INFINITY, true ); }
@Override
String opStr(){ return "min.na.rm";}
@Override
ASTOp make() {return new ASTMinNaRm();}
@Override double op(double d0, double d1) { return Math.min(d0, d1); }
@Override void apply(Env env, int argcnt) {
double min = Double.POSITIVE_INFINITY;
int nacnt = 0;
for( int i=0; i<argcnt-1; i++ )
if( env.isDbl() ) {
double a = env.popDbl();
if (Double.isNaN(a)) nacnt++;
else min = Math.min(min, a);
}
else {
Frame fr = env.peekAry();
for (Vec v : fr.vecs())
min = Math.min(min, v.min());
env.pop();
}
if (nacnt > 0 && min == Double.POSITIVE_INFINITY)
min = Double.NaN;
env.poppush(min);
}
}
class ASTMaxNaRm extends ASTReducerOp {
ASTMaxNaRm( ) { super( Double.NEGATIVE_INFINITY, true ); }
@Override
String opStr(){ return "max.na.rm";}
@Override
ASTOp make() {return new ASTMaxNaRm();}
@Override double op(double d0, double d1) { return Math.max(d0,d1); }
@Override void apply(Env env, int argcnt) {
double max = Double.NEGATIVE_INFINITY;
int nacnt = 0;
for( int i=0; i<argcnt-1; i++ )
if( env.isDbl() ) {
double a = env.popDbl();
if (Double.isNaN(a)) nacnt++;
else max = Math.max(max, a);
}
else {
Frame fr = env.peekAry();
for (Vec v : fr.vecs())
max = Math.max(max, v.max());
env.pop();
}
if (nacnt > 0 && max == Double.NEGATIVE_INFINITY)
max = Double.NaN;
env.poppush(max);
}
}
class ASTMin extends ASTReducerOp {
ASTMin( ) { super( Double.POSITIVE_INFINITY, false); }
@Override
String opStr(){ return "min";}
@Override
ASTOp make() {return new ASTMin();}
@Override double op(double d0, double d1) { return Math.min(d0, d1); }
@Override void apply(Env env, int argcnt) {
double min = Double.POSITIVE_INFINITY;
for( int i=0; i<argcnt-1; i++ )
if( env.isDbl() ) min = Math.min(min, env.popDbl());
else {
Frame fr = env.peekAry();
for (Vec v : fr.vecs())
if (v.naCnt() > 0) { min = Double.NaN; break; }
else min = Math.min(min, v.min());
env.pop();
}
env.poppush(min);
}
}
class ASTMax extends ASTReducerOp {
ASTMax( ) { super( Double.NEGATIVE_INFINITY, false ); }
@Override
String opStr(){ return "max";}
@Override
ASTOp make() {return new ASTMax();}
@Override double op(double d0, double d1) { return Math.max(d0,d1); }
@Override void apply(Env env, int argcnt) {
double max = Double.NEGATIVE_INFINITY;
for( int i=0; i<argcnt-1; i++ )
if( env.isDbl() ) max = Math.max(max, env.popDbl());
else {
Frame fr = env.peekAry();
for (Vec v : fr.vecs())
if (v.naCnt() > 0) { max = Double.NaN; break; }
else max = Math.max(max, v.max());
env.pop();
}
env.poppush(max);
}
}
// R like binary operator &&
class ASTAND extends ASTOp {
@Override String opStr() { return "&&"; }
ASTAND( ) {
super(new String[]{"", "x", "y"},
new Type[]{Type.DBL,Type.dblary(),Type.dblary()},
OPF_PREFIX,
OPP_AND,
OPA_RIGHT);
}
@Override ASTOp make() { return new ASTAND(); }
@Override void apply(Env env, int argcnt) {
double op1 = env.isAry(-2) ? env.ary(-2).vecs()[0].at(0) : env.dbl(-2);
double op2 = op1==0 ? 0 :
Double.isNaN(op1) ? Double.NaN :
env.isAry(-1) ? env.ary(-1).vecs()[0].at(0) : env.dbl(-1);
env.pop(3);
if (!Double.isNaN(op2)) op2 = op2==0?0:1;
env.push(op2);
}
}
// R like binary operator ||
class ASTOR extends ASTOp {
@Override String opStr() { return "||"; }
ASTOR( ) {
super(new String[]{"", "x", "y"},
new Type[]{Type.DBL,Type.dblary(),Type.dblary()},
OPF_PREFIX,
OPP_OR,
OPA_RIGHT);
}
@Override ASTOp make() { return new ASTOR(); }
@Override void apply(Env env, int argcnt) {
double op1 = env.isAry(-2) ? env.ary(-2).vecs()[0].at(0) : env.dbl(-2);
double op2 = !Double.isNaN(op1) && op1!=0 ? 1 :
env.isAry(-1) ? env.ary(-1).vecs()[0].at(0) : env.dbl(-1);
if (!Double.isNaN(op2) && op2 != 0)
op2 = 1;
else if (op2 == 0 && Double.isNaN(op1))
op2 = Double.NaN;
env.push(op2);
}
}
// Brute force implementation of matrix multiply
class ASTMMult extends ASTOp {
@Override String opStr() { return "%*%"; }
ASTMMult( ) {
super(new String[]{"", "x", "y"},
new Type[]{Type.ARY,Type.ARY,Type.ARY},
OPF_INFIX,
OPP_MUL,
OPA_RIGHT);
}
@Override ASTOp make() { return new ASTMMult(); }
@Override void apply(Env env, int argcnt) {
if(!env.isAry(-2) || !env.isAry(-1))
throw new IllegalArgumentException("Operation requires two frames.");
Matrix fr1 = new Matrix(env.ary(-2));
Frame out = fr1.mult(env.ary(-1));
env.push(out);
}
}
// Similar to R's seq_len
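// e.g. seq_len(3) is expected to yield a one-column frame of 1,2,3 (assuming Vec.makeSeq produces
// a 1-based sequence to match R; illustrative note, not from the original source).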
class ASTSeq extends ASTOp {
@Override String opStr() { return "seq_len"; }
ASTSeq( ) {
super(new String[]{"seq_len", "n"},
new Type[]{Type.ARY,Type.DBL},
OPF_PREFIX,
OPP_PREFIX,
OPA_RIGHT);
}
@Override ASTOp make() { return this; }
@Override void apply(Env env, int argcnt) {
int len = (int)env.popDbl();
if (len <= 0)
throw new IllegalArgumentException("Error in seq_len(" +len+"): argument must be coercible to positive integer");
env.poppush(1,new Frame(new String[]{"c"}, new Vec[]{Vec.makeSeq(len)}),null);
}
}
// Compute sample quantiles given a set of cutoffs.
class ASTQtile extends ASTOp {
@Override String opStr() { return "quantile"; }
ASTQtile( ) {
super(new String[]{"quantile","x","probs"},
new Type[]{Type.ARY, Type.ARY, Type.ARY},
OPF_PREFIX,
OPP_PREFIX,
OPA_RIGHT);
}
@Override ASTQtile make() { return new ASTQtile(); }
@Override void apply(Env env, int argcnt) {
Frame x = env.ary(-2);
Vec xv = x.theVec("Argument #1 in Quantile contains more than 1 column.");
Vec pv = env.ary(-1).theVec("Argument #2 in Quantile contains more than 1 column.");
double p[] = new double[(int)pv.length()];
for (int i = 0; i < pv.length(); i++)
if ((p[i]=pv.at((long)i)) < 0 || p[i] > 1)
throw new IllegalArgumentException("Quantile: probs must be in the range of [0, 1].");
double samples[] = new Resample(10000).doAll(x)._local;
Arrays.sort(samples);
// create output vec
Key key = Vec.VectorGroup.VG_LEN1.addVecs(1)[0];
AppendableVec av = new AppendableVec(key);
NewChunk nc = new NewChunk(av,0);
for (double prob : p) {
double value;
int ix = (int)(samples.length * prob);
if (ix >= samples.length) value = xv.max();
else if (prob == 0) value = xv.min();
else value = samples[ix];
nc.addNum(value);
}
nc.close(0,null);
Vec v = av.close(null);
env.poppush(argcnt, new Frame(new String[]{"Quantile"}, new Vec[]{v}), null);
}
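// Quantiles are approximated: Resample below draws up to ~10000 values (proportionally per chunk),
// the sorted sample is indexed at floor(n*prob), and the exact column min/max are substituted at
// the extremes.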
static class Resample extends MRTask2<Resample> {
final int _total;
public double _local[];
public Resample(int nsample) { _total = nsample; }
@Override public void map(Chunk chk) {
Random r = new Random(chk._start);
int ns = Math.min(chk._len,(int)(_total*(double)chk._len/vecs(0).length()));
_local = new double[ns];
int n = 0, fill=0;
double val;
if (ns == chk._len)
for (n = 0; n < ns; n++) {
if (!Double.isNaN(val = chk.at0(n))) _local[fill++] = val;
}
else
for (n = 0; n < ns; n++) {
int i = r.nextInt(chk._len);
if (!Double.isNaN(val = chk.at0(i))) _local[fill++] = val;
}
_local = Arrays.copyOf(_local,fill);
}
@Override public void reduce(Resample other) {
int appendAt = _local.length;
_local = Arrays.copyOf(_local, _local.length+other._local.length);
System.arraycopy(other._local,0,_local,appendAt,other._local.length);
}
}
}
// Variable length; flatten all the component arys
class ASTCat extends ASTOp {
@Override String opStr() { return "c"; }
ASTCat( ) { super(new String[]{"cat","dbls"},
new Type[]{Type.ARY,Type.varargs(Type.dblary())},
OPF_PREFIX,
OPP_PREFIX,
OPA_RIGHT); }
@Override ASTOp make() {return new ASTCat();}
@Override double[] map(Env env, double[] in, double[] out) {
if (out == null || out.length < in.length) out = new double[in.length];
for (int i = 0; i < in.length; i++) out[i] = in[i];
return out;
}
@Override void apply(Env env, int argcnt) {
Key key = Vec.VectorGroup.VG_LEN1.addVecs(1)[0];
AppendableVec av = new AppendableVec(key);
NewChunk nc = new NewChunk(av,0);
for( int i=0; i<argcnt-1; i++ ) {
if (env.isAry(i-argcnt+1)) for (Vec vec : env.ary(i-argcnt+1).vecs()) {
if (vec.nChunks() > 1) H2O.unimpl();
for (int r = 0; r < vec.length(); r++) nc.addNum(vec.at(r));
}
else nc.addNum(env.dbl(i-argcnt+1));
}
nc.close(0,null);
Vec v = av.close(null);
env.pop(argcnt);
env.push(new Frame(new String[]{"c"}, new Vec[]{v}));
}
}
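// Builds a new random column with the same chunk layout (_espc) as the input frame and fills each
// chunk using a Random seeded from seed*cidx, so values are reproducible within a single call
// (seed is the current wall-clock time).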
class ASTRunif extends ASTOp {
@Override String opStr() { return "runif"; }
ASTRunif() { super(new String[]{"runif","dbls"},
new Type[]{Type.ARY,Type.ARY},
OPF_PREFIX,
OPP_PREFIX,
OPA_RIGHT); }
@Override ASTOp make() {return new ASTRunif();}
@Override void apply(Env env, int argcnt) {
Frame fr = env.popAry();
String skey = env.key();
long [] espc = fr.anyVec()._espc;
long rem = fr.numRows();
if(rem > espc[espc.length-1])throw H2O.unimpl();
for(int i = 0; i < espc.length; ++i){
if(rem <= espc[i]){
espc = Arrays.copyOf(espc, i+1);
break;
}
}
espc[espc.length-1] = rem;
Vec randVec = new Vec(fr.anyVec().group().addVecs(1)[0],espc);
Futures fs = new Futures();
DKV.put(randVec._key,randVec, fs);
for(int i = 0; i < espc.length-1; ++i)
DKV.put(randVec.chunkKey(i),new C0DChunk(0,(int)(espc[i+1]-espc[i])),fs);
fs.blockForPending();
final long seed = System.currentTimeMillis();
new MRTask2() {
@Override public void map(Chunk c){
Random rng = new Random(seed*c.cidx());
for(int i = 0; i < c._len; ++i)
c.set0(i, (float)rng.nextDouble());
}
}.doAll(randVec);
env.subRef(fr,skey);
env.pop();
env.push(new Frame(new String[]{"rnd"},new Vec[]{randVec}));
}
}
class ASTSdev extends ASTOp {
ASTSdev() { super(new String[]{"sd", "ary"}, new Type[]{Type.DBL,Type.ARY},
OPF_PREFIX,
OPP_PREFIX,
OPA_RIGHT); }
@Override String opStr() { return "sd"; }
@Override ASTOp make() { return new ASTSdev(); }
@Override void apply(Env env, int argcnt) {
Frame fr = env.peekAry();
if (fr.vecs().length > 1)
throw new IllegalArgumentException("sd does not apply to multiple cols.");
if (fr.vecs()[0].isEnum())
throw new IllegalArgumentException("sd only applies to numeric vector.");
double sig = fr.vecs()[0].sigma();
env.pop();
env.poppush(sig);
}
}
class ASTMean extends ASTOp {
ASTMean() { super(new String[]{"mean", "ary"}, new Type[]{Type.DBL,Type.ARY},
OPF_PREFIX,
OPP_PREFIX,
OPA_RIGHT); }
@Override String opStr() { return "mean"; }
@Override ASTOp make() { return new ASTMean(); }
@Override void apply(Env env, int argcnt) {
Frame fr = env.peekAry();
if (fr.vecs().length > 1)
throw new IllegalArgumentException("sd does not apply to multiple cols.");
if (fr.vecs()[0].isEnum())
throw new IllegalArgumentException("sd only applies to numeric vector.");
double ave = fr.vecs()[0].mean();
env.pop();
env.poppush(ave);
}
@Override double[] map(Env env, double[] in, double[] out) {
if (out == null || out.length < 1) out = new double[1];
double s = 0; int cnt=0;
for (double v : in) if( !Double.isNaN(v) ) { s+=v; cnt++; }
out[0] = s/cnt;
return out;
}
}
class ASTTable extends ASTOp {
ASTTable() { super(new String[]{"table", "ary"}, new Type[]{Type.ARY,Type.ARY},
OPF_PREFIX,
OPP_PREFIX,
OPA_RIGHT); }
@Override String opStr() { return "table"; }
@Override ASTOp make() { return new ASTTable(); }
@Override void apply(Env env, int argcnt) {
int ncol;
Frame fr = env.ary(-1);
if ((ncol = fr.vecs().length) > 2)
throw new IllegalArgumentException("table does not apply to more than two cols.");
for (int i = 0; i < ncol; i++) if (!fr.vecs()[i].isInt())
throw new IllegalArgumentException("table only applies to integer vectors.");
String[][] domains = new String[ncol][]; // the domain names to display as row and col names
// if vec does not have original domain, use levels returned by CollectDomain
long[][] levels = new long[ncol][];
for (int i = 0; i < ncol; i++) {
Vec v = fr.vecs()[i];
levels[i] = new Vec.CollectDomain(v).doAll(new Frame(v)).domain();
domains[i] = v.domain();
}
long[][] counts = new Tabularize(levels).doAll(fr)._counts;
// Build output vecs
Key keys[] = Vec.VectorGroup.VG_LEN1.addVecs(counts.length+1);
Vec[] vecs = new Vec[counts.length+1];
String[] colnames = new String[counts.length+1];
AppendableVec v0 = new AppendableVec(keys[0]);
v0._domain = fr.vecs()[0].domain() == null ? null : fr.vecs()[0].domain().clone();
NewChunk c0 = new NewChunk(v0,0);
for( int i=0; i<levels[0].length; i++ ) c0.addNum((double) levels[0][i]);
c0.close(0,null);
vecs[0] = v0.close(null);
colnames[0] = "row.names";
if (ncol==1) colnames[1] = "Count";
for (int level1=0; level1 < counts.length; level1++) {
AppendableVec v = new AppendableVec(keys[level1+1]);
NewChunk c = new NewChunk(v,0);
v._domain = null;
for (int level0=0; level0 < counts[level1].length; level0++)
c.addNum((double) counts[level1][level0]);
c.close(0, null);
vecs[level1+1] = v.close(null);
if (ncol>1) {
colnames[level1+1] = domains[1]==null? Long.toString(levels[1][level1]) : domains[1][(int)(levels[1][level1])];
}
}
env.pop(2);
env.push(new Frame(colnames, vecs));
}
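// Tabularize counts level co-occurrences: _counts[level1][level0] is the number of rows where
// column 0 takes its level0-th value and column 1 (if present) takes its level1-th value; with a
// single column, level1 is fixed at 0.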
private static class Tabularize extends MRTask2<Tabularize> {
public final long[][] _domains;
public long[][] _counts;
public Tabularize(long[][] dom) { super(); _domains=dom; }
@Override public void map(Chunk[] cs) {
assert cs.length == _domains.length;
_counts = _domains.length==1? new long[1][] : new long[_domains[1].length][];
for (int i=0; i < _counts.length; i++) _counts[i] = new long[_domains[0].length];
for (int i=0; i < cs[0]._len; i++) {
if (cs[0].isNA0(i)) continue;
long ds[] = _domains[0];
int level0 = Arrays.binarySearch(ds,cs[0].at80(i));
assert 0 <= level0 && level0 < ds.length : "l0="+level0+", len0="+ds.length+", min="+ds[0]+", max="+ds[ds.length-1];
int level1;
if (cs.length>1) {
if (cs[1].isNA0(i)) continue; else level1 = Arrays.binarySearch(_domains[1],(int)cs[1].at80(i));
assert 0 <= level1 && level1 < _domains[1].length;
} else {
level1 = 0;
}
_counts[level1][level0]++;
}
}
@Override public void reduce(Tabularize that) { Utils.add(_counts,that._counts); }
}
}
// Selective return. If the selector is a double, just eval both args and
// return the selected one. If the selector is an array, then it must be
// compatible with argument arrays (if any), and the selection is done
// element-by-element.
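// Illustrative example (not from the original source): ifelse(x > 0, x, 0) selects x or 0
// element-wise, broadcasting scalar branches across the test frame; the infix form a ? b : c
// parses to the same op (see parse below).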
class ASTIfElse extends ASTOp {
static final String VARS[] = new String[]{"ifelse","tst","true","false"};
static Type[] newsig() {
Type t1 = Type.unbound(), t2 = Type.unbound(), t3=Type.unbound();
return new Type[]{Type.anyary(new Type[]{t1,t2,t3}),t1,t2,t3};
}
ASTIfElse( ) { super(VARS, newsig(),OPF_INFIX,OPP_PREFIX,OPA_RIGHT); }
@Override ASTOp make() {return new ASTIfElse();}
@Override String opStr() { return "ifelse"; }
// Parse an infix trinary ?: operator
static AST parse(Exec2 E, AST tst, boolean EOS) {
if( !E.peek('?',true) ) return null;
int x=E._x;
AST tru=E.xpeek(':',E._x,parseCXExpr(E,false));
if( tru == null ) E.throwErr("Missing expression in trinary",x);
x = E._x;
AST fal=parseCXExpr(E,EOS);
if( fal == null ) E.throwErr("Missing expression in trinary",x);
return ASTApply.make(new AST[]{new ASTIfElse(),tst,tru,fal},E,x);
}
@Override void apply(Env env, int argcnt) {
// All or none are functions
assert ( env.isFcn(-1) && env.isFcn(-2) && _t.ret().isFcn())
|| (!env.isFcn(-1) && !env.isFcn(-2) && !_t.ret().isFcn());
// If the result is an array, then one of the other of the two must be an
// array. , and this is a broadcast op.
assert !_t.isAry() || env.isAry(-1) || env.isAry(-2);
// Single selection? Then just pick slots
if( !env.isAry(-3) ) {
if( env.dbl(-3)==0 ) env.pop_into_stk(-4);
else { env.pop(); env.pop_into_stk(-3); }
return;
}
Frame frtst=null, frtru= null, frfal= null;
double dtst= 0 , dtru= 0 , dfal= 0 ;
if( env.isAry() ) frfal= env.popAry(); else dfal = env.popDbl(); String kf = env.key();
if( env.isAry() ) frtru= env.popAry(); else dtru = env.popDbl(); String kt = env.key();
if( env.isAry() ) frtst= env.popAry(); else dtst = env.popDbl(); String kq = env.key();
// Multi-selection
// Build a doAll frame
Frame fr = new Frame(frtst); // Do-All frame
final int ncols = frtst.numCols(); // Result column count
final long nrows = frtst.numRows(); // Result row count
if( frtru !=null ) { // True is a Frame?
if( frtru.numCols() != ncols || frtru.numRows() != nrows )
throw new IllegalArgumentException("Arrays must be same size: "+frtst+" vs "+frtru);
fr.add(frtru,true);
}
if( frfal !=null ) { // False is a Frame?
if( frfal.numCols() != ncols || frfal.numRows() != nrows )
throw new IllegalArgumentException("Arrays must be same size: "+frtst+" vs "+frfal);
fr.add(frfal,true);
}
final boolean t = frtru != null;
final boolean f = frfal != null;
final double fdtru = dtru;
final double fdfal = dfal;
// Run a selection picking true/false across the frame
Frame fr2 = new MRTask2() {
@Override public void map( Chunk chks[], NewChunk nchks[] ) {
for( int i=0; i<nchks.length; i++ ) {
NewChunk n =nchks[i];
int off=i;
Chunk ctst= chks[off];
Chunk ctru= t ? chks[off+=ncols] : null;
Chunk cfal= f ? chks[off+=ncols] : null;
int rlen = ctst._len;
for( int r=0; r<rlen; r++ )
if( ctst.isNA0(r) ) n.addNA();
else n.addNum(ctst.at0(r)!=0 ? (t ? ctru.at0(r) : fdtru) : (f ? cfal.at0(r) : fdfal));
}
}
}.doAll(ncols,fr).outputFrame(fr._names,fr.domains());
env.subRef(frtst,kq);
if( frtru != null ) env.subRef(frtru,kt);
if( frfal != null ) env.subRef(frfal,kf);
env.pop();
env.push(fr2);
}
}
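// cut has two forms, mirroring R: cut(x, n) with a scalar bins x into n equal-width intervals,
// while cut(x, breaks) with a vector uses the (deduplicated, sorted) break points; values outside
// the break range become NA (illustrative summary of the two branches below).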
class ASTCut extends ASTOp {
ASTCut() { super(new String[]{"cut", "ary", "dbls"},
new Type[]{Type.ARY, Type.ARY, Type.dblary()},
OPF_PREFIX,
OPP_PREFIX,
OPA_RIGHT); }
@Override String opStr() { return "cut"; }
@Override ASTOp make() {return new ASTCut();}
@Override void apply(Env env, int argcnt) {
if(env.isDbl()) {
final int nbins = (int) Math.floor(env.popDbl());
if(nbins < 2)
throw new IllegalArgumentException("Number of intervals must be at least 2");
Frame fr = env.popAry();
String skey = env.key();
if(fr.vecs().length != 1 || fr.vecs()[0].isEnum())
throw new IllegalArgumentException("First argument must be a numeric column vector");
final double fmax = fr.vecs()[0].max();
final double fmin = fr.vecs()[0].min();
final double width = (fmax - fmin)/nbins;
if(width == 0) throw new IllegalArgumentException("Data vector is constant!");
// Note: I think R perturbs constant vecs slightly so it can still bin values
// Construct domain names from bins intervals
String[][] domains = new String[1][nbins];
domains[0][0] = "(" + String.valueOf(fmin - 0.001*(fmax-fmin)) + "," + String.valueOf(fmin + width) + "]";
for(int i = 1; i < nbins; i++)
domains[0][i] = "(" + String.valueOf(fmin + i*width) + "," + String.valueOf(fmin + (i+1)*width) + "]";
Frame fr2 = new MRTask2() {
@Override public void map(Chunk chk, NewChunk nchk) {
for(int r = 0; r < chk._len; r++) {
double x = chk.at0(r);
double n = x == fmax ? nbins-1 : Math.floor((x - fmin)/width);
nchk.addNum(n);
}
}
}.doAll(1,fr).outputFrame(fr._names, domains);
env.subRef(fr, skey);
env.pop();
env.push(fr2);
} else if(env.isAry()) {
Frame ary = env.popAry();
String skey1 = env.key();
if(ary.vecs().length != 1 || ary.vecs()[0].isEnum())
throw new IllegalArgumentException("Second argument must be a numeric column vector");
Vec brks = ary.vecs()[0];
// TODO: Check that num rows below some cutoff, else this will likely crash
// Remove duplicates and sort vector of breaks in ascending order
SortedSet<Double> temp = new TreeSet<Double>();
for(int i = 0; i < brks.length(); i++) temp.add(brks.at(i));
int cnt = 0; final double[] cutoffs = new double[temp.size()];
for(Double x : temp) { cutoffs[cnt] = x; cnt++; }
if(cutoffs.length < 2)
throw new IllegalArgumentException("Vector of breaks must have at least 2 unique values");
Frame fr = env.popAry();
String skey2 = env.key();
if(fr.vecs().length != 1 || fr.vecs()[0].isEnum())
throw new IllegalArgumentException("First argument must be a numeric column vector");
// Construct domain names from bin intervals
final int nbins = cutoffs.length-1;
String[][] domains = new String[1][nbins];
for(int i = 0; i < nbins; i++)
domains[0][i] = "(" + cutoffs[i] + "," + cutoffs[i+1] + "]";
Frame fr2 = new MRTask2() {
@Override public void map(Chunk chk, NewChunk nchk) {
for(int r = 0; r < chk._len; r++) {
double x = chk.at0(r);
if(x <= cutoffs[0] || x > cutoffs[cutoffs.length-1])
nchk.addNum(Double.NaN);
else {
for(int i = 1; i < cutoffs.length; i++) {
if(x <= cutoffs[i]) { nchk.addNum(i-1); break; }
}
}
}
}
}.doAll(1,fr).outputFrame(fr._names, domains);
env.subRef(ary, skey1);
env.subRef(fr, skey2);
env.pop();
env.push(fr2);
} else throw H2O.unimpl();
}
}
class ASTFactor extends ASTOp {
ASTFactor() { super(new String[]{"factor", "ary"},
new Type[]{Type.ARY, Type.ARY},
OPF_PREFIX,
OPP_PREFIX,OPA_RIGHT); }
@Override String opStr() { return "factor"; }
@Override ASTOp make() {return new ASTFactor();}
@Override void apply(Env env, int argcnt) {
Frame ary = env.peekAry(); // Ary on top of stack, keeps +1 refcnt
String skey = env.peekKey();
if( ary.numCols() != 1 )
throw new IllegalArgumentException("factor requires a single column");
Vec v0 = ary.vecs()[0];
Vec v1 = v0.isEnum() ? null : v0.toEnum();
if (v1 != null) {
ary = new Frame(ary._names,new Vec[]{v1});
skey = null;
}
env.poppush(2, ary, skey);
}
}
class ASTPrint extends ASTOp {
static Type[] newsig() {
Type t1 = Type.unbound();
return new Type[]{t1, t1, Type.varargs(Type.unbound())};
}
ASTPrint() { super(new String[]{"print", "x", "y..."},
newsig(),
OPF_PREFIX,
OPP_PREFIX,OPA_RIGHT); }
@Override String opStr() { return "print"; }
@Override ASTOp make() {return new ASTPrint();}
@Override void apply(Env env, int argcnt) {
for( int i=1; i<argcnt; i++ ) {
if( env.isAry(i-argcnt) ) {
env._sb.append(env.ary(i-argcnt).toStringAll());
} else {
env._sb.append(env.toString(env._sp+i-argcnt,true));
}
}
env.pop(argcnt-2); // Pop most args
env.pop_into_stk(-2); // Pop off fcn, returning 1st arg
}
}
/**
* R 'ls' command.
*
 * This method is purely for the console right now; it prints into the string buffer.
* JSON response is not configured at all.
*/
class ASTLs extends ASTOp {
ASTLs() { super(new String[]{"ls"},
new Type[]{Type.DBL},
OPF_PREFIX,
OPP_PREFIX,
OPA_RIGHT); }
@Override String opStr() { return "ls"; }
@Override ASTOp make() {return new ASTLs();}
@Override void apply(Env env, int argcnt) {
for( Key key : H2O.globalKeySet(null) )
if( key.user_allowed() && H2O.get(key) != null )
env._sb.append(key.toString());
// Pop the self-function and push a zero.
env.pop();
env.push(0.0);
}
}
|
package mondrian.olap;
import java_cup.runtime.Symbol;
import org.apache.log4j.Logger;
import java.util.List;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Hashtable;
import java.io.IOException;
/**
* Lexical analyzer for MDX.
*/
public class Scanner {
private static final Logger LOGGER = Logger.getLogger(Scanner.class);
/** single lookahead character */
protected int nextChar;
    /** buffer of characters read ahead of {@link #nextChar} */
private int lookaheadChars[] = new int[16];
private int firstLookaheadChar = 0;
private int lastLookaheadChar = 0;
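    // lookaheadChars[firstLookaheadChar .. lastLookaheadChar - 1] holds the
    // characters already read ahead of nextChar: lookahead(n) fills it on
    // demand from getChar(), and advance() consumes from it before reading
    // any new input.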
private Hashtable m_resWordsTable;
private int iMaxResword;
private String m_aResWords[];
protected boolean debug;
private List lines; // lines[x] is the start of the x'th line
private int iChar; // number of times advance() has been called
private int iPrevChar; // end of previous token
private int previousSymbol; // previous symbol returned
private boolean inFormula;
/**
* Comment delimiters. Modify this list to support other comment styles.
*/
private static final String[][] commentDelim = {
{"//", null},
{"--", null},
{""}
};
/**
* Whether to allow nested comments.
*/
private static final boolean allowNestedComments = true;
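    // With nesting allowed, a delimited comment such as
    //   /* outer /* inner */ still outer */
    // only ends once a matching end delimiter has been seen for every start
    // delimiter; skipComment() tracks this with a depth counter.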
Scanner(boolean debug) {
this.debug = debug;
}
/**
* Returns the current nested comments state.
*/
public static boolean getNestedCommentsState() {
return allowNestedComments;
}
/**
* Returns the list of comment delimiters.
*/
public static String[][] getCommentDelimiters() {
return commentDelim;
}
/* Advance input by one character, setting {@link #nextChar}. */
private final void advance()
throws java.io.IOException {
if (firstLookaheadChar == lastLookaheadChar) {
// We have nothing in the lookahead buffer.
nextChar = getChar();
} else {
// We have called lookahead(); advance to the next character it got.
nextChar = lookaheadChars[firstLookaheadChar++];
if (firstLookaheadChar == lastLookaheadChar) {
firstLookaheadChar = 0;
lastLookaheadChar = 0;
}
}
if (nextChar == '\012') {
lines.add(new Integer(iChar));
}
iChar++;
}
/** Peek at the character after {@link #nextChar} without advancing. */
private final int lookahead()
throws java.io.IOException {
return lookahead(1);
}
/**
* Peeks at the character n after {@link #nextChar} without advancing.
* lookahead(0) returns the current char (nextChar).
     * lookahead(1) returns the next char (same as lookahead()).
*/
private final int lookahead(int n)
throws java.io.IOException {
if (n == 0) {
return nextChar;
}
else {
// if the desired character not in lookahead buffer, read it in
if (n > lastLookaheadChar - firstLookaheadChar) {
int len=lastLookaheadChar - firstLookaheadChar;
int t[];
// make sure we do not go off the end of the buffer
if (n + firstLookaheadChar > lookaheadChars.length) {
if (n > lookaheadChars.length) {
// the array is too small; make it bigger and shift
// everything to the beginning.
t=new int[n * 2];
}
else {
// the array is big enough, so just shift everything
// to the beginning of it.
t = lookaheadChars;
}
for (int x = 0; x < len; x++) {
t[x] = lookaheadChars[x + firstLookaheadChar];
}
lookaheadChars = t;
firstLookaheadChar = 0;
lastLookaheadChar = len;
}
// read ahead enough
while (n > lastLookaheadChar - firstLookaheadChar) {
lookaheadChars[lastLookaheadChar++] = getChar();
}
}
return lookaheadChars[n - 1 + firstLookaheadChar];
}
}
/** Read a character from input, returning -1 if end of input. */
protected int getChar()
throws java.io.IOException {
return System.in.read();
}
/** Initialize the scanner */
public void init()
throws java.io.IOException {
initReswords();
lines = new ArrayList();
iChar = iPrevChar = 0;
advance();
}
/**
* Deduces the line and column (0-based) of a symbol.
* Called by {@link Parser#syntax_error}.
*/
void getLocation(Symbol symbol, int[] loc) {
int iTarget = symbol.left;
int iLine = -1;
int iLineEnd = 0;
int iLineStart;
do {
iLine++;
iLineStart = iLineEnd;
iLineEnd = Integer.MAX_VALUE;
if (iLine < lines.size()) {
iLineEnd = ((Integer) lines.get(iLine)).intValue();
}
} while (iLineEnd < iTarget);
loc[0] = iLine; // line
loc[1] = iTarget - iLineStart; // column
}
private Symbol trace(Symbol s) {
if (debug) {
String name = null;
if (s.sym < m_aResWords.length) {
name = m_aResWords[s.sym];
}
LOGGER.error("Scanner returns #" + s.sym +
(name == null ? "" : ":" + name) +
(s.value == null ? "" : "(" + s.value.toString() + ")"));
}
return s;
}
private void initResword(int id, String s) {
m_resWordsTable.put(s, new Integer(id));
if (id > iMaxResword) {
iMaxResword = id;
}
}
private void initReswords() {
// This list generated by piping the 'terminal' declaration in mdx.cup
// through:
// grep -list // |
// sed -e 's/,//' |
// awk '{printf "initResword(%20s,%c%s%c);",$1,34,$1,34}'
m_resWordsTable = new Hashtable();
iMaxResword = 0;
// initResword(ParserSym.ALL ,"ALL");
initResword(ParserSym.AND ,"AND");
initResword(ParserSym.AS ,"AS");
// initResword(ParserSym.ASC ,"ASC");
initResword(ParserSym.AXIS ,"AXIS");
// initResword(ParserSym.BACK_COLOR ,"BACK_COLOR");
// initResword(ParserSym.BASC ,"BASC");
// initResword(ParserSym.BDESC ,"BDESC");
initResword(ParserSym.CASE ,"CASE");
initResword(ParserSym.CELL ,"CELL");
// initResword(ParserSym.CELL_ORDINAL ,"CELL_ORDINAL");
// initResword(ParserSym.CHAPTERS ,"CHAPTERS");
// initResword(ParserSym.CHILDREN ,"CHILDREN");
// initResword(ParserSym.COLUMNS ,"COLUMNS");
// initResword(ParserSym.DESC ,"DESC");
initResword(ParserSym.DIMENSION ,"DIMENSION");
initResword(ParserSym.ELSE ,"ELSE");
initResword(ParserSym.EMPTY ,"EMPTY");
initResword(ParserSym.END ,"END");
// initResword(ParserSym.FIRSTCHILD ,"FIRSTCHILD");
// initResword(ParserSym.FIRSTSIBLING ,"FIRSTSIBLING");
// initResword(ParserSym.FONT_FLAGS ,"FONT_FLAGS");
// initResword(ParserSym.FONT_NAME ,"FONT_NAME");
// initResword(ParserSym.FONT_SIZE ,"FONT_SIZE");
// initResword(ParserSym.FORE_COLOR ,"FORE_COLOR");
// initResword(ParserSym.FORMATTED_VALUE ,"FORMATTED_VALUE");
// initResword(ParserSym.FORMAT_STRING ,"FORMAT_STRING");
initResword(ParserSym.FROM ,"FROM");
initResword(ParserSym.IS ,"IS");
// initResword(ParserSym.LAG ,"LAG");
// initResword(ParserSym.LASTCHILD ,"LASTCHILD");
// initResword(ParserSym.LASTSIBLING ,"LASTSIBLING");
// initResword(ParserSym.LEAD ,"LEAD");
initResword(ParserSym.MEMBER ,"MEMBER");
// initResword(ParserSym.MEMBERS ,"MEMBERS");
// initResword(ParserSym.NEXTMEMBER ,"NEXTMEMBER");
initResword(ParserSym.NON ,"NON");
initResword(ParserSym.NOT ,"NOT");
initResword(ParserSym.NULL ,"NULL");
initResword(ParserSym.ON ,"ON");
initResword(ParserSym.OR ,"OR");
// initResword(ParserSym.PAGES ,"PAGES");
// initResword(ParserSym.PARENT ,"PARENT");
// initResword(ParserSym.PREVMEMBER ,"PREVMEMBER");
initResword(ParserSym.PROPERTIES ,"PROPERTIES");
// initResword(ParserSym.RECURSIVE ,"RECURSIVE");
// initResword(ParserSym.ROWS ,"ROWS");
// initResword(ParserSym.SECTIONS ,"SECTIONS");
initResword(ParserSym.SELECT ,"SELECT");
initResword(ParserSym.SET ,"SET");
// initResword(ParserSym.SOLVE_ORDER ,"SOLVE_ORDER");
initResword(ParserSym.THEN ,"THEN");
// initResword(ParserSym.VALUE ,"VALUE");
initResword(ParserSym.WHEN ,"WHEN");
initResword(ParserSym.WHERE ,"WHERE");
initResword(ParserSym.WITH ,"WITH");
initResword(ParserSym.XOR ,"XOR");
m_aResWords = new String[iMaxResword + 1];
Enumeration e = m_resWordsTable.keys();
while (e.hasMoreElements()) {
Object o = e.nextElement();
String s = (String) o;
int i = ((Integer) m_resWordsTable.get(s)).intValue();
m_aResWords[i] = s;
}
}
/** return the name of the reserved word whose token code is "i" */
public String lookupReserved(int i) {
return m_aResWords[i];
}
private Symbol makeSymbol(int id,Object o) {
int iPrevPrevChar = iPrevChar;
this.iPrevChar = iChar;
this.previousSymbol = id;
return trace(new Symbol(id, iPrevPrevChar, iChar, o));
}
private Symbol makeNumber(double mantissa, int exponent) {
double d = mantissa * java.lang.Math.pow(10, exponent);
return makeSymbol(ParserSym.NUMBER, new Double(d));
}
private Symbol makeId(String s, boolean quoted, boolean ampersand) {
return makeSymbol(
quoted && ampersand ? ParserSym.AMP_QUOTED_ID :
quoted ? ParserSym.QUOTED_ID :
ParserSym.ID,
s);
}
private Symbol makeRes(int i) {
return makeSymbol(i, m_aResWords[i]);
}
private Symbol makeToken(int i, String s) {
return makeSymbol(i, s);
}
private Symbol makeString(String s) {
if (inFormula) {
inFormula = false;
return makeSymbol(ParserSym.FORMULA_STRING, s);
} else {
return makeSymbol(ParserSym.STRING, s);
}
}
/**
* Discards all characters until the end of the current line.
*/
private void skipToEOL() throws IOException {
while (nextChar != -1 && nextChar != '\012') {
advance();
}
}
/**
     * Eats a delimited comment.
     * The supported delimiter pairs are kept in commentDelim; the pair in
     * effect is passed in as startDelim / endDelim.
     * End of file terminates a comment without error.
*/
private void skipComment(
final String startDelim,
final String endDelim) throws IOException {
int depth = 1;
// skip the starting delimiter
for (int x = 0; x < startDelim.length(); x++) {
advance();
}
for (;;) {
if (nextChar == -1) {
return;
}
else if (checkForSymbol(endDelim)) {
// eat the end delimiter
for (int x = 0; x < endDelim.length(); x++) {
advance();
}
if (--depth == 0) {
return;
}
}
else if (allowNestedComments && checkForSymbol(startDelim)) {
// eat the nested start delimiter
for (int x = 0; x < startDelim.length(); x++) {
advance();
}
depth++;
}
else {
advance();
}
}
}
/**
* If the next tokens are comments, skip over them.
*/
private void searchForComments() throws IOException {
// eat all following comments
boolean foundComment;
do {
foundComment = false;
for (int x = 0; x < commentDelim.length; x++) {
if (checkForSymbol(commentDelim[x][0])) {
if (commentDelim[x][1] == null) {
foundComment = true;
skipToEOL();
} else {
foundComment = true;
skipComment(commentDelim[x][0], commentDelim[x][1]);
}
}
}
} while (foundComment);
}
/**
* Checks if the next symbol is the supplied string
*/
private boolean checkForSymbol(final String symb) throws IOException {
for (int x = 0; x < symb.length(); x++) {
if (symb.charAt(x) != lookahead(x)) {
return false;
}
}
return true;
}
/**
* Recognizes and returns the next complete token.
*/
public Symbol next_token() throws IOException {
StringBuffer id;
boolean ampersandId = false;
for (;;) {
searchForComments();
switch (nextChar) {
case '.':
switch (lookahead()) {
case '0': case '1': case '2': case '3': case '4':
case '5': case '6': case '7': case '8': case '9':
                    // We're looking at the '.' at the start of a number,
                    // e.g. .1; fall through to parse a number.
break;
default:
advance();
return makeToken(ParserSym.DOT, ".");
}
// fall through
case '0': case '1': case '2': case '3': case '4':
case '5': case '6': case '7': case '8': case '9':
// Parse a number. Valid examples include 1, 1.2, 0.1, .1,
// 1e2, 1E2, 1e-2, 1e+2. Invalid examples include e2, 1.2.3,
// 1e2e3, 1e2.3.
// Signs preceding numbers (e.g. -1, +1E-5) are valid, but are
// handled by the parser.
final int leftOfPoint = 0;
final int rightOfPoint = 1;
final int inExponent = 2;
int n = 0, nDigits = 0, nSign = 0, exponent = 0;
double mantissa = 0.0;
int state = leftOfPoint;
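                // Example: scanning "1.5e-2" stays leftOfPoint for the 1,
                // switches to rightOfPoint at '.', folds the 5 into the
                // mantissa (1.5) on 'e', enters inExponent, negates the sign
                // on '-', reads 2, and the terminating character produces
                // makeNumber(1.5, -2), i.e. 0.015.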
for (;;) {
if (nextChar == '.') {
if (state == leftOfPoint) {
state = rightOfPoint;
mantissa = n;
n = nDigits = 0;
nSign = 1;
advance();
} else {
// Error: we are seeing a point in the exponent
// (e.g. 1E2.3 or 1.2E3.4) or a second point in the
// mantissa (e.g. 1.2.3). Return what we've got
// and let the parser raise the error.
if (state == rightOfPoint) {
mantissa += (n * java.lang.Math.pow(
10, -nDigits));
} else {
exponent = n * nSign;
}
return makeNumber(mantissa, exponent);
}
} else if (nextChar == 'E' || nextChar == 'e') {
if (state == inExponent) {
// Error: we are seeing an 'e' in the exponent
// (e.g. 1.2e3e4). Return what we've got and let
// the parser raise the error.
exponent = n * nSign;
return makeNumber(mantissa, exponent);
} else {
if (state == leftOfPoint) {
mantissa = n;
} else {
mantissa += (n * java.lang.Math.pow(
10, -nDigits));
}
n = nDigits = 0;
nSign = 1;
advance();
state = inExponent;
}
                    } else if ((nextChar == '+' || nextChar == '-') &&
                               state == inExponent &&
                               nDigits == 0) {
                        // We're looking at the sign after the 'e';
                        // only '-' negates the exponent.
                        if (nextChar == '-') {
                            nSign = -nSign;
                        }
                        advance();
} else if (nextChar >= '0' && nextChar <= '9') {
n = n * 10 + (nextChar - '0');
nDigits++;
advance();
} else {
// Reached end of number.
if (state == leftOfPoint) {
mantissa = n;
} else if (state == rightOfPoint) {
mantissa += (n * java.lang.Math.pow(10, -nDigits));
} else {
exponent = n * nSign;
}
return makeNumber(mantissa, exponent);
}
}
case 'a': case 'b': case 'c': case 'd': case 'e': case 'f':
case 'g': case 'h': case 'i': case 'j': case 'k': case 'l':
case 'm': case 'n': case 'o': case 'p': case 'q': case 'r':
case 's': case 't': case 'u': case 'v': case 'w': case 'x':
case 'y': case 'z':
case 'A': case 'B': case 'C': case 'D': case 'E': case 'F':
case 'G': case 'H': case 'I': case 'J': case 'K': case 'L':
case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R':
case 'S': case 'T': case 'U': case 'V': case 'W': case 'X':
case 'Y': case 'Z':
/* parse an identifier */
id = new StringBuffer();
for (;;) {
id.append((char)nextChar);
advance();
switch (nextChar) {
case 'a': case 'b': case 'c': case 'd': case 'e': case 'f':
case 'g': case 'h': case 'i': case 'j': case 'k': case 'l':
case 'm': case 'n': case 'o': case 'p': case 'q': case 'r':
case 's': case 't': case 'u': case 'v': case 'w': case 'x':
case 'y': case 'z':
case 'A': case 'B': case 'C': case 'D': case 'E': case 'F':
case 'G': case 'H': case 'I': case 'J': case 'K': case 'L':
case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R':
case 'S': case 'T': case 'U': case 'V': case 'W': case 'X':
case 'Y': case 'Z':
case '0': case '1': case '2': case '3': case '4':
case '5': case '6': case '7': case '8': case '9':
case '_':
break;
default:
String strId = id.toString();
Integer i = (Integer) m_resWordsTable.get(
strId.toUpperCase());
if (i == null) {
// identifier
return makeId(strId, false, false);
} else {
// reserved word
return makeRes(i.intValue());
}
}
}
case '&':
advance();
if (nextChar == '[') {
ampersandId = true;
// fall through
} else {
return makeToken(ParserSym.UNKNOWN, "&");
}
case '[':
/* parse a delimited identifier */
id = new StringBuffer();
for (;;) {
advance();
switch (nextChar) {
case ']':
advance();
if (nextChar == ']') {
// ] escaped with ] - just take one
id.append(']');
break;
} else {
// end of identifier
if (ampersandId) {
ampersandId = false;
return makeId(id.toString(), true, true);
} else {
return makeId(id.toString(), true, false);
}
}
case -1:
if (ampersandId) {
ampersandId = false;
return makeId(id.toString(), true, true);
} else {
return makeId(id.toString(), true, false);
}
default:
id.append((char)nextChar);
}
}
case ':': advance(); return makeToken(ParserSym.COLON, ":");
case ',': advance(); return makeToken(ParserSym.COMMA, ",");
case '=': advance(); return makeToken(ParserSym.EQ, "=");
case '<':
advance();
switch (nextChar) {
case '>': advance(); return makeToken(ParserSym.NE, "<>");
case '=': advance(); return makeToken(ParserSym.LE, "<=");
default: return makeToken(ParserSym.LT, "<");
}
case '>':
advance();
switch (nextChar) {
case '=': advance(); return makeToken(ParserSym.GE, ">=");
default: return makeToken(ParserSym.GT, ">");
}
case '{': advance(); return makeToken(ParserSym.LBRACE, "{");
case '(': advance(); return makeToken(ParserSym.LPAREN, "(");
case '}': advance(); return makeToken(ParserSym.RBRACE, "}");
case ')': advance(); return makeToken(ParserSym.RPAREN, ")");
case '+': advance(); return makeToken(ParserSym.PLUS, "+");
case '-': advance(); return makeToken(ParserSym.MINUS, "-");
case '*': advance(); return makeToken(ParserSym.ASTERISK, "*");
case '/': advance(); return makeToken(ParserSym.SOLIDUS, "/");
case '|':
advance();
switch (nextChar) {
case '|': advance(); return makeToken(ParserSym.CONCAT, "||");
default: return makeToken(ParserSym.UNKNOWN, "|");
}
case '"':
/* parse a double-quoted string */
id = new StringBuffer();
for (;;) {
advance();
switch (nextChar) {
case '"':
advance();
if (nextChar == '"') {
// " escaped with "
id.append('"');
break;
} else {
// end of string
return makeString(id.toString());
}
case -1:
return makeString(id.toString());
default:
id.append((char)nextChar);
}
}
case '\'':
if (previousSymbol == ParserSym.AS) {
inFormula = true;
}
/* parse a single-quoted string */
id = new StringBuffer();
for (;;) {
advance();
switch (nextChar) {
case '\'':
advance();
if (nextChar == '\'') {
// " escaped with "
id.append('\'');
break;
} else {
// end of string
return makeString(id.toString());
}
case -1:
return makeString(id.toString());
default:
id.append((char)nextChar);
}
}
case -1:
// we're done
return makeToken(ParserSym.EOF, "EOF");
default:
// If it's whitespace, skip over it.
// (When we switch to JDK 1.5, use Character.isWhitespace(int);
// til then, there's just Character.isWhitespace(char).)
if (nextChar <= Character.MAX_VALUE &&
Character.isWhitespace((char) nextChar)) {
// fall through
} else {
// everything else is an error
throw new RuntimeException(
"Unexpected character '" + (char) nextChar + "'");
}
case ' ':
case '\t':
case '\n':
case '\r':
// whitespace can be ignored
iPrevChar = iChar;
advance();
break;
}
}
}
}
// End Scanner.java
|
//                         B a r s B u i l d e r                          //
// This software is released under the terms of the GNU General Public    //
// License. Please contact the author to report bugs & suggestions.       //
package omr.sheet;
import omr.Main;
import omr.ProcessingException;
import omr.check.Check;
import omr.check.CheckSuite;
import omr.check.Checkable;
import omr.check.FailureResult;
import omr.check.Result;
import omr.check.SuccessResult;
import omr.constant.Constant;
import omr.constant.ConstantSet;
import omr.glyph.Glyph;
import omr.glyph.GlyphDirectory;
import omr.glyph.GlyphLag;
import omr.glyph.GlyphSection;
import omr.glyph.Shape;
import omr.glyph.ui.GlyphBoard;
import omr.lag.JunctionDeltaPolicy;
import omr.lag.LagBuilder;
import omr.lag.VerticalOrientation;
import omr.score.Barline;
import omr.score.Measure;
import omr.score.Score;
import omr.score.ScoreView;
import omr.score.Stave;
import omr.score.System;
import omr.stick.Stick;
import omr.stick.StickSection;
import omr.stick.StickView;
import omr.ui.BoardsPane;
import omr.ui.FilterBoard;
import omr.ui.PixelBoard;
import omr.ui.ScrollLagView;
import omr.ui.SectionBoard;
import omr.ui.Zoom;
import omr.util.Dumper;
import omr.util.Logger;
import omr.util.TreeNode;
import java.awt.Color;
import java.awt.Graphics;
import java.awt.GridLayout;
import java.awt.Point;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
/**
* Class <code>BarsBuilder</code> handles the vertical lines that are
* recognized as bar lines.
*
* <p> Input is provided by the list of vertical sticks retrieved by the
* preceding step.
*
* <p> Output is the collection of detected Bar lines.
*
* @author Hervé Bitteur
* @version $Id$
*/
public class BarsBuilder
implements GlyphDirectory
{
private static final Constants constants = new Constants();
private static final Logger logger = Logger.getLogger(BarsBuilder.class);
// Success
private static final SuccessResult BAR_SYSTEM_DEFINING = new SuccessResult("Bar-SystemDefining");
private static final SuccessResult BAR_NOT_SYSTEM_DEFINING = new SuccessResult("Bar-NotSystemDefining");
// Failure
private static final FailureResult TOO_SHORT_BAR = new FailureResult("Bar-TooShort");
private static final FailureResult OUTSIDE_STAVE_WIDTH = new FailureResult("Bar-OutsideStaveWidth");
private static final FailureResult NOT_STAVE_ANCHORED = new FailureResult("Bar-NotStaveAnchored");
private static final FailureResult NOT_SYSTEM_ALIGNED = new FailureResult("Bar-NotSystemAligned");
private static final FailureResult NOT_WITHIN_SYSTEM = new FailureResult("Bar-NotWithinSystem");
private static final FailureResult SHORTER_THAN_STAVE_HEIGHT = new FailureResult("Bar-ShorterThanStaveHeight");
private static final FailureResult THICK_BAR_NOT_ALIGNED = new FailureResult("Bar-ThickBarNotAligned");
private static final FailureResult TOO_HIGH_ADJACENCY = new FailureResult("Bar-TooHighAdjacency");
private static final FailureResult CHUNK_AT_TOP = new FailureResult("Bar-ChunkAtTop");
private static final FailureResult CHUNK_AT_BOTTOM = new FailureResult("Bar-ChunkAtBottom");
private static final FailureResult CANCELLED = new FailureResult("Bar-Cancelled");
// Underlying lag
private GlyphLag vLag;
// Bars area, with retrieved vertical sticks
private VerticalArea barsArea;
// List of found bars
private List<BarInfo> bars = new ArrayList<BarInfo>();
// Retrieved systems
private List<SystemInfo> systems = new ArrayList<SystemInfo>();
// Unused vertical sticks
private List<Stick> clutter;
// Cached data
private final Sheet sheet;
private Score score;
private Scale scale;
private int basicCoreLength;
// Lag view on bars, if so desired
private MyLagView lagView;
// Related glyph board
private GlyphBoard glyphBoard;
// Suite of checks
private BarCheckSuite suite;
// BarsBuilder //
/**
* Prepare a bar retriever on the provided sheet
*
* @param sheet the sheet to process
*/
public BarsBuilder (Sheet sheet)
{
this.sheet = sheet;
}
// getEntity //
public Glyph getEntity (Integer id)
{
return vLag.getGlyph(id);
}
// buildInfo //
/**
* Retrieve and store the bars information on the provided sheet
*
* @return the built Bars info
* @throws ProcessingException raised when step processing must stop,
* due to encountered error
*/
public List<BarInfo> buildInfo ()
throws ProcessingException
{
// Stuff to be made available
scale = sheet.getScale();
//sheet.getStaves();
sheet.getHorizontals();
// Retrieve the vertical lag of runs
vLag = new GlyphLag(new VerticalOrientation());
vLag.setId("vLag");
vLag.setVertexClass(StickSection.class);
new LagBuilder<GlyphLag,GlyphSection>().rip
(vLag,
sheet.getPicture(),
0, // minRunLength
new JunctionDeltaPolicy(constants.maxDeltaLength
.getValue())); // maxDeltaLength
sheet.setVerticalLag(vLag);
// Retrieve (vertical) sticks
barsArea = new VerticalArea(sheet, vLag,
scale.fracToPixels(constants.maxBarThickness));
// Allocate score
createScore();
// Retrieve true bar lines and thus SystemInfos
retrieveBarLines();
// Build score Systems & Staves from SystemInfos
buildSystemsAndStaves();
// Build Measures
buildMeasures();
// Check Measures
checkMeasures();
// Erase bar pixels from picture
//////eraseBars();
// Update score internal data
score.computeChildren();
// Assign bar line shape
for (BarInfo info : bars){
Stick stick = info.getStick();
stick.setShape(isThickBar(stick) ?
Shape.THICK_BAR_LINE :
Shape.THIN_BAR_LINE);
}
// Report number of measures retrieved
logger.info(score.getLastSystem().getLastMeasureId()
+ " measure(s) found");
// Split everything, including horizontals, per system
SystemSplit.computeSystemLimits(sheet);
SystemSplit.splitHorizontals(sheet);
SystemSplit.splitBars(sheet, bars);
        // Display the resulting stick area if so asked for
if (constants.displayFrame.getValue() &&
Main.getJui() != null) {
displayFrame();
}
return bars;
}
// getBars //
/**
* Report the list of bar lines retrieved
*
* @return the bar lines
*/
public List<BarInfo> getBars ()
{
return bars;
}
// getLag //
/**
* Report the vertical lag used for this step
*
* @return the vertical lag used for bar source
*/
public GlyphLag getLag ()
{
return vLag;
}
// isThickBar //
/**
* Check if the stick/bar is a thick one
*
* @param stick the bar stick to check
*
* @return true if thick
*/
private boolean isThickBar (Stick stick)
{
// Max width of a thin bar line, otherwise this must be a thick bar
final int maxThinWidth = scale.fracToPixels(constants.maxThinWidth);
final int meanWidth = (int) Math.rint(stick.getWeight()
/ stick.getLength());
return meanWidth > maxThinWidth;
}
// getSystemOf //
/**
* Report the SystemInfo that contains the given BarInfo.
*
* @param bar the given BarInfo
* @param sheet the sheet context
* @return the containing SystemInfo, null if not found
*/
public SystemInfo getSystemOf (BarInfo bar,
Sheet sheet)
{
int topIdx = bar.getTopIdx();
int botIdx = bar.getBotIdx();
if (topIdx == -1) {
topIdx = botIdx;
}
if (botIdx == -1) {
botIdx = topIdx;
}
Score score = sheet.getScore();
if (score == null) {
return null;
}
for (Iterator it = score.getSystems().iterator(); it.hasNext();) {
System system = (omr.score.System) it.next();
SystemInfo systemInfo = system.getInfo();
if ((systemInfo.getStartIdx() <= botIdx)
&& (systemInfo.getStopIdx() >= topIdx)) {
return systemInfo;
}
}
// Not found
return null;
}
// buildMeasures //
/**
* Bar lines are first sorted according to their abscissa, then we run
* additional checks on each bar line, since we now know its enclosing
* system. If OK, then we add a corresponding measure in each stave.
*/
private void buildMeasures ()
{
final int maxDy = scale.fracToPixels(constants.maxBarOffset);
// Sort bar lines by increasing abscissa
Collections.sort(bars,
new Comparator<BarInfo>()
{
public int compare (BarInfo b1,
BarInfo b2)
{
return b1.getStick().getMidPos()
- b2.getStick().getMidPos();
}
});
// Measures building (Bars are already sorted by increasing
// abscissa)
for (Iterator<BarInfo> bit = bars.iterator(); bit.hasNext();) {
BarInfo barInfo = bit.next();
// Determine the system this bar line belongs to
SystemInfo systemInfo = getSystemOf(barInfo, sheet);
if (systemInfo == null) { // Should not occur, but that's safer
logger.warning("Bar not belonging to any system");
logger.debug("barInfo = " + barInfo);
Dumper.dump(barInfo);
Dumper.dump(barInfo.getStick());
continue;
}
omr.score.System system = systemInfo.getScoreSystem();
            // We don't check that the bar does not start before the first
            // stave; that would be too restrictive because of alternate
            // endings. We do, however, check that the bar does not end
            // after the last stave of the system.
int barAbscissa = barInfo.getStick().getMidPos();
int systemBottom = system.getLastStave().getInfo().getLastLine()
.getLine().yAt(barAbscissa);
if ((barInfo.getStick().getStop() - systemBottom) > maxDy) {
if (logger.isDebugEnabled()) {
logger.debug("Bar stopping too low");
}
barInfo.getStick().setResult(NOT_WITHIN_SYSTEM);
bit.remove();
continue;
}
// We add a measure in each stave of this system, provided that
// the stave is embraced by the bar line
for (TreeNode node : system.getStaves()) {
Stave stave = (Stave) node;
if (isStaveEmbraced (stave, barInfo)) {
if (logger.isDebugEnabled()) {
logger.debug("Creating measure for bar-line " + barInfo.getStick());
}
new Measure(barInfo, stave, Barline.SINGLE,
scale.pixelsToUnits(barAbscissa)
- stave.getLeft(),
scale.pixelsToUnits(barAbscissa)
- stave.getLeft(), false); // invented ?
}
}
}
}
// isStaveEmbraced //
private boolean isStaveEmbraced (Stave stave,
BarInfo bar)
{
// Extrema of bar, units
int topUnit = scale.pixelsToUnits(bar.getStick().getStart());
int botUnit = scale.pixelsToUnits(bar.getStick().getStop());
// Check that middle of stave is within bar top & bottom
final int midStave = stave.getTop() + (stave.getSize() / 2);
return (midStave > topUnit) && (midStave < botUnit);
}
// buildSystemInfos //
/**
     * Knowing the starting stave index of each stave system, we are able
* to allocate and describe the proper number of systems in the score.
*
* @param starts indexed by any stave, to give the stave index of the
* containing system. For a system with just one stave,
* both indices are equal. For a system of more than 1
* stave, the indices differ.
*
* @throws omr.ProcessingException raised if processing failed
*/
private void buildSystemInfos (int[] starts)
throws omr.ProcessingException
{
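        // Example: starts = {0, 0, 2, 2} yields two systems, one covering
        // staves 0..1 and one covering staves 2..3.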
int id = 0; // Id for created SystemInfo's
int start = -1;
for (int i = 0; i < starts.length; i++) {
if (starts[i] != start) {
if (start != -1) {
systems.add(new SystemInfo(++id, sheet, start, starts[i] - 1));
}
start = i;
}
}
systems.add(new SystemInfo(++id, sheet, start, starts.length - 1));
if (logger.isDebugEnabled()) {
for (SystemInfo info : systems) {
Dumper.dump(info);
}
}
// Finally, store this list into the sheet instance
sheet.setSystems(systems);
}
// buildSystemsAndStaves //
/**
* For each SystemInfo, build the corresponding System entity with all
* its depending Staves
*/
private void buildSystemsAndStaves ()
{
// Systems
for (SystemInfo info : systems) {
// Allocate the system
omr.score.System system =
new omr.score.System(info, score,
scale.pixelsToUnits(info.getTop()),
scale.pixelsToUnits(info.getLeft()),
scale.pixelsToUnits(info.getWidth()),
scale.pixelsToUnits(info.getDeltaY()));
// Set the link SystemInfo -> System
info.setScoreSystem(system);
// Allocate the staves in this system
int staveLink = 0;
for (StaveInfo set : info.getStaves()) {
LineInfo line = set.getFirstLine();
new Stave(set, system,
scale.pixelsToUnits(line.getLine().yAt(line.getLeft())),
scale.pixelsToUnits(set.getLeft()),
scale.pixelsToUnits(set.getRight() - set.getLeft()),
64, // Staff vertical size in units
staveLink++);
}
}
}
// checkBarAlignments //
/**
* Check alignment of each measure of each stave with the other stave
* measures, a test that needs several staves in the system
*
* @param system the system to check
*/
private void checkBarAlignments (omr.score.System system)
{
if (system.getStaves().size() > 1) {
int maxShiftDx = scale.fracToPixels(constants.maxAlignShiftDx);
for (Iterator sit = system.getStaves().iterator(); sit.hasNext();) {
Stave stave = (Stave) sit.next();
for (Iterator mit = stave.getMeasures().iterator();
mit.hasNext();) {
Measure measure = (Measure) mit.next();
// Compare the abscissa with corresponding position in
// the other staves
int x = measure.getLeftlinex();
for (Iterator it = system.getStaves().iterator();
it.hasNext();) {
Stave stv = (Stave) it.next();
if (stv == stave) {
continue;
}
if (null == stv.getMeasureAt(x, maxShiftDx)) {
if (logger.isDebugEnabled()) {
logger.debug("Singular measure removed: "
+ Dumper.dumpOf(measure));
}
// Remove the false bar info
for (BarInfo info : measure.getInfos()) {
Stick stick = info.getStick();
stick.setResult(NOT_SYSTEM_ALIGNED);
bars.remove(info);
}
// Remove the false measure
mit.remove();
break;
}
}
}
}
}
}
// checkEndingBar //
/**
* Use ending bar line if any, to adjust the right abscissa of the
* system and its staves.
*
* @param system the system to check
*/
private void checkEndingBar (omr.score.System system)
{
Stave stave = system.getFirstStave();
Measure measure = stave.getLastMeasure();
int lastX = measure.getRightlinex();
int minWidth = scale.fracToPixels(constants.minMeasureWidth);
if ((stave.getWidth() - lastX) < minWidth) {
if (logger.isDebugEnabled()) {
logger.debug("Adjusting EndingBar " + system);
}
// Adjust end of system & stave(s) to this one
system.setWidth(lastX);
for (Iterator sit = system.getStaves().iterator(); sit.hasNext();) {
Stave stv = (Stave) sit.next();
stv.setWidth(system.getWidth());
}
}
}
// checkMeasures //
/**
     * Check measure reality, using a set of additional tests.
*/
private void checkMeasures ()
{
// Check are performed on a system basis
for (Iterator sysit = score.getSystems().iterator();
sysit.hasNext();) {
omr.score.System system = (omr.score.System) sysit.next();
// Check alignment of each measure of each stave with the other
// stave measures, a test that needs several staves in the
// system
checkBarAlignments(system);
// Detect very narrow measures which in fact indicate double
// bar lines.
mergeLines(system);
// First barline may be just the beginning of the stave, so do
// not count the very first bar line, which in general defines
// the beginning of the stave rather than the end of a measure,
// but use it to precisely define the left abscissa of the
// system and all its contained staves.
removeStartingBar(system);
// Similarly, use the very last bar line, which generally ends
// the system, to define the right abscissa of the system and
// its staves.
checkEndingBar(system);
}
}
// createScore //
private void createScore ()
{
if (logger.isDebugEnabled()) {
logger.debug("Allocating score");
}
score = new Score(scale.pixelsToUnits(sheet.getWidth()),
scale.pixelsToUnits(sheet.getHeight()),
(int) Math.rint(sheet.getSkew().angle()
* ScoreView.BASE),
scale.spacing(), sheet.getPath());
// Mutual referencing
score.setSheet(sheet);
sheet.setScore(score);
}
// displayFrame //
private void displayFrame ()
{
lagView = new MyLagView(vLag);
lagView.colorize();
// Ids of recognized glyphs
List<Integer> knownIds = new ArrayList<Integer>(bars.size() +1);
knownIds.add(GlyphBoard.NO_VALUE);
for (BarInfo bar : bars) {
knownIds.add(new Integer(bar.getStick().getId()));
}
glyphBoard = new GlyphBoard(vLag.getLastGlyphId(), knownIds);
BoardsPane boardsPane = new BoardsPane
(lagView,
new PixelBoard(),
new SectionBoard(vLag.getLastVertexId()),
glyphBoard,
new FilterBoard());
// Create a hosting frame for the view
ScrollLagView slv = new ScrollLagView(lagView);
sheet.getAssembly().addViewTab("Bars", slv, boardsPane);
}
// // eraseBars //
// private void eraseBars ()
// Picture picture = sheet.getPicture();
// for (BarInfo bar : bars) {
// Stick stick = bar.getStick();
// stick.erasePixels(picture);
// mergeLines //
/**
* Check whether two close bar lines are not in fact double lines (with
* variants)
*
* @param system the system to check
*/
private void mergeLines (omr.score.System system)
{
int maxDoubleDx = scale.fracToPixels(constants.maxDoubleBarDx);
for (Iterator sit = system.getStaves().iterator(); sit.hasNext();) {
Stave stave = (Stave) sit.next();
Measure prevMeasure = null;
for (Iterator mit = stave.getMeasures().iterator();
mit.hasNext();) {
Measure measure = (Measure) mit.next();
if (prevMeasure != null) {
final int measureWidth = measure.getLeftlinex()
- prevMeasure.getLeftlinex();
if (measureWidth <= maxDoubleDx) {
BarInfo bar = measure.getInfos().get(0);
if (isThickBar(bar.getStick())) {
if (logger.isDebugEnabled()) {
logger.debug("Merging a thinThick bar");
}
prevMeasure.setLinetype(Barline.THIN_THICK);
} else {
if (logger.isDebugEnabled()) {
logger.debug("Merging a double bar");
}
prevMeasure.setLinetype(Barline.DOUBLE);
}
prevMeasure.setRightlinex(measure.getRightlinex());
prevMeasure.addInfos(measure.getInfos());
mit.remove();
} else {
prevMeasure = measure;
}
} else {
prevMeasure = measure;
}
}
}
}
// removeStartingBar //
/**
     * We associate measures only with their ending bar line(s), so the
     * starting bar of a stave (or system) does not end a measure; we thus
     * have to remove the measure that was first associated with it.
*
* @param system the system whose starting measure has to be checked
* (the check is based on the width of this false
* measure)
*/
private void removeStartingBar (omr.score.System system)
{
Stave stave = system.getFirstStave();
Measure measure = stave.getFirstMeasure();
BarInfo bar = measure.getInfos().get(0);
int firstX = measure.getLeftlinex();
int minWidth = scale.fracToPixels(constants.minMeasureWidth);
if (firstX < minWidth) {
// Adjust beginning of system to this one
if (logger.isDebugEnabled()) {
logger.debug("Adjusting firstX=" + firstX + " " + system);
}
system.setLeft(system.getLeft() + firstX);
// Adjust beginning of all stave(s) to this one
// Remove this false "measure" in all staves of the system
for (Iterator sit = system.getStaves().iterator(); sit.hasNext();) {
Stave stv = (Stave) sit.next();
int staveDx = system.getLeft() - stv.getLeft();
stv.setLeft(system.getLeft());
// Remove this first measure
stv.getMeasures().remove(0);
// Set the bar as starting bar for the stave
stv.setStartingBar(bar);
// Update other bar lines abscissae accordingly
for (Iterator mit = stv.getMeasures().iterator();
mit.hasNext();) {
Measure meas = (Measure) mit.next();
meas.setLeftlinex(meas.getLeftlinex() - staveDx);
meas.setRightlinex(meas.getRightlinex() - staveDx);
}
}
}
}
/**
* Remove a bar together with all its related entities. This means
* removing reference in the bars list of this builder, reference in
* the containing SystemInfo, reference in the Measure it ends, and
* removing this Measure itself if this (false) bar was the only ending
* bar left for the Measure. The related stick must also be assigned a
* failure result.
*
* @param glyph the (false) bar glyph to deassign
*/
public void deassignBarGlyph (Glyph glyph)
{
BarInfo bar = getBarOf(glyph);
if (bar == null) {
return;
} else {
logger.info("Removing a " + glyph.getShape());
}
// Related stick has to be freed
bar.getStick().setShape(null);
bar.getStick().setResult(CANCELLED);
// Remove from the internal all-bars list
bars.remove(bar);
// Remove from the containing SystemInfo
SystemInfo system = getSystemOf(bar, sheet);
if (system == null) {
return;
} else {
system.getBars().remove(bar);
}
// Remove from the containing Measure
System scoreSystem = system.getScoreSystem();
for (Iterator it = scoreSystem.getStaves().iterator(); it.hasNext();) {
Stave stave = (Stave) it.next();
if (isStaveEmbraced (stave, bar)) {
for (Iterator mit = stave.getMeasures().iterator();
mit.hasNext();) {
Measure measure = (Measure) mit.next();
for (Iterator bit = measure.getInfos().iterator();
bit.hasNext();) {
BarInfo info = (BarInfo) bit.next();
if (info == bar) {
// Remove the bar info
if (logger.isDebugEnabled()) {
logger.debug("Removing " + info +
" from " + measure);
}
bit.remove();
// Remove measure as well ?
if (measure.getInfos().size() == 0) {
if (logger.isDebugEnabled()) {
logger.debug("Removing " + measure);
}
mit.remove();
}
break;
}
}
}
}
}
// Update the glyph board
if (glyphBoard != null) {
glyphBoard.update(bar.getStick());
}
// Update the view accordingly
if (lagView != null) {
lagView.colorize();
lagView.repaint();
}
}
// getBarOf //
private BarInfo getBarOf (Glyph glyph)
{
for (BarInfo bar : bars) {
if (bar.getStick() == glyph) {
return bar;
}
}
logger.warning("Cannot find bar for " + glyph);
return null;
}
// retrieveBarLines //
/**
* From the list of vertical sticks, this method uses several tests
* based on stick location, and stick shape (the test is based on
* adjacency, it should be improved), to detect true bar lines.
*
* <p> The output is thus a filled 'bars' list of bar lines, and the
* list of SystemInfos which describe the parameters of each
* system.
*
* @throws ProcessingException Raised when a sanity check on systems
* found has failed
*/
private void retrieveBarLines ()
throws ProcessingException
{
// The list of candidate vertical sticks
clutter = new ArrayList<Stick>(barsArea.getSticks());
if (logger.isDebugEnabled()) {
logger.debug(clutter.size() + " sticks to check");
}
// A way to tell the System for each stave, by providing the stave
// index of the starting stave of the containing system.
int[] starts = new int[sheet.getStaves().size()];
        for (int i = starts.length - 1; i >= 0; i--) {
            starts[i] = -1;
        }
suite = new BarCheckSuite();
double minResult = constants.minCheckResult.getValue();
// Check each candidate stick in turn
for (Stick stick : clutter) {
// Allocate the candidate context, and pass the whole check
// suite
Context context = new Context(stick);
double res = suite.pass(context);
if (logger.isDebugEnabled()) {
logger.debug("suite => " + res + " for " + stick);
}
if (res >= minResult) {
// OK, we insert this candidate stick as a true bars
// member.
bars.add(new BarInfo(stick, context.topIdx, context.botIdx));
// Bars that define a system (they start AND end with
// staves limits)
if ((context.topIdx != -1) && (context.botIdx != -1)) {
for (int i = context.topIdx; i <= context.botIdx; i++) {
if (starts[i] == -1) {
starts[i] = context.topIdx;
}
}
stick.setResult(BAR_SYSTEM_DEFINING);
if (logger.isDebugEnabled()) {
logger.debug("System-defining Bar line from stave "
+ context.topIdx + " to stave "
+ context.botIdx + " " + stick);
}
} else {
if (logger.isDebugEnabled()) {
logger.debug("Non-System-defining Bar line "
+ ((context.topIdx != -1)
? (" topIdx=" + context.topIdx)
: "")
+ ((context.botIdx != -1)
? (" botIdx=" + context.botIdx)
: ""));
}
stick.setResult(BAR_NOT_SYSTEM_DEFINING);
}
}
}
// Sanity check on the systems found
for (int i = 0; i < starts.length; i++) {
if (logger.isDebugEnabled()) {
logger.debug("stave " + i + " system " + starts[i]);
}
if (starts[i] == -1) {
logger.warning("No system found for stave " + i);
throw new ProcessingException();
}
}
// System retrieval
buildSystemInfos(starts);
}
// MyLagView //
private class MyLagView
extends StickView
{
private MyLagView (GlyphLag lag)
{
super(lag, null, BarsBuilder.this);
}
// colorize //
public void colorize ()
{
super.colorize();
// Determine my view index in the lag views
final int viewIndex = vLag.getViews().indexOf(this);
// All remaining vertical sticks clutter
for (Stick stick : clutter) {
stick.colorize(lag, viewIndex, Color.red);
}
// Recognized bar lines
for (BarInfo info : bars) {
info.getStick().colorize(lag, viewIndex, Color.yellow);
}
}
// renderItems //
public void renderItems (Graphics g)
{
Zoom z = getZoom();
// Render all physical info known so far, which is just the
// staff line info, lineset by lineset
sheet.render(g, z);
// Draw the contour of bar lines
for (BarInfo info : bars) {
info.getStick().renderContour(g, z);
}
}
// glyphSelected //
@Override
protected void glyphSelected (Glyph glyph,
Point pt)
{
if (glyph instanceof Stick) {
suite = new BarCheckSuite(); // To get a fresh suite
Stick stick = (Stick) glyph;
filterMonitor.tellHtml(suite.passHtml(null,
new Context(stick)));
}
}
// deassignGlyph //
@Override
public void deassignGlyph (Glyph glyph)
{
if (glyph.getShape() == Shape.THICK_BAR_LINE ||
glyph.getShape() == Shape.THIN_BAR_LINE) {
deassignBarGlyph(glyph);
} else {
logger.warning("No deassign meant for " + glyph.getShape() + " glyph");
}
}
}
// TopCheck //
private class TopCheck
extends Check<Context>
{
protected TopCheck ()
{
super("Top", constants.maxStaveShiftDyLow.getValue(),
constants.maxStaveShiftDyHigh.getValue(), false, null);
}
// Retrieve the distance with proper stave border
protected double getValue (Context context)
{
Stick stick = context.stick;
int start = stick.getStart();
// Which stave area contains the top of the stick?
context.topArea = sheet.getStaveIndexAtY(start);
StaveInfo area = sheet.getStaves().get(context.topArea);
// How far are we from the start of the stave?
int staveTop = area.getFirstLine().getLine().yAt(stick.getMidPos());
double dy = sheet.getScale().pixelsToFrac(Math.abs(staveTop
- start));
// Side-effect
if (dy <= getLow()) {
context.topIdx = context.topArea;
}
return dy;
}
}
// BottomCheck //
private class BottomCheck
extends Check<Context>
{
protected BottomCheck ()
{
super("Bottom", constants.maxStaveShiftDyLow.getValue(),
constants.maxStaveShiftDyHigh.getValue(), false, null);
}
// Retrieve the distance with proper stave border
protected double getValue (Context context)
{
Stick stick = context.stick;
int stop = stick.getStop();
// Which stave area contains the bottom of the stick?
context.bottomArea = sheet.getStaveIndexAtY(stop);
StaveInfo area = sheet.getStaves().get(context.bottomArea);
// How far are we from the stop of the stave?
int staveBottom = area.getLastLine().getLine().yAt(stick
.getMidPos());
double dy = sheet.getScale().pixelsToFrac(Math.abs(staveBottom
- stop));
// Side-effect
if (dy <= getLow()) {
context.botIdx = context.bottomArea;
}
return dy;
}
}
// AnchorCheck //
private class AnchorCheck
extends Check<Context>
{
protected AnchorCheck ()
{
super("Anchor", 0.5, 0.5, true, NOT_STAVE_ANCHORED);
}
// Make sure that at least top or bottom are stave anchors, and
// that both are stave anchors in the case of thick bars.
protected double getValue (Context context)
{
Stick stick = context.stick;
context.isThick = isThickBar(stick);
if (context.isThick) {
if ((context.topIdx != -1) && (context.botIdx != -1)) {
return 1;
}
} else {
if ((context.topIdx != -1) || (context.botIdx != -1)) {
return 1;
}
}
return 0;
}
}
// MinLengthCheck //
private class MinLengthCheck
extends Check<Context>
{
protected MinLengthCheck ()
{
super("MinLength", -constants.maxStaveShiftDyLow.getValue(), 0,
true, TOO_SHORT_BAR);
}
// Retrieve the length data
protected double getValue (Context context)
{
Stick stick = context.stick;
int x = stick.getMidPos();
int height = Integer.MAX_VALUE;
// Check wrt every stave in the stick range
for (int i = context.topArea; i <= context.bottomArea; i++) {
StaveInfo area = sheet.getStaves().get(i);
height = Math.min(height, area.getHeight());
}
return sheet.getScale().pixelsToFrac(stick.getLength() - height);
}
}
// LeftCheck //
private class LeftCheck
extends Check<Context>
{
protected LeftCheck ()
{
super("Left", 0, 0, true, OUTSIDE_STAVE_WIDTH);
}
// Retrieve the stick abscissa
protected double getValue (Context context)
{
Stick stick = context.stick;
int x = stick.getMidPos();
int dist = Integer.MAX_VALUE;
// Check wrt every stave in the stick range
for (int i = context.topArea; i <= context.bottomArea; i++) {
StaveInfo area = sheet.getStaves().get(i);
dist = Math.min(dist, x - area.getLeft());
}
return sheet.getScale().pixelsToFrac(dist);
}
}
// RightCheck //
private class RightCheck
extends Check<Context>
{
protected RightCheck ()
{
super("Right", 0, 0, true, OUTSIDE_STAVE_WIDTH);
}
// Retrieve the stick abscissa
protected double getValue (Context context)
{
Stick stick = context.stick;
int x = stick.getMidPos();
int dist = Integer.MAX_VALUE;
// Check wrt every stave in the stick range
for (int i = context.topArea; i <= context.bottomArea; i++) {
StaveInfo area = sheet.getStaves().get(i);
dist = Math.min(dist, area.getRight() - x);
}
return sheet.getScale().pixelsToFrac(dist);
}
}
// TopChunkCheck //
/**
* Class <code>TopChunkCheck</code> checks for lack of chunk at top
*/
private class TopChunkCheck
extends Check<Context>
{
// Half-dimensions for window at top, checking for chunks
private final int nWidth;
private final int nHeight;
protected TopChunkCheck ()
{
super("TopChunk", 0, 0, false, CHUNK_AT_TOP);
// Adjust chunk window according to system scale (problem, we
// have sheet scale and stave scale, not system scale...)
Scale scale = sheet.getScale();
nWidth = scale.fracToPixels(constants.chunkWidth);
nHeight = scale.fracToPixels(constants.chunkHeight);
int area = 4 * nWidth * nHeight;
setLowHigh(area * constants.chunkRatioLow.getValue(),
area * constants.chunkRatioHigh.getValue());
}
protected double getValue (Context context)
{
Stick stick = context.stick;
// Retrieve the stick chunk at top
return stick.getAliensAtStart(nHeight, nWidth);
}
}
// BottomChunkCheck //
/**
* Class <code>BottomChunkCheck</code> checks for lack of chunk at
* bottom
*/
private class BottomChunkCheck
extends Check<Context>
{
// Half-dimensions for window at bottom, checking for chunks
private final int nWidth;
private final int nHeight;
protected BottomChunkCheck ()
{
super("BotChunk", 0, 0, false, CHUNK_AT_BOTTOM);
// Adjust chunk window according to system scale (problem, we
// have sheet scale and stave scale, not system scale...)
Scale scale = sheet.getScale();
nWidth = scale.fracToPixels(constants.chunkWidth);
nHeight = scale.fracToPixels(constants.chunkHeight);
int area = 4 * nWidth * nHeight;
setLowHigh(area * constants.chunkRatioLow.getValue(),
area * constants.chunkRatioHigh.getValue());
}
protected double getValue (Context context)
{
Stick stick = context.stick;
// Retrieve the stick chunk at bottom
return stick.getAliensAtStop(nHeight, nWidth);
}
}
// LeftAdjacencyCheck //
private static class LeftAdjacencyCheck
extends Check<Context>
{
protected LeftAdjacencyCheck ()
{
super("LeftAdj", constants.maxAdjacencyLow.getValue(),
constants.maxAdjacencyHigh.getValue(), false,
TOO_HIGH_ADJACENCY);
}
// Retrieve the adjacency value
protected double getValue (Context context)
{
Stick stick = context.stick;
int length = stick.getLength();
return (double) stick.getFirstStuck() / (double) length;
}
}
// RightAdjacencyCheck //
private static class RightAdjacencyCheck
extends Check<Context>
{
protected RightAdjacencyCheck ()
{
super("RightAdj", constants.maxAdjacencyLow.getValue(),
constants.maxAdjacencyHigh.getValue(), false,
TOO_HIGH_ADJACENCY);
}
// Retrieve the adjacency value
protected double getValue (Context context)
{
Stick stick = context.stick;
int length = stick.getLength();
return (double) stick.getLastStuck() / (double) length;
}
}
// Context //
private class Context
implements Checkable
{
Stick stick;
int topArea = -1;
int bottomArea = -1;
int topIdx = -1;
int botIdx = -1;
boolean isThick;
public Context (Stick stick)
{
this.stick = stick;
}
public void setResult (Result result)
{
stick.setResult(result);
}
}
// BarCheckSuite //
private class BarCheckSuite
extends CheckSuite<Context>
{
public BarCheckSuite ()
{
super("Bars", constants.minCheckResult.getValue());
// Be very careful with check order, because of side-effects
add(1, new TopCheck());
add(1, new BottomCheck());
add(1, new MinLengthCheck());
add(1, new AnchorCheck());
add(1, new LeftCheck());
add(1, new RightCheck());
add(1, new TopChunkCheck());
add(1, new BottomChunkCheck());
add(1, new LeftAdjacencyCheck());
add(1, new RightAdjacencyCheck());
if (logger.isDebugEnabled()) {
dump();
}
}
}
private static class Constants
extends ConstantSet
{
Scale.Fraction chunkHeight = new Scale.Fraction
(0.33,
"Height of half area to look for chunks");
Constant.Double chunkRatioLow = new Constant.Double
(0.25,
"LowMinimum ratio of alien pixels to detect chunks");
Constant.Double chunkRatioHigh = new Constant.Double
(0.25,
"HighMinimum ratio of alien pixels to detect chunks");
Scale.Fraction chunkWidth = new Scale.Fraction
(0.33,
"Width of half area to look for chunks");
Constant.Boolean displayFrame = new Constant.Boolean
(false,
"Should we display a frame on the vertical sticks");
Scale.Fraction maxAlignShiftDx = new Scale.Fraction
(0.2,
"Maximum horizontal shift in bars between staves in a system");
Constant.Double maxAdjacencyLow = new Constant.Double
(0.25d,
"LowMaximum adjacency ratio for a bar stick");
Constant.Double maxAdjacencyHigh = new Constant.Double
(0.25d,
"HighMaximum adjacency ratio for a bar stick");
Scale.Fraction maxBarOffset = new Scale.Fraction
(1.0,
"Vertical offset used to detect that a bar extends past a stave");
Constant.Integer maxDeltaLength = new Constant.Integer
(4,
"Maximum difference in run length to be part of the same section");
Scale.Fraction maxDoubleBarDx = new Scale.Fraction
(0.75,
"Maximum horizontal distance between the two bars of a double bar");
Scale.Fraction maxStaveShiftDyLow = new Scale.Fraction
(0.125,
"LowMaximum vertical distance between a bar edge and the stave line");
Scale.Fraction maxStaveShiftDyHigh = new Scale.Fraction
(10,
"HighMaximum vertical distance between a bar edge and the stave line");
Scale.Fraction maxBarThickness = new Scale.Fraction
(0.75,
"Maximum thickness of an interesting vertical stick");
Scale.Fraction maxThinWidth = new Scale.Fraction
(0.3,
"Maximum width of a normal bar, versus a thick bar");
Scale.Fraction minMeasureWidth = new Scale.Fraction
(0.75,
"Minimum width for a measure");
Scale.Fraction minForeWeight = new Scale.Fraction
(1.25,
"Minimum foreground weight for a section to be kept");
Constant.Double minCheckResult = new Constant.Double
(0.50,
"Minimum result for suite of check");
Constants ()
{
initialize();
}
}
}
|
package com.dua3.meja.samples;
import java.io.IOException;
import java.util.Map;
import javax.swing.JFrame;
import javax.swing.WindowConstants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.dua3.meja.model.CellStyle;
import com.dua3.meja.model.FillPattern;
import com.dua3.meja.model.Row;
import com.dua3.meja.model.Sheet;
import com.dua3.meja.model.Workbook;
import com.dua3.meja.model.WorkbookFactory;
import com.dua3.meja.model.generic.GenericWorkbookFactory;
import com.dua3.meja.ui.swing.SwingWorkbookView;
import com.dua3.utility.swing.SwingUtil;
import com.dua3.utility.Color;
/**
*
* @author a5xysq1
*/
public class KitchenSink extends JFrame {
private static final long serialVersionUID = 1L;
private static final Logger logger = LoggerFactory.getLogger(KitchenSink.class);
private static void addColorSheet(Workbook wb) {
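        // Builds a sheet named "colors": a frozen header row followed by one
        // row per entry of Color.palette(), with cells filled in the base,
        // darker and brighter variants of that color.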
Sheet sheet = wb.createSheet("colors");
Row row = sheet.getRow(0);
row.getCell(0).set("Color");
row.getCell(1).set("Code");
row.getCell(2).set("darker");
row.getCell(3).set("brighter");
sheet.splitAt(1, 0);
for (Map.Entry<String, Color> e : Color.palette().entrySet()) {
String name = e.getKey();
CellStyle cs = wb.getCellStyle(name);
cs.setFillFgColor(e.getValue());
cs.setFillPattern(FillPattern.SOLID);
CellStyle csDark = wb.getCellStyle(name + "Dark");
csDark.setFillFgColor(e.getValue().darker());
csDark.setFillPattern(FillPattern.SOLID);
CellStyle csBright = wb.getCellStyle(name + "Bright");
csBright.setFillFgColor(e.getValue().brighter());
csBright.setFillPattern(FillPattern.SOLID);
row = sheet.getRow(sheet.getRowCount());
row.getCell(0).set(name).setCellStyle(cs);
row.getCell(1).set(e.getValue().toString()).setCellStyle(cs);
row.getCell(2).set("darker").setCellStyle(csDark);
row.getCell(3).set("brighter").setCellStyle(csBright);
}
}
private static Workbook createWorkbook(WorkbookFactory<?> factory) {
Workbook wb = factory.create();
addColorSheet(wb);
return wb;
}
public static void main(String[] args) {
SwingUtil.setNativeLookAndFeel();
KitchenSink instance = new KitchenSink();
instance.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
instance.setVisible(true);
}
private Workbook wb;
public KitchenSink() {
super("Méja Kitchensink demo");
init();
}
@Override
public void dispose() {
try {
wb.close();
} catch (IOException e) {
logger.error("Exception occured while closing workbook.", e);
}
super.dispose();
}
private void init() {
setSize(800, 600);
final SwingWorkbookView view = new SwingWorkbookView();
this.setContentPane(view);
wb = createWorkbook(GenericWorkbookFactory.instance());
view.setWorkbook(wb);
}
}
|
package se.chalmers.watchme.database;
import android.database.sqlite.SQLiteDatabase;
import android.util.Log;
/**
* The table in the database that holds data for Movies.
*
* @author lisastenberg
*/
public class MoviesTable {
public static final String TABLE_MOVIES = "movies";
public static final String COLUMN_MOVIE_ID = "_id";
public static final String COLUMN_TITLE = "title";
public static final String COLUMN_RATING = "rating";
public static final String COLUMN_NOTE = "note";
public static final String COLUMN_DATE = "releasedate";
public static final String COLUMN_IMDB_ID = "imdbid";
private static final String CREATE_MOVIES_TABLE = "CREATE TABLE "
+ TABLE_MOVIES + "(" + COLUMN_MOVIE_ID + " INTEGER PRIMARY KEY,"
+ COLUMN_TITLE + " TEXT," + COLUMN_RATING + " INTEGER," + COLUMN_NOTE
+ " TEXT," + COLUMN_DATE + " INTEGER," + COLUMN_IMDB_ID +" TEXT" + ")";
public static void onCreate(SQLiteDatabase db) {
db.execSQL(CREATE_MOVIES_TABLE);
}
public static void onUpgrade(SQLiteDatabase db, int oldVersion,
int newVersion) {
Log.w(MoviesTable.class.getName(), "Upgrading database from version "
+ oldVersion + " to " + newVersion
+ ", which will destroy all old data");
db.execSQL("DROP TABLE IF EXISTS " + TABLE_MOVIES);
onCreate(db);
}
}
|
package swift.clocks;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.ListIterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import swift.exceptions.IncompatibleTypeException;
/**
* Class to represent version vectors with exceptions. This representation
* records the intervals of contiguous values.
*
* @author nmp
*/
public class VersionVectorWithExceptions implements CausalityClock {
static class Pair {
long from; // inclusive
long to; // inclusive
Pair() {
}
Pair(long from, long to) {
this.from = from;
this.to = to;
}
boolean includes(long l) {
return l >= from && l <= to;
}
boolean mergeFwd(Pair p) {
if (to == p.from + 1) {
to = p.to;
return true;
} else
return false;
}
boolean mergeBack(Pair p) {
if (from == p.to + 1) {
from = p.from;
return true;
} else
return false;
}
Pair duplicate() {
return new Pair(from, to);
}
}
private static final long serialVersionUID = 1L;
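    // Maps each site id to an ascending list of disjoint, inclusive intervals.
    // Example: recorded counters {1,2,3,5,6} are stored as [1-3],[5-6]; the
    // missing counter 4 is an "exception" to the plain version vector.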
protected Map<String, LinkedList<Pair>> vv;
protected int numPairs;
public VersionVectorWithExceptions() {
vv = new TreeMap<String, LinkedList<Pair>>();
numPairs = 0;
}
protected VersionVectorWithExceptions(VersionVectorWithExceptions v) {
vv = new TreeMap<String, LinkedList<Pair>>();
numPairs = v.numPairs;
Iterator<Entry<String, LinkedList<Pair>>> it = v.vv.entrySet().iterator();
while (it.hasNext()) {
Entry<String, LinkedList<Pair>> entry = it.next();
String key = entry.getKey();
LinkedList<Pair> l = entry.getValue();
vv.put(key, duplicateList(l));
}
}
protected VersionVectorWithExceptions(VersionVector v) {
vv = new TreeMap<String, LinkedList<Pair>>();
numPairs = 0;
Iterator<Entry<String, Long>> it = v.vv.entrySet().iterator();
while (it.hasNext()) {
Entry<String, Long> entry = it.next();
String key = entry.getKey();
LinkedList<Pair> nl = new LinkedList<Pair>();
nl.add(new Pair(0, entry.getValue()));
numPairs++;
vv.put(key, nl);
}
}
protected LinkedList<Pair> duplicateList(LinkedList<Pair> l) {
LinkedList<Pair> nl = new LinkedList<Pair>();
Iterator<Pair> it = l.iterator();
while (it.hasNext()) {
Pair p = it.next();
nl.addLast(p.duplicate());
}
return nl;
}
/**
* Checks if a given event clock is reflected in this clock
*
* @param c
* Event clock.
* @return Returns true if the given event clock is included in this
* causality clock.
*/
@Override
public boolean includes(Timestamp cc) {
LinkedList<Pair> l = vv.get(cc.getIdentifier());
if (l == null) {
return false;
}
long v = cc.getCounter();
ListIterator<Pair> it = l.listIterator(l.size());
while (it.hasPrevious()) {
Pair p = it.previous();
if (v > p.to)
return false;
if (v >= p.from)
return true;
}
return false;
}
/**
* Records an event.
*
* @param cc
* Timestamp to insert.
*/
public boolean record(Timestamp cc) {
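        // Walk the site's interval list from the highest pair downwards: return
        // false if the counter is already covered, extend an adjacent pair by one
        // (merging neighbours that become contiguous), or insert a new
        // single-element pair at the correct position.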
long v = cc.getCounter();
LinkedList<Pair> l = vv.get(cc.getIdentifier());
if (l == null) {
l = new LinkedList<Pair>();
vv.put(cc.getIdentifier(), l);
l.add(new Pair(v, v));
return true;
}
ListIterator<Pair> it = l.listIterator(l.size());
Pair p = null;
while (it.hasPrevious()) {
Pair oldP = p;
p = it.previous();
if( v >= p.from && v <= p.to)
return false;
if (v == p.to + 1) {
p.to = p.to + 1;
if (oldP != null && oldP.mergeBack(p)) {
it.remove();
                    numPairs--;
}
return true;
} else if (v > p.to) {
it.next();
it.add(new Pair(v, v));
numPairs++;
return true;
}
}
if (p != null) {
if (p.from == v + 1) {
p.from = v;
return true;
}
}
l.addFirst(new Pair(v, v));
numPairs++;
return true;
}
protected Pair advanceUntil(Pair p, Iterator<Pair> it, int val) {
if (val <= p.to)
return p;
while (it.hasNext()) {
p = it.next();
if (val > p.to)
continue;
return p;
}
return null;
}
/* protected CMP_CLOCK mergeOneEntryVV(String siteid, LinkedList<Pair> l0) {
LinkedList<Pair> l = vv.get(siteid);
if (l == null) {
l = duplicateList(l0);
numPairs = numPairs + l0.size();
vv.put(siteid, l);
return CMP_CLOCK.CMP_ISDOMINATED;
}
boolean thisHasMoreEntries = false;
boolean otherHasMoreEntries = false;
LinkedList<Pair> nl = new LinkedList<Pair>();
Iterator<Pair> it = l.iterator();
Iterator<Pair> it0 = l0.iterator();
Pair np = null;
Pair p = it.hasNext() ? it.next() : null;
Pair p0 = it0.hasNext() ? it0.next() : null;
numPairs = 0;
// last value that has been compared between the two sets
long v = Math.min(p == null ? Long.MAX_VALUE : p.from - 1, p0 == null ? Long.MAX_VALUE : p0.from - 1);
for (;;) {
if (p == null && p0 == null)
break;
if (p != null && p0 != null) {
if (p.from == p0.from && p.to == p0.to) {
nl.add(p);
numPairs++;
v = p.to;
p = null;
p0 = null;
} else {
if (p.from <= v) { // we are in the middle of p
if (p0.from > v + 1) {
thisHasMoreEntries = true;
}
if (p.to < p0.from) { // p ends before p0 start
v = p.to;
p = null;
} else {
if (p.to == p0.to) {
v = p.to;
p = null;
p0 = null;
} else if (p.to < p0.to) {
v = p.to;
p = null;
} else {
v = p0.to;
p0 = null;
}
}
} else if (p0.from <= v) { // we are in the middle of p0
if (p.from > v + 1) {
otherHasMoreEntries = true;
}
if (p0.to < p.from) { // p ends before p0 start
v = p0.to;
p0 = null;
} else {
if (p.to == p0.to) {
v = p.to;
p = null;
p0 = null;
} else if (p0.to < p.to) {
v = p0.to;
p0 = null;
} else {
v = p.to;
p = null;
}
}
} else { // need to advance to next intervals
if (p.from == p0.from) {
v = p.from;
} else if (p.from < p0.from) {
thisHasMoreEntries = true;
if (p.to < p0.from) {
v = p.to;
p = null;
} else {
if (p.to == p0.to) {
v = p.to;
p = null;
p0 = null;
} else if (p.to < p0.to) {
v = p.to;
p = null;
} else {
v = p0.to;
p0 = null;
}
}
} else {
otherHasMoreEntries = true;
if (p0.to < p.from) {
v = p0.to;
p0 = null;
} else {
if (p.to == p0.to) {
v = p.to;
p = null;
p0 = null;
} else if (p0.to < p.to) {
v = p0.to;
p0 = null;
} else {
v = p.to;
p = null;
}
}
}
}
}
} else if (p == null) {
otherHasMoreEntries = true;
break;
} else if (p0 == null) {
thisHasMoreEntries = true;
break;
}
if (p == null && it.hasNext()) {
p = it.next();
}
if (p0 == null && it0.hasNext()) {
p0 = it0.next();
}
}
vv.put(siteid, nl);
if (thisHasMoreEntries && otherHasMoreEntries) {
return CMP_CLOCK.CMP_CONCURRENT;
}
if (thisHasMoreEntries) {
return CMP_CLOCK.CMP_DOMINATES;
}
if (otherHasMoreEntries) {
return CMP_CLOCK.CMP_ISDOMINATED;
}
return CMP_CLOCK.CMP_EQUALS;
}
protected CMP_CLOCK mergeOneEntryVV(String siteid, LinkedList<Pair> l0) {
LinkedList<Pair> l = vv.get(siteid);
if (l == null) {
l = duplicateList(l0);
numPairs = numPairs + l0.size();
vv.put(siteid, l);
return CMP_CLOCK.CMP_ISDOMINATED;
}
CMP_CLOCK cmp = compareOneEntryVV(siteid, l0);
numPairs = numPairs - l.size();
LinkedList<Pair> nl = new LinkedList<Pair>();
Iterator<Pair> it = l.iterator();
Iterator<Pair> it0 = l0.iterator();
Pair p = it.hasNext() ? it.next() : null;
Pair p0 = it0.hasNext() ? it0.next() : null;
Pair np = null;
for (;;) {
boolean hasChanged = false;
if (p == null && p0 == null)
break;
if (np == null) {
if (p != null && p0 != null) {
if (p.from <= p0.from) {
np = p;
p = null;
hasChanged = true;
} else {
np = p0.duplicate();
p0 = null;
hasChanged = true;
}
} else if (p != null) {
np = p;
p = null;
hasChanged = true;
} else if (p0 != null) {
np = p0.duplicate();
p0 = null;
hasChanged = true;
}
}
if (p != null) {
if (np.to >= p.from - 1) {
if (p.to > np.to)
np.to = p.to;
p = null;
hasChanged = true;
}
}
if (p0 != null) {
if (np.to >= p0.from - 1) {
if (p0.to > np.to)
np.to = p0.to;
p0 = null;
hasChanged = true;
}
}
if (!hasChanged) {
nl.add(np);
numPairs++;
np = null;
}
if (p == null && it.hasNext()) {
p = it.next();
}
if (p0 == null && it0.hasNext()) {
p0 = it0.next();
}
}
if (np != null) {
nl.add(np);
numPairs++;
}
vv.put(siteid, nl);
return cmp;
}
*/
protected CMP_CLOCK mergeOneEntryVV(String siteid, LinkedList<Pair> l0) {
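        // Merge the interval list for one site: compute the comparison result
        // first, then sweep both sorted pair lists in order, coalescing
        // overlapping or contiguous intervals into a fresh list.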
LinkedList<Pair> l = vv.get(siteid);
if (l == null) {
l = duplicateList(l0);
numPairs = numPairs + l0.size();
vv.put(siteid, l);
return CMP_CLOCK.CMP_ISDOMINATED;
}
CMP_CLOCK cmp = compareOneEntryVV(siteid, l0);
numPairs = numPairs - l.size();
LinkedList<Pair> nl = new LinkedList<Pair>();
Iterator<Pair> it = l.iterator();
Iterator<Pair> it0 = l0.iterator();
Pair p = it.hasNext() ? it.next() : null;
Pair p0 = it0.hasNext() ? it0.next() : null;
Pair np = null;
for (;;) {
boolean hasChanged = false;
if (p == null && p0 == null)
break;
if (np == null) {
if (p != null && p0 != null) {
if (p.from <= p0.from) {
np = p;
p = null;
hasChanged = true;
} else {
np = p0.duplicate();
p0 = null;
hasChanged = true;
}
} else if (p != null) {
np = p;
p = null;
hasChanged = true;
} else if (p0 != null) {
np = p0.duplicate();
p0 = null;
hasChanged = true;
}
}
if (p != null) {
if (np.to >= p.from - 1) {
if (p.to > np.to)
np.to = p.to;
p = null;
hasChanged = true;
}
}
if (p0 != null) {
if (np.to >= p0.from - 1) {
if (p0.to > np.to)
np.to = p0.to;
p0 = null;
hasChanged = true;
}
}
if (!hasChanged) {
nl.add(np);
numPairs++;
np = null;
}
if (p == null && it.hasNext()) {
p = it.next();
}
if (p0 == null && it0.hasNext()) {
p0 = it0.next();
}
}
if (np != null) {
nl.add(np);
numPairs++;
}
vv.put(siteid, nl);
return cmp;
}
/**
* Merge this clock with the given c clock.
*
* @param c
* Clock to merge to
* @return Returns one of the following, based on the initial value of
* clocks:<br>
* CMP_EQUALS : if clocks were equal; <br>
* CMP_DOMINATES : if this clock dominated the given c clock; <br>
* CMP_ISDOMINATED : if this clock was dominated by the given c
* clock; <br>
     *         CMP_CONCURRENT : if this clock and the given c clock were
* concurrent; <br>
* @throws IncompatibleTypeException
* Case comparison cannot be made
*/
protected CMP_CLOCK mergeVV(VersionVectorWithExceptions cc) {
CMP_CLOCK result = CMP_CLOCK.CMP_EQUALS;
Iterator<Entry<String, LinkedList<Pair>>> it = cc.vv.entrySet().iterator();
while (it.hasNext()) {
Entry<String, LinkedList<Pair>> e = it.next();
CMP_CLOCK partialResult = mergeOneEntryVV(e.getKey(), e.getValue());
result = ClockUtils.combineCmpClock(result, partialResult);
}
it = vv.entrySet().iterator();
while (it.hasNext()) {
Entry<String, LinkedList<Pair>> e = it.next();
LinkedList<Pair> l = cc.vv.get(e.getKey());
if (l == null) {
result = ClockUtils.combineCmpClock(result, CMP_CLOCK.CMP_DOMINATES);
break;
}
}
return result;
}
/**
* Merge this clock with the given c clock.
*
* @param c
* Clock to merge to
* @return Returns one of the following, based on the initial value of
* clocks:<br>
* CMP_EQUALS : if clocks were equal; <br>
* CMP_DOMINATES : if this clock dominated the given c clock; <br>
* CMP_ISDOMINATED : if this clock was dominated by the given c
* clock; <br>
     *         CMP_CONCURRENT : if this clock and the given c clock were
* concurrent; <br>
* @throws IncompatibleTypeException
* Case comparison cannot be made
*/
public CMP_CLOCK merge(CausalityClock cc) {
// if ( ! VersionVectorWithExceptions.class.equals(cc.getClass())) {
// throw new IncompatibleTypeException();
return mergeVV((VersionVectorWithExceptions) cc);
}
protected CMP_CLOCK compareOneEntryVV(String siteid, LinkedList<Pair> l0) {
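        // Sweep both sorted pair lists in parallel, tracking the last counter
        // value examined; a gap covered by only one side marks that side as
        // having more entries, which determines dominance or concurrency below.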
LinkedList<Pair> l = vv.get(siteid);
if (l == null) {
return CMP_CLOCK.CMP_ISDOMINATED;
}
boolean thisHasMoreEntries = false;
boolean otherHasMoreEntries = false;
Iterator<Pair> it = l.iterator();
Iterator<Pair> it0 = l0.iterator();
Pair p = it.hasNext() ? it.next() : null;
Pair p0 = it0.hasNext() ? it0.next() : null;
// last value that has been compared between the two sets
long v = Math.min(p == null ? Long.MAX_VALUE : p.from - 1, p0 == null ? Long.MAX_VALUE : p0.from - 1);
for (;;) {
if (p == null && p0 == null)
break;
if (thisHasMoreEntries && otherHasMoreEntries)
break;
if (p != null && p0 != null) {
if (p.from == p0.from && p.to == p0.to) {
v = p.to;
p = null;
p0 = null;
} else {
if (p.from <= v) { // we are in the middle of p
if (p0.from > v + 1) {
thisHasMoreEntries = true;
}
if (p.to < p0.from) { // p ends before p0 start
v = p.to;
p = null;
} else {
if (p.to == p0.to) {
v = p.to;
p = null;
p0 = null;
} else if (p.to < p0.to) {
v = p.to;
p = null;
} else {
v = p0.to;
p0 = null;
}
}
} else if (p0.from <= v) { // we are in the middle of p0
if (p.from > v + 1) {
otherHasMoreEntries = true;
}
if (p0.to < p.from) { // p ends before p0 start
v = p0.to;
p0 = null;
} else {
if (p.to == p0.to) {
v = p.to;
p = null;
p0 = null;
} else if (p0.to < p.to) {
v = p0.to;
p0 = null;
} else {
v = p.to;
p = null;
}
}
} else { // need to advance to next intervals
if (p.from == p0.from) {
v = p.from;
} else if (p.from < p0.from) {
thisHasMoreEntries = true;
if (p.to < p0.from) {
v = p.to;
p = null;
} else {
if (p.to == p0.to) {
v = p.to;
p = null;
p0 = null;
} else if (p.to < p0.to) {
v = p.to;
p = null;
} else {
v = p0.to;
p0 = null;
}
}
} else {
otherHasMoreEntries = true;
if (p0.to < p.from) {
v = p0.to;
p0 = null;
} else {
if (p.to == p0.to) {
v = p.to;
p = null;
p0 = null;
} else if (p0.to < p.to) {
v = p0.to;
p0 = null;
} else {
v = p.to;
p = null;
}
}
}
}
}
} else if (p == null) {
otherHasMoreEntries = true;
break;
} else if (p0 == null) {
thisHasMoreEntries = true;
break;
}
if (p == null && it.hasNext()) {
p = it.next();
}
if (p0 == null && it0.hasNext()) {
p0 = it0.next();
}
}
if (thisHasMoreEntries && otherHasMoreEntries) {
return CMP_CLOCK.CMP_CONCURRENT;
}
if (thisHasMoreEntries) {
return CMP_CLOCK.CMP_DOMINATES;
}
if (otherHasMoreEntries) {
return CMP_CLOCK.CMP_ISDOMINATED;
}
return CMP_CLOCK.CMP_EQUALS;
}
/**
* compare this clock with the given c clock.
*
* @param c
* Clock to compare to
* @return Returns one of the following, based on the initial value of
* clocks:<br>
* CMP_EQUALS : if clocks were equal; <br>
* CMP_DOMINATES : if this clock dominated the given c clock; <br>
* CMP_ISDOMINATED : if this clock was dominated by the given c
* clock; <br>
     *         CMP_CONCURRENT : if this clock and the given c clock were
* concurrent; <br>
* @throws IncompatibleTypeException
* Case comparison cannot be made
*/
protected CMP_CLOCK compareVV(VersionVectorWithExceptions cc) {
CMP_CLOCK result = CMP_CLOCK.CMP_EQUALS;
Iterator<Entry<String, LinkedList<Pair>>> it = cc.vv.entrySet().iterator();
while (it.hasNext()) {
Entry<String, LinkedList<Pair>> e = it.next();
CMP_CLOCK partialResult = compareOneEntryVV(e.getKey(), e.getValue());
result = ClockUtils.combineCmpClock(result, partialResult);
}
it = vv.entrySet().iterator();
while (it.hasNext()) {
Entry<String, LinkedList<Pair>> e = it.next();
LinkedList<Pair> i = cc.vv.get(e.getKey());
if (i == null) {
result = ClockUtils.combineCmpClock(result, CMP_CLOCK.CMP_DOMINATES);
break;
}
}
return result;
}
// TODO: fix parametric types
@Override
public CMP_CLOCK compareTo(CausalityClock cc) {
// if ( ! VersionVectorWithExceptions.class.equals(cc.getClass())) {
// throw new IncompatibleTypeException();
return compareVV((VersionVectorWithExceptions) cc);
}
/**
* Returns the most recent event for a given site. <br>
*
* @param siteid
* Site identifier.
* @return Returns an event clock.
*/
public Timestamp getLatest(String siteid) {
LinkedList<Pair> p = vv.get(siteid);
if (p == null) {
return new Timestamp(siteid, Timestamp.MIN_VALUE);
} else {
return new Timestamp(siteid, p.getLast().to);
}
}
/**
* Returns the most recent event for a given site. <br>
*
* @param siteid
* Site identifier.
* @return Returns an event clock.
*/
public long getLatestCounter(String siteid) {
LinkedList<Pair> p = vv.get(siteid);
if (p == null) {
return Timestamp.MIN_VALUE;
} else {
return p.getLast().to;
}
}
@Override
public boolean hasEventFrom(String siteid) {
return getLatestCounter(siteid) != Timestamp.MIN_VALUE;
}
/**
* Create a copy of this causality clock.
*/
public CausalityClock clone() {
return new VersionVectorWithExceptions(this);
}
public String toString() {
StringBuffer buf = new StringBuffer();
buf.append("[");
Iterator<Entry<String, LinkedList<Pair>>> it = vv.entrySet().iterator();
while (it.hasNext()) {
Entry<String, LinkedList<Pair>> e = it.next();
buf.append(e.getKey() + ":");
Iterator<Pair> it2 = e.getValue().iterator();
while (it2.hasNext()) {
Pair p = it2.next();
buf.append("[");
buf.append(p.from);
buf.append("-");
buf.append(p.to);
buf.append("]");
}
if (it.hasNext()) {
buf.append(",");
}
}
buf.append("]");
return buf.toString();
}
public boolean hasExceptions() {
return vv.size() != numPairs;
}
@Override
public void drop(String siteId) {
vv.remove(siteId);
}
@Override
public void drop(final Timestamp cc) {
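        // Remove a single counter from the site's interval list: trim the edge
        // of the pair that contains it, drop the pair entirely if it becomes
        // empty, or split it in two when the counter falls strictly inside.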
LinkedList<Pair> l = vv.get(cc.getIdentifier());
if (l == null) {
return;
}
long v = cc.getCounter();
ListIterator<Pair> it = l.listIterator(l.size());
Pair p = null;
while (it.hasPrevious()) {
Pair oldP = p;
p = it.previous();
if (v > p.to) {
return;
} else if (v == p.to) {
p.to = p.to - 1;
if (p.from > p.to) {
it.remove();
                    numPairs--;
if (l.size() == 0)
vv.remove(cc.getIdentifier());
}
return;
} else if (v == p.from) {
p.from = p.from + 1;
if (p.from > p.to) {
it.remove();
                    numPairs--;
if (l.size() == 0)
vv.remove(cc.getIdentifier());
}
return;
} else if (v > p.from && v < p.to) {
                // split the interval: keep [from, v-1] ahead of the shrunk [v+1, to]
                long oldFrom = p.from;
                p.from = v + 1;
                it.add(new Pair(oldFrom, v - 1));
numPairs++;
return;
}
}
}
}
|
package com.exmertec.dummie;
import org.junit.Test;
import static com.exmertec.dummie.Dummie.create;
import static com.exmertec.dummie.Dummie.prepare;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
public class StringTest {
@Test
public void should_create_object_with_string_field() throws Exception {
StringData data = create(StringData.class);
assertThat(data, not(nullValue()));
assertThat(data.getStringValue(), is("stringValue"));
}
@Test
public void should_allow_customize_string_type_fields() throws Exception {
StringData data = prepare(StringData.class).override(String.class, "test").build();
assertThat(data.getStringValue(), is("test"));
}
@Test
public void should_not_write_fields_without_setter() throws Exception {
StringData data = create(StringData.class);
assertThat(data.getNoSetter(), is(nullValue()));
}
public static class StringData {
private String stringValue;
private String noSetter;
public String getStringValue() {
return stringValue;
}
public void setStringValue(String stringValue) {
this.stringValue = stringValue;
}
public String getNoSetter() {
return noSetter;
}
}
}
|
package com.jcabi.aether;
import java.io.File;
import java.util.Arrays;
import java.util.List;
import org.apache.maven.model.Dependency;
import org.apache.maven.project.MavenProject;
import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.mockito.Mockito;
import org.sonatype.aether.repository.RemoteRepository;
import org.sonatype.aether.util.artifact.JavaScopes;
/**
* Test case for {@link Classpath}.
* @author Yegor Bugayenko (yegor@tpc2.com)
* @version $Id$
*/
public final class ClasspathTest {
/**
* Temp dir.
* @checkstyle VisibilityModifier (3 lines)
*/
@Rule
public final transient TemporaryFolder temp = new TemporaryFolder();
/**
* Classpath can build a classpath.
* @throws Exception If there is some problem inside
*/
@Test
@SuppressWarnings("unchecked")
public void buildsClasspath() throws Exception {
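        // A mocked project exposing one classpath directory and a junit:junit:4.10
        // test dependency should yield that directory plus junit-4.10.jar and its
        // transitive hamcrest-core-1.1.jar, resolved from Maven Central.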
final File local = this.temp.newFolder();
final MavenProject project = Mockito.mock(MavenProject.class);
Mockito.doReturn(Arrays.asList("/some/path/as/directory"))
.when(project).getTestClasspathElements();
final Dependency dep = new Dependency();
final String group = "junit";
dep.setGroupId(group);
dep.setArtifactId(group);
dep.setVersion("4.10");
dep.setScope(JavaScopes.TEST);
Mockito.doReturn(Arrays.asList(dep)).when(project).getDependencies();
final List<RemoteRepository> repos = Arrays.asList(
new RemoteRepository(
"maven-central",
"default",
"http://repo1.maven.org/maven2/"
)
);
Mockito.doReturn(repos).when(project).getRemoteProjectRepositories();
MatcherAssert.assertThat(
new Classpath(project, local.getPath(), JavaScopes.TEST),
Matchers.<File>hasItems(
Matchers.hasToString(Matchers.endsWith("/as/directory")),
Matchers.hasToString(Matchers.endsWith("junit-4.10.jar")),
Matchers.hasToString(Matchers.endsWith("hamcrest-core-1.1.jar"))
)
);
}
}
|
package cpw.mods.fml.common;
import org.objectweb.asm.tree.ClassNode;
public interface IASMHook {
/**
* Inject the {@link Mod} class node into this instance. This allows retrieval from custom
* attributes or other artifacts in your mod class
*
* @param modClassNode The mod class
* @return optionally some code generated classes that will be injected into the classloader
*/
ClassNode[] inject(ClassNode modClassNode);
/**
* Allow mods to manipulate classes loaded from this {@link Mod}'s jar file. The {@link Mod}
* class is always guaranteed to be called first.
* The node state should be changed in place.
*
     * @param className The name of the class being loaded
     * @param node The class being loaded
*/
void modifyClass(String className, ClassNode node);
}
|
package com.ociweb.gl.example;
import com.ociweb.json.appendable.AppendableByteWriter;
import com.ociweb.json.template.StringTemplateBuilder;
import com.ociweb.json.template.StringTemplateScript;
import com.ociweb.pronghorn.network.config.HTTPHeader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.ociweb.gl.api.GreenRuntime;
import com.ociweb.gl.api.HTTPFieldReader;
import com.ociweb.gl.api.HTTPRequestReader;
import com.ociweb.gl.api.Headable;
import com.ociweb.gl.api.MsgCommandChannel;
import com.ociweb.gl.api.Writable;
import com.ociweb.gl.api.RestListener;
import com.ociweb.pronghorn.network.config.HTTPContentTypeDefaults;
import com.ociweb.pronghorn.network.config.HTTPHeaderDefaults;
import com.ociweb.pronghorn.pipe.ChannelReader;
import com.ociweb.pronghorn.pipe.ChannelWriter;
import com.ociweb.pronghorn.util.Appendables;
import com.ociweb.pronghorn.util.math.Decimal;
import com.ociweb.pronghorn.util.math.DecimalResult;
public class MathUnit implements RestListener {
private final Logger logger = LoggerFactory.getLogger(MathUnit.class);
private final MsgCommandChannel<?> cc;
private String lastCookie;
private final byte[] fieldA = "a".getBytes();
private final byte[] fieldB = "b".getBytes();
private final StringTemplateBuilder<HTTPFieldReader> template;
public MathUnit(final GreenRuntime runtime) {
this.cc = runtime.newCommandChannel(MsgCommandChannel.NET_RESPONDER);
StringTemplateScript<HTTPFieldReader> consumeX = new StringTemplateScript<HTTPFieldReader>() {
@Override
public void fetch(AppendableByteWriter writer, HTTPFieldReader source) {
source.getText(fieldA, writer);
}
};
StringTemplateScript<HTTPFieldReader> consumeY = new StringTemplateScript<HTTPFieldReader>() {
@Override
public void fetch(AppendableByteWriter writer, HTTPFieldReader source) {
source.getText(fieldB, writer);
}
};
StringTemplateScript<HTTPFieldReader> consumeSum = new StringTemplateScript<HTTPFieldReader>() {
@Override
public void fetch(final AppendableByteWriter writer, HTTPFieldReader source) {
DecimalResult adder = new DecimalResult() {
@Override
public void result(long m, byte e) {
Appendables.appendDecimalValue(writer, m, e);
}
};
Decimal.sum(
source.getDecimalMantissaDirect(fieldA),
source.getDecimalExponentDirect(fieldA),
source.getDecimalMantissaDirect(fieldB),
source.getDecimalExponentDirect(fieldB),
adder);
}
};
template = new StringTemplateBuilder<HTTPFieldReader>()
.add("{\"x\":").add(consumeX)
.add(",\"y\":").add(consumeY)
.add(",\"groovySum\":").add(consumeSum)
.add("}");
}
@Override
public boolean restRequest(final HTTPRequestReader request) {
final StringBuilder cookieValue = new StringBuilder();
Headable eat = new Headable() {
@Override
public void read(HTTPHeader header, ChannelReader httpPayloadReader) {
httpPayloadReader.readUTF(cookieValue);
lastCookie = cookieValue.toString();
}
};
request.openHeaderData((int)request.getFieldId(HTTPHeaderDefaults.COOKIE.rootBytes()), eat);
Writable render = new Writable() {
@Override
public void write(ChannelWriter writer) {
template.render(writer, request);
}
};
return cc.publishHTTPResponse(request.getConnectionId(), request.getSequenceCode(), 200, false,
HTTPContentTypeDefaults.JSON, render );
}
public String getLastCookie() {
return lastCookie;
}
}
|
package com.razorpay;
import org.json.JSONObject;
import org.junit.Test;
import org.mockito.InjectMocks;
import java.io.IOException;
import java.util.List;
import static org.junit.Assert.*;
public class PaymentClientTest extends BaseTest{
@InjectMocks
protected PaymentClient paymentClient = new PaymentClient(TEST_SECRET_KEY);
private static final String PAYMENT_ID = "pay_IDRP0tbirMSsbn";
private static final String REFUND_ID = "rfnd_FP8QHiV938haTz";
/**
     * Retrieve the payment details for a customer using the payment id.
* @throws RazorpayException
*/
@Test
public void fetch() throws RazorpayException{
String mockedResponseJson = "{\"id\":"+PAYMENT_ID+",\"entity\":\"payment\",\"amount\":1000,\"currency\":\"INR\",\"status\":\"captured\",\"order_id\":\"order_G8VPOayFxWEU28\",\"invoice_id\":null,\"international\":false,\"method\":\"upi\",\"amount_refunded\":0,\"refund_status\":null,\"captured\":true,\"description\":\"PurchaseShoes\",\"card_id\":null,\"bank\":null,\"wallet\":null,\"vpa\":\"gaurav.kumar@exampleupi\",\"email\":\"gaurav.kumar@example.com\",\"contact\":\"+919999999999\",\"customer_id\":\"cust_DitrYCFtCIokBO\",\"notes\":[],\"fee\":24,\"tax\":4,\"error_code\":null,\"error_description\":null,\"error_source\":null,\"error_step\":null,\"error_reason\":null,\"acquirer_data\":{\"rrn\":\"033814379298\"},\"created_at\":1606985209}";
try {
mockResponseFromExternalClient(mockedResponseJson);
mockResponseHTTPCodeFromExternalClient(200);
Payment fetch = paymentClient.fetch(PAYMENT_ID);
assertNotNull(fetch);
assertEquals(PAYMENT_ID,fetch.get("id"));
assertTrue(fetch.has("status"));
assertTrue(fetch.has("currency"));
} catch (IOException e) {
assertTrue(false);
}
}
/**
* Details of all the payments can be retrieved.
* @throws RazorpayException
*/
@Test
public void fetchAll() throws RazorpayException{
String mockedResponseJson = "{\"entity\":\"collection\",\"count\":2,\"items\":[{\"id\":\"pay_G8VaL2Z68LRtDs\",\"entity\":\"payment\",\"amount\":900,\"currency\":\"INR\",\"status\":\"captured\",\"order_id\":\"order_G8VXfKDWDEOHHd\",\"invoice_id\":null,\"international\":false,\"method\":\"netbanking\",\"amount_refunded\":0,\"refund_status\":null,\"captured\":true,\"description\":\"PurchaseShoes\",\"card_id\":null,\"bank\":\"KKBK\",\"wallet\":null,\"vpa\":null,\"email\":\"gaurav.kumar@example.com\",\"contact\":\"+919999999999\",\"customer_id\":\"cust_DitrYCFtCIokBO\",\"notes\":[],\"fee\":22,\"tax\":4,\"error_code\":null,\"error_description\":null,\"error_source\":null,\"error_step\":null,\"error_reason\":null,\"acquirer_data\":{\"bank_transaction_id\":\"0125836177\"},\"created_at\":1606985740}]}";
try {
mockResponseFromExternalClient(mockedResponseJson);
mockResponseHTTPCodeFromExternalClient(200);
List<Payment> fetch = paymentClient.fetchAll();
assertNotNull(fetch);
assertTrue(fetch.get(0).has("id"));
assertTrue(fetch.get(0).has("entity"));
assertTrue(fetch.get(0).has("amount"));
} catch (IOException e) {
assertTrue(false);
}
}
/**
     * Capture a payment, verifying that the amount deducted from the customer
     * is the same as the amount paid by the customer on the website.
* @throws RazorpayException
*/
@Test
public void capture() throws RazorpayException{
JSONObject request = new JSONObject("{\"amount\":1000,\"currency\":\"INR\"}");
String mockedResponseJson = "{\"id\":"+PAYMENT_ID+",\"entity\":\"payment\",\"amount\":1000,\"currency\":\"INR\",\"status\":\"captured\",\"order_id\":\"order_G8VPOayFxWEU28\",\"invoice_id\":null,\"international\":false,\"method\":\"upi\",\"amount_refunded\":0,\"refund_status\":null,\"captured\":true,\"description\":\"PurchaseShoes\",\"card_id\":null,\"bank\":null,\"wallet\":null,\"vpa\":\"gaurav.kumar@exampleupi\",\"email\":\"gaurav.kumar@example.com\",\"contact\":\"+919999999999\",\"customer_id\":\"cust_DitrYCFtCIokBO\",\"notes\":[],\"fee\":24,\"tax\":4,\"error_code\":null,\"error_description\":null,\"error_source\":null,\"error_step\":null,\"error_reason\":null,\"acquirer_data\":{\"rrn\":\"033814379298\"},\"created_at\":1606985209}";
try {
mockResponseFromExternalClient(mockedResponseJson);
mockResponseHTTPCodeFromExternalClient(200);
Payment fetch = paymentClient.capture(PAYMENT_ID,request);
assertNotNull(fetch);
assertEquals(PAYMENT_ID,fetch.get("id"));
assertTrue(fetch.has("entity"));
assertTrue(fetch.has("amount"));
} catch (IOException e) {
assertTrue(false);
}
}
/**
     * Create a refund for the respective customer.
* @throws RazorpayException
*/
@Test
public void refund() throws Exception{
JSONObject request = new JSONObject("{\"amount\":\"100\",\"speed\":\"normal\",\"notes\":{\"notes_key_1\":\"BeammeupScotty.\",\"notes_key_2\":\"Engage\"},\"receipt\":\"ReceiptNo.31\"}");
String mockedResponseJson = "{\"id\":"+REFUND_ID+",\"entity\":\"refund\",\"amount\":500100,\"receipt\":\"ReceiptNo.31\",\"currency\":\"INR\",\"payment_id\":\"pay_FCXKPFtYfPXJPy\",\"notes\":[],\"acquirer_data\":{\"arn\":null},\"created_at\":1597078866,\"batch_id\":null,\"status\":\"processed\",\"speed_processed\":\"normal\"}";
try {
mockResponseFromExternalClient(mockedResponseJson);
mockResponseHTTPCodeFromExternalClient(200);
Refund fetch = paymentClient.refund(PAYMENT_ID,request);
assertNotNull(fetch);
assertEquals(REFUND_ID,fetch.get("id"));
assertEquals("INR",fetch.get("currency"));
assertTrue(fetch.has("payment_id"));
} catch (IOException e) {
assertTrue(false);
}
}
/**
     * Retrieve all refunds for a payment; by default only the last 10 refunds are returned.
* @throws RazorpayException
*/
@Test
public void FetchAllRefunds() throws RazorpayException{
JSONObject request = new JSONObject("{}");
String mockedResponseJson = "{\"entity\":\"collection\",\"count\":1,\"items\":[{\"id\":\"rfnd_IDQbLKwiy0aHrA\",\"entity\":\"refund\",\"amount\":100,\"currency\":\"INR\",\"payment_id\":\"pay_I3eaMwGV0462JA\",\"notes\":[],\"receipt\":null,\"acquirer_data\":{\"arn\":\"10000000000000\"},\"created_at\":1635134062,\"batch_id\":null,\"status\":\"processed\",\"speed_processed\":\"normal\",\"speed_requested\":\"normal\"}]}";
try {
mockResponseFromExternalClient(mockedResponseJson);
mockResponseHTTPCodeFromExternalClient(200);
List<Refund> fetch = paymentClient.fetchAllRefunds(PAYMENT_ID,request);
assertNotNull(fetch);
assertTrue(fetch.get(0).has("id"));
assertTrue(fetch.get(0).has("amount"));
assertTrue(fetch.get(0).has("payment_id"));
assertTrue(fetch.get(0).has("notes"));
} catch (IOException e) {
assertTrue(false);
}
}
/**
     * Create transfers from a payment using the payment id and a request
     * object containing the transfer properties.
* @throws RazorpayException
*/
@Test
public void transfers() throws RazorpayException{
JSONObject request = new JSONObject("{\"transfers\":[{\"account\":\"acc_CPRsN1LkFccllA\",\"amount\":100,\"currency\":\"INR\",\"notes\":{\"name\":\"GauravKumar\",\"roll_no\":\"IEC2011025\"},\"linked_account_notes\":[\"roll_no\"],\"on_hold\":true,\"on_hold_until\":1671222870}]}");
String mockedResponseJson = "{\"entity\":\"collection\",\"count\":1,\"items\":[{\"id\":\"trf_ItzBst0oybrcNx\",\"entity\":\"transfer\",\"status\":\"pending\",\"source\":\"pay_IOyKpYsPTMSWph\",\"recipient\":\"acc_I0QRP7PpvaHhpB\",\"amount\":100,\"currency\":\"INR\",\"amount_reversed\":0,\"notes\":{\"name\":\"GauravKumar\",\"roll_no\":\"IEC2011025\"},\"linked_account_notes\":[\"roll_no\"],\"on_hold\":true,\"on_hold_until\":1671222870,\"recipient_settlement_id\":null,\"created_at\":1644426157,\"processed_at\":null,\"error\":{\"code\":null,\"description\":null,\"reason\":null,\"field\":null,\"step\":null,\"id\":\"trf_ItzBst0oybrcNx\",\"source\":null,\"metadata\":null}}]}";
try {
mockResponseFromExternalClient(mockedResponseJson);
mockResponseHTTPCodeFromExternalClient(200);
List <Transfer> fetch = paymentClient.transfer(PAYMENT_ID,request);
assertNotNull(fetch);
assertTrue(fetch.get(0).has("status"));
assertTrue(fetch.get(0).has("source"));
assertTrue(fetch.get(0).has("recipient"));
assertTrue(fetch.get(0).has("currency"));
} catch (IOException e) {
assertTrue(false);
}
}
/**
     * Details of all transfers created from a payment can be retrieved.
* @throws RazorpayException
*/
@Test
public void fetchAllTransfers() throws RazorpayException{
String mockedResponseJson = "{\n \"entity\": \"collection\",\n \"count\": 1,\n \"items\": [\n {\n \"id\": \"trf_EAznuJ9cDLnF7Y\",\n \"entity\": \"transfer\",\n \"source\": \"pay_E9up5WhIfMYnKW\",\n \"recipient\": \"acc_CMaomTz4o0FOFz\",\n \"amount\": 1000,\n \"currency\": \"INR\",\n \"amount_reversed\": 100,\n \"notes\": [],\n \"fees\": 3,\n \"tax\": 0,\n \"on_hold\": false,\n \"on_hold_until\": null,\n \"recipient_settlement_id\": null,\n \"created_at\": 1580454666,\n \"linked_account_notes\": [],\n \"processed_at\": 1580454666\n }\n ]\n}";
try {
mockResponseFromExternalClient(mockedResponseJson);
mockResponseHTTPCodeFromExternalClient(200);
List<Transfer> fetch = paymentClient.fetchAllTransfers(PAYMENT_ID);
assertNotNull(fetch);
assertTrue(fetch.get(0).has("source"));
assertTrue(fetch.get(0).has("recipient"));
assertTrue(fetch.get(0).has("amount"));
} catch (IOException e) {
assertTrue(false);
}
}
/**
     * Retrieve bank transfer details for a payment using the payment id.
* @throws RazorpayException
*/
@Test
public void fetchBankTransfers() throws RazorpayException{
String mockedResponseJson = "{\n \"id\": \"bt_Di5iqCElVyRlCb\",\n \"entity\": \"bank_transfer\",\n \"payment_id\": "+PAYMENT_ID+",\n \"mode\": \"NEFT\",\n \"bank_reference\": \"157414364471\",\n \"amount\": 239000,\n \"payer_bank_account\": {\n \"id\": \"ba_Di5iqSxtYrTzPU\",\n \"entity\": \"bank_account\",\n \"ifsc\": \"UTIB0003198\",\n \"bank_name\": \"Axis Bank\",\n \"name\": \"Acme Corp\",\n \"notes\": [],\n \"account_number\": \"765432123456789\"\n },\n \"virtual_account_id\": \"va_Di5gbNptcWV8fQ\",\n \"virtual_account\": {\n \"id\": \"va_Di5gbNptcWV8fQ\",\n \"name\": \"Acme Corp\",\n \"entity\": \"virtual_account\",\n \"status\": \"closed\",\n \"description\": \"Virtual Account created for MS ABC Exports\",\n \"amount_expected\": 2300,\n \"notes\": {\n \"material\": \"teakwood\"\n },\n \"amount_paid\": 239000,\n \"customer_id\": \"cust_DOMUFFiGdCaCUJ\",\n \"receivers\": [\n {\n \"id\": \"ba_Di5gbQsGn0QSz3\",\n \"entity\": \"bank_account\",\n \"ifsc\": \"RATN0VAAPIS\",\n \"bank_name\": \"RBL Bank\",\n \"name\": \"Acme Corp\",\n \"notes\": [],\n \"account_number\": \"1112220061746877\"\n }\n ],\n \"close_by\": 1574427237,\n \"closed_at\": 1574164078,\n \"created_at\": 1574143517\n }\n}";
try {
mockResponseFromExternalClient(mockedResponseJson);
mockResponseHTTPCodeFromExternalClient(200);
BankTransfer fetch = paymentClient.fetchBankTransfers(PAYMENT_ID);
assertNotNull(fetch);
assertEquals("bt_Di5iqCElVyRlCb",fetch.get("id"));
assertEquals("bank_transfer",fetch.get("entity"));
assertEquals(PAYMENT_ID,fetch.get("payment_id"));
assertTrue(fetch.has("entity"));
assertTrue(fetch.has("amount"));
} catch (IOException e) {
assertTrue(false);
}
}
/**
     * Create a payment for a customer after the order is created (server-to-server integration).
* @throws RazorpayException
*/
@Test
public void createJsonPayment() throws RazorpayException {
JSONObject request = new JSONObject("{\"amount\":\"100\",\"currency\":\"INR\",\"email\":\"gaurav.kumar@example.com\",\"contact\":\"9123456789\",\"order_id\":\"order_ItZMEZjpBD6dhT\",\"method\":\"upi\"}");
String mockedResponseJson = "{\"entity\":\"payment\",\"type\":\"respawn\",\"request\":{\"url\":\"https://api.razorpay.com/v1/payments?key_id=rzp_test_pNL6H0AmbBEyjD\",\"method\":\"POST\",\"content\":{\"amount\":\"100\",\"currency\":\"INR\",\"email\":\"gaurav.kumar@example.com\",\"contact\":\"9123456789\",\"order_id\":\"order_ItYKzxCxnKAKlD\",\"method\":\"upi\",\"card\":{\"number\":\"4854980604708430\",\"cvv\":\"123\",\"expiry_month\":\"12\",\"expiry_year\":\"21\",\"name\":\"GauravKumar\"},\"_\":{\"library\":\"s2s\"},\"upi\":{\"flow\":\"collect\",\"type\":\"default\"}}},\"image\":null,\"theme\":\"#3594E2\",\"method\":\"upi\",\"version\":\"1\",\"missing\":[\"vpa\"],\"base\":\"api.razorpay.com\"}";
try {
mockResponseFromExternalClient(mockedResponseJson);
mockResponseHTTPCodeFromExternalClient(200);
Payment fetch = paymentClient.createJsonPayment(request);
assertNotNull(fetch);
assertEquals("upi",fetch.get("method"));
assertEquals("payment",fetch.get("entity"));
assertTrue(fetch.has("request"));
assertTrue(fetch.has("base"));
} catch (IOException e) {
assertTrue(false);
}
}
}
|
package com.aol.simple.react.async;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import com.aol.simple.react.SimpleReact;
import com.aol.simple.react.Stage;
public class QueueTest {
@Before
public void setup() {
found = 0;
}
int found = 0;
public synchronized void incrementFound() {
found++;
}
@Test
public void backPressureTest() {
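        // The backing LinkedBlockingQueue holds at most two elements, so only two
        // of the four offers complete (found == 2) until the stream below drains
        // the queue, after which the remaining offers finish and found reaches 4.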
Queue<Integer> q = new Queue<>(new LinkedBlockingQueue<>(2));
new SimpleReact().react(() -> {
q.offer(1);
return found++;
}, () -> {
q.offer(1);
return found++;
}, () -> {
q.offer(6);
return found++;
}, () -> {
q.offer(5);
return found++;
});
sleep(10);
assertThat(found, is(2));
assertThat(q.stream().limit(2).collect(Collectors.toList()).size(),
is(2));
assertThat(q.stream().limit(2).collect(Collectors.toList()).size(),
is(2));
assertThat(found, is(4));
}
@Test
public void backPressureJDKTest() {
Queue<String> q = new Queue<>(new LinkedBlockingQueue<>(2));
new SimpleReact().react(() -> {
Stream.of("1","2","3","4").forEach(it -> {q.offer(it); found++;});
return 1;
});
sleep(10);
assertThat(found, is(2));
assertThat(q.stream().limit(2).collect(Collectors.toList()).size(),
is(2));
assertThat(q.stream().limit(2).collect(Collectors.toList()).size(),
is(2));
assertThat(found, is(4));
}
@Test
public void backPressureTimeoutTestVeryLow() {
Queue<Integer> q = new Queue<Integer>(new LinkedBlockingQueue<>(2))
.withOfferTimeout(1).withOfferTimeUnit(TimeUnit.MICROSECONDS);
Set<Boolean> results = new SimpleReact().react(
() -> offerAndIncrementFound(q),
() -> offerAndIncrementFound(q),
() -> offerAndIncrementFound(q),
() -> offerAndIncrementFound(q)).block(Collectors.toSet());
sleep(10);
assertThat(found, is(4));
assertThat(results.size(), is(2));
assertThat(results, hasItem(false)); // some offers failed.
}
@Test
public void backPressureTimeoutTestVeryHigh() {
Queue<Integer> q = new Queue<Integer>(new LinkedBlockingQueue<>(2))
.withOfferTimeout(1).withOfferTimeUnit(TimeUnit.DAYS);
Stage<Boolean> s = new SimpleReact().react(
() -> offerAndIncrementFound(q),
() -> offerAndIncrementFound(q),
() -> offerAndIncrementFound(q),
() -> offerAndIncrementFound(q));
sleep(10);
assertThat(found, is(2));
assertThat(q.stream().limit(4).collect(Collectors.toList()).size(),
is(4));
Set<Boolean> results = s.block(Collectors.toSet());
assertThat(found, is(4));
assertThat(results.size(), is(1));
        assertThat(results, not(hasItem(false))); // no offer failed.
}
private Boolean offerAndIncrementFound(Queue<Integer> q) {
boolean ret = q.offer(1);
found++;
return ret;
}
@Test
public void testSizeSignal() {
System.out.println("hello");
Queue<Integer> q = new Queue<Integer>();
Signal<Integer> s = q.getSizeSignal();
q.add(1);
q.add(1);
q.add(1);
q.stream().limit(3).forEach(it -> System.out.println(it)); // drain the
// queue
q.add(1); // queue size is 1
sleep(50);
List<Integer> sizes = s.getDiscrete().stream().limit(7)
.collect(Collectors.toList());
assertThat(sizes.get(0), is(1));
assertThat(sizes.get(1), is(2));
assertThat(sizes.get(2), is(3));
assertThat(sizes.get(3), is(2));
assertThat(sizes.get(4), is(1));
assertThat(sizes.get(5), is(0));
assertThat(sizes.get(6), is(1));
}
@Test
public void testAdd() {
Queue<Integer> q = new Queue<>(new LinkedBlockingQueue<>(2));
new SimpleReact().react(() -> {
q.add(1);
return found++;
}, () -> {
q.add(1);
return found++;
}, () -> {
q.add(6);
return found++;
}, () -> {
q.add(5);
return found++;
});
sleep(10);
assertThat(found, is(4));
}
@Test
public void testAddFull() {
Queue<Integer> q = new Queue<>(new LinkedBlockingQueue<>(2));
assertTrue(q.add(1));
assertTrue(q.add(2));
assertFalse(q.add(3));
}
@Test
public void enqueueTest() {
Stream<String> stream = Stream.of("1", "2", "3");
Queue<String> q = new Queue(new LinkedBlockingQueue());
q.fromStream(stream);
Stream<String> dq = q.stream();
Integer dequeued = q.stream().limit(3).map(it -> Integer.valueOf(it))
.reduce(0, (acc, next) -> acc + next);
assertThat(dequeued, is(6));
}
volatile int count = 0;
volatile int count1 = 10000;
@Test
public void simpleMergingTestLazyIndividualMerge() {
Queue<Integer> q = new Queue(new LinkedBlockingQueue());
q.offer(0);
q.offer(100000);
List<Integer> result = q.stream().limit(2)
.peek(it -> System.out.println(it))
.collect(Collectors.toList());
assertThat(result, hasItem(100000));
assertThat(result, hasItem(0));
}
@Test
@Ignore
// too non-deterministic to run regularly - relying on population from
// competing threads
public void mergingTestLazyIndividualMerge() {
count = 0;
count1 = 100000;
Queue<Integer> q = new Queue(new LinkedBlockingQueue());
SimpleReact.lazy().reactInfinitely(() -> count++)
.then(it -> q.offer(it)).run(new ForkJoinPool(1));
SimpleReact.lazy().reactInfinitely(() -> count1++)
.then(it -> q.offer(it)).run(new ForkJoinPool(1));
List<Integer> result = q.stream().limit(1000)
.peek(it -> System.out.println(it))
.collect(Collectors.toList());
assertThat(result, hasItem(100000));
assertThat(result, hasItem(0));
}
@Test
public void simpleMergingTestEagerStreamMerge() {
Queue<Integer> q = new Queue(new LinkedBlockingQueue());
q.offer(0);
q.offer(100000);
List<Integer> result = q.stream().limit(2)
.peek(it -> System.out.println(it))
.collect(Collectors.toList());
assertThat(result, hasItem(100000));
assertThat(result, hasItem(0));
}
@Test
@Ignore
// too non-deterministic to run regularly - relying on population from
// competing threads
public void mergingTestEagerStreamMerge() {
count = 0;
count1 = 100000;
Queue<Integer> q = new Queue(new LinkedBlockingQueue());
new SimpleReact().react(() -> q.fromStream(Stream
.generate(() -> count++)));
new SimpleReact().react(() -> q.fromStream(Stream
.generate(() -> count1++)));
List<Integer> result = q.stream().limit(1000)
.peek(it -> System.out.println(it))
.collect(Collectors.toList());
assertThat(result, hasItem(100000));
assertThat(result, hasItem(0));
}
@Test(expected = Queue.ClosedQueueException.class)
public void queueTestBlock() {
try {
Queue q = new Queue<>(new LinkedBlockingQueue<>());
new SimpleReact().react(() -> q.offer(1), () -> q.offer(2), () -> {
sleep(50);
return q.offer(4);
}, () -> {
sleep(400);
q.close();
return 1;
});
SimpleReact.lazy().fromStream(q.streamCompletableFutures())
.then(it -> "*" + it).peek(it -> incrementFound())
.peek(it -> System.out.println(it)).block();
} finally {
assertThat(found, is(3));
}
}
@Test
public void queueTestTimeout() {
Queue q = new Queue<>(new LinkedBlockingQueue<>()).withTimeout(1)
.withTimeUnit(TimeUnit.MILLISECONDS);
new SimpleReact().react(() -> q.offer(1), () -> q.offer(2), () -> {
sleep(500);
return q.offer(4);
}, () -> q.offer(5));
Collection<String> results = SimpleReact.lazy()
.fromStream(q.streamCompletableFutures()).then(it -> "*" + it)
.run(() -> new ArrayList<String>());
assertThat(results.size(), is(3));
assertThat(results, not(hasItem("*4")));
assertThat(results, hasItem("*5"));
}
@Test
public void queueTestRun() {
try {
Queue<Integer> q = new Queue<>(new LinkedBlockingQueue<>());
new SimpleReact().react(() -> q.offer(1), () -> q.offer(2), () -> {
sleep(200);
return q.offer(4);
}, () -> {
sleep(400);
q.close();
return 1;
});
List<String> result = SimpleReact.lazy()
.fromStream(q.streamCompletableFutures())
.then(it -> "*" + it).peek(it -> incrementFound())
.peek(it -> System.out.println(it))
.run(() -> new ArrayList<String>());
assertThat(result, hasItem("*1"));
} finally {
assertThat(found, is(3));
}
}
private int sleep(int i) {
try {
Thread.sleep(i);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return i;
}
}
|
package de.bmoth.app;
import javafx.fxml.FXMLLoader;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.stage.Stage;
import org.junit.Ignore;
import org.junit.Test;
import static org.testfx.api.FxAssert.verifyThat;
import static org.testfx.matcher.base.NodeMatchers.isNotNull;
import static org.testfx.matcher.base.NodeMatchers.isNull;
public class AppControllerTest extends HeadlessUITest {
@Override
public void start(Stage stage) throws Exception {
FXMLLoader loader = new FXMLLoader(getClass().getResource("app.fxml"));
Parent root = loader.load();
Scene scene = new Scene(root, 500, 300);
stage.setScene(scene);
stage.show();
}
@Test
public void clickingOptionsOpensDialog() {
clickOn("#fileMenu");
clickOn("#options");
verifyThat("#minInt", isNotNull());
clickOn("Close");
verifyThat("#minInt", isNull());
}
@Test
@Ignore("this test confuses the other repl tests, because it does not close the repl window")
public void clickingReplOpensRepl() {
verifyThat("#replText", isNull());
clickOn("#replMenu").clickOn("#openRepl");
verifyThat("#replText", isNotNull());
}
}
|
package com.heroku;
import com.atlassian.bamboo.build.logger.BuildLogger;
import com.atlassian.bamboo.configuration.ConfigurationMapImpl;
import com.atlassian.bamboo.task.TaskContext;
import com.atlassian.bamboo.task.TaskResult;
import com.atlassian.bamboo.task.TaskState;
import com.atlassian.bamboo.task.TaskType;
import org.jmock.Mock;
import org.jmock.MockObjectTestCase;
import org.jmock.core.InvocationMatcher;
import org.jmock.core.matcher.AnyArgumentsMatcher;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintWriter;
public class ArtifactDeploymentTasksIT extends MockObjectTestCase {
private final Mock mockStatics = new Mock(WarDeploymentTask.StaticSandbox.class);
private final Mock mockContext = new Mock(TaskContext.class);
private final Mock mockLogger = new Mock(BuildLogger.class);
private final Mock mockSuccessfulTaskResult = new Mock(TaskResult.class);
private final Mock mockFailedTaskResult = new Mock(TaskResult.class);
private final ConfigurationMapImpl configMap = new ConfigurationMapImpl();
private final File workingDir = createTempDir();
@Override
protected void setUp() throws Exception {
super.setUp();
mockStatics.expects(anything()).method("success").will(returnValue(mockSuccessfulTaskResult.proxy()));
mockStatics.expects(anything()).method("failed").will(returnValue(mockFailedTaskResult.proxy()));
mockContext.expects(once()).method("getBuildLogger").will(returnValue(mockLogger.proxy()));
mockContext.expects(atLeastOnce()).method("getConfigurationMap").will(returnValue(configMap));
mockContext.expects(atLeastOnce()).method("getWorkingDirectory").will(returnValue(workingDir));
mockLogger.expects(anything()).method("addBuildLogEntry");
mockLogger.expects(anything()).method("addErrorLogEntry");
mockSuccessfulTaskResult.expects(anything()).method("getTaskState").will(returnValue(TaskState.SUCCESS));
mockFailedTaskResult.expects(anything()).method("getTaskState").will(returnValue(TaskState.FAILED));
}
protected TaskResult runTask(Class<? extends AbstractDeploymentTask> taskClass) throws Exception {
configMap.put("apiKey", System.getProperty("heroku.apiKey"));
configMap.put("appName", System.getProperty("heroku.appName"));
TaskType task = taskClass.getConstructor(AbstractDeploymentTask.StaticSandbox.class).newInstance((AbstractDeploymentTask.StaticSandbox) mockStatics.proxy());
return task.execute((TaskContext) mockContext.proxy());
}
public void testWarDeployment() throws Exception {
configMap.put("war", File.createTempFile("some", ".war", workingDir).getName());
final TaskResult taskResult = runTask(WarDeploymentTask.class);
assertEquals(TaskState.SUCCESS, taskResult.getTaskState());
}
public void testFatJarDeployment() throws Exception {
configMap.put("jar", File.createTempFile("some", ".jar", workingDir).getName());
configMap.put("procfile", createProcfile(workingDir).getName());
assertEquals(TaskState.SUCCESS, runTask(FatJarDeploymentTask.class).getTaskState());
}
public void testTarGzDeployment() throws Exception {
configMap.put("targz", File.createTempFile("some", ".tar.gz", workingDir).getName());
configMap.put("procfile", createProcfile(workingDir).getName());
assertEquals(TaskState.SUCCESS, runTask(TarGzDeploymentTask.class).getTaskState());
}
InvocationMatcher anything() {
return new AnyArgumentsMatcher();
}
private static File createTempDir() {
try {
final File tmp = File.createTempFile("temp", "dir");
if (!tmp.delete()) throw new IOException("Could not delete");
if (!tmp.mkdir()) throw new IOException("Could not mkdir");
return tmp;
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private File createProcfile(File dir) {
try {
final File procfile = File.createTempFile("Procfile", "", dir);
PrintWriter writer = new PrintWriter(procfile);
writer.append("web: exit");
writer.close();
return procfile;
} catch (FileNotFoundException e) {
throw new RuntimeException(e);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
|
package com.twu.biblioteca.model;
import com.twu.biblioteca.exceptions.BookNotBorrowable;
import com.twu.biblioteca.exceptions.BookNotReturnable;
import com.twu.biblioteca.exceptions.MovieNotBorrowable;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Matchers;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
/**
* Tests for the Library class.
*
* @author Desiree Kelly
* @version 1.0
* @see Library
*/
public class LibraryTest {
private static final Book BOOK_1 = new Book("Java 101", "Joe Bloggs", 1990);
private static final Book BOOK_2 = new Book("PHP 101", "Mary Jane", 2005);
private static final Movie MOVIE_1 = new Movie("The Matrix", 1999, "The Wachowski Brothers", "10");
private static final Movie MOVIE_2 = new Movie("Inception", 2010, "Christopher Nolan", "8");
private static final User USER_1 = new User("Joe Bloggs", "joebloggs@joebloggs.com", "0400 000 000", "123-4566", "f8kf93jd");
private static final User USER_2 = new User("Jane Smith", "janesmith@janesmith.com", "0400 123 888", "123-4567", "5jgfdkl5");
private Library library;
private Library libraryMock;
private Book book;
private Movie movie;
@Before
public void setUp() throws Exception {
library = new LibraryImpl();
book = mock(Book.class);
movie = mock(Movie.class);
libraryMock = mock(LibraryImpl.class);
}
@Test
public void testCreateBookList() throws Exception {
assertEquals(BOOK_1, library.getBookList().get(0));
assertEquals(BOOK_2, library.getBookList().get(1));
}
@Test
public void testCreateMovieList() throws Exception {
assertEquals(MOVIE_1, library.getMovieList().get(0));
assertEquals(MOVIE_2, library.getMovieList().get(1));
}
@Test
public void testCreateUserList() throws Exception {
assertEquals(USER_1, library.getUserList().get(0));
assertEquals(USER_2, library.getUserList().get(1));
}
@Test
public void testCheckoutBook() throws Exception {
libraryMock.checkoutBook(book);
verify(libraryMock).checkoutBook(Matchers.eq(book));
verify(libraryMock, times(1)).checkoutBook(book);
}
@Test
public void testReturnBook() throws Exception {
libraryMock.returnBook(book);
verify(libraryMock).returnBook(Matchers.eq(book));
verify(libraryMock, times(1)).returnBook(book);
}
@Test(expected = BookNotReturnable.class)
public void testExceptionThrownWhenBookAlreadyReturned() throws Exception {
try {
library.returnBook(library.getAvailableBooks().get(0));
} catch (BookNotReturnable e) {
String message = "book is already returned";
assertEquals(message, e.getMessage());
throw e;
}
fail("BookNotReturnable Exception not thrown");
}
@Test(expected = BookNotBorrowable.class)
public void testExceptionThrownWhenBookBorrowedTwice() throws Exception {
try {
library.checkoutBook(library.getBookList().get(0));
library.checkoutBook(library.getBookList().get(0));
} catch (BookNotBorrowable e) {
String message = "book is not available";
assertEquals(message, e.getMessage());
throw e;
}
fail("BookNotBorrowable Exception not thrown");
}
@Test
public void testGetBorrowedBooks() throws Exception {
library.checkoutBook(library.getAvailableBooks().get(0));
assertTrue(library.getBorrowedBooks().contains(BOOK_1));
library.checkoutBook(library.getAvailableBooks().get(0));
assertTrue(library.getBorrowedBooks().contains(BOOK_2));
}
@Test
public void testGetAvailableBooks() throws Exception {
library.checkoutBook(library.getAvailableBooks().get(0));
assertFalse(library.getAvailableBooks().contains(BOOK_1));
library.checkoutBook(library.getAvailableBooks().get(0));
assertFalse(library.getAvailableBooks().contains(BOOK_2));
}
@Test
public void testCheckoutMovie() throws Exception {
libraryMock.checkoutMovie(movie);
verify(libraryMock).checkoutMovie(Matchers.eq(movie));
verify(libraryMock, times(1)).checkoutMovie(movie);
}
@Test
public void testGetAvailableMovies() throws Exception {
library.checkoutMovie(library.getAvailableMovies().get(0));
assertFalse(library.getAvailableMovies().contains(MOVIE_1));
library.checkoutMovie(library.getAvailableMovies().get(0));
assertFalse(library.getAvailableMovies().contains(MOVIE_2));
}
@Test
public void testGetBorrowedMovies() throws Exception {
library.checkoutMovie(library.getAvailableMovies().get(0));
assertTrue(library.getBorrowedMovies().contains(MOVIE_1));
library.checkoutMovie(library.getAvailableMovies().get(0));
assertTrue(library.getBorrowedMovies().contains(MOVIE_2));
}
@Test(expected = MovieNotBorrowable.class)
public void testExceptionThrownWhenMovieBorrowedTwice() throws Exception {
try {
library.checkoutMovie(library.getMovieList().get(0));
library.checkoutMovie(library.getMovieList().get(0));
} catch (MovieNotBorrowable e) {
String message = "movie is not available";
assertEquals(message, e.getMessage());
throw e;
}
fail("MovieNotBorrowable Exception not thrown");
}
@Test
public void testGetUserList() throws Exception {
library.getUserList().get(0);
assertTrue(library.getUserList().contains(USER_1));
library.getUserList().get(1);
assertTrue(library.getUserList().contains(USER_2));
}
}
|
package org.scijava.util;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.Collection;
import java.util.jar.JarEntry;
import java.util.jar.JarOutputStream;
import org.junit.Test;
/**
* Tests {@link FileUtils}.
*
* @author Curtis Rueden
* @author Johannes Schindelin
* @author Grant Harris
*/
public class FileUtilsTest {
private final static boolean isWindows =
System.getProperty("os.name").startsWith("Win");
@Test
public void testGetPath() {
// test that Windows-style paths get standardized
assertEquals("C:/path/to/my-windows-file", FileUtils.getPath(
"C:\\path\\to\\my-windows-file", "\\"));
// test that there are no changes to *nix-style paths
assertEquals("/path/to/my-nix-file", FileUtils.getPath(
"/path/to/my-nix-file", "/"));
// test that an already-standardized path stays good on Windows
assertEquals("/path/to/my-nix-file", FileUtils.getPath(
"/path/to/my-nix-file", "\\"));
}
@Test
public void testGetExtension() {
assertEquals("ext", FileUtils.getExtension("/path/to/file.ext"));
assertEquals("", FileUtils.getExtension("/path/to/file"));
assertEquals("a", FileUtils.getExtension("/etc/init.d/xyz/file.a"));
assertEquals("", FileUtils.getExtension("/etc/init.d/xyz/file"));
}
@Test
public void testURLToFile() throws MalformedURLException {
// verify that 'file:' URL works
final String jqpublic;
if (isWindows) {
jqpublic = "C:/Users/jqpublic/";
} else {
jqpublic = "/Users/jqpublic/";
}
final String filePath = jqpublic + "imagej/ImageJ.class";
final String fileURL = new File(filePath).toURI().toURL().toString();
final File fileFile = FileUtils.urlToFile(fileURL);
assertEqualsPath(filePath, fileFile.getPath());
// verify that file path with spaces works
final File spaceFileOriginal =
new File(jqpublic.replace("jqpublic", "Spaceman Spiff") + "stun/Blaster.class");
final URL spaceURL = spaceFileOriginal.toURI().toURL();
final File spaceFileResult = FileUtils.urlToFile(spaceURL);
assertEqualsPath(spaceFileOriginal.getPath(), spaceFileResult.getPath());
// verify that file path with various characters works
final String alphaLo = "abcdefghijklmnopqrstuvwxyz";
final String alphaHi = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
final String numbers = "1234567890";
final String special = "_~!@
final File specialFileOriginal = new File(jqpublic.replace("jqpublic", alphaLo) +
alphaHi + "/" + numbers + "/" + special + "/foo/Bar.class");
final URL specialURL = specialFileOriginal.toURI().toURL();
final File specialFileResult = FileUtils.urlToFile(specialURL);
assertEqualsPath(specialFileOriginal.getPath(), specialFileResult.getPath());
// verify that 'jar:' URL works
final String jarPath = "/Users/jqpublic/imagej/ij-core.jar";
final String jarURL = "jar:file:" + jarPath + "!/imagej/ImageJ.class";
final File jarFile = FileUtils.urlToFile(jarURL);
assertEqualsPath(jarPath, jarFile.getPath());
// verify that OSGi 'bundleresource:' URL fails
final String bundleURL =
"bundleresource://346.fwk2106232034:4/imagej/ImageJ.class";
try {
final File bundleFile = FileUtils.urlToFile(bundleURL);
fail("Expected exception not thrown; result=" + bundleFile);
}
catch (IllegalArgumentException exc) {
// NB: Expected behavior.
}
}
@Test
public void testShortenPath() {
assertEquals("C:\\Documents and Settings\\"
+ "All Users\\Application Data\\Apple Computer\\...\\SC Info.txt",
FileUtils.shortenPath("C:\\Documents and Settings\\All Users"
+ "\\Application Data\\Apple Computer\\iTunes\\SC Info\\SC Info.txt"));
assertEquals("C:\\Documents and Settings\\All Users\\Application Data\\"
+ "Apple Computer\\iTunes\\...\\SC Info.txt", FileUtils.shortenPath(
"C:\\Documents and Settings\\All Users\\"
+ "Application Data\\Apple Computer\\iTunes\\SC Info\\SC Info.txt", 5));
assertEquals("C:\\temp", FileUtils.shortenPath("C:\\temp"));
assertEquals("C:\\1\\2\\3\\4\\...\\test.txt", FileUtils
.shortenPath("C:\\1\\2\\3\\4\\5\\test.txt"));
assertEquals("C:/1/2/test.txt", FileUtils.shortenPath("C:/1/2/test.txt"));
assertEquals("C:/1/2/3/4/.../test.txt", FileUtils
.shortenPath("C:/1/2/3/4/5/test.txt"));
assertEquals("\\\\server\\p1\\p2\\p3\\p4\\...\\p6", FileUtils
.shortenPath("\\\\server\\p1\\p2\\p3\\p4\\p5\\p6"));
assertEquals("\\\\server\\p1\\p2\\p3", FileUtils
.shortenPath("\\\\server\\p1\\p2\\p3"));
assertEquals("http:
.shortenPath("http:
}
@Test
public void testLimitPath() {
assertEquals("C:\\Doc...SC Info.txt",
FileUtils
.limitPath("C:\\Documents and Settings\\All Users\\"
+ "Application Data\\Apple Computer\\iTunes\\SC Info\\SC Info.txt",
20));
assertEquals("C:\\temp", FileUtils.limitPath("C:\\temp", 20));
assertEquals("C:\\1\\2\\3\\...test.txt", FileUtils.limitPath(
"C:\\1\\2\\3\\4\\5\\test.txt", 20));
assertEquals("...testfile.txt", FileUtils.limitPath("C:/1/2/testfile.txt",
15));
assertEquals("C:/1...test.txt", FileUtils.limitPath(
"C:/1/2/3/4/5/test.txt", 15));
assertEquals("\\\\server\\p1\\p2\\...p6", FileUtils.limitPath(
"\\\\server\\p1\\p2\\p3\\p4\\p5\\p6", 20));
assertEquals("http:
"http:
}
@Test
public void testListContents() throws IOException, URISyntaxException {
// verify that listContents on a non-existent file returns the empty set
File nonExisting;
int i = 0;
for (;;) {
nonExisting = new File("" + i);
if (!nonExisting.exists()) break;
i++;
}
try {
Collection<URL> urls = FileUtils.listContents(nonExisting.toURI().toURL());
assertNotNull(urls);
assertEquals(0, urls.size());
} catch (MalformedURLException e) {
e.printStackTrace();
}
// write some items to a temporary .jar file
final String subDir = "sub directory/";
final String subSubDir = "more stuff/";
final File jarFile = File.createTempFile("listContentsTest", ".jar");
final FileOutputStream out = new FileOutputStream(jarFile);
final JarOutputStream jarOut = new JarOutputStream(out);
try {
jarOut.putNextEntry(new JarEntry(subDir));
jarOut.closeEntry();
jarOut.putNextEntry(new JarEntry(subDir + subSubDir));
jarOut.closeEntry();
// NB: This entry is not in the subdirectory, and should not be listed.
jarOut.putNextEntry(new JarEntry("foo.txt"));
jarOut.write("bar".getBytes());
jarOut.closeEntry();
// NB: The next two entries are directly beneath the subdirectory.
jarOut.putNextEntry(new JarEntry(subDir + "hello.txt"));
jarOut.write("world".getBytes());
jarOut.closeEntry();
jarOut.putNextEntry(new JarEntry(subDir + "rock.txt"));
jarOut.write("roll".getBytes());
jarOut.closeEntry();
// NB: The last two entries are beneath a second subdirectory,
// and should only be listed when the recurse flag is set to true.
jarOut.putNextEntry(new JarEntry(subDir + subSubDir + "fox.txt"));
jarOut.write("the quick brown fox".getBytes());
jarOut.closeEntry();
jarOut.putNextEntry(new JarEntry(subDir + subSubDir + "dog.txt"));
jarOut.write("jumps over the lazy dog".getBytes());
jarOut.closeEntry();
jarOut.close();
} finally {
out.close();
}
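// NB: "jar:" URLs take the form jar:<file-url>!/<entry>; the "!/" separates the archive location from the entry path.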
final String uriPath = new URI(null, null, "!/" + subDir, null).toString();
final String urlPath = "jar:" + jarFile.toURI().toURL() + uriPath;
final URL url = new URL(urlPath);
final URL subSubURL = new URL(urlPath + "more%20stuff/");
final URL helloURL = new URL(urlPath + "hello.txt");
final URL rockURL = new URL(urlPath + "rock.txt");
final URL foxURL = new URL(urlPath + "more%20stuff/fox.txt");
final URL dogURL = new URL(urlPath + "more%20stuff/dog.txt");
// check listContents: recursive without directories
final Collection<URL> setTT = FileUtils.listContents(url, true, true);
final URL[] listTT = setTT.toArray(new URL[setTT.size()]);
assertArrayEquals(new URL[] { helloURL, rockURL, foxURL, dogURL }, listTT);
// check listContents: recursive including directories
final Collection<URL> setTF = FileUtils.listContents(url, true, false);
final URL[] listTF = setTF.toArray(new URL[setTF.size()]);
assertArrayEquals(
new URL[] { subSubURL, helloURL, rockURL, foxURL, dogURL }, listTF);
// check listContents: non-recursive without directories
final Collection<URL> setFT = FileUtils.listContents(url, false, true);
final URL[] listFT = setFT.toArray(new URL[setFT.size()]);
assertArrayEquals(new URL[] { helloURL, rockURL }, listFT);
// check listContents: non-recursive including directories
final Collection<URL> setFF = FileUtils.listContents(url, false, false);
final URL[] listFF = setFF.toArray(new URL[setFF.size()]);
assertArrayEquals(new URL[] { subSubURL, helloURL, rockURL }, listFF);
// clean up
assertTrue(jarFile.delete());
}
private static void assertEqualsPath(final String a, final String b) {
if (isWindows) {
assertEquals(a.replace('\\', '/'), b.replace('\\', '/'));
} else {
assertEquals(a, b);
}
}
@Test
public void testStripVersionFromFilename() {
assertEquals("jars/bio-formats.jar", FileUtils.stripFilenameVersion("jars/bio-formats-4.4-imagej-2.0.0-beta1.jar"));
assertEquals(FileUtils.stripFilenameVersion("jars/ij-data-2.0.0.1-beta1.jar"), FileUtils.stripFilenameVersion("jars/ij-data-2.0.0.1-SNAPSHOT.jar"));
assertEquals(FileUtils.stripFilenameVersion("jars/ij-1.44.jar"), FileUtils.stripFilenameVersion("jars/ij-1.46b.jar"));
assertEquals(FileUtils.stripFilenameVersion("jars/javassist.jar"), FileUtils.stripFilenameVersion("jars/javassist-3.9.0.GA.jar"));
assertEquals(FileUtils.stripFilenameVersion("jars/javassist.jar"), FileUtils.stripFilenameVersion("jars/javassist-3.16.1-GA.jar"));
assertEquals(FileUtils.stripFilenameVersion("jars/bsh.jar"), FileUtils.stripFilenameVersion("jars/bsh-2.0b4.jar"));
assertEquals(FileUtils.stripFilenameVersion("jars/mpicbg.jar"), FileUtils.stripFilenameVersion("jars/mpicbg-20111128.jar"));
assertEquals(FileUtils.stripFilenameVersion("jars/miglayout-swing.jar"), FileUtils.stripFilenameVersion("jars/miglayout-3.7.3.1-swing.jar"));
}
}
|
package de.domisum.lib.auxilium.util;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
public class PHRTest
{
// TEST: ERRORS
@Test public void testErrorInvalidNumberOfArgs()
{
Assertions.assertThrows(IllegalArgumentException.class, ()->PHR.r("blah {} blah {} xd"));
Assertions.assertThrows(IllegalArgumentException.class, ()->PHR.r("blah {} blah {} xd", "meme"));
Assertions.assertThrows(IllegalArgumentException.class, ()->PHR.r("blah {} blah {} xd", 8, 8, 100));
}
// TEST: PROPER VALUES
@Test public void testSingleReplacement()
{
assertReplaceEquals("here I go", "here {} go", "I");
assertReplaceEquals("aha#asdf", "aha#{}df", "as");
assertReplaceEquals("topKek4", "topKek{}", 4);
}
@Test public void testMultiReplace()
{
assertReplaceEquals("some text goes here", "some {} goes {}", "text", "here");
assertReplaceEquals("multi replacements are very fun indeed, I'll have to admit",
"multi replacements {} {} fun {}, {} have to admit", "are", "very", "indeed", "I'll");
}
@Test public void testReplaceWithPlaceholder()
{
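// A replacement value that itself contains "{}" is inserted literally and not expanded again.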
assertReplaceEquals("here {} go", "here {} go", "{}");
assertReplaceEquals("some silly face: :-{} xddd", "some silly face: {} xddd", ":-{}");
}
private static void assertReplaceEquals(String expected, String withPlaceholders, Object... values)
{
String replaced = PHR.r(withPlaceholders, values);
Assertions.assertEquals(expected, replaced);
}
}
|
package me.abeyta.deckmanager.model;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.junit.Before;
import org.junit.Test;
public class CardTest {
private Card card;
@Before
public void setup() {
card = new Card(Suite.CLUBS, "K");
}
@Test
public void suiteGetterGetterDefaultConstructor() {
card = new Card();
card.setFaceValue("Ace");
card.setSuite(Suite.DIAMONDS);
assertEquals("Ace", card.getFaceValue());
assertEquals(Suite.DIAMONDS, card.getSuite());
}
@Test
public void constructorGetsSets() {
assertEquals("K", card.getFaceValue());
assertEquals(Suite.CLUBS, card.getSuite());
}
@Test
public void cardsAreEqualWhenSameSuiteValue() {
Card card2 = new Card(Suite.CLUBS, "K");
assertTrue("Cards should be equal", card.equals(card2));
}
@Test
public void cardsAreNotEqualWhenDifValue() {
Card card2 = new Card(Suite.CLUBS, "Q");
assertFalse("Cards should not be equal with different face values", card.equals(card2));
}
@Test
public void cardsAreNotEqualWhenDifSuiteValue() {
Card card2 = new Card(Suite.SPADES, "Q");
assertFalse("Cards should not be equal with different suite", card.equals(card2));
}
@Test
public void hashCodeTest() {
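// Smoke check only: a hash code of 0 would be legal, so this merely confirms the override runs and returns a value.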
assertTrue(card.hashCode() != 0);
}
@Test
public void toStringTest() {
assertEquals("Card[suite=CLUBS,faceValue=K]", card.toString());
}
}
|
package no.steria.quizzical;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.BufferedReader;
import java.io.PrintWriter;
import java.io.StringReader;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.junit.Ignore;
import org.junit.Test;
import org.mockito.Mockito;
public class AdminServletTest {
private HttpServletRequest req = mock(HttpServletRequest.class);
private HttpServletResponse resp = mock(HttpServletResponse.class);
private AdminServlet servlet = new AdminServlet();
@Test
public void shouldRetrieveQuizzesForUser() throws Exception {
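// Stub the request parameters and the Mongo DAOs so doGet can assemble user 1's quiz overview without touching a database.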
when(req.getParameter("mode")).thenReturn("2");
when(req.getParameter("userId")).thenReturn("1");
MongoUserDao mongoUserDao = mock(MongoUserDao.class);
User dummyUser = new User(1, "martin", null, null,Arrays.asList(new Integer(33)));
when(mongoUserDao.getUser(1)).thenReturn(dummyUser );
MongoQuizDao mongoQuizDao = mock(MongoQuizDao.class);
when(mongoQuizDao.getQuiz(33)).thenReturn(new Quiz(33,"DummyQuiz","description","sub", new ArrayList<Question>(), true));
MongoResponseDao mongoResponseDao = mock(MongoResponseDao.class);
when(mongoResponseDao.countResponsesForQuiz(33)).thenReturn(43);
servlet.setMongoUserDao(mongoUserDao);
servlet.setMongoQuizDao(mongoQuizDao);
servlet.setMongoResponseDao(mongoResponseDao);
when(resp.getWriter()).thenReturn(new PrintWriter(new StringWriter()));
servlet.doGet(req, resp);
}
@Ignore
public void shouldRecieveQuizFromAdminAddPage() throws Exception {
MongoUserDao mongoUserDao = mock(MongoUserDao.class);
User dummyUser = new User(1, "martin", null, null,Arrays.asList(new Integer(33)));
when(mongoUserDao.getUser(1)).thenReturn(dummyUser);
MongoQuizDao mongoQuizDao = mock(MongoQuizDao.class);
//when(mongoQuizDao.getQuiz(33)).thenReturn(new Quiz(33,"DummyQuiz","description","sub", new ArrayList<Question>(), true));
//MongoResponseDao mongoResponseDao = mock(MongoResponseDao.class);
//when(mongoResponseDao.countResponsesForQuiz(33)).thenReturn(43);
String s = "{\"quizId\": 9, \"quizName\":\"SteriaQuiz\",\"quizDesc\":\"Quiz om Steria\",\"submitMsg\":\"Takk\",\"questions\":[{\"id\":1,\"text\":\"Spm1\",\"alternatives\":[{\"aid\":1,\"atext\":\"svar1\"},{\"aid\":2,\"atext\":\"svar2\"}],\"answer\":\"2\"}], \"active\": true, \"userId\": 1}";
BufferedReader br = new BufferedReader(new StringReader(s));
when(req.getReader()).thenReturn(br);
Alternative alt1 = new Alternative(1, "svar1");
Alternative alt2 = new Alternative(2, "svar2");
List<Alternative> alternatives = new ArrayList<Alternative>();
alternatives.add(alt1);
alternatives.add(alt2);
Question q = new Question(1, "Spm1", alternatives, 2);
ArrayList<Question> qs = new ArrayList<Question>();
qs.add(q);
Quiz quiz = new Quiz(9, "SteriaQuiz", "Quiz om Steria", "Takk", qs, true);
servlet.setMongoUserDao(mongoUserDao);
servlet.setMongoQuizDao(mongoQuizDao);
//servlet.setMongoResponseDao(mongoResponseDao);
servlet.doPost(req, resp);
Mockito.verify(mongoQuizDao).insertQuizIntoDB(quiz, 1);
}
// @Ignore
// public void shouldRetrieveNumberOfRespondentsOnASpecificQuiz() throws Exception {
// when(req.getParameter("mode")).thenReturn("3");
// when(req.getParameter("quizId")).thenReturn("1");
// MongoUserDao mongoUserDao = mock(MongoUserDao.class);
// User dummyUser = new User(1, "martin", "eple", Arrays.asList(new Integer(33)));
// when(mongoUserDao.getUser(1)).thenReturn(dummyUser );
// MongoQuizDao mongoQuizDao = mock(MongoQuizDao.class);
// when(mongoQuizDao.getQuiz(33)).thenReturn(new Quiz(33,"DummyQuiz","description","sub", new ArrayList<Question>(), true));
// MongoResponseDao mongoResponseDao = mock(MongoResponseDao.class);
// when(mongoResponseDao.countResponsesForQuiz(33)).thenReturn(43);
// servlet.setMongoUserDao(mongoUserDao);
// servlet.setMongoQuizDao(mongoQuizDao);
// servlet.setMongoResponseDao(mongoResponseDao);
// when(resp.getWriter()).thenReturn(new PrintWriter(new StringWriter()));
// servlet.doGet(req, resp);
}
|
package org.devocative.samples.j8;
import org.junit.Test;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
import java.util.function.Function;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.LongStream;
import java.util.stream.Stream;
import static org.junit.Assert.assertEquals;
public class TestStream {
@Test
public void testFibonacci() {
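// Each stream element is the pair {fib(n), fib(n+1)}; iterate() keeps sliding that window one step forward.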
Stream<int[]> iterate;
iterate = Stream.iterate(new int[]{1, 1}, n -> new int[]{n[1], n[0] + n[1]});
int nth = iterate
.peek(n -> System.out.printf("Debug: %s \n", Arrays.toString(n)))
.limit(5)
.reduce((a, b) -> b)
.get()[1];
assertEquals(8, nth);
iterate = Stream.iterate(new int[]{1, 1}, n -> new int[]{n[1], n[0] + n[1]});
List<Integer> list = iterate
.limit(5)
.map(n -> n[1])
//.collect(ArrayList::new, ArrayList::add, ArrayList::addAll)
.collect(Collectors.toList());
assertEquals(list, Arrays.asList(1, 2, 3, 5, 8));
}
@Test
public void test_Files_FlatMap_Distinct_Sorted_Reduction() throws IOException {
final String content = "test01 passed\ntest02 passed\ntest11 failed";
final String grepped = "test01 passed\ntest11 failed";
final List<String> words =
Arrays.asList("test01", "passed", "test02", "passed", "test11", "failed");
final List<String> distinctWords =
Arrays.asList("test01", "passed", "test02", "test11", "failed");
final List<String> sortedDistinctWords =
Arrays.asList("test11", "test02", "test01", "passed", "failed");
final Path file = Files.createTempFile("__", "__");
Files.write(file, content.getBytes());
// Grepping lines containing '1'
try (Stream<String> lines = Files.lines(file)) {
String result = lines
.filter(line -> line.contains("1"))
.collect(Collectors.joining("\n"));
assertEquals(grepped, result);
}
// List of words
try (Stream<String> lines = Files.lines(file)) {
List<String> result = lines
.flatMap(line -> Stream.of(line.split("\\s")))
.collect(Collectors.toList());
assertEquals(words, result);
}
// List of distinct words
try (Stream<String> lines = Files.lines(file)) {
List<String> result = lines
.flatMap(line -> Stream.of(line.split("\\s")))
.distinct()
.collect(Collectors.toList());
assertEquals(distinctWords, result);
}
// List of distinct & descending-sorted words
try (Stream<String> lines = Files.lines(file)) {
List<String> result = lines
.flatMap(line -> Stream.of(line.split("\\s")))
.distinct()
.sorted(Comparator.reverseOrder())
.collect(Collectors.toList());
assertEquals(sortedDistinctWords, result);
}
// List of distinct & descending-sorted words
try (Stream<String> lines = Files.lines(file)) {
String result = lines
.flatMap(line -> Stream.of(line.split("\\s")))
.distinct()
.sorted(Comparator.reverseOrder())
.findFirst() // min(Comparator.reverseOrder()) instead of sorted() & findFirst()
.get();
assertEquals("test11", result);
}
// Count number of words
try (Stream<String> lines = Files.lines(file)) {
long result = lines
.flatMap(line -> Stream.of(line.split("\\s")))
.count();
assertEquals(words.size(), result);
}
// Count number of characters of words (1/2)
String fileAsStr = new String(Files.readAllBytes(file));
long result = Pattern.compile("\\s")
.splitAsStream(fileAsStr)
.mapToLong(String::length)
.sum();
assertEquals(36, result);
// Count number of characters of words (2/2)
fileAsStr = new String(Files.readAllBytes(file));
result = Pattern.compile("\\s")
.splitAsStream(fileAsStr)
.reduce(0L,
(total, word) -> total + word.length(),
(total1, total2) -> total1 + total2);
assertEquals(36, result);
}
@Test
public void testFactorial() {
long result = LongStream
//.range(1, 5) [1, 5)
.rangeClosed(1, 5) // [1, 5]
.reduce((left, right) -> left * right)
.getAsLong();
assertEquals(120, result);
result = LongStream
//.range(1, 5) [1, 5)
.rangeClosed(1, 5) // [1, 5]
.reduce(1, (left, right) -> left * right);
assertEquals(120, result);
}
@Test
public void testCollectors() {
List<Employee> list = Arrays.asList(
new Employee("John", 5000),
new Employee("Jack", 6000),
new Employee("Jack", 7000),
new Employee("Bill", 3000));
Map<String, Employee> name2employee = list.stream()
.collect(Collectors.toMap(Employee::getName, Function.identity(), (curV, newV) -> newV));
assertEquals(3, name2employee.size());
assertEquals(7000, name2employee.get("Jack").getSalary().intValue());
final Map<String, List<Employee>> name2employees = list.stream()
.collect(Collectors.groupingBy(Employee::getName, LinkedHashMap::new, Collectors.toList()));
assertEquals("John", name2employees.keySet().stream().findFirst().get());
assertEquals(3, name2employees.size());
assertEquals(1, name2employees.get("Bill").size());
assertEquals(2, name2employees.get("Jack").size());
final int averageSalary = (int) list.stream()
.mapToInt(Employee::getSalary)
.average()
.getAsDouble();
assertEquals(5250, averageSalary);
final Map<Boolean, List<Employee>> highSalaryEmployees = list.stream()
.collect(Collectors.partitioningBy(emp -> emp.getSalary() > averageSalary));
assertEquals(2, highSalaryEmployees.get(true).size());
assertEquals(2, highSalaryEmployees.get(false).size());
}
class Employee {
private String name;
private Integer salary;
Employee(String name, Integer salary) {
this.name = name;
this.salary = salary;
}
String getName() {
return name;
}
Integer getSalary() {
return salary;
}
@Override
public String toString() {
return getName() + ", " + getSalary();
}
}
}
|
package org.redisson;
import java.io.IOException;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.redisson.client.codec.Codec;
import org.redisson.codec.JsonJacksonCodec;
import org.redisson.codec.SerializationCodec;
import org.redisson.core.RCountDownLatch;
import org.redisson.core.RLock;
import java.util.Arrays;
import java.util.concurrent.CountDownLatch;
@RunWith(Parameterized.class)
public class RedissonTwoLockedThread {
@Parameterized.Parameters(name= "{index} - {0}")
public static Iterable<Object[]> data() {
return Arrays.asList(new Object[][] {{new JsonJacksonCodec()}, {new SerializationCodec()}});
}
@Parameterized.Parameter(0)
public Codec codec;
private RedissonClient redisson;
@Before
public void before() throws IOException, InterruptedException {
RedisRunner.startDefaultRedisTestInstance();
Config config = BaseTest.createConfig();
config.setCodec(codec);
redisson = Redisson.create(config);
}
@After
public void after() throws InterruptedException {
redisson.shutdown();
RedisRunner.shutDownDefaultRedisTestInstance();
}
@Test(timeout = 3000)
public void testLock() throws InterruptedException {
final String lockName = "lock1";
final CountDownLatch startSignal = new CountDownLatch(1);
final CountDownLatch testSignal = new CountDownLatch(1);
final CountDownLatch completeSignal = new CountDownLatch(2);
System.out.println("configure");
final long millis = System.currentTimeMillis();
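// Latch choreography: startSignal releases thread 1, testSignal releases thread 2 only after thread 1 holds the lock,
// and completeSignal lets the main thread wait for both workers to finish.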
new Thread() {
@Override
public void run() {
try {
startSignal.await();
RLock lock = redisson.getLock(lockName);
System.out.println("1. getlock " + lock.getName() + " - " + Thread.currentThread().getId());
lock.lock();
System.out.println("1. lock " + lock.getName() + " - " + Thread.currentThread().getId());
testSignal.countDown();
Thread.sleep(500);
lock.unlock();
System.out.println("1. unlock " + lock.getName() + " - " + Thread.currentThread().getId());
} catch (InterruptedException e) {
e.printStackTrace();
}
completeSignal.countDown();
}
}.start();
new Thread() {
@Override
public void run() {
try {
testSignal.await();
RLock lock = redisson.getLock(lockName);
System.out.println("2. getlock " + lock.getName() + " - " + Thread.currentThread().getId());
lock.lock();
System.out.println("2. lock " + lock.getName() + " - " + Thread.currentThread().getId());
long current = System.currentTimeMillis();
Assert.assertTrue("current=" + current + ", millis=" + millis, current - millis >= 500);
Thread.sleep(500);
lock.unlock();
System.out.println("2. unlock " + lock.getName() + " - " + Thread.currentThread().getId());
} catch (InterruptedException e) {
e.printStackTrace();
}
completeSignal.countDown();
}
}.start();
System.out.println("start");
startSignal.countDown();
completeSignal.await();
System.out.println("complete");
}
@Test(timeout = 3000)
public void testCountDown() throws InterruptedException {
final String countDownName = getClass().getName() + ":countDown"; // NB: a suffix after this literal was truncated in extraction
final CountDownLatch startSignal = new CountDownLatch(1);
final CountDownLatch testSignal = new CountDownLatch(1);
final CountDownLatch completeSignal = new CountDownLatch(2);
System.out.println("configure");
final long millis = System.currentTimeMillis();
new Thread() {
@Override
public void run() {
try {
startSignal.await();
RCountDownLatch countDownLatch = redisson.getCountDownLatch(countDownName);
System.out.println("1. getCountDownLatch " + countDownLatch.getName() + " - " + Thread.currentThread().getId());
countDownLatch.trySetCount(1);
System.out.println("1. trySetCount " + countDownLatch.getName() + " - " + Thread.currentThread().getId());
Thread.sleep(500);
testSignal.countDown();
Thread.sleep(500);
System.out.println("1. sleep " + countDownLatch.getName() + " - " + Thread.currentThread().getId());
countDownLatch.countDown();
System.out.println("1. countDown " + countDownLatch.getName() + " - " + Thread.currentThread().getId());
} catch (InterruptedException e) {
e.printStackTrace();
}
completeSignal.countDown();
}
}.start();
new Thread() {
@Override
public void run() {
try {
testSignal.await();
RCountDownLatch countDownLatch = redisson.getCountDownLatch(countDownName);
System.out.println("2. getCountDownLatch " + countDownLatch.getName() + " - " + Thread.currentThread().getId());
countDownLatch.await();
System.out.println("2. await " + countDownLatch.getName() + " - " + Thread.currentThread().getId());
long current = System.currentTimeMillis();
Assert.assertTrue("current=" + current + ", millis=" + millis, (current - millis) >= 1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
completeSignal.countDown();
}
}.start();
System.out.println("start");
startSignal.countDown();
completeSignal.await();
System.out.println("complete");
}
}
|
package ru.r2cloud.satellite;
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.file.FileSystems;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TimeZone;
import java.util.TreeMap;
import java.util.UUID;
import ru.r2cloud.CelestrakServer;
import ru.r2cloud.TestUtil;
import ru.r2cloud.it.util.BaseTest;
import ru.r2cloud.model.ObservationRequest;
import ru.r2cloud.model.Satellite;
import ru.r2cloud.predict.PredictOreKit;
import ru.r2cloud.tle.CelestrakClient;
import ru.r2cloud.tle.TLEDao;
import ru.r2cloud.util.Configuration;
public class UtilizationTest {
public static void main(String[] args) throws Exception {
Configuration config;
File userSettingsLocation = new File("target/.r2cloud-" + UUID.randomUUID().toString());
try (InputStream is = BaseTest.class.getClassLoader().getResourceAsStream("config-dev.properties")) {
config = new Configuration(is, userSettingsLocation.getAbsolutePath(), FileSystems.getDefault());
}
config.setProperty("locaiton.lat", "56.189");
config.setProperty("locaiton.lon", "38.174");
CelestrakServer celestrak = new CelestrakServer();
celestrak.start();
celestrak.mockResponse(TestUtil.loadExpected("tle-2020-09-27.txt"));
PredictOreKit predict = new PredictOreKit(config);
SatelliteDao satelliteDao = new SatelliteDao(config);
TLEDao tleDao = new TLEDao(config, satelliteDao, new CelestrakClient(celestrak.getUrl()));
tleDao.start();
ObservationFactory factory = new ObservationFactory(predict, tleDao, config);
List<Satellite> enabledByDefault = getDefaultEnabled(satelliteDao);
System.out.println("default: ");
while (!enabledByDefault.isEmpty()) {
float utilization = calculateUtilization(satelliteDao, factory, enabledByDefault);
System.out.println(enabledByDefault.size() + " " + utilization);
enabledByDefault.remove(0);
}
System.out.println("70cm: ");
List<Satellite> cm = loadFromFile(satelliteDao, "70cm-satellites.txt");
calculatePercentTotal(satelliteDao, factory, cm);
while (!cm.isEmpty()) {
float utilization = calculateUtilization(satelliteDao, factory, cm);
System.out.println(cm.size() + " " + utilization);
cm.remove(0);
}
celestrak.stop();
}
private static List<Satellite> loadFromFile(SatelliteDao satelliteDao, String file) throws Exception {
List<Satellite> result = new ArrayList<>();
try (BufferedReader r = new BufferedReader(new InputStreamReader(UtilizationTest.class.getClassLoader().getResourceAsStream(file)))) {
String curLine = null;
while ((curLine = r.readLine()) != null) {
result.add(satelliteDao.findByName(curLine.trim()));
}
}
return result;
}
private static void calculatePercentTotal(SatelliteDao satelliteDao, ObservationFactory factory, List<Satellite> satellites) throws ParseException {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
long start = sdf.parse("2020-09-27 11:13:00").getTime();
long end = start + 2 * 24 * 60 * 60 * 1000;
for (int i = 0; i < 5; i++) {
long total = 0;
List<ObservationRequest> happened = calculateObservations(satelliteDao, factory, satellites, start, end);
Map<Long, Long> totalBySatellite = new TreeMap<>();
for (ObservationRequest req : happened) {
long observationTime = req.getEndTimeMillis() - req.getStartTimeMillis();
total += observationTime;
Long prevSat = totalBySatellite.get(Long.valueOf(req.getSatelliteId()));
if (prevSat == null) {
prevSat = 0L;
}
prevSat += observationTime;
totalBySatellite.put(Long.valueOf(req.getSatelliteId()), prevSat);
}
StringBuilder str = new StringBuilder();
for (Entry<Long, Long> cur : totalBySatellite.entrySet()) {
str.append(cur.getValue() / (float) total).append(" ");
}
System.out.println(str.toString().trim());
start = end;
end += 2 * 24 * 60 * 60 * 1000;
}
}
private static float calculateUtilization(SatelliteDao satelliteDao, ObservationFactory factory, List<Satellite> satellites) throws ParseException {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
long start = sdf.parse("2020-09-27 11:13:00").getTime();
long end = sdf.parse("2020-09-29 11:13:00").getTime(); // +2 days
List<ObservationRequest> happened = calculateObservations(satelliteDao, factory, satellites, start, end);
long total = end - start;
long utilized = 0;
for (ObservationRequest cur : happened) {
utilized += (cur.getEndTimeMillis() - cur.getStartTimeMillis());
}
return (utilized / (float) total);
}
private static List<ObservationRequest> calculateObservations(SatelliteDao satelliteDao, ObservationFactory factory, List<Satellite> satellites, long start, long end) {
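// Greedy simulation: seed one pass per satellite, then repeatedly take the earliest request and schedule that
// satellite's next non-overlapping pass, stopping once the next pass would start beyond the end of the window.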
Schedule<ScheduledObservation> schedule = new Schedule<>();
List<ObservationRequest> initialRequests = new ArrayList<>();
for (Satellite cur : satellites) {
ObservationRequest req = create(factory, schedule, start, cur, false);
if (req == null) {
continue;
}
initialRequests.add(req);
schedule.add(new ScheduledObservation(req, null, null, null, null));
}
Collections.sort(initialRequests, ObservationRequestComparator.INSTANCE);
List<ObservationRequest> happened = new ArrayList<>();
while (!initialRequests.isEmpty()) {
ObservationRequest cur = initialRequests.remove(0);
happened.add(cur);
ObservationRequest next = create(factory, schedule, cur.getEndTimeMillis(), satelliteDao.findById(cur.getSatelliteId()), false);
if (next == null) {
continue;
}
if (next.getStartTimeMillis() > end) {
continue;
}
initialRequests.add(next);
schedule.add(new ScheduledObservation(next, null, null, null, null));
Collections.sort(initialRequests, ObservationRequestComparator.INSTANCE);
}
return happened;
}
private static List<Satellite> getDefaultEnabled(SatelliteDao dao) {
List<Satellite> result = new ArrayList<>();
for (Satellite cur : dao.findAll()) {
if (!cur.isEnabled()) {
continue;
}
// this satellite can't be visible on the tested ground station
if (cur.getId().equals("44365")) {
continue;
}
result.add(cur);
}
return result;
}
// copy from scheduler to simulate utilization
private static ObservationRequest create(ObservationFactory factory, Schedule<ScheduledObservation> schedule, long current, Satellite cur, boolean immediately) {
long next = current;
while (!Thread.currentThread().isInterrupted()) {
ObservationRequest observation = factory.create(new Date(next), cur, immediately);
if (observation == null) {
return null;
}
ScheduledObservation overlapped = schedule.getOverlap(observation.getStartTimeMillis(), observation.getEndTimeMillis());
if (overlapped == null) {
return observation;
}
if (immediately) {
overlapped.cancel();
return observation;
}
// find next
next = observation.getEndTimeMillis();
}
return null;
}
}
|
package seedu.address.logic;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static seedu.task.commons.core.Messages.MESSAGE_INVALID_COMMAND_FORMAT;
import static seedu.task.commons.core.Messages.MESSAGE_INVALID_TASK_DISPLAYED_INDEX;
import static seedu.task.commons.core.Messages.MESSAGE_UNKNOWN_COMMAND;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import com.google.common.eventbus.Subscribe;
import seedu.task.commons.core.EventsCenter;
import seedu.task.commons.events.model.TaskManagerChangedEvent;
import seedu.task.commons.events.ui.JumpToListRequestEvent;
import seedu.task.commons.events.ui.ShowHelpRequestEvent;
import seedu.task.logic.Logic;
import seedu.task.logic.LogicManager;
import seedu.task.logic.commands.AddCommand;
import seedu.task.logic.commands.ClearCommand;
import seedu.task.logic.commands.Command;
import seedu.task.logic.commands.CommandResult;
import seedu.task.logic.commands.DeleteCommand;
import seedu.task.logic.commands.ExitCommand;
import seedu.task.logic.commands.FindCommand;
import seedu.task.logic.commands.HelpCommand;
import seedu.task.logic.commands.ListCommand;
import seedu.task.logic.commands.ListCompleteCommand;
import seedu.task.logic.commands.ListIncompleteCommand;
import seedu.task.logic.commands.SelectCommand;
import seedu.task.logic.commands.exceptions.CommandException;
import seedu.task.model.Model;
import seedu.task.model.ModelManager;
import seedu.task.model.ReadOnlyTaskManager;
import seedu.task.model.YTomorrow;
import seedu.task.model.tag.Tag;
import seedu.task.model.tag.UniqueTagList;
import seedu.task.model.task.EndDate;
import seedu.task.model.task.Group;
import seedu.task.model.task.Name;
import seedu.task.model.task.ReadOnlyTask;
import seedu.task.model.task.StartDate;
import seedu.task.model.task.Task;
import seedu.task.storage.StorageManager;
public class LogicManagerTest {
@Rule
public TemporaryFolder saveFolder = new TemporaryFolder();
private Model model;
private Logic logic;
//These are for checking the correctness of the events raised
private ReadOnlyTaskManager latestSavedAddressBook;
private boolean helpShown;
private int targetedJumpIndex;
@Subscribe
private void handleLocalModelChangedEvent(TaskManagerChangedEvent abce) {
latestSavedAddressBook = new YTomorrow(abce.data);
}
@Subscribe
private void handleShowHelpRequestEvent(ShowHelpRequestEvent she) {
helpShown = true;
}
@Subscribe
private void handleJumpToListRequestEvent(JumpToListRequestEvent je) {
targetedJumpIndex = je.targetIndex;
}
@Before
public void setUp() {
model = new ModelManager();
String tempAddressBookFile = saveFolder.getRoot().getPath() + "TempAddressBook.xml";
String tempPreferencesFile = saveFolder.getRoot().getPath() + "TempPreferences.json";
logic = new LogicManager(model, new StorageManager(tempAddressBookFile, tempPreferencesFile));
EventsCenter.getInstance().registerHandler(this);
latestSavedAddressBook = new YTomorrow(model.getTaskManager()); // last saved assumed to be up to date
helpShown = false;
targetedJumpIndex = -1; // none yet
}
@After
public void tearDown() {
EventsCenter.clearSubscribers();
}
@Test
public void execute_invalid() {
String invalidCommand = " ";
assertCommandFailure(invalidCommand, String.format(MESSAGE_INVALID_COMMAND_FORMAT, HelpCommand.MESSAGE_USAGE));
}
/**
* Executes the command, confirms that a CommandException is not thrown and that the result message is correct.
* Also confirms that both the 'address book' and the 'last shown list' are as specified.
* @see #assertCommandBehavior(boolean, String, String, ReadOnlyTaskManager, List)
*/
private void assertCommandSuccess(String inputCommand, String expectedMessage,
ReadOnlyTaskManager expectedAddressBook,
List<? extends ReadOnlyTask> expectedShownList) {
assertCommandBehavior(false, inputCommand, expectedMessage, expectedAddressBook, expectedShownList);
}
//@@author A0164466X
/**
* Executes the command, confirms that a CommandException is not thrown and that the result message is correct.
*/
private void assertCommandMessageSuccess(String inputCommand, String expectedMessage) {
assertCommandBehavior(false, inputCommand, expectedMessage);
}
//@@author
/**
* Executes the command, confirms that a CommandException is thrown and that the result message is correct.
* Both the 'address book' and the 'last shown list' are verified to be unchanged.
* @see #assertCommandBehavior(boolean, String, String, ReadOnlyTaskManager, List)
*/
private void assertCommandFailure(String inputCommand, String expectedMessage) {
YTomorrow expectedAddressBook = new YTomorrow(model.getTaskManager());
List<ReadOnlyTask> expectedShownList = new ArrayList<>(model.getFilteredTaskList());
assertCommandBehavior(true, inputCommand, expectedMessage, expectedAddressBook, expectedShownList);
}
/**
* Executes the command, confirms that the result message is correct
* and that a CommandException is thrown if expected
* and also confirms that the following three parts of the LogicManager object's state are as expected:<br>
* - the internal address book data are same as those in the {@code expectedAddressBook} <br>
* - the backing list shown by UI matches the {@code shownList} <br>
* - {@code expectedAddressBook} was saved to the storage file. <br>
*/
private void assertCommandBehavior(boolean isCommandExceptionExpected, String inputCommand, String expectedMessage,
ReadOnlyTaskManager expectedAddressBook,
List<? extends ReadOnlyTask> expectedShownList) {
try {
CommandResult result = logic.execute(inputCommand);
assertFalse("CommandException expected but was not thrown.", isCommandExceptionExpected);
assertEquals(expectedMessage, result.feedbackToUser);
} catch (CommandException e) {
assertTrue("CommandException not expected but was thrown.", isCommandExceptionExpected);
assertEquals(expectedMessage, e.getMessage());
}
//Confirm the ui display elements should contain the right data
assertEquals(expectedShownList, model.getFilteredTaskList());
//Confirm the state of data (saved and in-memory) is as expected
assertEquals(expectedAddressBook, model.getTaskManager());
assertEquals(expectedAddressBook, latestSavedAddressBook);
}
//@@author A0164466X
/**
* Executes the command, confirms that the result message is correct
* and that a CommandException is thrown if expected
*/
private void assertCommandBehavior(boolean isCommandExceptionExpected, String inputCommand, String expectedMessage) {
try {
CommandResult result = logic.execute(inputCommand);
assertFalse("CommandException expected but was not thrown.", isCommandExceptionExpected);
assertEquals(expectedMessage, result.feedbackToUser);
} catch (CommandException e) {
assertTrue("CommandException not expected but was thrown.", isCommandExceptionExpected);
assertEquals(expectedMessage, e.getMessage());
}
}
//@@author
@Test
public void execute_unknownCommandWord() {
String unknownCommand = "uicfhmowqewca";
assertCommandFailure(unknownCommand, MESSAGE_UNKNOWN_COMMAND);
}
@Test
public void execute_help() {
assertCommandSuccess("help", HelpCommand.SHOWING_HELP_MESSAGE, new YTomorrow(), Collections.emptyList());
assertTrue(helpShown);
}
@Test
public void execute_exit() {
assertCommandSuccess("exit", ExitCommand.MESSAGE_EXIT_ACKNOWLEDGEMENT,
new YTomorrow(), Collections.emptyList());
}
//@@author A0164032U
@Test
public void execute_clear() throws Exception {
TestDataHelper helper = new TestDataHelper();
model.addTask(helper.generatePerson(1));
model.addTask(helper.generatePerson(2));
model.addTask(helper.generatePerson(3));
assertCommandSuccess("clear all", ClearCommand.MESSAGE_SUCCESS_ALL, new YTomorrow(), Collections.emptyList());
}
//@@author
//@@author A0164032U
@Test
public void execute_add_invalidArgsFormat() {
String expectedMessage = String.format(MESSAGE_INVALID_COMMAND_FORMAT, AddCommand.MESSAGE_USAGE);
assertCommandFailure("add wrong args wrong args", AddCommand.MESSAGE_NOGROUP+expectedMessage); // missing group
assertCommandFailure("add in learning", AddCommand.MESSAGE_NONAME+expectedMessage); // missing name
assertCommandFailure("add Valid Name from 12.12 in onlyStartTime", AddCommand.MESSAGE_ILLEGAL_TIME_PARAMS+expectedMessage); // missing endDate
}
//@@author
//@@author A0164032U
@Test
public void execute_add_invalidPersonData() {
assertCommandFailure("add ??? in valid group",
AddCommand.MESSAGE_NONAME + String.format(MESSAGE_INVALID_COMMAND_FORMAT, AddCommand.MESSAGE_USAGE));
}
//@@author A0164032U
@Test
public void execute_add_successful() throws Exception {
// setup expectations
TestDataHelper helper = new TestDataHelper();
Task toBeAdded = helper.adam();
YTomorrow expectedAB = new YTomorrow();
expectedAB.addTask(toBeAdded);
// execute command and verify result
assertCommandSuccess(helper.generateAddCommand(toBeAdded),
String.format(AddCommand.MESSAGE_SUCCESS
+ (toBeAdded.hasPassed() ? "\n" + AddCommand.MESSAGE_PASSEDDATE : ""), toBeAdded),
expectedAB,
expectedAB.getTaskList());
}
//@@author
@Test
public void execute_addDuplicate_notAllowed() throws Exception {
// setup expectations
TestDataHelper helper = new TestDataHelper();
Task toBeAdded = helper.adam();
// setup starting state
model.addTask(toBeAdded); // person already in internal address book
// execute command and verify result
assertCommandFailure(helper.generateAddCommand(toBeAdded), AddCommand.MESSAGE_DUPLICATE_TASK);
}
@Test
public void execute_list_showsAllPersons() throws Exception {
// prepare expectations
TestDataHelper helper = new TestDataHelper();
YTomorrow expectedAB = helper.generateAddressBook(2);
List<? extends ReadOnlyTask> expectedList = expectedAB.getTaskList();
// prepare address book state
helper.addToModel(model, 2);
assertCommandSuccess("list",
ListCommand.MESSAGE_SUCCESS,
expectedAB,
expectedList);
}
//@@author A0164466X
@Test
public void execute_listIncomplete_messageTest() throws Exception {
assertCommandMessageSuccess("li", ListIncompleteCommand.MESSAGE_SUCCESS);
}
@Test
public void execute_listComplete_messageTest() throws Exception {
assertCommandMessageSuccess("lc", ListCompleteCommand.MESSAGE_SUCCESS);
}
//@@author
/**
* Confirms the 'invalid argument index number behaviour' for the given command
* targeting a single person in the shown list, using visible index.
* @param commandWord to test assuming it targets a single person in the last shown list
* based on visible index.
*/
private void assertIncorrectIndexFormatBehaviorForCommand(String commandWord, String expectedMessage)
throws Exception {
assertCommandFailure(commandWord , expectedMessage); //index missing
assertCommandFailure(commandWord + " +1", expectedMessage); //index should be unsigned
assertCommandFailure(commandWord + " -1", expectedMessage); //index should be unsigned
assertCommandFailure(commandWord + " 0", expectedMessage); //index cannot be 0
assertCommandFailure(commandWord + " not_a_number", expectedMessage);
}
/**
* Confirms the 'invalid argument index number behaviour' for the given command
* targeting a single person in the shown list, using visible index.
* @param commandWord to test assuming it targets a single person in the last shown list
* based on visible index.
*/
private void assertIndexNotFoundBehaviorForCommand(String commandWord) throws Exception {
String expectedMessage = MESSAGE_INVALID_TASK_DISPLAYED_INDEX;
TestDataHelper helper = new TestDataHelper();
List<Task> personList = helper.generatePersonList(2);
// set AB state to 2 persons
model.resetData(new YTomorrow());
for (Task p : personList) {
model.addTask(p);
}
assertCommandFailure(commandWord + " 3", expectedMessage);
}
@Test
public void execute_selectInvalidArgsFormat_errorMessageShown() throws Exception {
String expectedMessage = String.format(MESSAGE_INVALID_COMMAND_FORMAT, SelectCommand.MESSAGE_USAGE);
assertIncorrectIndexFormatBehaviorForCommand("select", expectedMessage);
}
@Test
public void execute_selectIndexNotFound_errorMessageShown() throws Exception {
assertIndexNotFoundBehaviorForCommand("select");
}
@Test
public void execute_select_jumpsToCorrectPerson() throws Exception {
TestDataHelper helper = new TestDataHelper();
List<Task> threePersons = helper.generatePersonList(3);
YTomorrow expectedAB = helper.generateAddressBook(threePersons);
helper.addToModel(model, threePersons);
assertCommandSuccess("select 2",
String.format(SelectCommand.MESSAGE_SELECT_TASK_SUCCESS, 2),
expectedAB,
expectedAB.getTaskList());
assertEquals(1, targetedJumpIndex);
assertEquals(model.getFilteredTaskList().get(1), threePersons.get(1));
}
//@@author A0164032U
@Test
public void execute_deleteInvalidArgsFormat_errorMessageShown() throws Exception {
String expectedMessage = String.format(MESSAGE_INVALID_COMMAND_FORMAT, DeleteCommand.MESSAGE_USAGE);
assertCommandFailure("delete", expectedMessage);
}
@Test
public void execute_deleteIndexNotFound_errorMessageShown() throws Exception {
assertIndexNotFoundBehaviorForCommand("delete");
}
@Test
public void execute_delete_removesCorrectPerson() throws Exception {
TestDataHelper helper = new TestDataHelper();
List<Task> threePersons = helper.generatePersonList(3);
YTomorrow expectedAB = helper.generateAddressBook(threePersons);
expectedAB.removeTask(threePersons.get(1));
helper.addToModel(model, threePersons);
assertCommandSuccess("delete 2",
String.format(DeleteCommand.MESSAGE_DELETE_TASK_SUCCESS, threePersons.get(1)),
expectedAB,
expectedAB.getTaskList());
}
@Test
public void execute_find_invalidArgsFormat() {
String expectedMessage = String.format(MESSAGE_INVALID_COMMAND_FORMAT, FindCommand.MESSAGE_USAGE);
assertCommandFailure("find ", expectedMessage);
}
@Test
public void execute_find_onlyMatchesFullWordsInNames() throws Exception {
TestDataHelper helper = new TestDataHelper();
Task pTarget1 = helper.generatePersonWithName("bla bla KEY bla");
Task pTarget2 = helper.generatePersonWithName("bla KEY bla bceofeia");
Task p1 = helper.generatePersonWithName("KE Y");
Task p2 = helper.generatePersonWithName("KEYKEYKEY sduauo");
List<Task> fourPersons = helper.generatePersonList(p1, pTarget1, p2, pTarget2);
YTomorrow expectedAB = helper.generateAddressBook(fourPersons);
List<Task> expectedList = helper.generatePersonList(pTarget1, pTarget2);
helper.addToModel(model, fourPersons);
assertCommandSuccess("find KEY",
Command.getMessageForTaskListShownSummary(expectedList.size()),
expectedAB,
expectedList);
}
@Test
public void execute_find_isNotCaseSensitive() throws Exception {
TestDataHelper helper = new TestDataHelper();
Task p1 = helper.generatePersonWithName("bla bla KEY bla");
Task p2 = helper.generatePersonWithName("bla KEY bla bceofeia");
Task p3 = helper.generatePersonWithName("key key");
Task p4 = helper.generatePersonWithName("KEy sduauo");
List<Task> fourPersons = helper.generatePersonList(p3, p1, p4, p2);
YTomorrow expectedAB = helper.generateAddressBook(fourPersons);
List<Task> expectedList = fourPersons;
helper.addToModel(model, fourPersons);
assertCommandSuccess("find KEY",
Command.getMessageForTaskListShownSummary(expectedList.size()),
expectedAB,
expectedList);
}
@Test
public void execute_find_matchesIfAnyKeywordPresent() throws Exception {
TestDataHelper helper = new TestDataHelper();
Task pTarget1 = helper.generatePersonWithName("bla bla KEY bla");
Task pTarget2 = helper.generatePersonWithName("bla rAnDoM bla bceofeia");
Task pTarget3 = helper.generatePersonWithName("key key");
Task p1 = helper.generatePersonWithName("sduauo");
List<Task> fourPersons = helper.generatePersonList(pTarget1, p1, pTarget2, pTarget3);
YTomorrow expectedAB = helper.generateAddressBook(fourPersons);
List<Task> expectedList = helper.generatePersonList(pTarget1, pTarget2, pTarget3);
helper.addToModel(model, fourPersons);
assertCommandSuccess("find key rAnDoM",
Command.getMessageForTaskListShownSummary(expectedList.size()),
expectedAB,
expectedList);
}
/**
* A utility class to generate test data.
*/
class TestDataHelper {
//@@author A0164889E
Task adam() throws Exception {
Name name = new Name("Adm Brown");
EndDate privateEndDate = new EndDate("12.11");
StartDate privateStartDate = new StartDate("12.00");
Group privateGroup = new Group("leisure time");
return new Task(name, privateStartDate, privateEndDate,
privateGroup, UniqueTagList.build(Tag.TAG_INCOMPLETE));
}
//@@author A0164889E
/**
* Generates a valid person using the given seed.
* Running this function with the same parameter values guarantees the returned person will have the same state.
* Each unique seed will generate a unique Person object.
*
* @param seed used to generate the person data field values
*/
Task generatePerson(int seed) throws Exception {
return new Task(
new Name("Person " + seed),
new StartDate("0" + Math.abs(seed) + ".0" + Math.abs(seed)),
new EndDate("0" + Math.abs(seed) + ".0" + Math.abs(seed)),
new Group("list of " + seed),
new UniqueTagList(new Tag("tag" + Math.abs(seed)), new Tag("tag" + Math.abs(seed + 1)))
);
}
//@@author A0164032U
/** Generates the correct add command based on the task given */
String generateAddCommand(ReadOnlyTask p) {
StringBuffer cmd = new StringBuffer();
cmd.append("add ");
cmd.append(p.getName().toString());
cmd.append(" from ").append(p.getStartDate());
cmd.append(" to ").append(p.getEndDate());
cmd.append(" in ").append(p.getGroup());
return cmd.toString();
}
//@@author
//@@author A0164466X
/** Generates the complete list */
String generateListCompleteCommand(Model model) {
model.updateFilteredListToShowComplete();
return "lc";
}
/** Generates the incomplete list */
String generateListIncompleteCommand(Model model) {
model.updateFilteredListToShowIncomplete();
return "li";
}
/** Generates the correct mark command based on the index given */
String generateMarkCommand(int index) {
StringBuffer cmd = new StringBuffer();
cmd.append("mark ");
cmd.append(index);
return cmd.toString();
}
/** Generates the correct unmark command based on the index given */
String generateUnmarkCommand(int index) {
StringBuffer cmd = new StringBuffer();
cmd.append("unmark ");
cmd.append(index);
return cmd.toString();
}
/**
* Generates an AddressBook with auto-generated persons.
*/
YTomorrow generateAddressBook(int numGenerated) throws Exception {
YTomorrow addressBook = new YTomorrow();
addToAddressBook(addressBook, numGenerated);
return addressBook;
}
/**
* Generates an AddressBook based on the list of Persons given.
*/
YTomorrow generateAddressBook(List<Task> persons) throws Exception {
YTomorrow addressBook = new YTomorrow();
addToAddressBook(addressBook, persons);
return addressBook;
}
/**
* Adds auto-generated Person objects to the given AddressBook
* @param addressBook The AddressBook to which the Persons will be added
*/
void addToAddressBook(YTomorrow addressBook, int numGenerated) throws Exception {
addToAddressBook(addressBook, generatePersonList(numGenerated));
}
/**
* Adds the given list of Persons to the given AddressBook
*/
void addToAddressBook(YTomorrow addressBook, List<Task> personsToAdd) throws Exception {
for (Task p: personsToAdd) {
addressBook.addTask(p);
}
}
/**
* Adds auto-generated Person objects to the given model
* @param model The model to which the Persons will be added
*/
void addToModel(Model model, int numGenerated) throws Exception {
addToModel(model, generatePersonList(numGenerated));
}
/**
* Adds the given list of Persons to the given model
*/
void addToModel(Model model, List<Task> personsToAdd) throws Exception {
for (Task p: personsToAdd) {
model.addTask(p);
}
}
/**
* Generates a list of Persons based on the flags.
*/
List<Task> generatePersonList(int numGenerated) throws Exception {
List<Task> persons = new ArrayList<>();
for (int i = 1; i <= numGenerated; i++) {
persons.add(generatePerson(i));
}
return persons;
}
List<Task> generatePersonList(Task... persons) {
return Arrays.asList(persons);
}
//@@author A0164889E
/**
* Generates a Person object with given name. Other fields will have some dummy values.
*/
Task generatePersonWithName(String name) throws Exception {
return new Task(
new Name(name),
new StartDate("12.21"),
new EndDate("12.11"),
new Group("list of 1"),
new UniqueTagList(new Tag("tag"))
);
}
}
}
|
package seedu.taskman.logic;
import com.google.common.eventbus.Subscribe;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import seedu.taskman.commons.core.EventsCenter;
import seedu.taskman.logic.commands.*;
import seedu.taskman.commons.events.ui.JumpToListRequestEvent;
import seedu.taskman.commons.events.ui.ShowHelpRequestEvent;
import seedu.taskman.commons.events.model.TaskManChangedEvent;
import seedu.taskman.logic.parser.DateTimeParser;
import seedu.taskman.model.TaskMan;
import seedu.taskman.model.Model;
import seedu.taskman.model.ModelManager;
import seedu.taskman.model.ReadOnlyTaskMan;
import seedu.taskman.model.tag.Tag;
import seedu.taskman.model.tag.UniqueTagList;
import seedu.taskman.model.event.*;
import seedu.taskman.model.event.legacy.Email;
import seedu.taskman.storage.StorageManager;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static seedu.taskman.commons.core.Messages.*;
public class LogicManagerTest {
@Rule
public TemporaryFolder saveFolder = new TemporaryFolder();
private Model model;
private Logic logic;
//These are for checking the correctness of the events raised
private ReadOnlyTaskMan latestSavedTaskMan;
private boolean helpShown;
private int targetedJumpIndex;
@Subscribe
private void handleLocalModelChangedEvent(TaskManChangedEvent abce) {
latestSavedTaskMan = new TaskMan(abce.data);
}
@Subscribe
private void handleShowHelpRequestEvent(ShowHelpRequestEvent she) {
helpShown = true;
}
@Subscribe
private void handleJumpToListRequestEvent(JumpToListRequestEvent je) {
targetedJumpIndex = je.targetIndex;
}
@Before
public void setup() {
model = new ModelManager();
String tempTaskManFile = saveFolder.getRoot().getPath() + "TempTaskMan.xml";
String tempPreferencesFile = saveFolder.getRoot().getPath() + "TempPreferences.json";
logic = new LogicManager(model, new StorageManager(tempTaskManFile, tempPreferencesFile));
EventsCenter.getInstance().registerHandler(this);
latestSavedTaskMan = new TaskMan(model.getTaskMan()); // last saved assumed to be up to date before.
helpShown = false;
targetedJumpIndex = -1; // none yet
}
@After
public void teardown() {
EventsCenter.clearSubscribers();
}
@Test
public void execute_invalid() throws Exception {
String invalidCommand = " ";
assertCommandBehavior(invalidCommand,
String.format(MESSAGE_INVALID_COMMAND_FORMAT, HelpCommand.MESSAGE_USAGE));
}
/**
* Executes the command and confirms that the result message is correct.
* Both the 'task man' and the 'last shown list' are expected to be empty.
* @see #assertCommandBehavior(String, String, ReadOnlyTaskMan, List)
*/
private void assertCommandBehavior(String inputCommand, String expectedMessage) throws Exception {
assertCommandBehavior(inputCommand, expectedMessage, new TaskMan(), Collections.emptyList());
}
/**
* Executes the command and confirms that the result message is correct and
* also confirms that the following three parts of the LogicManager object's state are as expected:<br>
* - the internal task man data are same as those in the {@code expectedTaskMan} <br>
* - the backing list shown by UI matches the {@code shownList} <br>
* - {@code expectedTaskMan} was saved to the storage file. <br>
*/
private void assertCommandBehavior(String inputCommand, String expectedMessage,
ReadOnlyTaskMan expectedTaskMan,
List<? extends Activity> expectedShownList) throws Exception {
//Execute the command
CommandResult result = logic.execute(inputCommand);
//Confirm the ui display elements should contain the right data
assertEquals(expectedMessage, result.feedbackToUser);
assertEquals(expectedShownList, model.getFilteredActivityList());
//Confirm the state of data (saved and in-memory) is as expected
assertEquals(expectedTaskMan, model.getTaskMan());
assertEquals(expectedTaskMan, latestSavedTaskMan);
}
@Test
public void execute_unknownCommandWord() throws Exception {
String unknownCommand = "uicfhmowqewca";
assertCommandBehavior(unknownCommand, MESSAGE_UNKNOWN_COMMAND);
}
//@Test
public void execute_help() throws Exception {
assertCommandBehavior("help", HelpCommand.SHOWING_HELP_MESSAGE);
assertTrue(helpShown);
}
//@Test
public void execute_exit() throws Exception {
assertCommandBehavior("exit", ExitCommand.MESSAGE_EXIT_ACKNOWLEDGEMENT);
}
//@Test
public void execute_clear() throws Exception {
TestDataHelper helper = new TestDataHelper();
model.addTask(helper.generateTask(1));
model.addTask(helper.generateTask(2));
model.addTask(helper.generateTask(3));
assertCommandBehavior("clear", ClearCommand.MESSAGE_SUCCESS, new TaskMan(), Collections.emptyList());
}
//@Test
public void execute_add_invalidArgsFormat() throws Exception {
String expectedMessage = String.format(MESSAGE_INVALID_COMMAND_FORMAT, AddCommand.MESSAGE_USAGE);
assertCommandBehavior(
"add wrong args wrong args", expectedMessage);
assertCommandBehavior(
"add Valid Title 12345 e/valid@email.butNoDeadlinePrefix a/valid, address", expectedMessage);
assertCommandBehavior(
"add Valid Title d/12345 valid@email.butNoPrefix a/valid, address", expectedMessage);
assertCommandBehavior(
"add Valid Title d/12345 e/valid@email.butNoAddressPrefix valid, address", expectedMessage);
}
//@Test
public void execute_add_invalidTaskData() throws Exception {
assertCommandBehavior(
"add []\\[;] d/12345 e/valid@e.mail a/valid, address", Title.MESSAGE_TITLE_CONSTRAINTS);
assertCommandBehavior(
"add Valid Title d/not_numbers e/valid@e.mail a/valid, address", Deadline.MESSAGE_DEADLINE_CONSTRAINTS);
assertCommandBehavior(
"add Valid Title d/12345 e/notAnEmail a/valid, address", Email.MESSAGE_EMAIL_CONSTRAINTS);
assertCommandBehavior(
"add Valid Title d/12345 e/valid@e.mail a/valid, address t/invalid_-[.tag", Tag.MESSAGE_TAG_CONSTRAINTS);
}
//@Test
public void execute_add_successful() throws Exception {
// setup expectations
TestDataHelper helper = new TestDataHelper();
Task toBeAdded = helper.food();
TaskMan expectedTaskMan = new TaskMan();
expectedTaskMan.addTask(toBeAdded);
// execute command and verify result
assertCommandBehavior(helper.generateAddCommand(toBeAdded),
String.format(AddCommand.MESSAGE_SUCCESS, toBeAdded),
expectedTaskMan,
expectedTaskMan.getActivityList());
}
//@Test
public void execute_addDuplicate_notAllowed() throws Exception {
// setup expectations
TestDataHelper helper = new TestDataHelper();
Task toBeAdded = helper.food();
TaskMan expectedAB = new TaskMan();
expectedAB.addTask(toBeAdded);
// setup starting state
model.addTask(toBeAdded); // task already in internal task man
// execute command and verify result
assertCommandBehavior(
helper.generateAddCommand(toBeAdded),
AddCommand.MESSAGE_DUPLICATE_EVENT,
expectedAB,
expectedAB.getActivityList());
}
@Test
public void execute_list_showsAllTasks() throws Exception {
// prepare expectations
TestDataHelper helper = new TestDataHelper();
TaskMan expectedAB = helper.generateTaskMan(2);
List<? extends Activity> expectedList = expectedAB.getActivityList();
// prepare task man state
helper.addToModel(model, 2);
assertCommandBehavior("list",
Command.getMessageForTaskListShownSummary(expectedList.size()),
expectedAB,
expectedList);
}
/**
* Confirms the 'invalid argument index number behaviour' for the given command
* targeting a single task in the shown list, using visible index.
* @param commandWord to test assuming it targets a single task in the last shown list based on visible index.
*/
private void assertIncorrectIndexFormatBehaviorForCommand(String commandWord, String expectedMessage) throws Exception {
assertCommandBehavior(commandWord , expectedMessage); //index missing
assertCommandBehavior(commandWord + " +1", expectedMessage); //index should be unsigned
assertCommandBehavior(commandWord + " -1", expectedMessage); //index should be unsigned
assertCommandBehavior(commandWord + " 0", expectedMessage); //index cannot be 0
assertCommandBehavior(commandWord + " not_a_number", expectedMessage);
}
/**
* Confirms the 'index not found' error behaviour for the given command
* targeting a single task in the shown list, using a visible index that is out of bounds.
* @param commandWord to test assuming it targets a single task in the last shown list based on visible index.
*/
private void assertIndexNotFoundBehaviorForCommand(String commandWord) throws Exception {
String expectedMessage = MESSAGE_INVALID_PERSON_DISPLAYED_INDEX;
TestDataHelper helper = new TestDataHelper();
List<Task> taskList = helper.generateTaskList(2);
// set AB state to 2 tasks
model.resetData(new TaskMan());
for (Task p : taskList) {
model.addTask(p);
}
List<Activity> expectedList = taskList.stream().map(Activity::new).collect(Collectors.toList());
assertCommandBehavior(commandWord + " 3", expectedMessage, model.getTaskMan(), expectedList);
}
@Test
public void execute_selectInvalidArgsFormat_errorMessageShown() throws Exception {
String expectedMessage = String.format(MESSAGE_INVALID_COMMAND_FORMAT, SelectCommand.MESSAGE_USAGE);
assertIncorrectIndexFormatBehaviorForCommand("select", expectedMessage);
}
@Test
public void execute_selectIndexNotFound_errorMessageShown() throws Exception {
assertIndexNotFoundBehaviorForCommand("select");
}
@Test
public void execute_select_jumpsToCorrectTask() throws Exception {
TestDataHelper helper = new TestDataHelper();
List<Task> threeTasks = helper.generateTaskList(3);
TaskMan expectedAB = helper.generateTaskMan(threeTasks);
helper.addToModel(model, threeTasks);
assertCommandBehavior("select 2",
String.format(SelectCommand.MESSAGE_SELECT_PERSON_SUCCESS, 2),
expectedAB,
expectedAB.getActivityList());
assertEquals(1, targetedJumpIndex);
assertEquals(model.getFilteredActivityList().get(1), new Activity(threeTasks.get(1)));
}
@Test
public void execute_deleteInvalidArgsFormat_errorMessageShown() throws Exception {
String expectedMessage = String.format(MESSAGE_INVALID_COMMAND_FORMAT, DeleteCommand.MESSAGE_USAGE);
assertIncorrectIndexFormatBehaviorForCommand("delete", expectedMessage);
}
@Test
public void execute_deleteIndexNotFound_errorMessageShown() throws Exception {
assertIndexNotFoundBehaviorForCommand("delete");
}
@Test
public void execute_delete_removesCorrectTask() throws Exception {
TestDataHelper helper = new TestDataHelper();
List<Task> threeTasks = helper.generateTaskList(3);
TaskMan expectedAB = helper.generateTaskMan(threeTasks);
//Wrap Task in Activity to delete
expectedAB.removeActivity(new Activity(threeTasks.get(1)));
helper.addToModel(model, threeTasks);
assertCommandBehavior("delete 2",
String.format(DeleteCommand.MESSAGE_DELETE_PERSON_SUCCESS, threeTasks.get(1)),
expectedAB,
expectedAB.getActivityList());
}
@Test
public void execute_list_emptyArgsFormat() throws Exception {
String expectedMessage = ListCommand.MESSAGE_SUCCESS;
assertCommandBehavior("list ", Command.getMessageForTaskListShownSummary(0));
}
@Test
public void execute_list_onlyMatchesFullWordsInTitles() throws Exception {
TestDataHelper helper = new TestDataHelper();
Task pTarget1 = helper.generateTaskWithTitle("bla bla KEY bla");
Task pTarget2 = helper.generateTaskWithTitle("bla KEY bla bceofeia");
Task p1 = helper.generateTaskWithTitle("KE Y");
Task p2 = helper.generateTaskWithTitle("KEYKEYKEY sduauo");
List<Task> fourTasks = helper.generateTaskList(p1, pTarget1, p2, pTarget2);
TaskMan expectedAB = helper.generateTaskMan(fourTasks);
Activity[] list = {new Activity(pTarget1), new Activity(pTarget2)};
List<Activity> expectedList = Arrays.asList(list);
helper.addToModel(model, fourTasks);
assertCommandBehavior("list KEY",
Command.getMessageForTaskListShownSummary(expectedList.size()),
expectedAB,
expectedList);
}
@Test
public void execute_list_isNotCaseSensitive() throws Exception {
TestDataHelper helper = new TestDataHelper();
Task p1 = helper.generateTaskWithTitle("bla bla KEY bla");
Task p2 = helper.generateTaskWithTitle("bla KEY bla bceofeia");
Task p3 = helper.generateTaskWithTitle("key key");
Task p4 = helper.generateTaskWithTitle("KEy sduauo");
List<Task> fourTasks = helper.generateTaskList(p3, p1, p4, p2);
TaskMan expectedAB = helper.generateTaskMan(fourTasks);
Activity[] list = {new Activity(p3), new Activity(p1), new Activity(p4), new Activity(p2)};
List<Activity> expectedList = Arrays.asList(list);
helper.addToModel(model, fourTasks);
assertCommandBehavior("list KEY",
Command.getMessageForTaskListShownSummary(expectedList.size()),
expectedAB,
expectedList);
}
@Test
public void execute_list_matchesIfAnyKeywordPresent() throws Exception {
TestDataHelper helper = new TestDataHelper();
Task pTarget1 = helper.generateTaskWithTitle("bla bla KEY bla");
Task pTarget2 = helper.generateTaskWithTitle("bla rAnDoM bla bceofeia");
Task pTarget3 = helper.generateTaskWithTitle("key key");
Task p1 = helper.generateTaskWithTitle("sduauo");
List<Task> fourTasks = helper.generateTaskList(pTarget1, p1, pTarget2, pTarget3);
TaskMan expectedAB = helper.generateTaskMan(fourTasks);
Activity[] list = {new Activity(pTarget1), new Activity(pTarget2), new Activity(pTarget3)};
List<Activity> expectedList = Arrays.asList(list);
helper.addToModel(model, fourTasks);
assertCommandBehavior("list key rAnDoM",
Command.getMessageForTaskListShownSummary(expectedList.size()),
expectedAB,
expectedList);
}
//@Test
public void execute_list_filter_events_only() throws Exception{
// prepare expectations
//TODO: update test when events are properly implemented
TestDataHelper helper = new TestDataHelper();
TaskMan expectedAB = helper.generateTaskMan(2);
List<Activity> expectedList = Collections.EMPTY_LIST;
// prepare task man state
helper.addToModel(model, 2);
assertCommandBehavior("list e/",
Command.getMessageForTaskListShownSummary(expectedList.size()),
expectedAB,
expectedList);
}
//@Test
public void execute_list_filter_all() throws Exception{
// prepare expectations
//TODO: update test when events are properly implemented
TestDataHelper helper = new TestDataHelper();
TaskMan expectedAB = helper.generateTaskMan(2);
List<? extends Activity> expectedList = expectedAB.getActivityList();
// prepare task man state
helper.addToModel(model, 2);
assertCommandBehavior("list all/",
Command.getMessageForTaskListShownSummary(expectedList.size()),
expectedAB,
expectedList);
}
@Test
public void execute_list_filter_tags() throws Exception{
// prepare expectations
TestDataHelper helper = new TestDataHelper();
// prepare task man state
helper.addToModel(model, 4);
TaskMan expectedAB = helper.generateTaskMan(4);
List<Activity> expectedList = expectedAB.getActivityList().subList(0,2);
assertCommandBehavior("list t/tag2",
Command.getMessageForTaskListShownSummary(expectedList.size()),
expectedAB,
expectedList);
assertCommandBehavior("list t/tag6",
Command.getMessageForTaskListShownSummary(0),
expectedAB,
Collections.EMPTY_LIST);
expectedList = new ArrayList<>(expectedAB.getActivities());
expectedList.remove(1);
assertCommandBehavior("list t/tag1 t/tag4",
Command.getMessageForTaskListShownSummary(expectedList.size()),
expectedAB,
expectedList);
}
@Test
public void execute_list_filter_keywords_with_tags() throws Exception{
// prepare expectations
TestDataHelper helper = new TestDataHelper();
TaskMan expectedAB = helper.generateTaskMan(5);
// prepare task man state
helper.addToModel(model, 5);
List<Activity> expectedList = new ArrayList<>();
expectedList.add(new Activity(helper.generateTask(1)));
expectedList.add(new Activity(helper.generateTask(5)));
assertCommandBehavior("list 1 5 t/tag2 t/tag6",
Command.getMessageForTaskListShownSummary(expectedList.size()),
expectedAB,
expectedList);
}
/**
* A utility class to generate test data.
*/
class TestDataHelper{
Task food() throws Exception {
Title title = new Title("Procure dinner");
Deadline privateDeadline = new Deadline("7.00pm");
Frequency frequency = new Frequency("1 day");
Schedule schedule = new Schedule("6pm, 7pm");
Tag tag1 = new Tag("tag1");
Tag tag2 = new Tag("tag2");
UniqueTagList tags = new UniqueTagList(tag1, tag2);
return new Task(title, tags, privateDeadline, schedule, frequency);
}
/**
* Generates a valid task using the given seed.
* Running this function with the same parameter values guarantees the returned task will have the same state.
* Each unique seed will generate a unique Task object.
*
* @param seed used to generate the task data field values
*/
Task generateTask(int seed) throws Exception {
return new Task(
new Title("Task " + seed),
new UniqueTagList(new Tag("tag" + Math.abs(seed)), new Tag("tag" + Math.abs(seed + 1))),
new Deadline(Math.abs(seed)),
new Schedule(Instant.ofEpochSecond(Math.abs(seed - 1)) + ", " + Instant.ofEpochSecond(Math.abs(seed))),
new Frequency(seed + " mins")
);
}
/** Generates the correct add command based on the task given */
String generateAddCommand(Task p) {
StringBuffer cmd = new StringBuffer();
cmd.append("add ");
cmd.append(p.getTitle().toString());
cmd.append(" c/").append(p.getStatus().toString());
if (p.getDeadline().isPresent()) {
Instant instant = Instant.ofEpochSecond(p.getDeadline().get().epochSecond);
cmd.append(" d/").append(instant.toString());
}
if (p.getFrequency().isPresent()) {
cmd.append(" f/").append(p.getFrequency().get().seconds / 60 + " mins");
}
if (p.getSchedule().isPresent()) {
String start = DateTimeParser.epochSecondToShortDateTime(p.getSchedule().get().startEpochSecond);
String end = DateTimeParser.epochSecondToShortDateTime(p.getSchedule().get().endEpochSecond);
cmd.append(" s/").
append(start).
append(" to ").
append(end);
}
UniqueTagList tags = p.getTags();
for(Tag t: tags){
cmd.append(" t/").append(t.tagName);
}
return cmd.toString();
}
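/*
* Illustrative output for generateTask(1) (assumed shape only: the exact rendering of the
* status, schedule and frequency depends on Status.toString() and DateTimeParser):
* add Task 1 c/<status> d/1970-01-01T00:00:01Z f/<n> mins s/<start> to <end> t/tag1 t/tag2
*/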
/**
* Generates a TaskMan with auto-generated tasks.
*/
TaskMan generateTaskMan(int numGenerated) throws Exception{
TaskMan taskMan = new TaskMan();
addToTaskMan(taskMan, numGenerated);
return taskMan;
}
/**
* Generates a TaskMan based on the list of Tasks given.
*/
TaskMan generateTaskMan(List<Task> tasks) throws Exception{
TaskMan taskMan = new TaskMan();
addToTaskMan(taskMan, tasks);
return taskMan;
}
/**
* Adds auto-generated Task objects to the given TaskMan
* @param taskMan The TaskMan to which the Tasks will be added
*/
void addToTaskMan(TaskMan taskMan, int numGenerated) throws Exception{
addToTaskMan(taskMan, generateTaskList(numGenerated));
}
/**
* Adds the given list of Tasks to the given TaskMan
*/
void addToTaskMan(TaskMan taskMan, List<Task> tasksToAdd) throws Exception{
for(Task p: tasksToAdd){
taskMan.addTask(p);
}
}
/**
* Adds auto-generated Task objects to the given model
* @param model The model to which the Tasks will be added
*/
void addToModel(Model model, int numGenerated) throws Exception{
addToModel(model, generateTaskList(numGenerated));
}
/**
* Adds the given list of Tasks to the given model
*/
void addToModel(Model model, List<Task> tasksToAdd) throws Exception{
for(Task p: tasksToAdd){
model.addTask(p);
}
}
/**
* Generates a list of Tasks with the given number of auto-generated entries.
*/
List<Task> generateTaskList(int numGenerated) throws Exception{
List<Task> tasks = new ArrayList<>();
for(int i = 1; i <= numGenerated; i++){
tasks.add(generateTask(i));
}
return tasks;
}
List<Task> generateTaskList(Task... tasks) {
return Arrays.asList(tasks);
}
/**
* Generates a Task object with given title. Other fields will have some dummy values.
*/
Task generateTaskWithTitle(String title) throws Exception {
return new Task(
new Title(title),
new UniqueTagList(new Tag("t1"), new Tag("t2")),
new Deadline("in 4 days"),
new Schedule("02/05/2016 5pm, 05/05/2016 5pm"),
new Frequency("7 days")
);
}
}
}
|
package apoc.load;
import apoc.Extended;
import apoc.result.StringResult;
import apoc.util.FileUtils;
import apoc.util.Util;
import org.apache.commons.io.filefilter.FileFileFilter;
import org.apache.commons.io.filefilter.TrueFileFilter;
import org.apache.commons.io.filefilter.WildcardFileFilter;
import org.neo4j.logging.Log;
import org.neo4j.procedure.Procedure;
import org.neo4j.procedure.Context;
import org.neo4j.procedure.Description;
import org.neo4j.procedure.Name;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.nio.file.Paths;
import java.util.Map;
import java.util.Collection;
import java.util.stream.Stream;
import static apoc.ApocConfig.apocConfig;
import static apoc.util.FileUtils.isImportUsingNeo4jConfig;
import static org.eclipse.jetty.util.URIUtil.encodePath;
import static org.apache.commons.lang3.StringUtils.replaceOnce;
@Extended
public class LoadDirectory {
@Context
public Log log;
@Procedure
@Description("apoc.load.directory('pattern', 'urlDir', {config}) YIELD value - Loads list of all files in folder specified by urlDir or in import folder if urlDir string is empty or not specified")
public Stream<StringResult> directory(@Name(value = "pattern", defaultValue = "*") String pattern, @Name(value = "urlDir", defaultValue = "") String urlDir, @Name(value = "config", defaultValue = "{}") Map<String, Object> config) throws IOException {
if (urlDir == null) {
throw new IllegalArgumentException("Invalid (null) urlDir");
}
String dirImport = apocConfig().getString("dbms.directories.import", "import");
urlDir = urlDir.isEmpty()
? encodePath(dirImport)
: FileUtils.changeFileUrlIfImportDirectoryConstrained(encodePath(urlDir));
boolean isRecursive = Util.toBoolean(config.getOrDefault("recursive", true));
Collection<File> files = org.apache.commons.io.FileUtils.listFiles(
Paths.get(URI.create(urlDir).getPath()).toFile(),
new WildcardFileFilter(pattern),
isRecursive ? TrueFileFilter.TRUE : FileFileFilter.INSTANCE
);
return files.stream().map(i -> {
String urlFile = i.toString();
return new StringResult(isImportUsingNeo4jConfig()
? replaceOnce(urlFile, dirImport + File.separator, "")
: urlFile);
});
}
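// Illustrative usage from Cypher (shape follows the @Description above; pattern and config are placeholders):
// CALL apoc.load.directory('*.csv', '', {recursive: false}) YIELD value
// RETURN value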
}
|
package edu.utexas.cycic;
import java.io.File;
import java.io.Reader;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import javafx.scene.image.Image;
import javax.json.Json;
import javax.json.JsonArray;
import javax.json.JsonObject;
import javax.json.JsonReader;
import javax.json.JsonString;
import javax.json.JsonValue;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
public class XMLReader {
static skinSet SC2 = new skinSet(){
{
name = "SC2";
images.put("reactor", new Image(new File("skinImages/reactor.png").toURI().toString()));
images.put("facility", new Image(new File("skinImages/sourceFacSC2.jpg").toURI().toString()));
}
};
/**
* Loads the DSARR skin, resolving its images relative to the given base path.
* @param path base directory that contains the skinImages folder
* @return the populated skinSet
*/
public static skinSet loadSkin(String path){
skinSet skin = new skinSet(){
{
name = "DSARR";
images.put("abr", new Image(new File(path + "/skinImages/fuelcycle_abr.png").toURI().toString(), 100, 100, false, false));
images.put("facility", new Image(new File(path + "/skinImages/fuelcycle_enr.png").toURI().toString()));
images.put("fuel fabrication", new Image(new File(path + "/skinImages/fuelcycle_fab.png").toURI().toString(), 100, 100, false, false));
images.put("repository", new Image(new File(path + "/skinImages/fuelcycle_geo.png").toURI().toString()));
images.put("mine", new Image(new File(path + "/skinImages/fuelcycle_mine.png").toURI().toString()));
images.put("reactor", new Image(new File(path + "/skinImages/fuelcycle_rxtr.png").toURI().toString(), true));
images.put("reprocessing", new Image(new File(path + "/skinImages/fuelcycle_sep.png").toURI().toString()));
}
};
return skin;
}
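// Stub and test archetype specs kept as a blacklist (presumably skipped when archetype lists are built elsewhere).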
static ArrayList<String> blackList = new ArrayList<String>(){
{
add("agents:agents:Sink");
add("agents:agents:Source");
add("agents:agents:KFacility");
add("agents:agents:NullInst");
add("agents:agents:NullRegion");
add("agents:agents:Prey");
add("agents:agents:Predator");
add("stubs:StubFacility:StubFacility");
add("stubs:StubInst:StubInst");
add("stubs:StubRegion:StubRegion");
add("StubFacility/cyclus/StubInst/cyclus/StubRegion:StubRegion:StubRegion");
add("StubFacility/cyclus/StubInst:StubInst:StubInst");
add("StubFacility:StubFacility:StubFacility");
add(":cycaless:BatchReactor");
add(":cycamore:BatchReactor");
}
};
static ArrayList<String> facilityList = new ArrayList<String>(){
{
}
};
static ArrayList<String> regionList = new ArrayList<String>(){
{
}
};
static ArrayList<String> institutionList = new ArrayList<String>(){
{
}
};
/**
* Parses an archetype's RelaxNG schema string into a nested list of its elements.
* @param xmlSchema the schema XML as a string
* @return nested lists describing each schema element (name, data type, and children)
*/
static ArrayList<Object> readSchema(String xmlSchema){
ArrayList<Object> schema = new ArrayList<Object>();
try{
DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
InputSource is = new InputSource(new StringReader(xmlSchema));
Document doc = dBuilder.parse(is);
NodeList top = doc.getChildNodes();
if("interleave".equals(top.item(0).getNodeName())){
for(int i = 0; i < top.getLength(); i++){
schema = nodeListener(top.item(i), schema);
}
} else {
for(int i = 0; i < doc.getChildNodes().getLength(); i++){
schema = nodeListener(doc, schema);
}
}
} catch (Exception ex) {
ex.printStackTrace();
}
return schema;
}
/**
* Merges the "vars" annotations from the archetype's JSON metadata into the
* nested schema list produced by readSchema.
* @param jsonSchema the archetype annotations as a JSON string
* @param xmlschema the nested schema list produced by readSchema
* @return the annotated schema list
*/
static ArrayList<Object> annotationReader(String jsonSchema, ArrayList<Object> xmlschema){
Reader schema = new StringReader(jsonSchema);
JsonReader jsonReader = Json.createReader(schema);
JsonObject jsonObject = jsonReader.readObject();
jsonReader.close();
JsonObject vars = jsonObject.getJsonObject("vars");
for(int i = 0; i < xmlschema.size(); i++){
//System.out.println(xmlschema);
combiner((ArrayList<Object>)xmlschema.get(i), vars);
}
return xmlschema;
}
/**
* Reads the "entity" field from the archetype's JSON annotations.
* @param jsonSchema the archetype annotations as a JSON string
* @return the entity value as a string
*/
static String entityReader(String jsonSchema){
Reader schema = new StringReader(jsonSchema);
JsonReader jsonReader = Json.createReader(schema);
JsonObject jsonObject = jsonReader.readObject();
jsonReader.close();
JsonString string = jsonObject.getJsonString("entity");
return string.toString();
}
/**
* Reads the "niche" field from the archetype's JSON annotations.
* @param jsonSchema the archetype annotations as a JSON string
* @return the niche value as a string
*/
static String nicheReader(String jsonSchema){
Reader schema = new StringReader(jsonSchema);
JsonReader jsonReader = Json.createReader(schema);
JsonObject jsonObject = jsonReader.readObject();
jsonReader.close();
JsonString string = jsonObject.getJsonString("niche");
return string.toString();
}
/**
* Recursively walks the nested schema list and copies the matching metadata
* (uitype, units, range, default, userlevel, tooltip, doc, uilabel) from the
* JSON annotations into each entry.
* @param dataArray a schema entry or list of entries produced by readSchema
* @param json the "vars" JSON object holding the annotations
*/
@SuppressWarnings("unchecked")
static void combiner(ArrayList<Object> dataArray, JsonObject json){
JsonObject json_pass;
//System.out.println(dataArray);
if(dataArray.get(0) instanceof ArrayList){
for(int i = 0; i < dataArray.size(); i++){
combiner((ArrayList<Object>)dataArray.get(i), json);
}
} else if(dataArray.get(1) instanceof ArrayList){
if(json.get((String)dataArray.get(0)) instanceof JsonString){
json_pass = json.getJsonObject(json.getJsonString((String)dataArray.get(0)).toString().replaceAll("\"", ""));
} else {
json_pass = json.getJsonObject((String)dataArray.get(0));
}
cycicResize(dataArray);
if("oneOrMore".equals(dataArray.get(2)) || "zeroOrMore".equals(dataArray.get(2))){
orMoreInfoControl(json_pass, dataArray);
//cycicInfoControl(json_pass, (ArrayList<Object>) ((ArrayList<Object>) dataArray.get(1)).get(0));
}
combiner((ArrayList<Object>)dataArray.get(1), json);
try{
cycicInfoControl(json_pass, dataArray);
} catch (Exception ex){
}
} else {
cycicResize(dataArray);
if(json.get((String)dataArray.get(0)) instanceof JsonString){
json_pass = json.getJsonObject(json.getJsonString((String)dataArray.get(0)).toString().replaceAll("\"", ""));
} else {
json_pass = json.getJsonObject((String)dataArray.get(0));
}
try{
cycicInfoControl(json_pass, dataArray);
} catch (Exception ex) {
//ex.printStackTrace();
}
}
}
/**
* Pads a schema entry to ten slots so that metadata can later be written by index
* (2 = uitype, 3 = units, 4 = range, 5 = default, 6 = userlevel, 7 = tooltip, 8 = doc, 9 = uilabel).
* @param dataArray the schema entry to pad
* @return the same list, padded with nulls (slot 6 defaults to 0) up to ten slots
*/
static ArrayList<Object> cycicResize(ArrayList<Object> dataArray){
while(dataArray.size() < 10){
if(dataArray.size() == 6){
dataArray.add(0);
}
dataArray.add(null);
}
return dataArray;
}
/**
* Copies the metadata fields of a single variable (uitype, units, range, default,
* userlevel, tooltip, doc, uilabel) from its JSON annotation into the fixed slots
* of the schema entry.
* @param jsonPass the JSON annotation object for this variable
* @param dataArray the padded schema entry to fill
* @return the updated schema entry
*/
static ArrayList<Object> cycicInfoControl(JsonObject jsonPass, ArrayList<Object> dataArray){
if(dataArray.get(2) == null){
dataArray.set(2, "");
if(jsonPass.get("uitype") != null){
dataArray.set(2, jsonPass.get("uitype").toString().replace("\"", ""));
}
}
if(jsonPass.get("units") != null){
dataArray.set(3, jsonPass.get("units").toString());
}
if(jsonPass.get("range") != null){
dataArray.set(4, jsonPass.get("range").toString());
}
if(jsonPass.get("default") != null){
dataArray.set(6, 1);
dataArray.set(5, jsonPass.get("default").toString());
}
if(jsonPass.get("userlevel") != null){
dataArray.set(6, Integer.parseInt(jsonPass.get("userlevel").toString()));
}
if(jsonPass.get("tooltip") != null){
dataArray.set(7, jsonPass.get("tooltip").toString());
}
if(jsonPass.get("doc") != null){
dataArray.set(8, jsonPass.get("doc").toString());
}
if(jsonPass.get("uilabel") != null){
dataArray.set(9, jsonPass.get("uilabel").toString().replaceAll("\"", ""));
}
return dataArray;
}
/**
* Walks the DOM of the schema, collecting element names and data types into nested
* lists; oneOrMore/zeroOrMore and compound elements are recorded together with their
* node name.
* @param node the DOM node to descend into
* @param array the list that collected entries are appended to
* @return the populated list
*/
static ArrayList<Object> nodeListener(Node node, ArrayList<Object> array){
NodeList nodes = node.getChildNodes();
for (int i = 0; i < nodes.getLength(); i++){
switch (nodes.item(i).getNodeName()){
case "oneOrMore":
case "zeroOrMore":
try{
if(nodes.item(i).getParentNode().getParentNode().getNodeName().equalsIgnoreCase("config")){
ArrayList<Object> newArray = new ArrayList<Object>();
newArray = nodeListener(nodes.item(i), newArray);
array.add(newArray);
}
} catch (Exception e) {
e.printStackTrace();
}
ArrayList<Object> newArray = new ArrayList<Object>();
newArray = nodeListener(nodes.item(i), newArray);
array.add(newArray);
array.add(nodes.item(i).getNodeName());
break;
case "element":
if(nodes.item(i).getChildNodes().getLength() > 3){
try{
if(nodes.item(i).getParentNode().getParentNode().getNodeName().equalsIgnoreCase("config")){
ArrayList<Object> eleArray = new ArrayList<Object>();
eleArray = nodeListener(nodes.item(i), eleArray);
array.add(eleArray);
}
} catch (Exception e) {
e.printStackTrace();
}
ArrayList<Object> eleArray2 = new ArrayList<Object>();
eleArray2 = nodeListener(nodes.item(i), eleArray2);
array.add(eleArray2);
array.add(nodes.item(i).getNodeName());
System.out.println(array);
break;
} else {
ArrayList<Object> newArray1 = new ArrayList<Object>();
for(int j = 0; j < nodes.item(i).getAttributes().getLength(); j++){
if ("name".equals(nodes.item(i).getAttributes().item(j).getNodeName())){
newArray1.add(nodes.item(i).getAttributes().item(j).getNodeValue());
}
}
array.add(nodeListener(nodes.item(i), newArray1));
System.out.println(array);
break;
}
case "optional":
Node newNode = nodes.item(i).getChildNodes().item(1);
ArrayList<Object> newArray11 = new ArrayList<Object>();
for(int j = 0; j < newNode.getAttributes().getLength(); j++){
if ("name".equals(newNode.getAttributes().item(j).getNodeName())){
newArray11.add(newNode.getAttributes().item(j).getNodeValue());
}
}
array.add(nodeListener(newNode, newArray11));
break;
case "data":
for(int j = 0; j < nodes.item(i).getAttributes().getLength(); j++){
if("type".equals(nodes.item(i).getAttributes().item(j).getNodeName())){
array.add(1, nodes.item(i).getAttributes().item(j).getNodeValue());
}
}
default:
break;
}
}
return array;
}
/**
* Applies uitype metadata to the children of a oneOrMore/zeroOrMore schema entry,
* handling both a single uitype value and a JSON array of values.
* @param jsonPass the JSON annotation object for this variable
* @param dataArray the oneOrMore/zeroOrMore schema entry
* @return the updated schema entry
*/
@SuppressWarnings("unchecked")
static ArrayList<Object> orMoreInfoControl(JsonObject jsonPass, ArrayList<Object> dataArray){
cycicResize((ArrayList<Object>) ((ArrayList<Object>) dataArray.get(1)).get(0));
if(jsonPass.get("uitype") instanceof JsonArray){
JsonArray array = jsonPass.getJsonArray("uitype");
//System.out.println(array);
//System.out.println(dataArray);
for(int i = 0; i < ((ArrayList<Object>) dataArray.get(1)).size(); i++){
//System.out.println(array.get(i+1));
//System.out.println(((ArrayList<Object>) dataArray.get(1)).get(i));
String string = array.get(i+1).toString().replaceAll("\"", "");
cycicResize((ArrayList<Object>) ((ArrayList<Object>) dataArray.get(1)).get(i));
((ArrayList<Object>) ((ArrayList<Object>) dataArray.get(1)).get(i)).set(2, string);
}
} else if(jsonPass.get("uitype") != null){
((ArrayList<Object>) ((ArrayList<Object>) dataArray.get(1)).get(0)).set(2, jsonPass.get("uitype").toString().replace("\"", ""));
}
/*if(jsonPass.get("default") instanceof JsonArray){
JsonArray array = jsonPass.getJsonArray("default");
for(int i = 0; i < ((ArrayList<Object>) dataArray.get(1)).size(); i++){
String string = array.get(i+1).toString().replaceAll("\"", "");
cycicResize((ArrayList<Object>) ((ArrayList<Object>) dataArray.get(1)).get(i));
((ArrayList<Object>) ((ArrayList<Object>) dataArray.get(1)).get(i)).set(2, string);
}
} else if(jsonPass.get("default") instanceof JsonObject){
JsonObject object = jsonPass.getJsonObject("default");
Set<String> keys = object.keySet();
for(int i = 0; i < ((ArrayList<Object>) dataArray.get(1)).size(); i++){
cycicResize((ArrayList<Object>) ((ArrayList<Object>) dataArray.get(1)).get(i));
}
((ArrayList<Object>) ((ArrayList<Object>) dataArray.get(1)).get(0)).set(5, keys.toArray()[0].toString());
((ArrayList<Object>) ((ArrayList<Object>) dataArray.get(1)).get(1)).set(5, object.get(keys.toArray()[0]).toString());
} else if(jsonPass.get("default") != null) {
}*/
return dataArray;
}
}
|
package onion.lang.syntax;
import onion.lang.syntax.visitor.ASTVisitor;
/**
* @author Kota Mizushima
*
*/
public class TypeSpec extends AstNode {
private RawTypeNode component;
private final int dimension;
private final TypeSpec[] typeArguments;
public TypeSpec(RawTypeNode component, int dimension, TypeSpec[] typeArguments) {
this.component = component;
this.dimension = dimension;
this.typeArguments = typeArguments;
}
public int getSizeOfTypeArguments(){
return typeArguments.length;
}
public TypeSpec getTypeArgument(int index){
return typeArguments[index];
}
public RawTypeNode getComponent() {
return component;
}
public String getComponentName(){
return component.name();
}
public int getComponentKind(){
return component.kind();
}
public int getDimension() {
return dimension;
}
public Object accept(ASTVisitor visitor, Object context) {
return visitor.visit(this, context);
}
}
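/*
* Illustrative reading (assumed semantics, not stated in the source): a TypeSpec with
* dimension 2 and an empty typeArguments array would describe a two-dimensional array
* of its raw component type, while typeArguments carry any generic parameters.
*/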
|
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
// You can contact OPeNDAP, Inc. at PO Box 112, Saunderstown, RI. 02874-0112.
package opendap.metacat;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.util.Date;
import javax.xml.transform.stream.StreamSource;
import net.sf.saxon.s9api.SaxonApiException;
import net.sf.saxon.s9api.XdmNode;
import opendap.xml.Transformer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class handles the task of building an EML document from a DDX document. It can
* test the returned document to see if it is well-formed and it can cache the
* document.
*
* @author jimg
*
*/
public class EMLBuilder {
final static String ddx2emlDefault = "ddx2eml-3.2.xsl";
private static Logger log = LoggerFactory.getLogger(EMLBuilder.class);
// The EMLCache that holds both the DDXs LMT and the EML XML/text
private ResponseCachePostgres EMLCache = null;
// This is the transformer that takes the DDX and returns EML
private Transformer transformer;
private Date date = new Date();
public EMLBuilder() throws Exception {
this(false, "", ddx2emlDefault);
}
public EMLBuilder(String namePrefix) throws Exception{
this(true, namePrefix, ddx2emlDefault);
}
public EMLBuilder(String namePrefix, String xslt) throws Exception{
this(true, namePrefix, xslt);
}
public EMLBuilder(boolean useCache, String namePrefix, String xslt) throws Exception{
try {
transformer = new Transformer(xslt);
}
catch (SaxonApiException e) {
log.error("Could not build the XSL transformer object: ", e);
throw new Exception(e);
}
// If caching is enabled, build the cache that holds the EML responses; it is
// restored from its persistent form here and can be written back out later
// via saveEMLCache().
if (useCache)
EMLCache = new ResponseCachePostgres(namePrefix + "EML", "eml_responses");
}
/**
* Simple method to test if the EML will parse. Generally there's no need to
* call this but it'll be useful when developing the crawler.
*
* @note This method must be called by client code; it is not used by any of
* the methods here.
*
* @param emlString
* The EML document to test
* @return true if the EML parses, false if the SAX parser throws an
* exception
*/
public boolean isWellFormedEML(String emlString) {
try {
org.jdom.input.SAXBuilder sb = new org.jdom.input.SAXBuilder();
@SuppressWarnings("unused")
org.jdom.Document emlDoc = sb.build(new ByteArrayInputStream(emlString.getBytes()));
}
catch (Exception e) {
return false;
}
return true;
}
/**
* Build and cache an EML document using the given DDX document. Use the
* DDX's URL as the key for the cache entry. If caching is not on, ignore
* the DDX URL and don't use the cache.
*
* @param ddxUrl Use this as the key when caching the EML
* @param ddxString Build EML from this document
* @return The EML document
* @throws Exception
*/
public String getEML(String ddxUrl, String ddxString) throws Exception {
String params[] = new String[4];
// '.' finds the start of the '.ddx' extension
String dataset_url = ddxUrl.substring(0, ddxUrl.lastIndexOf('.'));
// '/' + 1 finds the start of the filename
String filename = dataset_url.substring(ddxUrl.lastIndexOf('/') + 1);
// Build the params
params[0] = "filename";
params[1] = filename;
params[2] = "dataset_url";
params[3] = dataset_url;
return getEML(ddxUrl, ddxString, params);
}
/**
* This version takes a varying number of parameters.
*
* @param ddxUrl Use this as the key when caching the EML
* @param ddxString Build EML from this document
* @param params Array element pairs: name1, value1, name2, value2, ...
* @return The EML document
* @throws Exception
*/
public String getEML(String ddxUrl, String ddxString, String[] params) throws Exception {
// Get the EML document
ByteArrayOutputStream os = new ByteArrayOutputStream();
XdmNode ddxXdm = null;
try {
ddxXdm = transformer.build(new StreamSource(new ByteArrayInputStream(ddxString.getBytes("UTF-8"))));
// Set parameters
for (int i = 0; i < params.length; i += 2) {
log.debug("Setting parameter named: " + params[i]);
transformer.setParameter(params[i], params[i+1]);
}
transformer.transform(ddxXdm, os);
}
catch (Exception e) {
log.error("While trying to transform the DDX: " + ddxString);
log.error("I got the following error: " + e.getMessage());
return "";
}
finally {
// Clear parameters
for (int i = 0; i < params.length; i += 2) {
transformer.clearParameter(params[i]);
}
}
String eml = os.toString();
if (EMLCache != null) {
EMLCache.setLastVisited(ddxUrl, date.getTime());
EMLCache.setCachedResponse(ddxUrl, eml);
}
return eml;
}
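/*
* Illustrative call (values are placeholders); parameters are passed as name/value pairs,
* exactly as the two-argument overload above builds them:
* builder.getEML(ddxUrl, ddxString,
* new String[] {"filename", "data.nc", "dataset_url", "http://host/opendap/data.nc"});
*/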
/**
* Return the EML document generated using the DDX from the given DDX URL.
* This method reads from the EML cache.
*
* @param DDXURL The DDX URL is the key used to reference the EML document.
* @return The EML in a String.
* @throws Exception Thrown if caching is not on.
*/
public String getCachedEMLDoc(String DDXURL) throws Exception {
if (EMLCache == null)
throw new Exception("Caching is off but I was asked to read from the cache.");
return EMLCache.getCachedResponse(DDXURL);
}
/**
* Save the EML cache.
*
* @throws Exception Thrown if caching is not on.
*/
public void saveEMLCache() throws Exception {
if (EMLCache == null)
throw new Exception("Caching is off but I was asked to save the cache.");
EMLCache.saveState();
}
}
|
// $Id: Protocol.java,v 1.22 2005/04/15 10:31:41 belaban Exp $
package org.jgroups.stack;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jgroups.Event;
import org.jgroups.util.Queue;
import org.jgroups.util.QueueClosedException;
import org.jgroups.util.Util;
import java.util.Properties;
import java.util.Vector;
class UpHandler extends Thread {
private Queue mq=null;
private Protocol handler=null;
private ProtocolObserver observer=null;
protected final Log log=LogFactory.getLog(this.getClass());
public UpHandler(Queue mq, Protocol handler, ProtocolObserver observer) {
this.mq=mq;
this.handler=handler;
this.observer=observer;
if(handler != null)
setName("UpHandler (" + handler.getName() + ')');
else
setName("UpHandler");
setDaemon(true);
}
public void setObserver(ProtocolObserver observer) {
this.observer=observer;
}
/** Removes events from mq and calls handler.up(evt) */
public void run() {
Event evt;
while(!mq.closed()) {
try {
evt=(Event)mq.remove();
if(evt == null) {
if(log.isWarnEnabled()) log.warn("removed null event");
continue;
}
if(observer != null) { // call debugger hook (if installed)
if(observer.up(evt, mq.size()) == false) { // false means discard event
return;
}
}
handler.up(evt);
evt=null;
}
catch(QueueClosedException queue_closed) {
break;
}
catch(Throwable e) {
if(log.isWarnEnabled()) log.warn(getName() + " exception: " + e);
e.printStackTrace();
}
}
}
}
class DownHandler extends Thread {
private Queue mq=null;
private Protocol handler=null;
private ProtocolObserver observer=null;
protected final Log log=LogFactory.getLog(this.getClass());
public DownHandler(Queue mq, Protocol handler, ProtocolObserver observer) {
this.mq=mq;
this.handler=handler;
this.observer=observer;
if(handler != null)
setName("DownHandler (" + handler.getName() + ')');
else
setName("DownHandler");
setDaemon(true);
}
public void setObserver(ProtocolObserver observer) {
this.observer=observer;
}
/** Removes events from mq and calls handler.down(evt) */
public void run() {
Event evt;
while(!mq.closed()) {
try {
evt=(Event)mq.remove();
if(evt == null) {
if(log.isWarnEnabled()) log.warn("removed null event");
continue;
}
if(observer != null) { // call debugger hook (if installed)
if(observer.down(evt, mq.size()) == false) { // false means discard event
continue;
}
}
int type=evt.getType();
if(type == Event.ACK || type == Event.START || type == Event.STOP) {
if(handler.handleSpecialDownEvent(evt) == false)
continue;
}
handler.down(evt);
evt=null;
}
catch(QueueClosedException queue_closed) {
break;
}
catch(Throwable e) {
if(log.isWarnEnabled()) log.warn(getName() + " exception is " + e);
e.printStackTrace();
}
}
}
}
/**
* The Protocol class provides a set of common services for protocol layers. Each layer has to
* be a subclass of Protocol and override a number of methods (typically just <code>up()</code>,
* <code>down()</code> and <code>getName()</code>). Layers are stacked in a certain order to form
* a protocol stack. <a href=org.jgroups.Event.html>Events</a> are passed from lower
* layers to upper ones and vice versa. E.g. a Message received by the UDP layer at the bottom
* will be passed to its higher layer as an Event. That layer will in turn pass the Event to
* its higher layer and so on, until a layer handles the Message and sends a response or discards it,
* the former resulting in another Event being passed down the stack.<p>
* Each layer has 2 FIFO queues, one for up Events and one for down Events. When an Event is
* received by a layer (through the internal upcall <code>receiveUpEvent</code>), it is placed
* in the up-queue where it will be retrieved by the up-handler thread which will invoke method
* <code>up()</code> of the layer. The same applies for Events traveling down the stack. Handling
* of the up-handler and down-handler threads and the 2 FIFO queues is done by the Protocol
* class; subclasses will almost never have to override this behavior.<p>
* The important thing to bear in mind is that Events have to be passed on between layers in FIFO
* order, which is guaranteed by the Protocol implementation and must be guaranteed by subclasses
* implementing their own Event queuing.<p>
* <b>Note that each subclass of Protocol MUST provide an empty, public
* constructor!</b>
*/
public abstract class Protocol {
protected final Properties props=new Properties();
protected Protocol up_prot=null, down_prot=null;
protected ProtocolStack stack=null;
protected final Queue up_queue=new Queue();
protected final Queue down_queue=new Queue();
protected UpHandler up_handler=null;
protected int up_thread_prio=-1;
protected DownHandler down_handler=null;
protected int down_thread_prio=-1;
protected ProtocolObserver observer=null; // hook for debugger
private final static long THREAD_JOIN_TIMEOUT=1000;
protected boolean down_thread=true; // determines whether the down_handler thread should be started
protected boolean up_thread=true; // determines whether the up_handler thread should be started
protected final Log log=LogFactory.getLog(this.getClass());
/**
* Configures the protocol initially. A configuration string consists of name=value
* items, separated by a ';' (semicolon), e.g.:<pre>
* "loopback=false;unicast_inport=4444"
* </pre>
*/
public boolean setProperties(Properties props) {
if(props != null)
this.props.putAll(props);
return true;
}
/** Called by Configurator. Removes the up/down thread properties which are used by the Protocol directly and then
* calls setProperties(), which might invoke the setProperties() method of the actual protocol instance.
*/
public boolean setPropertiesInternal(Properties props) {
String str;
this.props.putAll(props);
str=props.getProperty("down_thread");
if(str != null) {
down_thread=Boolean.valueOf(str).booleanValue();
props.remove("down_thread");
}
str=props.getProperty("down_thread_prio");
if(str != null) {
down_thread_prio=Integer.parseInt(str);
props.remove("down_thread_prio");
}
str=props.getProperty("up_thread");
if(str != null) {
up_thread=Boolean.valueOf(str).booleanValue();
props.remove("up_thread");
}
str=props.getProperty("up_thread_prio");
if(str != null) {
up_thread_prio=Integer.parseInt(str);
props.remove("up_thread_prio");
}
return setProperties(props);
}
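// Illustrative note (not from the original source): given props containing
// down_thread=false and loopback=false, the method above strips and applies the
// threading-related keys and forwards only loopback=false to setProperties().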
public Properties getProperties() {
return props;
}
public void setObserver(ProtocolObserver observer) {
this.observer=observer;
observer.setProtocol(this);
if(up_handler != null)
up_handler.setObserver(observer);
if(down_handler != null)
down_handler.setObserver(observer);
}
/**
* Called after instance has been created (null constructor) and before protocol is started.
* Properties are already set. Other protocols are not yet connected and events cannot yet be sent.
* @exception Exception Thrown if protocol cannot be initialized successfully. This will cause the
* ProtocolStack to fail, so the channel constructor will throw an exception
*/
public void init() throws Exception {
}
/**
* This method is called on a {@link org.jgroups.Channel#connect(String)}. Starts work.
* Protocols are connected and queues are ready to receive events.
* Will be called <em>from bottom to top</em>. This call will replace
* the <b>START</b> and <b>START_OK</b> events.
* @exception Exception Thrown if protocol cannot be started successfully. This will cause the ProtocolStack
* to fail, so {@link org.jgroups.Channel#connect(String)} will throw an exception
*/
public void start() throws Exception {
}
/**
* This method is called on a {@link org.jgroups.Channel#disconnect()}. Stops work (e.g. by closing multicast socket).
* Will be called <em>from top to bottom</em>. This means that at the time of the method invocation the
* neighbor protocol below is still working. This method will replace the
* <b>STOP</b>, <b>STOP_OK</b>, <b>CLEANUP</b> and <b>CLEANUP_OK</b> events. The ProtocolStack guarantees that
* when this method is called all messages in the down queue will have been flushed
*/
public void stop() {
}
/**
* This method is called on a {@link org.jgroups.Channel#close()}.
* Does some cleanup; after the call the VM will terminate
*/
public void destroy() {
}
public Queue getUpQueue() {
return up_queue;
} // used by Debugger (ProtocolView)
public Queue getDownQueue() {
return down_queue;
} // used by Debugger (ProtocolView)
/** List of events that are required to be answered by some layer above.
@return Vector (of Integers) */
public Vector requiredUpServices() {
return null;
}
/** List of events that are required to be answered by some layer below.
@return Vector (of Integers) */
public Vector requiredDownServices() {
return null;
}
/** List of events that are provided to layers above (they will be handled when sent down from
above).
@return Vector (of Integers) */
public Vector providedUpServices() {
return null;
}
/** List of events that are provided to layers below (they will be handled when sent up from
below).
@return Vector (of Integers) */
public Vector providedDownServices() {
return null;
}
public abstract String getName(); // all protocol names have to be unique !
public Protocol getUpProtocol() {
return up_prot;
}
public Protocol getDownProtocol() {
return down_prot;
}
public void setUpProtocol(Protocol up_prot) {
this.up_prot=up_prot;
}
public void setDownProtocol(Protocol down_prot) {
this.down_prot=down_prot;
}
public void setProtocolStack(ProtocolStack stack) {
this.stack=stack;
}
/** Used internally. If overridden, call this method first. Only creates the up_handler thread
if up_thread is true */
public void startUpHandler() {
if(up_thread) {
if(up_handler == null) {
up_handler=new UpHandler(up_queue, this, observer);
if(up_thread_prio >= 0) {
try {
up_handler.setPriority(up_thread_prio);
}
catch(Throwable t) {
if(log.isErrorEnabled()) log.error("priority " + up_thread_prio +
" could not be set for thread: " + Util.getStackTrace(t));
}
}
up_handler.start();
}
}
}
/** Used internally. If overridden, call this method first. Only creates the down_handler thread
if down_thread is true */
public void startDownHandler() {
if(down_thread) {
if(down_handler == null) {
down_handler=new DownHandler(down_queue, this, observer);
if(down_thread_prio >= 0) {
try {
down_handler.setPriority(down_thread_prio);
}
catch(Throwable t) {
if(log.isErrorEnabled()) log.error("priority " + down_thread_prio +
" could not be set for thread: " + Util.getStackTrace(t));
}
}
down_handler.start();
}
}
}
/** Used internally. If overridden, call parent's method first */
public void stopInternal() {
up_queue.close(false); // this should terminate up_handler thread
if(up_handler != null && up_handler.isAlive()) {
try {
up_handler.join(THREAD_JOIN_TIMEOUT);
}
catch(Exception ex) {
}
if(up_handler != null && up_handler.isAlive()) {
up_handler.interrupt(); // still alive ? let's just kill it without mercy...
try {
up_handler.join(THREAD_JOIN_TIMEOUT);
}
catch(Exception ex) {
}
if(up_handler != null && up_handler.isAlive())
if(log.isErrorEnabled()) log.error("up_handler thread for " + getName() +
" was interrupted (in order to be terminated), but is still alive");
}
}
up_handler=null;
down_queue.close(false); // this should terminate down_handler thread
if(down_handler != null && down_handler.isAlive()) {
try {
down_handler.join(THREAD_JOIN_TIMEOUT);
}
catch(Exception ex) {
}
if(down_handler != null && down_handler.isAlive()) {
down_handler.interrupt(); // still alive ? let's just kill it without mercy...
try {
down_handler.join(THREAD_JOIN_TIMEOUT);
}
catch(Exception ex) {
}
if(down_handler != null && down_handler.isAlive())
if(log.isErrorEnabled()) log.error("down_handler thread for " + getName() +
" was interrupted (in order to be terminated), but is is still alive");
}
}
down_handler=null;
}
/**
* Internal method, should not be called by clients. Used by ProtocolStack. I would have
* used the 'friends' modifier, but this is available only in C++ ... If the up_handler thread
* is not available (up_thread == false), then directly call the up() method: we will run on the
* caller's thread (e.g. the protocol layer below us).
*/
protected void receiveUpEvent(Event evt) {
if(up_handler == null) {
if(observer != null) { // call debugger hook (if installed)
if(observer.up(evt, up_queue.size()) == false) { // false means discard event
return;
}
}
up(evt);
return;
}
try {
up_queue.add(evt);
evt=null;
}
catch(Exception e) {
if(log.isWarnEnabled()) log.warn("exception: " + e);
}
}
/**
* Internal method, should not be called by clients. Used by ProtocolStack. I would have
* used the 'friends' modifier, but this is available only in C++ ... If the down_handler thread
* is not available (down_thread == false), then directly call the down() method: we will run on the
* caller's thread (e.g. the protocol layer above us).
*/
protected void receiveDownEvent(Event evt) {
if(down_handler == null) {
if(observer != null) { // call debugger hook (if installed)
if(observer.down(evt, down_queue.size()) == false) { // false means discard event
return;
}
}
int type=evt.getType();
if(type == Event.ACK || type == Event.START || type == Event.STOP) {
if(handleSpecialDownEvent(evt) == false)
return;
}
down(evt);
return;
}
try {
down_queue.add(evt);
evt=null;
}
catch(Exception e) {
if(log.isWarnEnabled()) log.warn("exception: " + e);
}
}
/**
* Causes the event to be forwarded to the next layer up in the hierarchy. Typically called
* by the implementation of <code>Up</code> (when done).
*/
public void passUp(Event evt) {
if(observer != null) { // call debugger hook (if installed)
if(observer.passUp(evt) == false) { // false means don't pass up (=discard) event
return;
}
}
if(up_prot != null) {
up_prot.receiveUpEvent(evt);
evt=null; // give the garbage collector a hand
}
else
if(log.isErrorEnabled()) log.error("no upper layer available");
}
/**
* Causes the event to be forwarded to the next layer down in the hierarchy. Typically called
* by the implementation of <code>Down</code> (when done).
*/
public void passDown(Event evt) {
if(observer != null) { // call debugger hook (if installed)
if(observer.passDown(evt) == false) { // false means don't pass down (=discard) event
return;
}
}
if(down_prot != null) {
down_prot.receiveDownEvent(evt);
// evt=null; // give the garbage collector a hand
}
else
if(log.isErrorEnabled()) log.error("no lower layer available");
}
/**
* An event was received from the layer below. Usually the current layer will want to examine
* the event type and - depending on its type - perform some computation
* (e.g. removing headers from a MSG event type, or updating the internal membership list
* when receiving a VIEW_CHANGE event).
* Finally the event is either a) discarded, or b) an event is sent down
* the stack using <code>passDown()</code> or c) the event (or another event) is sent up
* the stack using <code>passUp()</code>.
*/
public void up(Event evt) {
passUp(evt);
}
/**
* An event is to be sent down the stack. The layer may want to examine its type and perform
* some action on it, depending on the event's type. If the event is a message MSG, then
* the layer may need to add a header to it (or do nothing at all) before sending it down
* the stack using <code>passDown()</code>. In case of a GET_ADDRESS event (which tries to
* retrieve the stack's address from one of the bottom layers), the layer may need to send
* a new response event back up the stack using <code>passUp()</code>.
*/
public void down(Event evt) {
passDown(evt);
}
/** These are special internal events that should not be handled by protocols
* @return boolean True: the event should be passed further down the stack. False: the event should
* be discarded (not passed down the stack)
*/
protected boolean handleSpecialDownEvent(Event evt) {
switch(evt.getType()) {
case Event.ACK:
if(down_prot == null) {
passUp(new Event(Event.ACK_OK));
return false; // don't pass down the stack
}
case Event.START:
try {
start();
// if we're the transport protocol, reply with a START_OK up the stack
if(down_prot == null) {
passUp(new Event(Event.START_OK, Boolean.TRUE));
return false; // don't pass down the stack
}
else
return true; // pass down the stack
}
catch(Exception e) {
passUp(new Event(Event.START_OK, new Exception("exception caused by " + getName() + ".start(): " + e)));
return false;
}
case Event.STOP:
stop();
if(down_prot == null) {
passUp(new Event(Event.STOP_OK, Boolean.TRUE));
return false; // don't pass down the stack
}
else
return true; // pass down the stack
default:
return true; // pass down by default
}
}
}
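/*
* Minimal subclass sketch (illustrative only, not part of the original source). A concrete
* layer typically overrides getName() plus up()/down() and forwards events it does not handle:
*
* public class ExampleLayer extends Protocol {
* public String getName() { return "EXAMPLE"; }
* public void up(Event evt) { passUp(evt); } // inspect/modify, then forward up
* public void down(Event evt) { passDown(evt); } // inspect/modify, then forward down
* }
*/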
|