package org.eclipse.che.plugin.docker.client;
import com.google.common.io.CharStreams;
import org.eclipse.che.plugin.docker.client.connection.CloseConnectionInputStream;
import org.eclipse.che.plugin.docker.client.connection.DockerConnection;
import org.eclipse.che.plugin.docker.client.connection.DockerResponse;
import org.mockito.Mock;
import org.mockito.Spy;
import org.mockito.testng.MockitoTestNGListener;
import org.testng.Assert;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Listeners;
import org.testng.annotations.Test;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URI;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.anyVararg;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.when;
/**
* @author Anton Korneta
*/
@Listeners(MockitoTestNGListener.class)
public class DockerConnectorTest {
@Spy
private DockerConnector dockerConnector = new DockerConnector(null);
@Mock
private DockerConnection dockerConnection;
@Mock
private DockerResponse dockerResponse;
@BeforeMethod
public void setup() throws IOException {
doReturn(dockerConnection).when(dockerConnector).openConnection(any(URI.class));
when(dockerConnection.method(any())).thenReturn(dockerConnection);
when(dockerConnection.entity(any(InputStream.class))).thenReturn(dockerConnection);
when(dockerConnection.headers(any())).thenReturn(dockerConnection);
when(dockerConnection.query(any(), anyVararg())).thenReturn(dockerConnection);
when(dockerConnection.path(anyString())).thenReturn(dockerConnection);
when(dockerConnection.request()).thenReturn(dockerResponse);
}
@Test
public void shouldGetResourcesFromContainer() throws IOException {
String resource = "stream data";
when(dockerResponse.getStatus()).thenReturn(200);
when(dockerResponse.getInputStream())
.thenReturn(new CloseConnectionInputStream(new ByteArrayInputStream(resource.getBytes()), dockerConnection));
String response = CharStreams.toString(new InputStreamReader(dockerConnector.getResource("id", "path")));
Assert.assertEquals(response, resource);
}
@Test(expectedExceptions = IOException.class, expectedExceptionsMessageRegExp = "Error response from docker API, status: 500, message: Error")
public void shouldProduceAnErrorWhenGetsResourcesFromContainer() throws IOException {
String msg = "Error";
when(dockerResponse.getStatus()).thenReturn(500);
when(dockerResponse.getInputStream())
.thenReturn(new CloseConnectionInputStream(new ByteArrayInputStream(msg.getBytes()), dockerConnection));
dockerConnector.getResource("id", "path");
}
@Test
public void shouldPutResourcesIntoContainer() throws IOException {
String file = "stream data";
when(dockerResponse.getStatus()).thenReturn(200);
InputStream source = new CloseConnectionInputStream(new ByteArrayInputStream(file.getBytes()), dockerConnection);
dockerConnector.putResource("id", "path", source, false);
}
@Test(expectedExceptions = IOException.class, expectedExceptionsMessageRegExp = "Error response from docker API, status: 500, message: Error")
public void shouldProduceAnErrorWhenPutsResourcesIntoContainer() throws IOException {
String msg = "Error";
when(dockerResponse.getStatus()).thenReturn(500);
when(dockerResponse.getInputStream())
.thenReturn(new ByteArrayInputStream(msg.getBytes()));
InputStream source = new CloseConnectionInputStream(new ByteArrayInputStream(msg.getBytes()), dockerConnection);
dockerConnector.putResource("id", "path", source, false);
}
}
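// A minimal sketch of the fluent request chain that the stubs in setup() emulate; the
// method, path and query values here are purely illustrative and not taken from the tests
// above. Every builder call on the mocked DockerConnection returns the connection itself,
// and request() resolves to the mocked DockerResponse, which is what lets the tests drive
// getResource/putResource end to end.
//
//   DockerResponse response = dockerConnection.method("GET")
//                                              .path("/containers/id/archive")
//                                              .query("path", "path")
//                                              .request();
//   // response.getStatus() / getInputStream() then return whatever the individual test stubbed.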
package com.redhat.ceylon.eclipse.code.propose;
import static com.redhat.ceylon.eclipse.code.hover.CeylonHover.getDocumentationFor;
import static com.redhat.ceylon.eclipse.code.propose.CompletionProcessor.NO_COMPLETIONS;
import static com.redhat.ceylon.eclipse.code.propose.ParameterContextValidator.findCharCount;
import static com.redhat.ceylon.eclipse.code.quickfix.CeylonQuickFixAssistant.importEdit;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import org.eclipse.jdt.internal.ui.text.correction.proposals.LinkedNamesAssistProposal.DeleteBlockingExitPolicy;
import org.eclipse.jface.text.BadLocationException;
import org.eclipse.jface.text.DocumentEvent;
import org.eclipse.jface.text.IDocument;
import org.eclipse.jface.text.IEditingSupport;
import org.eclipse.jface.text.IEditingSupportRegistry;
import org.eclipse.jface.text.IRegion;
import org.eclipse.jface.text.contentassist.ICompletionProposal;
import org.eclipse.jface.text.contentassist.IContextInformation;
import org.eclipse.jface.text.link.ILinkedModeListener;
import org.eclipse.jface.text.link.LinkedModeModel;
import org.eclipse.jface.text.link.LinkedModeUI;
import org.eclipse.jface.text.link.LinkedPositionGroup;
import org.eclipse.jface.text.link.ProposalPosition;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.text.edits.InsertEdit;
import org.eclipse.ui.internal.editors.text.EditorsPlugin;
import org.eclipse.ui.texteditor.link.EditorLinkedModeUI;
import com.redhat.ceylon.compiler.typechecker.model.Declaration;
import com.redhat.ceylon.compiler.typechecker.model.DeclarationWithProximity;
import com.redhat.ceylon.compiler.typechecker.model.Functional;
import com.redhat.ceylon.compiler.typechecker.model.Generic;
import com.redhat.ceylon.compiler.typechecker.model.Module;
import com.redhat.ceylon.compiler.typechecker.model.NothingType;
import com.redhat.ceylon.compiler.typechecker.model.Parameter;
import com.redhat.ceylon.compiler.typechecker.model.ParameterList;
import com.redhat.ceylon.compiler.typechecker.model.ProducedReference;
import com.redhat.ceylon.compiler.typechecker.model.ProducedType;
import com.redhat.ceylon.compiler.typechecker.model.Scope;
import com.redhat.ceylon.compiler.typechecker.model.TypeDeclaration;
import com.redhat.ceylon.compiler.typechecker.model.TypeParameter;
import com.redhat.ceylon.compiler.typechecker.model.Value;
import com.redhat.ceylon.eclipse.code.editor.CeylonEditor;
import com.redhat.ceylon.eclipse.code.editor.CeylonSourceViewer;
import com.redhat.ceylon.eclipse.code.editor.CeylonSourceViewerConfiguration;
import com.redhat.ceylon.eclipse.code.editor.Util;
import com.redhat.ceylon.eclipse.code.outline.CeylonLabelProvider;
import com.redhat.ceylon.eclipse.code.parse.CeylonParseController;
class DeclarationCompletionProposal extends CompletionProposal {
private final CeylonParseController cpc;
private final Declaration declaration;
private final boolean addimport;
private final ProducedReference producedReference;
private Scope scope;
DeclarationCompletionProposal(int offset, String prefix,
String desc, String text, boolean selectParams,
CeylonParseController cpc, Declaration d) {
this(offset, prefix, desc, text, selectParams,
cpc, d, false, null, null);
}
DeclarationCompletionProposal(int offset, String prefix,
String desc, String text, boolean selectParams,
CeylonParseController cpc, Declaration d,
boolean addimport, ProducedReference producedReference,
Scope scope) {
super(offset, prefix, CeylonLabelProvider.getImage(d),
desc, text, selectParams);
this.cpc = cpc;
this.declaration = d;
this.addimport = addimport;
this.producedReference = producedReference;
this.scope = scope;
}
@Override
public void apply(IDocument document) {
if (addimport) {
try {
List<InsertEdit> ies = importEdit(cpc.getRootNode(),
Collections.singleton(declaration), null, null);
for (InsertEdit ie: ies) {
ie.apply(document);
offset+=ie.getText().length();
}
}
catch (Exception e) {
e.printStackTrace();
}
}
super.apply(document);
if (EditorsPlugin.getDefault().getPreferenceStore()
.getBoolean(CeylonSourceViewerConfiguration.LINKED_MODE)) {
if (declaration instanceof Generic) {
ParameterList paramList = null;
if (declaration instanceof Functional && getFirstPosition(false)>0) {
List<ParameterList> pls = ((Functional) declaration).getParameterLists();
if (!pls.isEmpty() && !pls.get(0).getParameters().isEmpty()) {
paramList = pls.get(0);
}
}
enterLinkedMode(document, paramList, (Generic) declaration);
}
}
}
@Override
public Point getSelection(IDocument document) {
if (declaration instanceof Generic) {
ParameterList pl = null;
if (declaration instanceof Functional) {
List<ParameterList> pls = ((Functional) declaration).getParameterLists();
if (!pls.isEmpty() && !pls.get(0).getParameters().isEmpty()) {
pl = pls.get(0);
}
}
int first = getFirstPosition(pl==null);
if (first<=0) {
return super.getSelection(document);
}
int next = getNextPosition(document, first, pl==null);
int middle = getCompletionPosition(first, next);
return new Point(offset-prefix.length()+first+middle, next-middle);
}
return super.getSelection(document);
}
public int getNextPosition(IDocument document, int lastOffset,
boolean typeArgList) {
int loc = offset-prefix.length();
int comma = -1;
try {
int start = loc+lastOffset;
int end = loc+text.length()-1;
if (text.endsWith(";")) end
comma = findCharCount(1, document, start, end, ",;", "", true) - start;
}
catch (BadLocationException e) {
e.printStackTrace();
}
if (comma<0) {
int angleIndex = text.lastIndexOf('>');
int parenIndex = text.lastIndexOf(')');
int braceIndex = text.lastIndexOf('}');
comma = (typeArgList ? angleIndex : (braceIndex>parenIndex?braceIndex:parenIndex))-lastOffset;
}
return comma;
}
public String getAdditionalProposalInfo() {
return getDocumentationFor(cpc, declaration);
}
private IEditingSupport editingSupport;
public void enterLinkedMode(IDocument document, ParameterList parameterList,
Generic generic) {
boolean basicProposal = parameterList==null;
int paramCount = basicProposal ?
generic.getTypeParameters().size() :
parameterList.getParameters().size();
if (paramCount==0) return;
try {
final LinkedModeModel linkedModeModel = new LinkedModeModel();
final int loc = offset-prefix.length();
int first = getFirstPosition(basicProposal);
if (first<0) return;
int next = getNextPosition(document, first, basicProposal);
int i=0;
while (next>1 && i<paramCount) {
List<ICompletionProposal> props = new ArrayList<ICompletionProposal>();
if (basicProposal) {
addBasicProposals(generic, loc, first, props, i);
}
else {
addProposals(parameterList, loc, first, props, i);
}
LinkedPositionGroup linkedPositionGroup = new LinkedPositionGroup();
int middle = getCompletionPosition(first, next);
ProposalPosition linkedPosition = new ProposalPosition(document,
loc+first+middle, next-middle, i,
props.toArray(NO_COMPLETIONS));
linkedPositionGroup.addPosition(linkedPosition);
first = first+next+1;
next = getNextPosition(document, first, basicProposal);
linkedModeModel.addGroup(linkedPositionGroup);
i++;
}
linkedModeModel.forceInstall();
final CeylonEditor editor = (CeylonEditor) Util.getCurrentEditor();
linkedModeModel.addLinkingListener(new ILinkedModeListener() {
@Override
public void left(LinkedModeModel model, int flags) {
editor.setLinkedMode(null);
// linkedModeModel.exit(ILinkedModeListener.NONE);
CeylonSourceViewer viewer= editor.getCeylonSourceViewer();
if (viewer instanceof IEditingSupportRegistry) {
((IEditingSupportRegistry) viewer).unregister(editingSupport);
}
editor.getSite().getPage().activate(editor);
if ((flags&EXTERNAL_MODIFICATION)==0 && viewer!=null) {
viewer.invalidateTextPresentation();
}
}
@Override
public void suspend(LinkedModeModel model) {
editor.setLinkedMode(null);
}
@Override
public void resume(LinkedModeModel model, int flags) {
editor.setLinkedMode(model);
}
});
editor.setLinkedMode(linkedModeModel);
CeylonSourceViewer viewer = editor.getCeylonSourceViewer();
EditorLinkedModeUI ui= new EditorLinkedModeUI(linkedModeModel, viewer);
ui.setExitPosition(viewer, loc+text.length(), 0, i);
ui.setExitPolicy(new DeleteBlockingExitPolicy(document));
ui.setCyclingMode(LinkedModeUI.CYCLE_WHEN_NO_PARENT);
ui.setDoContextInfo(true);
ui.enter();
if (viewer instanceof IEditingSupportRegistry) {
editingSupport = new IEditingSupport() {
public boolean ownsFocusShell() {
Shell editorShell= editor.getSite().getShell();
Shell activeShell= editorShell.getDisplay().getActiveShell();
if (editorShell == activeShell)
return true;
return false;
}
public boolean isOriginator(DocumentEvent event, IRegion subjectRegion) {
return false; //leave on external modification outside positions
}
};
((IEditingSupportRegistry) viewer).register(editingSupport);
}
}
catch (Exception e) {
e.printStackTrace();
}
}
protected int getCompletionPosition(int first, int next) {
return text.substring(first, first+next-1).lastIndexOf(' ')+1;
}
protected int getFirstPosition(boolean basicProposal) {
int anglePos = text.indexOf('<');
int parenPos = text.indexOf('(');
int bracePos = text.indexOf('{');
return (basicProposal ? anglePos : (bracePos>0&&(bracePos<parenPos||parenPos<0) ? bracePos : parenPos))+1;
}
private void addProposals(ParameterList parameterList, final int loc,
int first, List<ICompletionProposal> props, final int index) {
Parameter p = parameterList.getParameters().get(index);
if (p.getModel().isDynamicallyTyped()) {
return;
}
ProducedType type = producedReference.getTypedParameter(p)
.getType();
if (type==null) return;
TypeDeclaration td = type.getDeclaration();
for (DeclarationWithProximity dwp: getSortedProposedValues()) {
Declaration d = dwp.getDeclaration();
if (d instanceof Value && !dwp.isUnimported()) {
if (d.getUnit().getPackage().getNameAsString()
.equals(Module.LANGUAGE_MODULE_NAME)) {
if (d.getName().equals("process") ||
d.getName().equals("language") ||
d.getName().equals("emptyIterator") ||
d.getName().equals("infinity") ||
d.getName().endsWith("IntegerValue") ||
d.getName().equals("finished")) {
continue;
}
}
ProducedType vt = ((Value) d).getType();
if (vt!=null && !vt.isNothing() &&
((td instanceof TypeParameter) &&
isInBounds(((TypeParameter)td).getSatisfiedTypes(), vt) ||
vt.isSubtypeOf(type))) {
addProposal(loc, first, props, index, d, false);
}
}
}
}
private void addBasicProposals(Generic generic, final int loc,
int first, List<ICompletionProposal> props, final int index) {
TypeParameter p = generic.getTypeParameters().get(index);
for (DeclarationWithProximity dwp: getSortedProposedValues()) {
Declaration d = dwp.getDeclaration();
if (d instanceof TypeDeclaration && !dwp.isUnimported()) {
TypeDeclaration td = (TypeDeclaration) d;
ProducedType t = td.getType();
if (td.getTypeParameters().isEmpty() &&
!td.isAnnotation() &&
!(td instanceof NothingType) &&
!td.inherits(td.getUnit().getExceptionDeclaration())) {
if (td.getUnit().getPackage().getNameAsString()
.equals(Module.LANGUAGE_MODULE_NAME)) {
if (!td.getName().equals("Object") &&
!td.getName().equals("Anything") &&
!td.getName().equals("String") &&
!td.getName().equals("Integer") &&
!td.getName().equals("Character") &&
!td.getName().equals("Float") &&
!td.getName().equals("Boolean")) {
continue;
}
}
if (isInBounds(p.getSatisfiedTypes(), t)) {
addProposal(loc, first, props, index, d, true);
}
}
}
}
}
public boolean isInBounds(List<ProducedType> upperBounds, ProducedType t) {
boolean ok = true;
for (ProducedType ub: upperBounds) {
if (!t.isSubtypeOf(ub) &&
!(ub.containsTypeParameters() &&
t.getDeclaration().inherits(ub.getDeclaration()))) {
ok = false;
break;
}
}
return ok;
}
public List<DeclarationWithProximity> getSortedProposedValues() {
List<DeclarationWithProximity> results = new ArrayList<DeclarationWithProximity>(
scope.getMatchingDeclarations(cpc.getRootNode().getUnit(), "", 0).values());
Collections.sort(results, new Comparator<DeclarationWithProximity>() {
public int compare(DeclarationWithProximity x, DeclarationWithProximity y) {
if (x.getProximity()<y.getProximity()) return -1;
if (x.getProximity()>y.getProximity()) return 1;
int c = x.getDeclaration().getName().compareTo(y.getDeclaration().getName());
if (c!=0) return c;
return x.getDeclaration().getQualifiedNameString()
.compareTo(y.getDeclaration().getQualifiedNameString());
}
});
return results;
}
private void addProposal(final int loc, int first,
List<ICompletionProposal> props, final int index,
final Declaration d, final boolean basic) {
props.add(new ICompletionProposal() {
public String getAdditionalProposalInfo() {
return null;
}
@Override
public void apply(IDocument document) {
try {
IRegion li = document.getLineInformationOfOffset(loc);
int endOfLine = li.getOffset() + li.getLength();
int startOfArgs = getFirstPosition(basic);
int offset = findCharCount(index, document,
loc+startOfArgs, endOfLine,
",;", "", true)+1;
while (offset>0&&document.getChar(offset)==' ') offset++;
int nextOffset = findCharCount(index+1, document,
loc+startOfArgs, endOfLine,
",;", "", true);
int middleOffset = findCharCount(1, document,
offset, nextOffset,
"=", "", true)+1;
if (middleOffset>0&&document.getChar(middleOffset)=='>') middleOffset++;
while (middleOffset>0&&document.getChar(middleOffset)==' ') middleOffset++;
if (middleOffset>offset&&middleOffset<nextOffset) offset = middleOffset;
document.replace(offset, nextOffset-offset, d.getName());
}
catch (BadLocationException e) {
e.printStackTrace();
}
}
@Override
public Point getSelection(IDocument document) {
return null;
}
@Override
public String getDisplayString() {
return d.getName();
}
@Override
public Image getImage() {
return CeylonLabelProvider.getImage(d);
}
@Override
public IContextInformation getContextInformation() {
return null;
}
});
}
@Override
public IContextInformation getContextInformation() {
if (declaration instanceof Functional) {
List<ParameterList> pls = ((Functional) declaration).getParameterLists();
if (!pls.isEmpty() &&
//TODO: for now there is no context info for type args lists - fix that!
!(pls.get(0).getParameters().isEmpty()&&!((Generic)declaration).getTypeParameters().isEmpty())) {
int paren = text.indexOf('(');
if (paren<0 && !getDisplayString().equals("show parameters")) { //ugh, horrible, todo!
return super.getContextInformation();
}
return new ParameterContextInformation(declaration,
producedReference, pls.get(0), offset-prefix.length());
}
}
return null;
}
}
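// A worked example of the position arithmetic above, assuming a plain invocation completion
// text (the sample text is illustrative; the real methods work on document offsets via
// findCharCount, but the index math is the same). getFirstPosition returns the index just past
// the opening '(' (or '<' for a type-argument list), and getNextPosition then scans forward to
// the next ',' or falls back to the closing ')', '}' or '>'.
//
//   String text = "printAll(elements, separator)";
//   int first = text.indexOf('(') + 1;        // 9  -> start of the first argument position
//   int comma = text.indexOf(',', first);     // 17 -> end of the first linked position
//   int close = text.lastIndexOf(')');        // 28 -> fallback end when no ',' or ';' is found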
package org.eclipse.oomph.workingsets.presentation;
import org.eclipse.oomph.workingsets.WorkingSet;
import org.eclipse.oomph.workingsets.WorkingSetGroup;
import org.eclipse.oomph.workingsets.util.WorkingSetsUtil;
import org.eclipse.emf.common.util.BasicEMap;
import org.eclipse.emf.common.util.EMap;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.resources.IResourceChangeEvent;
import org.eclipse.core.resources.IResourceChangeListener;
import org.eclipse.core.resources.IResourceDelta;
import org.eclipse.core.resources.IResourceDeltaVisitor;
import org.eclipse.core.resources.IWorkspace;
import org.eclipse.core.resources.IWorkspaceRoot;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IAdaptable;
import org.eclipse.core.runtime.preferences.IEclipsePreferences;
import org.eclipse.core.runtime.preferences.IEclipsePreferences.PreferenceChangeEvent;
import org.eclipse.swt.widgets.Display;
import org.eclipse.ui.IAggregateWorkingSet;
import org.eclipse.ui.IPageLayout;
import org.eclipse.ui.IViewPart;
import org.eclipse.ui.IWorkbenchPage;
import org.eclipse.ui.IWorkbenchWindow;
import org.eclipse.ui.IWorkingSet;
import org.eclipse.ui.IWorkingSetManager;
import org.eclipse.ui.PlatformUI;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* @author Eike Stepper
*/
public class WorkingSetManager
{
private static final String WORKING_SET_PAGE = "org.eclipse.jdt.ui.JavaWorkingSetPage";
private static final String PACKAGE_EXPLORER_ID = "org.eclipse.jdt.ui.PackageExplorer";
private static final IWorkingSetManager MANAGER = PlatformUI.getWorkbench().getWorkingSetManager();
private static final IWorkspace WORKSPACE = ResourcesPlugin.getWorkspace();
public static final WorkingSetManager INSTANCE = new WorkingSetManager();
private final IEclipsePreferences.IPreferenceChangeListener preferencesListener = new IEclipsePreferences.IPreferenceChangeListener()
{
public void preferenceChange(PreferenceChangeEvent event)
{
if (WorkingSetsUtil.WORKING_SET_GROUP_PREFERENCE_KEY.equals(event.getKey()))
{
apply();
}
}
};
private final IResourceChangeListener resourceChangeListener = new IResourceChangeListener()
{
public void resourceChanged(IResourceChangeEvent event)
{
if (!workingSetGroup.getWorkingSets().isEmpty())
{
IResourceDelta delta = event.getDelta();
if (delta != null)
{
try
{
// Compute the working sets and update them relative to our workspace delta, i.e., relative to added and
// removed projects.
class ResourceDeltaVisitor implements IResourceDeltaVisitor
{
List<IProject> addedProjects = new ArrayList<IProject>();
List<IProject> removedProjects = new ArrayList<IProject>();
public boolean visit(IResourceDelta delta) throws CoreException
{
IResource resource = delta.getResource();
if (resource instanceof IWorkspaceRoot)
{
return true;
}
if (resource instanceof IProject)
{
int kind = delta.getKind();
if (kind == IResourceDelta.ADDED)
{
IProject project = (IProject)resource;
if (!project.isHidden())
{
addedProjects.add(project);
}
}
else if (kind == IResourceDelta.REMOVED)
{
IProject project = (IProject)resource;
if (!project.isHidden())
{
removedProjects.add(project);
}
}
}
return false;
}
}
ResourceDeltaVisitor resourceDeltaVisitor = new ResourceDeltaVisitor();
delta.accept(resourceDeltaVisitor);
updateProjects(resourceDeltaVisitor.addedProjects, resourceDeltaVisitor.removedProjects);
}
catch (CoreException ex)
{
// Ignore
}
}
}
}
};
private WorkingSetGroup workingSetGroup;
public WorkingSetManager()
{
workingSetGroup = WorkingSetsUtil.getWorkingSetGroup();
// Listen for projects being added or removed from the workspace and for preferences changing.
WORKSPACE.addResourceChangeListener(resourceChangeListener);
WorkingSetsUtil.WORKING_SET_GROUP_PREFERENCES.addPreferenceChangeListener(preferencesListener);
}
/**
* Clean up the listeners.
*/
public void dispose()
{
WORKSPACE.removeResourceChangeListener(resourceChangeListener);
WorkingSetsUtil.WORKING_SET_GROUP_PREFERENCES.removePreferenceChangeListener(preferencesListener);
}
/**
* Returns a map with an entry for every working set defined in the working set group, mapping the working set name either to the elements of the real working set or to null if there isn't one.
*/
private EMap<String, Set<IAdaptable>> getWorkingSets()
{
EMap<String, Set<IAdaptable>> workingSets = new BasicEMap<String, Set<IAdaptable>>();
for (WorkingSet workingSet : workingSetGroup.getWorkingSets())
{
String name = workingSet.getName();
IWorkingSet iWorkingSet = MANAGER.getWorkingSet(name);
workingSets.put(name, iWorkingSet == null ? new LinkedHashSet<IAdaptable>() : new LinkedHashSet<IAdaptable>(Arrays.asList(iWorkingSet.getElements())));
}
return workingSets;
}
/**
* Update the real working sets based on the map.
* This deletes the real working set for any map entry with a null value and creates or updates the real working set for every other entry.
*/
private void apply(final EMap<String, Set<IAdaptable>> workingSets)
{
for (Map.Entry<String, Set<IAdaptable>> entry : workingSets)
{
String key = entry.getKey();
Set<IAdaptable> value = entry.getValue();
IWorkingSet workingSet = MANAGER.getWorkingSet(key);
if (workingSet == null)
{
if (value != null)
{
workingSet = MANAGER.createWorkingSet(key, value.toArray(new IAdaptable[value.size()]));
workingSet.setLabel(key);
workingSet.setId(WORKING_SET_PAGE);
MANAGER.addWorkingSet(workingSet);
}
}
else
{
if (value == null)
{
MANAGER.removeWorkingSet(workingSet);
}
else
{
workingSet.setElements(value.toArray(new IAdaptable[value.size()]));
}
}
}
managePackageExplorer(true);
}
private void managePackageExplorer(boolean addListener)
{
if (WORKSPACE.getRoot().getProjects().length != 0)
{
try
{
boolean handledNavigator = false;
boolean handledPackageExplorer = false;
for (IWorkbenchWindow workbenchWindow : PlatformUI.getWorkbench().getWorkbenchWindows())
{
for (IWorkbenchPage workbenchPage : workbenchWindow.getPages())
{
if (!handledNavigator)
{
Object commonNavigator = workbenchPage.findView(IPageLayout.ID_PROJECT_EXPLORER);
if (commonNavigator != null)
{
handledNavigator = true;
Method getNavigatorActionServiceMethod = commonNavigator.getClass().getMethod("getNavigatorActionService");
Object navigatorActionService = getNavigatorActionServiceMethod.invoke(commonNavigator);
Field actionProviderInstancesField = navigatorActionService.getClass().getDeclaredField("actionProviderInstances");
actionProviderInstancesField.setAccessible(true);
HashMap<?, ?> object = (HashMap<?, ?>)actionProviderInstancesField.get(navigatorActionService);
if (object != null)
{
for (Object value : object.values())
{
Class<? extends Object> theClass = value.getClass();
if ("org.eclipse.ui.internal.navigator.resources.actions.WorkingSetActionProvider".equals(theClass.getName()))
{
Field workingSetField = theClass.getDeclaredField("workingSet");
workingSetField.setAccessible(true);
IWorkingSet oldWorkingSet = (IWorkingSet)workingSetField.get(value);
IWorkingSet[] activeWorkingSets = oldWorkingSet == null ? new IWorkingSet[0]
: oldWorkingSet.isAggregateWorkingSet() ? ((IAggregateWorkingSet)oldWorkingSet).getComponents() : new IWorkingSet[] { oldWorkingSet };
Set<IWorkingSet> allWorkingSets = new LinkedHashSet<IWorkingSet>(Arrays.asList(activeWorkingSets));
allWorkingSets.addAll(Arrays.asList(MANAGER.getAllWorkingSets()));
List<IWorkingSet> newActiveWorkingSets = getActiveWorkingSets(allWorkingSets.toArray(new IWorkingSet[allWorkingSets.size()]),
activeWorkingSets);
StringBuilder id = new StringBuilder("Aggregate:");
for (Iterator<IWorkingSet> it = newActiveWorkingSets.iterator(); it.hasNext();)
{
IWorkingSet iWorkingSet = it.next();
if (iWorkingSet == null)
{
it.remove();
}
else
{
id.append(iWorkingSet.getName());
id.append(":");
}
}
IWorkingSet aggregateWorkingSet = MANAGER.getWorkingSet(id.toString());
if (aggregateWorkingSet == null)
{
aggregateWorkingSet = MANAGER.createAggregateWorkingSet(id.toString(), "Multiple Working Sets",
newActiveWorkingSets.toArray(new IWorkingSet[newActiveWorkingSets.size()]));
MANAGER.addWorkingSet(aggregateWorkingSet);
}
MANAGER.addRecentWorkingSet(aggregateWorkingSet);
Method setWorkingSetMethod = theClass.getDeclaredMethod("setWorkingSet", IWorkingSet.class);
setWorkingSetMethod.setAccessible(true);
setWorkingSetMethod.invoke(value, aggregateWorkingSet);
}
}
}
}
}
if (!handledPackageExplorer)
{
IViewPart packageExplorer = workbenchPage.findView(PACKAGE_EXPLORER_ID);
if (packageExplorer != null)
{
handledPackageExplorer = true;
Class<? extends IViewPart> packageExplorerClass = packageExplorer.getClass();
Method getWorkingSetModelMethod = packageExplorerClass.getMethod("getWorkingSetModel");
Object workingSetModel = getWorkingSetModelMethod.invoke(packageExplorer);
if (workingSetModel != null)
{
Class<?> workingSetModelClass = workingSetModel.getClass();
Method getAllWorkingSetsMethod = workingSetModelClass.getMethod("getAllWorkingSets");
IWorkingSet[] allWorkingSets = (IWorkingSet[])getAllWorkingSetsMethod.invoke(workingSetModel);
Method getActiveWorkingSetsMethod = workingSetModelClass.getMethod("getActiveWorkingSets");
IWorkingSet[] activeWorkingSets = (IWorkingSet[])getActiveWorkingSetsMethod.invoke(workingSetModel);
List<IWorkingSet> newActiveWorkingSets = getActiveWorkingSets(allWorkingSets, activeWorkingSets);
IWorkingSet[] orderedActiveWorkingSetsArray = newActiveWorkingSets.toArray(new IWorkingSet[newActiveWorkingSets.size()]);
Method setWorkingSetsMethod = workingSetModelClass.getMethod("setActiveWorkingSets", IWorkingSet[].class);
setWorkingSetsMethod.invoke(workingSetModel, new Object[] { orderedActiveWorkingSetsArray });
}
}
}
}
}
}
catch (NoSuchMethodException ex)
{
WorkingSetsEditorPlugin.INSTANCE.log(ex);
}
catch (SecurityException ex)
{
WorkingSetsEditorPlugin.INSTANCE.log(ex);
}
catch (IllegalAccessException ex)
{
WorkingSetsEditorPlugin.INSTANCE.log(ex);
}
catch (InvocationTargetException ex)
{
WorkingSetsEditorPlugin.INSTANCE.log(ex);
}
catch (NoSuchFieldException ex)
{
WorkingSetsEditorPlugin.INSTANCE.log(ex);
}
}
}
private List<IWorkingSet> getActiveWorkingSets(IWorkingSet[] allWorkingSets, IWorkingSet[] activeWorkingSets)
{
Map<WorkingSet, IWorkingSet> managedWorkingSets = new HashMap<WorkingSet, IWorkingSet>();
for (int i = 0; i < allWorkingSets.length; ++i)
{
IWorkingSet iWorkingSet = allWorkingSets[i];
WorkingSet workingSet = workingSetGroup.getWorkingSet(iWorkingSet.getName());
if (workingSet != null)
{
managedWorkingSets.put(workingSet, iWorkingSet);
}
}
Map<IWorkingSet, List<IWorkingSet>> orderedWorkingSetGroups = new LinkedHashMap<IWorkingSet, List<IWorkingSet>>();
for (WorkingSet workingSet : workingSetGroup.getWorkingSets())
{
IWorkingSet iWorkingSet = managedWorkingSets.get(workingSet);
List<IWorkingSet> group = new ArrayList<IWorkingSet>();
group.add(iWorkingSet);
orderedWorkingSetGroups.put(iWorkingSet, group);
}
List<IWorkingSet> newActiveWorkingSets = new ArrayList<IWorkingSet>();
List<IWorkingSet> group = newActiveWorkingSets;
for (IWorkingSet iWorkingSet : activeWorkingSets)
{
List<IWorkingSet> targetGroup = orderedWorkingSetGroups.get(iWorkingSet);
if (targetGroup == null)
{
group.add(iWorkingSet);
}
else
{
group = targetGroup;
}
}
for (List<IWorkingSet> workingSets : orderedWorkingSetGroups.values())
{
newActiveWorkingSets.addAll(workingSets);
}
return newActiveWorkingSets;
}
private void updateProjects(final List<IProject> addedProjects, final List<IProject> removedProjects)
{
// Do this on the UI thread to avoid problems with JDT's getting out of sync with respect to our updates.
Display.getDefault().asyncExec(new Runnable()
{
public void run()
{
EMap<String, Set<IAdaptable>> workingSets = getWorkingSets();
boolean isChanged = false;
for (IProject project : addedProjects)
{
if (addProject(project, workingSets))
{
isChanged = true;
}
}
for (IProject project : removedProjects)
{
if (removeProject(project, workingSets))
{
isChanged = true;
}
}
if (isChanged)
{
apply(workingSets);
}
}
});
}
/**
* Compute the elements for the working sets based on the projects in the workspace.
* Returns <code>true</code> only if a project was added to some working set.
*/
private boolean updateProjects(EMap<String, Set<IAdaptable>> workingSets)
{
boolean result = false;
for (IProject project : WORKSPACE.getRoot().getProjects())
{
if (addProject(project, workingSets))
{
result = true;
}
}
return result;
}
/**
* Adds the project to the appropriate working set entry, if applicable.
* Returns <code>true</code> only if the project was added to some working set.
*/
private boolean addProject(IProject project, EMap<String, Set<IAdaptable>> workingSets)
{
boolean result = false;
for (WorkingSet workingSet : workingSetGroup.getWorkingSets())
{
if (workingSet.matches(project))
{
String name = workingSet.getName();
Set<IAdaptable> elements = workingSets.get(name);
if (elements == null)
{
elements = new LinkedHashSet<IAdaptable>();
workingSets.put(name, elements);
}
elements.add(project);
result = true;
}
}
return result;
}
/**
* Removes the project from the appropriate working set entry.
* Returns <code>true</code> only if the project was removed from some working set.
*/
private boolean removeProject(IProject project, EMap<String, Set<IAdaptable>> workingSets)
{
boolean result = false;
for (WorkingSet workingSet : workingSetGroup.getWorkingSets())
{
if (workingSet.matches(project))
{
String name = workingSet.getName();
Set<IAdaptable> elements = workingSets.get(name);
if (elements != null)
{
elements.remove(project);
result = true;
}
}
}
return result;
}
public void apply()
{
WorkingSetGroup oldWorkingSetGroup = workingSetGroup;
// Compute the working sets for the new working group.
workingSetGroup = WorkingSetsUtil.getWorkingSetGroup();
final EMap<String, Set<IAdaptable>> workingSets = new BasicEMap<String, Set<IAdaptable>>();
// Update the map to include null (to cause a delete) for any old working set not present in the new ones.
for (WorkingSet workingSet : oldWorkingSetGroup.getWorkingSets())
{
String name = workingSet.getName();
workingSets.put(name, null);
}
// Update the map to include empty sets (to cause an add) for any new working set not already present.
for (WorkingSet workingSet : workingSetGroup.getWorkingSets())
{
String name = workingSet.getName();
workingSets.put(name, new LinkedHashSet<IAdaptable>());
}
// Update the working sets for all the projects in the workspace and apply the result to the real working sets.
if (!workingSets.isEmpty())
{
Display.getDefault().asyncExec(new Runnable()
{
public void run()
{
updateProjects(workingSets);
apply(workingSets);
}
});
}
}
}
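// A small sketch of the map protocol consumed by apply(EMap), as described in its Javadoc;
// the working-set names are illustrative and the types are the ones imported by the class
// above. A null value deletes the corresponding real working set, while a non-null (possibly
// empty) set creates or updates it.
//
//   EMap<String, Set<IAdaptable>> workingSets = new BasicEMap<String, Set<IAdaptable>>();
//   workingSets.put("Obsolete Working Set", null);                          // -> removeWorkingSet
//   workingSets.put("Fresh Working Set", new LinkedHashSet<IAdaptable>());  // -> createWorkingSet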
package org.yakindu.sct.model.stext.ui.contentassist;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.jface.text.contentassist.ICompletionProposal;
import org.eclipse.xtext.EnumLiteralDeclaration;
import org.eclipse.xtext.Keyword;
import org.eclipse.xtext.RuleCall;
import org.eclipse.xtext.XtextFactory;
import org.eclipse.xtext.ui.editor.contentassist.ConfigurableCompletionProposal;
import org.eclipse.xtext.ui.editor.contentassist.ContentAssistContext;
import org.eclipse.xtext.ui.editor.contentassist.ICompletionProposalAcceptor;
import org.yakindu.base.types.Operation;
import org.yakindu.sct.model.stext.services.STextGrammarAccess;
import org.yakindu.sct.model.stext.stext.ElementReferenceExpression;
import org.yakindu.sct.model.stext.stext.FeatureCall;
import org.yakindu.sct.model.stext.stext.InterfaceScope;
import org.yakindu.sct.model.stext.stext.InternalScope;
import org.yakindu.sct.model.stext.stext.SimpleScope;
import org.yakindu.sct.model.stext.stext.StatechartSpecification;
import org.yakindu.sct.model.stext.stext.TransitionSpecification;
import org.yakindu.sct.model.stext.stext.VariableDefinition;
import com.google.inject.Inject;
/**
* Several filters to make proposals more useful.
*
* @author muehlbrandt
*/
public class STextProposalProvider extends AbstractSTextProposalProvider {
@Inject
private STextGrammarAccess grammarAccess;
/**
* Determines whether a keyword should be shown in the proposal view.
*
* Depending on the ContentAssistContext, builds a list of keywords that
* should not be displayed by the proposal view.
*/
@Override
public void completeKeyword(Keyword keyword,
ContentAssistContext contentAssistContext,
ICompletionProposalAcceptor acceptor) {
List<Keyword> suppressKeywords = new ArrayList<Keyword>();
// context Transition
if (contentAssistContext.getRootModel() instanceof TransitionSpecification) {
suppressKeywords.addAll(getKeywords(grammarAccess.getEntryEventAccess()
.getGroup().eContents()));
suppressKeywords.addAll(getKeywords(grammarAccess.getExitEventAccess()
.getGroup().eContents()));
}
// context States
else if (contentAssistContext.getRootModel() instanceof SimpleScope) {
suppressKeywords.addAll(getKeywords(grammarAccess
.getVariableDefinitionAccess().getGroup().eContents()));
suppressKeywords.addAll(getKeywords(grammarAccess
.getEventDefinitionAccess().getGroup().eContents()));
suppressKeywords.addAll(getKeywords(grammarAccess.getExitpointAccess()
.getGroup().eContents()));
suppressKeywords.addAll(getKeywords(grammarAccess.getEntrypointAccess()
.getGroup().eContents()));
suppressKeywords.addAll(getKeywords(grammarAccess.getDirectionAccess()
.getAlternatives().eContents()));
suppressKeywords.addAll(getKeywords(grammarAccess
.getOperationDefinitionAccess().getGroup().eContents()));
}
// context Statechart
else if (contentAssistContext.getRootModel() instanceof StatechartSpecification) {
suppressKeywords.addAll(getKeywords(grammarAccess.getExitEventAccess()
.getGroup().eContents()));
suppressKeywords.addAll(getKeywords(grammarAccess.getEntryEventAccess()
.getGroup().eContents()));
}
EObject currentModel = contentAssistContext.getCurrentModel();
if (currentModel instanceof InterfaceScope) {
suppressKeywords.addAll(getKeywords(grammarAccess.getLocalReactionAccess()
.getGroup().eContents()));
suppressKeywords.addAll(getKeywords(grammarAccess.getAlwaysEventAccess()
.getGroup().eContents()));
suppressKeywords.addAll(getKeywords(grammarAccess.getOnCycleEventAccess()
.getGroup().eContents()));
suppressKeywords.addAll(getKeywords(grammarAccess.getTimeEventTypeAccess()
.getAlternatives().eContents()));
suppressKeywords.add(grammarAccess.getDirectionAccess()
.getLOCALLocalKeyword_0_0());
}
if (currentModel instanceof FeatureCall) {
FeatureCall featureCall = (FeatureCall) currentModel;
if (!(featureCall.getFeature() instanceof Operation)) {
suppressKeywords.add(grammarAccess.getFeatureCallAccess()
.getOperationCallLeftParenthesisKeyword_1_3_0_0());
}
}
if (currentModel instanceof ElementReferenceExpression) {
ElementReferenceExpression referenceExpression = (ElementReferenceExpression) currentModel;
if (!(referenceExpression.getReference() instanceof Operation)) {
suppressKeywords.add(grammarAccess
.getElementReferenceExpressionAccess()
.getOperationCallLeftParenthesisKeyword_2_0_0());
}
}
if (currentModel instanceof InternalScope) {
suppressKeywords.add(grammarAccess.getDirectionAccess()
.getINInKeyword_1_0());
suppressKeywords.add(grammarAccess.getDirectionAccess()
.getOUTOutKeyword_2_0());
}
if (!suppressKeywords.contains(keyword)) {
super.completeKeyword(keyword, contentAssistContext,
new AcceptorDelegate(acceptor));
}
}
private List<Keyword> getKeywords(EList<EObject> list) {
final List<Keyword> keywords = new ArrayList<Keyword>();
for (EObject eObject : list) {
if (eObject instanceof Keyword) {
keywords.add((Keyword) eObject);
} else if (eObject instanceof EnumLiteralDeclaration) {
keywords.add(((EnumLiteralDeclaration) eObject).getLiteral());
}
}
return keywords;
}
@Override
public void complete_BOOL(EObject model, RuleCall ruleCall,
ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
ICompletionProposalAcceptor priorityOptimizer = getCustomAcceptor(
model, "boolean", acceptor);
for (String s : new String[] { "true", "false", "yes", "no" }) {
ICompletionProposal proposal = createCompletionProposal(s, s
+ " - " + ruleCall.getRule().getName(), null, context);
priorityOptimizer.accept(proposal);
}
}
protected ICompletionProposalAcceptor getCustomAcceptor(EObject model,
String typeName, ICompletionProposalAcceptor acceptor) {
ICompletionProposalAcceptor priorityOptimizer = acceptor;
if (model instanceof VariableDefinition) {
VariableDefinition vd = (VariableDefinition) model;
if (vd.getType() != null
&& typeName.equalsIgnoreCase(vd.getType().getName())) {
priorityOptimizer = new ICompletionProposalAcceptor.Delegate(
acceptor) {
@Override
public void accept(ICompletionProposal proposal) {
alterPriority(proposal, 1);
super.accept(proposal);
}
};
}
}
return priorityOptimizer;
}
@Override
public void complete_STRING(EObject model, RuleCall ruleCall,
ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
super.complete_STRING(model, ruleCall, context,
getCustomAcceptor(model, "string", acceptor));
}
@Override
public void complete_INT(EObject model, RuleCall ruleCall,
ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
super.complete_INT(model, ruleCall, context,
getCustomAcceptor(model, "integer", acceptor));
}
@Override
public void complete_HEX(EObject model, RuleCall ruleCall,
ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
ICompletionProposalAcceptor priorityOptimizer = getCustomAcceptor(
model, "integer", acceptor);
String proposalText = "0x1";
ICompletionProposal proposal = createCompletionProposal(proposalText,
proposalText + " - " + ruleCall.getRule().getName(), null,
context);
if (proposal instanceof ConfigurableCompletionProposal) {
ConfigurableCompletionProposal configurable = (ConfigurableCompletionProposal) proposal;
configurable
.setSelectionStart(configurable.getReplacementOffset() + 2);
configurable.setSelectionLength(proposalText.length() - 2);
configurable.setAutoInsertable(false);
configurable.setSimpleLinkedMode(context.getViewer(), '\t', ' ');
}
priorityOptimizer.accept(proposal);
}
@Override
public void complete_FLOAT(EObject model, RuleCall ruleCall,
ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
ICompletionProposalAcceptor priorityOptimizer = getCustomAcceptor(
model, "real", acceptor);
String proposalText = "0.1";
ICompletionProposal proposal = createCompletionProposal(proposalText,
proposalText + " - " + ruleCall.getRule().getName(), null,
context);
priorityOptimizer.accept(proposal);
}
private void alterPriority(ICompletionProposal proposal, int delta) {
if (proposal == null
|| !(proposal instanceof ConfigurableCompletionProposal))
return;
ConfigurableCompletionProposal castedProposal = (ConfigurableCompletionProposal) proposal;
castedProposal.setPriority(castedProposal.getPriority() + delta);
}
/**
* The acceptor delegate creates a dummy EObject of type Keyword for the
* user help hover integration.
*
*/
public class AcceptorDelegate implements ICompletionProposalAcceptor {
private final ICompletionProposalAcceptor delegate;
public AcceptorDelegate(ICompletionProposalAcceptor delegate) {
this.delegate = delegate;
}
public void accept(ICompletionProposal proposal) {
if (proposal instanceof ConfigurableCompletionProposal) {
Keyword keyword = XtextFactory.eINSTANCE.createKeyword();
keyword.setValue(proposal.getDisplayString());
((ConfigurableCompletionProposal) proposal)
.setAdditionalProposalInfo(keyword);
((ConfigurableCompletionProposal) proposal)
.setHover(STextProposalProvider.this.getHover());
}
delegate.accept(proposal);
}
public boolean canAcceptMoreProposals() {
return delegate.canAcceptMoreProposals();
}
}
}
package com.opengamma.bbg.referencedata.cache;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.Map;
import java.util.Set;
import net.sf.ehcache.Cache;
import net.sf.ehcache.CacheManager;
import net.sf.ehcache.Element;
import org.fudgemsg.FudgeContext;
import org.fudgemsg.FudgeMsg;
import org.fudgemsg.mapping.FudgeSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Maps;
import com.opengamma.bbg.referencedata.ReferenceData;
import com.opengamma.bbg.referencedata.ReferenceDataProvider;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.ehcache.EHCacheUtils;
import com.opengamma.util.fudgemsg.OpenGammaFudgeContext;
/**
* Decorates a reference data provider, adding caching.
* <p>
* The cache is implemented using {@code EHCache}.
*/
public class EHValueCachingReferenceDataProvider extends AbstractValueCachingReferenceDataProvider {
/** Logger. */
private static final Logger s_logger = LoggerFactory.getLogger(EHValueCachingReferenceDataProvider.class);
/**
* Cache key for reference data.
*/
/*package*/ static final String REFERENCE_DATA_CACHE = "referenceData";
/**
* The cache manager.
*/
private final CacheManager _cacheManager;
/**
* The reference data cache.
*/
private final Cache _cache;
/**
* Creates an instance.
*
* @param underlying the underlying reference data provider, not null
* @param cacheManager the cache manager, not null
*/
public EHValueCachingReferenceDataProvider(final ReferenceDataProvider underlying, final CacheManager cacheManager) {
this(underlying, cacheManager, OpenGammaFudgeContext.getInstance());
}
/**
* Creates an instance.
*
* @param underlying the underlying reference data provider, not null
* @param cacheManager the cache manager, not null
* @param fudgeContext the Fudge context, not null
*/
public EHValueCachingReferenceDataProvider(final ReferenceDataProvider underlying, final CacheManager cacheManager, final FudgeContext fudgeContext) {
super(underlying, fudgeContext);
ArgumentChecker.notNull(cacheManager, "cacheManager");
_cacheManager = cacheManager;
EHCacheUtils.addCache(cacheManager, REFERENCE_DATA_CACHE);
_cache = EHCacheUtils.getCacheFromManager(cacheManager, REFERENCE_DATA_CACHE);
}
/**
* Gets the cache manager.
*
* @return the cache manager, not null
*/
public CacheManager getCacheManager() {
return _cacheManager;
}
@Override
protected Map<String, ReferenceData> loadFieldValues(Set<String> identifiers) {
Map<String, ReferenceData> result = Maps.newTreeMap();
FudgeSerializer serializer = new FudgeSerializer(getFudgeContext());
for (String identifier : identifiers) {
ReferenceData cachedResult = loadStateFromCache(serializer, identifier);
if (cachedResult != null) {
result.put(identifier, cachedResult);
}
}
return result;
}
@Override
protected void saveFieldValues(ReferenceData result) {
String identifier = result.getIdentifier();
FudgeMsg fieldData = result.getFieldValues();
if (identifier != null && fieldData != null) {
s_logger.info("Persisting fields for \"{}\": {}", identifier, result.getFieldValues());
Object cachedObject = createCachedObject(result);
s_logger.debug("cachedObject={}", cachedObject);
Element element = new Element(identifier, cachedObject);
_cache.put(element);
}
}
/**
* Loads the state from the cache.
*
* @param serializer the Fudge serializer, not null
* @param identifier the identifier, not null
* @return the result, null if not found
*/
protected ReferenceData loadStateFromCache(FudgeSerializer serializer, String identifier) {
Element element = _cache.get(identifier);
if (element != null) {
s_logger.debug("Have security data for des {} in cache", identifier);
Object fromCache = element.getObjectValue();
s_logger.debug("cachedObject={}", fromCache);
return parseCachedObject(fromCache);
}
return null;
}
/**
* Data holder for storing the results.
*/
private static final class CachedReferenceData implements Serializable {
private static final long serialVersionUID = 3L;
private transient String _security;
private transient FudgeContext _fudgeContext;
private transient volatile FudgeMsg _fieldDataMsg;
private transient volatile byte[] _fieldData;
private byte[] getFieldData() {
byte[] fieldData = _fieldData;
if (fieldData == null) {
synchronized (this) {
fieldData = _fieldData;
if (fieldData == null) {
fieldData = _fudgeContext.toByteArray(_fieldDataMsg);
_fieldData = fieldData;
_fieldDataMsg = null;
}
}
}
return fieldData;
}
private void setFieldData(final byte[] fieldData) {
_fieldData = fieldData;
}
public FudgeMsg getFieldDataMsg(final FudgeContext fudgeContext) {
FudgeMsg fieldDataMsg = _fieldDataMsg;
if (fieldDataMsg == null) {
synchronized (this) {
fieldDataMsg = _fieldDataMsg;
if (fieldDataMsg == null) {
_fudgeContext = fudgeContext;
fieldDataMsg = fudgeContext.deserialize(_fieldData).getMessage();
_fieldDataMsg = fieldDataMsg;
_fieldData = null;
}
}
}
return _fieldDataMsg;
}
public void setFieldDataMsg(final FudgeMsg fieldDataMsg, final FudgeContext fudgeContext) {
_fieldDataMsg = fieldDataMsg;
_fudgeContext = fudgeContext;
}
public String getSecurity() {
return _security;
}
public void setSecurity(final String security) {
_security = security;
}
private void writeObject(final ObjectOutputStream out) throws IOException {
out.writeUTF(getSecurity());
final byte[] fieldData = getFieldData();
out.writeInt(fieldData.length);
out.write(fieldData);
}
private void readObject(final ObjectInputStream in) throws IOException, ClassNotFoundException {
setSecurity(in.readUTF());
final int dataLength = in.readInt();
final byte[] data = new byte[dataLength];
in.readFully(data);
setFieldData(data);
}
}
/**
* Parse the cached object.
*
* @param fromCache the data from the cache, not null
* @return the result, not null
*/
protected ReferenceData parseCachedObject(Object fromCache) {
CachedReferenceData rd = (CachedReferenceData) fromCache;
return new ReferenceData(rd.getSecurity(), rd.getFieldDataMsg(getFudgeContext()));
}
/**
* Creates the cached object.
*
* @param refDataResult the reference data result.
* @return the cache object, not null
*/
protected Object createCachedObject(ReferenceData refDataResult) {
CachedReferenceData result = new CachedReferenceData();
result.setSecurity(refDataResult.getIdentifier());
result.setFieldDataMsg(getFudgeContext().newMessage(refDataResult.getFieldValues()), getFudgeContext());
return result;
}
}
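// A minimal wiring sketch, assuming an existing underlying ReferenceDataProvider; the helper
// method and the use of the default EHCache CacheManager are illustrative, not part of the
// class above. The decorator is simply constructed around the underlying provider and used in
// its place, so lookups are served from the "referenceData" cache before falling through.
//
//   static ReferenceDataProvider decorateWithCache(ReferenceDataProvider underlying) {
//     CacheManager cacheManager = CacheManager.getInstance();  // default EHCache manager (illustrative)
//     return new EHValueCachingReferenceDataProvider(underlying, cacheManager);
//   }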
package org.eclipse.kapua.service.datastore.internal;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.eclipse.kapua.KapuaException;
import org.eclipse.kapua.commons.util.KapuaDateUtils;
import org.eclipse.kapua.locator.KapuaLocator;
import org.eclipse.kapua.message.KapuaChannel;
import org.eclipse.kapua.message.KapuaPosition;
import org.eclipse.kapua.message.device.data.KapuaDataMessage;
import org.eclipse.kapua.message.device.data.KapuaDataPayload;
import org.eclipse.kapua.message.internal.KapuaPositionImpl;
import org.eclipse.kapua.message.internal.device.data.KapuaDataChannelImpl;
import org.eclipse.kapua.message.internal.device.data.KapuaDataMessageImpl;
import org.eclipse.kapua.message.internal.device.data.KapuaDataPayloadImpl;
import org.eclipse.kapua.model.id.KapuaId;
import org.eclipse.kapua.service.account.Account;
import org.eclipse.kapua.service.account.AccountService;
import org.eclipse.kapua.service.datastore.ChannelInfoRegistryService;
import org.eclipse.kapua.service.datastore.ClientInfoRegistryService;
import org.eclipse.kapua.service.datastore.DatastoreObjectFactory;
import org.eclipse.kapua.service.datastore.MessageStoreService;
import org.eclipse.kapua.service.datastore.MetricInfoRegistryService;
import org.eclipse.kapua.service.datastore.internal.elasticsearch.ChannelInfoField;
import org.eclipse.kapua.service.datastore.internal.elasticsearch.ClientInfoField;
import org.eclipse.kapua.service.datastore.internal.elasticsearch.EsSchema;
import org.eclipse.kapua.service.datastore.internal.elasticsearch.MessageField;
import org.eclipse.kapua.service.datastore.internal.elasticsearch.MessageStoreConfiguration;
import org.eclipse.kapua.service.datastore.internal.elasticsearch.MetricInfoField;
import org.eclipse.kapua.service.datastore.internal.model.DataIndexBy;
import org.eclipse.kapua.service.datastore.internal.model.MetricsIndexBy;
import org.eclipse.kapua.service.datastore.internal.model.query.AndPredicateImpl;
import org.eclipse.kapua.service.datastore.internal.model.query.ChannelInfoQueryImpl;
import org.eclipse.kapua.service.datastore.internal.model.query.ChannelMatchPredicateImpl;
import org.eclipse.kapua.service.datastore.internal.model.query.ClientInfoQueryImpl;
import org.eclipse.kapua.service.datastore.internal.model.query.MessageQueryImpl;
import org.eclipse.kapua.service.datastore.internal.model.query.MetricInfoQueryImpl;
import org.eclipse.kapua.service.datastore.internal.model.query.RangePredicateImpl;
import org.eclipse.kapua.service.datastore.internal.model.query.SortFieldImpl;
import org.eclipse.kapua.service.datastore.internal.setting.DatastoreSettingKey;
import org.eclipse.kapua.service.datastore.internal.setting.DatastoreSettings;
import org.eclipse.kapua.service.datastore.model.ChannelInfo;
import org.eclipse.kapua.service.datastore.model.ChannelInfoListResult;
import org.eclipse.kapua.service.datastore.model.ClientInfo;
import org.eclipse.kapua.service.datastore.model.ClientInfoListResult;
import org.eclipse.kapua.service.datastore.model.DatastoreMessage;
import org.eclipse.kapua.service.datastore.model.MessageListResult;
import org.eclipse.kapua.service.datastore.model.MetricInfo;
import org.eclipse.kapua.service.datastore.model.MetricInfoListResult;
import org.eclipse.kapua.service.datastore.model.StorableId;
import org.eclipse.kapua.service.datastore.model.StorableListResult;
import org.eclipse.kapua.service.datastore.model.query.AndPredicate;
import org.eclipse.kapua.service.datastore.model.query.ChannelInfoQuery;
import org.eclipse.kapua.service.datastore.model.query.ChannelMatchPredicate;
import org.eclipse.kapua.service.datastore.model.query.ClientInfoQuery;
import org.eclipse.kapua.service.datastore.model.query.MessageQuery;
import org.eclipse.kapua.service.datastore.model.query.MetricInfoQuery;
import org.eclipse.kapua.service.datastore.model.query.RangePredicate;
import org.eclipse.kapua.service.datastore.model.query.SortDirection;
import org.eclipse.kapua.service.datastore.model.query.SortField;
import org.eclipse.kapua.service.datastore.model.query.StorableFetchStyle;
import org.eclipse.kapua.service.datastore.model.query.TermPredicate;
import org.eclipse.kapua.service.device.registry.Device;
import org.eclipse.kapua.service.device.registry.DeviceCreator;
import org.eclipse.kapua.service.device.registry.DeviceFactory;
import org.eclipse.kapua.service.device.registry.DeviceRegistryService;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class MessageStoreServiceTest extends AbstractMessageStoreServiceTest
{
private static final Logger s_logger = LoggerFactory.getLogger(MessageStoreServiceTest.class);
private static final long QUERY_TIME_WINDOW = 2000l;
private static final long PUBLISH_DATE_TEST_CHECK_TIME_WINDOW = 1000l;
private static final DeviceRegistryService deviceRegistryService = KapuaLocator.getInstance().getService(DeviceRegistryService.class);
private static final DeviceFactory deviceFactory = KapuaLocator.getInstance().getFactory(DeviceFactory.class);
private static final MessageStoreService messageStoreService = KapuaLocator.getInstance().getService(MessageStoreService.class);
private static final DatastoreObjectFactory datastoreObjectFactory = KapuaLocator.getInstance().getFactory(DatastoreObjectFactory.class);
private static final ChannelInfoRegistryService channelInfoRegistryService = KapuaLocator.getInstance().getService(ChannelInfoRegistryService.class);
private static final MetricInfoRegistryService metricInfoRegistryService = KapuaLocator.getInstance().getService(MetricInfoRegistryService.class);
private static final ClientInfoRegistryService clientInfoRegistryService = KapuaLocator.getInstance().getService(ClientInfoRegistryService.class);
private long elasticsearchRefreshTime = DatastoreSettings.getInstance().getLong(DatastoreSettingKey.ELASTICSEARCH_IDX_REFRESH_INTERVAL) * KapuaDateUtils.SEC_MILLIS;
/**
* Store a few messages with a few metrics, a position and a body (partially randomly generated) and check that the stored message (retrieved by id) has all the fields correctly set.
*
* @throws Exception
*/
@Test
public void testMessageStore()
throws Exception
{
Account account = getTestAccountCreator(adminScopeId);
Random random = new Random();
String[] semanticTopic = new String[] {
"bus/route/one",
"bus/route/one",
"bus/route/two/a",
"bus/route/two/b",
"tram/route/one",
"car/one"
};
KapuaDataMessage message = null;
String clientId = String.format("device-%d", new Date().getTime());
DeviceCreator deviceCreator = deviceFactory.newCreator(account.getId(), clientId);
Device device = deviceRegistryService.create(deviceCreator);
// leave the message index by as default (DEVICE_TIMESTAMP)
updateConfiguration(messageStoreService, account.getId(), DataIndexBy.DEVICE_TIMESTAMP, MetricsIndexBy.TIMESTAMP, 30, true);
for (int i = 0; i < 12; i++) {
byte[] randomPayload = new byte[128];
random.nextBytes(randomPayload);
String stringPayload = "Hello World" + (i + 1) + " \n\n\t\n\tHelloWord2";
byte[] payload = ArrayUtils.addAll(randomPayload, stringPayload.getBytes());
KapuaDataPayloadImpl messagePayload = new KapuaDataPayloadImpl();
Map<String, Object> metrics = new HashMap<String, Object>();
metrics.put("float_int", new Float((float) (i + 1)));
metrics.put("float_float", new Float((float) (i + 1) * 0.01));
metrics.put("integer_value", new Integer((i + 1)));
metrics.put("double_int", (double) (i + 1));
metrics.put("double_float", (double) (i + 1) * 0.01);
metrics.put("long_long", (long) (10000000000000l * (i + 1)));
metrics.put("long_int_1", (long) (1000 * (i + 1)));
metrics.put("long_int_2", (long) (i + 1));
metrics.put("string_value", Integer.toString((i + 1) * 1000));
messagePayload.setProperties(metrics);
messagePayload.setBody(payload);
Date receivedOn = new Date();
Date sentOn = new Date(new SimpleDateFormat("dd/MM/yyyy").parse("01/01/2015").getTime());
Date capturedOn = new Date(new SimpleDateFormat("dd/MM/yyyy").parse("01/01/2016").getTime());
message = getMessage(clientId, account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message, semanticTopic[i % semanticTopic.length]);
updatePayload(message, messagePayload);
KapuaPosition messagePosition = getPosition(10.00d * (i + 1), 12d * (i + 1), 1.123d * (i + 1), 2d * (i + 1), 0001d * (i + 1), 1000 * (i + 1), 1d * (i + 1), 44 * (i + 1), new Date());
message.setPosition(messagePosition);
List<StorableId> messageStoredIds = null;
try {
messageStoredIds = insertMessages(true, message);
DatastoreMessage messageQueried = messageStoreService.find(account.getId(), messageStoredIds.get(0), StorableFetchStyle.SOURCE_FULL);
checkMessageId(messageQueried, messageStoredIds.get(0));
checkTopic(messageQueried, semanticTopic[i % semanticTopic.length]);
checkMessageBody(messageQueried, message.getPayload().getBody());
checkMetricsSize(messageQueried, metrics.size());
checkMetrics(messageQueried, metrics);
checkPosition(messageQueried, messagePosition);
checkMessageDate(messageQueried, new Range<Date>("timestamp", capturedOn), new Range<Date>("sentOn", sentOn), new Range<Date>("capturedOn", capturedOn), new Range<Date>("receivedOn", receivedOn));
}
catch (KapuaException e) {
s_logger.error("Exception: ", e.getMessage(), e);
}
}
}
/**
* Test the correctness of the query filtering order (3 fields: date descending, date ascending, string descending)
*
* @throws Exception
*/
@Test
public void testMessageOrderingMixedTest()
throws Exception
{
Account account = getTestAccountCreator(adminScopeId);
String[] semanticTopic = new String[] {
"bus/route/one",
"bus/route/one",
"bus/route/two/a",
"bus/route/two/b",
"tram/route/one",
"car/one"
};
KapuaDataMessage message = null;
String clientId1 = String.format("device-%d", new Date().getTime());
Thread.sleep(100);
String clientId2 = String.format("device-%d", new Date().getTime());
DeviceCreator deviceCreator = deviceFactory.newCreator(account.getId(), clientId1);
Device device1 = deviceRegistryService.create(deviceCreator);
DeviceCreator deviceCreator2 = deviceFactory.newCreator(account.getId(), clientId2);
Device device2 = deviceRegistryService.create(deviceCreator2);
int messagesCount = 100;
Date sentOn1 = new Date();
Date sentOn2 = new Date(sentOn1.getTime() + 5000);
Date capturedOn1 = new Date(new Date().getTime() + 1000);
Date capturedOn2 = new Date(capturedOn1.getTime() + 1000);
String clientId = null;
Device device = null;
// leave the message index by as default (DEVICE_TIMESTAMP)
updateConfiguration(messageStoreService, account.getId(), DataIndexBy.DEVICE_TIMESTAMP, MetricsIndexBy.TIMESTAMP, 30, true);
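// spread the messages over the two sentOn dates (first half / second half), the two capturedOn dates (roughly by quarters) and the two devices (even indexes go to the second device) so the mixed ordering below has meaningful data to sort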
for (int i = 0; i < messagesCount; i++) {
clientId = clientId1;
device = device1;
Date receivedOn = new Date();
Date sentOn = null;
if (i < messagesCount / 2) {
sentOn = sentOn1;
}
else {
sentOn = sentOn2;
}
Date capturedOn = null;
if (i < messagesCount / 4 - 1 || (i > messagesCount / 2 - 1 && i < messagesCount * 3 / 4 - 1)) {
capturedOn = capturedOn1;
if (i % 2 == 0) {
clientId = clientId2;
device = device2;
}
}
else {
capturedOn = capturedOn2;
if (i % 2 == 0) {
clientId = clientId2;
device = device2;
}
}
message = getMessage(clientId, account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message, semanticTopic[i % semanticTopic.length]);
insertMessages(false, message);
}
waitEsRefresh();
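// build the three-field ordering: sentOn descending, captured timestamp ascending, client id descending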
List<SortField> sort = new ArrayList<SortField>();
SortField sortSentOn = new SortFieldImpl();
sortSentOn.setField(EsSchema.MESSAGE_SENT_ON);
sortSentOn.setSortDirection(SortDirection.DESC);
sort.add(sortSentOn);
SortField sortTimestamp = new SortFieldImpl();
sortTimestamp.setField(EsSchema.MESSAGE_TIMESTAMP);
sortTimestamp.setSortDirection(SortDirection.ASC);
sort.add(sortTimestamp);
SortField sortClientId = new SortFieldImpl();
sortClientId.setField(EsSchema.MESSAGE_CLIENT_ID);
sortClientId.setSortDirection(SortDirection.DESC);
sort.add(sortClientId);
MessageQuery messageQuery = getMessageOrderedQuery(account.getId(), messagesCount + 1, sort);
setMessageQueryBaseCriteria(messageQuery, new DateRange(capturedOn1, capturedOn2));
MessageListResult messageList = messageStoreService.query( messageQuery);
checkMessagesCount(messageList, messagesCount);
checkMessagesDateBound(messageList, new Date(capturedOn1.getTime()), new Date(capturedOn2.getTime()));
for (DatastoreMessage messageStored : messageList.getItems()) {
s_logger.debug("message sentOn: '" + messageStored.getSentOn() + "' - capturedOn: '" + messageStored.getCapturedOn() + "' clientId: '" + messageStored.getClientId() + "'");
}
checkListOrder(messageList, sort);
}
/**
* Test the correctness of the storage process with a basic message (no metrics, payload or position), indexing the message date by device timestamp (the default)
*
* @throws Exception
*/
@Test
public void testMessageStoreWithDeviceTimestampIndexingAndNullPayload()
throws Exception
{
Account account = createAccount(null, null);
Date messageTime = new Date();
String clientId = String.format("device-%d", messageTime.getTime());
DeviceCreator deviceCreator = deviceFactory.newCreator(account.getId(), clientId);
Device device = deviceRegistryService.create(deviceCreator);
String topicSemanticPart = "testStoreWithNullPayload/testStoreWithNullPayload/" + Calendar.getInstance().getTimeInMillis();
Date sentOn = new Date(new SimpleDateFormat("dd/MM/yyyy").parse("01/01/2015").getTime());
Date capturedOn = new Date();
Date receivedOn = new Date();
KapuaDataMessage message = getMessage(clientId, account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message, topicSemanticPart);
updatePayload(message, null);
message.setReceivedOn(messageTime);
// leave the message index by as default (DEVICE_TIMESTAMP)
updateConfiguration(messageStoreService, account.getId(), DataIndexBy.DEVICE_TIMESTAMP, MetricsIndexBy.TIMESTAMP, 30, true);
List<StorableId> messageStoredIds = insertMessages(true, message);
MessageQuery messageQuery = getBaseMessageQuery(account.getId());
setMessageQueryBaseCriteria(messageQuery, new DateRange(capturedOn));
MessageListResult result = messageStoreService.query( messageQuery);
DatastoreMessage messageQueried = checkMessagesCount(result, 1);
checkMessageId(messageQueried, messageStoredIds.get(0));
checkMessageBody(messageQueried, null);
checkMetricsSize(messageQueried, 0);
checkPosition(messageQueried, null);
checkMessageDate(messageQueried, new Range<Date>("timestamp", capturedOn), new Range<Date>("sentOn", sentOn), new Range<Date>("capturedOn", capturedOn), new Range<Date>("receivedOn", messageTime));
}
/**
* Test the correctness of the storage process with a basic message (no metrics, payload or position), indexing the message date by server timestamp
*
* @throws Exception
*/
@Test
public void testMessageStoreWithServerTimestampIndexingAndNullPayload()
throws Exception
{
Account account = createAccount(null, null);
Date messageTime = new Date();
String clientId = String.format("device-%d", messageTime.getTime());
DeviceCreator deviceCreator = deviceFactory.newCreator(account.getId(), clientId);
Device device = deviceRegistryService.create(deviceCreator);
String topicSemanticPart = "testStoreWithNullPayload/testStoreWithNullPayload/" + Calendar.getInstance().getTimeInMillis();
Date sentOn = new Date(new SimpleDateFormat("dd/MM/yyyy").parse("01/01/2015").getTime());
Date capturedOn = new Date();
Date receivedOn = new Date();
KapuaDataMessage message = getMessage(clientId, account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message, topicSemanticPart);
updatePayload(message, null);
message.setReceivedOn(messageTime);
updateConfiguration(messageStoreService, account.getId(), DataIndexBy.SERVER_TIMESTAMP, MetricsIndexBy.TIMESTAMP, 30, true);
messageTime = new Date();
message.setReceivedOn(messageTime);
List<StorableId> messageStoredIds = insertMessages(true, message);
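// with SERVER_TIMESTAMP indexing the message timestamp is assigned at store time, so query with a window of +/- QUERY_TIME_WINDOW around the local receive time instead of an exact date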
Date timestampLowerBound = new Date(messageTime.getTime() - QUERY_TIME_WINDOW);
Date timestampUpperBound = new Date(messageTime.getTime() + QUERY_TIME_WINDOW);
DateRange dateRange = new DateRange(timestampLowerBound, timestampUpperBound);
MessageQuery messageQuery = getBaseMessageQuery(account.getId());
setMessageQueryBaseCriteria(messageQuery, dateRange);
MessageListResult result = messageStoreService.query(messageQuery);
DatastoreMessage messageQueried = checkMessagesCount(result, 1);
checkMessageId(messageQueried, messageStoredIds.get(0));
checkTopic(messageQueried, topicSemanticPart);
checkMessageBody(messageQueried, null);
checkMetricsSize(messageQueried, 0);
checkPosition(messageQueried, null);
checkMessageDate(messageQueried, new Range<Date>("timestamp", dateRange.getLowerBound(), dateRange.getUpperBound()), new Range<Date>("sentOn", sentOn), new Range<Date>("capturedOn", capturedOn),
new Range<Date>("receivedOn", messageTime));
}
/**
* Check the correctness of the client id information stored in the channel info data by retrieving the channel info by account.
*
* @throws Exception
*/
@Test
public void testChannelInfoFindClientIdByAccount()
throws Exception
{
Account account = createAccount(null, null);
Date messageTime = new Date();
String clientId = String.format("device-%d", messageTime.getTime());
DeviceCreator deviceCreator = deviceFactory.newCreator(account.getId(), clientId);
Device device = deviceRegistryService.create(deviceCreator);
String[] clientIds = new String[] { "ci_client_by_account_client1", "ci_client_by_account_client2", "ci_client_by_account_client3", "ci_client_by_account_client4" };
String[] semanticTopic = new String[] { "ci_client_by_account/1/2/3" };
Date sentOn = new Date(new SimpleDateFormat("dd/MM/yyyy").parse("01/01/2015").getTime());
Date capturedOn = new Date();
Date receivedOn = new Date();
KapuaDataMessage message1 = getMessage(clientIds[0], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message1, semanticTopic[0]);
message1.setReceivedOn(messageTime);
KapuaDataMessage message2 = getMessage(clientIds[1], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message2, semanticTopic[0]);
message2.setReceivedOn(messageTime);
KapuaDataMessage message3 = getMessage(clientIds[2], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message3, semanticTopic[0]);
message3.setReceivedOn(messageTime);
KapuaDataMessage message4 = getMessage(clientIds[3], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message4, semanticTopic[0]);
message4.setReceivedOn(messageTime);
KapuaDataMessage message5 = getMessage(clientIds[3], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message5, semanticTopic[0]);
message5.setReceivedOn(messageTime);
KapuaDataMessage message6 = getMessage(clientIds[3], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message6, semanticTopic[0]);
message6.setReceivedOn(messageTime);
updateConfiguration(messageStoreService, account.getId(), DataIndexBy.DEVICE_TIMESTAMP, MetricsIndexBy.TIMESTAMP, 30, true);
insertMessages(true, message1, message2, message3, message4, message5, message6);
ChannelInfoQuery channelInfoQuery = getBaseChannelInfoQuery(account.getId());
setChannelInfoQueryBaseCriteria(channelInfoQuery, new DateRange(messageTime));
ChannelInfoListResult channelList = channelInfoRegistryService.query( channelInfoQuery);
checkChannelInfoClientIdsAndTopics(channelList, 4, clientIds, semanticTopic);
}
/**
* Check the correctness of the channel info last publish date stored by retrieving the channel info by account.
*
* This test is failing because the Elasticsearch caching code needs to be improved.
*
* @throws Exception
*/
@Test
public void testChannelInfoFindClientIdByPublishDateByAccount()
throws Exception
{
Account account = createAccount(null, null);
Date messageTime = new Date();
String clientId = String.format("device-%d", messageTime.getTime());
DeviceCreator deviceCreator = deviceFactory.newCreator(account.getId(), clientId);
Device device = deviceRegistryService.create(deviceCreator);
String[] clientIds = new String[] { "ci_client_by_pd_by_account_client1", "ci_client_by_pd_by_account_client2" };
String[] semanticTopic = new String[] { "ci_client_by_pd_by_account/1/2/3" };
Date sentOn = new Date(new SimpleDateFormat("dd/MM/yyyy").parse("01/01/2015").getTime());
Date capturedOn = new Date();
Date capturedOnSecondMessage = new Date(capturedOn.getTime() + PUBLISH_DATE_TEST_CHECK_TIME_WINDOW);
Date capturedOnThirdMessage = new Date(capturedOnSecondMessage.getTime() + PUBLISH_DATE_TEST_CHECK_TIME_WINDOW);
Date receivedOn = new Date();
KapuaDataMessage message1 = getMessage(clientIds[0], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message1, semanticTopic[0]);
message1.setReceivedOn(messageTime);
KapuaDataMessage message2 = getMessage(clientIds[1], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message2, semanticTopic[0]);
message2.setReceivedOn(messageTime);
KapuaDataMessage message3 = getMessage(clientIds[1], account.getId(), device.getId(), receivedOn, capturedOnSecondMessage, sentOn);
updateChannel(message3, semanticTopic[0]);
message3.setReceivedOn(messageTime);
KapuaDataMessage message4 = getMessage(clientIds[1], account.getId(), device.getId(), receivedOn, capturedOnThirdMessage, sentOn);
updateChannel(message4, semanticTopic[0]);
message4.setReceivedOn(messageTime);
updateConfiguration(messageStoreService, account.getId(), DataIndexBy.DEVICE_TIMESTAMP, MetricsIndexBy.TIMESTAMP, 30, true);
insertMessages(true, message1, message2, message3, message4);
ChannelInfoQuery channelInfoQuery = getBaseChannelInfoQuery(account.getId());
setChannelInfoQueryBaseCriteria(channelInfoQuery, new DateRange(messageTime, capturedOnThirdMessage));
ChannelInfoListResult channelList = channelInfoRegistryService.query( channelInfoQuery);
checkChannelInfoClientIdsAndTopics(channelList, 2, clientIds, semanticTopic);
// check the channel info date
for (ChannelInfo channelInfo : channelList.getItems()) {
if (clientIds[0].equals(channelInfo.getClientId())) {
assertEquals(String.format("Wrong last publish date for the client id [%s]", clientIds[0]), capturedOn, channelInfo.getLastMessageOn());
}
else if (clientIds[1].equals(channelInfo.getClientId())) {
assertEquals(String.format("Wrong last publish date for the client id [%s]", clientIds[1]), capturedOnThirdMessage, channelInfo.getLastMessageOn());
}
assertEquals(String.format("Wrong first publish date for the client id [%s]", channelInfo.getClientId()), capturedOn, channelInfo.getFirstMessageOn());
}
}
/**
* Check the correctness of the topic info stored in the channel info data by retrieving the channel info by account.
*
* @throws Exception
*/
@Test
public void testChannelInfoFindTopicByAccount()
throws Exception
{
Account account = createAccount(null, null);
Date messageTime = new Date();
String clientId = String.format("device-%d", messageTime.getTime());
DeviceCreator deviceCreator = deviceFactory.newCreator(account.getId(), clientId);
Device device = deviceRegistryService.create(deviceCreator);
String[] clientIds = new String[] { "ci_topic_by_account_client1", "ci_topic_by_account_client2" };
String[] semanticTopic = new String[] { "ci_topic_by_account/1/2/3", "ci_topic_by_account/1/2/4", "ci_topic_by_account/1/2/5", "ci_topic_by_account/1/2/6" };
Date sentOn = new Date(new SimpleDateFormat("dd/MM/yyyy").parse("01/01/2015").getTime());
Date capturedOn = new Date();
Date receivedOn = new Date();
KapuaDataMessage message1 = getMessage(clientIds[0], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message1, semanticTopic[0]);
message1.setReceivedOn(messageTime);
KapuaDataMessage message2 = getMessage(clientIds[0], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message2, semanticTopic[1]);
message2.setReceivedOn(messageTime);
KapuaDataMessage message3 = getMessage(clientIds[0], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message3, semanticTopic[2]);
message3.setReceivedOn(messageTime);
KapuaDataMessage message4 = getMessage(clientIds[0], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message4, semanticTopic[3]);
message4.setReceivedOn(messageTime);
KapuaDataMessage message5 = getMessage(clientIds[1], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message5, semanticTopic[0]);
message5.setReceivedOn(messageTime);
KapuaDataMessage message6 = getMessage(clientIds[1], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message6, semanticTopic[1]);
message6.setReceivedOn(messageTime);
updateConfiguration(messageStoreService, account.getId(), DataIndexBy.DEVICE_TIMESTAMP, MetricsIndexBy.TIMESTAMP, 30, true);
insertMessages(true, message1, message2, message3, message4, message5, message6);
ChannelInfoQuery channelInfoQuery = getBaseChannelInfoQuery(account.getId());
setChannelInfoQueryBaseCriteria(channelInfoQuery, new DateRange(messageTime));
ChannelInfoListResult channelList = channelInfoRegistryService.query( channelInfoQuery);
checkChannelInfoClientIdsAndTopics(channelList, 6, clientIds, semanticTopic);
}
/**
* Check the correctness of the topic info stored in the channel info data by retrieving the channel info by client id.
*
* @throws Exception
*/
@Test
public void testChannelInfoFindTopicByClientId()
throws Exception
{
Account account = createAccount(null, null);
Date messageTime = new Date();
String clientId = String.format("device-%d", messageTime.getTime());
DeviceCreator deviceCreator = deviceFactory.newCreator(account.getId(), clientId);
Device device = deviceRegistryService.create(deviceCreator);
String[] clientIds = new String[] { "ci_topic_by_client_client1" };
String[] semanticTopic = new String[] { "ci_topic_by_client/1/2/3", "ci_topic_by_client/1/2/4", "ci_topic_by_client/1/2/5", "ci_topic_by_client/1/2/6" };
Date sentOn = new Date(new SimpleDateFormat("dd/MM/yyyy").parse("01/01/2015").getTime());
Date capturedOn = new Date();
Date receivedOn = new Date();
KapuaDataMessage message1 = getMessage(clientIds[0], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message1, semanticTopic[0]);
message1.setReceivedOn(messageTime);
KapuaDataMessage message2 = getMessage(clientIds[0], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message2, semanticTopic[1]);
message2.setReceivedOn(messageTime);
KapuaDataMessage message3 = getMessage(clientIds[0], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message3, semanticTopic[2]);
message3.setReceivedOn(messageTime);
KapuaDataMessage message4 = getMessage(clientIds[0], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message4, semanticTopic[3]);
message4.setReceivedOn(messageTime);
KapuaDataMessage message5 = getMessage(clientIds[0] + "_NO", account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message5, semanticTopic[2]);
message5.setReceivedOn(messageTime);
KapuaDataMessage message6 = getMessage(clientIds[0] + "_NO", account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message6, semanticTopic[3]);
message6.setReceivedOn(messageTime);
updateConfiguration(messageStoreService, account.getId(), DataIndexBy.DEVICE_TIMESTAMP, MetricsIndexBy.TIMESTAMP, 30, true);
insertMessages(true, message1, message2, message3, message4, message5, message6);
ChannelInfoQuery channelInfoQuery = getBaseChannelInfoQuery(account.getId());
setChannelInfoQueryBaseCriteria(channelInfoQuery, clientIds[0], new DateRange(messageTime));
ChannelInfoListResult channelList = channelInfoRegistryService.query( channelInfoQuery);
checkChannelInfoClientIdsAndTopics(channelList, 4, clientIds, semanticTopic);
}
/**
* Check the correctness of the metric info data stored by retrieving the metric information by account.
*
* @throws Exception
*/
@Test
public void testMetricsInfoFindClientByAccount()
throws Exception
{
Account account = createAccount(null, null);
Date messageTime = new Date();
String clientId = String.format("device-%d", messageTime.getTime());
DeviceCreator deviceCreator = deviceFactory.newCreator(account.getId(), clientId);
Device device = deviceRegistryService.create(deviceCreator);
String[] clientIds = new String[] { "mi_client_by_account_client1", "mi_client_by_account_client2" };
String[] metrics = new String[] { "mi_client_by_account_metric1", "mi_client_by_account_metric2", "mi_client_by_account_metric3", "mi_client_by_account_metric4" };
String[] semanticTopic = new String[] { "mi_client_by_account/1/2/3" };
Date sentOn = new Date(new SimpleDateFormat("dd/MM/yyyy").parse("01/01/2015").getTime());
Date capturedOn = new Date();
Date receivedOn = new Date();
KapuaDataMessage message1 = getMessage(clientIds[0], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message1, semanticTopic[0]);
initMetrics(message1);
message1.getPayload().getProperties().put(metrics[0], new Double(123));
message1.getPayload().getProperties().put(metrics[1], new Integer(123));
message1.setReceivedOn(messageTime);
KapuaDataMessage message2 = getMessage(clientIds[1], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message2, semanticTopic[0]);
initMetrics(message2);
message2.getPayload().getProperties().put(metrics[2], new String("123"));
message2.getPayload().getProperties().put(metrics[3], new Boolean(true));
message2.setReceivedOn(messageTime);
updateConfiguration(messageStoreService, account.getId(), DataIndexBy.DEVICE_TIMESTAMP, MetricsIndexBy.TIMESTAMP, 30, true);
insertMessages(true, message1, message2);
MetricInfoQuery metricInfoQuery = getBaseMetricInfoQuery(account.getId());
setMetricInfoQueryBaseCriteria(metricInfoQuery, new DateRange(capturedOn));
MetricInfoListResult metricList = metricInfoRegistryService.query( metricInfoQuery);
checkMetricInfoClientIdsAndMetricNames(metricList, 4, clientIds, metrics);
}
/**
* Check the correctness of the metric info last publish date stored by retrieving the metric info by account.
*
* This test is failing because the Elasticsearch caching code needs to be improved.
*
* @throws Exception
*/
@Test
public void testMetricsInfoFindClientByPublishDateByAccount()
throws Exception
{
Account account = createAccount(null, null);
Date messageTime = new Date();
String clientId = String.format("device-%d", messageTime.getTime());
DeviceCreator deviceCreator = deviceFactory.newCreator(account.getId(), clientId);
Device device = deviceRegistryService.create(deviceCreator);
String[] clientIds = new String[] { "mi_client_by_pd_by_account_client1", "mi_client_by_pd_by_account_client2" };
String[] metrics = new String[] { "mi_client_by_pd_by_account_metric1", "mi_client_by_pd_by_account_metric2", "mi_client_by_pd_by_account_metric3", "mi_client_by_pd_by_account_metric4" };
String[] semanticTopic = new String[] { "mi_client_by_pd_by_account/1/2/3" };
Date sentOn = new Date(new SimpleDateFormat("dd/MM/yyyy").parse("01/01/2015").getTime());
Date capturedOn = new Date();
Date capturedOnSecondMessage = new Date(capturedOn.getTime() + PUBLISH_DATE_TEST_CHECK_TIME_WINDOW);
Date capturedOnThirdMessage = new Date(capturedOnSecondMessage.getTime() + PUBLISH_DATE_TEST_CHECK_TIME_WINDOW);
Date receivedOn = new Date();
KapuaDataMessage message1 = getMessage(clientIds[0], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message1, semanticTopic[0]);
initMetrics(message1);
message1.getPayload().getProperties().put(metrics[0], new Double(123));
message1.getPayload().getProperties().put(metrics[1], new Integer(123));
message1.setReceivedOn(messageTime);
KapuaDataMessage message2 = getMessage(clientIds[1], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message2, semanticTopic[0]);
initMetrics(message2);
message2.getPayload().getProperties().put(metrics[2], new String("123"));
message2.getPayload().getProperties().put(metrics[3], new Boolean(true));
message2.setReceivedOn(messageTime);
KapuaDataMessage message3 = getMessage(clientIds[1], account.getId(), device.getId(), receivedOn, capturedOnSecondMessage, sentOn);
updateChannel(message3, semanticTopic[0]);
initMetrics(message3);
message3.getPayload().getProperties().put(metrics[2], new String("123"));
message3.getPayload().getProperties().put(metrics[3], new Boolean(true));
message3.setReceivedOn(messageTime);
KapuaDataMessage message4 = getMessage(clientIds[1], account.getId(), device.getId(), receivedOn, capturedOnThirdMessage, sentOn);
updateChannel(message4, semanticTopic[0]);
initMetrics(message4);
message4.getPayload().getProperties().put(metrics[2], new String("123"));
message4.getPayload().getProperties().put(metrics[3], new Boolean(true));
message4.setReceivedOn(messageTime);
updateConfiguration(messageStoreService, account.getId(), DataIndexBy.DEVICE_TIMESTAMP, MetricsIndexBy.TIMESTAMP, 30, true);
// Store messages
insertMessages(true, message1, message2, message3, message4);
MetricInfoQuery metricInfoQuery = getBaseMetricInfoQuery(account.getId());
setMetricInfoQueryBaseCriteria(metricInfoQuery, new DateRange(capturedOn, capturedOnThirdMessage));
MetricInfoListResult metricList = metricInfoRegistryService.query( metricInfoQuery);
checkMetricInfoClientIdsAndMetricNames(metricList, 4, clientIds, metrics);
// check the metric info date
for (MetricInfo metricInfo : metricList.getItems()) {
if (clientIds[0].equals(metricInfo.getClientId())) {
assertEquals(String.format("Wrong last publish date for the client id [%s]", clientIds[0]), capturedOn, metricInfo.getLastMessageOn());
}
else if (clientIds[1].equals(metricInfo.getClientId())) {
assertEquals(String.format("Wrong last publish date for the client id [%s]", clientIds[1]), capturedOnThirdMessage, metricInfo.getLastMessageOn());
}
if (metrics[0].equals(metricInfo.getName())) {
assertEquals(String.format("Wrong last publish date for the metric [%s]", metrics[0]), capturedOn, metricInfo.getLastMessageOn());
}
else if (metrics[1].equals(metricInfo.getName())) {
assertEquals(String.format("Wrong last publish date for the metric [%s]", metrics[1]), capturedOn, metricInfo.getLastMessageOn());
}
else if (metrics[2].equals(metricInfo.getName())) {
assertEquals(String.format("Wrong last publish date for the metric [%s]", metrics[2]), capturedOnThirdMessage, metricInfo.getLastMessageOn());
}
else if (metrics[3].equals(metricInfo.getName())) {
assertEquals(String.format("Wrong last publish date for the metric [%s]", metrics[3]), capturedOnThirdMessage, metricInfo.getLastMessageOn());
}
assertEquals(String.format("Wrong first publish date for the client id [%s]", metricInfo.getClientId()), capturedOn, metricInfo.getFirstMessageOn());
}
}
/**
* Check the correctness of the metric info data stored by retrieving the metric information by client id.
*
* @throws Exception
*/
@Test
public void testMetricsInfoByClientId()
throws Exception
{
Account account = createAccount(null, null);
Date messageTime = new Date();
String clientId = String.format("device-%d", messageTime.getTime());
DeviceCreator deviceCreator = deviceFactory.newCreator(account.getId(), clientId);
Device device = deviceRegistryService.create(deviceCreator);
String[] clientIds = new String[] { "mi_client_by_client_client1", "mi_client_by_client_client2" };
String[] metrics = new String[] { "mi_client_by_client_metric1", "mi_client_by_client_metric2", "mi_client_by_client_metric3", "mi_client_by_client_metric4" };
String[] semanticTopic = new String[] { "mi_client_by_client/1/2/3" };
Date sentOn = new Date(new SimpleDateFormat("dd/MM/yyyy").parse("01/01/2015").getTime());
Date capturedOn = new Date();
Date receivedOn = new Date();
KapuaDataMessage message1 = getMessage(clientIds[0], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message1, semanticTopic[0]);
initMetrics(message1);
message1.getPayload().getProperties().put(metrics[0], new Double(123));
message1.getPayload().getProperties().put(metrics[1], new Integer(123));
message1.setReceivedOn(messageTime);
KapuaDataMessage message2 = getMessage(clientIds[0], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message2, semanticTopic[0]);
initMetrics(message2);
message2.getPayload().getProperties().put(metrics[2], new String("123"));
message2.getPayload().getProperties().put(metrics[3], new Boolean(true));
message2.setReceivedOn(messageTime);
KapuaDataMessage message3 = getMessage(clientIds[1], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message3, semanticTopic[0]);
initMetrics(message3);
message3.getPayload().getProperties().put(metrics[2], new Double(123));
message3.getPayload().getProperties().put(metrics[3], new Integer(123));
message3.setReceivedOn(messageTime);
updateConfiguration(messageStoreService, account.getId(), DataIndexBy.DEVICE_TIMESTAMP, MetricsIndexBy.TIMESTAMP, 30, true);
insertMessages(true, message1, message2, message3);
MetricInfoQuery metricInfoQuery = getBaseMetricInfoQuery(account.getId());
setMetricInfoQueryBaseCriteria(metricInfoQuery, clientIds[0], new DateRange(capturedOn));
MetricInfoListResult metricList = metricInfoRegistryService.query( metricInfoQuery);
checkMetricInfoClientIdsAndMetricNames(metricList, 4, new String[] { clientIds[0] }, metrics);
}
/**
* Test the correctness of the query filtering order (3 fields: date descending, date ascending, string descending) for the metrics
*
* @throws Exception
*/
@Test
public void testMetricOrderingMixedTest()
throws Exception
{
Account account = getTestAccountCreator(adminScopeId);
String[] semanticTopic = new String[] {
"bus/route/one",
"bus/route/one",
"bus/route/two/a",
"bus/route/two/b",
"tram/route/one",
"car/one"
};
String[] metrics = new String[] { "m_order_metric1", "m_order_metric2", "m_order_metric3", "m_order_metric4", "m_order_metric5", "m_order_metric6" };
String[] clientIds = new String[] { String.format("device-%d", new Date().getTime()), String.format("device-%d", new Date().getTime() + 100) };
String[] metricsValuesString = new String[] { "string_metric_1", "string_metric_2", "string_metric_3", "string_metric_4", "string_metric_5", "string_metric_6" };
Date[] metricsValuesDate = new Date[] { new Date(new SimpleDateFormat("HH:mm:ss dd/MM/yyyy").parse("10:10:01 01/01/2017").getTime()),
new Date(new SimpleDateFormat("HH:mm:ss dd/MM/yyyy").parse("10:10:02 01/01/2017").getTime()),
new Date(new SimpleDateFormat("HH:mm:ss dd/MM/yyyy").parse("10:10:03 01/01/2017").getTime()),
new Date(new SimpleDateFormat("HH:mm:ss dd/MM/yyyy").parse("10:10:04 01/01/2017").getTime()),
new Date(new SimpleDateFormat("HH:mm:ss dd/MM/yyyy").parse("10:10:05 01/01/2017").getTime()),
new Date(new SimpleDateFormat("HH:mm:ss dd/MM/yyyy").parse("10:10:06 01/01/2017").getTime()) };
int[] metricsValuesInt = new int[] { 10, 20, 30, 40, 50, 60 };
float[] metricsValuesFloat = new float[] { 0.002f, 10.12f, 20.22f, 33.33f, 44.44f, 55.66f };
double[] metricsValuesDouble = new double[] { 1.002d, 11.12d, 21.22d, 34.33d, 45.44d, 56.66d };
boolean[] metricsValuesBoolean = new boolean[] { true, true, false, true, false, false };
DeviceCreator deviceCreator = deviceFactory.newCreator(account.getId(), clientIds[0]);
Device device1 = deviceRegistryService.create(deviceCreator);
DeviceCreator deviceCreator2 = deviceFactory.newCreator(account.getId(), clientIds[1]);
Device device2 = deviceRegistryService.create(deviceCreator2);
int messagesCount = 100;
Date sentOn1 = new Date();
Date sentOn2 = new Date(sentOn1.getTime() + 5000);
Date capturedOn1 = new Date(new Date().getTime() + 1000);
Date capturedOn2 = new Date(capturedOn1.getTime() + 1000);
String clientId = null;
Device device = null;
// leave the message index by as default (DEVICE_TIMESTAMP)
updateConfiguration(messageStoreService, account.getId(), DataIndexBy.DEVICE_TIMESTAMP, MetricsIndexBy.TIMESTAMP, 30, true);
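// same message distribution as testMessageOrderingMixedTest: sentOn split in halves, capturedOn split roughly by quarters and even indexes assigned to the second client/device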
for (int i = 0; i < messagesCount; i++) {
clientId = clientIds[0];
device = device1;
Date receivedOn = new Date();
Date sentOn = null;
if (i < messagesCount / 2) {
sentOn = sentOn1;
}
else {
sentOn = sentOn2;
}
Date capturedOn = null;
if (i < messagesCount / 4 - 1 || (i > messagesCount / 2 - 1 && i < messagesCount * 3 / 4 - 1)) {
capturedOn = capturedOn1;
if (i % 2 == 0) {
clientId = clientIds[1];
device = device2;
}
}
else {
capturedOn = capturedOn2;
if (i % 2 == 0) {
clientId = clientIds[1];
device = device2;
}
}
KapuaDataMessage message = getMessage(clientId, account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message, semanticTopic[i % semanticTopic.length]);
// insert metrics
initMetrics(message);
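// attach one metric of each supported type (date, string, int, float, boolean, double), cycling through the predefined value arrays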
message.getPayload().getProperties().put(metrics[0], metricsValuesDate[i % metricsValuesDate.length]);
message.getPayload().getProperties().put(metrics[1], metricsValuesString[i % metricsValuesString.length]);
message.getPayload().getProperties().put(metrics[2], metricsValuesInt[i % metricsValuesInt.length]);
message.getPayload().getProperties().put(metrics[3], metricsValuesFloat[i % metricsValuesFloat.length]);
message.getPayload().getProperties().put(metrics[4], metricsValuesBoolean[i % metricsValuesBoolean.length]);
message.getPayload().getProperties().put(metrics[5], metricsValuesDouble[i % metricsValuesDouble.length]);
insertMessages(false, message);
}
waitEsRefresh();
List<SortField> sort = new ArrayList<SortField>();
SortField sortMetricName = new SortFieldImpl();
sortMetricName.setField(EsSchema.METRIC_MTR_NAME_FULL);
sortMetricName.setSortDirection(SortDirection.ASC);
sort.add(sortMetricName);
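// the limit is set well above the expected result size (one entry per metric per message, plus a margin) so paging cannot truncate the result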
MetricInfoQuery metricInfoQuery = getMetricInfoOrderedQuery(account.getId(), (6 + 1) * messagesCount, sort);
setMetricInfoQueryBaseCriteria(metricInfoQuery, new DateRange(capturedOn1, capturedOn2));
MetricInfoListResult metricList = metricInfoRegistryService.query( metricInfoQuery);
checkMetricInfoClientIdsAndMetricNames(metricList, metrics.length * semanticTopic.length, new String[] { clientIds[0], clientIds[1] }, new String[] { metrics[0], metrics[1], metrics[2],
metrics[3], metrics[4], metrics[5] });
checkMetricDateBound(metricList, new Date(capturedOn1.getTime()), new Date(capturedOn2.getTime()));
for (MetricInfo metricInfo : metricList.getItems()) {
s_logger.debug("metric client id: '" + metricInfo.getClientId() + "' - channel: '" + metricInfo.getChannel() + "' metric name: '" + metricInfo.getName()
+ "' metric type: '" + metricInfo.getMetricType() + "'");
}
checkListOrder(metricList, sort);
}
/**
* Check the correctness of the client info data stored by retrieving the client information by account.
*
* @throws Exception
*/
@Test
public void testClientInfoFindClientIdByAccount()
throws Exception
{
Account account = createAccount(null, null);
Date messageTime = new Date();
String clientId = String.format("device-%d", messageTime.getTime());
DeviceCreator deviceCreator = deviceFactory.newCreator(account.getId(), clientId);
Device device = deviceRegistryService.create(deviceCreator);
String[] clientIds = new String[] { "clii_client_by_account_client1", "clii_client_by_account_client2" };
String[] semanticTopic = new String[] { "clii_client_by_account/1/2/3", "clii_client_by_account/1/2/4", "clii_client_by_account/1/2/5", "clii_client_by_account/1/2/6" };
Date sentOn = new Date(new SimpleDateFormat("dd/MM/yyyy").parse("01/01/2015").getTime());
Date capturedOn = new Date();
Date receivedOn = new Date();
KapuaDataMessage message1 = getMessage(clientIds[0], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message1, semanticTopic[0]);
initMetrics(message1);
message1.setReceivedOn(messageTime);
KapuaDataMessage message2 = getMessage(clientIds[1], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message2, semanticTopic[1]);
initMetrics(message2);
message2.setReceivedOn(messageTime);
KapuaDataMessage message3 = getMessage(clientIds[0], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message3, semanticTopic[2]);
initMetrics(message3);
message3.setReceivedOn(messageTime);
KapuaDataMessage message4 = getMessage(clientIds[1], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message4, semanticTopic[3]);
initMetrics(message4);
message4.setReceivedOn(messageTime);
updateConfiguration(messageStoreService, account.getId(), DataIndexBy.DEVICE_TIMESTAMP, MetricsIndexBy.TIMESTAMP, 30, true);
insertMessages(true, message1, message2, message3, message4);
ClientInfoQuery clientInfoQuery = getBaseClientInfoQuery(account.getId());
setClientInfoQueryBaseCriteria(clientInfoQuery, new DateRange(capturedOn));
ClientInfoListResult clientList = clientInfoRegistryService.query( clientInfoQuery);
checkClientInfo(clientList, 2, clientIds);
}
/**
* Check the correctness of the client info last publish date stored by retrieving the client information by account.
*
* @throws Exception
*/
@Test
public void testClientInfoFindClientIdByPublishDateByAccount()
throws Exception
{
Account account = createAccount(null, null);
Date messageTime = new Date();
String clientId = String.format("device-%d", messageTime.getTime());
DeviceCreator deviceCreator = deviceFactory.newCreator(account.getId(), clientId);
Device device = deviceRegistryService.create(deviceCreator);
String[] clientIds = new String[] { "clii_client_by_pd_by_account_client1", "clii_client_by_pd_by_account_client2" };
String[] semanticTopic = new String[] { "clii_client_by_pd_by_account/1/2/3" };
Date sentOn = new Date(new SimpleDateFormat("dd/MM/yyyy").parse("01/01/2015").getTime());
Date capturedOn = new Date();
Date capturedOnSecondMessage = new Date(capturedOn.getTime() + PUBLISH_DATE_TEST_CHECK_TIME_WINDOW);
Date capturedOnThirdMessage = new Date(capturedOnSecondMessage.getTime() + PUBLISH_DATE_TEST_CHECK_TIME_WINDOW);
Date receivedOn = new Date();
KapuaDataMessage message1 = getMessage(clientIds[0], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message1, semanticTopic[0]);
initMetrics(message1);
message1.setReceivedOn(messageTime);
KapuaDataMessage message2 = getMessage(clientIds[1], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message2, semanticTopic[0]);
initMetrics(message2);
message2.setReceivedOn(messageTime);
KapuaDataMessage message3 = getMessage(clientIds[1], account.getId(), device.getId(), receivedOn, capturedOnSecondMessage, sentOn);
updateChannel(message3, semanticTopic[0]);
initMetrics(message3);
message3.setReceivedOn(messageTime);
KapuaDataMessage message4 = getMessage(clientIds[1], account.getId(), device.getId(), receivedOn, capturedOnThirdMessage, sentOn);
updateChannel(message4, semanticTopic[0]);
initMetrics(message4);
message4.setReceivedOn(messageTime);
updateConfiguration(messageStoreService, account.getId(), DataIndexBy.DEVICE_TIMESTAMP, MetricsIndexBy.TIMESTAMP, 30, true);
insertMessages(true, message1, message2, message3, message4);
ClientInfoQuery clientInfoQuery = getBaseClientInfoQuery(account.getId());
setClientInfoQueryBaseCriteria(clientInfoQuery, new DateRange(capturedOn, capturedOnThirdMessage));
ClientInfoListResult clientList = clientInfoRegistryService.query( clientInfoQuery);
checkClientInfo(clientList, 2, clientIds);
for (ClientInfo clientInfo : clientList.getItems()) {
if (clientIds[0].equals(clientInfo.getClientId())) {
assertEquals(String.format("Wrong last publish date for the client id [%s]", clientIds[0]), capturedOn, clientInfo.getLastMessageOn());
}
else if (clientIds[1].equals(clientInfo.getClientId())) {
assertEquals(String.format("Wrong last publish date for the client id [%s]", clientIds[1]), capturedOnThirdMessage, clientInfo.getLastMessageOn());
}
assertEquals(String.format("Wrong first publish date for the client id [%s]", clientInfo.getClientId()), capturedOn, clientInfo.getFirstMessageOn());
}
}
/**
* Check the correctness of the client info data stored by retrieving the client information by client id.
*
* @throws Exception
*/
@Test
public void testClientInfoByClientId()
throws Exception
{
Account account = createAccount(null, null);
Date messageTime = new Date();
String clientId = String.format("device-%d", messageTime.getTime());
DeviceCreator deviceCreator = deviceFactory.newCreator(account.getId(), clientId);
Device device = deviceRegistryService.create(deviceCreator);
String[] clientIds = new String[] { "clii_by_client_client1", "clii_by_client_client2", "clii_by_client_client3", "clii_by_client_client4" };
String[] semanticTopic = new String[] { "clii_by_client/1/2/3", "clii_by_client/1/2/4" };
Date sentOn = new Date(new SimpleDateFormat("dd/MM/yyyy").parse("01/01/2015").getTime());
Date capturedOn = new Date();
Date receivedOn = new Date();
KapuaDataMessage message1 = getMessage(clientIds[0], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message1, semanticTopic[0]);
initMetrics(message1);
message1.setReceivedOn(messageTime);
KapuaDataMessage message2 = getMessage(clientIds[1], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message2, semanticTopic[1]);
initMetrics(message2);
message2.setReceivedOn(messageTime);
KapuaDataMessage message3 = getMessage(clientIds[2], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message3, semanticTopic[0]);
initMetrics(message3);
message3.setReceivedOn(messageTime);
KapuaDataMessage message4 = getMessage(clientIds[3], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message4, semanticTopic[1]);
initMetrics(message4);
message4.setReceivedOn(messageTime);
updateConfiguration(messageStoreService, account.getId(), DataIndexBy.DEVICE_TIMESTAMP, MetricsIndexBy.TIMESTAMP, 30, true);
insertMessages(true, message1, message2, message3, message4);
ClientInfoQuery clientInfoQuery = getBaseClientInfoQuery(account.getId());
setClientInfoQueryBaseCriteria(clientInfoQuery, clientIds[0], new DateRange(capturedOn));
ClientInfoListResult clientList = clientInfoRegistryService.query( clientInfoQuery);
checkClientInfo(clientList, 1, new String[] { clientIds[0] });
}
@Test
public void testTopicsByAccount()
throws Exception
{
Account account = createAccount(null, null);
Date messageTime = new Date();
String clientId = String.format("device-%d", messageTime.getTime());
DeviceCreator deviceCreator = deviceFactory.newCreator(account.getId(), clientId);
Device device = deviceRegistryService.create(deviceCreator);
String[] clientIds = new String[] { "tba_client1" };
String[] semanticTopic = new String[] { "tba_1/1/1/1", "tba_1/1/1/2", "tba_1/1/1/3", "tba_1/1/2/1", "tba_1/1/2/2", "tba_1/1/2/3",
"tba_1/2/1/1", "tba_1/2/1/2", "tba_1/2/1/3", "tba_1/2/2/1", "tba_1/2/2/2", "tba_1/2/2/3",
"tba_2/1/1/1", "tba_2/1/1/2", "tba_2/1/1/3", "tba_2/1/2/1", "tba_2/1/2/2", "tba_2/1/2/3",
"tba_2/2/1/1", "tba_2/2/1/2", "tba_2/2/1/3", "tba_2/2/2/1", "tba_2/2/2/2", "tba_2/2/2/3" };
Date sentOn = new Date(new SimpleDateFormat("dd/MM/yyyy").parse("01/01/2015").getTime());
Date capturedOn = new Date();
Date receivedOn = new Date();
updateConfiguration(messageStoreService, account.getId(), DataIndexBy.DEVICE_TIMESTAMP, MetricsIndexBy.TIMESTAMP, 30, true);
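// publish one message for each semantic topic so the channel info registry contains the whole topic tree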
for (String semanticTopicTmp : semanticTopic) {
KapuaDataMessage message1 = getMessage(clientIds[0], account.getId(), device.getId(), receivedOn, capturedOn, sentOn);
updateChannel(message1, semanticTopicTmp);
message1.setReceivedOn(messageTime);
insertMessages(false, message1);
}
waitEsRefresh();
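// query by channel filter: "1/#" matches nothing (all topics start with the tba_ prefix), while the tba_* filters match the corresponding sub-trees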
ChannelInfoListResult channelList = doChannelInfoQuery(account, clientIds[0], "1/#", messageTime);
checkChannelInfoClientIdsAndTopics(channelList, 0, null, null);
channelList = doChannelInfoQuery(account, clientIds[0], "tba_1/#", messageTime);
checkChannelInfoClientIdsAndTopics(channelList, 12, clientIds, Arrays.copyOfRange(semanticTopic, 0, 12));
channelList = doChannelInfoQuery(account, clientIds[0], "tba_2/#", messageTime);
checkChannelInfoClientIdsAndTopics(channelList, 12, clientIds, Arrays.copyOfRange(semanticTopic, 12, 24));
channelList = doChannelInfoQuery(account, clientIds[0], "tba_1/1/#", messageTime);
checkChannelInfoClientIdsAndTopics(channelList, 6, clientIds, Arrays.copyOfRange(semanticTopic, 0, 6));
channelList = doChannelInfoQuery(account, clientIds[0], "tba_2/1/1/#", messageTime);
checkChannelInfoClientIdsAndTopics(channelList, 3, clientIds, Arrays.copyOfRange(semanticTopic, 12, 15));
}
// utility methods
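/**
* Store the given messages and, if requested, wait for the Elasticsearch index refresh so that subsequent queries can see them
*
* @param waitForElasticsearchRefreshTime
* @param messages
* @return
* @throws InterruptedException
*/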
private List<StorableId> insertMessages(boolean waitForElasticsearchRefreshTime, KapuaDataMessage... messages) throws InterruptedException
{
List<StorableId> storableIds = new ArrayList<StorableId>();
for (KapuaDataMessage message : messages) {
try {
storableIds.add(messageStoreService.store(message));
}
catch (Exception e) {
s_logger.error("Message insert exception!", e);
fail("Store messages should have succeded");
}
}
if (waitForElasticsearchRefreshTime) {
waitEsRefresh();
}
return storableIds;
}
private void waitEsRefresh() throws InterruptedException
{
// Wait ES indexes to be refreshed
Thread.sleep(elasticsearchRefreshTime);
}
/**
* This method should create a new account for the test (temporary implementation that always returns the default kapua-sys account)
*
* @param scopeId
* @param password
* @return
* @throws KapuaException
*/
private Account createAccount(KapuaId scopeId, String password) throws KapuaException
{
return getTestAccountCreator(adminScopeId);
}
/**
* Creates a new KapuaMessage setting the provided parameters
*
* @param clientId
* @param scopeId
* @param deviceId
* @param receivedOn
* @param capturedOn
* @param sentOn
* @return
*/
private KapuaDataMessage getMessage(String clientId, KapuaId scopeId, KapuaId deviceId,
Date receivedOn, Date capturedOn, Date sentOn)
{
KapuaDataMessage message = new KapuaDataMessageImpl();
message.setReceivedOn(receivedOn);
message.setCapturedOn(capturedOn);
message.setSentOn(sentOn);
message.setChannel(new KapuaDataChannelImpl());
message.setClientId(clientId);
message.setDeviceId(deviceId);
message.setScopeId(scopeId);
return message;
}
/**
* Update the KapuaMessage channel with the provided semantic part
*
* @param message
* @param semanticPart
*/
private void updateChannel(KapuaDataMessage message, String semanticPart)
{
message.setChannel(new KapuaDataChannelImpl());
message.getChannel().setSemanticParts(new ArrayList<String>(Arrays.asList(semanticPart.split("/"))));
}
/**
* Update the KapuaMessage payload with the provided payload
*
* @param message
* @param messagePayload
*/
private void updatePayload(KapuaDataMessage message, KapuaDataPayload messagePayload)
{
message.setPayload(messagePayload);
}
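/**
* Ensure the message has a (possibly empty) payload so that metric properties can be added to it
*
* @param message
*/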
private void initMetrics(KapuaDataMessage message)
{
if (message.getPayload() == null) {
message.setPayload(new KapuaDataPayloadImpl());
}
}
/**
* Return a new KapuaPosition instance and set all the provided position information
*
* @param altitude
* @param heading
* @param latitude
* @param longitude
* @param precision
* @param satellites
* @param speed
* @param status
* @param timestamp
* @return
*/
private KapuaPosition getPosition(Double altitude, Double heading, Double latitude, Double longitude, Double precision, Integer satellites, Double speed, Integer status, Date timestamp)
{
KapuaPosition messagePosition = new KapuaPositionImpl();
messagePosition.setAltitude(altitude);
messagePosition.setHeading(heading);
messagePosition.setLatitude(latitude);
messagePosition.setLongitude(longitude);
messagePosition.setPrecision(precision);
messagePosition.setSatellites(satellites);
messagePosition.setSpeed(speed);
messagePosition.setStatus(status);
messagePosition.setTimestamp(timestamp);
return messagePosition;
}
// Utility methods to help create message queries
/**
* Creates a new query setting the default base parameters (fetch style, sort, limit, offset, ...) for the Message schema
*
* @return
*/
private MessageQuery getBaseMessageQuery(KapuaId scopeId)
{
MessageQuery query = new MessageQueryImpl(scopeId);
query.setAskTotalCount(true);
query.setFetchStyle(StorableFetchStyle.SOURCE_FULL);
query.setLimit(10);
query.setOffset(0);
List<SortField> order = new ArrayList<SortField>();
SortField sf = new SortFieldImpl();
sf.setField(EsSchema.MESSAGE_TIMESTAMP);
sf.setSortDirection(SortDirection.DESC);
order.add(sf);
query.setSortFields(order);
return query;
}
/**
* Creates a new query setting the default base parameters (fetch style, sort, limit, offset, ...) for the Channel Info schema
*
* @return
*/
private ChannelInfoQuery getBaseChannelInfoQuery(KapuaId scopeId)
{
ChannelInfoQuery query = new ChannelInfoQueryImpl(scopeId);
query.setAskTotalCount(true);
query.setFetchStyle(StorableFetchStyle.SOURCE_FULL);
query.setLimit(10);
query.setOffset(0);
List<SortField> order = new ArrayList<SortField>();
SortField sf = new SortFieldImpl();
sf.setField(EsSchema.MESSAGE_TIMESTAMP);
sf.setSortDirection(SortDirection.DESC);
order.add(sf);
query.setSortFields(order);
return query;
}
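/**
* Run a channel info query filtered by client id, channel filter and query date
*
* @param account
* @param clientId
* @param channelFilter
* @param queryDate
* @return
* @throws KapuaException
*/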
private ChannelInfoListResult doChannelInfoQuery(Account account, String clientId, String channelFilter, Date queryDate) throws KapuaException
{
ChannelInfoQuery channelInfoQuery = getBaseChannelInfoQuery(account.getId());
channelInfoQuery.setLimit(100);
setChannelInfoQueryChannelPredicateCriteria(channelInfoQuery, clientId, channelFilter, new DateRange(queryDate));
return channelInfoRegistryService.query( channelInfoQuery);
}
/**
* Creates a new query setting the default base parameters (fetch style, sort, limit, offset, ...) for the Metric Info schema
*
* @return
*/
private MetricInfoQuery getBaseMetricInfoQuery(KapuaId scopeId)
{
MetricInfoQuery query = new MetricInfoQueryImpl(scopeId);
query.setAskTotalCount(true);
query.setFetchStyle(StorableFetchStyle.SOURCE_FULL);
query.setLimit(10);
query.setOffset(0);
List<SortField> order = new ArrayList<SortField>();
SortField sf = new SortFieldImpl();
sf.setField(EsSchema.MESSAGE_TIMESTAMP);
sf.setSortDirection(SortDirection.DESC);
order.add(sf);
query.setSortFields(order);
return query;
}
/**
* Creates a new query setting the default base parameters (fetch style, sort, limit, offset, ...) for the Client Info schema
*
* @return
*/
private ClientInfoQuery getBaseClientInfoQuery(KapuaId scopeId)
{
ClientInfoQuery query = new ClientInfoQueryImpl(scopeId);
query.setAskTotalCount(true);
query.setFetchStyle(StorableFetchStyle.SOURCE_FULL);
query.setLimit(10);
query.setOffset(0);
List<SortField> order = new ArrayList<SortField>();
SortField sf = new SortFieldImpl();
sf.setField(EsSchema.MESSAGE_TIMESTAMP);
sf.setSortDirection(SortDirection.DESC);
order.add(sf);
query.setSortFields(order);
return query;
}
/**
* Get the ordered query (adding the sort fields list provided and the result limit count)
*
* @param scopeId
* @param limit
* @param order
* @return
*/
private MessageQuery getMessageOrderedQuery(KapuaId scopeId, int limit, List<SortField> order)
{
MessageQuery query = new MessageQueryImpl(scopeId);
query.setAskTotalCount(true);
query.setFetchStyle(StorableFetchStyle.SOURCE_FULL);
query.setLimit(limit);
query.setOffset(0);
query.setSortFields(order);
return query;
}
/**
* Set the query account and message timestamp filter
*
* @param messageQuery
* @param dateRange
*/
private void setMessageQueryBaseCriteria(MessageQuery messageQuery, DateRange dateRange)
{
setMessageQueryBaseCriteria(messageQuery, null, dateRange);
}
/**
* Set the query account, message timestamp and client id filter
*
* @param messageQuery
* @param clientId
* @param dateRange
*/
private void setMessageQueryBaseCriteria(MessageQuery messageQuery, String clientId, DateRange dateRange)
{
AndPredicate andPredicate = new AndPredicateImpl();
if (!StringUtils.isEmpty(clientId)) {
TermPredicate clientPredicate = datastoreObjectFactory.newTermPredicate(MessageField.CLIENT_ID, clientId);
andPredicate.getPredicates().add(clientPredicate);
}
if (dateRange != null) {
RangePredicate timestampPredicate = new RangePredicateImpl(MessageField.TIMESTAMP, dateRange.getLowerBound(), dateRange.getUpperBound());
andPredicate.getPredicates().add(timestampPredicate);
}
messageQuery.setPredicate(andPredicate);
}
/**
* Set the query account and message timestamp filter
*
* @param channelInfoQuery
* @param dateRange
*/
private void setChannelInfoQueryBaseCriteria(ChannelInfoQuery channelInfoQuery, DateRange dateRange)
{
setChannelInfoQueryBaseCriteria(channelInfoQuery, null, dateRange);
}
/**
* Set the query account, message timestamp and client id filter
*
* @param channelInfoQuery
* @param clientId
* @param dateRange
*/
private void setChannelInfoQueryBaseCriteria(ChannelInfoQuery channelInfoQuery, String clientId, DateRange dateRange)
{
AndPredicate andPredicate = new AndPredicateImpl();
if (!StringUtils.isEmpty(clientId)) {
TermPredicate clientPredicate = datastoreObjectFactory.newTermPredicate(ChannelInfoField.CLIENT_ID, clientId);
andPredicate.getPredicates().add(clientPredicate);
}
if (dateRange != null) {
RangePredicate timestampPredicate = new RangePredicateImpl(ChannelInfoField.TIMESTAMP, dateRange.getLowerBound(), dateRange.getUpperBound());
andPredicate.getPredicates().add(timestampPredicate);
}
channelInfoQuery.setPredicate(andPredicate);
}
/**
* Set the query client id, channel and message timestamp filter
*
* @param channelInfoQuery
* @param clientId
* @param channelPredicate
* @param dateRange
*/
private void setChannelInfoQueryChannelPredicateCriteria(ChannelInfoQuery channelInfoQuery, String clientId, String channelPredicate, DateRange dateRange)
{
AndPredicate andPredicate = new AndPredicateImpl();
if (!StringUtils.isEmpty(clientId)) {
TermPredicate clientIdPredicate = datastoreObjectFactory.newTermPredicate(ChannelInfoField.CLIENT_ID, clientId);
andPredicate.getPredicates().add(clientIdPredicate);
}
if (dateRange != null) {
RangePredicate timestampPredicate = new RangePredicateImpl(ChannelInfoField.TIMESTAMP, dateRange.getLowerBound(), dateRange.getUpperBound());
andPredicate.getPredicates().add(timestampPredicate);
}
ChannelMatchPredicate channelMatchPredicate = new ChannelMatchPredicateImpl(channelPredicate);
andPredicate.getPredicates().add(channelMatchPredicate);
channelInfoQuery.setPredicate(andPredicate);
}
/**
* Get the ordered query (adding the sort fields list provided and the result limit count)
*
* @param scopeId
* @param limit
* @param order
* @return
*/
private MetricInfoQuery getMetricInfoOrderedQuery(KapuaId scopeId, int limit, List<SortField> order)
{
MetricInfoQuery query = new MetricInfoQueryImpl(scopeId);
query.setAskTotalCount(true);
query.setFetchStyle(StorableFetchStyle.SOURCE_FULL);
query.setLimit(limit);
query.setOffset(0);
query.setSortFields(order);
return query;
}
/**
* Set the query account and message timestamp filter
*
* @param metricInfoQuery
* @param dateRange
*/
private void setMetricInfoQueryBaseCriteria(MetricInfoQuery metricInfoQuery, DateRange dateRange)
{
setMetricInfoQueryBaseCriteria(metricInfoQuery, null, dateRange);
}
/**
* Set the query account, message timestamp and client id filter
*
* @param metricInfoQuery
* @param clientId
* @param dateRange
*/
private void setMetricInfoQueryBaseCriteria(MetricInfoQuery metricInfoQuery, String clientId, DateRange dateRange)
{
AndPredicate andPredicate = new AndPredicateImpl();
if (!StringUtils.isEmpty(clientId)) {
TermPredicate clientIdPredicate = datastoreObjectFactory.newTermPredicate(MetricInfoField.CLIENT_ID, clientId);
andPredicate.getPredicates().add(clientIdPredicate);
}
if (dateRange != null) {
RangePredicate timestampPredicate = new RangePredicateImpl(MetricInfoField.TIMESTAMP_FULL, dateRange.getLowerBound(), dateRange.getUpperBound());
andPredicate.getPredicates().add(timestampPredicate);
}
metricInfoQuery.setPredicate(andPredicate);
}
/**
* Set the query account and message timestamp filter
*
* @param clientInfoQuery
* @param dateRange
*/
private void setClientInfoQueryBaseCriteria(ClientInfoQuery clientInfoQuery, DateRange dateRange)
{
setClientInfoQueryBaseCriteria(clientInfoQuery, null, dateRange);
}
/**
* Set the query account, message timestamp and client id filter
*
* @param clientInfoQuery
* @param clientId
* @param dateRange
*/
private void setClientInfoQueryBaseCriteria(ClientInfoQuery clientInfoQuery, String clientId, DateRange dateRange)
{
AndPredicate andPredicate = new AndPredicateImpl();
if (!StringUtils.isEmpty(clientId)) {
TermPredicate clientIdPredicate = datastoreObjectFactory.newTermPredicate(ClientInfoField.CLIENT_ID, clientId);
andPredicate.getPredicates().add(clientIdPredicate);
}
if (dateRange != null) {
RangePredicate timestampPredicate = new RangePredicateImpl(ClientInfoField.TIMESTAMP, dateRange.getLowerBound(), dateRange.getUpperBound());
andPredicate.getPredicates().add(timestampPredicate);
}
clientInfoQuery.setPredicate(andPredicate);
}
// Utility methods to help check the message results
/**
* Check that the result set contains the expected messages count and return the first message (if any)
*
* @param result
* @param messagesCount
* @return
*/
private DatastoreMessage checkMessagesCount(MessageListResult result, int messagesCount)
{
DatastoreMessage messageQueried = null;
if (messagesCount > 0) {
assertNotNull("No result found!", result);
assertNotNull("No result found!", result.getTotalCount());
assertEquals("Result message has a wrong size!", messagesCount, result.getTotalCount().intValue());
messageQueried = result.getFirstItem();
assertNotNull("Result message is null!", messageQueried);
}
else {
assertTrue("No result should be found!", result == null || result.getTotalCount() == null || result.getTotalCount() <= 0);
}
return messageQueried;
}
/**
* Check if the queried message has the correct message id
*
* @param message
* @param storableId
*/
private void checkMessageId(DatastoreMessage message, StorableId storableId)
{
if (storableId != null) {
assertNotNull("Message is null", message);
assertNotNull("Message id doesn't match", message.getId());
assertEquals("Message id doesn't match", storableId.toString(), message.getId().toString());
}
}
/**
* Check if the queried message has the correct semantic part (the same topic part and the same length)
*
* @param message
* @param topicSemanticPart
*/
private void checkTopic(DatastoreMessage message, String topicSemanticPart)
{
KapuaChannel channel = message.getChannel();
assertNotNull("Null message channel!", channel);
List<String> semanticParts = channel.getSemanticParts();
assertNotNull("Null topic semantic part!", semanticParts);
String[] topicSemanticPartTokenized = topicSemanticPart.split("/");
assertEquals("Wrong semantic topic stored!", topicSemanticPartTokenized.length, semanticParts.size());
int i = 0;
for (String tmp : topicSemanticPartTokenized) {
assertEquals(String.format("Wrong [%s] semantic part!", i), tmp, semanticParts.get(i++));
}
}
/**
* Check if the queried message has the correct dates (indexOn, sentOn, receivedOn, capturedOn). The dates can be checked also for a range.
*
* @param message
* @param index
* @param sentOn
* @param capturedOn
* @param receivedOn
*/
private void checkMessageDate(DatastoreMessage message, Range<Date> index, Range<Date> sentOn, Range<Date> capturedOn, Range<Date> receivedOn)
{
assertNotNull("Message timestamp is null!", message.getTimestamp());
index.checkValue(message.getTimestamp());
sentOn.checkValue(message.getSentOn());
capturedOn.checkValue(message.getCapturedOn());
receivedOn.checkValue(message.getReceivedOn());
assertNotNull("Message payload is null!", message.getPayload());
assertNotNull("Message properties are null!", message.getPayload().getProperties());
}
/**
* Check if the queried message has the correct metrics size
*
* @param message
* @param metricsSize
*/
private void checkMetricsSize(DatastoreMessage message, int metricsSize)
{
if (metricsSize < 0) {
assertNull("Message metrics is not null!", message.getPayload().getProperties());
}
else {
assertNotNull("Message metrics shouldn't be null!", message.getPayload().getProperties());
assertEquals("Message metrics size doesn't match!", metricsSize, message.getPayload().getProperties().size());
}
}
/**
* Check if the queried message has the correct body (length and also content should be the same)
*
* @param message
* @param body
*/
private void checkMessageBody(DatastoreMessage message, byte[] body)
{
if (body == null) {
assertNull("Message body is not null!", message.getPayload().getBody());
}
else {
assertNotNull("Message body shouldn't be null!", message.getPayload().getBody());
assertEquals("Message body size doesn't match!", body.length, message.getPayload().getBody().length);
assertArrayEquals("Message body differs from the original!", body, message.getPayload().getBody());
}
}
/**
* Check if the queried message has the correct metrics (metrics count and same keys/values)
*
* @param message
* @param metrics
*/
private void checkMetrics(DatastoreMessage message, Map<String, Object> metrics)
{
// assuming metrics size is already checked by the checkMetricsSize
Map<String, Object> messageProperties = message.getPayload().getProperties();
Iterator<String> metricsKeys = metrics.keySet().iterator();
while (metricsKeys.hasNext()) {
String key = metricsKeys.next();
assertEquals(String.format("Metric [%s] differs!", key), metrics.get(key), messageProperties.get(key));
}
}
/**
* Check if the queried message has the correct position
*
* @param message
* @param position
*/
private void checkPosition(DatastoreMessage message, KapuaPosition position)
{
if (position == null) {
assertNull("Message position is not null!", message.getPosition());
}
else {
assertNotNull("Message position shouldn't be null!", message.getPosition());
KapuaPosition messagePosition = message.getPosition();
assertEquals("Altitude position differs from the original!", messagePosition.getAltitude(), position.getAltitude());
assertEquals("Heading position differs from the original!", messagePosition.getHeading(), position.getHeading());
assertEquals("Latitude position differs from the original!", messagePosition.getLatitude(), position.getLatitude());
assertEquals("Longitude position differs from the original!", messagePosition.getLongitude(), position.getLongitude());
assertEquals("Precision position differs from the original!", messagePosition.getPrecision(), position.getPrecision());
assertEquals("Satellites position differs from the original!", messagePosition.getSatellites(), position.getSatellites());
assertEquals("Speed position differs from the original!", messagePosition.getSpeed(), position.getSpeed());
assertEquals("Status position differs from the original!", messagePosition.getStatus(), position.getStatus());
assertEquals("Timestamp position differs from the original!", messagePosition.getTimestamp(), position.getTimestamp());
}
}
/**
* Check if in the result set has the expected channel info count and return the first (if any)
*
* @param result
* @return
*/
private ChannelInfo checkChannelInfoCount(ChannelInfoListResult result, int clientInfoCount)
{
ChannelInfo channelInfoQueried = null;
if (clientInfoCount > 0) {
assertNotNull("No result found!", result);
assertNotNull("No result found!", result.getTotalCount());
assertEquals("Result channel info list has a wrong size!", clientInfoCount, result.getTotalCount().intValue());
channelInfoQueried = result.getFirstItem();
assertNotNull("Result channel info list is null!", channelInfoQueried);
}
else {
assertTrue("No result should be found!", result == null || result.getTotalCount() == null || result.getTotalCount() <= 0);
}
return channelInfoQueried;
}
/**
* Check that the result set contains the expected channel info client ids and topics
*
* @param result
* @param clientInfoCount
* @param clientIds
* @param topics
*/
private void checkChannelInfoClientIdsAndTopics(ChannelInfoListResult result, int clientInfoCount, String[] clientIds, String[] topics)
{
checkChannelInfoCount(result, clientInfoCount);
Set<String> allClientId = new HashSet<String>();
Set<String> allTopics = new HashSet<String>();
for (ChannelInfo channelInfo : result.getItems()) {
allClientId.add(channelInfo.getClientId());
allTopics.add(channelInfo.getChannel());
}
assertEquals("Wrong client ids size!", (clientIds != null ? clientIds.length : 0), allClientId.size());
assertEquals("Wrong topics size!", (topics != null ? topics.length : 0), allTopics.size());
if (clientIds != null) {
for (String clientIdFound : clientIds) {
assertTrue(String.format("Cannot find the client [%s] in the client ids list!", clientIdFound), allClientId.contains(clientIdFound));
}
}
if (topics != null) {
for (String topicFound : topics) {
assertTrue(String.format("Cannot find the topic [%s] in the topics list!", topicFound), allTopics.contains(topicFound));
}
}
}
/**
* Check that the result set contains the expected metric info count and return the first (if any)
*
* @param result
* @param metricInfoCount
* @return
*/
private MetricInfo checkMetricInfoCount(MetricInfoListResult result, int metricInfoCount)
{
MetricInfo metricInfoQueried = null;
if (metricInfoCount > 0) {
assertNotNull("No result found!", result);
assertNotNull("No result found!", result.getTotalCount());
assertEquals("Result metric info list has a wrong size!", metricInfoCount, result.getTotalCount().intValue());
metricInfoQueried = result.getFirstItem();
assertNotNull("Result metric info list is null!", metricInfoQueried);
}
else {
assertTrue("No result should be found!", result == null || result.getTotalCount() == null || result.getTotalCount() <= 0);
}
return metricInfoQueried;
}
/**
* Check that the result set contains the expected metric info client ids and metric names
*
* @param result
* @param metricInfoCount
* @param clientIds
* @param metrics
*/
private void checkMetricInfoClientIdsAndMetricNames(MetricInfoListResult result, int metricInfoCount, String[] clientIds, String[] metrics)
{
checkMetricInfoCount(result, metricInfoCount);
Set<String> allClientId = new HashSet<String>();
Set<String> allMetrics = new HashSet<String>();
for (MetricInfo metricInfo : result.getItems()) {
allClientId.add(metricInfo.getClientId());
allMetrics.add(metricInfo.getName());
}
assertEquals("Wrong client ids size!", (clientIds != null ? clientIds.length : 0), allClientId.size());
assertEquals("Wrong metrics size!", (metrics != null ? metrics.length : 0), allMetrics.size());
if (clientIds != null) {
for (String clientIdFound : clientIds) {
assertTrue(String.format("Cannot find the client [%s] in the client ids list!", clientIdFound), allClientId.contains(clientIdFound));
}
}
if (metrics != null) {
for (String metric : metrics) {
assertTrue(String.format("Cannot find the metric [%s] in the metrics list!", metric), allMetrics.contains(metric));
}
}
}
/**
* Check that the result set contains the expected client info count and return the first (if any)
*
* @param result
* @param clientInfoCount
* @return
*/
private ClientInfo checkClientInfoCount(ClientInfoListResult result, int clientInfoCount)
{
ClientInfo clientInfoQueried = null;
if (clientInfoCount > 0) {
assertNotNull("No result found!", result);
assertNotNull("No result found!", result.getTotalCount());
assertEquals("Result client id list has a wrong size!", clientInfoCount, result.getTotalCount().intValue());
clientInfoQueried = result.getItem(0);
assertNotNull("Result client id list is null!", clientInfoQueried);
}
else {
assertTrue("No result should be found!", result == null || result.getTotalCount() == null || result.getTotalCount() <= 0);
}
return clientInfoQueried;
}
/**
* Check that the result set contains the expected client ids
*
* @param result
* @param clientInfoCount
* @param clientIds
*/
private void checkClientInfo(ClientInfoListResult result, int clientInfoCount, String[] clientIds)
{
checkClientInfoCount(result, clientInfoCount);
Set<String> allClientId = new HashSet<String>();
for (ClientInfo clientInfo : result.getItems()) {
allClientId.add(clientInfo.getClientId());
}
assertEquals("Wrong client ids size!", (clientIds != null ? clientIds.length : 0), allClientId.size());
if (clientIds != null) {
for (String clientIdFound : clientIds) {
assertTrue(String.format("Cannot find the client [%s] in the client ids list!", clientIdFound), allClientId.contains(clientIdFound));
}
}
}
private void checkMetricDateBound(MetricInfoListResult result, Date startDate, Date endDate)
{
// TODO
}
private void checkMessagesDateBound(MessageListResult result, Date startDate, Date endDate)
{
// TODO
}
/**
* Check that the result list is correctly ordered by the provided criteria (list of fields and sort direction)
*
* @param result
* @param sortFieldList
*/
@SuppressWarnings("rawtypes")
private void checkListOrder(StorableListResult<?> result, List<SortField> sortFieldList)
{
Object previousItem = null;
for (Object item : result.getItems()) {
for (SortField field : sortFieldList) {
if (previousItem != null) {
Comparable currentValue = getValue(item, field.getField());
Comparable previousValue = getValue(previousItem, field.getField());
if (!currentValue.equals(previousValue)) {
checkNextValueCoherence(field, currentValue, previousValue);
// proceed with next message
break;
}
}
else {
break;
}
}
previousItem = item;
}
}
/**
* Check that the next value is coherent with its ordering criteria (the two values must not be equal; equality must be checked by the caller before invoking this method)
*
* @param field
* @param currentValue
* @param previousValue
*/
@SuppressWarnings({ "rawtypes", "unchecked" })
private void checkNextValueCoherence(SortField field, Comparable currentValue, Comparable previousValue)
{
if (SortDirection.ASC.equals(field.getSortDirection())) {
assertTrue(String.format("The field [%s] is not correctly ordered as [%s]!", field.getField(), field.getSortDirection()), currentValue.compareTo(previousValue) > 0);
}
else {
assertTrue(String.format("The field [%s] is not correctly ordered as [%s]!", field.getField(), field.getSortDirection()), currentValue.compareTo(previousValue) < 0);
}
}
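// Worked example for the coherence check above (illustrative only, not invoked by the tests):
// with SortDirection.DESC on the timestamp field, previousValue = 2017-01-02T10:05 and
// currentValue = 2017-01-02T10:00 give currentValue.compareTo(previousValue) < 0, so the
// assertion passes; the same pair under SortDirection.ASC would make the assertion fail.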
/**
* Return the value of the field name provided (assuming that this value is a Comparable)
*
* @param object
* @param field
* @return
*/
@SuppressWarnings("rawtypes")
private Comparable getValue(Object object, String field)
{
try {
Class objetcClass = object.getClass();
String getterFieldName = getFieldName(field, true);
Method getMethod = getMethod(objetcClass, getterFieldName, "get");
if (getMethod != null) {
return (Comparable) getMethod.invoke(object, new Object[0]);
}
getMethod = getMethod(objetcClass, getterFieldName, "is");
if (getMethod != null) {
return (Comparable) getMethod.invoke(object, new Object[0]);
}
// else try by field access
String fieldName = getFieldName(field, false);
Field objField = getField(objetcClass, fieldName);
if (objField != null) {
return (Comparable) objField.get(object);
}
else {
throw new IllegalArgumentException(String.format("Cannot find a getter or a field for [%s], or the field value is not a Comparable value!", field));
}
}
catch (Exception e) {
throw new IllegalArgumentException(String.format("Cannot find a getter or a field for [%s], or the field value is not a Comparable value!", field));
}
}
/**
* Return the field name suffix used (in a different method) to compose the getter name.
* It removes the underscores and joins the remaining parts, capitalizing the first letter of each part (the first part is capitalized only if capitalizeFirstLetter = true)
*
* @param field
* @param capitalizeFirstLetter
* @return
*/
private String getFieldName(String field, boolean capitalizeFirstLetter)
{
String str[] = cleanupFieldName(field);
String fieldName = null;
if (capitalizeFirstLetter) {
fieldName = str[0].substring(0, 1).toUpperCase() + str[0].substring(1);
}
else {
fieldName = str[0];
}
for (int i = 1; i < str.length; i++) {
fieldName += str[i].substring(0, 1).toUpperCase() + str[i].substring(1);
}
return fieldName;
}
private String[] cleanupFieldName(String field)
{
int lastDot = field.lastIndexOf('.');
if (lastDot > -1) {
field = field.substring(lastDot + 1, field.length());
}
String str[] = field.split("_");
if (str == null || str.length <= 0) {
throw new IllegalArgumentException(String.format("Invalid field name [%s]", field));
}
return str;
}
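// Worked example of the reflective lookup above (illustrative only; the field name is an
// assumption chosen for the example): for the sort field name "metrics.int_value",
// cleanupFieldName() keeps "int_value" (the part after the last dot) and splits it into
// ["int", "value"]; getFieldName(field, true) then yields "IntValue", so getValue() first
// tries a getter named "getIntValue" (or "isIntValue") and only then falls back to a
// declared field named "intValue" (getFieldName(field, false)).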
/**
* Return the method combining the prefix and the field name provided
*
* @param objetcClass
* @param field
* @param prefix
* @return
*/
@SuppressWarnings({ "rawtypes", "unchecked" })
private Method getMethod(Class objetcClass, String field, String prefix)
{
String fieldName = prefix + field.substring(0, 1).toUpperCase() + field.substring(1);
Method objMethod = null;
do {
try {
objMethod = objetcClass.getMethod(fieldName, new Class[0]);
}
catch(NoSuchMethodException e) {
objetcClass = objetcClass.getSuperclass();
}
}
while(objMethod == null && objetcClass != null);
return objMethod;
}
/**
* Return the declared field with the provided name, searching also the superclasses
*
* @param objetcClass
* @param field
* @return
*/
@SuppressWarnings({ "rawtypes" })
private Field getField(Class objetcClass, String field)
{
Field objField = null;
do {
try {
objField = objetcClass.getDeclaredField(field);
objField.setAccessible(true);
}
catch(NoSuchFieldException e) {
objetcClass = objetcClass.getSuperclass();
}
}
while(objField == null && objetcClass != null);
return objField;
}
// Configuration utility
/**
* Update the store service configuration with the provided values
*
* @param messageStoreService
* @param scopeId
* @param dataIndexBy
* @param metricsIndexBy
* @param dataTTL
* @param storageEnabled
* @throws KapuaException
*/
private void updateConfiguration(MessageStoreService messageStoreService, KapuaId scopeId, DataIndexBy dataIndexBy, MetricsIndexBy metricsIndexBy, int dataTTL, boolean storageEnabled) throws KapuaException
{
Map<String, Object> config = messageStoreService.getConfigValues(scopeId);
if (config == null) {
config = new HashMap<String, Object>();
}
if (dataIndexBy != null) {
config.put(MessageStoreConfiguration.CONFIGURATION_DATA_INDEX_BY_KEY, dataIndexBy.name());
}
if (metricsIndexBy != null) {
config.put(MessageStoreConfiguration.CONFIGURATION_METRICS_INDEX_BY_KEY, metricsIndexBy.name());
}
config.put(MessageStoreConfiguration.CONFIGURATION_DATA_TTL_KEY, dataTTL);
config.put(MessageStoreConfiguration.CONFIGURATION_DATA_STORAGE_ENABLED_KEY, storageEnabled);
messageStoreService.setConfigValues(scopeId, config);
}
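// Illustrative usage of updateConfiguration() (the enum constants shown are assumptions,
// check DataIndexBy / MetricsIndexBy for the actual values):
//
// updateConfiguration(messageStoreService, account.getId(),
//     DataIndexBy.DEVICE_TIMESTAMP, MetricsIndexBy.TIMESTAMP, 30, true);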
/**
* Utility class to check that a value is correct (against an exact value or a range, depending on the constructor used)
*
* @param <O>
*/
private class Range<O extends Comparable<O>>
{
private String field;
private O min;
private O max;
private Range(String field, O min, O max)
{
this(field, min);
this.max = max;
}
private Range(String field, O exactValue)
{
assertNotNull("The lower bound or the exact value to compare cannot be null!", exactValue);
this.field = field;
this.min = exactValue;
}
public void checkValue(O value)
{
if (max == null) {
assertEquals("Expected value for " + field + " doesn't match!", min, value);
}
else {
assertTrue("Expected value for " + field + " doesn't match the lower bound", min.compareTo(value) <= 0);
assertTrue("Expected value for " + field + " doesn't match the upper bound", max.compareTo(value) >= 0);
}
}
}
private class DateRange
{
private Date lowerBound;
private Date upperBound;
public DateRange(Date bound)
{
this(bound, bound);
}
public DateRange(Date lowerBound, Date upperBound)
{
this.lowerBound = new Date(lowerBound.getTime() - QUERY_TIME_WINDOW);
this.upperBound = new Date(upperBound.getTime() + QUERY_TIME_WINDOW);
}
public Date getLowerBound()
{
return lowerBound;
}
public Date getUpperBound()
{
return upperBound;
}
}
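// Example of how DateRange widens the query window (illustrative only): new DateRange(capturedOn)
// produces a lower bound of capturedOn - QUERY_TIME_WINDOW and an upper bound of
// capturedOn + QUERY_TIME_WINDOW, so a message stored exactly at capturedOn always falls
// inside the range used by the *BaseCriteria helpers above.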
/**
* Base test; it may be removed since we should already have tests that cover this one as well (to be checked!)
*
* @throws Exception
*/
@Test
public void testStore()
throws Exception
{
Account account = getTestAccountCreator(adminScopeId);
Date now = new Date();
String clientId = String.format("device-%d", now.getTime());
DeviceCreator deviceCreator = deviceFactory.newCreator(account.getId(), clientId);
Device device = deviceRegistryService.create(deviceCreator);
KapuaDataMessageImpl message = new KapuaDataMessageImpl();
KapuaDataChannelImpl channel = new KapuaDataChannelImpl();
KapuaDataPayloadImpl messagePayload = new KapuaDataPayloadImpl();
KapuaPositionImpl messagePosition = new KapuaPositionImpl();
Map<String, Object> metrics = new HashMap<String, Object>();
channel.setClientId(device.getClientId());
channel.setSemanticParts(Arrays.asList("APP01"));
message.setScopeId(account.getId());
message.setDeviceId(device.getId());
message.setCapturedOn(now);
message.setReceivedOn(now);
message.setChannel(channel);
metrics.put("distance", 1L);
metrics.put("label", "pippo");
messagePayload.setProperties(metrics);
messagePosition.setAltitude(1.0);
messagePosition.setTimestamp(now);
message.setPosition(messagePosition);
messagePayload.setProperties(metrics);
message.setPayload(messagePayload);
message.setClientId(clientId);
StorableId messageId = messageStoreService.store(message);
// A non-empty message id must be returned
assertNotNull(messageId);
assertTrue(!messageId.toString().isEmpty());
// Wait for ES indexes to be refreshed
Thread.sleep(elasticsearchRefreshTime);
// Retrieve the message from its id
DatastoreMessage retrievedMessage = messageStoreService.find(account.getId(), messageId, StorableFetchStyle.SOURCE_FULL);
// The returned message must not be null and its values must coincide
assertNotNull(retrievedMessage);
assertTrue(messageId.equals(retrievedMessage.getDatastoreId()));
assertTrue(account.getId().equals(retrievedMessage.getScopeId()));
assertTrue(device.getId().equals(retrievedMessage.getDeviceId()));
assertTrue(device.getClientId().equals(retrievedMessage.getClientId()));
TermPredicate equalsMessageId = datastoreObjectFactory.newTermPredicate(ClientInfoField.MESSAGE_ID, messageId);
ClientInfoQuery clientInfoQuery = datastoreObjectFactory.newClientInfoQuery(account.getId());
clientInfoQuery.setOffset(0);
clientInfoQuery.setLimit(1);
clientInfoQuery.setFetchStyle(StorableFetchStyle.FIELDS);
clientInfoQuery.setPredicate(equalsMessageId);
ClientInfoRegistryService clientInfoRegistry = KapuaLocator.getInstance().getService(ClientInfoRegistryService.class);
ClientInfoListResult clientInfos = clientInfoRegistry.query(clientInfoQuery);
assertNotNull(clientInfos);
assertTrue(clientInfos.getSize() == 1);
ClientInfo clientInfo = clientInfos.getItem(0);
assertNotNull(clientInfo);
assertTrue(messageId.equals(clientInfo.getFirstMessageId()));
// There must be a channel info entry in the registry
equalsMessageId = datastoreObjectFactory.newTermPredicate(ChannelInfoField.MESSAGE_ID, messageId);
ChannelInfoQuery channelInfoQuery = datastoreObjectFactory.newChannelInfoQuery(account.getId());
channelInfoQuery.setOffset(0);
channelInfoQuery.setLimit(1);
channelInfoQuery.setFetchStyle(StorableFetchStyle.FIELDS);
channelInfoQuery.setPredicate(equalsMessageId);
ChannelInfoRegistryService channelInfoRegistry = KapuaLocator.getInstance().getService(ChannelInfoRegistryService.class);
ChannelInfoListResult channelInfos = channelInfoRegistry.query(channelInfoQuery);
assertNotNull(channelInfos);
assertTrue(channelInfos.getSize() == 1);
ChannelInfo channelInfo = channelInfos.getItem(0);
assertNotNull(channelInfo);
assertTrue(messageId.equals(channelInfo.getFirstMessageId()));
// There must be two metric info entries in the registry
equalsMessageId = datastoreObjectFactory.newTermPredicate(MetricInfoField.MESSAGE_ID_FULL, messageId);
MetricInfoQuery metricInfoQuery = datastoreObjectFactory.newMetricInfoQuery(account.getId());
metricInfoQuery.setOffset(0);
metricInfoQuery.setLimit(2);
metricInfoQuery.setFetchStyle(StorableFetchStyle.FIELDS);
metricInfoQuery.setPredicate(equalsMessageId);
MetricInfoRegistryService metricInfoRegistry = KapuaLocator.getInstance().getService(MetricInfoRegistryService.class);
MetricInfoListResult metricInfos = metricInfoRegistry.query(metricInfoQuery);
assertNotNull(metricInfos);
assertTrue(metricInfos.getSize() == 2);
MetricInfo metricInfo = metricInfos.getItem(0);
assertNotNull(metricInfo);
assertTrue(messageId.equals(metricInfo.getFirstMessageId()));
metricInfo = metricInfos.getItem(1);
assertNotNull(metricInfo);
assertTrue(messageId.equals(metricInfo.getFirstMessageId()));
}
/**
* Return a new account created just for the test.<br>
* <b>WARNING! The current implementation is not compliant with that, since it is a temporary implementation that returns the default kapua-sys account</b>
*
* @param scopeId
* @return
* @throws KapuaException
*/
private Account getTestAccountCreator(KapuaId scopeId) throws KapuaException
{
KapuaLocator locator = KapuaLocator.getInstance();
Account account = locator.getService(AccountService.class).findByName("kapua-sys");
return account;
}
}
|
package org.mtransit.parser.ca_chambly_richelieu_carignan_citcrc_bus;
import java.util.HashSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.mtransit.parser.DefaultAgencyTools;
import org.mtransit.parser.Utils;
import org.mtransit.parser.gtfs.data.GCalendar;
import org.mtransit.parser.gtfs.data.GCalendarDate;
import org.mtransit.parser.gtfs.data.GRoute;
import org.mtransit.parser.gtfs.data.GStop;
import org.mtransit.parser.gtfs.data.GTrip;
import org.mtransit.parser.mt.data.MRoute;
import org.mtransit.parser.mt.data.MSpec;
import org.mtransit.parser.mt.data.MTrip;
public class ChamblyRichelieuCarignanCITCRCBusAgencyTools extends DefaultAgencyTools {
public static final String ROUTE_TYPE_FILTER = "3"; // bus only
public static void main(String[] args) {
if (args == null || args.length == 0) {
args = new String[3];
args[0] = "input/gtfs.zip";
args[1] = "../../mtransitapps/ca-chambly-richelieu-carignan-citcrc-bus-android/res/raw/";
args[2] = ""; // files-prefix
// args[3] = "false"; // not-V1
}
new ChamblyRichelieuCarignanCITCRCBusAgencyTools().start(args);
}
private HashSet<String> serviceIds;
@Override
public void start(String[] args) {
System.out.printf("Generating CITCRC bus data...\n");
long start = System.currentTimeMillis();
this.serviceIds = extractUsefulServiceIds(args, this);
super.start(args);
System.out.printf("Generating CITCRC bus data... DONE in %s.\n", Utils.getPrettyDuration(System.currentTimeMillis() - start));
}
@Override
public boolean excludeCalendar(GCalendar gCalendar) {
if (this.serviceIds != null) {
return excludeUselessCalendar(gCalendar, this.serviceIds);
}
return super.excludeCalendar(gCalendar);
}
@Override
public boolean excludeCalendarDate(GCalendarDate gCalendarDates) {
if (this.serviceIds != null) {
return excludeUselessCalendarDate(gCalendarDates, this.serviceIds);
}
return super.excludeCalendarDate(gCalendarDates);
}
@Override
public boolean excludeTrip(GTrip gTrip) {
if (this.serviceIds != null) {
return excludeUselessTrip(gTrip, this.serviceIds);
}
return super.excludeTrip(gTrip);
}
@Override
public boolean excludeRoute(GRoute gRoute) {
if (ROUTE_TYPE_FILTER != null && !gRoute.route_type.equals(ROUTE_TYPE_FILTER)) {
return true;
}
return super.excludeRoute(gRoute);
}
@Override
public String getRouteLongName(GRoute gRoute) {
String routeLongName = gRoute.route_long_name;
routeLongName = MSpec.SAINT.matcher(routeLongName).replaceAll(MSpec.SAINT_REPLACEMENT);
return MSpec.cleanLabel(routeLongName);
}
private static final String ROUTE_COLOR = "D80C4A";
@Override
public String getRouteColor(GRoute gRoute) {
if ("11".equals(gRoute.route_short_name)) return "E5003D";
if ("12".equals(gRoute.route_short_name)) return "81378E";
if ("13".equals(gRoute.route_short_name)) return "FFDD00";
if ("14".equals(gRoute.route_short_name)) return "009486";
if ("15".equals(gRoute.route_short_name)) return "014A99";
if ("16".equals(gRoute.route_short_name)) return "20A74B";
if ("20".equals(gRoute.route_short_name)) return "D50080";
if ("300".equals(gRoute.route_short_name)) return "00B5E2";
if ("301".equals(gRoute.route_short_name)) return "00B5E2";
if ("302".equals(gRoute.route_short_name)) return "00B5E2";
if ("303".equals(gRoute.route_short_name)) return "00B5E2";
if ("400".equals(gRoute.route_short_name)) return "BFD885";
if ("401".equals(gRoute.route_short_name)) return "BFD885";
if ("450".equals(gRoute.route_short_name)) return "EF7B0A";
if ("500".equals(gRoute.route_short_name)) return "666666";
if ("600".equals(gRoute.route_short_name)) return "ACAA00";
if (gRoute.route_long_name.contains("Taxibus")) return "74797D";
return ROUTE_COLOR;
}
private static final String ROUTE_TEXT_COLOR = "FFFFFF";
@Override
public String getRouteTextColor(GRoute gRoute) {
return ROUTE_TEXT_COLOR;
}
@Override
public void setTripHeadsign(MRoute route, MTrip mTrip, GTrip gTrip) {
String stationName = cleanTripHeadsign(gTrip.trip_headsign);
int directionId = Integer.valueOf(gTrip.direction_id);
if (mTrip.getRouteId() == 14l) {
if (directionId == 0) {
stationName = "Richelieu-Chambly 0";
} else if (directionId == 1) {
stationName = "Richelieu-Chambly 1";
}
} else if (mTrip.getRouteId() == 15l) {
if (directionId == 0) {
stationName = "Marieville-Chambly 0";
} else if (directionId == 1) {
stationName = "Marieville-Chambly 1";
}
}
mTrip.setHeadsignString(stationName, directionId);
}
private static final Pattern DIRECTION = Pattern.compile("(direction )", Pattern.CASE_INSENSITIVE);
private static final String DIRECTION_REPLACEMENT = "";
@Override
public String cleanTripHeadsign(String tripHeadsign) {
tripHeadsign = DIRECTION.matcher(tripHeadsign).replaceAll(DIRECTION_REPLACEMENT);
return MSpec.cleanLabelFR(tripHeadsign);
}
private static final Pattern START_WITH_FACE_A = Pattern.compile("^(face à )", Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE);
private static final Pattern START_WITH_FACE_AU = Pattern.compile("^(face au )", Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE);
private static final Pattern START_WITH_FACE = Pattern.compile("^(face )", Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE);
private static final Pattern SPACE_FACE_A = Pattern.compile("( face à )", Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE);
private static final Pattern SPACE_WITH_FACE_AU = Pattern.compile("( face au )", Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE);
private static final Pattern SPACE_WITH_FACE = Pattern.compile("( face )", Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE);
private static final Pattern[] START_WITH_FACES = new Pattern[] { START_WITH_FACE_A, START_WITH_FACE_AU, START_WITH_FACE };
private static final Pattern[] SPACE_FACES = new Pattern[] { SPACE_FACE_A, SPACE_WITH_FACE_AU, SPACE_WITH_FACE };
private static final Pattern AVENUE = Pattern.compile("( avenue)", Pattern.CASE_INSENSITIVE);
private static final String AVENUE_REPLACEMENT = " av.";
@Override
public String cleanStopName(String gStopName) {
gStopName = AVENUE.matcher(gStopName).replaceAll(AVENUE_REPLACEMENT);
gStopName = Utils.replaceAll(gStopName, START_WITH_FACES, MSpec.SPACE);
gStopName = Utils.replaceAll(gStopName, SPACE_FACES, MSpec.SPACE);
return super.cleanStopNameFR(gStopName);
}
@Override
public String getStopCode(GStop gStop) {
if ("0".equals(gStop.stop_code)) {
return null;
}
return super.getStopCode(gStop);
}
private static final Pattern DIGITS = Pattern.compile("[\\d]+");
@Override
public int getStopId(GStop gStop) {
String stopCode = getStopCode(gStop);
if (stopCode != null && stopCode.length() > 0) {
return Integer.valueOf(stopCode); // using stop code as stop ID
}
// generating integer stop ID
Matcher matcher = DIGITS.matcher(gStop.stop_id);
matcher.find();
int digits = Integer.parseInt(matcher.group());
int stopId;
if (gStop.stop_id.startsWith("LON")) {
stopId = 100000;
} else {
System.out.println("Stop doesn't have an ID (start with)! " + gStop);
System.exit(-1);
stopId = -1;
}
if (gStop.stop_id.endsWith("A")) {
stopId += 1000;
} else {
System.out.println("Stop doesn't have an ID (end with)! " + gStop);
System.exit(-1);
}
return stopId + digits;
}
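// Worked example of the stop ID generation above (illustrative only; the stop ID is an
// assumption chosen for the example): for a GTFS stop whose stop code is "0" (so no numeric
// stop code is available) and whose stop_id is "LON123A", DIGITS extracts 123, the "LON"
// prefix contributes 100000 and the "A" suffix contributes 1000, so the generated integer
// stop ID is 101123.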
}
|
package com.thinkaurelius.titan.diskstorage.hbase;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.collect.BiMap;
import com.google.common.collect.ImmutableBiMap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Sets;
import com.thinkaurelius.titan.core.TitanException;
import com.thinkaurelius.titan.diskstorage.util.time.TimestampProvider;
import com.thinkaurelius.titan.diskstorage.util.time.Timestamps;
import com.thinkaurelius.titan.diskstorage.*;
import com.thinkaurelius.titan.diskstorage.common.DistributedStoreManager;
import com.thinkaurelius.titan.diskstorage.configuration.ConfigNamespace;
import com.thinkaurelius.titan.diskstorage.configuration.ConfigOption;
import com.thinkaurelius.titan.diskstorage.configuration.Configuration;
import com.thinkaurelius.titan.diskstorage.keycolumnvalue.*;
import com.thinkaurelius.titan.diskstorage.util.*;
import com.thinkaurelius.titan.graphdb.configuration.GraphDatabaseConfiguration;
import com.thinkaurelius.titan.graphdb.configuration.PreInitializeConfigOptions;
import com.thinkaurelius.titan.util.system.IOUtils;
import com.thinkaurelius.titan.util.system.NetworkUtil;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.VersionInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import static com.thinkaurelius.titan.diskstorage.Backend.*;
import static com.thinkaurelius.titan.graphdb.configuration.GraphDatabaseConfiguration.STORAGE_NS;
import static com.thinkaurelius.titan.graphdb.configuration.GraphDatabaseConfiguration.SYSTEM_PROPERTIES_STORE_NAME;
/**
* Storage Manager for HBase
*
* @author Dan LaRocque <dalaro@hopcount.org>
*/
@PreInitializeConfigOptions
public class HBaseStoreManager extends DistributedStoreManager implements KeyColumnValueStoreManager {
private static final Logger logger = LoggerFactory.getLogger(HBaseStoreManager.class);
public static final ConfigOption<String> HBASE_TABLE = new ConfigOption<String>(STORAGE_NS,"tablename",
"The name of the table to store Titan's data in",
ConfigOption.Type.LOCAL, "titan");
public static final ConfigOption<Boolean> SHORT_CF_NAMES = new ConfigOption<Boolean>(STORAGE_NS,"short-cf-names",
"Whether to automatically shorten the names of frequently used column families to preserve space",
ConfigOption.Type.FIXED, true);
public static final String COMPRESSION_DEFAULT = "-DEFAULT-";
public static final ConfigOption<String> COMPRESSION = new ConfigOption<String>(STORAGE_NS,"compression-algorithm",
"An HBase Compression.Algorithm enum string which will be applied to newly created column families",
ConfigOption.Type.MASKABLE, "GZ");
public static final int MIN_REGION_COUNT = 3;
public static final ConfigOption<Boolean> SKIP_SCHEMA_CHECK = new ConfigOption<Boolean>(STORAGE_NS,"skip-schema-check",
"Assume that Titan's HBase table and column families already exist. " +
"When this is true, Titan will not check for the existence of its table/CFs, " +
"nor will it attempt to create them under any circumstances. This is useful " +
"when running Titan without HBase admin privileges.",
ConfigOption.Type.MASKABLE, false);
/**
* The total number of HBase regions to create with Titan's table. This
* setting only affects table creation; this normally happens just once when
* Titan connects to an HBase backend for the first time.
*/
public static final ConfigOption<Integer> REGION_COUNT = new ConfigOption<Integer>(STORAGE_NS, "region-count",
"The number of initial regions set when creating Titan's HBase table",
ConfigOption.Type.MASKABLE, Integer.class, new Predicate<Integer>() {
@Override
public boolean apply(Integer input) {
return null != input && MIN_REGION_COUNT <= input;
}
}
);
public static final ConfigOption<Integer> REGIONS_PER_SERVER = new ConfigOption<Integer>(STORAGE_NS, "regions-per-server",
"The number of regions per regionserver to set when creating Titan's HBase table",
ConfigOption.Type.MASKABLE, Integer.class);
/**
* If this key is present in either the JVM system properties or the process
* environment (checked in the listed order, first hit wins), then its value
* must be the full package and class name of an implementation of
* {@link HBaseCompat} that has a no-arg public constructor.
* <p>
* When this <b>is not</b> set, Titan attempts to automatically detect the
* HBase runtime version by calling {@link VersionInfo#getVersion()}. Titan
* then checks the returned version string against a hard-coded list of
* supported version prefixes and instantiates the associated compat layer
* if a match is found.
* <p>
* When this <b>is</b> set, Titan will not call
* {@code VersionInfo.getVersion()} or read its hard-coded list of supported
* version prefixes. Titan will instead attempt to instantiate the class
* specified (via the no-arg constructor which must exist) and then attempt
* to cast it to HBaseCompat and use it as such. Titan will assume the
* supplied implementation is compatible with the runtime HBase version and
* make no attempt to verify that assumption.
* <p>
* Setting this key incorrectly could cause runtime exceptions at best or
* silent data corruption at worst. This setting is intended for users
* running exotic HBase implementations that don't support VersionInfo or
* implementations which return values from {@code VersionInfo.getVersion()}
* that are inconsistent with Apache's versioning convention. It may also be
* useful to users who want to run against a new release of HBase that Titan
* doesn't yet officially support.
*
*/
public static final ConfigOption<String> HBASE_COMPAT_CLASS = new ConfigOption<String>(STORAGE_NS, "hbase-compat-class",
"The package and class name of the HBaseCompat implementation. HBaseCompat masks version-specific HBase API differences. " +
"When this option is unset, Titan calls HBase's VersionInfo.getVersion() and loads the matching compat class " +
"at runtime. Setting this option forces Titan to instead reflectively load and instantiate the specified class.",
ConfigOption.Type.MASKABLE, String.class);
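// Illustrative configuration sketch (both the property path and the compat class name are
// assumptions, not verified against this Titan version):
//
//   storage.hbase-compat-class = com.thinkaurelius.titan.diskstorage.hbase.HBaseCompat0_98
//
// When this option is set, HBaseCompatLoader.getCompat() in the constructor below reflectively
// instantiates the given class instead of auto-detecting the HBase version via VersionInfo.getVersion().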
public static final int PORT_DEFAULT = 9160;
public static final ConfigNamespace HBASE_CONFIGURATION_NAMESPACE =
new ConfigNamespace(STORAGE_NS,"hbase-config","General HBase configuration options",true);
private static final BiMap<String, String> SHORT_CF_NAME_MAP =
ImmutableBiMap.<String, String>builder()
.put(INDEXSTORE_NAME, "g")
.put(INDEXSTORE_NAME + LOCK_STORE_SUFFIX, "h")
.put(ID_STORE_NAME, "i")
.put(EDGESTORE_NAME, "e")
.put(EDGESTORE_NAME + LOCK_STORE_SUFFIX, "f")
.put(SYSTEM_PROPERTIES_STORE_NAME, "s")
.put(SYSTEM_PROPERTIES_STORE_NAME + LOCK_STORE_SUFFIX, "t")
.put(SYSTEM_MGMT_LOG_NAME, "m")
.put(SYSTEM_TX_LOG_NAME, "l")
.build();
private static final StaticBuffer FOUR_ZERO_BYTES = BufferUtil.zeroBuffer(4);
static {
// Verify that shortCfNameMap is injective
// Should be guaranteed by Guava BiMap, but it doesn't hurt to check
Preconditions.checkArgument(null != SHORT_CF_NAME_MAP);
Collection<String> shorts = SHORT_CF_NAME_MAP.values();
Preconditions.checkArgument(Sets.newHashSet(shorts).size() == shorts.size());
}
// Immutable instance fields
private final String tableName;
private final String compression;
private final int regionCount;
private final int regionsPerServer;
private final HConnection cnx;
private final org.apache.hadoop.conf.Configuration hconf;
private final boolean shortCfNames;
private final boolean skipSchemaCheck;
private final String compatClass;
private final HBaseCompat compat;
// Mutable instance state
private final ConcurrentMap<String, HBaseKeyColumnValueStore> openStores;
public HBaseStoreManager(com.thinkaurelius.titan.diskstorage.configuration.Configuration config) throws BackendException {
super(config, PORT_DEFAULT);
checkConfigDeprecation(config);
this.tableName = config.get(HBASE_TABLE);
this.compression = config.get(COMPRESSION);
this.regionCount = config.has(REGION_COUNT) ? config.get(REGION_COUNT) : -1;
this.regionsPerServer = config.has(REGIONS_PER_SERVER) ? config.get(REGIONS_PER_SERVER) : -1;
this.skipSchemaCheck = config.get(SKIP_SCHEMA_CHECK);
this.compatClass = config.has(HBASE_COMPAT_CLASS) ? config.get(HBASE_COMPAT_CLASS) : null;
this.compat = HBaseCompatLoader.getCompat(compatClass);
/*
* Specifying both region count options is permitted but may be
* indicative of a misunderstanding, so issue a warning.
*/
if (config.has(REGIONS_PER_SERVER) && config.has(REGION_COUNT)) {
logger.warn("Both {} and {} are set in Titan's configuration, but "
+ "the former takes precedence and the latter will be ignored.",
REGION_COUNT, REGIONS_PER_SERVER);
}
/* This static factory calls HBaseConfiguration.addHbaseResources(),
* which in turn applies the contents of hbase-default.xml and then
* applies the contents of hbase-site.xml.
*/
this.hconf = HBaseConfiguration.create();
// Copy a subset of our commons config into a Hadoop config
int keysLoaded=0;
Map<String,Object> configSub = config.getSubset(HBASE_CONFIGURATION_NAMESPACE);
for (Map.Entry<String,Object> entry : configSub.entrySet()) {
logger.debug("HBase configuration: setting {}={}", entry.getKey(), entry.getValue());
if (entry.getValue()==null) continue;
hconf.set(entry.getKey(), entry.getValue().toString());
keysLoaded++;
}
// Special case for STORAGE_HOSTS
if (config.has(GraphDatabaseConfiguration.STORAGE_HOSTS)) {
String zkQuorumKey = "hbase.zookeeper.quorum";
String csHostList = Joiner.on(",").join(config.get(GraphDatabaseConfiguration.STORAGE_HOSTS));
hconf.set(zkQuorumKey, csHostList);
logger.info("Copied host list from {} to {}: {}", GraphDatabaseConfiguration.STORAGE_HOSTS, zkQuorumKey, csHostList);
}
logger.debug("HBase configuration: set a total of {} configuration values", keysLoaded);
this.shortCfNames = config.get(SHORT_CF_NAMES);
try {
this.cnx = HConnectionManager.createConnection(hconf);
} catch (ZooKeeperConnectionException e) {
throw new PermanentBackendException(e);
} catch (@SuppressWarnings("hiding") IOException e) { // not thrown in 0.94, but thrown in 0.96+
throw new PermanentBackendException(e);
}
openStores = new ConcurrentHashMap<String, HBaseKeyColumnValueStore>();
}
@Override
public Deployment getDeployment() {
List<KeyRange> local;
try {
local = getLocalKeyPartition();
return null != local && !local.isEmpty() ? Deployment.LOCAL : Deployment.REMOTE;
} catch (BackendException e) {
// propagating StorageException might be a better approach
throw new RuntimeException(e);
}
}
@Override
public String toString() {
return "hbase[" + tableName + "@" + super.toString() + "]";
}
@Override
public void close() {
openStores.clear();
IOUtils.closeQuietly(cnx);
}
@Override
public StoreFeatures getFeatures() {
Configuration c = GraphDatabaseConfiguration.buildConfiguration();
StandardStoreFeatures.Builder fb = new StandardStoreFeatures.Builder()
.orderedScan(true).unorderedScan(true).batchMutation(true)
.multiQuery(true).distributed(true).keyOrdered(true)
// .timestamps(true)
.keyConsistent(c);
try {
fb.localKeyPartition(getDeployment() == Deployment.LOCAL);
} catch (Exception e) {
logger.warn("Unexpected exception during getDeployment()", e);
}
return fb.build();
}
@Override
public void mutateMany(Map<String, Map<StaticBuffer, KCVMutation>> mutations, StoreTransaction txh) throws BackendException {
final MaskedTimestamp commitTime = new MaskedTimestamp(txh);
// In case of an addition and deletion with identical timestamps, the
// deletion tombstone wins.
// http://hbase.apache.org/book/versions.html#d244e4250
Map<StaticBuffer, Pair<Put, Delete>> commandsPerKey =
convertToCommands(
mutations,
commitTime.getAdditionTime(times.getUnit()),
commitTime.getDeletionTime(times.getUnit()));
List<Row> batch = new ArrayList<Row>(commandsPerKey.size()); // actual batch operation
// convert sorted commands into representation required for 'batch' operation
for (Pair<Put, Delete> commands : commandsPerKey.values()) {
if (commands.getFirst() != null)
batch.add(commands.getFirst());
if (commands.getSecond() != null)
batch.add(commands.getSecond());
}
try {
HTableInterface table = null;
try {
table = cnx.getTable(tableName);
table.batch(batch);
table.flushCommits();
} finally {
IOUtils.closeQuietly(table);
}
} catch (IOException e) {
throw new TemporaryBackendException(e);
} catch (InterruptedException e) {
throw new TemporaryBackendException(e);
}
sleepAfterWrite(txh, commitTime);
}
@Override
public KeyColumnValueStore openDatabase(final String longName) throws BackendException {
HBaseKeyColumnValueStore store = openStores.get(longName);
if (store == null) {
final String cfName = shortCfNames ? shortenCfName(longName) : longName;
HBaseKeyColumnValueStore newStore = new HBaseKeyColumnValueStore(this, cnx, tableName, cfName, longName);
store = openStores.putIfAbsent(longName, newStore); // nothing bad happens if we lose to another thread
if (store == null ) {
if (!skipSchemaCheck) {
ensureColumnFamilyExists(tableName, cfName);
}
store = newStore;
}
}
return store;
}
@Override
public StoreTransaction beginTransaction(final BaseTransactionConfig config) throws BackendException {
return new HBaseTransaction(config);
}
@Override
public String getName() {
return tableName;
}
/**
* Deletes the specified table with all its columns.
* ATTENTION: Invoking this method will delete the table if it exists and therefore causes data loss.
*/
@Override
public void clearStorage() throws BackendException {
HBaseAdmin adm = getAdminInterface();
try { // first of all, check if table exists, if not - we are done
if (!adm.tableExists(tableName)) {
logger.debug("clearStorage() called before table {} was created, skipping.", tableName);
return;
}
} catch (IOException e) {
throw new TemporaryBackendException(e);
}
/*
* The commented code is the recommended way to truncate an HBase table.
* But it's so slow. The titan-hbase test suite takes 18 minutes to
* complete on my machine using the Scanner method. It takes 1 hour 17
* minutes to complete using the disable-delete-and-recreate method
* commented below. (after - before) below is usually between 3000 and
* 3100 ms on my machine, but it runs so many times in the test suite
* that it adds up.
*/
// long before = System.currentTimeMillis();
// try {
// adm.disableTable(tableName);
// adm.deleteTable(tableName);
// } catch (IOException e) {
// throw new PermanentStorageException(e);
// }
// ensureTableExists(tableName);
// long after = System.currentTimeMillis();
// logger.debug("Dropped and recreated table {} in {} ms", tableName, after - before);
HTable table = null;
try {
table = new HTable(hconf, tableName);
Scan scan = new Scan();
scan.setBatch(100);
scan.setCacheBlocks(false);
scan.setCaching(2000);
scan.setTimeRange(0, Long.MAX_VALUE);
scan.setMaxVersions(1);
ResultScanner scanner = null;
long ts = -1;
try {
scanner = table.getScanner(scan);
for (Result res : scanner) {
Delete d = new Delete(res.getRow());
//Despite comment in Delete.java, LATEST_TIMESTAMP seems to be System.currentTimeMillis()
//LATEST_TIMESTAMP is the default for the constructor invoked above, so it's redundant anyway
//d.setTimestamp(HConstants.LATEST_TIMESTAMP);
if (-1 == ts)
ts = guessTimestamp(res);
d.setTimestamp(ts);
table.delete(d);
}
} finally {
IOUtils.closeQuietly(scanner);
}
} catch (IOException e) {
throw new TemporaryBackendException(e);
} finally {
IOUtils.closeQuietly(table);
}
}
@Override
public List<KeyRange> getLocalKeyPartition() throws BackendException {
List<KeyRange> result = new LinkedList<KeyRange>();
HTable table = null;
try {
ensureTableExists(tableName, getCfNameForStoreName(GraphDatabaseConfiguration.SYSTEM_PROPERTIES_STORE_NAME));
table = new HTable(hconf, tableName);
Map<KeyRange, ServerName> normed =
normalizeKeyBounds(table.getRegionLocations());
for (Map.Entry<KeyRange, ServerName> e : normed.entrySet()) {
if (NetworkUtil.isLocalConnection(e.getValue().getHostname())) {
result.add(e.getKey());
logger.debug("Found local key/row partition {} on host {}", e.getKey(), e.getValue());
} else {
logger.debug("Discarding remote {}", e.getValue());
}
}
} catch (MasterNotRunningException e) {
logger.warn("Unexpected MasterNotRunningException", e);
} catch (ZooKeeperConnectionException e) {
logger.warn("Unexpected ZooKeeperConnectionException", e);
} catch (IOException e) {
logger.warn("Unexpected IOException", e);
} finally {
if (null != table) {
try {
table.close();
} catch (IOException e) {
logger.warn("Failed to close HTable {}", table, e);
}
}
}
return result;
}
/**
* Given a map produced by {@link HTable#getRegionLocations()}, transform
* each key from an {@link HRegionInfo} to a {@link KeyRange} expressing the
* region's start and end key bounds using Titan-partitioning-friendly
* conventions (start inclusive, end exclusive, zero bytes appended where
* necessary to make all keys at least 4 bytes long).
* <p/>
* This method iterates over the entries in its map parameter and performs
* the following conditional conversions on its keys. "Require" below means
* either a {@link Preconditions} invocation or an assertion. HRegionInfo
* sometimes returns start and end keys of zero length; this method replaces
* zero length keys with null before doing any of the checks described
* below. The parameter map and the values it contains are only read and
* never modified.
*
* <ul>
* <li>If an entry's HRegionInfo has null start and end keys, then first
* require that the parameter map is a singleton, and then return a
* single-entry map whose {@code KeyRange} has start and end buffers that
* are both four bytes of zeros.</li>
* <li>If the entry has a null end key (but non-null start key), put an
* equivalent entry in the result map with a start key identical to the
* input, except that zeros are appended to values less than 4 bytes long,
* and an end key that is four bytes of zeros.
* <li>If the entry has a null start key (but non-null end key), put an
* equivalent entry in the result map where the start key is four bytes of
* zeros, and the end key has zeros appended, if necessary, to make it at
* least 4 bytes long, after which one is added to the padded value in
* unsigned 32-bit arithmetic with overflow allowed.</li>
* <li>Any entry which matches none of the above criteria results in an
* equivalent entry in the returned map, except that zeros are appended to
* both keys to make each at least 4 bytes long, and the end key is then
* incremented as described in the last bullet point.</li>
* </ul>
*
* After iterating over the parameter map, this method checks that it either
* saw no entries with null keys, one entry with a null start key and a
* different entry with a null end key, or one entry with both start and end
* keys null. If any null keys are observed besides these three cases, the
* method will die with a precondition failure.
*
* @param raw
* A map of HRegionInfo and ServerName from HBase
* @return Titan-friendly expression of each region's rowkey boundaries
*/
private Map<KeyRange, ServerName> normalizeKeyBounds(NavigableMap<HRegionInfo, ServerName> raw) {
Map.Entry<HRegionInfo, ServerName> nullStart = null;
Map.Entry<HRegionInfo, ServerName> nullEnd = null;
ImmutableMap.Builder<KeyRange, ServerName> b = ImmutableMap.builder();
for (Map.Entry<HRegionInfo, ServerName> e : raw.entrySet()) {
HRegionInfo regionInfo = e.getKey();
byte startKey[] = regionInfo.getStartKey();
byte endKey[] = regionInfo.getEndKey();
if (0 == startKey.length) {
startKey = null;
logger.trace("Converted zero-length HBase startKey byte array to null");
}
if (0 == endKey.length) {
endKey = null;
logger.trace("Converted zero-length HBase endKey byte array to null");
}
if (null == startKey && null == endKey) {
Preconditions.checkState(1 == raw.size());
logger.debug("HBase table {} has a single region {}", tableName, regionInfo);
// Choose arbitrary shared value = startKey = endKey
return b.put(new KeyRange(FOUR_ZERO_BYTES, FOUR_ZERO_BYTES), e.getValue()).build();
} else if (null == startKey) {
logger.debug("Found HRegionInfo with null startKey on server {}: {}", e.getValue(), regionInfo);
Preconditions.checkState(null == nullStart);
nullStart = e;
// I thought endBuf would be inclusive from the HBase javadoc, but in practice it is exclusive
StaticBuffer endBuf = StaticArrayBuffer.of(zeroExtend(endKey));
// Replace null start key with zeroes
b.put(new KeyRange(FOUR_ZERO_BYTES, endBuf), e.getValue());
} else if (null == endKey) {
logger.debug("Found HRegionInfo with null endKey on server {}: {}", e.getValue(), regionInfo);
Preconditions.checkState(null == nullEnd);
nullEnd = e;
// Replace null end key with zeroes
b.put(new KeyRange(StaticArrayBuffer.of(zeroExtend(startKey)), FOUR_ZERO_BYTES), e.getValue());
} else {
Preconditions.checkState(null != startKey);
Preconditions.checkState(null != endKey);
// Convert HBase's inclusive end keys into exclusive Titan end keys
StaticBuffer startBuf = StaticArrayBuffer.of(zeroExtend(startKey));
StaticBuffer endBuf = StaticArrayBuffer.of(zeroExtend(endKey));
KeyRange kr = new KeyRange(startBuf, endBuf);
b.put(kr, e.getValue());
logger.debug("Found HRegionInfo with non-null end and start keys on server {}: {}", e.getValue(), regionInfo);
}
}
// Require either no null key bounds or a pair of them
Preconditions.checkState(!(null == nullStart ^ null == nullEnd));
// Check that every key in the result is at least 4 bytes long
Map<KeyRange, ServerName> result = b.build();
for (KeyRange kr : result.keySet()) {
Preconditions.checkState(4 <= kr.getStart().length());
Preconditions.checkState(4 <= kr.getEnd().length());
}
return result;
}
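// Worked example of the normalization above (illustrative only): an HRegionInfo with
// startKey = {0x0A} and endKey of zero length falls into the "null end key" case, so the
// resulting entry is a KeyRange with start {0x0A, 0x00, 0x00, 0x00} (the start key
// zero-extended to 4 bytes) and end {0x00, 0x00, 0x00, 0x00} (FOUR_ZERO_BYTES replacing
// the missing end key).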
/**
* If the parameter is shorter than 4 bytes, then create and return a new 4
* byte array with the input array's bytes followed by zero bytes. Otherwise
* return the parameter.
*
* @param dataToPad non-null but possibly zero-length byte array
* @return either the parameter or a new array
*/
private final byte[] zeroExtend(byte[] dataToPad) {
assert null != dataToPad;
final int targetLength = 4;
if (targetLength <= dataToPad.length)
return dataToPad;
byte padded[] = new byte[targetLength];
for (int i = 0; i < dataToPad.length; i++)
padded[i] = dataToPad[i];
for (int i = dataToPad.length; i < padded.length; i++)
padded[i] = (byte)0;
return padded;
}
private String shortenCfName(String longName) throws PermanentBackendException {
final String s;
if (SHORT_CF_NAME_MAP.containsKey(longName)) {
s = SHORT_CF_NAME_MAP.get(longName);
Preconditions.checkNotNull(s);
logger.debug("Substituted default CF name \"{}\" with short form \"{}\" to reduce HBase KeyValue size", longName, s);
} else {
if (SHORT_CF_NAME_MAP.containsValue(longName)) {
String fmt = "Must use CF long-form name \"%s\" instead of the short-form name \"%s\" when configured with %s=true";
String msg = String.format(fmt, SHORT_CF_NAME_MAP.inverse().get(longName), longName, SHORT_CF_NAMES.getName());
throw new PermanentBackendException(msg);
}
s = longName;
logger.debug("Kept default CF name \"{}\" because it has no associated short form", s);
}
return s;
}
private HTableDescriptor ensureTableExists(String tableName, String initialCFName) throws BackendException {
HBaseAdmin adm = getAdminInterface();
HTableDescriptor desc;
try { // Create our table, if necessary
/*
* Some HBase versions/impls respond badly to attempts to create a
* table without at least one CF. See #661. Creating a CF along with
* the table avoids HBase carping.
*/
if (adm.tableExists(tableName)) {
desc = adm.getTableDescriptor(tableName.getBytes());
} else {
desc = createTable(tableName, initialCFName, adm);
}
} catch (IOException e) {
throw new TemporaryBackendException(e);
}
return desc;
}
private HTableDescriptor createTable(String tableName, String cfName, HBaseAdmin adm) throws IOException {
HTableDescriptor desc = compat.newTableDescriptor(tableName);
HColumnDescriptor cdesc = new HColumnDescriptor(cfName);
setCFOptions(cdesc);
desc.addFamily(cdesc);
int count; // total regions to create
String src;
if (MIN_REGION_COUNT <= (count = regionCount)) {
src = "region count configuration";
} else if (0 < regionsPerServer && MIN_REGION_COUNT <= (count = regionsPerServer * getServerCount(adm))) {
src = "ClusterStatus server count";
} else {
count = -1;
src = "default";
}
if (MIN_REGION_COUNT < count) {
adm.createTable(desc, getStartKey(count), getEndKey(count), count);
logger.debug("Created table {} with region count {} from {}", tableName, count, src);
} else {
adm.createTable(desc);
logger.debug("Created table {} with default start key, end key, and region count", tableName);
}
return desc;
}
/**
* This method generates the second argument to
* {@link HBaseAdmin#createTable(HTableDescriptor, byte[], byte[], int)}.
* <p/>
* From the {@code createTable} javadoc:
* "The start key specified will become the end key of the first region of
* the table, and the end key specified will become the start key of the
* last region of the table (the first region has a null start key and
* the last region has a null end key)"
* <p/>
* To summarize, the {@code createTable} argument called "startKey" is
* actually the end key of the first region.
*/
private byte[] getStartKey(int regionCount) {
ByteBuffer regionWidth = ByteBuffer.allocate(4);
regionWidth.putInt((int)(((1L << 32) - 1L) / regionCount)).flip();
return StaticArrayBuffer.of(regionWidth).getBytes(0, 4);
}
/**
* Companion to {@link #getStartKey(int)}. See its javadoc for details.
*/
private byte[] getEndKey(int regionCount) {
ByteBuffer regionWidth = ByteBuffer.allocate(4);
regionWidth.putInt((int)(((1L << 32) - 1L) / regionCount * (regionCount - 1))).flip();
return StaticArrayBuffer.of(regionWidth).getBytes(0, 4);
}
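// Worked example for getStartKey()/getEndKey() (illustrative only): with regionCount = 4,
// the region width is (2^32 - 1) / 4 = 1073741823 = 0x3FFFFFFF, so getStartKey(4) returns
// {0x3F, 0xFF, 0xFF, 0xFF} (the end key of the first region) and getEndKey(4) returns
// 0x3FFFFFFF * 3 = 0xBFFFFFFD, i.e. {0xBF, 0xFF, 0xFF, 0xFD} (the start key of the last
// region), splitting the 4-byte key space into regions of roughly equal width.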
private void ensureColumnFamilyExists(String tableName, String columnFamily) throws BackendException {
HBaseAdmin adm = getAdminInterface();
HTableDescriptor desc = ensureTableExists(tableName, columnFamily);
Preconditions.checkNotNull(desc);
HColumnDescriptor cf = desc.getFamily(columnFamily.getBytes());
// Create our column family, if necessary
if (cf == null) {
try {
if (!adm.isTableDisabled(tableName)) {
adm.disableTable(tableName);
}
} catch (TableNotEnabledException e) {
logger.debug("Table {} already disabled", tableName);
} catch (IOException e) {
throw new TemporaryBackendException(e);
}
try {
HColumnDescriptor cdesc = new HColumnDescriptor(columnFamily);
setCFOptions(cdesc);
adm.addColumn(tableName, cdesc);
try {
logger.debug("Added HBase ColumnFamily {}, waiting for 1 sec. to propogate.", columnFamily);
Thread.sleep(1000L);
} catch (InterruptedException ie) {
throw new TemporaryBackendException(ie);
}
adm.enableTable(tableName);
} catch (TableNotFoundException ee) {
logger.error("TableNotFoundException", ee);
throw new PermanentBackendException(ee);
} catch (org.apache.hadoop.hbase.TableExistsException ee) {
logger.debug("Swallowing exception {}", ee);
} catch (IOException ee) {
throw new TemporaryBackendException(ee);
}
}
}
private static long guessTimestamp(Result res) {
Long sampleTime = res.getMap().firstEntry().getValue().firstEntry().getValue().firstEntry().getKey();
// Estimate timestamp unit from order of magnitude assuming UNIX epoch -- not compatible with arbitrary custom timestamps
Preconditions.checkArgument(null != sampleTime);
final double exponent = Math.log10(sampleTime);
final TimestampProvider prov;
/*
* These exponent brackets approximately cover UNIX Epoch timestamps
* between:
*
* Sat Sep 8 21:46:40 EDT 2001
*
* Thu Sep 26 21:46:40 EDT 33658
*
* Even though it won't roll over, this timestamp-guessing kludge should
* still eventually be refactored away to support arbitrary timestamps
* provided by the user. There's no good reason clearStorage() should be
* timestamp sensitive, it's just that truncating tables in the way
* recommended by HBase is so incredibly slow that it more than doubles
* the walltime taken by the titan-hbase test suite.
*/
if (12 <= exponent && exponent < 15)
prov = Timestamps.MILLI;
else if (15 <= exponent && exponent < 18)
prov = Timestamps.MICRO;
else if (18 <= exponent && exponent < 21)
prov = Timestamps.NANO;
else
throw new IllegalStateException("Timestamp " + sampleTime + " does not match expected UNIX Epoch timestamp in milli-, micro-, or nanosecond units. clearStorage() does not support custom timestamps.");
logger.debug("Guessed timestamp provider " + prov);
return prov.getTime().getNativeTimestamp();
}
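/*
 * Illustrative example, assuming a millisecond UNIX timestamp such as
 * 1400000000000L: Math.log10(1.4e12) is roughly 12.15, which falls in the
 * [12, 15) bracket, so Timestamps.MILLI is guessed. A microsecond value
 * around 1.4e15 falls in [15, 18) and a nanosecond value around 1.4e18
 * falls in [18, 21).
 */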
private void setCFOptions(HColumnDescriptor cdesc) {
if (null != compression && !compression.equals(COMPRESSION_DEFAULT))
compat.setCompression(cdesc, compression);
}
private HBaseAdmin getAdminInterface() {
try {
return new HBaseAdmin(hconf);
} catch (IOException e) {
throw new TitanException(e);
}
}
/**
* Convert Titan internal Mutation representation into HBase native commands.
*
* @param mutations Mutations to convert into HBase commands.
* @param putTimestamp The timestamp to use for Put commands.
* @param delTimestamp The timestamp to use for Delete commands.
* @return Commands sorted by key converted from Titan internal representation.
* @throws com.thinkaurelius.titan.diskstorage.PermanentBackendException
*/
private Map<StaticBuffer, Pair<Put, Delete>> convertToCommands(Map<String, Map<StaticBuffer, KCVMutation>> mutations,
final long putTimestamp,
final long delTimestamp) throws PermanentBackendException {
Map<StaticBuffer, Pair<Put, Delete>> commandsPerKey = new HashMap<StaticBuffer, Pair<Put, Delete>>();
for (Map.Entry<String, Map<StaticBuffer, KCVMutation>> entry : mutations.entrySet()) {
String cfString = getCfNameForStoreName(entry.getKey());
byte[] cfName = cfString.getBytes();
for (Map.Entry<StaticBuffer, KCVMutation> m : entry.getValue().entrySet()) {
byte[] key = m.getKey().as(StaticBuffer.ARRAY_FACTORY);
KCVMutation mutation = m.getValue();
Pair<Put, Delete> commands = commandsPerKey.get(m.getKey());
if (commands == null) {
commands = new Pair<Put, Delete>();
commandsPerKey.put(m.getKey(), commands);
}
if (mutation.hasDeletions()) {
if (commands.getSecond() == null) {
Delete d = new Delete(key);
d.setTimestamp(delTimestamp);
commands.setSecond(d);
}
for (StaticBuffer b : mutation.getDeletions()) {
commands.getSecond().deleteColumns(cfName, b.as(StaticBuffer.ARRAY_FACTORY), delTimestamp);
}
}
if (mutation.hasAdditions()) {
if (commands.getFirst() == null) {
Put p = new Put(key, putTimestamp);
commands.setFirst(p);
}
for (Entry e : mutation.getAdditions()) {
commands.getFirst().add(cfName,
e.getColumnAs(StaticBuffer.ARRAY_FACTORY),
putTimestamp,
e.getValueAs(StaticBuffer.ARRAY_FACTORY));
}
}
}
}
return commandsPerKey;
}
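/*
 * Illustrative example, assuming a single store "edgestore" with one key
 * that has both additions and deletions: the returned map would contain
 * one entry whose Pair holds a Put (carrying all added columns with
 * putTimestamp) and a Delete (carrying all deleted columns with
 * delTimestamp), so the caller can batch both HBase commands for that row
 * in one pass.
 */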
private String getCfNameForStoreName(String storeName) throws PermanentBackendException {
return shortCfNames ? shortenCfName(storeName) : storeName;
}
/**
* Estimate the number of regionservers in the HBase cluster by calling
* {@link HBaseAdmin#getClusterStatus()} and then
* {@link ClusterStatus#getServers()} and finally {@code size()} on the
* returned server list.
*
* @param adm
* HBase admin interface
* @return the number of servers in the cluster or -1 if an error occurred
*/
private int getServerCount(HBaseAdmin adm) {
int serverCount = -1;
try {
serverCount = adm.getClusterStatus().getServers().size();
logger.debug("Read {} servers from HBase ClusterStatus", serverCount);
} catch (IOException e) {
logger.debug("Unable to retrieve HBase cluster status", e);
}
return serverCount;
}
private void checkConfigDeprecation(com.thinkaurelius.titan.diskstorage.configuration.Configuration config) {
if (config.has(GraphDatabaseConfiguration.STORAGE_PORT)) {
logger.warn("The configuration property {} is ignored for HBase. Set hbase.zookeeper.property.clientPort in hbase-site.xml or {}.hbase.zookeeper.property.clientPort in Titan's configuration file.",
GraphDatabaseConfiguration.STORAGE_PORT, HBASE_CONFIGURATION_NAMESPACE);
}
}
}
|
package org.xwiki.test.ldap;
import java.security.Principal;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.jmock.Mock;
import org.jmock.core.Invocation;
import org.jmock.core.stub.CustomStub;
import org.xwiki.cache.Cache;
import org.xwiki.cache.CacheException;
import org.xwiki.cache.CacheFactory;
import org.xwiki.cache.config.CacheConfiguration;
import org.xwiki.cache.internal.DefaultCache;
import org.xwiki.model.reference.DocumentReference;
import org.xwiki.model.reference.DocumentReferenceResolver;
import org.xwiki.model.reference.EntityReference;
import org.xwiki.model.reference.EntityReferenceSerializer;
import com.xpn.xwiki.XWiki;
import com.xpn.xwiki.XWikiException;
import com.xpn.xwiki.doc.XWikiDocument;
import org.xwiki.rendering.syntax.Syntax;
import org.xwiki.test.ldap.framework.AbstractLDAPTestCase;
import org.xwiki.test.ldap.framework.LDAPTestSetup;
import com.xpn.xwiki.objects.BaseObject;
import com.xpn.xwiki.objects.classes.BaseClass;
import com.xpn.xwiki.store.XWikiStoreInterface;
import com.xpn.xwiki.user.api.XWikiGroupService;
import com.xpn.xwiki.user.impl.LDAP.LDAPProfileXClass;
import com.xpn.xwiki.user.impl.LDAP.XWikiLDAPAuthServiceImpl;
import com.xpn.xwiki.web.Utils;
/**
* Unit tests using an embedded LDAP server (Apache DS). These tests can be launched directly from the JUnit plugin of the IDE.
*
* @version $Id$
*/
public class XWikiLDAPAuthServiceImplTest extends AbstractLDAPTestCase
{
private static final String MAIN_WIKI_NAME = "xwiki";
private static final String USER_XCLASS = "XWiki.XWikiUsers";
private static final String GROUP_XCLASS = "XWiki.XWikiGroups";
/**
* Used to serialize a DocumentReference to a string without the wiki name.
*/
private EntityReferenceSerializer<String> localEntityReferenceSerializer;
private XWikiLDAPAuthServiceImpl ldapAuth;
private CacheFactory cacheFactory = new CacheFactory()
{
public <T> Cache<T> newCache(CacheConfiguration config) throws CacheException
{
return new DefaultCache<T>();
}
};
private Properties properties = new Properties();
private boolean isVirtualMode = false;
private Map<String, Map<String, XWikiDocument>> databases = new HashMap<String, Map<String, XWikiDocument>>();
private BaseClass userClass;
private BaseClass groupClass;
private Mock mockStore;
private Mock mockGroupService;
private Map<String, XWikiDocument> getDocuments(String database, boolean create) throws XWikiException
{
if (database == null) {
database = getContext().getDatabase();
}
if (database == null || database.length() == 0) {
database = MAIN_WIKI_NAME;
}
if (!this.databases.containsKey(database)) {
if (create) {
this.databases.put(database, new HashMap<String, XWikiDocument>());
} else {
throw new XWikiException(XWikiException.MODULE_XWIKI_STORE, XWikiException.ERROR_XWIKI_UNKNOWN,
"Database " + database + " does not exists.");
}
}
return this.databases.get(database);
}
private XWikiDocument getDocument(String documentFullName) throws XWikiException
{
XWikiDocument document = new XWikiDocument();
document.setFullName(documentFullName);
return getDocument(document);
}
private XWikiDocument getDocument(DocumentReference documentReference) throws XWikiException
{
XWikiDocument document = new XWikiDocument(documentReference);
return getDocument(document);
}
private XWikiDocument getDocument(XWikiDocument document) throws XWikiException
{
Map<String, XWikiDocument> docs = getDocuments(document.getDatabase(), false);
if (docs.containsKey(document.getFullName())) {
return docs.get(document.getFullName());
} else {
return document;
}
}
private void saveDocument(XWikiDocument document) throws XWikiException
{
document.setNew(false);
Map<String, XWikiDocument> database = getDocuments(document.getDatabase(), true);
database.remove(document.getFullName());
database.put(document.getFullName(), document);
}
private boolean documentExists(String documentFullName) throws XWikiException
{
return !getDocument(documentFullName).isNew();
}
@Override
public void setUp() throws Exception
{
super.setUp();
this.localEntityReferenceSerializer = Utils.getComponent(
EntityReferenceSerializer.TYPE_STRING, "local");
this.userClass = new BaseClass();
this.groupClass = new BaseClass();
getContext().setDatabase(MAIN_WIKI_NAME);
getContext().setMainXWiki(MAIN_WIKI_NAME);
this.databases.put(MAIN_WIKI_NAME, new HashMap<String, XWikiDocument>());
this.mockStore = mock(XWikiStoreInterface.class, new Class[] {}, new Object[] {});
this.mockStore.stubs().method("searchDocuments").will(returnValue(Collections.EMPTY_LIST));
this.mockGroupService = mock(XWikiGroupService.class, new Class[] {}, new Object[] {});
this.mockGroupService.stubs().method("getAllGroupsNamesForMember").will(returnValue(Collections.EMPTY_LIST));
this.mockGroupService.stubs().method("getAllMatchedGroups").will(returnValue(Collections.EMPTY_LIST));
Mock mockXWiki = mock(XWiki.class, new Class[] {}, new Object[] {});
mockXWiki.stubs().method("getStore").will(returnValue(mockStore.proxy()));
mockXWiki.stubs().method("getGroupService").will(returnValue(mockGroupService.proxy()));
mockXWiki.stubs().method("getCacheFactory").will(returnValue(this.cacheFactory));
mockXWiki.stubs().method("getXWikiPreference").will(returnValue(null));
mockXWiki.stubs().method("getXWikiPreferenceAsInt").will(throwException(new NumberFormatException("null")));
mockXWiki.stubs().method("isVirtualMode").will(returnValue(this.isVirtualMode));
mockXWiki.stubs().method("getDefaultDocumentSyntax").will(returnValue(Syntax.XWIKI_1_0.toIdString()));
mockXWiki.stubs().method("Param").will(new CustomStub("Implements XWiki.Param")
{
public Object invoke(Invocation invocation) throws Throwable
{
return properties.getProperty((String) invocation.parameterValues.get(0));
}
});
mockXWiki.stubs().method("ParamAsLong").will(new CustomStub("Implements XWiki.ParamAsLong")
{
public Object invoke(Invocation invocation) throws Throwable
{
return Long.parseLong(properties.getProperty((String) invocation.parameterValues.get(0)));
}
});
mockXWiki.stubs().method("getDocument").will(new CustomStub("Implements XWiki.getDocument")
{
public Object invoke(Invocation invocation) throws Throwable
{
Object document = invocation.parameterValues.get(0);
if (document instanceof String) {
return getDocument((String) document);
} else if (document instanceof EntityReference) {
DocumentReferenceResolver<EntityReference> resolver = Utils.getComponent(
DocumentReferenceResolver.TYPE_REFERENCE, "current");
return getDocument(resolver.resolve((EntityReference) document));
} else {
return getDocument((DocumentReference) document);
}
}
});
mockXWiki.stubs().method("saveDocument").will(new CustomStub("Implements XWiki.saveDocument")
{
public Object invoke(Invocation invocation) throws Throwable
{
saveDocument((XWikiDocument) invocation.parameterValues.get(0));
return null;
}
});
mockXWiki.stubs().method("exists").will(new CustomStub("Implements XWiki.exists")
{
public Object invoke(Invocation invocation) throws Throwable
{
return documentExists((String) invocation.parameterValues.get(0));
}
});
mockXWiki.stubs().method("getXClass").will(new CustomStub("Implements XWiki.getClass")
{
public Object invoke(Invocation invocation) throws Throwable
{
return getDocument(localEntityReferenceSerializer.serialize((EntityReference) invocation.parameterValues.get(0))).getXClass();
}
});
mockXWiki.stubs().method("search").will(returnValue(Collections.EMPTY_LIST));
this.userClass.setName(USER_XCLASS);
this.userClass.addTextField("first_name", "First Name", 30);
this.userClass.addTextField("last_name", "Last Name", 30);
this.userClass.addTextField("email", "e-Mail", 30);
this.userClass.addPasswordField("password", "Password", 10);
this.userClass.addTextField("customproperty", "Custom property", 10);
mockXWiki.stubs().method("getUserClass").will(returnValue(this.userClass));
this.groupClass.setName(GROUP_XCLASS);
this.groupClass.addTextField("member", "Member", 30);
mockXWiki.stubs().method("getGroupClass").will(returnValue(this.groupClass));
mockXWiki.stubs().method("createUser").will(new CustomStub("Implements XWiki.createUser")
{
public Object invoke(Invocation invocation) throws Throwable
{
XWikiDocument document = new XWikiDocument();
document.setFullName("XWiki." + invocation.parameterValues.get(0));
BaseObject newobject = new BaseObject();
newobject.setClassName(userClass.getName());
userClass.fromMap((Map) invocation.parameterValues.get(1), newobject);
document.addObject(userClass.getName(), newobject);
saveDocument(document);
return 1;
}
});
getContext().setWiki((XWiki) mockXWiki.proxy());
this.properties.setProperty("xwiki.authentication.ldap", "1");
this.properties.setProperty("xwiki.authentication.ldap.server", LDAPTestSetup.LDAP_SERVER);
this.properties.setProperty("xwiki.authentication.ldap.port", "" + LDAPTestSetup.getLDAPPort());
this.properties.setProperty("xwiki.authentication.ldap.base_DN", LDAPTestSetup.LDAP_BASEDN);
this.properties.setProperty("xwiki.authentication.ldap.bind_DN", LDAPTestSetup.LDAP_BINDDN_CN);
this.properties.setProperty("xwiki.authentication.ldap.bind_pass", LDAPTestSetup.LDAP_BINDPASS_CN);
this.properties.setProperty("xwiki.authentication.ldap.UID_attr", LDAPTestSetup.LDAP_USERUID_FIELD);
this.properties.setProperty("xwiki.authentication.ldap.groupcache_expiration", "1");
this.properties.setProperty("xwiki.authentication.ldap.try_local", "0");
this.properties.setProperty("xwiki.authentication.ldap.update_user", "1");
this.properties.setProperty("xwiki.authentication.ldap.fields_mapping",
"last_name=sn,first_name=givenName,fullname=cn,email=mail");
this.ldapAuth = new XWikiLDAPAuthServiceImpl();
}
private void assertAuthenticate(String login, String password, String storedDn) throws XWikiException
{
assertAuthenticate(login, password, "XWiki." + login, storedDn);
}
private void assertAuthenticate(String login, String password, String xwikiUserName, String storedDn)
throws XWikiException
{
assertAuthenticate(login, password, xwikiUserName, storedDn, login);
}
private void assertAuthenticate(String login, String password, String xwikiUserName, String storedDn,
String storedUid) throws XWikiException
{
Principal principal = this.ldapAuth.authenticate(login, password, getContext());
// Check that authentication returns a valid Principal
assertNotNull("Authentication failed", principal);
// Check that the returned Principal has the expected name
assertEquals("Wrong returned principal", xwikiUserName, principal.getName());
XWikiDocument userProfile = getDocument(xwikiUserName);
// check that the user has been created
assertTrue("The user profile has not been created", !userProfile.isNew());
BaseObject userProfileObj = userProfile.getObject(USER_XCLASS);
assertNotNull("The user profile document does not contain a user object", userProfileObj);
BaseObject ldapProfileObj = userProfile.getObject(LDAPProfileXClass.LDAP_XCLASS);
assertNotNull("The user profile document does not contain an LDAP object", ldapProfileObj);
assertEquals(storedDn, ldapProfileObj.getStringValue(LDAPProfileXClass.LDAP_XFIELD_DN));
assertEquals(storedUid, ldapProfileObj.getStringValue(LDAPProfileXClass.LDAP_XFIELD_UID));
}
/**
* Validate "simple" LDAP authentication.
*/
public void testAuthenticate() throws XWikiException
{
assertAuthenticate(LDAPTestSetup.HORATIOHORNBLOWER_CN, LDAPTestSetup.HORATIOHORNBLOWER_PWD,
LDAPTestSetup.HORATIOHORNBLOWER_DN);
}
/**
* Validate "simple" LDAP authentication fail with wrong user.
*/
public void testAuthenticateWithWrongUser() throws XWikiException
{
Principal principal = this.ldapAuth.authenticate("WrongUser", "WrongPass", getContext());
// Check that authentication returns a null Principal
assertNull(principal);
XWikiDocument userProfile = getDocument("XWiki.WrongUser");
// check that the user has not been created
assertTrue("The user profile has been created", userProfile.isNew());
}
/**
* Validate that the same user profile is used when authentication is called twice for the same user.
*/
public void testAuthenticateTwice() throws XWikiException
{
assertAuthenticate(LDAPTestSetup.HORATIOHORNBLOWER_CN, LDAPTestSetup.HORATIOHORNBLOWER_PWD,
LDAPTestSetup.HORATIOHORNBLOWER_DN);
this.mockStore.stubs().method("searchDocuments").will(
returnValue(Collections.singletonList(getDocument("XWiki." + LDAPTestSetup.HORATIOHORNBLOWER_CN))));
assertAuthenticate(LDAPTestSetup.HORATIOHORNBLOWER_CN, LDAPTestSetup.HORATIOHORNBLOWER_PWD,
LDAPTestSetup.HORATIOHORNBLOWER_DN);
}
/**
* Validate that the same user profile is used when authentication is called twice for the same user, even when the
* uid used has a different case.
*/
public void testAuthenticateTwiceAndDifferentCase() throws XWikiException
{
assertAuthenticate(LDAPTestSetup.HORATIOHORNBLOWER_CN, LDAPTestSetup.HORATIOHORNBLOWER_PWD,
LDAPTestSetup.HORATIOHORNBLOWER_DN);
this.mockStore.stubs().method("searchDocuments").will(
returnValue(Collections.singletonList(getDocument("XWiki." + LDAPTestSetup.HORATIOHORNBLOWER_CN))));
assertAuthenticate(LDAPTestSetup.HORATIOHORNBLOWER_CN.toUpperCase(), LDAPTestSetup.HORATIOHORNBLOWER_PWD,
"XWiki." + LDAPTestSetup.HORATIOHORNBLOWER_CN, LDAPTestSetup.HORATIOHORNBLOWER_DN,
LDAPTestSetup.HORATIOHORNBLOWER_CN);
}
/**
* Validate "simple" LDAP authentication when uid contains point(s).
*/
public void testAuthenticateWhenUidContainsPoints() throws XWikiException
{
assertAuthenticate(LDAPTestSetup.USERWITHPOINTS_CN, LDAPTestSetup.USERWITHPOINTS_PWD, "XWiki."
+ LDAPTestSetup.USERWITHPOINTS_CN.replaceAll("\\.", ""), LDAPTestSetup.USERWITHPOINTS_DN);
}
/**
* Validate that a different profile is used for a different uid containing dots but having the same cleaned uid.
*/
public void testAuthenticateTwiceWhenDifferentUsersAndUidContainsPoints() throws XWikiException
{
assertAuthenticate(LDAPTestSetup.USERWITHPOINTS_CN, LDAPTestSetup.USERWITHPOINTS_PWD, "XWiki."
+ LDAPTestSetup.USERWITHPOINTS_CN.replaceAll("\\.", ""), LDAPTestSetup.USERWITHPOINTS_DN);
assertAuthenticate(LDAPTestSetup.OTHERUSERWITHPOINTS_CN, LDAPTestSetup.OTHERUSERWITHPOINTS_PWD, "XWiki."
+ LDAPTestSetup.OTHERUSERWITHPOINTS_CN.replaceAll("\\.", "") + "_1", LDAPTestSetup.OTHERUSERWITHPOINTS_DN);
}
/**
* Validate "simple" LDAP authentication when the user already exists but does not contains LDAP profile object.
*/
public void testAuthenticateWhenNonLDAPUserAlreadyExists() throws XWikiException
{
XWikiDocument userDoc = getDocument("XWiki." + LDAPTestSetup.HORATIOHORNBLOWER_CN);
userDoc.newObject(this.userClass.getName(), getContext());
saveDocument(userDoc);
assertAuthenticate(LDAPTestSetup.HORATIOHORNBLOWER_CN, LDAPTestSetup.HORATIOHORNBLOWER_PWD,
LDAPTestSetup.HORATIOHORNBLOWER_DN);
}
/**
* Validate "simple" LDAP authentication when the user profile default page already exists but does not contains
* user object. In that case it is using another document to create the user.
*/
public void testAuthenticateWhenNonLDAPNonUserAlreadyExists() throws XWikiException
{
XWikiDocument userDoc = getDocument("XWiki." + LDAPTestSetup.HORATIOHORNBLOWER_CN);
saveDocument(userDoc);
assertAuthenticate(LDAPTestSetup.HORATIOHORNBLOWER_CN, LDAPTestSetup.HORATIOHORNBLOWER_PWD, "XWiki."
+ LDAPTestSetup.HORATIOHORNBLOWER_CN + "_1", LDAPTestSetup.HORATIOHORNBLOWER_DN);
}
public void testAuthenticateWithGroupMembership() throws XWikiException
{
saveDocument(getDocument("XWiki.Group1"));
this.properties.setProperty("xwiki.authentication.ldap.group_mapping", "XWiki.Group1="
+ LDAPTestSetup.HMSLYDIA_DN);
assertAuthenticate(LDAPTestSetup.HORATIOHORNBLOWER_CN, LDAPTestSetup.HORATIOHORNBLOWER_PWD,
LDAPTestSetup.HORATIOHORNBLOWER_DN);
List<BaseObject> groupList = getDocument("XWiki.Group1").getObjects(this.groupClass.getName());
assertTrue("No user has been added to the group", groupList != null && groupList.size() > 0);
BaseObject groupObject = groupList.get(0);
assertEquals("XWiki." + LDAPTestSetup.HORATIOHORNBLOWER_CN, groupObject.getStringValue("member"));
}
public void testAuthenticateWithGroupMembershipWhenOneXWikiGroupMapTwoLDAPGroups() throws XWikiException
{
saveDocument(getDocument("XWiki.Group1"));
this.properties.setProperty("xwiki.authentication.ldap.group_mapping", "XWiki.Group1="
+ LDAPTestSetup.HMSLYDIA_DN + "|" + "XWiki.Group1=" + LDAPTestSetup.EXCLUSIONGROUP_DN);
assertAuthenticate(LDAPTestSetup.HORATIOHORNBLOWER_CN, LDAPTestSetup.HORATIOHORNBLOWER_PWD,
LDAPTestSetup.HORATIOHORNBLOWER_DN);
List<BaseObject> groupList = getDocument("XWiki.Group1").getObjects(this.groupClass.getName());
assertTrue("No user has been added to the group", groupList != null && groupList.size() > 0);
BaseObject groupObject = groupList.get(0);
assertEquals("XWiki." + LDAPTestSetup.HORATIOHORNBLOWER_CN, groupObject.getStringValue("member"));
}
public void testAuthenticateTwiceWithGroupMembership() throws XWikiException
{
saveDocument(getDocument("XWiki.Group1"));
this.properties.setProperty("xwiki.authentication.ldap.group_mapping", "XWiki.Group1="
+ LDAPTestSetup.HMSLYDIA_DN);
this.mockGroupService.stubs().method("getAllMatchedGroups").will(
returnValue(Collections.singletonList("XWiki.Group1")));
assertAuthenticate(LDAPTestSetup.HORATIOHORNBLOWER_CN, LDAPTestSetup.HORATIOHORNBLOWER_PWD,
LDAPTestSetup.HORATIOHORNBLOWER_DN);
this.mockGroupService.stubs().method("getAllGroupsNamesForMember").will(
returnValue(Collections.singletonList("XWiki.Group1")));
assertAuthenticate(LDAPTestSetup.HORATIOHORNBLOWER_CN, LDAPTestSetup.HORATIOHORNBLOWER_PWD,
LDAPTestSetup.HORATIOHORNBLOWER_DN);
List<BaseObject> groupList = getDocument("XWiki.Group1").getObjects(this.groupClass.getName());
assertTrue("No user has been added to the group", groupList != null);
assertTrue("The user has been added twice in the group", groupList.size() == 1);
BaseObject groupObject = groupList.get(0);
assertEquals("XWiki." + LDAPTestSetup.HORATIOHORNBLOWER_CN, groupObject.getStringValue("member"));
assertAuthenticate(LDAPTestSetup.HORATIOHORNBLOWER_CN, LDAPTestSetup.HORATIOHORNBLOWER_PWD,
LDAPTestSetup.HORATIOHORNBLOWER_DN);
}
/**
* Validate user field synchronization in "simple" LDAP authentication.
*/
public void testAuthenticateUserSync() throws XWikiException
{
assertAuthenticate(LDAPTestSetup.HORATIOHORNBLOWER_CN, LDAPTestSetup.HORATIOHORNBLOWER_PWD,
LDAPTestSetup.HORATIOHORNBLOWER_DN);
XWikiDocument userProfile = getDocument("XWiki." + LDAPTestSetup.HORATIOHORNBLOWER_CN);
BaseObject userProfileObj = userProfile.getObject(USER_XCLASS);
assertEquals(LDAPTestSetup.HORATIOHORNBLOWER_SN, userProfileObj.getStringValue("last_name"));
assertEquals(LDAPTestSetup.HORATIOHORNBLOWER_GIVENNAME, userProfileObj.getStringValue("first_name"));
assertEquals(LDAPTestSetup.HORATIOHORNBLOWER_MAIL, userProfileObj.getStringValue("email"));
// Check that non-mapped properties are not touched
userProfileObj.setStringValue("customproperty", "customvalue");
assertAuthenticate(LDAPTestSetup.HORATIOHORNBLOWER_CN, LDAPTestSetup.HORATIOHORNBLOWER_PWD,
LDAPTestSetup.HORATIOHORNBLOWER_DN);
userProfile = getDocument("XWiki." + LDAPTestSetup.HORATIOHORNBLOWER_CN);
userProfileObj = userProfile.getObject(USER_XCLASS);
assertEquals("customvalue", userProfileObj.getStringValue("customproperty"));
}
public void testAuthenticateUserSyncWithoutMapping() throws XWikiException
{
this.properties.setProperty("xwiki.authentication.ldap.fields_mapping", "");
assertAuthenticate(LDAPTestSetup.HORATIOHORNBLOWER_CN, LDAPTestSetup.HORATIOHORNBLOWER_PWD,
LDAPTestSetup.HORATIOHORNBLOWER_DN);
assertAuthenticate(LDAPTestSetup.HORATIOHORNBLOWER_CN, LDAPTestSetup.HORATIOHORNBLOWER_PWD,
LDAPTestSetup.HORATIOHORNBLOWER_DN);
}
public void testAuthenticateUserSyncWithEmptyMapping() throws XWikiException
{
this.properties.remove("xwiki.authentication.ldap.fields_mapping");
assertAuthenticate(LDAPTestSetup.HORATIOHORNBLOWER_CN, LDAPTestSetup.HORATIOHORNBLOWER_PWD,
LDAPTestSetup.HORATIOHORNBLOWER_DN);
assertAuthenticate(LDAPTestSetup.HORATIOHORNBLOWER_CN, LDAPTestSetup.HORATIOHORNBLOWER_PWD,
LDAPTestSetup.HORATIOHORNBLOWER_DN);
}
public void testAuthenticateUserSyncWithWrongMapping() throws XWikiException
{
this.properties.setProperty("xwiki.authentication.ldap.fields_mapping", "wrongfield=wrongfield");
assertAuthenticate(LDAPTestSetup.HORATIOHORNBLOWER_CN, LDAPTestSetup.HORATIOHORNBLOWER_PWD,
LDAPTestSetup.HORATIOHORNBLOWER_DN);
assertAuthenticate(LDAPTestSetup.HORATIOHORNBLOWER_CN, LDAPTestSetup.HORATIOHORNBLOWER_PWD,
LDAPTestSetup.HORATIOHORNBLOWER_DN);
}
public void testAuthenticateWhenLDAPDNChanged() throws XWikiException
{
assertAuthenticate(LDAPTestSetup.HORATIOHORNBLOWER_CN, LDAPTestSetup.HORATIOHORNBLOWER_PWD,
LDAPTestSetup.HORATIOHORNBLOWER_DN);
XWikiDocument userProfile = getDocument("XWiki." + LDAPTestSetup.HORATIOHORNBLOWER_CN);
BaseObject ldapProfileObj = userProfile.getObject(LDAPProfileXClass.LDAP_XCLASS);
ldapProfileObj.setStringValue(LDAPProfileXClass.LDAP_XFIELD_DN, "oldDN");
this.mockStore.stubs().method("searchDocuments").will(
returnValue(Collections.singletonList(getDocument("XWiki." + LDAPTestSetup.HORATIOHORNBLOWER_CN))));
assertAuthenticate(LDAPTestSetup.HORATIOHORNBLOWER_CN, LDAPTestSetup.HORATIOHORNBLOWER_PWD,
LDAPTestSetup.HORATIOHORNBLOWER_DN);
}
public void testAuthenticateWithOUMembership() throws XWikiException
{
saveDocument(getDocument("XWiki.Group1"));
this.properties.setProperty("xwiki.authentication.ldap.group_mapping", "XWiki.Group1="
+ LDAPTestSetup.USERS_OU);
assertAuthenticate(LDAPTestSetup.HORATIOHORNBLOWER_CN, LDAPTestSetup.HORATIOHORNBLOWER_PWD,
LDAPTestSetup.HORATIOHORNBLOWER_DN);
List<BaseObject> groupList = getDocument("XWiki.Group1").getObjects(this.groupClass.getName());
assertTrue("No user has been added to the group", groupList != null && groupList.size() > 0);
BaseObject groupObject = groupList.get(0);
assertEquals("XWiki." + LDAPTestSetup.HORATIOHORNBLOWER_CN, groupObject.getStringValue("member"));
}
}
|
package com.xpn.xwiki.store.hibernate.query;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import javax.inject.Inject;
import javax.inject.Named;
import javax.inject.Provider;
import javax.inject.Singleton;
import org.apache.commons.lang3.StringUtils;
import org.hibernate.Session;
import org.hibernate.cfg.Configuration;
import org.hibernate.engine.spi.NamedQueryDefinition;
import org.hibernate.engine.spi.NamedSQLQueryDefinition;
import org.hibernate.query.NativeQuery;
import org.xwiki.component.annotation.Component;
import org.xwiki.component.manager.ComponentLookupException;
import org.xwiki.component.manager.ComponentManager;
import org.xwiki.component.phase.Initializable;
import org.xwiki.component.phase.InitializationException;
import org.xwiki.context.Execution;
import org.xwiki.query.Query;
import org.xwiki.query.QueryException;
import org.xwiki.query.QueryExecutor;
import org.xwiki.query.QueryFilter;
import org.xwiki.query.QueryParameter;
import org.xwiki.query.SecureQuery;
import org.xwiki.query.WrappingQuery;
import org.xwiki.security.authorization.ContextualAuthorizationManager;
import org.xwiki.security.authorization.Right;
import com.xpn.xwiki.XWikiContext;
import com.xpn.xwiki.XWikiException;
import com.xpn.xwiki.internal.store.hibernate.HibernateStore;
import com.xpn.xwiki.internal.store.hibernate.query.HqlQueryUtils;
import com.xpn.xwiki.store.XWikiHibernateStore;
import com.xpn.xwiki.util.Util;
/**
* QueryExecutor implementation for Hibernate Store.
*
* @version $Id$
* @since 1.6M1
*/
@Component
@Named("hql")
@Singleton
public class HqlQueryExecutor implements QueryExecutor, Initializable
{
/**
* Path to Hibernate mapping with named queries. Configured via component manager.
*/
private static final String MAPPING_PATH = "queries.hbm.xml";
private static final String ESCAPE_LIKE_PARAMETERS_FILTER = "escapeLikeParameters";
@Inject
private HibernateStore hibernate;
/**
* Used for access to XWikiContext.
*/
@Inject
private Execution execution;
@Inject
private ContextualAuthorizationManager authorization;
@Inject
@Named("context")
private Provider<ComponentManager> componentManagerProvider;
private volatile Set<String> allowedNamedQueries;
@Override
public void initialize() throws InitializationException
{
Configuration configuration = this.hibernate.getConfiguration();
configuration.addInputStream(Util.getResourceAsStream(MAPPING_PATH));
}
private Set<String> getAllowedNamedQueries()
{
if (this.allowedNamedQueries == null) {
synchronized (this) {
if (this.allowedNamedQueries == null) {
this.allowedNamedQueries = new HashSet<>();
// Gather the list of allowed named queries
Collection<NamedQueryDefinition> namedQueries =
this.hibernate.getConfigurationMetadata().getNamedQueryDefinitions();
for (NamedQueryDefinition definition : namedQueries) {
if (HqlQueryUtils.isSafe(definition.getQuery())) {
this.allowedNamedQueries.add(definition.getName());
}
}
}
}
}
return this.allowedNamedQueries;
}
/**
* @param statementString the statement to evaluate
* @return true if the select statement is allowed for a user without programming rights
*/
protected static boolean isSafeSelect(String statementString)
{
return HqlQueryUtils.isShortFormStatement(statementString) || HqlQueryUtils.isSafe(statementString);
}
protected void checkAllowed(final Query query) throws QueryException
{
if (query instanceof SecureQuery && ((SecureQuery) query).isCurrentAuthorChecked()) {
if (!this.authorization.hasAccess(Right.PROGRAM)) {
if (query.isNamed() && !getAllowedNamedQueries().contains(query.getStatement())) {
throw new QueryException("Named queries requires programming right", query, null);
}
if (!isSafeSelect(query.getStatement())) {
throw new QueryException("The query requires programming right", query, null);
}
}
}
}
@Override
public <T> List<T> execute(final Query query) throws QueryException
{
// Make sure the query is allowed in the current context
checkAllowed(query);
String oldDatabase = getContext().getWikiId();
try {
if (query.getWiki() != null) {
getContext().setWikiId(query.getWiki());
}
return getStore().executeRead(getContext(), session -> {
org.hibernate.query.Query<T> hquery = createHibernateQuery(session, query);
List<T> results = hquery.list();
if (query.getFilters() != null && !query.getFilters().isEmpty()) {
for (QueryFilter filter : query.getFilters()) {
results = filter.filterResults(results);
}
}
return results;
});
} catch (XWikiException e) {
throw new QueryException("Exception while executing query", query, e);
} finally {
getContext().setWikiId(oldDatabase);
}
}
protected <T> org.hibernate.query.Query<T> createHibernateQuery(Session session, Query query)
{
org.hibernate.query.Query<T> hquery;
Query filteredQuery = query;
if (!filteredQuery.isNamed()) {
// For non-named queries, convert the short form into long form before we apply the filters.
filteredQuery = new WrappingQuery(filteredQuery)
{
@Override
public String getStatement()
{
// handle short queries
return completeShortFormStatement(getWrappedQuery().getStatement());
}
};
filteredQuery = filterQuery(filteredQuery, Query.HQL);
hquery = session.createQuery(filteredQuery.getStatement());
populateParameters(hquery, filteredQuery);
} else {
hquery = createNamedHibernateQuery(session, filteredQuery);
}
return hquery;
}
private Query filterQuery(Query query, String language)
{
Query filteredQuery = query;
// If there are Query parameters of type QueryParameter then, for convenience, automatically add the
// "escapeLikeParameters" filter (if not already there)
addEscapeLikeParametersFilter(query);
if (query.getFilters() != null && !query.getFilters().isEmpty()) {
for (QueryFilter filter : query.getFilters()) {
// Step 1: For backward-compatibility reasons call #filterStatement() first
String filteredStatement = filter.filterStatement(filteredQuery.getStatement(), language);
// Prevent unnecessary creation of WrappingQuery objects when the QueryFilter doesn't modify the
// statement.
if (!filteredStatement.equals(filteredQuery.getStatement())) {
filteredQuery = new WrappingQuery(filteredQuery)
{
@Override
public String getStatement()
{
return filteredStatement;
}
};
}
// Step 2: Run #filterQuery()
filteredQuery = filter.filterQuery(filteredQuery);
}
}
return filteredQuery;
}
private void addEscapeLikeParametersFilter(Query query)
{
if (!hasQueryParametersType(query)) {
return;
}
// Find the component class for the "escapeLikeParameters" filter
QueryFilter escapeFilter;
try {
escapeFilter =
this.componentManagerProvider.get().getInstance(QueryFilter.class, ESCAPE_LIKE_PARAMETERS_FILTER);
} catch (ComponentLookupException e) {
// Shouldn't happen!
throw new RuntimeException(
String.format("Failed to locate [%s] Query Filter", ESCAPE_LIKE_PARAMETERS_FILTER), e);
}
boolean found = false;
for (QueryFilter filter : query.getFilters()) {
if (escapeFilter.getClass() == filter.getClass()) {
found = true;
break;
}
}
if (!found) {
query.addFilter(escapeFilter);
}
}
private boolean hasQueryParametersType(Query query)
{
boolean found = false;
for (Object value : query.getNamedParameters().values()) {
if (value instanceof QueryParameter) {
found = true;
break;
}
}
if (!found) {
for (Object value : query.getPositionalParameters().values()) {
if (value instanceof QueryParameter) {
found = true;
break;
}
}
}
return found;
}
/**
* Append the required select clause to HQL short query statements. Short statements are the only way for users
* without programming rights to perform queries. Such statements can be for example:
* <ul>
* <li>{@code , BaseObject obj where doc.fullName=obj.name and obj.className='XWiki.MyClass'}</li>
* <li>{@code where doc.creationDate > '2008-01-01'}</li>
* </ul>
*
* @param statement the statement to complete if required.
* @return the complete statement if it had to be completed, the original one otherwise.
*/
protected String completeShortFormStatement(String statement)
{
String lcStatement = statement.toLowerCase().trim();
if (lcStatement.isEmpty() || lcStatement.startsWith(",") || lcStatement.startsWith("where ")
|| lcStatement.startsWith("order by ")) {
return "select doc.fullName from XWikiDocument doc " + statement.trim();
}
return statement;
}
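/*
 * Illustrative example, assuming the short statement
 * "where doc.creationDate > '2008-01-01'": since it starts with "where ",
 * this method returns
 * "select doc.fullName from XWikiDocument doc where doc.creationDate > '2008-01-01'",
 * while a statement that is already a full select is returned unchanged.
 */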
private <T> org.hibernate.query.Query<T> createNamedHibernateQuery(Session session, Query query)
{
org.hibernate.query.Query<T> hQuery = session.getNamedQuery(query.getStatement());
Query filteredQuery = query;
if (filteredQuery.getFilters() != null && !filteredQuery.getFilters().isEmpty()) {
// Since we can't modify the Hibernate query statement at this point, we need to create a new one to apply
// the query filters. This comes with a performance cost; we could fix it by handling named queries ourselves
// instead of delegating them to Hibernate. That way we would always have a statement that we can transform
// before execution.
boolean isNative = hQuery instanceof NativeQuery;
String language = isNative ? "sql" : Query.HQL;
final String statement = hQuery.getQueryString();
// Run filters
filteredQuery = filterQuery(new WrappingQuery(filteredQuery)
{
@Override
public String getStatement()
{
return statement;
}
}, language);
if (isNative) {
hQuery = session.createSQLQuery(filteredQuery.getStatement());
// Copy the information about the return column types, if possible.
NamedSQLQueryDefinition definition =
this.hibernate.getConfigurationMetadata().getNamedNativeQueryDefinition(query.getStatement());
if (!StringUtils.isEmpty(definition.getResultSetRef())) {
((NativeQuery<T>) hQuery).setResultSetMapping(definition.getResultSetRef());
}
} else {
hQuery = session.createQuery(filteredQuery.getStatement());
}
}
populateParameters(hQuery, filteredQuery);
return hQuery;
}
/**
* @param hquery the Hibernate query to populate with parameters
* @param query the query to read the offset, limit and parameters from
*/
protected void populateParameters(org.hibernate.query.Query<?> hquery, Query query)
{
if (query.getOffset() > 0) {
hquery.setFirstResult(query.getOffset());
}
if (query.getLimit() > 0) {
hquery.setMaxResults(query.getLimit());
}
for (Entry<String, Object> e : query.getNamedParameters().entrySet()) {
setNamedParameter(hquery, e.getKey(), e.getValue());
}
Map<Integer, Object> positionalParameters = query.getPositionalParameters();
if (positionalParameters.size() > 0) {
positionalParameters.forEach(hquery::setParameter);
}
}
/**
* Sets the value of the specified named parameter, taking into account the type of the given value.
*
* @param query the query to set the parameter for
* @param name the parameter name
* @param value the non-null parameter value
*/
protected void setNamedParameter(org.hibernate.query.Query<?> query, String name, Object value)
{
if (value instanceof Collection) {
query.setParameterList(name, (Collection<?>) value);
} else if (value.getClass().isArray()) {
query.setParameterList(name, (Object[]) value);
} else {
query.setParameter(name, value);
}
}
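/*
 * Illustrative example, assuming a named parameter "states" bound to
 * Arrays.asList("draft", "published"): because the value is a Collection,
 * setParameterList is used so Hibernate can expand it into an IN list; a
 * String[] array takes the same path, while a plain String or Integer is
 * bound with setParameter.
 */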
/**
* @return Store component
*/
protected XWikiHibernateStore getStore()
{
return getContext().getWiki().getHibernateStore();
}
/**
* @return XWiki Context
*/
protected XWikiContext getContext()
{
return (XWikiContext) this.execution.getContext().getProperty(XWikiContext.EXECUTIONCONTEXT_KEY);
}
}
|
package com.hubspot.baragon.service.edgecache.cloudflare.client;
public class CloudflareClientException extends Exception {
public CloudflareClientException(String message, Throwable t) {
super(message, t);
}
public CloudflareClientException(String message) {
super(message);
}
}
|
package com.hubspot.singularity.executor.task;
import java.io.File;
import java.nio.file.Paths;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import org.apache.mesos.Protos;
import org.apache.mesos.Protos.TaskState;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.github.rholder.retry.RetryException;
import com.github.rholder.retry.Retryer;
import com.github.rholder.retry.RetryerBuilder;
import com.github.rholder.retry.StopStrategies;
import com.github.rholder.retry.WaitStrategies;
import com.google.common.base.Optional;
import com.hubspot.deploy.HealthcheckOptions;
import com.hubspot.singularity.executor.config.SingularityExecutorConfiguration;
import com.hubspot.singularity.executor.utils.ExecutorUtils;
import com.hubspot.singularity.runner.base.shared.SafeProcessManager;
public class SingularityExecutorTaskProcessCallable extends SafeProcessManager implements Callable<Integer> {
private static final Logger LOG = LoggerFactory.getLogger(SingularityExecutorTaskProcessCallable.class);
private final ProcessBuilder processBuilder;
private final ExecutorUtils executorUtils;
private final SingularityExecutorConfiguration configuration;
private final SingularityExecutorTask task;
public SingularityExecutorTaskProcessCallable(SingularityExecutorConfiguration configuration,
SingularityExecutorTask task,
ProcessBuilder processBuilder,
ExecutorUtils executorUtils) {
super(task.getLog());
this.executorUtils = executorUtils;
this.processBuilder = processBuilder;
this.configuration = configuration;
this.task = task;
}
@Override
public Integer call() throws Exception {
LOG.info("Process being started");
Process process = startProcess(processBuilder);
runHealthcheck();
return process.waitFor();
}
public SingularityExecutorTask getTask() {
return task;
}
@Override
public String toString() {
return "SingularityExecutorTaskProcessCallable [task=" + task + "]";
}
private void runHealthcheck() {
task.getLog().info("Running health check for {}", task.getTaskDefinition());
Optional<HealthcheckOptions> maybeOptions = task.getTaskDefinition().getHealthcheckOptions();
task.getLog().info("HC options: {}", maybeOptions);
Optional<String> expectedHealthcheckResultFilePath = task.getTaskDefinition().getHealthcheckResultFilePath();
task.getLog().info("Expected result file path: {}", expectedHealthcheckResultFilePath);
String taskAppDirectory = task.getTaskDefinition().getTaskAppDirectory();
task.getLog().info("Curdir: {}", new File(taskAppDirectory).getAbsolutePath());
task.getLog().info("Files: {}", new File(taskAppDirectory).listFiles());
task.getLog().info("All Files: {}", new File(taskAppDirectory).list());
if (maybeOptions.isPresent() && expectedHealthcheckResultFilePath.isPresent()) {
File fullHealthcheckPath = Paths.get(taskAppDirectory, expectedHealthcheckResultFilePath.get()).toFile();
task.getLog().info("Full healthcheck path: {}", fullHealthcheckPath);
try {
Integer healthcheckMaxRetries = maybeOptions.get().getMaxRetries().or(configuration.getDefaultHealthcheckMaxRetries());
Retryer<Boolean> retryer = RetryerBuilder.<Boolean>newBuilder()
.retryIfResult(bool -> !bool)
.withWaitStrategy(WaitStrategies.fixedWait(1L, TimeUnit.SECONDS))
.withStopStrategy(StopStrategies.stopAfterAttempt(healthcheckMaxRetries))
.build();
retryer.call(() -> {
task.getLog().info("files: {}", new File(taskAppDirectory).list());
return fullHealthcheckPath.exists();
});
executorUtils.sendStatusUpdate(task.getDriver(), task.getTaskInfo().getTaskId(), Protos.TaskState.TASK_RUNNING, String.format("Task running process %s (health check file found successfully).", getCurrentProcessToString()), task.getLog());
} catch (ExecutionException | RetryException e) {
executorUtils.sendStatusUpdate(task.getDriver(), task.getTaskInfo().getTaskId(), TaskState.TASK_FAILED, String.format("Task timed out on health checks (health check file not found)."), task.getLog());
}
}
}
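// Illustrative timing note, assuming healthcheckMaxRetries = 30: the retryer
// above polls for the healthcheck result file roughly once per second
// (fixedWait of 1 second, stopAfterAttempt(30)), so the task is marked
// TASK_FAILED after about 30 seconds if the file never appears.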
}
|
package org.eclipse.birt.report.designer.data.ui.dataset;
import java.net.URISyntaxException;
import java.util.HashSet;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.eclipse.birt.core.exception.BirtException;
import org.eclipse.birt.report.designer.data.ui.datasource.PropertyBindingPage;
import org.eclipse.birt.report.designer.data.ui.property.AbstractPropertyDialog;
import org.eclipse.birt.report.designer.data.ui.property.PropertyNode;
import org.eclipse.birt.report.designer.data.ui.util.DTPUtil;
import org.eclipse.birt.report.designer.data.ui.util.DataSetProvider;
import org.eclipse.birt.report.designer.data.ui.util.DataUIConstants;
import org.eclipse.birt.report.designer.data.ui.util.IHelpConstants;
import org.eclipse.birt.report.designer.data.ui.util.Utility;
import org.eclipse.birt.report.designer.internal.ui.util.ExceptionHandler;
import org.eclipse.birt.report.designer.internal.ui.util.IHelpContextIds;
import org.eclipse.birt.report.designer.internal.ui.util.UIUtil;
import org.eclipse.birt.report.designer.nls.Messages;
import org.eclipse.birt.report.designer.ui.dialogs.properties.IPropertyPage;
import org.eclipse.birt.report.model.api.DataSetHandle;
import org.eclipse.birt.report.model.api.DesignElementHandle;
import org.eclipse.birt.report.model.api.JointDataSetHandle;
import org.eclipse.birt.report.model.api.OdaDataSetHandle;
import org.eclipse.birt.report.model.api.OdaDataSourceHandle;
import org.eclipse.birt.report.model.api.ScriptDataSetHandle;
import org.eclipse.birt.report.model.api.ScriptDataSourceHandle;
import org.eclipse.birt.report.model.api.activity.NotificationEvent;
import org.eclipse.birt.report.model.api.activity.SemanticException;
import org.eclipse.birt.report.model.api.core.Listener;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IConfigurationElement;
import org.eclipse.core.runtime.IExtension;
import org.eclipse.core.runtime.IExtensionPoint;
import org.eclipse.core.runtime.IExtensionRegistry;
import org.eclipse.core.runtime.Platform;
import org.eclipse.datatools.connectivity.oda.OdaException;
import org.eclipse.datatools.connectivity.oda.design.DataSetDesign;
import org.eclipse.datatools.connectivity.oda.design.DesignSessionRequest;
import org.eclipse.datatools.connectivity.oda.design.DesignSessionResponse;
import org.eclipse.datatools.connectivity.oda.design.SessionStatus;
import org.eclipse.datatools.connectivity.oda.design.ui.designsession.DataSetDesignSession;
import org.eclipse.datatools.connectivity.oda.design.ui.designsession.DesignSessionUtil;
import org.eclipse.datatools.connectivity.oda.design.ui.manifest.UIManifestExplorer;
import org.eclipse.datatools.connectivity.oda.design.ui.wizards.DataSetEditorPage;
import org.eclipse.jface.dialogs.IMessageProvider;
import org.eclipse.jface.preference.IPreferencePageContainer;
import org.eclipse.jface.preference.IPreferenceStore;
import org.eclipse.jface.viewers.StructuredSelection;
import org.eclipse.swt.SWT;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.TreeItem;
import org.eclipse.ui.dialogs.PropertyPage;
/**
* Presents data set editor dialog.
*
*/
@SuppressWarnings("deprecation")
public class DataSetEditor extends AbstractPropertyDialog implements
IPreferencePageContainer
{
private ItemModelManager itemModelManager = new ItemModelManager( );
private DataSetDesignSession m_designSession = null;
private boolean includeInputParameterPage = false;
private boolean includeOutputParameterPage = false;
private boolean needToFocusOnOutput = false;
private transient HistoryToolBar historyBar;
// Common internal pages
// The pages of ODA extensions are considered internal pages, including
// jdbc, xml, flatfile and web service, whose page IDs are hard-coded as a
// workaround for now.
// TODO: Fix me.
private static final String DATASET_SETTINGS_PAGE = "org.eclipse.birt.datasource.editor.dataset.settings"; //$NON-NLS-1$
private static final String OUTPUT_PARAMETER_PREVIEW_PAGE = "org.eclipse.birt.datasource.editor.dataset.outputparameterpreviewpage"; //$NON-NLS-1$
private static final String DATASOURCE_EDITOR_PROPERTY_PAGE = "org.eclipse.birt.datasource.editor.property"; //$NON-NLS-1$
private static final String COMPUTED_COLUMNS_PAGE = "org.eclipse.birt.datasource.editor.dataset.computedcolumnspage"; //$NON-NLS-1$
private static final String RESULTSET_PREVIEW_PAGE = "org.eclipse.birt.datasource.editor.dataset.resultsetpreviewpage"; //$NON-NLS-1$
private static final String FILTERS_PAGE = "org.eclipse.birt.datasource.editor.dataset.filterspage"; //$NON-NLS-1$
private static final String PARAMETERS_PAGE = "org.eclipse.birt.datasource.editor.dataset.parameterspage"; //$NON-NLS-1$
private static final String OUTPUTCOLUMN_PAGE = "org.eclipse.birt.datasource.editor.dataset.outputcolumnpage"; //$NON-NLS-1$
private static final String JOINT_DATA_SET_PAGE = "org.eclipse.birt.datasource.editor.dataset.jointDataSetPage"; //$NON-NLS-1$
private static final String DATA_SOURCE_SELECTION_PAGE = "org.eclipse.birt.datasource.editor.dataset.datasourceselectionpage"; //$NON-NLS-1$
private static final String SOAP_PARAMETERS_PAGE = "org.eclipse.datatools.enablement.oda.ws.ui.SOAPParametersPage"; //$NON-NLS-1$
private static final Set<String> internalPages = getInternalPageNames( );
private static Logger logger = Logger.getLogger( DataSetEditor.class.getName( ) );
protected Control createDialogArea( Composite parent )
{
UIUtil.bindHelp( parent, IHelpContextIds.DATA_SET_EDITOR_ID );
return super.createDialogArea( parent );
}
protected boolean needRememberLastSize( )
{
return true;
}
/**
* The constructor.
*
* @param parentShell
*/
public DataSetEditor( Shell parentShell, DataSetHandle ds,
boolean needToFocusOnOutput )
{
super( parentShell, ds );
if ( !( ds instanceof JointDataSetHandle ) )
{
if ( ds.getDataSource( ) == null )
{
throw new RuntimeException( Messages.getFormattedString( "dataset.editor.error.noDataSource", new String[]{ds.getQualifiedName( )} ) );//$NON-NLS-1$
}
if ( ( ds instanceof OdaDataSetHandle && !( ds.getDataSource( ) instanceof OdaDataSourceHandle ) ) )
{
throw new RuntimeException( Messages.getFormattedString( "dataset.editor.error.nonmatchedDataSource", //$NON-NLS-1$
new String[]{
ds.getQualifiedName( ),
( (OdaDataSetHandle) ds ).getExtensionID( )
} ) );
}
else if ( ds instanceof ScriptDataSetHandle
&& !( ds.getDataSource( ) instanceof ScriptDataSourceHandle ) )
{
throw new RuntimeException( Messages.getFormattedString( "dataset.editor.error.nonmatchedDataSource", //$NON-NLS-1$
new String[]{
ds.getQualifiedName( ),
DataUIConstants.DATA_SET_SCRIPT
} ) );
}
}
this.needToFocusOnOutput = needToFocusOnOutput;
// get the data source and dataset type from handle
String dataSourceType, dataSetType;
if ( ds instanceof OdaDataSetHandle )
{
OdaDataSourceHandle dataSource = (OdaDataSourceHandle) ( (OdaDataSetHandle) ds ).getDataSource( );
dataSourceType = dataSource.getExtensionID( );
dataSetType = ( (OdaDataSetHandle) ds ).getExtensionID( );
}
else if ( ds instanceof ScriptDataSetHandle )
{
dataSourceType = DataUIConstants.DATA_SOURCE_SCRIPT;
dataSetType = DataUIConstants.DATA_SET_SCRIPT;
}
else if ( ds instanceof JointDataSetHandle )
{
dataSourceType = ""; //$NON-NLS-1$
dataSetType = ""; //$NON-NLS-1$
addPageTo( "/", //$NON-NLS-1$
JOINT_DATA_SET_PAGE,
Messages.getString( "JointDataSetPage.query" ), //$NON-NLS-1$
null,
new JointDataSetPage( Messages.getString( "dataset.editor.dataSource" ) ) ); //$NON-NLS-1$
}
else
{
throw new RuntimeException( Messages.getFormattedString( "dataset.editor.error.noDataSource", new String[]{ds.getClass( ).getName( )} ) );//$NON-NLS-1$
}
// According to the data source type, get the extension point. If the
// extension is BIRT, populate the BIRT page; otherwise the ODA custom
// page will be populated.
if ( !( ds instanceof JointDataSetHandle ) )
{
addPageTo( "/", DATA_SOURCE_SELECTION_PAGE, Messages.getString( "dataset.editor.dataSource" ), null, new DataSetDataSourceSelectionPage( ) ); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
if ( DesignSessionUtil.hasValidOdaDesignUIExtension( dataSourceType ) )
{
addCustomPageODAV3( (OdaDataSetHandle) ds,
dataSourceType,
dataSetType );
}
else
addBirtPage( dataSourceType, dataSetType );
}
// add common pages, such as the computed column page, parameter page,
// output column page, etc.
addCommonPage( ds );
// start model manager to process the edit transaction
itemModelManager.start( ds );
}
/**
* add page for org.eclipse.datatools.connectivity.oda.design.ui.dataSource
*
* @param dataSetHandle
*/
private void addCustomPageODAV3( OdaDataSetHandle dataSetHandle,
String dataSourceType, String dataSetType )
{
try
{
DesignSessionRequest request = DTPUtil.getInstance( )
.createDesignSessionRequest( dataSetHandle );
if ( request != null && request.getDataSourceDesign( ) != null )
{
DTPUtil.getInstance( )
.applyResourceIdentifiers( request.getDataSourceDesign( ) );
}
m_designSession = DataSetDesignSession.startEditDesign( request );
includeInputParameterPage = UIManifestExplorer.getInstance( )
.getDataSetUIElement( dataSourceType, dataSetType )
.supportsInParameters( );
includeOutputParameterPage = UIManifestExplorer.getInstance( )
.getDataSetUIElement( dataSourceType, dataSetType )
.supportsOutParameters( );
}
catch ( OdaException e )
{
ExceptionHandler.handle( e );
}
catch ( URISyntaxException e )
{
ExceptionHandler.handle( e );
}
if ( m_designSession != null )
populateEditorPage( m_designSession );
}
/**
* populate the editor pages from the ODA design session
*
* @param m_designSession
*/
private void populateEditorPage( DataSetDesignSession m_designSession )
{
try
{
DataSetEditorPage[] dataSetEditorPages = m_designSession.getEditorPages( );
for ( int i = 0; i < dataSetEditorPages.length; i++ )
{
DataSetEditorPage dataSetEditorPage = dataSetEditorPages[i];
// temp solution to screen off ws.SOAPParametersPage
if ( SOAP_PARAMETERS_PAGE.equals( dataSetEditorPage.getPageId( ) ) )
continue;
PropertyPageWrapper propertyPageWrapper = new PropertyPageWrapper( dataSetEditorPage,
m_designSession );
addPageTo( dataSetEditorPage.getPagePath( ),
dataSetEditorPage.getPageId( ),
dataSetEditorPage.getTitle( ),
null,
propertyPageWrapper );
if ( dataSetEditorPage.hasInitialFocus( ) )
setDefaultNode( dataSetEditorPage.getPageId( ) );
}
}
catch ( OdaException ex )
{
ExceptionHandler.handle( ex );
}
}
/**
* add several commonly used pages for the BIRT data set editor
*
* @param ds
*/
private void addCommonPage( DataSetHandle ds )
{
if ( ds instanceof ScriptDataSetHandle )
{
// Output column is replaced by column definition page
addPageTo( "/", OUTPUTCOLUMN_PAGE, Messages.getString( "dataset.editor.outputColumns" ), null, new OutputColumnDefnPage( ) );//$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
// Parameter page
addPageTo( "/", PARAMETERS_PAGE, Messages.getString( "dataset.editor.parameters" ), null, new DataSetParametersPage( ) );//$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
// Filters page
addPageTo( "/", FILTERS_PAGE, Messages.getString( "dataset.editor.filters" ), null, new DataSetFiltersPage( ) );//$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
// Setting page
addDataSetSettingPage( ds );
// Result set preview page
addPageTo( "/", RESULTSET_PREVIEW_PAGE, Messages.getString( "dataset.editor.preview" ), null, new ResultSetPreviewPage( ) );//$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
}
else if ( ds instanceof OdaDataSetHandle )
{
// Output column page
addPageTo( "/", OUTPUTCOLUMN_PAGE, Messages.getString( "dataset.editor.outputColumns" ), null, new OutputColumnsPage( ) );//$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
// Computed column page
addPageTo( "/", COMPUTED_COLUMNS_PAGE, Messages.getString( "dataset.editor.computedColumns" ), null, new DataSetComputedColumnsPage( ) );//$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
// The flat file driver does not need the parameter page to be displayed.
if ( includeInputParameterPage )
addPageTo( "/", PARAMETERS_PAGE, Messages.getString( "dataset.editor.parameters" ), null, new DataSetParametersPage( ) );//$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
// Filter page
addPageTo( "/", FILTERS_PAGE, Messages.getString( "dataset.editor.filters" ), null, new DataSetFiltersPage( ) );//$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
// Property binding page
addPageTo( "/", DATASOURCE_EDITOR_PROPERTY_PAGE, Messages.getString( "datasource.editor.property" ), null, new PropertyBindingPage( ) );//$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
// Setting page
addDataSetSettingPage( ds );
// Output parameters page
if ( includeOutputParameterPage )
addPageTo( "/", OUTPUT_PARAMETER_PREVIEW_PAGE, Messages.getString( "dataset.editor.outputparameters" ), null, new OutputParameterPreviewPage( ) );//$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
// Result set preview page
addPageTo( "/", RESULTSET_PREVIEW_PAGE, Messages.getString( "dataset.editor.preview" ), null, new ResultSetPreviewPage( ) );//$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
}
else if ( ds instanceof JointDataSetHandle )
{
// Output column page
addPageTo( "/", OUTPUTCOLUMN_PAGE, Messages.getString( "dataset.editor.outputColumns" ), null, new OutputColumnsPage( ) );//$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
// Computed column page
addPageTo( "/", COMPUTED_COLUMNS_PAGE, Messages.getString( "dataset.editor.computedColumns" ), null, new DataSetComputedColumnsPage( ) );//$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
addPageTo( "/", PARAMETERS_PAGE, Messages.getString( "dataset.editor.parameters" ), null, new DataSetParametersPage( ) );//$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
/*
* // Setting page addDataSetSettingPage( ds );
*/
// Filter page
addPageTo( "/", FILTERS_PAGE, Messages.getString( "dataset.editor.filters" ), null, new DataSetFiltersPage( ) );//$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
// Result set preview page
addPageTo( "/", RESULTSET_PREVIEW_PAGE, Messages.getString( "dataset.editor.preview" ), null, new ResultSetPreviewPage( ) );//$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
}
if ( needToFocusOnOutput )
setDefaultNode( OUTPUTCOLUMN_PAGE );
}
/**
 * Adds the pages contributed through the org.eclipse.birt.report.designer.ui.odadatasource extension point.
*
* @param dataSourceType
* @param dataSetType
*/
private void addBirtPage( String dataSourceType, String dataSetType )
{
try
{
IConfigurationElement element = DataSetProvider.findDataSetElement( dataSetType,
dataSourceType );
if ( element != null )
{
String supportParameterPage = element.getAttribute( "addsDataSetParametersPage" ); //$NON-NLS-1$
if ( supportParameterPage != null )
includeInputParameterPage = Boolean.valueOf( supportParameterPage )
.booleanValue( );
// Now get all the editor pages
IConfigurationElement[] editorPages = element.getChildren( "dataSetEditorPage" );//$NON-NLS-1$
if ( editorPages != null )
{
boolean hasFocus = false;
for ( int n = 0; n < editorPages.length; n++ )
{
IPropertyPage page = (IPropertyPage) editorPages[n].createExecutableExtension( "class" );//$NON-NLS-1$
addPageTo( editorPages[n].getAttribute( "path" ), editorPages[n].getAttribute( "name" ), editorPages[n].getAttribute( "displayName" ), null, page );//$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
if ( hasFocus == false )
{
String initFocusAttr = editorPages[n].getAttribute( "initFocus" ); //$NON-NLS-1$
if ( initFocusAttr != null
&& initFocusAttr.equalsIgnoreCase( "true" ) ) //$NON-NLS-1$
{
setDefaultNode( editorPages[n].getAttribute( "name" ) ); //$NON-NLS-1$
hasFocus = true;
}
else if ( n == editorPages.length - 1 )
{
setDefaultNode( editorPages[0].getAttribute( "name" ) ); //$NON-NLS-1$
}
}
}
}
}
}
catch ( CoreException e1 )
{
ExceptionHandler.handle( e1 );
}
}
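	// Illustrative shape of the contribution read above (the element and attribute names come
	// from the code in addBirtPage; the values are placeholders, not taken from a real plugin.xml):
	//
	//   <dataSetEditorPage path="/" name="my.page.id" displayName="My Page"
	//                      class="com.example.MyPropertyPage" initFocus="true"/>
	//
	// plus an optional addsDataSetParametersPage attribute on the data set element itself.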
/**
 * Adds the data set settings page.
 *
 * @param ds the data set handle
*/
private void addDataSetSettingPage( DataSetHandle ds )
{
IPropertyPage settingPage = new DataSetSettingsPage( );
addPageTo( "/", //$NON-NLS-1$
DATASET_SETTINGS_PAGE,
Messages.getString( "dataset.editor.settings" ), //$NON-NLS-1$
null,
settingPage );
}
/*
* (non-Javadoc)
*
* @see
* org.eclipse.birt.report.designer.ui.dialogs.properties.AbstractPropertyDialog
* #performOk()
*/
public boolean performOk( )
{
try
{
if ( m_designSession != null )
m_designSession.finish( );
}
		catch ( OdaException e )
		{
			// Ignore: failing to finish the ODA design session should not block closing the dialog.
		}
itemModelManager.destory( false );
return true;
}
/**
 * Updates the data set design by restarting the ODA design session with the latest state of the data set handle.
*/
public void updateDataSetDesign( IPropertyPage page )
{
try
{
if ( this.getCurrentNode( ) != null
&& this.getCurrentNode( ).getPage( ) != page )
return;
if ( m_designSession != null )
{
// restart the oda design session with a new request
// based on the latest state of the data set handle
DesignSessionRequest request = DTPUtil.getInstance( )
.createDesignSessionRequest( (OdaDataSetHandle) getHandle( ) );
if ( request != null && request.getDataSourceDesign( ) != null )
{
DTPUtil.getInstance( )
.applyResourceIdentifiers( request.getDataSourceDesign( ) );
}
// try to preserve the existing editor pages if feasible
boolean hasResetEditorPages = m_designSession.restartEditDesign( request,
false );
if ( hasResetEditorPages )
populateDataSetEditor( );
}
}
catch ( OdaException e )
{
logger.log( Level.WARNING, e.getLocalizedMessage( ), e );
}
catch ( URISyntaxException e )
{
logger.log( Level.WARNING, e.getLocalizedMessage( ), e );
}
}
/*
* (non-Javadoc)
*
* @see
* org.eclipse.birt.report.designer.ui.dialogs.properties.AbstractPropertyDialog
* #performCancel()
*/
public boolean performCancel( )
{
if ( m_designSession != null )
{
m_designSession.cancel( );
}
itemModelManager.destory( true );
return true;
}
/**
* Returns the current model handle.
*
*/
public DataSetHandle getHandle( )
{
return (DataSetHandle) getModel( );
}
/*
* (non-Javadoc)
*
* @see
* org.eclipse.jface.window.Window#createContents(org.eclipse.swt.widgets
* .Composite)
*/
protected Control createContents( Composite parent )
{
String title = Messages.getFormattedString( "dataset.edit", new String[]{getHandle( ).getName( )} );//$NON-NLS-1$
getShell( ).setText( title );
Control control = super.createContents( parent );
Utility.setSystemHelp( control, IHelpConstants.CONEXT_ID_DATASET_EDIT );
return control;
}
/**
 * Gets all column items from the data set.
*
* @return DataSetViewData[]
*/
public DataSetViewData[] getCurrentItemModel( )
{
return itemModelManager.getCurrentItemModel( true, true );
}
/**
 * Gets all column items from the data set.
*
* @param useColumnHint
* @param suppressErrorMessage
* @return DataSetViewData[]
*/
public DataSetViewData[] getCurrentItemModel( boolean useColumnHint,
boolean suppressErrorMessage )
{
return itemModelManager.getCurrentItemModel( useColumnHint,
suppressErrorMessage );
}
/**
 * Marks the linked report parameter as changed.
*
*/
public void enableLinkedParamChanged( )
{
this.itemModelManager.enableLinkedParamChanged( );
}
/**
 * @return true if the model or a weak-linked report parameter has changed
*/
public boolean modelChanged( )
{
return this.itemModelManager.modelChanged( );
}
/**
 * Flushes the current ODA design session and returns its data set design.
 *
 * @return the current data set design, or null if there is no session or its status is not OK
 * @throws OdaException if flushing the design session fails
*/
public DataSetDesign getCurrentDataSetDesign( ) throws OdaException
{
if ( m_designSession != null )
{
DesignSessionResponse response = m_designSession.flush( )
.getResponse( );
if ( response.getSessionStatus( ) != SessionStatus.OK_LITERAL )
return null;
DataSetDesign dataSetDesign = response.getDataSetDesign( );
return dataSetDesign;
}
else
return null;
}
/*
* (non-Javadoc)
*
* @see org.eclipse.jface.dialogs.Dialog#okPressed()
*/
protected void okPressed( )
{
super.okPressed( );
try
{
DataSetUIUtil.updateColumnCache( this.getHandle( ) );
}
catch ( Exception e )
{
ExceptionHandler.handle( e );
}
// First call ok on all the pages
if ( super.rootNode.hasSubNodes( ) )
{
PropertyNode[] nodes = rootNode.getSubNodes( );
for ( int n = 0; n < nodes.length; n++ )
{
// Check whether the current page can be closed
if ( nodes[n].getPage( ) instanceof DataSetParametersPage )
{
				if ( viewer == null || viewer.getTree( ) == null )
return;
DataSetParametersPage page = (DataSetParametersPage) nodes[n].getPage( );
if ( !page.canFinish( ) && !viewer.getTree( ).isDisposed( ) )
{
TreeItem firstNode = viewer.getTree( ).getItems( )[n];
StructuredSelection select = new StructuredSelection( firstNode.getData( ) );
viewer.setSelection( select );
String name = ( (DataSetParametersPage) nodes[n].getPage( ) ).getNoneValuedParameterName( );
this.setMessage( Messages.getFormattedString( "dataset.editor.error.noInputParameterDefaultValue", //$NON-NLS-1$
new Object[]{
name
} ),
IMessageProvider.ERROR );
return;
}
}
}
}
}
/**
 * Gets the property page wrapped by the currently selected node.
 *
 * @return the current property page, or null if the selected node does not wrap one
*/
private PropertyPage getCurrentPropertyPage( )
{
if ( getCurrentNode( ) != null )
{
IPropertyPage ipropertyPage = getCurrentNode( ).getPage( );
if ( ipropertyPage instanceof PropertyPageWrapper )
return ( (PropertyPageWrapper) ipropertyPage ).getPropertyPage( );
}
return null;
}
/**
 * Repopulates the ODA editor pages from the current design session.
*
*/
private void populateDataSetEditor( )
{
if ( m_designSession == null )
{
return;
}
DataSetEditorPage[] dataSetEditorPages;
try
{
dataSetEditorPages = m_designSession.getEditorPages( );
}
catch ( OdaException e )
{
return;
}
IPropertyPage currentNode = null;
for ( int i = 0; i < dataSetEditorPages.length; i++ )
{
DataSetEditorPage dataSetEditorPage = dataSetEditorPages[i];
// temp solution to screen off ws.SOAPParametersPage
if ( SOAP_PARAMETERS_PAGE.equals( dataSetEditorPage.getPageId( ) ) )
continue;
PropertyPageWrapper propertyPageWrapper = new PropertyPageWrapper( dataSetEditorPage,
m_designSession );
			// Replace the page of any node that matches this editor page id with the refreshed wrapper
if ( super.rootNode.hasSubNodes( ) )
{
PropertyNode[] nodes = rootNode.getSubNodes( );
for ( int n = 0; n < nodes.length; n++ )
{
if ( nodes[n] != null
&& nodes[n].getId( )
.equals( dataSetEditorPage.getPageId( ) ) )
{
nodes[n].removePageControl( );
nodes[n].setPage( propertyPageWrapper );
nodes[n].setContainer( this );
if ( this.getCurrentNode( )
.getId( )
.equals( dataSetEditorPage.getPageId( ) ) )
currentNode = propertyPageWrapper;
break;
}
}
}
}
if ( currentNode != null )
{
rootNode.setPage( currentNode );
}
}
/*
* (non-Javadoc)
*
* @see
* org.eclipse.jface.preference.IPreferencePageContainer#getPreferenceStore
* ()
*/
public IPreferenceStore getPreferenceStore( )
{
return null;
}
/*
* (non-Javadoc)
*
* @see
* org.eclipse.jface.preference.IPreferencePageContainer#updateButtons()
*/
public void updateButtons( )
{
PropertyPage propertyPage = this.getCurrentPropertyPage( );
if ( propertyPage != null && getOkButton( ) != null )
{
getOkButton( ).setEnabled( propertyPage.okToLeave( ) );
}
}
/*
* (non-Javadoc)
*
* @see
* org.eclipse.jface.preference.IPreferencePageContainer#updateMessage()
*/
public void updateMessage( )
{
PropertyPage propertyPage = getCurrentPropertyPage( );
if ( propertyPage != null )
setMessage( propertyPage.getMessage( ),
propertyPage.getMessageType( ) );
}
/*
* (non-Javadoc)
*
* @see org.eclipse.jface.preference.IPreferencePageContainer#updateTitle()
*/
public void updateTitle( )
{
		// Nothing to do: the dialog title does not change.
}
/*
* (non-Javadoc)
*
* @see
* org.eclipse.birt.report.designer.ui.dialogs.properties.AbstractPropertyDialog
* #createTitleArea(org.eclipse.swt.widgets.Composite)
*/
public Composite createTitleArea( Composite parent )
{
GridLayout layout = (GridLayout) parent.getLayout( );
layout.numColumns = 2;
layout.marginHeight = 0;
parent.setLayout( layout );
createMessageComposite( parent );
createToolbarComposite( parent );
return null;
}
/**
*
* @param parent
*/
private void createMessageComposite( Composite parent )
{
super.createTitleArea( parent );
}
/**
*
* @param parent
*/
private void createToolbarComposite( Composite parent )
{
historyBar = new HistoryToolBar( parent, viewer, SWT.FLAT
| SWT.HORIZONTAL );
}
/*
* (non-Javadoc)
*
* @see
* org.eclipse.birt.report.designer.ui.dialogs.properties.AbstractPropertyDialog
* #
* showSelectionPage(org.eclipse.birt.report.designer.ui.dialogs.properties.
* PropertyNode)
*/
public void showSelectionPage( PropertyNode selectedNode )
{
super.showSelectionPage( selectedNode );
if ( showPage )
{
if ( historyBar != null )
{
historyBar.addHistoryNode( selectedNode );
}
			// Automatically pack (resize) the shell when an externally
			// contributed page is selected
if ( !internalPages.contains( selectedNode.getId( ) ) )
{
getShell( ).pack( );
}
}
}
/**
 * Whether the data set type supports input parameters.
 *
 * @return true if the input parameter page is included
*/
public boolean supportsInParameters( )
{
return this.includeInputParameterPage;
}
/**
 * Whether the data set type supports output parameters.
 *
 * @return true if the output parameter page is included
*/
public boolean supportsOutputParameters( )
{
return this.includeOutputParameterPage;
}
/**
 * Helper class that manages the data set item model.
*/
private static class ItemModelManager implements Listener
{
private DataSetHandle ds = null;
private boolean itemModelChanged = true;
private boolean linkedParameterChanged = true;
private DataSetViewData[] savedItemModel = null;
private String savedQueryText = null;
private ClassLoader oldContextLoader = null;
/**
 * Starts listening to the given data set and caches its current state.
 *
 * @param dataSet
 *            the data set handle to listen to
*/
public void start( DataSetHandle dataSet )
{
assert dataSet != null;
this.ds = dataSet;
if ( ds instanceof OdaDataSetHandle )
this.savedQueryText = ( (OdaDataSetHandle) ds ).getQueryText( );
setContextLoader( dataSet );
this.savedItemModel = DataSetProvider.getCurrentInstance( )
					.getCachedDataSetItemModel( ds );
this.ds.addListener( this );
}
/**
*
* @param dataSet
*/
private void setContextLoader( DataSetHandle dataSet )
{
// set context class loader
oldContextLoader = Thread.currentThread( ).getContextClassLoader( );
ClassLoader parentLoader = oldContextLoader;
if ( parentLoader == null )
parentLoader = this.getClass( ).getClassLoader( );
ClassLoader newContextLoader = DataSetProvider.getCustomScriptClassLoader( parentLoader,
dataSet.getModuleHandle( ) );
Thread.currentThread( ).setContextClassLoader( newContextLoader );
}
/**
 * Destroys this manager and optionally rolls the data set back to its saved state.
 *
 * @param rollback
 *            true to restore the saved query text and item model; false to keep the edits
*/
public void destory( boolean rollback )
{
if ( rollback )
{
if ( ds instanceof OdaDataSetHandle )
{
try
{
( (OdaDataSetHandle) ds ).setQueryText( this.savedQueryText );
DataSetProvider.getCurrentInstance( )
.getDataSetDesign( ds, true, true );
}
catch ( SemanticException e )
{
// should not arrive here.
}
catch ( BirtException e )
{
// should not arrive here
}
}
DataSetProvider.getCurrentInstance( )
.setModelOfDataSetHandle( this.ds, savedItemModel );
}
//Restore old context loader
Thread.currentThread( ).setContextClassLoader( oldContextLoader );
ds.removeListener( this );
}
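		// Lifecycle note: this dialog is expected to call start(ds) when editing begins,
		// destory(false) from performOk() to keep the edits, and destory(true) from
		// performCancel() to roll the query text and cached item model back to the saved state.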
/*
* @see
* org.eclipse.birt.report.model.api.core.Listener#elementChanged(org
* .eclipse.birt.report.model.api.DesignElementHandle,
* org.eclipse.birt.report.model.api.activity.NotificationEvent)
*/
public void elementChanged( DesignElementHandle focus,
NotificationEvent ev )
{
itemModelChanged = true;
}
/**
* Set the linked parameter changed status to 'true'
*
*/
public void enableLinkedParamChanged( )
{
linkedParameterChanged = true;
}
/**
*
* @return the model changed or not
*/
public boolean modelChanged( )
{
return this.itemModelChanged || linkedParameterChanged;
}
/**
 * Gets all column items from the data set.
 *
 * @return DataSetViewData[]
*/
public DataSetViewData[] getCurrentItemModel( )
{
DataSetViewData[] dataSetItem = DataSetProvider.getCurrentInstance( )
.getColumns( ds, itemModelChanged );
itemModelChanged = false;
linkedParameterChanged = false;
return dataSetItem;
}
/**
 * Gets all column items from the data set.
 *
 * @param useColumnHint
 * @param suppressErrorMessage
 * @return DataSetViewData[]
*/
public DataSetViewData[] getCurrentItemModel( boolean useColumnHint,
boolean suppressErrorMessage )
{
DataSetViewData[] dataSetItem = DataSetProvider.getCurrentInstance( )
.getColumns( ds,
itemModelChanged,
useColumnHint,
suppressErrorMessage );
itemModelChanged = false;
linkedParameterChanged = false;
return dataSetItem;
}
}
private static Set<String> getInternalPageNames( )
{
Set<String> result = new HashSet<String>( );
result.add( DATASET_SETTINGS_PAGE );
result.add( OUTPUT_PARAMETER_PREVIEW_PAGE );
result.add( DATASOURCE_EDITOR_PROPERTY_PAGE );
result.add( COMPUTED_COLUMNS_PAGE );
result.add( RESULTSET_PREVIEW_PAGE );
result.add( FILTERS_PAGE );
result.add( PARAMETERS_PAGE );
result.add( OUTPUTCOLUMN_PAGE );
result.add( JOINT_DATA_SET_PAGE );
result.add( DATA_SOURCE_SELECTION_PAGE );
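		// The remaining page ids come from ODA design UI contributions of roughly this shape
		// (illustrative; only the element and attribute names used below are assumed):
		//
		//   <extension point="org.eclipse.datatools.connectivity.oda.design.ui.dataSource">
		//     <dataSetUI ...>
		//       <dataSetPage id="..." .../>
		//     </dataSetUI>
		//   </extension>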
String extensionName = "org.eclipse.datatools.connectivity.oda.design.ui.dataSource";
IExtensionRegistry extReg = Platform.getExtensionRegistry( );
IExtensionPoint extPoint = extReg.getExtensionPoint( extensionName );
if ( extPoint == null )
return result;
IExtension[] exts = extPoint.getExtensions( );
if ( exts == null )
return result;
for ( int e = 0; e < exts.length; e++ )
{
IConfigurationElement[] configElems = exts[e].getConfigurationElements( );
if ( configElems == null )
continue;
for ( int i = 0; i < configElems.length; i++ )
{
if ( configElems[i].getName( ).equals( "dataSetUI" ) )
{
IConfigurationElement[] elems = configElems[i].getChildren( "dataSetPage" );
if ( elems != null && elems.length > 0 )
{
for ( int j = 0; j < elems.length; j++ )
{
String value = elems[j].getAttribute( "id" );
if ( value != null )
result.add( value );
}
}
}
}
}
return result;
}
}
|
package com.braintreepayments.api.internal;
import android.text.TextUtils;
import com.braintreepayments.api.BuildConfig;
import com.braintreepayments.api.exceptions.BraintreeApiErrorResponse;
import com.braintreepayments.api.exceptions.UnprocessableEntityException;
import com.braintreepayments.api.interfaces.HttpResponseCallback;
import java.io.IOException;
import java.net.HttpURLConnection;
import javax.net.ssl.SSLException;
/**
* Network request class that handles BraintreeApi request specifics and threading.
*/
public class BraintreeApiHttpClient extends HttpClient {
public static final String API_VERSION_2016_10_07 = "2016-10-07";
private final String mAuthorization;
private final String mApiVersion;
public BraintreeApiHttpClient(String baseUrl, String authorization) {
this(baseUrl, authorization, API_VERSION_2016_10_07);
}
public BraintreeApiHttpClient(String baseUrl, String authorization, String apiVersion) {
super();
mBaseUrl = baseUrl;
mAuthorization = authorization;
mApiVersion = apiVersion;
setUserAgent("braintree/android/" + BuildConfig.VERSION_NAME);
try {
setSSLSocketFactory(new TLSSocketFactory(BraintreeApiCertificate.getCertInputStream()));
        } catch (SSLException e) {
            // The pinned certificate could not be loaded; clear the SSL socket factory.
            setSSLSocketFactory(null);
        }
}
@Override
protected HttpURLConnection init(String url) throws IOException {
HttpURLConnection connection = super.init(url);
if (!TextUtils.isEmpty(mAuthorization)) {
connection.setRequestProperty("Authorization", "Bearer " + mAuthorization);
}
connection.setRequestProperty("Braintree-Version", mApiVersion);
return connection;
}
@Override
protected String parseResponse(HttpURLConnection connection) throws Exception {
try {
return super.parseResponse(connection);
} catch (UnprocessableEntityException e) {
throw new BraintreeApiErrorResponse(e.getMessage());
}
}
}
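// Usage sketch (illustrative only; the URL below and the inherited get(path, callback)
// helper are assumptions about the surrounding SDK, not defined in this file):
//
//   BraintreeApiHttpClient client =
//           new BraintreeApiHttpClient("https://payments.sandbox.braintree-api.com", authorization);
//   client.get("/ping", callback);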
|
package com.joelj.distributedinvoke;
import com.joelj.distributedinvoke.channels.ServerSocketRemoteChannel;
import com.joelj.distributedinvoke.logging.Logger;
import com.joelj.ezasync.EzAsync;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.*;
import java.net.InetAddress;
import java.util.concurrent.Callable;
public class RemoteMachineListener implements Closeable {
private static final Logger LOGGER = Logger.forClass(RemoteMachineListener.class);
private final InetAddress bindAddress;
private final int listeningPort;
private final transient Thread thread;
private final transient EzAsync ezAsync;
/**
* Creates the listener and starts listening.
* @param bindAddress The address to bind to.
* @param listeningPort The port to listen on.
* @return The new instance of RemoteMachineListener that is actively listening for a new connection.
*/
public static RemoteMachineListener start(@NotNull InetAddress bindAddress, int listeningPort) {
return new RemoteMachineListener(bindAddress, listeningPort);
}
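	// Usage sketch (illustrative; the address and port are arbitrary choices):
	//
	//   RemoteMachineListener listener =
	//           RemoteMachineListener.start(InetAddress.getLoopbackAddress(), 9123);
	//   // ... remote callers submit Callable requests over the channel ...
	//   listener.close();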
/**
* @param bindAddress The address to bind to.
* @param listeningPort Must be between 1 and 65535. On some operating systems, if the value is between 1 and 1024 the underlying JVM may need special privileges to open the socket.
*/
private RemoteMachineListener(@NotNull InetAddress bindAddress, int listeningPort) {
this.bindAddress = bindAddress;
this.listeningPort = listeningPort;
thread = new Thread(new ListeningThread(), this.getClass().getSimpleName());
thread.start();
Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
@Override
public void run() {
close();
}
}));
ezAsync = EzAsync.create();
}
@Override
public void close() {
thread.interrupt();
}
private class ListeningThread implements Runnable {
@Override
public void run() {
ServerSocketRemoteChannel channel;
try {
channel = ServerSocketRemoteChannel.create(listeningPort, bindAddress);
} catch (IOException e) {
throw new RuntimeException(e);
}
while(!Thread.interrupted() && !channel.isClosed()) {
ObjectInputStream inputStream;
try {
inputStream = channel.getInputStream();
} catch (InterruptedException e) {
LOGGER.error("Thread interrupted while obtaining the input stream. Attempting clean shutdown.");
break;
} catch (IOException e) {
throw new RuntimeException(e);
}
Object object;
try {
LOGGER.info("Waiting for request");
object = inputStream.readObject();
LOGGER.info("Received request");
} catch (IOException e) {
LOGGER.error(e);
continue;
} catch (ClassNotFoundException e) {
LOGGER.error("Class path is out of sync", e);
continue;
}
				if(object instanceof Transport) {
Transport transport = (Transport)object;
String requestId = transport.getId();
Object requestObject = transport.getObject();
if(requestObject instanceof Callable) {
LOGGER.info("Scheduling request to be executed");
//noinspection unchecked
ezAsync.execute((Callable) requestObject, new RequestCallback(requestId, channel));
LOGGER.info("Request execution scheduled");
} else {
LOGGER.error("Unexpected object type. Expected " + Callable.class.getCanonicalName() + " but was " + (requestObject == null ? "null" : requestObject.getClass().getCanonicalName()));
}
} else {
LOGGER.error("Unexpected object type. Expected " + Transport.class.getCanonicalName() + " but was " + (object == null ? "null" : object.getClass().getCanonicalName()));
}
}
try {
channel.close();
} catch (IOException e) {
throw new RuntimeException(e);
}
LOGGER.info("Listener cleanly shut down.");
}
}
private static class RequestCallback implements EzAsync.Callback<String> {
@NotNull
private final String id;
@NotNull
private final ServerSocketRemoteChannel channel;
public RequestCallback(@NotNull String id, @NotNull ServerSocketRemoteChannel channel) {
this.id = id;
this.channel = channel;
}
@Override
public void done(@Nullable String result) {
LOGGER.info("Done executing request and received result");
Transport<?> response = Transport.wrapWithId(result, id);
try {
ObjectOutputStream outputStream = channel.getOutputStream();
LOGGER.info("Writing response");
outputStream.writeObject(response);
} catch (IOException e) {
LOGGER.error("Couldn't write response.", e);
} catch (InterruptedException e) {
LOGGER.error("Thread interrupted. Closing channel.", e);
try {
channel.close();
} catch (IOException closeChannelException) {
LOGGER.error("Error while trying to close channel.", closeChannelException);
}
}
}
}
}
|
package ioichack2017.github.httpscontactgsuraj.iop;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothServerSocket;
import android.bluetooth.BluetoothSocket;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.widget.ListView;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Set;
import java.util.UUID;
public class BluetoothPairing extends AppCompatActivity {
public BluetoothDevice pillow;
    private static final int REQUEST_ENABLE_BT = 1;
private ListView listView;
private ArrayList<String> mDeviceList = new ArrayList<String>();
private static final UUID MY_UUID = UUID.fromString("0000110E-0000-1000-8000-00805F9B34FB");
public static BluetoothAdapter mBluetoothAdapter;
private BluetoothSocket mSocket;
private static Context mContext;
//the name of the device is Pillow
//standardised password
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.bluetooth_pairing);
}
@Override
protected void onResume() {
super.onResume();
mContext = BluetoothPairing.this;
// Do something in response to button
mBluetoothAdapter = BluetoothAdapter.getDefaultAdapter();
if (mBluetoothAdapter == null) {
// Device does not support Bluetooth
AlertDialog.Builder builder1 = new AlertDialog.Builder(mContext);
builder1.setMessage("Your device does not support bluetooth.");
builder1.setCancelable(true);
builder1.setPositiveButton(
"Ok",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
dialog.cancel();
}
});
AlertDialog alert11 = builder1.create();
            alert11.show();
            // Bail out: without a Bluetooth adapter the calls below would throw a NullPointerException.
            return;
        } else {
// If bluetooth is turned off
if (!mBluetoothAdapter.isEnabled()) {
Intent enableBtIntent = new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE);
startActivityForResult(enableBtIntent, REQUEST_ENABLE_BT);
}
}
Set<BluetoothDevice> pairedDevices = mBluetoothAdapter.getBondedDevices();
boolean pairSuccess = false;
for (BluetoothDevice device : pairedDevices) {
String deviceName = device.getName();
String deviceAddress = device.getAddress();
if (deviceName.equals("raspberrypi")) {
pairSuccess = true;
pillow = device;
}
}
if (mBluetoothAdapter.isEnabled()) {
if (pairSuccess) {
pairSuccess();
} else {
pairFailure();
}
}
}
private void pairSuccess() {
        // Check whether the pillow device is already bonded
        if (pillow.getBondState() == BluetoothDevice.BOND_BONDED) {
            Log.d("TAG ??", pillow.getName());
        }
try {
mSocket = pillow.createInsecureRfcommSocketToServiceRecord(MY_UUID);
} catch (IOException e1) {
// TODO Auto-generated catch block
Log.d("TAG","socket not created");
e1.printStackTrace();
}
try{
mSocket.connect();
}
catch(IOException e){
try {
mSocket.close();
Log.d("TAG","Cannot connect");
} catch (IOException e1) {
Log.d("TAG","Socket not closed");
e1.printStackTrace();
}
}
//Thread runThread = new Thread(new Runnable() {
// @Override
// public void run() {
// // Always cancel discovery because it will slow down a connection
// BluetoothAdapter.getDefaultAdapter().cancelDiscovery();
// // Make a connection to the BluetoothSocket
// try {
// // This is a blocking call and will only return on a
// // successful connection or an exception
// mSocket.connect();
// } catch (IOException e) {
// //connection to device failed so close the socket
// try {
// mSocket.close();
// } catch (IOException e2) {
// e2.printStackTrace();
}
private void pairFailure() {
AlertDialog.Builder builder1 = new AlertDialog.Builder(BluetoothPairing.this);
builder1.setMessage("Please pair this device in settings before proceeding.");
builder1.setCancelable(true);
builder1.setPositiveButton(
"Ok",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
dialog.cancel();
Intent myIntent = new Intent(BluetoothPairing.this, LoginScreen.class);
BluetoothPairing.this.startActivity(myIntent);
}
});
AlertDialog alert11 = builder1.create();
alert11.show();
}
private void processFailure(DialogInterface dialog) {
}
private void connectionSuccess() {
}
}
|
package com.sherpasteven.sscte.Controllers;
import android.content.Intent;
import android.net.Uri;
import android.view.View;
import android.widget.Button;
import android.widget.ImageButton;
import android.widget.Toast;
import com.sherpasteven.sscte.AddTradeActivity;
import com.sherpasteven.sscte.CardTradeActivity;
import com.sherpasteven.sscte.Models.Card;
import com.sherpasteven.sscte.Models.CurrentProfile;
import com.sherpasteven.sscte.Models.Friend;
import com.sherpasteven.sscte.Models.ISerializer;
import com.sherpasteven.sscte.Models.Email;
import com.sherpasteven.sscte.Models.LocalProfileSerializer;
import com.sherpasteven.sscte.Models.Profile;
import com.sherpasteven.sscte.Models.Trade;
import com.sherpasteven.sscte.Models.TradeComposer;
import com.sherpasteven.sscte.Models.TradeLog;
import com.sherpasteven.sscte.Models.User;
import com.sherpasteven.sscte.ViewTradeActivity;
import java.util.ArrayList;
public class ViewTradeController extends Controller<ViewTradeActivity, Trade> {
private final ViewTradeActivity view;
private Trade model;
private TradeLog tradelog;
private Profile profile;
private User owner;
private User borrower;
private ArrayList<Card> newborrowlist;
private ArrayList<Card> newownerlist;
Button acceptButton;
Button declineButton;
Button counterofferButton;
private LocalProfileSerializer profileSerializer = new LocalProfileSerializer();
/**
* Instantiates the controller on the proper activity.
*
 * @param view the activity displaying the trade
 * @param model the trade being viewed
*/
public ViewTradeController(ViewTradeActivity view, Trade model) {
super(view, model);
this.view = view;
this.model = model;
this.tradelog = CurrentProfile.getCurrentProfile().getProfile(view.getApplicationContext()).getUser().getTrades();
}
/**
 * Sets listeners on the view's buttons so the user's response can be processed.
 * Lets the user accept, decline, or counteroffer the trade.
 * @param view the view to set listeners on.
*/
@Override
protected void setListeners(final ViewTradeActivity view) {
acceptButton = view.getAcceptButton();
declineButton = view.getDeclineButton();
counterofferButton = view.getCounterOfferButton();
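        // Accepting a trade moves the traded cards between the two users' inventories (depending on
        // whether the current profile is the borrower or the owner), persists the local profile,
        // marks the trade ACCEPTED, increments both users' ratings and finalizes it in the trade log.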
acceptButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
owner = model.getOwner();
borrower = model.getBorrower();
profile = CurrentProfile.getCurrentProfile().getProfile(view.getApplicationContext());
newownerlist = model.getOwnerList();
newborrowlist = model.getBorrowList();
if(borrower.equals(profile.getUser())){
for(Card bc: newborrowlist){
borrower.getInventory().removeCard(bc, bc.getQuantity());
}
for(Card oc: newownerlist){
borrower.getInventory().addCard(oc);
}
profile.setUser(borrower);
} else {
for(Card bc: newborrowlist){
owner.getInventory().addCard(bc);
}
for(Card oc: newownerlist){
owner.getInventory().removeCard(oc, oc.getQuantity());
}
profile.setUser(owner);
}
setLocalProfile(profile);
model.setStatus("ACCEPTED");
model.getBorrower().incrementRating();
model.getOwner().incrementRating();
tradelog.tradeFinalized(model);
model.notifyViews();
//Email email = new Email();
//email.tradeEmail(model, view);
view.finish();
}
});
declineButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
model.setStatus("DECLINED");
tradelog.tradeFinalized(model);
model.notifyViews();
view.finish();
}
});
counterofferButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Trade countertrade = model.counterOffer();
                // Remember this trade's position in the pending list for the counteroffer
int position = tradelog.getPendingTrades().indexOf(model);
//tradelog.getPendingTrades().remove(model);
//tradelog.addCounterOfferTrade(model, countertrade);
/*TradeComposer.getTradeComposer().getComponents().setOwner(countertrade.getOwner());
TradeComposer.getTradeComposer().getComponents().setBorrower(countertrade.getBorrower());
TradeComposer.getTradeComposer().getComponents().setOwnerList(countertrade.getOwnerList());
TradeComposer.getTradeComposer().getComponents().setBorrowList(countertrade.getBorrowList());*/
TradeComposer.getTradeComposer().getComponents().setOwner(countertrade.getBorrower());
TradeComposer.getTradeComposer().getComponents().setBorrower(countertrade.getOwner());
TradeComposer.getTradeComposer().getComponents().setOwnerList(countertrade.getOwnerList());
TradeComposer.getTradeComposer().getComponents().setBorrowList(countertrade.getBorrowList());
TradeComposer.getTradeComposer().getComponents().setTradeId(countertrade.getId());
//tradelog.addCounterOfferTrade();
Intent intent = new Intent(v.getContext(), AddTradeActivity.class);
intent.putExtra("com.sherpasteven.sscte.counterindex", position);
model.notifyViews();
view.finish();
v.getContext().startActivity(intent);
}
});
}
private void setLocalProfile(Profile profile) {
ISerializer<Profile> serializer = new LocalProfileSerializer();
serializer.Serialize(profile, view);
}
}
|
package com.facebook.react.animated;
import android.util.SparseArray;
import androidx.annotation.Nullable;
import com.facebook.common.logging.FLog;
import com.facebook.infer.annotation.Assertions;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.Callback;
import com.facebook.react.bridge.JSApplicationCausedNativeException;
import com.facebook.react.bridge.JSApplicationIllegalArgumentException;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactNoCrashSoftException;
import com.facebook.react.bridge.ReactSoftException;
import com.facebook.react.bridge.ReadableArray;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.UIManager;
import com.facebook.react.bridge.UiThreadUtil;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.IllegalViewOperationException;
import com.facebook.react.uimanager.UIManagerHelper;
import com.facebook.react.uimanager.UIManagerModule;
import com.facebook.react.uimanager.events.Event;
import com.facebook.react.uimanager.events.EventDispatcher;
import com.facebook.react.uimanager.events.EventDispatcherListener;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Queue;
/**
* This is the main class that coordinates how native animated JS implementation drives UI changes.
*
* <p>It implements a management interface for animated nodes graph as well as implements a graph
* traversal algorithm that is run for each animation frame.
*
* <p>For each animation frame we visit animated nodes that might've been updated as well as their
* children that may use parent's values to update themselves. At the end of the traversal algorithm
* we expect to reach a special type of the node: PropsAnimatedNode that is then responsible for
* calculating property map which can be sent to native view hierarchy to update the view.
*
* <p>IMPORTANT: This class should be accessed only from the UI Thread
*/
/*package*/ class NativeAnimatedNodesManager implements EventDispatcherListener {
private static final String TAG = "NativeAnimatedNodesManager";
private static final int MAX_INCONSISTENT_FRAMES = 64;
private final SparseArray<AnimatedNode> mAnimatedNodes = new SparseArray<>();
private final SparseArray<AnimationDriver> mActiveAnimations = new SparseArray<>();
private final SparseArray<AnimatedNode> mUpdatedNodes = new SparseArray<>();
  // Mapping of a view tag and an event name to a list of event animation drivers. 99% of the time
  // there will be only one driver per mapping, so all code should be optimized around that.
private final Map<String, List<EventAnimationDriver>> mEventDrivers = new HashMap<>();
private final UIManagerModule.CustomEventNamesResolver mCustomEventNamesResolver;
private final ReactApplicationContext mReactApplicationContext;
private int mAnimatedGraphBFSColor = 0;
private int mNumInconsistentFrames = 0;
// Used to avoid allocating a new array on every frame in `runUpdates` and `onEventDispatch`.
private final List<AnimatedNode> mRunUpdateNodeList = new LinkedList<>();
public NativeAnimatedNodesManager(ReactApplicationContext reactApplicationContext) {
mReactApplicationContext = reactApplicationContext;
UIManagerModule uiManager =
Assertions.assertNotNull(reactApplicationContext.getNativeModule(UIManagerModule.class));
uiManager.<EventDispatcher>getEventDispatcher().addListener(this);
// TODO T64216139 Remove dependency of UIManagerModule when the Constants are not in Native
// anymore
mCustomEventNamesResolver = uiManager.getDirectEventNamesResolver();
}
/*package*/ @Nullable
AnimatedNode getNodeById(int id) {
return mAnimatedNodes.get(id);
}
public boolean hasActiveAnimations() {
return mActiveAnimations.size() > 0 || mUpdatedNodes.size() > 0;
}
public void createAnimatedNode(int tag, ReadableMap config) {
if (mAnimatedNodes.get(tag) != null) {
throw new JSApplicationIllegalArgumentException(
"Animated node with tag " + tag + " already exists");
}
String type = config.getString("type");
final AnimatedNode node;
if ("style".equals(type)) {
node = new StyleAnimatedNode(config, this);
} else if ("value".equals(type)) {
node = new ValueAnimatedNode(config);
} else if ("props".equals(type)) {
node = new PropsAnimatedNode(config, this);
} else if ("interpolation".equals(type)) {
node = new InterpolationAnimatedNode(config);
} else if ("addition".equals(type)) {
node = new AdditionAnimatedNode(config, this);
} else if ("subtraction".equals(type)) {
node = new SubtractionAnimatedNode(config, this);
} else if ("division".equals(type)) {
node = new DivisionAnimatedNode(config, this);
} else if ("multiplication".equals(type)) {
node = new MultiplicationAnimatedNode(config, this);
} else if ("modulus".equals(type)) {
node = new ModulusAnimatedNode(config, this);
} else if ("diffclamp".equals(type)) {
node = new DiffClampAnimatedNode(config, this);
} else if ("transform".equals(type)) {
node = new TransformAnimatedNode(config, this);
} else if ("tracking".equals(type)) {
node = new TrackingAnimatedNode(config, this);
} else {
throw new JSApplicationIllegalArgumentException("Unsupported node type: " + type);
}
node.mTag = tag;
mAnimatedNodes.put(tag, node);
mUpdatedNodes.put(tag, node);
}
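  // Example (illustrative) of the config this method receives for a plain value node, roughly
  // mirroring what Animated.js sends over the bridge:
  //
  //   { "type": "value", "value": 0, "offset": 0 }
  //
  // The "type" field selects which AnimatedNode subclass is instantiated above.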
public void dropAnimatedNode(int tag) {
mAnimatedNodes.remove(tag);
mUpdatedNodes.remove(tag);
}
public void startListeningToAnimatedNodeValue(int tag, AnimatedNodeValueListener listener) {
AnimatedNode node = mAnimatedNodes.get(tag);
if (node == null || !(node instanceof ValueAnimatedNode)) {
throw new JSApplicationIllegalArgumentException(
"Animated node with tag " + tag + " does not exist, or is not a 'value' node");
}
((ValueAnimatedNode) node).setValueListener(listener);
}
public void stopListeningToAnimatedNodeValue(int tag) {
AnimatedNode node = mAnimatedNodes.get(tag);
if (node == null || !(node instanceof ValueAnimatedNode)) {
throw new JSApplicationIllegalArgumentException(
"Animated node with tag " + tag + " does not exist, or is not a 'value' node");
}
((ValueAnimatedNode) node).setValueListener(null);
}
public void setAnimatedNodeValue(int tag, double value) {
AnimatedNode node = mAnimatedNodes.get(tag);
if (node == null || !(node instanceof ValueAnimatedNode)) {
throw new JSApplicationIllegalArgumentException(
"Animated node with tag " + tag + " does not exist, or is not a 'value' node");
}
stopAnimationsForNode(node);
((ValueAnimatedNode) node).mValue = value;
mUpdatedNodes.put(tag, node);
}
public void setAnimatedNodeOffset(int tag, double offset) {
AnimatedNode node = mAnimatedNodes.get(tag);
if (node == null || !(node instanceof ValueAnimatedNode)) {
throw new JSApplicationIllegalArgumentException(
"Animated node with tag " + tag + " does not exist, or is not a 'value' node");
}
((ValueAnimatedNode) node).mOffset = offset;
mUpdatedNodes.put(tag, node);
}
public void flattenAnimatedNodeOffset(int tag) {
AnimatedNode node = mAnimatedNodes.get(tag);
if (node == null || !(node instanceof ValueAnimatedNode)) {
throw new JSApplicationIllegalArgumentException(
"Animated node with tag " + tag + " does not exist, or is not a 'value' node");
}
((ValueAnimatedNode) node).flattenOffset();
}
public void extractAnimatedNodeOffset(int tag) {
AnimatedNode node = mAnimatedNodes.get(tag);
if (node == null || !(node instanceof ValueAnimatedNode)) {
throw new JSApplicationIllegalArgumentException(
"Animated node with tag " + tag + " does not exist, or is not a 'value' node");
}
((ValueAnimatedNode) node).extractOffset();
}
public void startAnimatingNode(
int animationId, int animatedNodeTag, ReadableMap animationConfig, Callback endCallback) {
AnimatedNode node = mAnimatedNodes.get(animatedNodeTag);
if (node == null) {
throw new JSApplicationIllegalArgumentException(
"Animated node with tag " + animatedNodeTag + " does not exist");
}
if (!(node instanceof ValueAnimatedNode)) {
throw new JSApplicationIllegalArgumentException(
"Animated node should be of type " + ValueAnimatedNode.class.getName());
}
final AnimationDriver existingDriver = mActiveAnimations.get(animationId);
if (existingDriver != null) {
// animation with the given ID is already running, we need to update its configuration instead
// of spawning a new one
existingDriver.resetConfig(animationConfig);
return;
}
String type = animationConfig.getString("type");
final AnimationDriver animation;
if ("frames".equals(type)) {
animation = new FrameBasedAnimationDriver(animationConfig);
} else if ("spring".equals(type)) {
animation = new SpringAnimation(animationConfig);
} else if ("decay".equals(type)) {
animation = new DecayAnimation(animationConfig);
} else {
throw new JSApplicationIllegalArgumentException("Unsupported animation type: " + type);
}
animation.mId = animationId;
animation.mEndCallback = endCallback;
animation.mAnimatedValue = (ValueAnimatedNode) node;
mActiveAnimations.put(animationId, animation);
}
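  // Example (illustrative, not exhaustive) animationConfig for a frame-based timing animation:
  //
  //   { "type": "frames", "frames": [0, 0.25, 0.5, 0.75, 1], "toValue": 1 }
  //
  // "spring" and "decay" configs carry the parameters their respective drivers expect.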
private void stopAnimationsForNode(AnimatedNode animatedNode) {
// in most of the cases there should never be more than a few active animations running at the
// same time. Therefore it does not make much sense to create an animationId -> animation
// object map that would require additional memory just to support the use-case of stopping
// an animation
for (int i = 0; i < mActiveAnimations.size(); i++) {
AnimationDriver animation = mActiveAnimations.valueAt(i);
if (animatedNode.equals(animation.mAnimatedValue)) {
if (animation.mEndCallback != null) {
// Invoke animation end callback with {finished: false}
WritableMap endCallbackResponse = Arguments.createMap();
endCallbackResponse.putBoolean("finished", false);
animation.mEndCallback.invoke(endCallbackResponse);
}
mActiveAnimations.removeAt(i);
        i--;
}
}
}
public void stopAnimation(int animationId) {
// in most of the cases there should never be more than a few active animations running at the
// same time. Therefore it does not make much sense to create an animationId -> animation
// object map that would require additional memory just to support the use-case of stopping
// an animation
for (int i = 0; i < mActiveAnimations.size(); i++) {
AnimationDriver animation = mActiveAnimations.valueAt(i);
if (animation.mId == animationId) {
if (animation.mEndCallback != null) {
// Invoke animation end callback with {finished: false}
WritableMap endCallbackResponse = Arguments.createMap();
endCallbackResponse.putBoolean("finished", false);
animation.mEndCallback.invoke(endCallbackResponse);
}
mActiveAnimations.removeAt(i);
return;
}
}
// Do not throw an error in the case animation could not be found. We only keep "active"
// animations in the registry and there is a chance that Animated.js will enqueue a
// stopAnimation call after the animation has ended or the call will reach native thread only
// when the animation is already over.
}
public void connectAnimatedNodes(int parentNodeTag, int childNodeTag) {
AnimatedNode parentNode = mAnimatedNodes.get(parentNodeTag);
if (parentNode == null) {
throw new JSApplicationIllegalArgumentException(
"Animated node with tag " + parentNodeTag + " does not exists");
}
AnimatedNode childNode = mAnimatedNodes.get(childNodeTag);
if (childNode == null) {
throw new JSApplicationIllegalArgumentException(
"Animated node with tag " + childNodeTag + " does not exists");
}
parentNode.addChild(childNode);
mUpdatedNodes.put(childNodeTag, childNode);
}
public void disconnectAnimatedNodes(int parentNodeTag, int childNodeTag) {
AnimatedNode parentNode = mAnimatedNodes.get(parentNodeTag);
if (parentNode == null) {
throw new JSApplicationIllegalArgumentException(
"Animated node with tag " + parentNodeTag + " does not exists");
}
AnimatedNode childNode = mAnimatedNodes.get(childNodeTag);
if (childNode == null) {
throw new JSApplicationIllegalArgumentException(
"Animated node with tag " + childNodeTag + " does not exists");
}
parentNode.removeChild(childNode);
mUpdatedNodes.put(childNodeTag, childNode);
}
public void connectAnimatedNodeToView(int animatedNodeTag, int viewTag) {
AnimatedNode node = mAnimatedNodes.get(animatedNodeTag);
if (node == null) {
throw new JSApplicationIllegalArgumentException(
"Animated node with tag " + animatedNodeTag + " does not exists");
}
if (!(node instanceof PropsAnimatedNode)) {
throw new JSApplicationIllegalArgumentException(
"Animated node connected to view should be"
+ "of type "
+ PropsAnimatedNode.class.getName());
}
if (mReactApplicationContext == null) {
throw new IllegalStateException(
"Animated node could not be connected, no ReactApplicationContext: " + viewTag);
}
@Nullable
UIManager uiManager =
UIManagerHelper.getUIManagerForReactTag(mReactApplicationContext, viewTag);
if (uiManager == null) {
ReactSoftException.logSoftException(
TAG,
new ReactNoCrashSoftException(
"Animated node could not be connected to UIManager - uiManager disappeared for tag: "
+ viewTag));
return;
}
PropsAnimatedNode propsAnimatedNode = (PropsAnimatedNode) node;
propsAnimatedNode.connectToView(viewTag, uiManager);
mUpdatedNodes.put(animatedNodeTag, node);
}
public void disconnectAnimatedNodeFromView(int animatedNodeTag, int viewTag) {
AnimatedNode node = mAnimatedNodes.get(animatedNodeTag);
if (node == null) {
throw new JSApplicationIllegalArgumentException(
"Animated node with tag " + animatedNodeTag + " does not exists");
}
if (!(node instanceof PropsAnimatedNode)) {
throw new JSApplicationIllegalArgumentException(
"Animated node connected to view should be"
+ "of type "
+ PropsAnimatedNode.class.getName());
}
PropsAnimatedNode propsAnimatedNode = (PropsAnimatedNode) node;
propsAnimatedNode.disconnectFromView(viewTag);
}
public void restoreDefaultValues(int animatedNodeTag) {
AnimatedNode node = mAnimatedNodes.get(animatedNodeTag);
// Restoring default values needs to happen before UIManager operations so it is
// possible the node hasn't been created yet if it is being connected and
// disconnected in the same batch. In that case we don't need to restore
// default values since it will never actually update the view.
if (node == null) {
return;
}
if (!(node instanceof PropsAnimatedNode)) {
throw new JSApplicationIllegalArgumentException(
"Animated node connected to view should be"
+ "of type "
+ PropsAnimatedNode.class.getName());
}
PropsAnimatedNode propsAnimatedNode = (PropsAnimatedNode) node;
propsAnimatedNode.restoreDefaultValues();
}
public void addAnimatedEventToView(int viewTag, String eventName, ReadableMap eventMapping) {
int nodeTag = eventMapping.getInt("animatedValueTag");
AnimatedNode node = mAnimatedNodes.get(nodeTag);
if (node == null) {
throw new JSApplicationIllegalArgumentException(
"Animated node with tag " + nodeTag + " does not exists");
}
if (!(node instanceof ValueAnimatedNode)) {
throw new JSApplicationIllegalArgumentException(
"Animated node connected to event should be"
+ "of type "
+ ValueAnimatedNode.class.getName());
}
ReadableArray path = eventMapping.getArray("nativeEventPath");
List<String> pathList = new ArrayList<>(path.size());
for (int i = 0; i < path.size(); i++) {
pathList.add(path.getString(i));
}
EventAnimationDriver event = new EventAnimationDriver(pathList, (ValueAnimatedNode) node);
String key = viewTag + eventName;
if (mEventDrivers.containsKey(key)) {
mEventDrivers.get(key).add(event);
} else {
List<EventAnimationDriver> drivers = new ArrayList<>(1);
drivers.add(event);
mEventDrivers.put(key, drivers);
}
}
public void removeAnimatedEventFromView(int viewTag, String eventName, int animatedValueTag) {
String key = viewTag + eventName;
if (mEventDrivers.containsKey(key)) {
List<EventAnimationDriver> driversForKey = mEventDrivers.get(key);
if (driversForKey.size() == 1) {
mEventDrivers.remove(viewTag + eventName);
} else {
ListIterator<EventAnimationDriver> it = driversForKey.listIterator();
while (it.hasNext()) {
if (it.next().mValueNode.mTag == animatedValueTag) {
it.remove();
break;
}
}
}
}
}
@Override
public void onEventDispatch(final Event event) {
// Events can be dispatched from any thread so we have to make sure handleEvent is run from the
// UI thread.
if (UiThreadUtil.isOnUiThread()) {
handleEvent(event);
} else {
UiThreadUtil.runOnUiThread(
new Runnable() {
@Override
public void run() {
handleEvent(event);
}
});
}
}
private void handleEvent(Event event) {
if (!mEventDrivers.isEmpty()) {
      // If the event has a different name in native, convert it to its JS name.
String eventName = mCustomEventNamesResolver.resolveCustomEventName(event.getEventName());
List<EventAnimationDriver> driversForKey = mEventDrivers.get(event.getViewTag() + eventName);
if (driversForKey != null) {
for (EventAnimationDriver driver : driversForKey) {
stopAnimationsForNode(driver.mValueNode);
event.dispatch(driver);
mRunUpdateNodeList.add(driver.mValueNode);
}
updateNodes(mRunUpdateNodeList);
mRunUpdateNodeList.clear();
}
}
}
/**
* Animation loop performs two BFSes over the graph of animated nodes. We use incremented {@code
* mAnimatedGraphBFSColor} to mark nodes as visited in each of the BFSes which saves additional
* loops for clearing "visited" states.
*
* <p>First BFS starts with nodes that are in {@code mUpdatedNodes} (that is, their value have
* been modified from JS in the last batch of JS operations) or directly attached to an active
* animation (hence linked to objects from {@code mActiveAnimations}). In that step we calculate
* an attribute {@code mActiveIncomingNodes}. The second BFS runs in topological order over the
* sub-graph of *active* nodes. This is done by adding node to the BFS queue only if all its
* "predecessors" have already been visited.
*/
public void runUpdates(long frameTimeNanos) {
UiThreadUtil.assertOnUiThread();
boolean hasFinishedAnimations = false;
for (int i = 0; i < mUpdatedNodes.size(); i++) {
AnimatedNode node = mUpdatedNodes.valueAt(i);
mRunUpdateNodeList.add(node);
}
// Clean mUpdatedNodes queue
mUpdatedNodes.clear();
for (int i = 0; i < mActiveAnimations.size(); i++) {
AnimationDriver animation = mActiveAnimations.valueAt(i);
animation.runAnimationStep(frameTimeNanos);
AnimatedNode valueNode = animation.mAnimatedValue;
mRunUpdateNodeList.add(valueNode);
if (animation.mHasFinished) {
hasFinishedAnimations = true;
}
}
updateNodes(mRunUpdateNodeList);
mRunUpdateNodeList.clear();
    // Cleanup finished animations. Iterate over the array of animations, remove the ones that
    // have finished, and let `mActiveAnimations` shrink accordingly.
if (hasFinishedAnimations) {
      for (int i = mActiveAnimations.size() - 1; i >= 0; i--) {
AnimationDriver animation = mActiveAnimations.valueAt(i);
if (animation.mHasFinished) {
if (animation.mEndCallback != null) {
WritableMap endCallbackResponse = Arguments.createMap();
endCallbackResponse.putBoolean("finished", true);
animation.mEndCallback.invoke(endCallbackResponse);
}
mActiveAnimations.removeAt(i);
}
}
}
}
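  // Worked example (illustrative) of the two-pass traversal implemented below: suppose node X
  // feeds nodes Y and Z, and Y also feeds Z. Step 1, starting from {X}, marks X, Y and Z as
  // active and records incoming counts Y=1, Z=2. Step 2 starts from X (zero incoming edges),
  // updates it, then decrements its children's counters: Y drops to 0 and is enqueued, Z drops
  // to 1 and waits. Once Y has been updated, Z's counter reaches 0 and Z is updated last, so it
  // always sees fresh values from both of its parents.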
private void updateNodes(List<AnimatedNode> nodes) {
int activeNodesCount = 0;
int updatedNodesCount = 0;
// STEP 1.
// BFS over graph of nodes. Update `mIncomingNodes` attribute for each node during that BFS.
// Store number of visited nodes in `activeNodesCount`. We "execute" active animations as a part
// of this step.
mAnimatedGraphBFSColor++; /* use new color */
if (mAnimatedGraphBFSColor == AnimatedNode.INITIAL_BFS_COLOR) {
// value "0" is used as an initial color for a new node, using it in BFS may cause some nodes
// to be skipped.
mAnimatedGraphBFSColor++;
}
Queue<AnimatedNode> nodesQueue = new ArrayDeque<>();
for (AnimatedNode node : nodes) {
if (node.mBFSColor != mAnimatedGraphBFSColor) {
node.mBFSColor = mAnimatedGraphBFSColor;
activeNodesCount++;
nodesQueue.add(node);
}
}
while (!nodesQueue.isEmpty()) {
AnimatedNode nextNode = nodesQueue.poll();
if (nextNode.mChildren != null) {
for (int i = 0; i < nextNode.mChildren.size(); i++) {
AnimatedNode child = nextNode.mChildren.get(i);
child.mActiveIncomingNodes++;
if (child.mBFSColor != mAnimatedGraphBFSColor) {
child.mBFSColor = mAnimatedGraphBFSColor;
activeNodesCount++;
nodesQueue.add(child);
}
}
}
}
// STEP 2
// BFS over the graph of active nodes in topological order -> visit node only when all its
// "predecessors" in the graph have already been visited. It is important to visit nodes in that
// order as they may often use values of their predecessors in order to calculate "next state"
// of their own. We start by determining the starting set of nodes by looking for nodes with
// `mActiveIncomingNodes = 0` (those can only be the ones that we start BFS in the previous
// step). We store number of visited nodes in this step in `updatedNodesCount`
mAnimatedGraphBFSColor++;
if (mAnimatedGraphBFSColor == AnimatedNode.INITIAL_BFS_COLOR) {
// see reasoning for this check a few lines above
mAnimatedGraphBFSColor++;
}
// find nodes with zero "incoming nodes", those can be either nodes from `mUpdatedNodes` or
// ones connected to active animations
for (AnimatedNode node : nodes) {
if (node.mActiveIncomingNodes == 0 && node.mBFSColor != mAnimatedGraphBFSColor) {
node.mBFSColor = mAnimatedGraphBFSColor;
updatedNodesCount++;
nodesQueue.add(node);
}
}
// Run main "update" loop
boolean errorsCaught = false;
while (!nodesQueue.isEmpty()) {
AnimatedNode nextNode = nodesQueue.poll();
try {
nextNode.update();
if (nextNode instanceof PropsAnimatedNode) {
// Send property updates to native view manager
((PropsAnimatedNode) nextNode).updateView();
}
} catch (IllegalViewOperationException e) {
        // An exception is thrown if the view hasn't been created yet. This can happen because
        // views are created in batches. If this particular view didn't make it into a batch yet,
        // the view won't exist and an exception will be thrown when attempting to start an
        // animation on it.
        // Eat the exception rather than crashing. The impact is that we may drop one or more
        // frames of the animation.
FLog.e(TAG, "Native animation workaround, frame lost as result of race condition", e);
} catch (JSApplicationCausedNativeException e) {
        // In Fabric there can be race conditions between the JS thread setting up or tearing down
        // animated nodes, and Fabric executing them on the UI thread, leading to temporary
        // inconsistent states. We require that the inconsistency last for N frames before
        // throwing these exceptions.
if (!errorsCaught) {
errorsCaught = true;
mNumInconsistentFrames++;
}
if (mNumInconsistentFrames > MAX_INCONSISTENT_FRAMES) {
throw new IllegalStateException(e);
} else {
FLog.e(
TAG,
"Swallowing exception due to potential race between JS and UI threads: inconsistent frame counter: "
+ mNumInconsistentFrames,
e);
}
}
if (nextNode instanceof ValueAnimatedNode) {
// Potentially send events to JS when the node's value is updated
((ValueAnimatedNode) nextNode).onValueUpdate();
}
if (nextNode.mChildren != null) {
for (int i = 0; i < nextNode.mChildren.size(); i++) {
AnimatedNode child = nextNode.mChildren.get(i);
          child.mActiveIncomingNodes--;
if (child.mBFSColor != mAnimatedGraphBFSColor && child.mActiveIncomingNodes == 0) {
child.mBFSColor = mAnimatedGraphBFSColor;
updatedNodesCount++;
nodesQueue.add(child);
}
}
}
}
// Verify that we've visited *all* active nodes. Throw otherwise as this would mean there is a
// cycle in animated node graph. We also take advantage of the fact that all active nodes are
// visited in the step above so that all the nodes properties `mActiveIncomingNodes` are set to
// zero.
// In Fabric there can be race conditions between the JS thread setting up or tearing down
// animated nodes, and Fabric executing them on the UI thread, leading to temporary inconsistent
// states. We require that the inconsistency last for 64 frames before throwing this exception.
if (activeNodesCount != updatedNodesCount) {
if (!errorsCaught) {
mNumInconsistentFrames++;
}
if (mNumInconsistentFrames > MAX_INCONSISTENT_FRAMES) {
throw new IllegalStateException(
"Looks like animated nodes graph has cycles, there are "
+ activeNodesCount
+ " but toposort visited only "
+ updatedNodesCount);
}
} else if (!errorsCaught) {
mNumInconsistentFrames = 0;
}
}
}
|
package org.openhab.io.transport.cul.internal;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.net.SocketException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import org.openhab.io.transport.cul.CULCommunicationException;
import org.openhab.io.transport.cul.CULDeviceException;
import org.openhab.io.transport.cul.CULHandler;
import org.openhab.io.transport.cul.CULListener;
import org.openhab.io.transport.cul.CULMode;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Abstract base class for all CULHandler which brings some convenience
* regarding registering listeners and detecting forbidden messages.
*
* @author Till Klocke
* @since 1.4.0
*/
public abstract class AbstractCULHandler implements CULHandler,
CULHandlerInternal {
private final static Logger log = LoggerFactory
.getLogger(AbstractCULHandler.class);
/**
* Thread which sends all queued commands to the CUL.
*
* @author Till Klocke
* @since 1.4.0
*
*/
private class SendThread extends Thread {
private final Logger logger = LoggerFactory.getLogger(SendThread.class);
@Override
public void run() {
while (!isInterrupted()) {
String command = sendQueue.poll();
if (command != null) {
if (!command.endsWith("\r\n")) {
command = command + "\r\n";
}
try {
writeMessage(command);
} catch (CULCommunicationException e) {
logger.error("Error while writing command to CUL", e);
}
}
try {
Thread.sleep(10);
} catch (InterruptedException e) {
logger.debug("Error while sleeping in SendThread", e);
}
}
}
}
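	// Design note: commands queued via send()/sendWithoutCheck() are drained by this thread in a
	// simple 10 ms poll loop and handed to writeMessage(), which is implemented elsewhere in this
	// handler hierarchy (not shown here) by the concrete transport.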
/**
 * Wraps a CULListener and a received String and gets
 * executed by an executor in its own thread.
*
* @author Till Klocke
* @since 1.4.0
*
*/
private static class NotifyDataReceivedRunner implements Runnable {
private String message;
private CULListener listener;
public NotifyDataReceivedRunner(CULListener listener, String message) {
this.message = message;
this.listener = listener;
}
@Override
public void run() {
listener.dataReceived(message);
}
}
/**
* Executor to handle received messages. Every listener should be called in
* its own thread.
*/
protected Executor receiveExecutor = Executors.newCachedThreadPool();
protected SendThread sendThread = new SendThread();
protected String deviceName;
protected CULMode mode;
protected List<CULListener> listeners = new ArrayList<CULListener>();
protected Queue<String> sendQueue = new ConcurrentLinkedQueue<String>();
protected int credit10ms = 0;
protected BufferedReader br;
protected BufferedWriter bw;
protected AbstractCULHandler(String deviceName, CULMode mode) {
this.mode = mode;
this.deviceName = deviceName;
}
@Override
public CULMode getCULMode() {
return mode;
}
@Override
public abstract boolean arePropertiesEqual(Map<String, ?> properties);
@Override
public void registerListener(CULListener listener) {
if (listener != null) {
listeners.add(listener);
}
}
@Override
public void unregisterListener(CULListener listener) {
if (listener != null) {
listeners.remove(listener);
}
}
@Override
public boolean hasListeners() {
return listeners.size() > 0;
}
@Override
public void open() throws CULDeviceException {
openHardware();
sendThread.start();
}
@Override
public void close() {
sendThread.interrupt();
closeHardware();
}
/**
* initialize the CUL hardware and open the connection
*
* @throws CULDeviceException
*/
protected abstract void openHardware() throws CULDeviceException;
/**
* Close the connection to the hardware and clean up all resources.
*/
protected abstract void closeHardware();
@Override
public void send(String command) {
if (isMessageAllowed(command)) {
sendQueue.add(command);
}
}
@Override
public void sendWithoutCheck(String message)
throws CULCommunicationException {
sendQueue.add(message);
}
/**
* Checks if the message would alter the RF mode of this device.
*
* @param message
* The message to check
* @return true if the message doesn't alter the RF mode, false if it does.
*/
protected boolean isMessageAllowed(String message) {
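// "X"/"x" and "Ar" commands would change the reception/RF mode of the CUL, so they are rejected here.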
if (message.startsWith("X") || message.startsWith("x")) {
return false;
}
if (message.startsWith("Ar")) {
return false;
}
return true;
}
/**
* Notifies each CULListener about the received data in its own thread.
*
* @param data
*/
protected void notifyDataReceived(String data) {
for (final CULListener listener : listeners) {
receiveExecutor
.execute(new NotifyDataReceivedRunner(listener, data));
}
}
protected void notifyError(Exception e) {
for (CULListener listener : listeners) {
listener.error(e);
}
}
/**
* read and process next line from underlying transport.
*
* @throws CULCommunicationException
*             if the underlying transport can't be read or re-opened
*/
protected void processNextLine() throws CULCommunicationException {
try {
String data = br.readLine();
if (data == null) {
String msg = "EOF encountered for " + deviceName;
log.error(msg);
throw new CULCommunicationException(msg);
}
log.debug("Received raw message from CUL: " + data);
if ("EOB".equals(data)) {
log.warn("(EOB) End of Buffer. Last message lost. Try sending less messages per time slot to the CUL");
return;
} else if ("LOVF".equals(data)) {
log.warn("(LOVF) Limit Overflow: Last message lost. You are using more than 1% transmitting time. Reduce the number of rf messages");
return;
} else if (data.matches("^\\d+\\s+\\d+")) {
processCreditReport(data);
return;
}
notifyDataReceived(data);
requestCreditReport();
} catch (SocketException e) {
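// On a socket error try to re-open the underlying connection once before reporting the failure.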
try {
this.openHardware();
} catch (CULDeviceException e1) {
log.error("Exception while reading from CUL port " + deviceName, e);
notifyError(e);
throw new CULCommunicationException(e);
}
} catch (IOException e) {
log.error("Exception while reading from CUL port " + deviceName, e);
notifyError(e);
throw new CULCommunicationException(e);
}
}
/**
* process data received from credit report
*
* @param data
*/
private void processCreditReport(String data) {
// Credit report received
String[] report = data.split(" ");
credit10ms = Integer.parseInt(report[report.length - 1]);
log.debug("credit10ms = " + credit10ms);
}
/**
* get the remaining send time on channel as seen at the last send/receive
* event.
*
* @return remaining send time in 10ms units
*/
public int getCredit10ms() {
return credit10ms;
}
/**
* write out request for a credit report directly to CUL
*/
private void requestCreditReport() {
/* this requests a report which provides credit10ms */
log.debug("Requesting credit report");
try {
bw.write("X\r\n");
bw.flush();
} catch (IOException e) {
log.error("Can't write report command to CUL", e);
}
}
/**
* Write a message to the CUL.
*
* @param message
* @throws CULCommunicationException
*/
private void writeMessage(String message) throws CULCommunicationException {
log.debug("Sending raw message to CUL " + deviceName + ": '" + message
+ "'");
if (bw == null) {
log.error("Can't write message, BufferedWriter is NULL");
return;
}
synchronized (bw) {
try {
bw.write(message);
bw.flush();
} catch (IOException e) {
log.error("Can't write to CUL " + deviceName, e);
}
requestCreditReport();
}
}
}
|
package com.hubspot.singularity;
import com.wordnik.swagger.annotations.ApiModel;
@ApiModel(description = "Singularity's view of a Mesos slave")
public class SingularitySlaveUsageWithId extends SingularitySlaveUsage {
private final String slaveId;
public SingularitySlaveUsageWithId(SingularitySlaveUsage usage, String slaveId) {
super(usage.getMemoryBytesUsed(), usage.getTimestamp(), usage.getCpusUsed(), usage.getNumTasks(), usage.getMemoryTotal(), usage.getCpuTotal(), usage.getUsagePerRequestType());
this.slaveId = slaveId;
}
public String getSlaveId() {
return slaveId;
}
@Override
public String toString() {
return "SingularitySlaveUsageWithId [slaveId=" + slaveId + ", super=" + super.toString() + "]";
}
}
|
package com.eju.ess.common.utils.seri;
import com.eju.ess.common.utils.md5.MD5Util;
import com.relops.snowflake.Snowflake;
public class SnowflakeUtil {
private SnowflakeUtil() {}
private final static int WORKID;
private static volatile Snowflake snowflake;
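// Derive a pseudo-random 10-bit worker id: hash the current timestamp with MD5,
// convert it to a binary string and parse its last 10 digits as the id.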
static{
String md5Str = MD5Util.getMD5Str(String.valueOf(System.currentTimeMillis()));
String binStr = StrToBinstr(md5Str);
String LastStr = binStr.substring(binStr.length() - 10, binStr.length());
WORKID = Integer.parseInt(LastStr, 2);
}
public static Snowflake getInstance() {
if (snowflake == null) {
synchronized (SnowflakeUtil.class) {
//instance
if (snowflake == null) {
snowflake = new Snowflake(WORKID);
}
}
}
return snowflake;
}
/*public static Snowflake getSnowflake() {
String md5Str = MD5Util.getMD5Str(String.valueOf(System.currentTimeMillis()));
String binStr = StrToBinstr(md5Str);
String LastStr = binStr.substring(binStr.length() - 10, binStr.length());
int workId = Integer.parseInt(LastStr, 2);
Snowflake snowflake = new Snowflake(WORKID);
return snowflake;
}*/
private static String StrToBinstr(String str) {
char[] strChar = str.toCharArray();
StringBuilder result = new StringBuilder();
for (int i = 0; i < strChar.length; i++) {
result.append(Integer.toBinaryString(strChar[i]));
}
return result.toString();
}
}
|
package ca.uwaterloo.joos.parser;
import java.util.List;
import ca.uwaterloo.joos.parser.LR1.ProductionRule;
import ca.uwaterloo.joos.scanner.Token;
/**
* @author Greg Wang
*
*/
public class ParseTree {
public interface Node {
public String toString(int level);
public String getKind();
}
public static class TreeNode implements Node {
public final ProductionRule productionRule;
public final List<Node> children;
public TreeNode(ProductionRule productionRule, List<Node> children) {
this.productionRule = productionRule;
this.children = children;
}
@Override
public String toString() {
return this.productionRule.toString();
}
public String toString(int level) {
String str = "";
for(int i = 0; i < level; i++) {
str += " ";
}
str += this.productionRule + "\n";
for(Node node: this.children) str += node.toString(level + 1);
return str;
}
@Override
public String getKind() {
return this.productionRule.getLefthand();
}
}
public static class LeafNode implements Node {
public final Token token;
public LeafNode(Token token) {
this.token = token;
}
@Override
public String toString() {
return this.token.toString();
}
public String toString(int level) {
String str = "";
for(int i = 0; i < level; i++) {
str += " ";
}
str += this.token + "\n";
return str;
}
@Override
public String getKind() {
return this.token.getKind();
}
}
public final Node root;
public ParseTree(Node root) {
this.root = root;
}
@Override
public String toString() {
return "<ParseTree>\n" + this.root.toString(0);
}
}
|
package io.cloudslang.lang.runtime.bindings.strategies;
import io.cloudslang.lang.entities.bindings.Input;
import io.cloudslang.lang.entities.bindings.values.Value;
import io.cloudslang.lang.runtime.env.RunEnvironment;
import io.cloudslang.lang.runtime.events.LanguageEventData;
import io.cloudslang.score.lang.ExecutionRuntimeServices;
import io.cloudslang.score.lang.SystemContext;
import org.springframework.stereotype.Component;
import java.util.Collection;
import java.util.List;
import java.util.Map;
@Component
public class DebuggerBreakpointsHandlerStub implements DebuggerBreakpointsHandler {
@Override
public boolean resolveInputs(List<Input> newInputs, SystemContext systemContext,
RunEnvironment runEnv, ExecutionRuntimeServices runtimeServices,
LanguageEventData.StepType stepType, String stepName) {
return false;
}
@Override
public Map<String, ? extends Value> applyValues(SystemContext systemContext, Collection<Input> inputs) {
return null;
}
@Override
public boolean handleBreakpoints(SystemContext context, String stepId) {
return false;
}
}
|
package com.blebail.components.persistence.resource.repository;
import com.querydsl.core.types.dsl.SimpleExpression;
import com.querydsl.sql.RelationalPathBase;
import com.querydsl.sql.dml.SQLInsertClause;
import com.querydsl.sql.dml.SQLUpdateClause;
import org.springframework.transaction.annotation.Transactional;
import java.util.Collection;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
* Generic CRUD repository based on Querydsl SQL, identifying rows by a single id column.
*
* @param <R>  the resource (row) type
* @param <ID> the resource id type
*/
public abstract class ResourceRepository<R, ID> extends BaseRepository<R> implements CrudRepository<R, ID> {
protected final SimpleExpression<ID> idPath;
protected final Function<R, ID> idMapping;
public ResourceRepository(RelationalPathBase<R> rowPath, SimpleExpression<ID> idPath, Function<R, ID> idMapping) {
super(rowPath);
this.idPath = Objects.requireNonNull(idPath);
this.idMapping = Objects.requireNonNull(idMapping);
}
/**
* {@inheritDoc}
*/
@Transactional
@Override
public R save(R resource) {
if (resource == null) {
throw new IllegalArgumentException("Cannot create null resource");
}
ID resourceId = idMapping.apply(resource);
if (exists(resourceId)) {
queryFactory.update(rowPath)
.populate(resource)
.where(idPath.eq(resourceId))
.execute();
} else {
queryFactory.insert(rowPath)
.populate(resource)
.execute();
}
return resource;
}
/**
* {@inheritDoc}
*/
@Transactional
@Override
public Collection<R> save(Collection<R> resources) {
Collection<ID> resourceIds = resources.stream().map(idMapping).collect(Collectors.toList());
Collection<ID> persistedResourceIds = findAllIds(resources);
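// Split the incoming resources: already persisted ids go into an update batch, the rest into an insert batch.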
boolean insertBatchShouldBeExecuted = resourceIds.size() > persistedResourceIds.size();
boolean updateBatchShouldBeExecuted = persistedResourceIds.size() > 0;
SQLInsertClause insertBatch = queryFactory.insert(rowPath);
SQLUpdateClause updateBatch = queryFactory.update(rowPath);
resources.forEach(resource -> {
ID resourceId = idMapping.apply(resource);
if (persistedResourceIds.contains(resourceId)) {
updateBatch.populate(resource)
.where(idPath.eq(resourceId))
.addBatch();
} else {
insertBatch.populate(resource)
.addBatch();
}
});
if (insertBatchShouldBeExecuted) {
insertBatch.execute();
}
if (updateBatchShouldBeExecuted) {
updateBatch.execute();
}
return resources;
}
/**
* {@inheritDoc}
*/
@Transactional(readOnly = true)
@Override
public Optional<R> findOne(ID resourceId) {
if (resourceId == null) {
return Optional.empty();
}
return Optional.ofNullable(queryFactory.select(rowPath)
.from(rowPath)
.where(idPath.eq(resourceId))
.fetchOne());
}
/**
* {@inheritDoc}
*/
@Transactional(readOnly = true)
@Override
public Collection<R> findAll(Collection<ID> resourceIds) {
return queryFactory.select(rowPath)
.from(rowPath)
.where(idPath.in(resourceIds))
.fetch();
}
/**
* {@inheritDoc}
*/
@Transactional(readOnly = true)
@Override
public boolean exists(ID resourceId) {
if (resourceId == null) {
return false;
}
return queryFactory.select(idPath)
.from(rowPath)
.where(idPath.eq(resourceId))
.fetchOne() != null;
}
/**
* {@inheritDoc}
*/
@Transactional
@Override
public boolean delete(ID resourceId) {
if (resourceId == null) {
return false;
}
return queryFactory.delete(rowPath)
.where(idPath.eq(resourceId))
.execute() == 1;
}
/**
* {@inheritDoc}
*/
@Transactional
@Override
public boolean delete(Collection<ID> resourceIds) {
return queryFactory.delete(rowPath)
.where(idPath.in(resourceIds))
.execute() == resourceIds.size();
}
/**
* {@inheritDoc}
*/
@Override
public SimpleExpression<ID> idPath() {
return idPath;
}
/**
* {@inheritDoc}
*/
@Override
public Function<R, ID> idMapping() {
return idMapping;
}
protected Collection<ID> findAllIds(Collection<R> resources) {
Collection<ID> resourceIds = resources.stream().map(idMapping::apply).collect(Collectors.toList());
return queryFactory.select(idPath)
.from(rowPath)
.where(idPath.in(resourceIds))
.fetch();
}
}
|
package cellsociety_team01;
import javafx.scene.paint.Color;
public class State {
private Color myColor;
private String myName;
public State(Color c, String s) {
myName = s;
myColor = c;
}
public boolean equals(State s) {
return this.hashCode() == s.hashCode();
}
public int hashCode() {
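// Combine a position-weighted sum of the name's characters with the colour's hash code.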
int ret = 0;
for(int i = 0; i < myName.length(); i++){
ret += myName.charAt(i) * (i + 1);
}
ret += myColor.hashCode();
return ret;
}
public void setColor(Color color) {
myColor = color;
}
public void setName(String s) {
myName = s;
}
public String getName() {
return myName;
}
public Color getColor() {
return myColor;
}
}
|
import java.util.List;
import org.junit.Assert;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.firefox.FirefoxDriver;
import br.com.saraivaugioni.sentinela.main.Sentinela;
public class Teste {
public static void main(String[] args) {
// Webdriver driver
System.setProperty("webdriver.gecko.driver", "driver\\geckodriver.exe");
WebDriver driver = new FirefoxDriver();
// Map webdriver elements
// Google
//Single elements
driver.get("https:
WebElement logoGoogle = driver.findElement(By.id("hplogo"));
WebElement buttonSearchGoogle = driver.findElement(By.name("btnK"));
WebElement fieldSearch = driver.findElement(By.id("lst-ib"));
// List of WebElements
List<WebElement> googleElements = driver.findElements(By.className("list"));
googleElements.add(logoGoogle);
googleElements.add(buttonSearchGoogle);
googleElements.add(fieldSearch);
// Make API instance, set image path and gen report path
// and last a resolution to work.
Sentinela sentinela = new Sentinela(driver, "C:\\testRegression\\testImages", "C:\\testRegression\\testReport\\", 1920, 1080);
// Validate a webelements list
sentinela.validate(googleElements, "elementsGoogle");
// Validate a full page
sentinela.validate("screen_google");
// Validate a webelement
sentinela.validate(logoGoogle, "logo_google");
// Gen final report
sentinela.generateReport();
System.out.println(sentinela.isDiff());
driver.quit();
Assert.assertFalse(sentinela.isDiff());
}
}
|
package org.eclipse.rdf4j.query.algebra.evaluation.impl;
import static org.junit.Assert.assertEquals;
import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
import org.eclipse.rdf4j.query.MalformedQueryException;
import org.eclipse.rdf4j.query.QueryLanguage;
import org.eclipse.rdf4j.query.UnsupportedQueryLanguageException;
import org.eclipse.rdf4j.query.algebra.Compare;
import org.eclipse.rdf4j.query.algebra.Compare.CompareOp;
import org.eclipse.rdf4j.query.algebra.Filter;
import org.eclipse.rdf4j.query.algebra.Join;
import org.eclipse.rdf4j.query.algebra.Projection;
import org.eclipse.rdf4j.query.algebra.ProjectionElem;
import org.eclipse.rdf4j.query.algebra.ProjectionElemList;
import org.eclipse.rdf4j.query.algebra.QueryRoot;
import org.eclipse.rdf4j.query.algebra.StatementPattern;
import org.eclipse.rdf4j.query.algebra.TupleExpr;
import org.eclipse.rdf4j.query.algebra.ValueConstant;
import org.eclipse.rdf4j.query.algebra.Var;
import org.eclipse.rdf4j.query.algebra.evaluation.QueryOptimizerTest;
import org.eclipse.rdf4j.query.parser.ParsedQuery;
import org.eclipse.rdf4j.query.parser.QueryParserUtil;
import org.junit.Test;
public class FilterOptimizerTest extends QueryOptimizerTest {
@Override
public FilterOptimizer getOptimizer() {
return new FilterOptimizer();
}
@Test
public void merge() {
String expectedQuery = "SELECT * WHERE {?s ?p ?o . FILTER(?o > 2 && ?o <4) }";
String query = "SELECT * WHERE {?s ?p ?o . FILTER(?o > 2) . FILTER(?o <4) }";
testOptimizer(expectedQuery, query);
}
@Test
public void dontMerge() {
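// Filters over different variables are attached to different statement patterns and must not be merged.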
Var s = new Var("s");
Var p = new Var("p");
Var o = new Var("o");
Var o2 = new Var("o2");
ValueConstant two = new ValueConstant(SimpleValueFactory.getInstance().createLiteral(2));
ValueConstant four = new ValueConstant(SimpleValueFactory.getInstance().createLiteral(4));
Compare oGreaterThanTwo = new Compare(o, two, CompareOp.GT);
Filter spo = new Filter(new StatementPattern(s, p, o), oGreaterThanTwo);
Compare o2SmallerThanFour = new Compare(o2, four, CompareOp.LT);
Filter spo2 = new Filter(new StatementPattern(s, p, o2), o2SmallerThanFour);
TupleExpr expected = new QueryRoot(
new Projection(new Join(spo, spo2), new ProjectionElemList(new ProjectionElem("s"),
new ProjectionElem("p"), new ProjectionElem("o"), new ProjectionElem("o2"))));
String query = "SELECT * WHERE {?s ?p ?o . ?s ?p ?o2 . FILTER(?o > '2'^^xsd:int) . FILTER(?o2 < '4'^^xsd:int) }";
testOptimizer(expected, query);
}
@Test
public void deMerge() {
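// A conjunctive filter over different variables is expected to be split and pushed down to the respective patterns.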
Var s = new Var("s");
Var p = new Var("p");
Var o = new Var("o");
Var o2 = new Var("o2");
ValueConstant two = new ValueConstant(SimpleValueFactory.getInstance().createLiteral(2));
ValueConstant four = new ValueConstant(SimpleValueFactory.getInstance().createLiteral(4));
Compare oGreaterThanTwo = new Compare(o, two, CompareOp.GT);
Filter spo = new Filter(new StatementPattern(s, p, o), oGreaterThanTwo);
Compare o2SmallerThanFour = new Compare(o2, four, CompareOp.LT);
Filter spo2 = new Filter(new StatementPattern(s, p, o2), o2SmallerThanFour);
TupleExpr expected = new QueryRoot(
new Projection(new Join(spo, spo2), new ProjectionElemList(new ProjectionElem("s"),
new ProjectionElem("p"), new ProjectionElem("o"), new ProjectionElem("o2"))));
String query = "SELECT * WHERE {?s ?p ?o . ?s ?p ?o2 . FILTER(?o > '2'^^xsd:int && ?o2 < '4'^^xsd:int) }";
testOptimizer(expected, query);
}
void testOptimizer(String expectedQuery, String actualQuery)
throws MalformedQueryException, UnsupportedQueryLanguageException {
ParsedQuery pq = QueryParserUtil.parseQuery(QueryLanguage.SPARQL, actualQuery, null);
FilterOptimizer opt = getOptimizer();
opt.optimize(pq.getTupleExpr(), null, null);
ParsedQuery expectedParsedQuery = QueryParserUtil.parseQuery(QueryLanguage.SPARQL, expectedQuery, null);
assertEquals(pq.getTupleExpr(), expectedParsedQuery.getTupleExpr());
}
void testOptimizer(TupleExpr expectedQuery, String actualQuery)
throws MalformedQueryException, UnsupportedQueryLanguageException {
ParsedQuery pq = QueryParserUtil.parseQuery(QueryLanguage.SPARQL, actualQuery, null);
FilterOptimizer opt = getOptimizer();
opt.optimize(pq.getTupleExpr(), null, null);
assertEquals(pq.getTupleExpr(), expectedQuery);
}
}
|
package ch.idsia.ai.agents.ai;
import ch.idsia.ai.agents.Agent;
import ch.idsia.ai.agents.RegisterableAgent;
import ch.idsia.mario.engine.sprites.Mario;
import ch.idsia.mario.environments.Environment;
import ch.idsia.utils.MathX;
public class ForwardAgent extends RegisterableAgent implements Agent
{
int trueJumpCounter = 0;
int trueSpeedCounter = 0;
public ForwardAgent()
{
super("ForwardAgent");
reset();
}
public void reset()
{
action = new boolean[Environment.numberOfButtons];
action[Mario.KEY_RIGHT] = true;
action[Mario.KEY_SPEED] = true;
trueJumpCounter = 0;
trueSpeedCounter = 0;
}
private boolean DangerOfGap(byte[][] levelScene)
{
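// Scan a few columns ahead of Mario (x = 9..12); a column that is empty all the way down indicates a gap.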
for (int x = 9; x < 13; ++x)
{
boolean f = true;
for(int y = 12; y < 22; ++y)
{
if (levelScene[y][x] != 0)
f = false;
}
if (f && levelScene[12][11] != 0)
return true;
}
return false;
}
private byte[][] decode(String estate)
{
byte[][] dstate = new byte[Environment.HalfObsWidth*2][Environment.HalfObsHeight*2];
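// Unpack the bitmap observation: each character carries 8 bits that are tested one by one and written into the 2D state grid.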
int row = 0;
int col = 0;
for (int i = 0; i < estate.length(); ++i)
{
char cur_char = estate.charAt(i);
for (int j = 0; j < 8; ++j)
{
if (col > Environment.HalfObsHeight*2 - 1)
{
++row;
col = 0;
}
if ((MathX.pow(2,j) & cur_char) != 0)
{
try{
dstate[row][col] = 1;
}
catch (Exception e)
{
System.out.println("row = " + row);
System.out.println("col = " + col);
}
}
else
{
dstate[row][col] = 0; //TODO: Simplify in one line of code.
}
}
++col;
}
return dstate;
}
public boolean[] getAction(Environment observation)
{
//TODO: Discuss increasing difficulty for handling the gaps.
// this Agent requires observation.
assert(observation != null);
byte[][] levelScene = observation.getCompleteObservation();
float[] marioPos = observation.getMarioFloatPos();
float[] enemiesPos = observation.getEnemiesFloatPos();
String encodedState = observation.getBitmapLevelObservation();
byte[][] levelSceneFromBitmap = decode(encodedState);
if (levelScene[11][13] != 0 || levelScene[11][12] != 0 || DangerOfGap(levelScene))
{
if (observation.mayMarioJump() || ( !observation.isMarioOnGround() && action[Mario.KEY_JUMP]))
{
action[Mario.KEY_JUMP] = true;
}
++trueJumpCounter;
}
else
{
action[Mario.KEY_JUMP] = false;
trueJumpCounter = 0;
}
if (trueJumpCounter > 16)
{
trueJumpCounter = 0;
action[Mario.KEY_JUMP] = false;
}
action[Mario.KEY_SPEED] = DangerOfGap(levelScene);
return action;
}
}
|
package org.hisp.dhis.android.core.program.internal;
import static com.google.common.truth.Truth.assertThat;
import android.content.ContentValues;
import org.hisp.dhis.android.core.arch.call.executors.internal.D2CallExecutor;
import org.hisp.dhis.android.core.arch.db.stores.internal.IdentifiableObjectStore;
import org.hisp.dhis.android.core.category.CategoryComboTableInfo;
import org.hisp.dhis.android.core.category.internal.CreateCategoryComboUtils;
import org.hisp.dhis.android.core.common.Unit;
import org.hisp.dhis.android.core.data.program.ProgramRuleVariableSamples;
import org.hisp.dhis.android.core.data.program.ProgramSamples;
import org.hisp.dhis.android.core.data.program.ProgramTrackedEntityAttributeSamples;
import org.hisp.dhis.android.core.dataelement.CreateDataElementUtils;
import org.hisp.dhis.android.core.dataelement.DataElementTableInfo;
import org.hisp.dhis.android.core.program.CreateProgramStageUtils;
import org.hisp.dhis.android.core.program.Program;
import org.hisp.dhis.android.core.program.ProgramRuleVariable;
import org.hisp.dhis.android.core.program.ProgramStageTableInfo;
import org.hisp.dhis.android.core.program.ProgramTrackedEntityAttribute;
import org.hisp.dhis.android.core.relationship.RelationshipType;
import org.hisp.dhis.android.core.relationship.internal.RelationshipTypeStore;
import org.hisp.dhis.android.core.trackedentity.CreateTrackedEntityAttributeUtils;
import org.hisp.dhis.android.core.trackedentity.CreateTrackedEntityUtils;
import org.hisp.dhis.android.core.trackedentity.TrackedEntityAttributeTableInfo;
import org.hisp.dhis.android.core.trackedentity.TrackedEntityTypeTableInfo;
import org.hisp.dhis.android.core.utils.integration.mock.BaseMockIntegrationTestEmptyEnqueable;
import org.hisp.dhis.android.core.utils.runner.D2JunitRunner;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.internal.util.collections.Sets;
import java.util.List;
import io.reactivex.Single;
@RunWith(D2JunitRunner.class)
public class ProgramEndpointCallMockIntegrationShould extends BaseMockIntegrationTestEmptyEnqueable {
private static String programUid = "lxAQ7Zs9VYR";
@BeforeClass
public static void setUpClass() throws Exception {
BaseMockIntegrationTestEmptyEnqueable.setUpClass();
D2CallExecutor executor = D2CallExecutor.create(databaseAdapter);
executor.executeD2CallTransactionally(() -> {
String categoryComboUid = "m2jTvAj5kkm";
ContentValues categoryCombo = CreateCategoryComboUtils.create(1L, categoryComboUid);
databaseAdapter.insert(CategoryComboTableInfo.TABLE_INFO.name(), null, categoryCombo);
// inserting tracked entity
ContentValues trackedEntityType = CreateTrackedEntityUtils.create(1L, "nEenWmSyUEp");
databaseAdapter.insert(TrackedEntityTypeTableInfo.TABLE_INFO.name(), null, trackedEntityType);
// inserting tracked entity attributes
ContentValues trackedEntityAttribute1 = CreateTrackedEntityAttributeUtils.create(1L, "aejWyOfXge6", null);
databaseAdapter.insert(TrackedEntityAttributeTableInfo.TABLE_INFO.name(), null, trackedEntityAttribute1);
ContentValues trackedEntityAttribute2 = CreateTrackedEntityAttributeUtils.create(2L, "cejWyOfXge6", null);
databaseAdapter.insert(TrackedEntityAttributeTableInfo.TABLE_INFO.name(), null, trackedEntityAttribute2);
ContentValues dataElement1 = CreateDataElementUtils.create(1L, "vANAXwtLwcT", categoryComboUid, null);
databaseAdapter.insert(DataElementTableInfo.TABLE_INFO.name(), null, dataElement1);
ContentValues dataElement2 = CreateDataElementUtils.create(2L, "sWoqcoByYmD", categoryComboUid, null);
databaseAdapter.insert(DataElementTableInfo.TABLE_INFO.name(), null, dataElement2);
ContentValues programStage = CreateProgramStageUtils.create(1L, "dBwrot7S420", programUid);
databaseAdapter.insert(ProgramStageTableInfo.TABLE_INFO.name(), null, programStage);
dhis2MockServer.enqueueMockResponse("program/programs.json");
Single<List<Program>> programCall = objects.d2DIComponent.programCall().download(Sets.newSet(programUid));
programCall.blockingGet();
return new Unit();
});
}
@Test
public void persist_program_when_call() {
ProgramStoreInterface store = ProgramStore.create(databaseAdapter);
assertThat(store.count()).isEqualTo(2);
assertThat(store.selectByUid(programUid).toBuilder().id(null).build())
.isEqualTo(ProgramSamples.getAntenatalProgram());
}
@Test
public void persist_program_rule_variables_on_call() {
IdentifiableObjectStore<ProgramRuleVariable> store = ProgramRuleVariableStore.create(databaseAdapter);
assertThat(store.count()).isEqualTo(2);
assertThat(store.selectByUid("omrL0gtPpDL")).isEqualTo(ProgramRuleVariableSamples.getHemoglobin());
}
@Test
public void persist_program_tracker_entity_attributes_when_call() {
IdentifiableObjectStore<ProgramTrackedEntityAttribute> store = ProgramTrackedEntityAttributeStore.create(databaseAdapter);
assertThat(store.count()).isEqualTo(2);
assertThat(store.selectByUid("YhqgQ6Iy4c4")).isEqualTo(ProgramTrackedEntityAttributeSamples.getChildProgrammeGender());
}
@Test
public void not_persist_relationship_type_when_call() {
IdentifiableObjectStore<RelationshipType> store = RelationshipTypeStore.create(databaseAdapter);
assertThat(store.count()).isEqualTo(0);
}
}
|
package ru.stqa.pft.addressbook.appmanager;
import org.openqa.selenium.By;
import org.openqa.selenium.firefox.FirefoxDriver;
import ru.stqa.pft.addressbook.model.UserData;
public class UserHelper extends HelperBase {
public UserHelper(FirefoxDriver wd) {
super(wd);
}
public void deleteSelectedUsers() {
wd.findElement(By.xpath("//div[@id='content']/form[2]/div[2]/input")).click();
}
public void selectUser() {
wd.findElement(By.name("selected[]")).click();
}
public void submitUserCreation() {
wd.findElement(By.xpath("//div[@id='content']/form/input[21]")).click();
}
public void fillUserForm(UserData userData) {
type(By.name("firstname"), userData.getFirstname());
type(By.name("lastname"), userData.getLastname());
type(By.name("address"), userData.getAddress());
type(By.name("home"), userData.getHomenumber());
type(By.name("email"), userData.getEmail());
}
public void initUserCreation() {
wd.findElement(By.linkText("add new")).click();
}
}
|
package org.talend.dataprep.api.dataset.json;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.TimeZone;
import org.apache.commons.lang.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.talend.dataprep.api.dataset.DataSetMetadata;
import org.talend.dataprep.schema.FormatGuess;
import org.talend.dataprep.schema.SchemaParserResult;
import com.fasterxml.jackson.core.JsonGenerator;
@Component
public class SimpleDataSetMetadataJsonSerializer {
static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("MM-dd-yyyy HH:mm"); //$NON-NLS-1
static {
DATE_FORMAT.setTimeZone(TimeZone.getTimeZone("UTC"));
}
private FormatGuess.Factory factory;
@Autowired
public SimpleDataSetMetadataJsonSerializer(FormatGuess.Factory factory) {
this.factory = factory;
}
public void serialize(final DataSetMetadata dataSetMetadata, final JsonGenerator generator) throws IOException {
generator.writeStartObject();
{
generator.writeStringField("id", dataSetMetadata.getId()); //$NON-NLS-1
generator.writeStringField("name", dataSetMetadata.getName()); //$NON-NLS-1
generator.writeStringField("author", dataSetMetadata.getAuthor()); //$NON-NLS-1
generator.writeNumberField("records", dataSetMetadata.getContent().getNbRecords()); //$NON-NLS-1
generator.writeNumberField("nbLinesHeader", dataSetMetadata.getContent().getNbLinesInHeader()); //$NON-NLS-1
generator.writeNumberField("nbLinesFooter", dataSetMetadata.getContent().getNbLinesInFooter()); //$NON-NLS-1
generator.writeBooleanField("draft", dataSetMetadata.isDraft()); //$NON-NLS-1
generator.writeStringField("certification", dataSetMetadata.getGovernance().getCertificationStep().toString()); //$NON-NLS-1
if (dataSetMetadata.getContent().getFormatGuessId() != null) {
FormatGuess formatGuess = this.factory.getFormatGuess(dataSetMetadata.getContent().getFormatGuessId());
generator.writeStringField("type", formatGuess.getMediaType()); //$NON-NLS-1
}
// data we need for extra dataset validation (i.e. sheetNumber for an Excel sheet)
SchemaParserResult schemaParserResult = dataSetMetadata.getSchemaParserResult();
if (schemaParserResult != null) {
String sheetName = StringUtils.isEmpty(dataSetMetadata.getSheetName()) ?
schemaParserResult.getSheetName()
: dataSetMetadata.getSheetName();
generator.writeStringField("sheetName", sheetName);
if (dataSetMetadata.isDraft()) {
if (schemaParserResult.getColumnMetadatas() != null) {
generator.writeFieldName("sheetNames");
generator.writeStartArray();
for (String schemaName : schemaParserResult.getColumnMetadatas().keySet()) {
generator.writeString(schemaName);
}
generator.writeEndArray();
}
}
}
synchronized (DATE_FORMAT) {
generator.writeStringField("created", DATE_FORMAT.format(dataSetMetadata.getCreationDate())); //$NON-NLS-1
}
}
generator.writeEndObject();
}
}
|
package ua.stqa.pft.addressbook.appmanager;
import org.openqa.selenium.By;
import org.openqa.selenium.NoAlertPresentException;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.firefox.FirefoxDriver;
public class HelperBase {
protected WebDriver wd;
public HelperBase(WebDriver wd) {
this.wd = wd;
}
protected void click(By locator) {
wd.findElement(locator).click();
}
protected void type(By locator, String text) {
click(locator);
if(text != null){
String existingText = wd.findElement(locator).getAttribute("value");
if (!text.equals(existingText)){
wd.findElement(locator).clear();
wd.findElement(locator).sendKeys(text);
}
}
}
protected void acceptPrompt() {
wd.switchTo().alert().accept();
}
public boolean isAlertPresent() {
try {
wd.switchTo().alert();
return true;
} catch (NoAlertPresentException e) {
return false;
}
}
}
|
package io.debezium.connector.sqlserver;
import java.sql.SQLException;
import java.time.Duration;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.debezium.connector.sqlserver.SqlServerConnectorConfig.SnapshotMode;
import io.debezium.pipeline.ErrorHandler;
import io.debezium.pipeline.EventDispatcher;
import io.debezium.pipeline.source.spi.StreamingChangeEventSource;
import io.debezium.relational.Table;
import io.debezium.relational.TableId;
import io.debezium.schema.SchemaChangeEvent.SchemaChangeEventType;
import io.debezium.util.Clock;
import io.debezium.util.Metronome;
/**
* <p>A {@link StreamingChangeEventSource} based on SQL Server change data capture functionality.
* A main loop polls database DDL change and change data tables and turns them into change events.</p>
*
* <p>The connector uses the CDC functionality of SQL Server, which is implemented as a process that monitors
* the source table and writes changes from the table into the change table.</p>
*
* <p>The main loop keeps a pointer to the LSN of changes that were already processed. It queries all change
* tables and gets a result set of changes. It always finds the smallest LSN across all tables, converts that
* change into an event message and sends it downstream. The process repeats until all result sets are
* empty. The LSN is then marked and the procedure repeats.</p>
*
* <p>The schema changes detection follows the procedure recommended by SQL Server CDC documentation.
* The database operator should create one more capture process (and table) when a table schema is updated.
* The code detects presence of two change tables for a single source table. It decides which table is the new one
* depending on LSNs stored in them. The loop streams changes from the older table till there are events in new
* table with the LSN larger than in the old one. Then the change table is switched and streaming is executed
* from the new one.</p>
*
* @author Jiri Pechanec
*/
public class SqlServerStreamingChangeEventSource implements StreamingChangeEventSource {
private static final Pattern MISSING_CDC_FUNCTION_CHANGES_ERROR = Pattern.compile("Invalid object name 'cdc.fn_cdc_get_all_changes_(.*)'\\.");
private static final Logger LOGGER = LoggerFactory.getLogger(SqlServerStreamingChangeEventSource.class);
/**
* Connection used for reading CDC tables.
*/
private final SqlServerConnection dataConnection;
private final SqlServerConnection metadataConnection;
private final EventDispatcher<TableId> dispatcher;
private final ErrorHandler errorHandler;
private final Clock clock;
private final SqlServerDatabaseSchema schema;
private final SqlServerOffsetContext offsetContext;
private final Duration pollInterval;
private final SqlServerConnectorConfig connectorConfig;
public SqlServerStreamingChangeEventSource(SqlServerConnectorConfig connectorConfig, SqlServerOffsetContext offsetContext, SqlServerConnection dataConnection,
SqlServerConnection metadataConnection, EventDispatcher<TableId> dispatcher, ErrorHandler errorHandler, Clock clock,
SqlServerDatabaseSchema schema) {
this.connectorConfig = connectorConfig;
this.dataConnection = dataConnection;
this.metadataConnection = metadataConnection;
this.dispatcher = dispatcher;
this.errorHandler = errorHandler;
this.clock = clock;
this.schema = schema;
this.offsetContext = offsetContext;
this.pollInterval = connectorConfig.getPollInterval();
}
@Override
public void execute(ChangeEventSourceContext context) throws InterruptedException {
if (connectorConfig.getSnapshotMode().equals(SnapshotMode.INITIAL_ONLY)) {
LOGGER.info("Streaming is not enabled in current configuration");
return;
}
final Metronome metronome = Metronome.sleeper(pollInterval, clock);
final Queue<SqlServerChangeTable> schemaChangeCheckpoints = new PriorityQueue<>((x, y) -> x.getStopLsn().compareTo(y.getStopLsn()));
try {
final AtomicReference<SqlServerChangeTable[]> tablesSlot = new AtomicReference<SqlServerChangeTable[]>(getCdcTablesToQuery());
final TxLogPosition lastProcessedPositionOnStart = offsetContext.getChangePosition();
final long lastProcessedEventSerialNoOnStart = offsetContext.getEventSerialNo();
LOGGER.info("Last position recorded in offsets is {}[{}]", lastProcessedPositionOnStart, lastProcessedEventSerialNoOnStart);
final AtomicBoolean changesStoppedBeingMonotonic = new AtomicBoolean(false);
TxLogPosition lastProcessedPosition = lastProcessedPositionOnStart;
// LSN should be increased for the first run only immediately after snapshot completion
// otherwise we might skip an incomplete transaction after restart
boolean shouldIncreaseFromLsn = offsetContext.isSnapshotCompleted();
while (context.isRunning()) {
// When reading from read-only Always On replica the default and only transaction isolation
// is snapshot. This means that CDC metadata are not visible for long-running transactions.
// It is thus necessary to restart the transaction before every read.
if (connectorConfig.isReadOnlyDatabaseConnection()) {
dataConnection.commit();
}
final MaxLsnResult maxLsnResult = dataConnection.getMaxLsnResult(connectorConfig.isSkipLowActivityLSNsEnabled());
// Shouldn't happen if the agent is running, but it is better to guard against such situation
if (!maxLsnResult.getMaxLsn().isAvailable() || !maxLsnResult.getMaxTransactionalLsn().isAvailable()) {
LOGGER.warn("No maximum LSN recorded in the database; please ensure that the SQL Server Agent is running");
metronome.pause();
continue;
}
// There is no change in the database
if (maxLsnResult.getMaxTransactionalLsn().compareTo(lastProcessedPosition.getCommitLsn()) <= 0 && shouldIncreaseFromLsn) {
LOGGER.debug("No change in the database");
metronome.pause();
continue;
}
// Reading interval is inclusive so we need to move LSN forward but not for first
// run as TX might not be streamed completely
final Lsn fromLsn = lastProcessedPosition.getCommitLsn().isAvailable() && shouldIncreaseFromLsn
? dataConnection.incrementLsn(lastProcessedPosition.getCommitLsn())
: lastProcessedPosition.getCommitLsn();
shouldIncreaseFromLsn = true;
while (!schemaChangeCheckpoints.isEmpty()) {
migrateTable(schemaChangeCheckpoints);
}
if (!dataConnection.listOfNewChangeTables(fromLsn, maxLsnResult.getMaxLsn()).isEmpty()) {
final SqlServerChangeTable[] tables = getCdcTablesToQuery();
tablesSlot.set(tables);
for (SqlServerChangeTable table : tables) {
if (table.getStartLsn().isBetween(fromLsn, maxLsnResult.getMaxLsn())) {
LOGGER.info("Schema will be changed for {}", table);
schemaChangeCheckpoints.add(table);
}
}
}
try {
dataConnection.getChangesForTables(tablesSlot.get(), fromLsn, maxLsnResult.getMaxLsn(), resultSets -> {
long eventSerialNoInInitialTx = 1;
final int tableCount = resultSets.length;
final SqlServerChangeTablePointer[] changeTables = new SqlServerChangeTablePointer[tableCount];
final SqlServerChangeTable[] tables = tablesSlot.get();
for (int i = 0; i < tableCount; i++) {
changeTables[i] = new SqlServerChangeTablePointer(tables[i], resultSets[i]);
changeTables[i].next();
}
for (;;) {
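// Repeatedly pick the change table whose current row has the smallest LSN so events are emitted in commit order across all tables.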
SqlServerChangeTablePointer tableWithSmallestLsn = null;
for (SqlServerChangeTablePointer changeTable : changeTables) {
if (changeTable.isCompleted()) {
continue;
}
if (tableWithSmallestLsn == null || changeTable.compareTo(tableWithSmallestLsn) < 0) {
tableWithSmallestLsn = changeTable;
}
}
if (tableWithSmallestLsn == null) {
// No more LSNs available
break;
}
if (!(tableWithSmallestLsn.getChangePosition().isAvailable() && tableWithSmallestLsn.getChangePosition().getInTxLsn().isAvailable())) {
LOGGER.error("Skipping change {} as its LSN is NULL which is not expected", tableWithSmallestLsn);
tableWithSmallestLsn.next();
continue;
}
if (tableWithSmallestLsn.isNewTransaction() && changesStoppedBeingMonotonic.get()) {
LOGGER.info("Resetting changesStoppedBeingMonotonic as transaction changes");
changesStoppedBeingMonotonic.set(false);
}
// After restart for changes that are not monotonic to avoid data loss
if (tableWithSmallestLsn.isCurrentPositionSmallerThanPreviousPosition()) {
LOGGER.info("Disabling skipping changes due to not monotonic order of changes");
changesStoppedBeingMonotonic.set(true);
}
// After restart for changes that were executed before the last committed offset
if (!changesStoppedBeingMonotonic.get() &&
tableWithSmallestLsn.getChangePosition().compareTo(lastProcessedPositionOnStart) < 0) {
LOGGER.info("Skipping change {} as its position is smaller than the last recorded position {}", tableWithSmallestLsn,
lastProcessedPositionOnStart);
tableWithSmallestLsn.next();
continue;
}
// After restart for change that was the last committed and operations in it before the last committed offset
if (!changesStoppedBeingMonotonic.get() && tableWithSmallestLsn.getChangePosition().compareTo(lastProcessedPositionOnStart) == 0
&& eventSerialNoInInitialTx <= lastProcessedEventSerialNoOnStart) {
LOGGER.info("Skipping change {} as its order in the transaction {} is smaller than or equal to the last recorded operation {}[{}]",
tableWithSmallestLsn, eventSerialNoInInitialTx, lastProcessedPositionOnStart, lastProcessedEventSerialNoOnStart);
eventSerialNoInInitialTx++;
tableWithSmallestLsn.next();
continue;
}
if (tableWithSmallestLsn.getChangeTable().getStopLsn().isAvailable() &&
tableWithSmallestLsn.getChangeTable().getStopLsn().compareTo(tableWithSmallestLsn.getChangePosition().getCommitLsn()) <= 0) {
LOGGER.debug("Skipping table change {} as its stop LSN is smaller than the last recorded LSN {}", tableWithSmallestLsn,
tableWithSmallestLsn.getChangePosition());
tableWithSmallestLsn.next();
continue;
}
LOGGER.trace("Processing change {}", tableWithSmallestLsn);
LOGGER.trace("Schema change checkpoints {}", schemaChangeCheckpoints);
if (!schemaChangeCheckpoints.isEmpty()) {
if (tableWithSmallestLsn.getChangePosition().getCommitLsn().compareTo(schemaChangeCheckpoints.peek().getStartLsn()) >= 0) {
migrateTable(schemaChangeCheckpoints);
}
}
final TableId tableId = tableWithSmallestLsn.getChangeTable().getSourceTableId();
final int operation = tableWithSmallestLsn.getOperation();
final Object[] data = tableWithSmallestLsn.getData();
// UPDATE consists of two consecutive events, first event contains
// the row before it was updated and the second the row after
// it was updated
int eventCount = 1;
if (operation == SqlServerChangeRecordEmitter.OP_UPDATE_BEFORE) {
if (!tableWithSmallestLsn.next() || tableWithSmallestLsn.getOperation() != SqlServerChangeRecordEmitter.OP_UPDATE_AFTER) {
throw new IllegalStateException("The update before event at " + tableWithSmallestLsn.getChangePosition() + " for table " + tableId
+ " was not followed by after event.\n Please report this as a bug together with a events around given LSN.");
}
eventCount = 2;
}
final Object[] dataNext = (operation == SqlServerChangeRecordEmitter.OP_UPDATE_BEFORE) ? tableWithSmallestLsn.getData() : null;
offsetContext.setChangePosition(tableWithSmallestLsn.getChangePosition(), eventCount);
offsetContext.event(
tableWithSmallestLsn.getChangeTable().getSourceTableId(),
metadataConnection.timestampOfLsn(tableWithSmallestLsn.getChangePosition().getCommitLsn()));
dispatcher
.dispatchDataChangeEvent(
tableId,
new SqlServerChangeRecordEmitter(
offsetContext,
operation,
data,
dataNext,
clock));
tableWithSmallestLsn.next();
}
});
lastProcessedPosition = TxLogPosition.valueOf(maxLsnResult.getMaxLsn());
// Terminate the transaction otherwise CDC could not be disabled for tables
dataConnection.rollback();
}
catch (SQLException e) {
tablesSlot.set(processErrorFromChangeTableQuery(e, tablesSlot.get()));
}
}
}
catch (Exception e) {
errorHandler.setProducerThrowable(e);
}
}
private void migrateTable(final Queue<SqlServerChangeTable> schemaChangeCheckpoints)
throws InterruptedException, SQLException {
final SqlServerChangeTable newTable = schemaChangeCheckpoints.poll();
LOGGER.info("Migrating schema to {}", newTable);
Table tableSchema = metadataConnection.getTableSchemaFromTable(newTable);
dispatcher.dispatchSchemaChangeEvent(newTable.getSourceTableId(),
new SqlServerSchemaChangeEventEmitter(offsetContext, newTable, tableSchema, SchemaChangeEventType.ALTER));
newTable.setSourceTable(tableSchema);
}
private SqlServerChangeTable[] processErrorFromChangeTableQuery(SQLException exception, SqlServerChangeTable[] currentChangeTables) throws Exception {
final Matcher m = MISSING_CDC_FUNCTION_CHANGES_ERROR.matcher(exception.getMessage());
if (m.matches()) {
final String captureName = m.group(1);
LOGGER.info("Table is no longer captured with capture instance {}", captureName);
return Arrays.asList(currentChangeTables).stream()
.filter(x -> !x.getCaptureInstance().equals(captureName))
.collect(Collectors.toList()).toArray(new SqlServerChangeTable[0]);
}
throw exception;
}
private SqlServerChangeTable[] getCdcTablesToQuery() throws SQLException, InterruptedException {
final Set<SqlServerChangeTable> cdcEnabledTables = dataConnection.listOfChangeTables();
if (cdcEnabledTables.isEmpty()) {
LOGGER.warn("No table has enabled CDC or security constraints prevents getting the list of change tables");
}
final Map<TableId, List<SqlServerChangeTable>> includeListCdcEnabledTables = cdcEnabledTables.stream()
.filter(changeTable -> {
if (connectorConfig.getTableFilters().dataCollectionFilter().isIncluded(changeTable.getSourceTableId())) {
return true;
}
else {
LOGGER.info("CDC is enabled for table {} but the table is not whitelisted by connector", changeTable);
return false;
}
})
.collect(Collectors.groupingBy(x -> x.getSourceTableId()));
if (includeListCdcEnabledTables.isEmpty()) {
LOGGER.warn(
"No whitelisted table has enabled CDC, whitelisted table list does not contain any table with CDC enabled or no table match the white/blacklist filter(s)");
}
final List<SqlServerChangeTable> tables = new ArrayList<>();
for (List<SqlServerChangeTable> captures : includeListCdcEnabledTables.values()) {
SqlServerChangeTable currentTable = captures.get(0);
if (captures.size() > 1) {
SqlServerChangeTable futureTable;
if (captures.get(0).getStartLsn().compareTo(captures.get(1).getStartLsn()) < 0) {
futureTable = captures.get(1);
}
else {
currentTable = captures.get(1);
futureTable = captures.get(0);
}
currentTable.setStopLsn(futureTable.getStartLsn());
futureTable.setSourceTable(dataConnection.getTableSchemaFromTable(futureTable));
tables.add(futureTable);
LOGGER.info("Multiple capture instances present for the same table: {} and {}", currentTable, futureTable);
}
if (schema.tableFor(currentTable.getSourceTableId()) == null) {
LOGGER.info("Table {} is new to be monitored by capture instance {}", currentTable.getSourceTableId(), currentTable.getCaptureInstance());
// We need to read the source table schema - nullability information cannot be obtained from change table
// There might be no start LSN in the new change table at this time so current timestamp is used
offsetContext.event(
currentTable.getSourceTableId(),
Instant.now());
dispatcher.dispatchSchemaChangeEvent(
currentTable.getSourceTableId(),
new SqlServerSchemaChangeEventEmitter(
offsetContext,
currentTable,
dataConnection.getTableSchemaFromTable(currentTable),
SchemaChangeEventType.CREATE));
}
// If a column was renamed, then the old capture instance had been dropped and a new one
// created. In consequence, a table with out-dated schema might be assigned here.
// A proper value will be set when migration happens.
currentTable.setSourceTable(schema.tableFor(currentTable.getSourceTableId()));
tables.add(currentTable);
}
return tables.toArray(new SqlServerChangeTable[tables.size()]);
}
}
|
package org.deegree.client.sos.requesthandler;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import javax.xml.namespace.QName;
import org.apache.axiom.om.OMAttribute;
import org.apache.axiom.om.OMElement;
import org.deegree.client.sos.storage.StorageGetCapabilities;
import org.deegree.client.sos.storage.components.BoundedBy;
import org.deegree.client.sos.storage.components.Filter_Capabilities;
import org.deegree.client.sos.storage.components.ObservationOffering;
import org.deegree.client.sos.storage.components.Operation;
import org.deegree.client.sos.storage.components.Operator;
import org.deegree.client.sos.storage.components.Parameter;
import org.deegree.client.sos.storage.components.Time;
import org.deegree.commons.utils.Pair;
import org.deegree.commons.xml.CommonNamespaces;
import org.deegree.commons.xml.NamespaceContext;
import org.deegree.commons.xml.XMLAdapter;
import org.deegree.commons.xml.XPath;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* Requesthandler-class for GetCapabilities-request. Initializes a new StorageGetCapabilities-object with contents of
* the GetCapabilities-response retrieved via an http-get request.
*
* @author <a href="mailto:neumeister@lat-lon.de">Ulrich Neumeister</a>
* @author last edited by: $Author$
*
*/
public class HandleGetCapabilities {
private static final Logger LOG = LoggerFactory.getLogger( HandleGetCapabilities.class );
private XMLAdapter xml;
private StorageGetCapabilities storage = new StorageGetCapabilities();
private static NamespaceContext nsContext;
private XPath xpath;
static {
nsContext = new NamespaceContext();
nsContext.addNamespace( "sos", "http:
nsContext.addNamespace( "ows", "http:
nsContext.addNamespace( "om", "http:
nsContext.addNamespace( "swe", "http:
nsContext.addNamespace( "ogc", "http:
nsContext.addNamespace( "gml", "http:
}
/**
* Public constructor initializes the XMLAdapter-object with the given request-String (http-get request).<br>
* After that, the fillStorage-method is called.
*
* @param request
*/
public HandleGetCapabilities( String request ) {
try {
URL url = new URL( request );
xml = new XMLAdapter( url );
} catch ( MalformedURLException e ) {
LOG.error( "Unexpected stack trace:", e.getMessage() );
}
fillStorage( request.split( "\\?" )[0] );
}
/**
* Sets the contents of storage by parsing the XMLAdapter's rootElement.
*
* @param host
*/
private void fillStorage( String host ) {
storage.setHost( host );
storage.setServiceIdentification( parseServiceIdentification() );
storage.setServiceProvider( parseServiceProvider() );
storage.setOperationsMetadata( parseOperationsMetadata() );
storage.setFilter_Capabilities( parseFilter_Capabilities() );
storage.setContents( parseOfferings() );
}
/**
* @return name/value pairs parsed from the ows:ServiceIdentification section
*/
private List<Pair<String, String>> parseServiceIdentification() {
List<Pair<String, String>> result = new ArrayList<Pair<String, String>>();
// xpath = new XPath( "/sos:Capabilities/ows:ServiceIdentification/descendant::*", nsContext );
xpath = new XPath( "/sos:Capabilities/ows:ServiceIdentification/descendant::*", nsContext );
List<OMElement> nodes = xml.getElements( xml.getRootElement(), xpath );
for ( OMElement element : nodes ) {
String name = element.getLocalName();
String value = element.getText();
if ( value != null && !value.trim().equals( "" ) ) {
Pair<String, String> pair = new Pair<String, String>();
pair.first = name;
pair.second = value;
result.add( pair );
}
}
return result;
}
/**
* @return name/value pairs parsed from the ows:ServiceProvider section
*/
private List<Pair<String, String>> parseServiceProvider() {
List<Pair<String, String>> result = new ArrayList<Pair<String, String>>();
xpath = new XPath( "//ows:ServiceProvider/descendant::*", nsContext );
List<OMElement> nodes = xml.getElements( xml.getRootElement(), xpath );
for ( OMElement element : nodes ) {
String name = element.getLocalName();
String value = element.getText();
if ( value != null && !value.trim().equals( "" ) ) {
Pair<String, String> pair = new Pair<String, String>();
pair.first = name;
pair.second = value;
result.add( pair );
} else if ( name.equals( "ProviderSite" ) || name.equals( "OnlineResource" ) ) {
Pair<String, String> pair = new Pair<String, String>();
pair.first = name;
pair.second = element.getAttributeValue( new QName( CommonNamespaces.XLNNS, "href" ) );
result.add( pair );
}
}
return result;
}
/**
* @return the operations described in the ows:OperationsMetadata section
*/
private List<Operation> parseOperationsMetadata() {
List<Operation> result = new ArrayList<Operation>();
xpath = new XPath( "//ows:Operation", nsContext );
List<OMElement> operations = xml.getElements( xml.getRootElement(), xpath );
for ( OMElement operationsElement : operations ) {
Operation operation = new Operation();
String operationName = operationsElement.getAttribute( new QName( "name" ) ).getAttributeValue();
operation.setName( operationName );
List<Pair<String, String>> http = new ArrayList<Pair<String, String>>();
xpath = new XPath( "ows:DCP/ows:HTTP/child::*", nsContext );
List<OMElement> nodes = xml.getElements( operationsElement, xpath );
for ( OMElement metadataElement : nodes ) {
Pair<String, String> pair = new Pair<String, String>();
if ( metadataElement.getLocalName().equals( "Get" ) ) {
pair.first = "Get";
pair.second = metadataElement.getAttributeValue( new QName( CommonNamespaces.XLNNS, "href" ) );
http.add( pair );
} else if ( metadataElement.getLocalName().equals( "Post" ) ) {
pair.first = "Post";
pair.second = metadataElement.getAttributeValue( new QName( CommonNamespaces.XLNNS, "href" ) );
http.add( pair );
}
}
operation.setHttp( http );
xpath = new XPath( "ows:Parameter", nsContext );
nodes = xml.getElements( operationsElement, xpath );
List<Parameter> parameters = new ArrayList<Parameter>();
for ( OMElement parameterElement : nodes ) {
Parameter parameter = new Parameter();
String parameterName = parameterElement.getAttribute( new QName( "name" ) ).getAttributeValue();
parameter.setName( parameterName );
xpath = new XPath( "ows:AllowedValues/descendant::*", nsContext );
List<OMElement> owsValues = xml.getElements( parameterElement, xpath );
List<String> parameterValues = new ArrayList<String>();
for ( OMElement owsValue : owsValues ) {
if ( owsValue.getText() != null && !owsValue.getText().trim().equals( "" ) ) {
parameterValues.add( owsValue.getText() );
}
}
parameter.setAllowedValues( parameterValues );
parameters.add( parameter );
}
operation.setParameters( parameters );
result.add( operation );
}
return result;
}
/**
* @return the parsed filter capabilities (spatial, temporal, scalar and id capabilities)
*/
private Filter_Capabilities parseFilter_Capabilities() {
Filter_Capabilities result = new Filter_Capabilities();
List<String> operands;
List<Operator> operators;
List<OMElement> nodes;
xpath = new XPath( "//ogc:Spatial_Capabilities/child::*", nsContext );
nodes = xml.getElements( xml.getRootElement(), xpath );
for ( OMElement spatial_Capability : nodes ) {
if ( spatial_Capability.getLocalName().equals( "GeometryOperands" ) ) {
xpath = new XPath( "child::*", nsContext );
List<OMElement> geometryOperands = xml.getElements( spatial_Capability, xpath );
operands = new ArrayList<String>();
for ( OMElement geometryOperand : geometryOperands ) {
if ( geometryOperand.getLocalName().equals( "GeometryOperand" ) ) {
operands.add( geometryOperand.getText() );
}
}
result.setGeometryOperands( operands );
} else if ( spatial_Capability.getLocalName().equals( "SpatialOperators" ) ) {
xpath = new XPath( "child::*", nsContext );
List<OMElement> spatialOperators = xml.getElements( spatial_Capability, xpath );
operators = new ArrayList<Operator>();
for ( OMElement spatialOperatorElement : spatialOperators ) {
Iterator<OMAttribute> attributes;
for ( attributes = spatialOperatorElement.getAllAttributes(); attributes.hasNext(); ) {
OMAttribute attribute = attributes.next();
if ( attribute.getLocalName().equals( "name" ) ) {
Operator spatialOperator = new Operator();
spatialOperator.setName( attribute.getAttributeValue() );
xpath = new XPath( "child::*/child::*", nsContext );
List<OMElement> geometryOperands = xml.getElements( spatialOperatorElement, xpath );
if ( geometryOperands.size() > 0 ) {
List<String> values = new ArrayList<String>();
for ( OMElement geometryOperand : geometryOperands ) {
values.add( geometryOperand.getText() );
}
spatialOperator.setOperands( values );
}
operators.add( spatialOperator );
}
}
}
result.setSpatialOperators( operators );
}
}
xpath = new XPath( "//ogc:Temporal_Capabilities/child::*", nsContext );
nodes = xml.getElements( xml.getRootElement(), xpath );
for ( OMElement temporal_Capability : nodes ) {
if ( temporal_Capability.getLocalName().equals( "TemporalOperands" ) ) {
xpath = new XPath( "child::*", nsContext );
List<OMElement> temporalOperands = xml.getElements( temporal_Capability, xpath );
operands = new ArrayList<String>();
for ( OMElement temporalOperand : temporalOperands ) {
if ( temporalOperand.getLocalName().equals( "TemporalOperand" ) ) {
operands.add( temporalOperand.getText() );
}
}
result.setTemporalOperands( operands );
} else if ( temporal_Capability.getLocalName().equals( "TemporalOperators" ) ) {
xpath = new XPath( "child::*", nsContext );
List<OMElement> temporalOperators = xml.getElements( temporal_Capability, xpath );
operators = new ArrayList<Operator>();
for ( OMElement temporalOperatorElement : temporalOperators ) {
Iterator<OMAttribute> attributes;
for ( attributes = temporalOperatorElement.getAllAttributes(); attributes.hasNext(); ) {
OMAttribute attribute = attributes.next();
if ( attribute.getLocalName().equals( "name" ) ) {
Operator temporalOperator = new Operator();
temporalOperator.setName( attribute.getAttributeValue() );
xpath = new XPath( "child::*/child::*", nsContext );
List<OMElement> temporalOperands = xml.getElements( temporalOperatorElement, xpath );
if ( temporalOperands.size() > 0 ) {
List<String> values = new ArrayList<String>();
for ( OMElement temporalOperand : temporalOperands ) {
values.add( temporalOperand.getText() );
}
temporalOperator.setOperands( values );
}
operators.add( temporalOperator );
}
}
}
result.setTemporalOperators( operators );
}
}
xpath = new XPath( "//ogc:Scalar_Capabilities/child::*", nsContext );
nodes = xml.getElements( xml.getRootElement(), xpath );
for ( OMElement scalar_Capability : nodes ) {
if ( scalar_Capability.getLocalName().equals( "LogicalOperators" ) ) {
result.setLogicalOperators( scalar_Capability );
} else if ( scalar_Capability.getLocalName().equals( "ComparisonOperators" ) ) {
List<String> comparisonOperators = new ArrayList<String>();
xpath = new XPath( "child::*", nsContext );
List<OMElement> comparisonOperatorsElements = xml.getElements( scalar_Capability, xpath );
for ( OMElement comparisonOperator : comparisonOperatorsElements ) {
comparisonOperators.add( comparisonOperator.getText() );
}
result.setComparisonOperators( comparisonOperators );
} else if ( scalar_Capability.getLocalName().equals( "ArithmeticOperators" ) ) {
result.setArithmeticOperators( scalar_Capability );
}
}
xpath = new XPath( "//ogc:Id_Capabilities/child::*", nsContext );
result.setId_Capabilities( xml.getElement( xml.getRootElement(), xpath ) );
return result;
}
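// Illustrative sketch, not part of the original parser: the parse methods in this class all
// follow the same XPath-over-AXIOM access pattern, assuming "xml" is the adapter wrapping the
// capabilities document and "nsContext" holds the ows/ogc/sos namespace bindings. For example:
//
//     XPath operationsPath = new XPath( "//ows:Operation", nsContext );
//     for ( OMElement op : xml.getElements( xml.getRootElement(), operationsPath ) ) {
//         String name = op.getAttributeValue( new QName( "name" ) );
//         // build domain objects such as Operation or Parameter from the matched element
//     }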
/**
* Parses the sos:ObservationOffering elements of the capabilities document.
*
* @return the list of parsed observation offerings
*/
private List<ObservationOffering> parseOfferings() {
List<ObservationOffering> result = new ArrayList<ObservationOffering>();
List<OMElement> nodes;
xpath = new XPath( "//sos:ObservationOffering", nsContext );
nodes = xml.getElements( xml.getRootElement(), xpath );
for ( OMElement observationOffering : nodes ) {
ObservationOffering offering = new ObservationOffering();
offering.setId( observationOffering.getAttributeValue( new QName( CommonNamespaces.GMLNS, "id" ) ) );
List<OMElement> metadata = new ArrayList<OMElement>();
BoundedBy boundedBy = new BoundedBy();
List<OMElement> intendedApplication = new ArrayList<OMElement>();
Time time = new Time();
List<String> procedures = new ArrayList<String>();
List<String> observedProperties = new ArrayList<String>();
List<OMElement> featuresOfInterest = new ArrayList<OMElement>();
List<OMElement> responseFormats = new ArrayList<OMElement>();
List<OMElement> responseModes = new ArrayList<OMElement>();
List<OMElement> resultModels = new ArrayList<OMElement>();
xpath = new XPath( "child::*", nsContext );
List<OMElement> children = xml.getElements( observationOffering, xpath );
for ( OMElement child : children ) {
String childName = child.getLocalName();
if ( childName.equals( "metaDataProperty" ) || childName.equals( "description" )
|| childName.equals( "name" ) ) {
metadata.add( child );
} else if ( childName.equals( "boundedBy" ) ) {
OMElement element = xml.getElement( child, xpath );
boundedBy.setType( element.getLocalName() );
boundedBy.setText( element.getText() );
List<OMAttribute> attributes = new ArrayList<OMAttribute>();
Iterator<OMAttribute> attributeIterator;
for ( attributeIterator = element.getAllAttributes(); attributeIterator.hasNext(); ) {
OMAttribute attribute = attributeIterator.next();
attributes.add( attribute );
}
boundedBy.setAttributes( attributes );
boundedBy.setElements( xml.getElements( element, xpath ) );
} else if ( childName.equals( "intendedApplication" ) ) {
intendedApplication.add( child );
} else if ( childName.equals( "time" ) ) {
Iterator<OMAttribute> attributesIterator;
List<OMAttribute> attributes = new ArrayList<OMAttribute>();
for ( attributesIterator = child.getAllAttributes(); attributesIterator.hasNext(); ) {
OMAttribute attribute = attributesIterator.next();
attributes.add( attribute );
}
time.setAttributesOfTime( attributes );
OMElement element = xml.getElement( child, xpath );
if ( element == null ) {
time.setIsNull( true );
} else {
time.setIsNull( false );
attributes = new ArrayList<OMAttribute>();
for ( attributesIterator = child.getAllAttributes(); attributesIterator.hasNext(); ) {
OMAttribute attribute = attributesIterator.next();
attributes.add( attribute );
}
time.setAttributesOfChild( attributes );
time.setElements( xml.getElements( element, xpath ) );
}
} else if ( childName.equals( "procedure" ) ) {
Iterator<OMAttribute> attributes;
for ( attributes = child.getAllAttributes(); attributes.hasNext(); ) {
OMAttribute attribute = attributes.next();
procedures.add( attribute.getAttributeValue() );
}
} else if ( childName.equals( "observedProperty" ) ) {
Iterator<OMAttribute> attributes;
for ( attributes = child.getAllAttributes(); attributes.hasNext(); ) {
OMAttribute attribute = attributes.next();
observedProperties.add( attribute.getAttributeValue() );
}
} else if ( childName.equals( "featureOfInterest" ) ) {
featuresOfInterest.add( child );
} else if ( childName.equals( "responseFormat" ) ) {
responseFormats.add( child );
} else if ( childName.equals( "responseMode" ) ) {
responseModes.add( child );
} else if ( childName.equals( "resultModel" ) ) {
resultModels.add( child );
}
}
offering.setMetadata( metadata );
offering.setBoundedBy( boundedBy );
offering.setIntendedApplications( intendedApplication );
offering.setTime( time );
offering.setProcedures( procedures );
offering.setObservedProperties( observedProperties );
offering.setFeaturesOfInterest( featuresOfInterest );
offering.setResponseFormats( responseFormats );
offering.setResponseModes( responseModes );
offering.setResultModels( resultModels );
result.add( offering );
}
return result;
}
/**
* @return the storage holding the parsed GetCapabilities information
*/
public StorageGetCapabilities getStorage() {
return storage;
}
}
|
package ti.modules.titanium.ui.widget.picker;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import org.appcelerator.kroll.KrollDict;
import org.appcelerator.kroll.KrollProxy;
import org.appcelerator.kroll.common.Log;
import org.appcelerator.titanium.TiC;
import org.appcelerator.titanium.proxy.TiViewProxy;
import org.appcelerator.titanium.util.TiConvert;
import org.appcelerator.titanium.util.TiUIHelper;
import org.appcelerator.titanium.view.TiUIView;
import ti.modules.titanium.ui.PickerColumnProxy;
import ti.modules.titanium.ui.PickerProxy;
import android.app.Activity;
import android.content.Context;
import android.os.Build;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewParent;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.ArrayAdapter;
import android.widget.Spinner;
import android.widget.TextView;
public class TiUINativePicker extends TiUIPicker
implements OnItemSelectedListener
{
private static final String TAG = "TiUINativePicker";
private boolean firstSelectedFired = false;
public static class TiSpinnerAdapter<T> extends ArrayAdapter<T>
{
boolean fontSet = false;
String fontSize = null;
String fontWeight = null;
String fontFamily = null;
String fontStyle = null;
public TiSpinnerAdapter(Context context, int textViewResourceId, List<T> objects)
{
super(context, textViewResourceId, objects);
}
@Override
public View getView(int position, View convertView, ViewGroup parent)
{
TextView tv = (TextView) super.getView(position, convertView, parent);
if (fontSet) {
TiUIHelper.styleText(tv, fontFamily, fontSize, fontWeight, fontStyle);
}
return tv;
}
@Override
public View getDropDownView(int position, View convertView, ViewGroup parent)
{
TextView tv = (TextView) super.getDropDownView(position, convertView, parent);
if (fontSet) {
TiUIHelper.styleText(tv, fontFamily, fontSize, fontWeight, fontStyle);
}
return tv;
}
public void setFontProperties(HashMap<String, Object> d)
{
if (d != null) {
fontSet = true;
if (d.containsKey("fontSize")) {
fontSize = TiConvert.toString(d, "fontSize");
}
if (d.containsKey("fontWeight")) {
fontWeight = TiConvert.toString(d, "fontWeight");
}
if (d.containsKey("fontFamily")) {
fontFamily = TiConvert.toString(d, "fontFamily");
}
if (d.containsKey("fontStyle")) {
fontStyle = TiConvert.toString(d, "fontStyle");
}
}
}
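// Illustrative usage sketch, not part of the original widget: the font dictionary passed in is
// expected to use the keys read above ("fontSize", "fontWeight", "fontFamily", "fontStyle");
// the concrete values below are hypothetical examples only.
//
//     HashMap<String, Object> font = new HashMap<String, Object>();
//     font.put("fontFamily", "sans-serif");
//     font.put("fontSize", "16sp");
//     adapter.setFontProperties(font);
//     adapter.notifyDataSetChanged();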
}
public TiUINativePicker(TiViewProxy proxy)
{
super(proxy);
}
public TiUINativePicker(final TiViewProxy proxy, Activity activity)
{
this(proxy);
Spinner spinner = new Spinner(activity)
{
@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom)
{
super.onLayout(changed, left, top, right, bottom);
TiUIHelper.firePostLayoutEvent(proxy);
}
@Override
public boolean onTouchEvent(MotionEvent event) {
if (event.getAction() == MotionEvent.ACTION_UP) {
KrollDict data = new KrollDict();
data.put(TiC.PROPERTY_X, event.getX());
data.put(TiC.PROPERTY_Y, event.getY());
fireEvent(TiC.EVENT_CLICK, data);
}
return super.onTouchEvent(event);
}
};
setNativeView(spinner);
refreshNativeView();
preselectRows();
spinner.setOnItemSelectedListener(this);
}
private void preselectRows()
{
ArrayList<Integer> preselectedRows = getPickerProxy().getPreselectedRows();
if (preselectedRows == null || preselectedRows.size() == 0) {
return;
}
Spinner spinner = (Spinner)nativeView;
if (spinner == null) {
return;
}
try {
spinner.setOnItemSelectedListener(null);
for (int i = 0; i < preselectedRows.size(); i++) {
Integer rowIndex = preselectedRows.get(i);
if (rowIndex == null || rowIndex.intValue() < 0) {
continue;
}
selectRow(i, rowIndex, false);
}
} finally {
spinner.setOnItemSelectedListener(this);
firstSelectedFired = true;
}
}
@Override
public void selectRow(int columnIndex, int rowIndex, boolean animated)
{
// At the moment we only support one column.
if (columnIndex != 0) {
Log.w(TAG, "Only one column is supported. Ignoring request to set selected row of column " + columnIndex);
return;
}
Spinner view = (Spinner)nativeView;
int rowCount = view.getAdapter().getCount();
if (rowIndex < 0 || rowIndex >= rowCount) {
Log.w(TAG, "Ignoring request to select out-of-bounds row index " + rowIndex);
return;
}
view.setSelection(rowIndex, animated);
}
@Override
public int getSelectedRowIndex(int columnIndex)
{
if (columnIndex != 0) {
Log.w(TAG, "Ignoring request to get selected row from out-of-bounds columnIndex " + columnIndex);
return -1;
}
return ((Spinner)getNativeView()).getSelectedItemPosition();
}
@Override
protected void refreshNativeView()
{
Spinner spinner = (Spinner)nativeView;
if (spinner == null) {
return;
}
// Don't allow change events here
suppressChangeEvent = true;
try {
spinner.setOnItemSelectedListener(null);
int rememberSelectedRow = getSelectedRowIndex(0);
KrollDict font = null;
if (proxy.hasProperty(TiC.PROPERTY_FONT)) {
font = proxy.getProperties().getKrollDict(TiC.PROPERTY_FONT);
}
// Just one column - the first column - for now.
// Maybe someday we'll support multiple columns.
PickerColumnProxy column = getPickerProxy().getFirstColumn(false);
if (column == null) {
return;
}
TiViewProxy[] rowArray = column.getChildren();
if (rowArray == null || rowArray.length == 0) {
return;
}
ArrayList<TiViewProxy> rows = new ArrayList<TiViewProxy>(Arrays.asList(rowArray));
// At the moment we're using the simple spinner layouts provided
// in android because we're only supporting a piece of text, which
// is fetched via PickerRowProxy.toString(). If we allow
// anything beyond a string, we'll have to implement our own
// layouts (maybe our own Adapter too.)
TiSpinnerAdapter<TiViewProxy> adapter = new TiSpinnerAdapter<TiViewProxy>(spinner.getContext(),
android.R.layout.simple_spinner_item, rows);
adapter.setFontProperties(font);
adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
spinner.setAdapter(adapter);
if (rememberSelectedRow >= 0) {
selectRow(0, rememberSelectedRow, false);
}
} catch(Throwable t) {
Log.e(TAG, "Unable to refresh native spinner control: " + t.getMessage(), t);
} finally {
suppressChangeEvent = false;
spinner.setOnItemSelectedListener(this);
}
}
@Override
public void onItemSelected(AdapterView<?> parent, View view, int position,
long itemId)
{
if (!firstSelectedFired) {
// swallow the first selected event that gets fired after the adapter gets set, so as to avoid
// firing our change event in that case.
firstSelectedFired = true;
return;
}
fireSelectionChange(0, position);
// Invalidate the parent view after the item is selected (TIMOB-13540).
if (Build.VERSION.SDK_INT >= TiC.API_LEVEL_HONEYCOMB) {
ViewParent p = nativeView.getParent();
if (p instanceof View) {
((View) p).invalidate();
}
}
}
@Override
public void onNothingSelected(AdapterView<?> arg0)
{
}
public void add(TiUIView child)
{
// Don't do anything. We don't add/remove views to the native picker (the Android "Spinner").
}
@Override
public void remove(TiUIView child)
{
// Don't do anything. We don't add/remove views to the native picker (the Android "Spinner").
}
@Override
public void onColumnAdded(int columnIndex)
{
if (!batchModelChange) {
refreshNativeView();
}
}
@Override
public void onColumnRemoved(int oldColumnIndex)
{
if (!batchModelChange) {
refreshNativeView();
}
}
@Override
public void onColumnModelChanged(int columnIndex)
{
if (!batchModelChange) {
refreshNativeView();
}
}
@Override
public void onRowChanged(int columnIndex, int rowIndex)
{
if (!batchModelChange) {
refreshNativeView();
}
}
protected void fireSelectionChange(int columnIndex, int rowIndex)
{
((PickerProxy)proxy).fireSelectionChange(columnIndex, rowIndex);
}
@Override
public void propertyChanged(String key, Object oldValue, Object newValue, KrollProxy proxy)
{
if (key.equals(TiC.PROPERTY_FONT)) {
Spinner spinner = (Spinner) nativeView;
TiSpinnerAdapter<TiViewProxy> adapter = (TiSpinnerAdapter<TiViewProxy>) spinner.getAdapter();
adapter.setFontProperties((HashMap) newValue);
adapter.notifyDataSetChanged();
} else {
super.propertyChanged(key, oldValue, newValue, proxy);
}
}
}
|
package caprica6.fblandroidhackathon.org.caprica6;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.speech.RecognizerIntent;
import android.support.wearable.view.WatchViewStub;
import android.view.View;
import android.widget.TextView;
import com.parse.Parse;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class MainActivity extends Activity {
//private static final List<String> COMMAND_WORDS = new ArrayList<String>();
private static final Map<String,Command> COMMAND_WORDS = new HashMap<String, Command>();
static {
//COMMAND_WORDS.put("noop");
COMMAND_WORDS.put("land",Command.LAND);
COMMAND_WORDS.put("panic",Command.LAND); //land
COMMAND_WORDS.put("take",Command.TAKEOFF); //take off
COMMAND_WORDS.put("left",Command.TURN_LEFT);
COMMAND_WORDS.put("right",Command.TURN_RIGHT);
COMMAND_WORDS.put("up",Command.UP);
COMMAND_WORDS.put("down",Command.DOWN);
/* COMMAND_WORDS.add("degrees");
COMMAND_WORDS.add("units");
COMMAND_WORDS.add("execute");
COMMAND_WORDS.add("clear"); //all commands
COMMAND_WORDS.add("remove"); //last command
COMMAND_WORDS.add("flip");
COMMAND_WORDS.add("10");
COMMAND_WORDS.add("15");
COMMAND_WORDS.add("20");
COMMAND_WORDS.add("25");
COMMAND_WORDS.add("30");
COMMAND_WORDS.add("35");
COMMAND_WORDS.add("40");
COMMAND_WORDS.add("45");
COMMAND_WORDS.add("60");
COMMAND_WORDS.add("90");
COMMAND_WORDS.add("135");
COMMAND_WORDS.add("180");
COMMAND_WORDS.add("270");
COMMAND_WORDS.add("360");
COMMAND_WORDS.add("position");
*/
}
private static final int SPEECH_REQUEST_CODE = 1;
private TextView mDebugTextView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
final WatchViewStub stub = (WatchViewStub) findViewById(R.id.watch_view_stub);
stub.setOnLayoutInflatedListener(new WatchViewStub.OnLayoutInflatedListener() {
@Override
public void onLayoutInflated(WatchViewStub stub) {
mDebugTextView = (TextView) stub.findViewById(R.id.text);
}
});
stub.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
displaySpeechRecognizer();
}
});
}
private void displaySpeechRecognizer() {
Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
startActivityForResult(intent, SPEECH_REQUEST_CODE);
}
@Override
protected void onActivityResult(int requestCode, int resultCode,
Intent data) {
if (requestCode == SPEECH_REQUEST_CODE && resultCode == RESULT_OK) {
List<String> results = data.getStringArrayListExtra(
RecognizerIntent.EXTRA_RESULTS);
String foundCommand = mapCommands(results);
String spokenText = results.get(0);
StringBuilder sb = new StringBuilder(mDebugTextView.getText());
sb.append(spokenText);
sb.append("->");
sb.append(foundCommand);
mDebugTextView.setText(sb.toString());
}
super.onActivityResult(requestCode, resultCode, data);
}
private String mapCommands(List<String> results) {
for(String candidate : results) {
for(String command : COMMAND_WORDS.keySet()) {
if(candidate.contains(command)){
sendCommand(command);
return command;
}
}
}
return "noop";
}
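// Illustrative sketch, not part of the original app: mapCommands() does a simple substring match
// of each recognized phrase against the COMMAND_WORDS keys, so for example:
//
//     mapCommands(Arrays.asList("please take off now"));  // matches key "take" -> dispatches Command.TAKEOFF, returns "take"
//     mapCommands(Arrays.asList("hello there"));           // no key matches, nothing dispatched, returns "noop"
//
// (java.util.Arrays is not imported here; the calls above are for illustration only.)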
private void sendCommand(String command) {
//TODO dispatch message to parse from here
CommandDispatcher.dispatch(COMMAND_WORDS.get(command));
}
}
|
package com.gps.capstone.traceroute;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.widget.EditText;
import android.widget.RelativeLayout;
import android.widget.TextView;
import android.widget.Toast;
import com.gps.capstone.traceroute.listeners.AccelerometerListener;
import com.gps.capstone.traceroute.listeners.BarometerListener;
import com.gps.capstone.traceroute.listeners.DirectionListener;
import com.gps.capstone.traceroute.listeners.GravityListener;
import com.gps.capstone.traceroute.listeners.GyroscopeListner;
import com.gps.capstone.traceroute.listeners.LinearAccelerationListener;
import com.gps.capstone.traceroute.listeners.StepCounterListener;
import com.gps.capstone.traceroute.listeners.StepDetectorListener;
import com.gps.capstone.traceroute.sensors.listeners.GyroscopeListener;
public class DebugConsole extends BasicActivity implements SensorEventListener {
// Tag used for logging
private final String TAG = this.getClass().getSimpleName();
// The sensor values will provide us values and we can register things with it
private SensorManager sensorManager;
// Lets grab each of the sensors we will be using and their corresponding listener
private Sensor mAccelerometer;
private SensorEventListener mAccelerometerListener;
private Sensor mGyroscope;
private SensorEventListener mGyroscopeListener;
private Sensor mBarometer;
private SensorEventListener barometerListener;
private Sensor mLinearAcceleration;
private SensorEventListener mLinearAccelerationListener;
private Sensor mGravity;
private SensorEventListener mGravityListener;
private Sensor mStepCounter;
private SensorEventListener mStepCounterListener;
private Sensor mStepDetector;
private SensorEventListener mStepDetectorListener;
private Sensor mDirectionVector;
private Sensor mGeomagneticDV;
private SensorEventListener mDirectionListener;
private com.gps.capstone.traceroute.sensors.listeners.DirectionListener mDL;
private GyroscopeListener mGL;
private Sensor mCompass;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_debug_console);
// For detecting direction
mDL = new com.gps.capstone.traceroute.sensors.listeners.DirectionListener(this);
mGL = new GyroscopeListener(this);
sensorManager = (SensorManager) getSystemService(SENSOR_SERVICE);
// Grab the mAccelerometer
mAccelerometer = sensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
mAccelerometerListener = new AccelerometerListener((RelativeLayout) findViewById(R.id.acc_values));
// Grab the mGyroscope
mGyroscope = sensorManager.getDefaultSensor(Sensor.TYPE_GYROSCOPE);
mGyroscopeListener = new GyroscopeListner((RelativeLayout) findViewById(R.id.gyro_values));
// Grab the mBarometer
mBarometer = sensorManager.getDefaultSensor(Sensor.TYPE_PRESSURE);
barometerListener = new BarometerListener((RelativeLayout) findViewById(R.id.barr_values));
// Grab the Linear Acceleration (Software sensor I think)
mLinearAcceleration = sensorManager.getDefaultSensor(Sensor.TYPE_LINEAR_ACCELERATION);
mLinearAccelerationListener = new LinearAccelerationListener((RelativeLayout) findViewById(R.id.lin_acc_values));
// Grab the mGravity sensor
mGravity = sensorManager.getDefaultSensor(Sensor.TYPE_GRAVITY);
mGravityListener = new GravityListener((RelativeLayout) findViewById(R.id.grav_values));
// Grab the step counter
mStepCounter = sensorManager.getDefaultSensor(Sensor.TYPE_STEP_COUNTER);
if (mStepCounter == null) {
Log.e(TAG, "FUCK");
}
mStepCounterListener = new StepCounterListener((RelativeLayout) findViewById(R.id.step_vals));
// Grab the step detector
mStepDetector = sensorManager.getDefaultSensor(Sensor.TYPE_STEP_DETECTOR);
mStepDetectorListener = new StepDetectorListener((RelativeLayout) findViewById(R.id.step_detect_vals));
// Get the direction vectors and listener
mDirectionVector = sensorManager.getDefaultSensor(Sensor.TYPE_ROTATION_VECTOR);
mGeomagneticDV = sensorManager.getDefaultSensor(Sensor.TYPE_GEOMAGNETIC_ROTATION_VECTOR);
if (mGeomagneticDV == null) {
Log.e(TAG, "AHH");
}
mDirectionListener = new DirectionListener((RelativeLayout) findViewById(R.id.direction_vals));
mCompass = sensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD);
}
@Override
protected void onResume() {
super.onResume();
Toast.makeText(this, "Registering the listeners", Toast.LENGTH_SHORT).show();
Log.d(TAG, "Registered the listeners");
mDL.register();
mGL.register();
// Register all the sensors with the listeners
sensorManager.registerListener(mAccelerometerListener, mAccelerometer, SensorManager.SENSOR_DELAY_NORMAL);
sensorManager.registerListener(mGyroscopeListener, mGyroscope, SensorManager.SENSOR_DELAY_UI);
sensorManager.registerListener(barometerListener, mBarometer, SensorManager.SENSOR_DELAY_NORMAL);
sensorManager.registerListener(mLinearAccelerationListener, mLinearAcceleration, SensorManager.SENSOR_DELAY_NORMAL);
sensorManager.registerListener(mGravityListener, mGravity, SensorManager.SENSOR_DELAY_NORMAL);
sensorManager.registerListener(mStepCounterListener, mStepCounter, SensorManager.SENSOR_DELAY_NORMAL);
sensorManager.registerListener(mStepDetectorListener, mStepDetector, SensorManager.SENSOR_DELAY_NORMAL, Sensor.REPORTING_MODE_SPECIAL_TRIGGER);
sensorManager.registerListener(mDirectionListener, mDirectionVector, SensorManager.SENSOR_DELAY_NORMAL);
sensorManager.registerListener(mDirectionListener, mGeomagneticDV, SensorManager.SENSOR_DELAY_NORMAL);
sensorManager.registerListener(mGravityListener, mGyroscope, SensorManager.SENSOR_DELAY_NORMAL);
sensorManager.registerListener(this, mCompass, SensorManager.SENSOR_DELAY_NORMAL);
}
@Override
protected void onPause() {
super.onPause();
Toast.makeText(this, "Unregister the listeners", Toast.LENGTH_SHORT).show();
Log.d(TAG, "Unregistered the listeners");
mDL.unregister();
mGL.unregister();
// Unregister the listeners; it is unclear how this interacts with the phone going to sleep.
sensorManager.unregisterListener(mAccelerometerListener);
sensorManager.unregisterListener(mGyroscopeListener);
sensorManager.unregisterListener(barometerListener);
sensorManager.unregisterListener(mLinearAccelerationListener);
sensorManager.unregisterListener(mGravityListener);
sensorManager.unregisterListener(mStepCounterListener);
sensorManager.unregisterListener(mStepDetectorListener);
sensorManager.unregisterListener(mDirectionListener);
sensorManager.unregisterListener(this);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_debug_console, menu);
return true;
}
@Override
public void onSensorChanged(SensorEvent event) {
// Compass handling is done inline here rather than in its own listener.
((TextView) findViewById(R.id.comp_x_val)).setText(String.valueOf(event.values[0]));
((TextView) findViewById(R.id.comp_y_val)).setText(String.valueOf(event.values[1]));
((TextView) findViewById(R.id.comp_z_val)).setText(String.valueOf(event.values[2]));
}
@Override
public void onAccuracyChanged(Sensor sensor, int accuracy) {
}
}
|
package org.spine3.examples.todolist.c.aggregate;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.spine3.examples.todolist.LabelId;
import org.spine3.examples.todolist.TaskId;
import org.spine3.examples.todolist.UnsuccessfulTaskCommand;
import org.spine3.examples.todolist.c.failures.CannotAssignLabelToTask;
import org.spine3.examples.todolist.c.failures.CannotCreateDraft;
import org.spine3.examples.todolist.c.failures.CannotCreateTaskWithInappropriateDescription;
import org.spine3.examples.todolist.c.failures.CannotRemoveLabelFromTask;
import org.spine3.examples.todolist.c.failures.CannotUpdateTaskDescription;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.spine3.examples.todolist.c.aggregate.FailureHelper.TaskCreationFailures.throwCannotCreateDraftFailure;
import static org.spine3.examples.todolist.c.aggregate.FailureHelper.TaskCreationFailures.throwCannotCreateTaskWithInappropriateDescriptionFailure;
import static org.spine3.examples.todolist.c.aggregate.FailureHelper.TaskLabelFailures.throwCannotAssignLabelToTaskFailure;
import static org.spine3.examples.todolist.c.aggregate.FailureHelper.TaskLabelFailures.throwCannotRemoveLabelFromTaskFailure;
import static org.spine3.examples.todolist.c.aggregate.FailureHelper.UpdateFailures.throwCannotUpdateTaskDescriptionFailure;
import static org.spine3.test.Tests.hasPrivateParameterlessCtor;
/**
* @author Illia Shepilov
*/
@DisplayName("FailureHelper should")
class FailureHelperTest {
private final TaskId taskId = TaskId.getDefaultInstance();
private final LabelId labelId = LabelId.getDefaultInstance();
@Test
@DisplayName("have the private constructor")
public void havePrivateConstructor() {
assertTrue(hasPrivateParameterlessCtor(FailureHelper.class));
}
@Test
@DisplayName("throw CannotCreateDraft failure")
public void throwCannotCreateDraft() {
try {
throwCannotCreateDraftFailure(taskId);
} catch (CannotCreateDraft ex) {
final TaskId actual = ex.getFailure()
.getCreateDraftFailed()
.getFailedCommand()
.getTaskId();
assertEquals(taskId, actual);
}
}
@Test
@DisplayName("throw CannotRemoveLabeFromTask failure")
public void throwCannotRemoveLabelFromTask() {
try {
throwCannotRemoveLabelFromTaskFailure(labelId, taskId);
} catch (CannotRemoveLabelFromTask ex) {
final TaskId actual = ex.getFailure()
.getRemoveLabelFailed()
.getFailedCommand()
.getTaskId();
assertEquals(taskId, actual);
}
}
@Test
@DisplayName("throw CannotUpdateTaskDescription failure")
public void throwCannotUpdateTaskDescription() {
final String message = "Description is wrong";
try {
throwCannotUpdateTaskDescriptionFailure(taskId, message);
} catch (CannotUpdateTaskDescription ex) {
final UnsuccessfulTaskCommand failedCommand = ex.getFailure()
.getUpdateFailed()
.getFailedCommand();
final TaskId actualId = failedCommand.getTaskId();
assertEquals(taskId, actualId);
assertEquals(message, failedCommand.getMessage());
}
}
@Test
@DisplayName("throw CannotAssignLabelToTask failure")
public void throwCannotAssignLabelToTask() {
try {
throwCannotAssignLabelToTaskFailure(taskId, labelId);
} catch (CannotAssignLabelToTask ex) {
final UnsuccessfulTaskCommand failedCommand = ex.getFailure()
.getAssignLabelFailed()
.getFailedCommand();
final TaskId actualId = failedCommand.getTaskId();
assertEquals(taskId, actualId);
}
}
@Test
@DisplayName("throw CannotCreateTaskWithInappropriateDescription failure")
public void throwCannotCreateTaskWithInappropriateDescription() {
try {
throwCannotCreateTaskWithInappropriateDescriptionFailure(taskId);
} catch (CannotCreateTaskWithInappropriateDescription ex) {
final TaskId actual = ex.getFailure()
.getCreateTaskFailed()
.getFailedCommand()
.getTaskId();
assertEquals(taskId, actual);
}
}
}
|
package com.battlelancer.seriesguide.ui.episodes;
import android.content.Intent;
import android.content.res.Resources;
import android.database.Cursor;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.core.widget.TextViewCompat;
import androidx.fragment.app.Fragment;
import androidx.loader.app.LoaderManager;
import androidx.loader.content.CursorLoader;
import androidx.loader.content.Loader;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
import butterknife.OnLongClick;
import butterknife.Unbinder;
import com.battlelancer.seriesguide.R;
import com.battlelancer.seriesguide.api.Action;
import com.battlelancer.seriesguide.backend.settings.HexagonSettings;
import com.battlelancer.seriesguide.extensions.ActionsHelper;
import com.battlelancer.seriesguide.extensions.EpisodeActionsContract;
import com.battlelancer.seriesguide.extensions.EpisodeActionsLoader;
import com.battlelancer.seriesguide.extensions.ExtensionManager;
import com.battlelancer.seriesguide.provider.SeriesGuideContract.Episodes;
import com.battlelancer.seriesguide.provider.SeriesGuideContract.ListItemTypes;
import com.battlelancer.seriesguide.provider.SeriesGuideContract.Seasons;
import com.battlelancer.seriesguide.provider.SeriesGuideContract.Shows;
import com.battlelancer.seriesguide.provider.SeriesGuideDatabase.Tables;
import com.battlelancer.seriesguide.settings.DisplaySettings;
import com.battlelancer.seriesguide.streaming.StreamingSearch;
import com.battlelancer.seriesguide.streaming.StreamingSearchConfigureDialog;
import com.battlelancer.seriesguide.thetvdbapi.TvdbImageTools;
import com.battlelancer.seriesguide.thetvdbapi.TvdbLinks;
import com.battlelancer.seriesguide.traktapi.CheckInDialogFragment;
import com.battlelancer.seriesguide.traktapi.RateDialogFragment;
import com.battlelancer.seriesguide.traktapi.TraktCredentials;
import com.battlelancer.seriesguide.traktapi.TraktRatingsTask;
import com.battlelancer.seriesguide.traktapi.TraktTools;
import com.battlelancer.seriesguide.ui.BaseNavDrawerActivity;
import com.battlelancer.seriesguide.ui.FullscreenImageActivity;
import com.battlelancer.seriesguide.ui.SeriesGuidePreferences;
import com.battlelancer.seriesguide.ui.comments.TraktCommentsActivity;
import com.battlelancer.seriesguide.ui.lists.ManageListsDialogFragment;
import com.battlelancer.seriesguide.util.ClipboardTools;
import com.battlelancer.seriesguide.util.LanguageTools;
import com.battlelancer.seriesguide.util.ServiceUtils;
import com.battlelancer.seriesguide.util.ShareUtils;
import com.battlelancer.seriesguide.util.TextTools;
import com.battlelancer.seriesguide.util.TimeTools;
import com.battlelancer.seriesguide.util.Utils;
import com.battlelancer.seriesguide.util.ViewTools;
import com.squareup.picasso.Callback;
import com.squareup.picasso.Picasso;
import com.uwetrottmann.androidutils.CheatSheet;
import java.text.NumberFormat;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import org.greenrobot.eventbus.EventBus;
import org.greenrobot.eventbus.Subscribe;
import org.greenrobot.eventbus.ThreadMode;
import timber.log.Timber;
/**
* Displays details about a single episode like summary, ratings and episode image if available.
*/
public class EpisodeDetailsFragment extends Fragment implements EpisodeActionsContract {
private static final String ARG_EPISODE_TVDBID = "episode_tvdbid";
private static final String ARG_IS_IN_MULTIPANE_LAYOUT = "multipane";
private static final String KEY_EPISODE_TVDB_ID = "episodeTvdbId";
private Handler handler = new Handler();
private TraktRatingsTask ratingsTask;
private boolean isInMultipane;
private int episodeTvdbId;
private int showTvdbId;
@Nullable private String showTvdbSlug;
private int seasonTvdbId;
private int seasonNumber;
private int episodeNumber;
private int episodeFlag;
private boolean collected;
private String episodeTitle;
private String showTitle;
private int showRunTime;
private long episodeReleaseTime;
@BindView(R.id.containerEpisode) View containerEpisode;
@BindView(R.id.containerRatings) View containerRatings;
@BindView(R.id.containerEpisodeActions) LinearLayout containerActions;
@BindView(R.id.containerEpisodeImage) View containerImage;
@BindView(R.id.imageViewEpisode) ImageView imageViewEpisode;
@BindView(R.id.textViewEpisodeTitle) TextView textViewTitle;
@BindView(R.id.textViewEpisodeDescription) TextView textViewDescription;
@BindView(R.id.textViewEpisodeReleaseTime) TextView textViewReleaseTime;
@BindView(R.id.textViewEpisodeReleaseDate) TextView textViewReleaseDate;
@BindView(R.id.labelEpisodeGuestStars) View textViewGuestStarsLabel;
@BindView(R.id.textViewEpisodeGuestStars) TextView textViewGuestStars;
@BindView(R.id.textViewEpisodeDirectors) TextView textViewDirectors;
@BindView(R.id.textViewEpisodeWriters) TextView textViewWriters;
@BindView(R.id.labelEpisodeDvd) View textViewDvdLabel;
@BindView(R.id.textViewEpisodeDvd) TextView textViewDvd;
@BindView(R.id.textViewRatingsValue) TextView textViewRating;
@BindView(R.id.textViewRatingsRange) TextView textViewRatingRange;
@BindView(R.id.textViewRatingsVotes) TextView textViewRatingVotes;
@BindView(R.id.textViewRatingsUser) TextView textViewRatingUser;
@BindView(R.id.dividerEpisodeButtons) View dividerEpisodeButtons;
@BindView(R.id.buttonEpisodeCheckin) Button buttonCheckin;
@BindView(R.id.buttonEpisodeStreamingSearch) Button buttonStreamingSearch;
@BindView(R.id.buttonEpisodeWatched) Button buttonWatch;
@BindView(R.id.buttonEpisodeCollected) Button buttonCollect;
@BindView(R.id.buttonEpisodeSkip) Button buttonSkip;
@BindView(R.id.buttonEpisodeImdb) Button imdbButton;
@BindView(R.id.buttonEpisodeTvdb) Button tvdbButton;
@BindView(R.id.buttonEpisodeTrakt) Button traktButton;
@BindView(R.id.buttonEpisodeComments) Button commentsButton;
private Unbinder unbinder;
public static EpisodeDetailsFragment newInstance(int episodeId, boolean isInMultiPaneLayout) {
EpisodeDetailsFragment f = new EpisodeDetailsFragment();
// Supply index input as an argument.
Bundle args = new Bundle();
args.putInt(ARG_EPISODE_TVDBID, episodeId);
args.putBoolean(ARG_IS_IN_MULTIPANE_LAYOUT, isInMultiPaneLayout);
f.setArguments(args);
return f;
}
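// Illustrative usage sketch, not part of the original fragment: a host activity that already knows
// an episode TVDB id could attach this fragment roughly like so; R.id.fragment_container below is a
// hypothetical container id, not one defined by this project.
//
//     EpisodeDetailsFragment fragment = EpisodeDetailsFragment.newInstance(episodeTvdbId, false);
//     getSupportFragmentManager().beginTransaction()
//             .replace(R.id.fragment_container, fragment)
//             .commit();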
@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Bundle args = getArguments();
if (args != null) {
isInMultipane = args.getBoolean(ARG_IS_IN_MULTIPANE_LAYOUT);
episodeTvdbId = args.getInt(ARG_EPISODE_TVDBID);
} else {
throw new IllegalArgumentException("Missing arguments");
}
}
@Override
public View onCreateView(@NonNull LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View v = inflater.inflate(R.layout.fragment_episode, container, false);
unbinder = ButterKnife.bind(this, v);
containerEpisode.setVisibility(View.GONE);
textViewRatingRange.setText(getString(R.string.format_rating_range, 10));
// episode buttons
Resources.Theme theme = requireActivity().getTheme();
ViewTools.setVectorIconTop(theme, buttonWatch, R.drawable.ic_watch_black_24dp);
ViewTools.setVectorIconTop(theme, buttonCollect, R.drawable.ic_collect_black_24dp);
ViewTools.setVectorIconTop(theme, buttonSkip, R.drawable.ic_skip_black_24dp);
ViewTools.setVectorIconLeft(theme, buttonCheckin, R.drawable.ic_checkin_black_24dp);
ViewTools.setVectorIconLeft(theme, buttonStreamingSearch,
R.drawable.ic_play_arrow_black_24dp);
// comments button
ViewTools.setVectorIconLeft(theme, commentsButton, R.drawable.ic_forum_black_24dp);
// other bottom buttons
ViewTools.setVectorIconLeft(theme, imdbButton, R.drawable.ic_link_black_24dp);
ViewTools.setVectorIconLeft(theme, tvdbButton, R.drawable.ic_link_black_24dp);
ViewTools.setVectorIconLeft(theme, traktButton, R.drawable.ic_link_black_24dp);
// set up long-press to copy text to clipboard (d-pad friendly vs text selection)
ClipboardTools.copyTextToClipboardOnLongClick(textViewTitle);
ClipboardTools.copyTextToClipboardOnLongClick(textViewReleaseTime);
ClipboardTools.copyTextToClipboardOnLongClick(textViewDescription);
ClipboardTools.copyTextToClipboardOnLongClick(textViewGuestStars);
ClipboardTools.copyTextToClipboardOnLongClick(textViewDirectors);
ClipboardTools.copyTextToClipboardOnLongClick(textViewWriters);
ClipboardTools.copyTextToClipboardOnLongClick(textViewDvd);
ClipboardTools.copyTextToClipboardOnLongClick(textViewReleaseDate);
return v;
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
LoaderManager.getInstance(this)
.initLoader(EpisodesActivity.EPISODE_LOADER_ID, null, episodeLoaderCallbacks);
setHasOptionsMenu(true);
}
@Override
public void onResume() {
super.onResume();
BaseNavDrawerActivity.ServiceActiveEvent event = EventBus.getDefault()
.getStickyEvent(BaseNavDrawerActivity.ServiceActiveEvent.class);
setEpisodeButtonsEnabled(event == null);
EventBus.getDefault().register(this);
loadEpisodeActionsDelayed();
}
@Override
public void onPause() {
super.onPause();
if (handler != null) {
handler.removeCallbacks(actionsRunnable);
}
EventBus.getDefault().unregister(this);
}
@Override
public void onDestroyView() {
super.onDestroyView();
// Always cancel the request here; this is safe to call even if the image has already been loaded.
// This ensures that the anonymous callback does not prevent the fragment from being garbage
// collected, and that the callback is not invoked after the fragment is destroyed.
Picasso.get().cancelRequest(imageViewEpisode);
unbinder.unbind();
}
@Override
public void onDestroy() {
super.onDestroy();
if (ratingsTask != null) {
ratingsTask.cancel(true);
ratingsTask = null;
}
}
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
super.onCreateOptionsMenu(menu, inflater);
boolean isLightTheme = SeriesGuidePreferences.THEME == R.style.Theme_SeriesGuide_Light;
// multi-pane layout has non-transparent action bar, adjust icon color
inflater.inflate(isLightTheme && !isInMultipane
? R.menu.episodedetails_menu_light : R.menu.episodedetails_menu, menu);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
int itemId = item.getItemId();
if (itemId == R.id.menu_share) {
shareEpisode();
return true;
} else if (itemId == R.id.menu_manage_lists) {
ManageListsDialogFragment
.show(getFragmentManager(), episodeTvdbId, ListItemTypes.EPISODE);
return true;
} else if (itemId == R.id.menu_action_episode_calendar) {
ShareUtils.suggestCalendarEvent(requireActivity(), showTitle,
TextTools.getNextEpisodeString(requireActivity(), seasonNumber, episodeNumber,
episodeTitle), episodeReleaseTime, showRunTime);
return true;
}
return super.onOptionsItemSelected(item);
}
/**
* If episode was watched, flags as unwatched. Otherwise, flags as watched.
*/
private void onToggleWatched() {
boolean watched = EpisodeTools.isWatched(episodeFlag);
changeEpisodeFlag(watched ? EpisodeFlags.UNWATCHED : EpisodeFlags.WATCHED);
}
/**
* If episode was skipped, flags as unwatched. Otherwise, flags as skipped.
*/
private void onToggleSkipped() {
boolean skipped = EpisodeTools.isSkipped(episodeFlag);
changeEpisodeFlag(skipped ? EpisodeFlags.UNWATCHED : EpisodeFlags.SKIPPED);
}
private void changeEpisodeFlag(int episodeFlag) {
this.episodeFlag = episodeFlag;
EpisodeTools.episodeWatched(requireContext(), showTvdbId, episodeTvdbId,
seasonNumber, episodeNumber, episodeFlag);
}
private void onToggleCollected() {
collected = !collected;
EpisodeTools.episodeCollected(requireContext(), showTvdbId, episodeTvdbId,
seasonNumber, episodeNumber, collected);
}
@OnClick(R.id.buttonEpisodeStreamingSearch)
void onButtonStreamingSearchClick() {
if (showTitle == null) {
return;
}
if (StreamingSearch.isNotConfigured(requireContext())) {
showStreamingSearchConfigDialog();
} else {
StreamingSearch.searchForShow(requireContext(), showTitle);
}
}
@OnLongClick(R.id.buttonEpisodeStreamingSearch)
boolean onButtonStreamingSearchLongClick() {
showStreamingSearchConfigDialog();
return true;
}
private void showStreamingSearchConfigDialog() {
StreamingSearchConfigureDialog.show(requireFragmentManager());
}
@Subscribe(threadMode = ThreadMode.MAIN)
public void onStreamingSearchConfigured(
StreamingSearchConfigureDialog.StreamingSearchConfiguredEvent event) {
if (event.getTurnedOff()) {
buttonStreamingSearch.setVisibility(View.GONE);
} else {
onButtonStreamingSearchClick();
}
}
@Override
@Subscribe(threadMode = ThreadMode.MAIN)
public void onEventMainThread(ExtensionManager.EpisodeActionReceivedEvent event) {
if (episodeTvdbId == event.episodeTvdbId) {
loadEpisodeActionsDelayed();
}
}
@Subscribe(threadMode = ThreadMode.MAIN)
public void onEventEpisodeTask(BaseNavDrawerActivity.ServiceActiveEvent event) {
setEpisodeButtonsEnabled(false);
}
@Subscribe(threadMode = ThreadMode.MAIN)
public void onEventEpisodeTask(BaseNavDrawerActivity.ServiceCompletedEvent event) {
setEpisodeButtonsEnabled(true);
}
private void setEpisodeButtonsEnabled(boolean enabled) {
buttonWatch.setEnabled(enabled);
buttonCollect.setEnabled(enabled);
buttonSkip.setEnabled(enabled);
buttonCheckin.setEnabled(enabled);
buttonStreamingSearch.setEnabled(enabled);
}
private LoaderManager.LoaderCallbacks<Cursor> episodeLoaderCallbacks
= new LoaderManager.LoaderCallbacks<Cursor>() {
@Override
public Loader<Cursor> onCreateLoader(int id, Bundle args) {
return new CursorLoader(requireContext(), Episodes.buildEpisodeWithShowUri(String
.valueOf(episodeTvdbId)), DetailsQuery.PROJECTION, null, null, null);
}
@Override
public void onLoadFinished(@NonNull Loader<Cursor> loader, Cursor data) {
if (!isAdded()) {
return;
}
populateEpisodeData(data);
}
@Override
public void onLoaderReset(@NonNull Loader<Cursor> loader) {
// do nothing (we are never holding onto the cursor)
}
};
private void populateEpisodeData(Cursor cursor) {
if (cursor == null || !cursor.moveToFirst()) {
// no data to display
if (containerEpisode != null) {
containerEpisode.setVisibility(View.GONE);
}
return;
}
showTvdbId = cursor.getInt(DetailsQuery.SHOW_ID);
showTvdbSlug = cursor.getString(DetailsQuery.SHOW_SLUG);
seasonNumber = cursor.getInt(DetailsQuery.SEASON);
episodeNumber = cursor.getInt(DetailsQuery.NUMBER);
showRunTime = cursor.getInt(DetailsQuery.SHOW_RUNTIME);
episodeReleaseTime = cursor.getLong(DetailsQuery.FIRST_RELEASE_MS);
// title and description
episodeFlag = cursor.getInt(DetailsQuery.WATCHED);
episodeTitle = TextTools.getEpisodeTitle(requireContext(),
cursor.getString(DetailsQuery.TITLE), episodeNumber);
boolean hideDetails = EpisodeTools.isUnwatched(episodeFlag)
&& DisplaySettings.preventSpoilers(requireContext());
textViewTitle.setText(
TextTools.getEpisodeTitle(requireContext(), hideDetails ? null : episodeTitle,
episodeNumber));
String overview = cursor.getString(DetailsQuery.OVERVIEW);
if (TextUtils.isEmpty(overview)) {
// no description available, show no translation available message
String languageCode = cursor.getString(DetailsQuery.SHOW_LANGUAGE);
overview = getString(R.string.no_translation,
LanguageTools.getShowLanguageStringFor(getContext(), languageCode),
getString(R.string.tvdb));
} else if (hideDetails) {
overview = getString(R.string.no_spoilers);
}
long lastEditSeconds = cursor.getLong(DetailsQuery.LAST_EDITED);
textViewDescription.setText(
TextTools.textWithTvdbSource(textViewDescription.getContext(), overview,
lastEditSeconds));
// show title
showTitle = cursor.getString(DetailsQuery.SHOW_TITLE);
// release date, also build release time and day
boolean isReleased;
String timeText;
if (episodeReleaseTime != -1) {
Date actualRelease = TimeTools.applyUserOffset(requireContext(), episodeReleaseTime);
isReleased = TimeTools.isReleased(actualRelease);
textViewReleaseDate.setText(
TimeTools.formatToLocalDateAndDay(requireContext(), actualRelease));
String dateTime;
if (DisplaySettings.isDisplayExactDate(requireContext())) {
// "31. October 2010"
dateTime = TimeTools.formatToLocalDate(requireContext(), actualRelease);
} else {
// "in 15 mins"
dateTime = TimeTools.formatToLocalRelativeTime(requireContext(), actualRelease);
}
// append day: "in 15 mins (Fri)"
timeText = getString(R.string.format_date_and_day, dateTime,
TimeTools.formatToLocalDay(actualRelease)).toUpperCase(Locale.getDefault());
} else {
textViewReleaseDate.setText(R.string.unknown);
timeText = getString(R.string.episode_firstaired_unknown);
isReleased = false;
}
// absolute number (e.g. relevant for Anime): "ABSOLUTE 142"
int absoluteNumber = cursor.getInt(DetailsQuery.NUMBER_ABSOLUTE);
String absoluteNumberText = null;
if (absoluteNumber > 0) {
absoluteNumberText = NumberFormat.getIntegerInstance().format(absoluteNumber);
}
textViewReleaseTime.setText(TextTools.dotSeparate(timeText, absoluteNumberText));
// dim text color for title if not released
TextViewCompat.setTextAppearance(textViewTitle, isReleased
? R.style.TextAppearance_Title : R.style.TextAppearance_Title_Dim);
if (!isReleased) {
TextViewCompat.setTextAppearance(textViewReleaseTime,
R.style.TextAppearance_Caption_Dim);
}
// guest stars
ViewTools.setLabelValueOrHide(textViewGuestStarsLabel, textViewGuestStars,
TextTools.splitAndKitTVDBStrings(cursor.getString(DetailsQuery.GUESTSTARS))
);
// DVD episode number
ViewTools.setLabelValueOrHide(textViewDvdLabel, textViewDvd,
cursor.getDouble(DetailsQuery.NUMBER_DVD));
// directors
ViewTools.setValueOrPlaceholder(textViewDirectors, TextTools.splitAndKitTVDBStrings(cursor
.getString(DetailsQuery.DIRECTORS)));
// writers
ViewTools.setValueOrPlaceholder(textViewWriters, TextTools.splitAndKitTVDBStrings(cursor
.getString(DetailsQuery.WRITERS)));
// ratings
containerRatings.setOnClickListener(v -> rateEpisode());
CheatSheet.setup(containerRatings, R.string.action_rate);
// trakt rating
textViewRating.setText(
TraktTools.buildRatingString(cursor.getDouble(DetailsQuery.RATING_GLOBAL)));
textViewRatingVotes.setText(TraktTools.buildRatingVotesString(requireContext(),
cursor.getInt(DetailsQuery.RATING_VOTES)));
// user rating
textViewRatingUser.setText(TraktTools.buildUserRatingString(requireContext(),
cursor.getInt(DetailsQuery.RATING_USER)));
// episode image
final String imagePath = cursor.getString(DetailsQuery.IMAGE);
containerImage.setOnClickListener(v -> {
Intent intent = new Intent(requireActivity(), FullscreenImageActivity.class);
intent.putExtra(FullscreenImageActivity.EXTRA_IMAGE,
TvdbImageTools.artworkUrl(imagePath));
Utils.startActivityWithAnimation(requireActivity(), intent, v);
});
loadImage(imagePath, hideDetails);
// Buttons.
updatePrimaryButtons(cursor);
updateSecondaryButtons(cursor);
containerEpisode.setVisibility(View.VISIBLE);
loadDetails();
}
private void updatePrimaryButtons(Cursor cursor) {
// check in button
buttonCheckin.setOnClickListener(
v -> CheckInDialogFragment.show(getContext(), getFragmentManager(), episodeTvdbId));
CheatSheet.setup(buttonCheckin);
// hide check-in if not connected to trakt or hexagon is enabled
boolean isConnectedToTrakt = TraktCredentials.get(requireContext()).hasCredentials();
boolean displayCheckIn = isConnectedToTrakt && !HexagonSettings.isEnabled(requireContext());
buttonCheckin.setVisibility(displayCheckIn ? View.VISIBLE : View.GONE);
buttonStreamingSearch
.setNextFocusUpId(displayCheckIn ? R.id.buttonCheckIn : R.id.buttonEpisodeWatched);
// hide streaming search if turned off
boolean displayStreamingSearch = !StreamingSearch.isTurnedOff(requireContext());
buttonStreamingSearch.setVisibility(displayStreamingSearch ? View.VISIBLE : View.GONE);
dividerEpisodeButtons.setVisibility(displayCheckIn || displayStreamingSearch
? View.VISIBLE : View.GONE);
// watched button
Resources.Theme theme = requireActivity().getTheme();
boolean isWatched = EpisodeTools.isWatched(episodeFlag);
if (isWatched) {
ViewTools.setVectorDrawableTop(theme, buttonWatch, R.drawable.ic_watched_24dp);
} else {
ViewTools.setVectorIconTop(theme, buttonWatch, R.drawable.ic_watch_black_24dp);
}
buttonWatch.setOnClickListener(v -> onToggleWatched());
buttonWatch.setText(isWatched ? R.string.action_unwatched : R.string.action_watched);
CheatSheet.setup(buttonWatch, isWatched ? R.string.action_unwatched
: R.string.action_watched);
// collected button
collected = cursor.getInt(DetailsQuery.COLLECTED) == 1;
if (collected) {
ViewTools.setVectorDrawableTop(theme, buttonCollect, R.drawable.ic_collected_24dp);
} else {
ViewTools.setVectorIconTop(theme, buttonCollect, R.drawable.ic_collect_black_24dp);
}
buttonCollect.setOnClickListener(v -> onToggleCollected());
buttonCollect.setText(collected
? R.string.action_collection_remove : R.string.action_collection_add);
CheatSheet.setup(buttonCollect, collected
? R.string.action_collection_remove : R.string.action_collection_add);
// skip button
if (isWatched) {
// if watched do not allow skipping
buttonSkip.setVisibility(View.INVISIBLE);
} else {
buttonSkip.setVisibility(View.VISIBLE);
boolean isSkipped = EpisodeTools.isSkipped(episodeFlag);
if (isSkipped) {
ViewTools.setVectorDrawableTop(theme, buttonSkip, R.drawable.ic_skipped_24dp);
} else {
ViewTools.setVectorIconTop(theme, buttonSkip, R.drawable.ic_skip_black_24dp);
}
buttonSkip.setOnClickListener(v -> onToggleSkipped());
buttonSkip.setText(isSkipped ? R.string.action_dont_skip : R.string.action_skip);
CheatSheet.setup(buttonSkip,
isSkipped ? R.string.action_dont_skip : R.string.action_skip);
}
}
private void updateSecondaryButtons(Cursor cursor) {
// trakt
String traktLink = TraktTools.buildEpisodeUrl(episodeTvdbId);
ViewTools.openUriOnClick(traktButton, traktLink);
ClipboardTools.copyTextToClipboardOnLongClick(traktButton, traktLink);
// IMDb
String imdbId = cursor.getString(DetailsQuery.IMDBID);
if (TextUtils.isEmpty(imdbId)) {
// fall back to show IMDb id
imdbId = cursor.getString(DetailsQuery.SHOW_IMDBID);
}
ServiceUtils.setUpImdbButton(imdbId, imdbButton);
// TVDb
seasonTvdbId = cursor.getInt(DetailsQuery.SEASON_ID);
String tvdbLink = TvdbLinks.episode(showTvdbSlug, showTvdbId, seasonTvdbId, episodeTvdbId);
ViewTools.openUriOnClick(tvdbButton, tvdbLink);
ClipboardTools.copyTextToClipboardOnLongClick(tvdbButton, tvdbLink);
// trakt comments
commentsButton.setOnClickListener(v -> {
Intent intent = new Intent(requireActivity(), TraktCommentsActivity.class);
intent.putExtras(TraktCommentsActivity.createInitBundleEpisode(episodeTitle,
episodeTvdbId));
Utils.startActivityWithAnimation(requireActivity(), intent, v);
});
}
private void loadDetails() {
// update trakt ratings
if (ratingsTask == null || ratingsTask.getStatus() == AsyncTask.Status.FINISHED) {
ratingsTask = new TraktRatingsTask(requireContext(), showTvdbId, episodeTvdbId,
seasonNumber, episodeNumber);
ratingsTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
}
}
private void rateEpisode() {
RateDialogFragment.newInstanceEpisode(episodeTvdbId)
.safeShow(getContext(), getFragmentManager());
}
private void shareEpisode() {
if (episodeTitle == null || showTitle == null) {
return;
}
ShareUtils.shareEpisode(requireActivity(), showTvdbSlug, showTvdbId, seasonTvdbId,
episodeTvdbId, seasonNumber, episodeNumber, showTitle, episodeTitle);
}
private void loadImage(String imagePath, boolean hideDetails) {
// immediately hide container if there is no image
if (TextUtils.isEmpty(imagePath)) {
containerImage.setVisibility(View.GONE);
return;
}
if (hideDetails) {
// show image placeholder
imageViewEpisode.setScaleType(ImageView.ScaleType.CENTER_INSIDE);
imageViewEpisode.setImageResource(R.drawable.ic_photo_gray_24dp);
} else {
// try loading image
containerImage.setVisibility(View.VISIBLE);
ServiceUtils.loadWithPicasso(requireContext(), TvdbImageTools.artworkUrl(imagePath))
.error(R.drawable.ic_photo_gray_24dp)
.into(imageViewEpisode,
new Callback() {
@Override
public void onSuccess() {
imageViewEpisode.setScaleType(ImageView.ScaleType.CENTER_CROP);
}
@Override
public void onError(Exception e) {
imageViewEpisode.setScaleType(
ImageView.ScaleType.CENTER_INSIDE);
}
}
);
}
}
private LoaderManager.LoaderCallbacks<List<Action>> actionsLoaderCallbacks =
new LoaderManager.LoaderCallbacks<List<Action>>() {
@Override
public Loader<List<Action>> onCreateLoader(int id, Bundle args) {
int episodeTvdbId = args.getInt(KEY_EPISODE_TVDB_ID);
return new EpisodeActionsLoader(requireContext(), episodeTvdbId);
}
@Override
public void onLoadFinished(@NonNull Loader<List<Action>> loader,
List<Action> data) {
if (!isAdded()) {
return;
}
if (data == null) {
Timber.e("onLoadFinished: did not receive valid actions for %s",
episodeTvdbId);
} else {
Timber.d("onLoadFinished: received %s actions for %s", data.size(),
episodeTvdbId);
}
ActionsHelper.populateActions(requireActivity().getLayoutInflater(),
requireActivity().getTheme(), containerActions, data);
}
@Override
public void onLoaderReset(@NonNull Loader<List<Action>> loader) {
// do nothing, we are not holding onto the actions list
}
};
public void loadEpisodeActions() {
Bundle args = new Bundle();
args.putInt(KEY_EPISODE_TVDB_ID, episodeTvdbId);
LoaderManager.getInstance(this)
.restartLoader(EpisodesActivity.ACTIONS_LOADER_ID, args, actionsLoaderCallbacks);
}
Runnable actionsRunnable = this::loadEpisodeActions;
public void loadEpisodeActionsDelayed() {
handler.removeCallbacks(actionsRunnable);
handler.postDelayed(actionsRunnable,
EpisodeActionsContract.ACTION_LOADER_DELAY_MILLIS);
}
interface DetailsQuery {
String[] PROJECTION = new String[]{
Tables.EPISODES + "." + Episodes._ID,
Episodes.NUMBER,
Episodes.ABSOLUTE_NUMBER,
Episodes.DVDNUMBER,
Seasons.REF_SEASON_ID,
Episodes.SEASON,
Episodes.IMDBID,
Episodes.TITLE,
Episodes.OVERVIEW,
Episodes.FIRSTAIREDMS,
Episodes.DIRECTORS,
Episodes.GUESTSTARS,
Episodes.WRITERS,
Episodes.IMAGE,
Tables.EPISODES + "." + Episodes.RATING_GLOBAL,
Episodes.RATING_VOTES,
Episodes.RATING_USER,
Episodes.WATCHED,
Episodes.COLLECTED,
Episodes.LAST_EDITED,
Shows.REF_SHOW_ID,
Shows.IMDBID,
Shows.TITLE,
Shows.RUNTIME,
Shows.LANGUAGE,
Shows.SLUG
};
int _ID = 0;
int NUMBER = 1;
int NUMBER_ABSOLUTE = 2;
int NUMBER_DVD = 3;
int SEASON_ID = 4;
int SEASON = 5;
int IMDBID = 6;
int TITLE = 7;
int OVERVIEW = 8;
int FIRST_RELEASE_MS = 9;
int DIRECTORS = 10;
int GUESTSTARS = 11;
int WRITERS = 12;
int IMAGE = 13;
int RATING_GLOBAL = 14;
int RATING_VOTES = 15;
int RATING_USER = 16;
int WATCHED = 17;
int COLLECTED = 18;
int LAST_EDITED = 19;
int SHOW_ID = 20;
int SHOW_IMDBID = 21;
int SHOW_TITLE = 22;
int SHOW_RUNTIME = 23;
int SHOW_LANGUAGE = 24;
int SHOW_SLUG = 25;
}
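// Illustrative note, not part of the original query contract: PROJECTION and the index constants
// above are positional pairs, so reads in populateEpisodeData() must use the matching index, e.g.:
//
//     String title = cursor.getString(DetailsQuery.TITLE);              // column 7 of PROJECTION
//     boolean isCollected = cursor.getInt(DetailsQuery.COLLECTED) == 1; // column 18 of PROJECTION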
}
|
package com.jyutwaa.zhaoziliang.glimpse.Fragment.Bilibili;
import android.content.Context;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.design.widget.Snackbar;
import android.support.v7.widget.DefaultItemAnimator;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewStub;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import com.jyutwaa.zhaoziliang.glimpse.Adapter.Bilibili.CoverAdapter;
import com.jyutwaa.zhaoziliang.glimpse.Fragment.BaseFragment;
import com.jyutwaa.zhaoziliang.glimpse.Model.Bilibili.TopListType;
import com.jyutwaa.zhaoziliang.glimpse.Presenter.presenterImpl.IBilibiliCoverPresenterImpl;
import com.jyutwaa.zhaoziliang.glimpse.Presenter.viewImpl.IBilibiliIntegratedFragment;
import com.jyutwaa.zhaoziliang.glimpse.R;
import com.jyutwaa.zhaoziliang.glimpse.View.GridItemDividerDecoration;
public class CoverFragment extends BaseFragment implements IBilibiliIntegratedFragment {
View mView;
IBilibiliCoverPresenterImpl mIBilibiliCoverPresenterImpl;
LinearLayoutManager mLinearLayoutManager;
RecyclerView.OnScrollListener mScrollListener;
CoverAdapter mAdapter;
RecyclerView rv_content;
ProgressBar mProgressBar;
ViewStub vs_no_connection;
RelativeLayout rl_no_connection;
boolean isConnected = false;
@Nullable
@Override
public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
mView = inflater.inflate(R.layout.fragment_bilibili_cover, container, false);
initWidgets();
return mView;
}
@Override
public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
checkNetworkConnectivity();
initListeners();
initViewsAndLaunch();
}
@Override
public void updateList(TopListType topListType) {
if(topListType != null){
mAdapter.addItems(topListType.getCover_list().getAllItems());
}
}
@Override
public void showProgressbar() {
if(mProgressBar != null){
mProgressBar.setVisibility(View.VISIBLE);
}
}
@Override
public void hideProgressbar() {
mProgressBar.setVisibility(View.INVISIBLE);
}
@Override
public void showError(String message) {
if(rv_content != null){
Snackbar.make(rv_content, message, Snackbar.LENGTH_SHORT).show();
}
}
private void initViewsAndLaunch() {
mIBilibiliCoverPresenterImpl = new IBilibiliCoverPresenterImpl(getContext(), this);
mLinearLayoutManager = new LinearLayoutManager(getContext());
mAdapter = new CoverAdapter(getContext());
rv_content.setLayoutManager(mLinearLayoutManager);
rv_content.setHasFixedSize(true);
rv_content.addOnScrollListener(mScrollListener);
rv_content.setItemAnimator(new DefaultItemAnimator());
rv_content.addItemDecoration(new GridItemDividerDecoration(getContext(), R.dimen.divider_height, R.color.divider));
rv_content.setAdapter(mAdapter);
loadCoverTopList();
}
private void initListeners() {
mScrollListener = new RecyclerView.OnScrollListener() {
@Override
public void onScrollStateChanged(RecyclerView recyclerView, int newState) {
}
@Override
public void onScrolled(RecyclerView recyclerView, int dx, int dy) {
super.onScrolled(recyclerView, dx, dy);
if(dy > 0){
int viewItemCount = mLinearLayoutManager.getChildCount();
int totalItemCount = mLinearLayoutManager.getItemCount();
int pastItemCount = mLinearLayoutManager.findFirstVisibleItemPosition();
// Reached the last visible item: tell the user there is nothing more to load
if(viewItemCount + pastItemCount >= totalItemCount){
Snackbar.make(mView, "Reached the end of the list", Snackbar.LENGTH_SHORT).show();
}
}
}
};
}
private void checkNetworkConnectivity() {
ConnectivityManager manager = (ConnectivityManager) getActivity().getSystemService(Context.CONNECTIVITY_SERVICE);
NetworkInfo info = manager.getActiveNetworkInfo();
isConnected = info != null && info.isConnected();
if(!isConnected){
mProgressBar.setVisibility(View.INVISIBLE);
if(rl_no_connection == null){
rl_no_connection = (RelativeLayout) vs_no_connection.inflate();
}
}
}
private void initWidgets() {
rv_content = (RecyclerView) mView.findViewById(R.id.rv_cover);
mProgressBar = (ProgressBar) mView.findViewById(R.id.progress_cover);
vs_no_connection = (ViewStub) mView.findViewById(R.id.vs_cover_no_connection);
}
private void loadCoverTopList() {
if(mAdapter.getItemCount() > 0){
mAdapter.clearData();
}
mIBilibiliCoverPresenterImpl.getCoverTopList();
}
}
|
package com.optimalorange.cooltechnologies.ui.fragment;
import com.android.volley.Response;
import com.android.volley.VolleyError;
import com.optimalorange.cooltechnologies.BuildConfig;
import com.optimalorange.cooltechnologies.R;
import com.optimalorange.cooltechnologies.network.DestroyFavoriteRequest;
import com.optimalorange.cooltechnologies.network.GetMyFavoriteRequest;
import com.optimalorange.cooltechnologies.network.NetworkChecker;
import com.optimalorange.cooltechnologies.network.VolleySingleton;
import com.optimalorange.cooltechnologies.storage.DefaultSharedPreferencesSingleton;
import com.optimalorange.cooltechnologies.ui.LoginableBaseActivity;
import com.optimalorange.cooltechnologies.ui.ShowVideoDetailActivity;
import com.optimalorange.cooltechnologies.ui.entity.Empty;
import com.optimalorange.cooltechnologies.ui.entity.FavoriteFooter;
import com.optimalorange.cooltechnologies.ui.entity.Loading;
import com.optimalorange.cooltechnologies.ui.entity.Video;
import com.optimalorange.cooltechnologies.ui.viewholder.RecyclerEmptyViewHolder;
import com.optimalorange.cooltechnologies.ui.viewholder.RecyclerFavoriteFooterViewHolder;
import com.optimalorange.cooltechnologies.ui.viewholder.RecyclerFavoriteViewHolder;
import com.optimalorange.cooltechnologies.ui.viewholder.RecyclerLoadingViewHolder;
import com.umeng.analytics.MobclickAgent;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.content.Context;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.ContextMenu;
import android.view.LayoutInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import android.widget.Toast;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import gq.baijie.classbasedviewadapter.android.adapter.ClassBasedRecyclerViewAdapter;
import gq.baijie.classbasedviewadapter.android.adapter.DataSet;
import gq.baijie.classbasedviewadapter.android.adapter.ViewHolderFactoryRegister;
public class FavoriteFragment extends SwipeRefreshFragment {
private static final String DEFAULT_CATEGORY_LABEL = "";
private String mYoukuClientId;
private VolleySingleton mVolleySingleton;
private DefaultSharedPreferencesSingleton mDefaultSharedPreferencesSingleton;
private NetworkChecker mNetworkChecker;
private List<Loading> mLoadingDataSet;
private FavoritesDataSet mFavoritesDataSet;
private boolean mIsLoadingFavorites = false;
private final ClassBasedRecyclerViewAdapter adapter = new ClassBasedRecyclerViewAdapter();
private ViewHolder vh;
private final LoginableBaseActivity.OnLoginStatusChangeListener mOnLoginStatusChangeListener =
new LoginableBaseActivity.OnLoginStatusChangeListener() {
@Override
public void onLoginStatusChanged(boolean hasLoggedIn) {
if (hasLoggedIn) {
setRefreshable(true);
onRefresh();
} else {
setRefreshable(false);
}
}
};
private void initProperties() {
Loading loading = new Loading();
Empty empty = new Empty();
FavoriteFooter haveMoreFooter = new FavoriteFooter();
FavoriteFooter noMoreFooter = new FavoriteFooter();
loading.hint = getString(R.string.favorite_new_loading);
empty.hint = getString(R.string.favorite_no_fav);
haveMoreFooter.hint = getString(R.string.favorite_view_more);
noMoreFooter.hint = getString(R.string.at_last);
haveMoreFooter.listener = new View.OnClickListener() {
@Override
public void onClick(View v) {
getJsonData();
}
};
mLoadingDataSet = Collections.singletonList(loading);
mFavoritesDataSet = new FavoritesDataSet();
mFavoritesDataSet.empty = empty;
mFavoritesDataSet.haveMoreFooter = haveMoreFooter;
mFavoritesDataSet.noMoreFooter = noMoreFooter;
final ViewHolderFactoryRegister register = adapter.getRegister();
register.registerViewHolderFactory(new RecyclerLoadingViewHolder.Factory());
register.registerViewHolderFactory(new RecyclerEmptyViewHolder.Factory());
register.registerViewHolderFactory(new RecyclerFavoriteFooterViewHolder.Factory());
register.registerViewHolderFactory(new RecyclerFavoriteViewHolder.Factory() {
@Override
public void bindViewHolder(
RecyclerFavoriteViewHolder holder, final Video value, final int position) {
super.bindViewHolder(holder, value, position);
holder.itemView.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
ShowVideoDetailActivity.start(v.getContext(), value.id);
}
});
holder.itemView.setOnCreateContextMenuListener(
new View.OnCreateContextMenuListener() {
@Override
public void onCreateContextMenu(ContextMenu menu, final View v,
ContextMenu.ContextMenuInfo menuInfo) {
menu.add(R.string.action_delete)
.setOnMenuItemClickListener(
new MenuItem.OnMenuItemClickListener() {
@Override
public boolean onMenuItemClick(MenuItem item) {
sendDeleteRequest(value.id, position);
return true;
}
});
}
});
}
});
adapter.setDataSet(mLoadingDataSet);
adapter.notifyDataSetChanged();
}
private void initState() {
mIsLoadingFavorites = false;
}
public void resetState() {
mIsLoadingFavorites = false; //TODO cancel pending requests
adapter.setDataSet(mLoadingDataSet);
adapter.notifyDataSetChanged();
mFavoritesDataSet.unsetFavorites();
}
public void addFavorites(Favorites added) {
if (mFavoritesDataSet.favorites != null) {
mFavoritesDataSet.addFavorites(added, adapter);
} else {
final Favorites newFavorites = new Favorites(new ArrayList<Video>());
newFavorites.add(added);
mFavoritesDataSet.favorites = newFavorites;
adapter.setDataSet(mFavoritesDataSet);
adapter.notifyDataSetChanged();
}
}
public void removeFavorites(int position) {
if (mFavoritesDataSet.favorites == null) {
throw new IllegalStateException("removeFavorites called before any favorites were loaded");
}
mFavoritesDataSet.remove(position, adapter);
}
@Override
public void onAttach(Context context) {
super.onAttach(context);
mVolleySingleton = VolleySingleton.getInstance(getActivity());
mYoukuClientId = getString(R.string.youku_client_id);
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mNetworkChecker = NetworkChecker.newInstance(getActivity());
initProperties();
initState();
}
@Override
protected View onCreateChildView(
LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_favorite, container, false);
vh = new ViewHolder(rootView);
return rootView;
}
@Override
public void onRefresh() {
getNewData();
}
@Override
public void onViewCreated(View view, Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
mDefaultSharedPreferencesSingleton =
DefaultSharedPreferencesSingleton.getInstance(getActivity());
((LoginableBaseActivity) getActivity())
.addLoginStatusChangeListener(mOnLoginStatusChangeListener);
if (((LoginableBaseActivity) getActivity()).hasLoggedIn()) {
setRefreshable(true);
} else {
setRefreshable(false);
}
vh.favorites.setLayoutManager(new LinearLayoutManager(vh.favorites.getContext()));
vh.favorites.setAdapter(adapter);
}
@Override
public void onStart() {
super.onStart();
// load data if we have not done it yet
if (mFavoritesDataSet.favorites == null && !mIsLoadingFavorites) {
vh.favorites.setVisibility(View.GONE);
getNewData();
}
}
@Override
public void onResume() {
super.onResume();
MobclickAgent.onPageStart(getClass().getSimpleName());
}
@Override
public void onPause() {
MobclickAgent.onPageEnd(getClass().getSimpleName());
super.onPause();
}
@Override
public void onDestroyView() {
((LoginableBaseActivity) getActivity())
.removeLoginStatusChangeListener(mOnLoginStatusChangeListener);
vh.favorites.setAdapter(null);
vh = null;
super.onDestroyView();
}
@Override
public void onDestroy() {
mNetworkChecker = null;
super.onDestroy();
}
private void getNewData() {
resetState();
getJsonData();
}
public void getJsonData() {
if (mIsLoadingFavorites) {
return;
}
if (vh.favorites.getVisibility() == View.GONE) {
setHint(R.string.favorite_new_loading);
}
if (mDefaultSharedPreferencesSingleton.hasLoggedIn()) {
if (!mNetworkChecker.isConnected()) {
setHint(R.string.favorite_hint_no_net);
return;
}
String token = mDefaultSharedPreferencesSingleton.retrieveString("access_token", "");
int nextPage;
if (mFavoritesDataSet.favorites == null) {
nextPage = 1;
} else {
nextPage = mFavoritesDataSet.favorites.getCurrentPage() + 1;
}
GetMyFavoriteRequest favoriteRequest = buildGetMyFavoriteRequest(token, nextPage, 10);
mIsLoadingFavorites = true;
mVolleySingleton.addToRequestQueue(favoriteRequest);
} else {
setHint(R.string.favorite_hint_no_login);
}
}
private void setHint(int res) {
if (vh != null) {
vh.mainHint.setText(getString(res));
vh.mainHint.setVisibility(View.VISIBLE);
vh.favorites.setVisibility(View.GONE);
vh.mainHint.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
getNewData();
}
});
}
}
private void hideHint() {
if (vh != null) {
vh.mainHint.setVisibility(View.GONE);
vh.favorites.setVisibility(View.VISIBLE);
}
}
@Override
protected boolean canChildScrollUp() {
return vh.favorites.getVisibility() == View.VISIBLE &&
vh.favorites.canScrollVertically(-1);
}
private void sendDeleteRequest(String id, final int index) {
if (!mNetworkChecker.isConnected()) {
Toast.makeText(getActivity(), R.string.favorite_delete_no_net, Toast.LENGTH_SHORT)
.show();
return;
}
String token = mDefaultSharedPreferencesSingleton.retrieveString("access_token", "");
if (!mDefaultSharedPreferencesSingleton.hasLoggedIn()) {
Toast.makeText(getActivity(), R.string.favorite_delete_no_login, Toast.LENGTH_SHORT)
.show();
return;
}
mVolleySingleton.addToRequestQueue(buildDestroyFavoriteRequest(token, id, index));
}
private GetMyFavoriteRequest buildGetMyFavoriteRequest(String token, int page, int count) {
final GetMyFavoriteRequestHandler handler =
new GetMyFavoriteRequestHandler(new WeakReference<>(this));
return new GetMyFavoriteRequest.Builder()
.setClient_id(mYoukuClientId)
.setAccess_token(token)
.setPage(page)
.setCount(count)
.setResponseListener(handler)
.setErrorListener(handler)
.build();
}
private DestroyFavoriteRequest buildDestroyFavoriteRequest(
String token, String videoId, int videoIndexInListView) {
final DestroyFavoriteRequestHandler handler = new DestroyFavoriteRequestHandler(
videoIndexInListView, new WeakReference<>(getContext()), new WeakReference<>(this));
return new DestroyFavoriteRequest.Builder()
.setClient_id(mYoukuClientId)
.setVideo_id(videoId)
.setAccess_token(token)
.setResponseListener(handler)
.setErrorListener(handler)
.build();
}
private static class GetMyFavoriteRequestHandler
implements Response.Listener<JSONObject>, Response.ErrorListener {
private final WeakReference<FavoriteFragment> mOwner;
private GetMyFavoriteRequestHandler(WeakReference<FavoriteFragment> owner) {
mOwner = owner;
}
@Override
public void onErrorResponse(VolleyError error) {
new RuntimeException(error).printStackTrace();
final FavoriteFragment owner = mOwner.get();
if (owner != null) {
onFinished(owner);
//TODO show error info
}
}
@Override
public void onResponse(JSONObject response) {
final FavoriteFragment owner = mOwner.get();
if (owner != null) {
try {
doHandle(response, owner);
} catch (JSONException e) {
e.printStackTrace();
}
onFinished(owner);
}
}
// NOTE: owner may be null here, e.g. when the fragment was destroyed and recreated while the request was in flight (it is only held via a WeakReference)
private void onFinished(@NonNull FavoriteFragment owner) {
owner.mIsLoadingFavorites = false;
owner.setRefreshing(false);
owner.hideHint();
}
private static void doHandle(JSONObject response, FavoriteFragment owner)
throws JSONException {
owner.addFavorites(convertToFavoriteInfo(response));
}
private static Favorites convertToFavoriteInfo(JSONObject jsonObject)
throws JSONException {
JSONArray videoArray = jsonObject.getJSONArray("videos");
Favorites favorites = new Favorites(convertNeededVideos(videoArray));
favorites.setCurrentReadCountIncludingUnneeded(videoArray.length());
favorites.setTotal(jsonObject.getInt("total"));
favorites.setCurrentPage(jsonObject.getInt("page"));
return favorites;
}
private static List<Video> convertNeededVideos(JSONArray videoArray)
throws JSONException {
List<Video> result = new LinkedList<>();
for (int i = 0; i < videoArray.length(); i++) {
JSONObject itemObject = videoArray.getJSONObject(i);
if (itemObject.getString("category").equals(DEFAULT_CATEGORY_LABEL)) {
result.add(convertToFavorite(itemObject));
}
}
return result;
}
private static Video convertToFavorite(JSONObject jsonObject) throws JSONException {
Video result = new Video();
result.title = jsonObject.getString("title");
result.link = jsonObject.getString("link");
result.thumbnail = jsonObject.getString("thumbnail");
result.duration = jsonObject.getString("duration");
result.id = jsonObject.getString("id");
return result;
}
}
private static class DestroyFavoriteRequestHandler
implements Response.Listener<JSONObject>, Response.ErrorListener {
private final int mVideoIndexInListView;
private final WeakReference<Context> mContextWeakReference;
private final WeakReference<FavoriteFragment> mOwner;
private DestroyFavoriteRequestHandler(
int videoIndexInListView,
WeakReference<Context> contextWeakReference,
WeakReference<FavoriteFragment> owner) {
mVideoIndexInListView = videoIndexInListView;
mContextWeakReference = contextWeakReference;
mOwner = owner;
}
@Override
public void onErrorResponse(VolleyError error) {
final Context context = mContextWeakReference.get();
if (context != null) {
final String message = context.getString(R.string.favorite_delete_fail);
Toast.makeText(context, message, Toast.LENGTH_SHORT).show();
}
}
@Override
public void onResponse(JSONObject response) {
final Context context = mContextWeakReference.get();
final FavoriteFragment owner = mOwner.get();
if (owner != null) {
owner.removeFavorites(mVideoIndexInListView);
}
if (context != null) {
final String message = context.getString(R.string.favorite_delete_success);
Toast.makeText(context, message, Toast.LENGTH_SHORT).show();
}
}
}
private static class FavoritesDataSet implements DataSet {
@Nullable
private Favorites favorites;
private Empty empty;
private FavoriteFooter haveMoreFooter;
private FavoriteFooter noMoreFooter;
@NonNull
private Favorites getNonNullFavorites() {
if (favorites != null) {
return favorites;
} else {
throw new IllegalStateException("haven't init FavoritesDataSet");
}
}
@Override
public int size() {
// size is favorites + 1: when getNonNullFavorites().isEmpty(), get(0) returns the empty placeholder, otherwise the extra item is the footer
return getNonNullFavorites().getInterestingFavorites().size() + 1;
}
//TODO
@Override
public Object get(int position) {
final List<Video> interestingFavorites =
getNonNullFavorites().getInterestingFavorites();
if (position < interestingFavorites.size()) {
return interestingFavorites.get(position);
} else {
if (!getNonNullFavorites().isEmpty()) {
return getFavoriteFooter(position);
} else {
return empty;
}
}
}
private FavoriteFooter getFavoriteFooter(int position) {
if (BuildConfig.DEBUG) {
// This block only runs in debug builds and is stripped from release builds
if (position != getNonNullFavorites().getInterestingFavorites().size()) { //NOPMD
throw new IllegalStateException();
}
}
if (!getNonNullFavorites().allRead()) {
return haveMoreFooter;
} else {
return noMoreFooter;
}
}
public void unsetFavorites() {
favorites = null;
}
public void addFavorites(Favorites added, RecyclerView.Adapter adapter) {
Favorites nonNullFavorites = getNonNullFavorites();
if (nonNullFavorites.isEmpty()) {
nonNullFavorites.add(added);
adapter.notifyDataSetChanged();
} else {
final int sizeBeforeAdd = nonNullFavorites.getInterestingFavorites().size();
nonNullFavorites.add(added);
adapter.notifyItemRangeInserted(
sizeBeforeAdd, added.getInterestingFavorites().size());
if (nonNullFavorites.allRead()) {
// all pages have been read: rebind the footer position so haveMoreFooter switches to noMoreFooter
adapter.notifyItemChanged(nonNullFavorites.getInterestingFavorites().size());
}
}
}
public void remove(int position, RecyclerView.Adapter adapter) {
Favorites nonNullFavorites = getNonNullFavorites();
nonNullFavorites.remove(position);
if (nonNullFavorites.isEmpty()) {
adapter.notifyDataSetChanged();
} else {
adapter.notifyItemRemoved(position);
}
}
}
private static class Favorites {
private int total;
private int currentPage;
private int currentReadCountIncludingUnneeded;
@NonNull
private final List<Video> interestingFavorites;
public Favorites(@NonNull List<Video> interestingFavorites) {
this.interestingFavorites = interestingFavorites;
}
// TODO
public boolean allRead() {
if (BuildConfig.DEBUG) {
// This block only runs in debug builds and is stripped from release builds
if (currentReadCountIncludingUnneeded > total) { //NOPMD
final String message = "currentReadCountIncludingUnneeded > total: " +
"currentReadCountIncludingUnneeded=" + currentReadCountIncludingUnneeded
+ ", total=" + total;
throw new AssertionError(message);
}
}
return currentReadCountIncludingUnneeded >= total;
}
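// Note (added): currentReadCountIncludingUnneeded counts every item fetched from the API,
// while interestingFavorites only keeps items in the wanted category, so allRead() has to
// compare the raw read count against total rather than the list size.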
public boolean isEmpty() {
if (total == 0) {
return true;
} else {
if (BuildConfig.DEBUG) {
// This block only runs in debug builds and is stripped from release builds
if (total < 0) { //NOPMD
throw new AssertionError("total < 0");
}
}
return allRead() && getInterestingFavorites().isEmpty();
}
}
@NonNull
public List<Video> getInterestingFavorites() {
return Collections.unmodifiableList(interestingFavorites);
}
public int getCurrentPage() {
return currentPage;
}
public void setCurrentPage(int currentPage) {
this.currentPage = currentPage;
}
public void setCurrentReadCountIncludingUnneeded(int currentReadCountIncludingUnneeded) {
this.currentReadCountIncludingUnneeded = currentReadCountIncludingUnneeded;
}
public void setTotal(int total) {
this.total = total;
}
//TODO check state
public void add(Favorites added) {
//TODO check total != added.total?
total = added.total;
//TODO check currentPage + 1 != added.currentPage?
//TODO
currentPage = added.currentPage;
//TODO response
currentReadCountIncludingUnneeded += added.currentReadCountIncludingUnneeded;
interestingFavorites.addAll(added.getInterestingFavorites());
}
//TODO check state
public void remove(int index) {
// keep the counters consistent with the list after a deletion
total--;
currentReadCountIncludingUnneeded--;
interestingFavorites.remove(index);
}
}
static class ViewHolder {
RecyclerView favorites;
TextView mainHint;
private ViewHolder(View root) {
favorites = (RecyclerView) root.findViewById(R.id.favorites);
mainHint = (TextView) root.findViewById(R.id.main_hint);
}
}
}
|
package com.appleframework.orm.mybatis.pagehelper;
import org.apache.ibatis.builder.StaticSqlSource;
import org.apache.ibatis.builder.annotation.ProviderSqlSource;
import org.apache.ibatis.mapping.BoundSql;
import org.apache.ibatis.mapping.MappedStatement;
import org.apache.ibatis.mapping.SqlSource;
import org.apache.ibatis.plugin.Invocation;
import org.apache.ibatis.reflection.MetaObject;
import org.apache.ibatis.reflection.SystemMetaObject;
import org.apache.ibatis.scripting.defaults.RawSqlSource;
import org.apache.ibatis.scripting.xmltags.DynamicSqlSource;
import org.apache.ibatis.session.RowBounds;
import com.appleframework.orm.mybatis.pagehelper.parser.Parser;
import com.appleframework.orm.mybatis.pagehelper.parser.impl.AbstractParser;
import com.appleframework.orm.mybatis.pagehelper.sqlsource.PageDynamicSqlSource;
import com.appleframework.orm.mybatis.pagehelper.sqlsource.PageProviderSqlSource;
import com.appleframework.orm.mybatis.pagehelper.sqlsource.PageRawSqlSource;
import com.appleframework.orm.mybatis.pagehelper.sqlsource.PageSqlSource;
import com.appleframework.orm.mybatis.pagehelper.sqlsource.PageStaticSqlSource;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
@SuppressWarnings({"rawtypes"})
public class SqlUtil implements Constant {
private static final ThreadLocal<Page> LOCAL_PAGE = new ThreadLocal<Page>();
// mapping from logical paging parameter names to the property names read from the query parameter object
private static Map<String, String> PARAMS = new HashMap<String, String>(5);
// reflection handles used to read paging parameters from a javax.servlet.ServletRequest
private static Boolean hasRequest;
private static Class<?> requestClass;
private static Method getParameterMap;
static {
try {
requestClass = Class.forName("javax.servlet.ServletRequest");
getParameterMap = requestClass.getMethod("getParameterMap", new Class[]{});
hasRequest = true;
} catch (Throwable e) {
hasRequest = false;
}
}
// cache of generated count MappedStatements, keyed by the original statement id
private static final Map<String, MappedStatement> msCountMap = new ConcurrentHashMap<String, MappedStatement>();
// treat the RowBounds offset as the page number - disabled by default
private boolean offsetAsPageNo = false;
// whether RowBounds paging also runs a count query - disabled by default
private boolean rowBoundsWithCount = false;
// when true, a pageSize of 0 (or a RowBounds limit of 0) skips paging and returns the full result set
private boolean pageSizeZero = false;
// dialect-specific SQL parser
private Parser parser;
// whether paging parameters may be passed through mapper method arguments - disabled by default
private boolean supportMethodsArguments = false;
/**
* Create a SqlUtil for the given dialect.
*
* @param strDialect database dialect name, or the fully qualified class name of a custom Parser
*/
public SqlUtil(String strDialect) {
if (strDialect == null || "".equals(strDialect)) {
throw new IllegalArgumentException("Mybatisdialect!");
}
Exception exception = null;
try {
Dialect dialect = Dialect.of(strDialect);
parser = AbstractParser.newParser(dialect);
} catch (Exception e) {
exception = e;
try {
Class<?> parserClass = Class.forName(strDialect);
if (Parser.class.isAssignableFrom(parserClass)) {
parser = (Parser) parserClass.newInstance();
}
} catch (ClassNotFoundException ex) {
exception = ex;
} catch (InstantiationException ex) {
exception = ex;
} catch (IllegalAccessException ex) {
exception = ex;
}
}
if (parser == null) {
throw new RuntimeException(exception);
}
}
public static Boolean getCOUNT() {
Page page = getLocalPage();
if (page != null) {
return page.getCountSignal();
}
return null;
}
/**
* Get the Page bound to the current thread.
*
* @return the thread-local Page, or null when none is set
*/
public static Page getLocalPage() {
return LOCAL_PAGE.get();
}
public static void setLocalPage(Page page) {
LOCAL_PAGE.set(page);
}
public static void clearLocalPage() {
LOCAL_PAGE.remove();
}
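// Illustrative sketch (added; not part of the original source): typical lifecycle of the
// thread-local Page around a paged mapper call. The query itself is elided.
@SuppressWarnings("unused")
private static void localPageUsageSketch() {
setLocalPage(new Page(1, 10)); // bind "page 1, 10 rows per page" to the current thread
try {
// ... execute the mapper query here; the interceptor picks the Page up and rewrites the SQL ...
} finally {
clearLocalPage(); // always unbind so the Page does not leak into the next request on this thread
}
}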
/**
* Extract paging parameters from an arbitrary parameter object (bean, Map or ServletRequest).
*
* @param params query parameter object
* @return the Page built from the object's paging properties
*/
public static Page getPageFromObject(Object params) {
long pageNo;
long pageSize;
MetaObject paramsObject = null;
if (params == null) {
throw new NullPointerException("The paging query parameter object must not be null!");
}
if (hasRequest && requestClass.isAssignableFrom(params.getClass())) {
try {
paramsObject = SystemMetaObject.forObject(getParameterMap.invoke(params, new Object[]{}));
} catch (Exception e) {
// ignore: paramsObject stays null and the check below reports the problem
}
} else {
paramsObject = SystemMetaObject.forObject(params);
}
if (paramsObject == null) {
throw new NullPointerException("Could not read paging parameters from the query parameter object!");
}
Object orderBy = getParamValue(paramsObject, "orderBy", false);
boolean hasOrderBy = false;
if (orderBy != null && orderBy.toString().length() > 0) {
hasOrderBy = true;
}
try {
Object _pageNo = getParamValue(paramsObject, "pageNo", !hasOrderBy);
Object _pageSize = getParamValue(paramsObject, "pageSize", !hasOrderBy);
if (_pageNo == null || _pageSize == null) {
Page page = new Page();
page.setOrderBy(orderBy.toString());
page.setOrderByOnly(true);
return page;
}
pageNo = Long.parseLong(String.valueOf(_pageNo));
pageSize = Long.parseLong(String.valueOf(_pageSize));
} catch (NumberFormatException e) {
throw new IllegalArgumentException("The paging parameters are not valid numbers!");
}
Page page = new Page(pageNo, pageSize);
// whether a count query should be executed
Object _count = getParamValue(paramsObject, "count", false);
if (_count != null) {
page.setCount(Boolean.valueOf(String.valueOf(_count)));
}
if (hasOrderBy) {
page.setOrderBy(orderBy.toString());
}
Object pageSizeZero = getParamValue(paramsObject, "pageSizeZero", false);
if (pageSizeZero != null) {
page.setPageSizeZero(Boolean.valueOf(String.valueOf(pageSizeZero)));
}
return page;
}
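// Illustrative sketch (added; not part of the original source). With the default name
// mapping installed by setParams(...), a plain Map parameter such as
//   Map<String, Object> params = new HashMap<String, Object>();
//   params.put("pageNo", 2);
//   params.put("pageSize", 20);
//   Page page = getPageFromObject(params);
// yields a Page for page 2 with 20 rows per page; "orderBy", "count" and "pageSizeZero"
// are read the same way when present.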
/**
* Read a single paging property from the parameter object.
*
* @param paramsObject MetaObject wrapping the query parameters
* @param paramName    logical parameter name (key into PARAMS)
* @param required     whether a missing value is an error
* @return the value, or null when the property is absent and not required
*/
public static Object getParamValue(MetaObject paramsObject, String paramName, boolean required) {
Object value = null;
if (paramsObject.hasGetter(PARAMS.get(paramName))) {
value = paramsObject.getValue(PARAMS.get(paramName));
}
if (value != null && value.getClass().isArray()) {
Object[] values = (Object[]) value;
if (values.length == 0) {
value = null;
} else {
value = values[0];
}
}
if (required && value == null) {
throw new RuntimeException(":" + PARAMS.get(paramName));
}
return value;
}
/**
* Check whether the MappedStatement has already been wrapped with a paging SqlSource.
*
* @param ms MappedStatement
* @return true if its SqlSource is a PageSqlSource
*/
public boolean isPageSqlSource(MappedStatement ms) {
return ms.getSqlSource() instanceof PageSqlSource;
}
/**
* [Test helper] Print the count SQL and the page SQL generated for the given dialect.
*
* @param dialect     database dialect name
* @param originalSql original SQL
* @deprecated will be removed in 5.x
*/
@Deprecated
public static void testSql(String dialect, String originalSql) {
testSql(Dialect.of(dialect), originalSql);
}
/**
* [Test helper] Print the count SQL and the page SQL generated for the given dialect.
*
* @param dialect     database dialect
* @param originalSql original SQL
* @deprecated will be removed in 5.x
*/
@Deprecated
public static void testSql(Dialect dialect, String originalSql) {
Parser parser = AbstractParser.newParser(dialect);
if (dialect == Dialect.sqlserver) {
setLocalPage(new Page(1, 10));
}
String countSql = parser.getCountSql(originalSql);
System.out.println(countSql);
String pageSql = parser.getPageSql(originalSql);
System.out.println(pageSql);
if (dialect == Dialect.sqlserver) {
clearLocalPage();
}
}
/**
* Replace the MappedStatement's SqlSource with a paging-aware PageSqlSource.
*
* @param ms MappedStatement to process
* @throws Throwable when the SqlSource type is not supported
*/
public void processMappedStatement(MappedStatement ms) throws Throwable {
SqlSource sqlSource = ms.getSqlSource();
MetaObject msObject = SystemMetaObject.forObject(ms);
SqlSource pageSqlSource;
if (sqlSource instanceof StaticSqlSource) {
pageSqlSource = new PageStaticSqlSource((StaticSqlSource) sqlSource);
} else if (sqlSource instanceof RawSqlSource) {
pageSqlSource = new PageRawSqlSource((RawSqlSource) sqlSource);
} else if (sqlSource instanceof ProviderSqlSource) {
pageSqlSource = new PageProviderSqlSource((ProviderSqlSource) sqlSource);
} else if (sqlSource instanceof DynamicSqlSource) {
pageSqlSource = new PageDynamicSqlSource((DynamicSqlSource) sqlSource);
} else {
throw new RuntimeException("[" + sqlSource.getClass() + "]SqlSource");
}
msObject.setValue("sqlSource", pageSqlSource);
// create and cache a dedicated count MappedStatement for this statement id
msCountMap.put(ms.getId(), MSUtils.newCountMappedStatement(ms));
}
/**
* Resolve the Page for the current query from the thread-local value, the RowBounds
* argument or the mapper method parameters.
*
* @param args the intercepted Executor.query arguments
* @return the resolved Page
*/
public Page getPage(Object[] args) {
Page page = getLocalPage();
if (page == null || page.isOrderByOnly()) {
Page oldPage = page;
// when this branch is reached with page != null, page.isOrderByOnly() is necessarily true
if ((args[2] == null || args[2] == RowBounds.DEFAULT) && page != null) {
return oldPage;
}
if (args[2] instanceof RowBounds && args[2] != RowBounds.DEFAULT) {
RowBounds rowBounds = (RowBounds) args[2];
if (offsetAsPageNo) {
page = new Page(rowBounds.getOffset(), rowBounds.getLimit(), rowBoundsWithCount);
} else {
page = new Page(new long[]{rowBounds.getOffset(), rowBounds.getLimit()}, rowBoundsWithCount);
// with offsetAsPageNo=false the page number is derived from a raw offset, so "reasonable" page correction stays off
}
} else {
try {
page = getPageFromObject(args[1]);
} catch (Exception e) {
return null;
}
}
if (oldPage != null) {
page.setOrderBy(oldPage.getOrderBy());
}
setLocalPage(page);
}
// when pageSizeZero is true, a pageSize of 0 (or a RowBounds limit of 0) returns the full result set
if (page.getPageSizeZero() == null) {
page.setPageSizeZero(pageSizeZero);
}
return page;
}
/**
* Mybatis interceptor entry point. Wraps the real processing so that the thread-local
* Page is always cleared, even when the query throws.
*
* @param invocation intercepted invocation
* @return the query result
* @throws Throwable on any underlying error
*/
public Object processPage(Invocation invocation) throws Throwable {
try {
Object result = _processPage(invocation);
return result;
} finally {
clearLocalPage();
}
}
/**
* Core interception logic: decide whether this Mybatis query should be paged.
*
* @param invocation intercepted invocation
* @return the query result
* @throws Throwable on any underlying error
*/
private Object _processPage(Invocation invocation) throws Throwable {
final Object[] args = invocation.getArgs();
Page page = null;
// when method-argument paging is enabled, try to resolve the Page up front
if (supportMethodsArguments) {
page = getPage(args);
}
RowBounds rowBounds = (RowBounds) args[2];
// argument-based paging is enabled but no Page was supplied: do not page
if ((supportMethodsArguments && page == null)
// no thread-local Page and default RowBounds: do not page
|| (!supportMethodsArguments && SqlUtil.getLocalPage() == null && rowBounds == RowBounds.DEFAULT)) {
return invocation.proceed();
} else {
// the Page has not been resolved yet (argument-based paging disabled): resolve it now
if (!supportMethodsArguments && page == null) {
page = getPage(args);
}
return doProcessPage(invocation, page, args);
}
}
/**
* Decide whether this execution should skip paging and run the plain query.
*
* @param page resolved Page
* @return true when only ordering is requested, or pageSizeZero applies with a pageSize of 0
*/
private boolean isQueryOnly(Page page) {
return page.isOrderByOnly()
|| ((page.getPageSizeZero() != null && page.getPageSizeZero()) && page.getPageSize() == 0);
}
/**
* Execute the query without paging and wrap the full result list in the Page.
*
* @param page       resolved Page
* @param invocation intercepted invocation
* @return the Page filled with the complete result list
* @throws Throwable on any underlying error
*/
private Page doQueryOnly(Page page, Invocation invocation) throws Throwable {
page.setCountSignal(null);
Object result = invocation.proceed();
List resultList = (List) result;
page.setList(resultList);
page.setPageNo(1);
// query-only mode behaves as if pageSize == total
page.setPageSize(resultList.size());
// total still has to be set for callers that read it
page.setTotalCount(resultList.size());
// the return value is still a Page so downstream result handling stays uniform
return page;
}
/**
* Run the intercepted Mybatis query with the count query and paging applied.
*
* @param invocation intercepted invocation
* @return the Page containing the result list
* @throws Throwable on any underlying error
*/
private Page doProcessPage(Invocation invocation, Page page, Object[] args) throws Throwable {
// keep a reference to the original RowBounds argument
RowBounds rowBounds = (RowBounds) args[2];
MappedStatement ms = (MappedStatement) args[0];
// wrap the statement's SqlSource with a PageSqlSource on first use
if (!isPageSqlSource(ms)) {
processMappedStatement(ms);
}
// bind the dialect parser to the PageSqlSource via its thread-local
((PageSqlSource)ms.getSqlSource()).setParser(parser);
try {
// reset RowBounds - otherwise Mybatis would also apply its own in-memory paging
args[2] = RowBounds.DEFAULT;
// ordering-only query, or pageSizeZero with pageSize 0: run the plain query
if (isQueryOnly(page)) {
return doQueryOnly(page, invocation);
}
// when a total is requested, run the count query first
if (page.isCount()) {
page.setCountSignal(Boolean.TRUE);
args[0] = msCountMap.get(ms.getId());
Object result = invocation.proceed();
args[0] = ms;
page.setTotalCount(Long.parseLong(String.valueOf(((List) result).get(0))));
if (page.getTotalCount() == 0) {
return page;
}
} else {
page.setTotalCount(-1L);
}
// only run the paged query when pageSize > 0; with pageSize <= 0 the caller at most gets the count
if (page.getPageSize() > 0 &&
((rowBounds == RowBounds.DEFAULT && page.getPageNo() > 0)
|| rowBounds != RowBounds.DEFAULT)) {
// rewrite the query parameter object so it carries the paging parameters for the new SQL
page.setCountSignal(null);
BoundSql boundSql = ms.getBoundSql(args[1]);
args[1] = parser.setPageParameter(ms, args[1], boundSql, page);
page.setCountSignal(Boolean.FALSE);
Object result = invocation.proceed();
page.setList((List) result);
}
} finally {
((PageSqlSource)ms.getSqlSource()).removeParser();
}
return page;
}
public void setOffsetAsPageNo(boolean offsetAsPageNo) {
this.offsetAsPageNo = offsetAsPageNo;
}
public void setRowBoundsWithCount(boolean rowBoundsWithCount) {
this.rowBoundsWithCount = rowBoundsWithCount;
}
public void setPageSizeZero(boolean pageSizeZero) {
this.pageSizeZero = pageSizeZero;
}
public void setSupportMethodsArguments(boolean supportMethodsArguments) {
this.supportMethodsArguments = supportMethodsArguments;
}
public static void setParams(String params) {
PARAMS.put("pageNo", "pageNo");
PARAMS.put("pageSize", "pageSize");
PARAMS.put("count", "countSql");
PARAMS.put("orderBy", "orderBy");
PARAMS.put("reasonable", "reasonable");
PARAMS.put("pageSizeZero", "pageSizeZero");
if (StringUtil.isNotEmpty(params)) {
String[] ps = params.split("[;|,|&]");
for (String s : ps) {
String[] ss = s.split("[=|:]");
if (ss.length == 2) {
PARAMS.put(ss[0], ss[1]);
}
}
}
}
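// Example (added; assumed format, matching the split logic above): a properties value of
//   params=pageNo=pageNum;pageSize=limit;count=countSql
// makes the plugin read the page number from a property called "pageNum" and the page
// size from a property called "limit" on the query parameter object.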
public void setProperties(Properties p) {
// treat the RowBounds offset as the page number
String offsetAsPageNo = p.getProperty("offsetAsPageNo");
this.offsetAsPageNo = Boolean.parseBoolean(offsetAsPageNo);
// run a count query for RowBounds paging
String rowBoundsWithCount = p.getProperty("rowBoundsWithCount");
this.rowBoundsWithCount = Boolean.parseBoolean(rowBoundsWithCount);
// when true, a pageSize of 0 (or a RowBounds limit of 0) returns the full result set
String pageSizeZero = p.getProperty("pageSizeZero");
this.pageSizeZero = Boolean.parseBoolean(pageSizeZero);
// when true, paging parameters may also be passed through mapper method arguments
// disabled by default
String supportMethodsArguments = p.getProperty("supportMethodsArguments");
this.supportMethodsArguments = Boolean.parseBoolean(supportMethodsArguments);
// configure the parameter name mapping used by getPageFromObject
setParams(p.getProperty("params"));
}
public void setSqlUtilConfig(SqlUtilConfig config) {
this.offsetAsPageNo = config.isOffsetAsPageNo();
this.rowBoundsWithCount = config.isRowBoundsWithCount();
this.pageSizeZero = config.isPageSizeZero();
this.supportMethodsArguments = config.isSupportMethodsArguments();
setParams(config.getParams());
}
}
|
package uk.org.ponder.rsf.test.sitemap;
import java.util.HashMap;
import uk.org.ponder.rsf.bare.junit.PlainRSFTests;
import uk.org.ponder.rsf.viewstate.EntityCentredViewParameters;
import uk.org.ponder.rsf.viewstate.support.BasicViewParametersParser;
public class TestSiteMap extends PlainRSFTests {
public TestSiteMap() {
contributeConfigLocation("classpath:uk/org/ponder/rsf/test/sitemap/sitemap-context.xml");
}
public void testParseECVP() {
BasicViewParametersParser bvpp = (BasicViewParametersParser) applicationContext.getBean("viewParametersParser");
HashMap attrmap = new HashMap();
attrmap.put("flowtoken", "ec38f0");
EntityCentredViewParameters ecvp = (EntityCentredViewParameters) bvpp.parse("/recipe/3652/", attrmap);
System.out.println("ECVP for entity " + ecvp.entity.ID + " of type " + ecvp.entity.entityname);
}
}
|
package net.thenumenorean.essence;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import org.bson.Document;
import com.gmail.kunicins.olegs.libshout.Libshout;
import com.google.common.io.Files;
import com.mongodb.client.model.Filters;
import com.mongodb.client.model.Updates;
import net.thenumenorean.essence.utils.RepeatingRunnable;
class TrackStreamer extends RepeatingRunnable {
private final MongoDriver mongoDriver;
private Libshout icecast;
private static final int DEFAULT_WAIT = 2000;
private InputStream track;
public TrackStreamer(MongoDriver mongoDriver) throws IOException {
this(mongoDriver, DEFAULT_WAIT);
}
public TrackStreamer(MongoDriver mongoDriver, int wait) throws IOException {
super(wait);
this.mongoDriver = mongoDriver;
icecast = new Libshout();
icecast.setHost("localhost");
icecast.setPort(8000);
icecast.setProtocol(Libshout.PROTOCOL_HTTP);
icecast.setPassword("SpaceMining");
icecast.setMount("/stream");
icecast.setFormat(Libshout.FORMAT_MP3);
icecast.setName("Essence");
icecast.setDescription("Essence music stream");
icecast.setUrl("http://essence.caltech.edu:8000/stream");
icecast.setGenre("All");
}
@Override
public void runBefore() {
// The icecast connection is opened lazily in loop() instead of here.
// try {
// icecast.open();
// } catch (IOException e) {
// e.printStackTrace();
// super.stop();
// }
}
@Override
public void runAfter() {
icecast.close();
}
@Override
public void loop() {
if (track == null) {
String next = getNextTrack();
if (next != null) {
EssenceRuntime.log.info("Found next track at " + Files.getNameWithoutExtension(next));
try {
track = new BufferedInputStream(new FileInputStream(new File(next)));
} catch (FileNotFoundException e) {
EssenceRuntime.log.info("Error reading file!");
e.printStackTrace();
}
} else {
EssenceRuntime.log.info("No next track available!");
return;
}
}
try {
if (!icecast.isConnected())
icecast.open();
byte[] buffer = new byte[1024];
int read = track.read(buffer);
while (read > 0 && !super.stoppedCalled()) {
icecast.send(buffer, read);
read = track.read(buffer);
}
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
track.close();
} catch (IOException e) {
}
track = null;
}
}
/**
* Gets the next track that should be played, rotating the playlist state in the process.
*
* @return the filesystem location of the next track, or null if none is available
*/
public String getNextTrack() {
Document next = mongoDriver.getPlaylistColection().find(Filters.eq("rank", 0)).first();
if (next == null) {
return null;
}
// There is a track, so proceed
synchronized (mongoDriver.getPlaylistColection()) {
// Remove the just-played song from the playlist
Document justPlayed = mongoDriver.getPlaylistColection().findOneAndDelete(Filters.eq("rank", -1));
mongoDriver.getHistoryColection().insertOne(justPlayed);
// Update playlist for meantime accesses
// Move the up-next song (at rank 0) to playing (-1)
mongoDriver.getPlaylistColection().updateMany(Filters.exists("rank"), Updates.inc("rank", -1));
}
Document nextTrack = mongoDriver.getTrack(next.getObjectId("track_id"));
if (nextTrack == null) {
EssenceRuntime.log.severe("Couldnt find track with id " + next.getObjectId("track_id"));
return null;
}
return nextTrack.getString("location");
}
}
|
package io.rover;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.annotation.AnimRes;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentTransaction;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.MenuItem;
import android.view.ViewGroup;
import android.widget.RelativeLayout;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.IOException;
import java.util.Date;
import java.util.UUID;
import io.rover.model.Action;
import io.rover.model.Block;
import io.rover.model.BlockPressEvent;
import io.rover.model.Experience;
import io.rover.model.ExperienceDismissEvent;
import io.rover.model.ExperienceLaunchEvent;
import io.rover.model.Screen;
import io.rover.model.ScreenViewEvent;
import io.rover.network.HttpResponse;
import io.rover.network.JsonResponseHandler;
import io.rover.network.NetworkTask;
import io.rover.ui.AssetManager;
import io.rover.ui.ExperienceScreenAnimation;
import io.rover.ui.ScreenFragment;
public class ExperienceActivity extends AppCompatActivity implements ScreenFragment.OnBlockListener {
private static String TAG = "ExperienceActivity";
private RelativeLayout mLayout;
private FetchExperienceTask mFetchTask;
private Experience mExperience;
private String mSessionId;
private boolean mHasPresentedFirstScreen = false;
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
RelativeLayout layout = new RelativeLayout(this);
layout.setLayoutParams(new RelativeLayout.LayoutParams(
ViewGroup.LayoutParams.MATCH_PARENT,
ViewGroup.LayoutParams.MATCH_PARENT));
layout.setId(R.id.screen_layout);
mLayout = layout;
setContentView(layout);
Uri data = getIntent().getData();
if (data != null) {
String experienceId = data.getPath();
if (experienceId != null) {
mFetchTask = new FetchExperienceTask();
mFetchTask.execute(experienceId);
}
}
mSessionId = UUID.randomUUID().toString();
}
@Override
protected void onDestroy() {
super.onDestroy();
/*
Clean up the image memory cache. We don't want to hold onto bitmaps when we aren't displaying the experience
*/
AssetManager manager = AssetManager.getSharedAssetManager(getApplicationContext());
manager.flushMemoryCache();
if (isFinishing() && mExperience != null) {
Rover.submitEvent(new ExperienceDismissEvent(mExperience, mSessionId, new Date()));
for (RoverObserver observer : Rover.mSharedInstance.mObservers) {
if (observer instanceof RoverObserver.ExperienceObserver) {
((RoverObserver.ExperienceObserver) observer).onExperienceDismiss(
mExperience,
mSessionId
);
}
}
}
}
public void setExperience(Experience experience) {
mExperience = experience;
Rover.submitEvent(new ExperienceLaunchEvent(mExperience, mSessionId, new Date()));
for (RoverObserver observer : Rover.mSharedInstance.mObservers) {
if (observer instanceof RoverObserver.ExperienceObserver) {
((RoverObserver.ExperienceObserver) observer).onExperienceLaunch(
mExperience,
mSessionId
);
}
if (observer instanceof RoverObserver.ExtendedExperienceObserver ) {
((RoverObserver.ExtendedExperienceObserver) observer).onExperienceLaunch(
this,
mExperience,
mSessionId
);
}
}
Screen homeScreen = mExperience.getHomeScreen();
presentNextScreen(homeScreen);
}
public void presentNextScreen(Screen screen) {
if (screen == null) {
return;
}
Fragment screenFragment = ScreenFragment.newInstance(screen);
presentNextScreen(screenFragment, screen, new ExperienceScreenAnimation());
}
public void presentNextScreen(Fragment screenFragment, Screen screen, ExperienceScreenAnimation animation) {
presentNextScreen(screenFragment, screen, animation, null, null);
}
public void presentNextScreen(Fragment screenFragment, Screen screen, ExperienceScreenAnimation animation, Screen fromScreen, Block fromBlock) {
if (screenFragment == null) {
return;
}
for (RoverObserver observer : Rover.mSharedInstance.mObservers) {
if (observer instanceof RoverObserver.ExtendedExperienceObserver) {
screenFragment = ((RoverObserver.ExtendedExperienceObserver) observer).willPresentScreen(this, screenFragment, screen);
}
}
if (screenFragment == null) {
return;
}
FragmentTransaction transaction = getSupportFragmentManager()
.beginTransaction()
.replace(mLayout.getId(), screenFragment, "SCREEN");
if (animation != null) {
transaction.setCustomAnimations(animation.getEnter(), animation.getExit(), animation.getPopEnter(), animation.getPopExit());
}
if (mHasPresentedFirstScreen) {
transaction.addToBackStack(null);
}
transaction.commitAllowingStateLoss();
if (!mHasPresentedFirstScreen) {
mHasPresentedFirstScreen = true;
}
if (screenFragment instanceof ScreenFragment) {
trackScreenView(screenFragment, screen, fromScreen, fromBlock);
}
}
public void popCurrentScreen() {
getSupportFragmentManager()
.popBackStack();
}
@Override
public void onBlockClick(Fragment screenFragment, Screen screen, Block block) {
Action action = block.getAction();
if (action == null) {
return;
}
Rover.submitEvent(new BlockPressEvent(block, screen, mExperience, mSessionId, new Date()));
for (RoverObserver observer : Rover.mSharedInstance.mObservers) {
if (observer instanceof RoverObserver.ExperienceObserver) {
((RoverObserver.ExperienceObserver) observer).onBlockClick(
block,
screen,
mExperience,
mSessionId
);
}
if (observer instanceof RoverObserver.ExtendedExperienceObserver) {
((RoverObserver.ExtendedExperienceObserver) observer).onBlockClick(
this,
screenFragment,
screen,
block,
mSessionId
);
}
}
switch (action.getType()) {
case Action.GOTO_SCREEN_ACTION: {
String screenId = action.getUrl();
Screen newScreen = mExperience.getScreen(screenId);
presentNextScreen(newScreen);
break;
}
default: {
Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(action.getUrl()));
startActivity(intent);
break;
}
}
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
if (item.getItemId() == android.R.id.home) {
getSupportFragmentManager().popBackStack();
return true;
} else {
return super.onOptionsItemSelected(item);
}
}
public static Intent createIntent(Context context, String id) {
Uri uri = new Uri.Builder().scheme("rover")
.authority("experience")
.appendPath(id).build();
Intent intent = new Intent(context, ExperienceActivity.class);
intent.setData(uri);
return intent;
}
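// Usage sketch (added; not part of the original source, the id value is a placeholder):
//   context.startActivity(ExperienceActivity.createIntent(context, "<experienceId>"));
// The rover://experience/<id> URI set on the intent is what onCreate() parses back out of
// getIntent().getData() before kicking off FetchExperienceTask.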
private void trackScreenView(Fragment screenFragment, Screen screen, Screen fromScreen, Block fromBlock) {
Rover.submitEvent(new ScreenViewEvent(screen, mExperience, fromScreen, fromBlock, mSessionId, new Date()));
for (RoverObserver observer : Rover.mSharedInstance.mObservers) {
if (observer instanceof RoverObserver.ExperienceObserver) {
((RoverObserver.ExperienceObserver) observer).onScreenView(
screen,
mExperience,
fromScreen,
fromBlock,
mSessionId
);
}
if (observer instanceof RoverObserver.ExtendedExperienceObserver) {
((RoverObserver.ExtendedExperienceObserver) observer).onScreenView(
this,
screenFragment,
mExperience,
screen,
fromScreen,
fromBlock,
mSessionId
);
}
}
}
private class FetchExperienceTask extends AsyncTask<String, Void, Experience> implements JsonResponseHandler.JsonCompletionHandler {
private ObjectMapper mObjectMapper;
private Experience experience = null;
@Override
protected Experience doInBackground(String... params) {
String experienceId = params[0];
if (experienceId == null) {
return null;
}
mObjectMapper = new ObjectMapper();
JsonResponseHandler responseHandler = new JsonResponseHandler();
responseHandler.setCompletionHandler(this);
NetworkTask networkTask = Router.getExperienceNetworkTask(experienceId);
HttpResponse response = networkTask.run();
if (response != null) {
try {
responseHandler.onHandleResponse(response);
} catch (IOException e) {
e.printStackTrace();
} finally {
response.close();
}
}
return experience;
}
@Override
public void onReceivedJSONObject(JSONObject jsonObject) {
try {
JSONObject data = jsonObject.getJSONObject("data");
experience = (Experience) mObjectMapper.getObject("experiences", data.getString("id"),
data.getJSONObject("attributes"));
} catch (JSONException e) {
Log.e("ExperienceActivity", "Error downloading experience");
}
}
@Override
public void onReceivedJSONArray(JSONArray jsonArray) {}
@Override
protected void onPostExecute(Experience experience) {
if (experience == null) { return; }
setExperience(experience);
}
}
}
|
package dr.app.beauti.generator;
import dr.app.beast.BeastVersion;
import dr.app.beauti.components.ComponentFactory;
import dr.app.beauti.options.*;
import dr.app.beauti.types.*;
import dr.app.beauti.util.XMLWriter;
import dr.app.util.Arguments;
import dr.evolution.alignment.Alignment;
import dr.evolution.continuous.Continuous;
import dr.evolution.datatype.*;
import dr.evolution.util.Taxa;
import dr.evolution.util.Taxon;
import dr.evolution.util.TaxonList;
import dr.evolution.util.Units;
import dr.evomodel.substmodel.AbstractSubstitutionModel;
import dr.evomodelxml.speciation.MultiSpeciesCoalescentParser;
import dr.evomodelxml.speciation.SpeciationLikelihoodParser;
import dr.evomodelxml.substmodel.GeneralSubstitutionModelParser;
import dr.evoxml.AlignmentParser;
import dr.evoxml.DateParser;
import dr.evoxml.TaxaParser;
import dr.evoxml.TaxonParser;
import dr.inferencexml.distribution.MixedDistributionLikelihoodParser;
import dr.inferencexml.model.CompoundLikelihoodParser;
import dr.inferencexml.model.CompoundParameterParser;
import dr.inferencexml.operators.SimpleOperatorScheduleParser;
import dr.util.Attribute;
import dr.util.Version;
import dr.xml.AttributeParser;
import dr.xml.XMLParser;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* This class holds all the data for the current BEAUti Document
*
* @author Andrew Rambaut
* @author Alexei Drummond
* @author Walter Xie
* @version $Id: BeastGenerator.java,v 1.4 2006/09/05 13:29:34 rambaut Exp $
*/
public class BeastGenerator extends Generator {
private final static Version version = new BeastVersion();
private final AlignmentGenerator alignmentGenerator;
private final PatternListGenerator patternListGenerator;
private final TreePriorGenerator treePriorGenerator;
private final TreeLikelihoodGenerator treeLikelihoodGenerator;
private final SubstitutionModelGenerator substitutionModelGenerator;
private final InitialTreeGenerator initialTreeGenerator;
private final TreeModelGenerator treeModelGenerator;
private final BranchRatesModelGenerator branchRatesModelGenerator;
private final OperatorsGenerator operatorsGenerator;
private final ParameterPriorGenerator parameterPriorGenerator;
private final LogGenerator logGenerator;
// private final DiscreteTraitGenerator discreteTraitGenerator;
private final STARBEASTGenerator starBeastGenerator;
private final TMRCAStatisticsGenerator tmrcaStatisticsGenerator;
public BeastGenerator(BeautiOptions options, ComponentFactory[] components) {
super(options, components);
alignmentGenerator = new AlignmentGenerator(options, components);
patternListGenerator = new PatternListGenerator(options, components);
tmrcaStatisticsGenerator = new TMRCAStatisticsGenerator(options, components);
substitutionModelGenerator = new SubstitutionModelGenerator(options, components);
treePriorGenerator = new TreePriorGenerator(options, components);
treeLikelihoodGenerator = new TreeLikelihoodGenerator(options, components);
initialTreeGenerator = new InitialTreeGenerator(options, components);
treeModelGenerator = new TreeModelGenerator(options, components);
branchRatesModelGenerator = new BranchRatesModelGenerator(options, components);
operatorsGenerator = new OperatorsGenerator(options, components);
parameterPriorGenerator = new ParameterPriorGenerator(options, components);
logGenerator = new LogGenerator(options, components);
// this has moved into the component system...
// discreteTraitGenerator = new DiscreteTraitGenerator(options, components);
starBeastGenerator = new STARBEASTGenerator(options, components);
}
public void checkOptions() throws IllegalArgumentException {
//++++++++++++++++ Taxon List ++++++++++++++++++
TaxonList taxonList = options.taxonList;
Set<String> ids = new HashSet<String>();
ids.add(TaxaParser.TAXA);
ids.add(AlignmentParser.ALIGNMENT);
if (taxonList != null) {
if (taxonList.getTaxonCount() < 2) {
throw new IllegalArgumentException("BEAST requires at least two taxa to run.");
}
for (int i = 0; i < taxonList.getTaxonCount(); i++) {
Taxon taxon = taxonList.getTaxon(i);
if (ids.contains(taxon.getId())) {
throw new IllegalArgumentException("A taxon has the same id," + taxon.getId() +
"\nas another element (taxon, sequence, taxon set etc.):\nAll ids should be unique.");
}
ids.add(taxon.getId());
}
}
//++++++++++++++++ Taxon Sets ++++++++++++++++++
for (Taxa taxa : options.taxonSets) {
if (taxa.getTaxonCount() < 2) {
throw new IllegalArgumentException("Taxon set, " + taxa.getId() + ", should contain\n" +
"at least two taxa.");
}
if (ids.contains(taxa.getId())) {
throw new IllegalArgumentException("A taxon sets has the same id," + taxa.getId() +
"\nas another element (taxon, sequence, taxon set etc.):\nAll ids should be unique.");
}
ids.add(taxa.getId());
}
//++++++++++++++++ Traits ++++++++++++++++++
if (options.useStarBEAST && !options.traitExists(TraitData.TRAIT_SPECIES)) {
throw new IllegalArgumentException("Keyword \"species\" is reserved for *BEAST only !" +
"\nPlease check the consistency between Use *BEAST check-box and Traits table.");
}
// missing data is not necessarily an issue...
// for (TraitData trait : options.traits) {
// for (int i = 0; i < trait.getTaxaCount(); i++) {
//// System.out.println("Taxon " + trait.getTaxon(i).getId() + " : [" + trait.getTaxon(i).getAttribute(trait.getName()) + "]");
// if (!trait.hasValue(i))
// throw new IllegalArgumentException("Taxon " + trait.getTaxon(i).getId() +
// " has no value for Trait " + trait.getName());
// }
// }
//++++++++++++++++ Tree Prior ++++++++++++++++++
// if (options.isShareSameTreePrior()) {
if (options.getPartitionTreeModels().size() > 1) { //TODO not allowed multi-prior yet
for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
if (prior.getNodeHeightPrior() == TreePriorType.GMRF_SKYRIDE) {
throw new IllegalArgumentException("For GMRF, tree model/tree prior combination not implemented by BEAST yet" +
"\nIt is only available for single tree model partition for this release.");
}
}
}
//+++++++++++++++ Starting tree ++++++++++++++++
for (PartitionTreeModel model : options.getPartitionTreeModels()) {
if (model.getStartingTreeType() == StartingTreeType.USER) {
if (model.getUserStartingTree() == null) {
throw new IllegalArgumentException("Please selected a starting tree in Trees panel");
}
}
}
//++++++++++++++++ Random local clock model validation ++++++++++++++++++
for (PartitionClockModel model : options.getPartitionClockModels()) {
// 1 random local clock CANNOT have different tree models
if (model.getClockType() == ClockType.RANDOM_LOCAL_CLOCK) { // || AUTOCORRELATED_LOGNORMAL
PartitionTreeModel treeModel = null;
for (AbstractPartitionData pd : options.getAllPartitionData(model)) { // only the PDs linked to this tree model
if (treeModel != null && treeModel != pd.getPartitionTreeModel()) {
throw new IllegalArgumentException("One random local clock CANNOT have different tree models !");
}
treeModel = pd.getPartitionTreeModel();
}
}
}
//++++++++++++++++ Tree Model ++++++++++++++++++
for (PartitionTreeModel model : options.getPartitionTreeModels()) {
int numOfTaxa = -1;
for (AbstractPartitionData pd : options.getAllPartitionData(model)) {
if (pd.getTaxonCount() > 0) {
if (numOfTaxa > 0) {
if (numOfTaxa != pd.getTaxonCount()) {
throw new IllegalArgumentException("Partitions with different taxa cannot share the same tree");
}
} else {
numOfTaxa = pd.getTaxonCount();
}
}
}
}
//++++++++++++++++ Prior Bounds ++++++++++++++++++
for (Parameter param : options.selectParameters()) {
if (!Double.isNaN(param.initial)) {
if (param.isTruncated && (param.initial < param.truncationLower || param.initial > param.truncationUpper)) {
throw new IllegalArgumentException("Parameter \"" + param.getName() + "\":" +
"\ninitial value " + param.initial + " is NOT in the range [" + param.truncationLower + ", " + param.truncationUpper + "]," +
"\nor this range is wrong. Please check the Prior panel.");
}
if (param.isNonNegative && param.initial < 0.0) {
throw new IllegalArgumentException("Parameter \"" + param.getName() + "\":" +
"\ninitial value " + param.initial + " should be non-negative. Please check the Prior panel.");
}
if (param.isZeroOne && (param.initial < 0.0 || param.initial > 1.0)) {
throw new IllegalArgumentException("Parameter \"" + param.getName() + "\":" +
"\ninitial value " + param.initial + " should lie in the interval [0, 1]. Please check the Prior panel.");
}
}
}
// add other tests and warnings here
// Speciation model with dated tips
// Sampling rates without dated tips or priors on rate or nodes
}
/**
* Generate a beast xml file from these beast options
*
* @param file File
* @throws java.io.IOException IOException
* @throws dr.app.util.Arguments.ArgumentException
* ArgumentException
*/
public void generateXML(File file) throws GeneratorException, IOException, Arguments.ArgumentException {
XMLWriter writer = new XMLWriter(new BufferedWriter(new FileWriter(file)));
writer.writeText("<?xml version=\"1.0\" standalone=\"yes\"?>");
writer.writeComment("Generated by BEAUTi " + version.getVersionString(),
" by Alexei J. Drummond, Andrew Rambaut and Marc A. Suchard",
" Department of Computer Science, University of Auckland and",
" Institute of Evolutionary Biology, University of Edinburgh",
" David Geffen School of Medicine, University of California, Los Angeles",
" http://beast.bio.ed.ac.uk/");
writer.writeOpenTag("beast");
writer.writeText("");
// this gives any added implementations of the 'Component' interface a
// chance to generate XML at this point in the BEAST file.
generateInsertionPoint(ComponentGenerator.InsertionPoint.BEFORE_TAXA, writer);
//++++++++++++++++ Taxon List ++++++++++++++++++
try {
// write complete taxon list
writeTaxa(options.taxonList, writer);
writer.writeText("");
if (!options.partitionsHaveIdenticalTaxa()) {
// write all taxa in each gene tree regarding each data partition,
for (AbstractPartitionData partition : options.dataPartitions) {
if (partition.getTaxonList() != null) {
writeDifferentTaxa(partition, writer);
}
}
}
} catch (Exception e) {
System.err.println(e);
throw new GeneratorException("Taxon list generation has failed:\n" + e.getMessage());
}
//++++++++++++++++ Taxon Sets ++++++++++++++++++
List<Taxa> taxonSets = options.taxonSets;
try {
if (taxonSets != null && taxonSets.size() > 0) {
tmrcaStatisticsGenerator.writeTaxonSets(writer, taxonSets);
generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TAXA, writer);
}
} catch (Exception e) {
e.printStackTrace();
throw new GeneratorException("Taxon sets generation has failed:\n" + e.getMessage());
}
//++++++++++++++++ Alignments ++++++++++++++++++
List<Alignment> alignments = new ArrayList<Alignment>();
try {
for (AbstractPartitionData partition : options.dataPartitions) {
Alignment alignment = null;
if (partition instanceof PartitionData) { // microsat has no alignment
alignment = ((PartitionData) partition).getAlignment();
}
if (alignment != null && !alignments.contains(alignment)) {
alignments.add(alignment);
}
}
if (alignments.size() > 0) {
alignmentGenerator.writeAlignments(alignments, writer);
generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SEQUENCES, writer);
}
} catch (Exception e) {
e.printStackTrace();
throw new GeneratorException("Alignments generation has failed:\n" + e.getMessage());
}
//++++++++++++++++ Pattern Lists ++++++++++++++++++
try {
if (!options.samplePriorOnly) {
List<Microsatellite> microsatList = new ArrayList<Microsatellite>();
for (AbstractPartitionData partition : options.dataPartitions) { // Each PD has one TreeLikelihood
if (partition.getTaxonList() != null) {
switch (partition.getDataType().getType()) {
case DataType.NUCLEOTIDES:
case DataType.AMINO_ACIDS:
case DataType.CODONS:
case DataType.COVARION:
case DataType.TWO_STATES:
patternListGenerator.writePatternList((PartitionData) partition, writer);
break;
case DataType.GENERAL:
case DataType.CONTINUOUS:
// no pattern list for trait data - the discrete (general) data type uses
// attribute patterns, which are generated in a later part of this method.
break;
case DataType.MICRO_SAT:
// microsat does not have alignment
patternListGenerator.writePatternList((PartitionPattern) partition, microsatList, writer);
break;
default:
throw new IllegalArgumentException("Unsupported data type");
}
writer.writeText("");
}
}
}
} catch (Exception e) {
e.printStackTrace();
throw new GeneratorException("Pattern lists generation has failed:\n" + e.getMessage());
}
generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_PATTERNS, writer);
//++++++++++++++++ Tree Prior Model ++++++++++++++++++
try {
for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
treePriorGenerator.writeTreePriorModel(prior, writer);
writer.writeText("");
}
} catch (Exception e) {
e.printStackTrace();
throw new GeneratorException("Tree prior model generation has failed:\n" + e.getMessage());
}
//++++++++++++++++ Starting Tree ++++++++++++++++++
try {
for (PartitionTreeModel model : options.getPartitionTreeModels()) {
initialTreeGenerator.writeStartingTree(model, writer);
writer.writeText("");
}
} catch (Exception e) {
e.printStackTrace();
throw new GeneratorException("Starting tree generation has failed:\n" + e.getMessage());
}
//++++++++++++++++ Tree Model +++++++++++++++++++
try {
for (PartitionTreeModel model : options.getPartitionTreeModels()) {
treeModelGenerator.writeTreeModel(model, writer);
writer.writeText("");
}
generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TREE_MODEL, writer);
} catch (Exception e) {
e.printStackTrace();
throw new GeneratorException("Tree model generation has failed:\n" + e.getMessage());
}
//++++++++++++++++ Statistics ++++++++++++++++++
try {
if (taxonSets != null && taxonSets.size() > 0) {
tmrcaStatisticsGenerator.writeTMRCAStatistics(writer);
}
} catch (Exception e) {
e.printStackTrace();
throw new GeneratorException("TMRCA statistics generation has failed:\n" + e.getMessage());
}
//++++++++++++++++ Tree Prior Likelihood ++++++++++++++++++
try {
for (PartitionTreeModel model : options.getPartitionTreeModels()) {
PartitionTreePrior prior = model.getPartitionTreePrior();
treePriorGenerator.writePriorLikelihood(prior, model, parameterPriorGenerator, writer);
writer.writeText("");
}
for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
treePriorGenerator.writeEBSPVariableDemographic(prior, writer);
}
generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TREE_PRIOR, writer);
} catch (Exception e) {
e.printStackTrace();
throw new GeneratorException("Tree prior likelihood generation has failed:\n" + e.getMessage());
}
//++++++++++++++++ Branch Rates Model ++++++++++++++++++
try {
for (PartitionClockModel model : options.getPartitionClockModels()) {
branchRatesModelGenerator.writeBranchRatesModel(model, writer);
writer.writeText("");
}
// write allClockRate for fix mean option in clock model panel
for (ClockModelGroup clockModelGroup : options.clockModelOptions.getClockModelGroups()) {
if (clockModelGroup.getRateTypeOption() == FixRateType.FIX_MEAN) {
writer.writeOpenTag(CompoundParameterParser.COMPOUND_PARAMETER,
new Attribute[]{new Attribute.Default<String>(XMLParser.ID, clockModelGroup.getName())});
for (PartitionClockModel model : options.getPartitionClockModels(clockModelGroup)) {
branchRatesModelGenerator.writeAllClockRateRefs(model, writer);
}
writer.writeCloseTag(CompoundParameterParser.COMPOUND_PARAMETER);
writer.writeText("");
}
}
} catch (Exception e) {
e.printStackTrace();
throw new GeneratorException("Branch rates model generation is failed:\n" + e.getMessage());
}
//++++++++++++++++ Substitution Model & Site Model ++++++++++++++++++
try {
for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) {
substitutionModelGenerator.writeSubstitutionSiteModel(model, writer);
substitutionModelGenerator.writeAllMus(model, writer); // allMus
writer.writeText("");
}
generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SUBSTITUTION_MODEL, writer);
} catch (Exception e) {
e.printStackTrace();
throw new GeneratorException("Substitution model or site model generation has failed:\n" + e.getMessage());
}
//++++++++++++++++ Site Model ++++++++++++++++++
// for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) {
// substitutionModelGenerator.writeSiteModel(model, writer); // site model
// substitutionModelGenerator.writeAllMus(model, writer); // allMus
// writer.writeText("");
generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SITE_MODEL, writer);
//++++++++++++++++ Tree Likelihood ++++++++++++++++++
try {
for (AbstractPartitionData partition : options.dataPartitions) {
// generate tree likelihoods for alignment data partitions
if (partition.getTaxonList() != null) {
generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_TREE_LIKELIHOOD, writer);
if (partition instanceof PartitionData) {
if (partition.getDataType() != GeneralDataType.INSTANCE &&
partition.getDataType() != ContinuousDataType.INSTANCE) {
treeLikelihoodGenerator.writeTreeLikelihood((PartitionData) partition, writer);
writer.writeText("");
}
} else if (partition instanceof PartitionPattern) { // microsat
treeLikelihoodGenerator.writeTreeLikelihood((PartitionPattern) partition, writer);
writer.writeText("");
} else {
throw new GeneratorException("Find unrecognized partition:\n" + partition.getName());
}
}
}
generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TREE_LIKELIHOOD, writer);
} catch (Exception e) {
e.printStackTrace();
throw new GeneratorException("Tree likelihood generation has failed:\n" + e.getMessage());
}
//++++++++++++++++ *BEAST ++++++++++++++++++
try {
if (options.useStarBEAST) { // species
writeStarBEAST(writer);
}
} catch (Exception e) {
e.printStackTrace();
throw new GeneratorException("*BEAST special part generation has failed:\n" + e.getMessage());
}
generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TRAITS, writer);
//++++++++++++++++ Operators ++++++++++++++++++
try {
List<Operator> operators = options.selectOperators();
operatorsGenerator.writeOperatorSchedule(operators, writer);
writer.writeText("");
generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_OPERATORS, writer);
} catch (Exception e) {
e.printStackTrace();
throw new GeneratorException("Operators generation has failed:\n" + e.getMessage());
}
//++++++++++++++++ MCMC ++++++++++++++++++
try {
// XMLWriter writer, List<PartitionSubstitutionModel> models,
writeMCMC(writer);
writer.writeText("");
generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_MCMC, writer);
} catch (Exception e) {
e.printStackTrace();
throw new GeneratorException("MCMC or log generation is failed:\n" + e.getMessage());
}
try {
writeTimerReport(writer);
writer.writeText("");
if (options.performTraceAnalysis) {
writeTraceAnalysis(writer);
}
if (options.generateCSV) {
for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
treePriorGenerator.writeEBSPAnalysisToCSVfile(prior, writer);
}
}
} catch (Exception e) {
e.printStackTrace();
throw new GeneratorException("The last part of XML generation has failed:\n" + e.getMessage());
}
writer.writeCloseTag("beast");
writer.flush();
writer.close();
}
/**
* Generate a taxa block from these options.
*
* @param taxonList the taxon list to write
* @param writer the writer
* @throws dr.app.util.Arguments.ArgumentException
* ArgumentException
*/
private void writeTaxa(TaxonList taxonList, XMLWriter writer) throws Arguments.ArgumentException {
// -1 (single taxa), 0 (1st gene of multi-taxa)
writer.writeComment("The list of taxa to be analysed (can also include dates/ages).",
"ntax=" + taxonList.getTaxonCount());
writer.writeOpenTag(TaxaParser.TAXA, new Attribute[]{new Attribute.Default<String>(XMLParser.ID, TaxaParser.TAXA)});
boolean hasAttr = options.traits.size() > 0;
boolean firstDate = true;
for (int i = 0; i < taxonList.getTaxonCount(); i++) {
Taxon taxon = taxonList.getTaxon(i);
boolean hasDate = false;
if (options.clockModelOptions.isTipCalibrated()) {
hasDate = TaxonList.Utils.hasAttribute(taxonList, i, dr.evolution.util.Date.DATE);
}
writer.writeTag(TaxonParser.TAXON, new Attribute[]{
new Attribute.Default<String>(XMLParser.ID, taxon.getId())},
!(hasDate || hasAttr)); // false if any of hasDate or hasAttr is true
if (hasDate) {
dr.evolution.util.Date date = (dr.evolution.util.Date) taxon.getAttribute(dr.evolution.util.Date.DATE);
if (firstDate) {
options.units = date.getUnits();
firstDate = false;
} else {
if (options.units != date.getUnits()) {
System.err.println("Error: Units in dates do not match.");
}
}
Attribute[] attributes = {
new Attribute.Default<Double>(DateParser.VALUE, date.getTimeValue()),
new Attribute.Default<String>(DateParser.DIRECTION, date.isBackwards() ? DateParser.BACKWARDS : DateParser.FORWARDS),
new Attribute.Default<String>(DateParser.UNITS, Units.Utils.getDefaultUnitName(options.units))
//new Attribute.Default("origin", date.getOrigin()+"")
};
writer.writeTag(dr.evolution.util.Date.DATE, attributes, true);
}
for (TraitData trait : options.traits) {
writer.writeOpenTag(AttributeParser.ATTRIBUTE, new Attribute[]{
new Attribute.Default<String>(Attribute.NAME, trait.getName())});
// denotes missing data using '?'
writer.writeText(taxon.containsAttribute(trait.getName()) ? taxon.getAttribute(trait.getName()).toString() : "?");
writer.writeCloseTag(AttributeParser.ATTRIBUTE);
}
generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_TAXON, taxon, writer);
if (hasDate || hasAttr) writer.writeCloseTag(TaxonParser.TAXON);
}
writer.writeCloseTag(TaxaParser.TAXA);
}
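/**
* Write a separate taxa block for a data partition that does not share the full taxon set;
* the block references taxa in the main taxon list by idref.
*
* @param dataPartition the data partition whose taxa are written
* @param writer the writer
*/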
public void writeDifferentTaxa(AbstractPartitionData dataPartition, XMLWriter writer) {
TaxonList taxonList = dataPartition.getTaxonList();
String name = dataPartition.getName();
writer.writeComment("gene name = " + name + ", ntax= " + taxonList.getTaxonCount());
writer.writeOpenTag(TaxaParser.TAXA, new Attribute[]{new Attribute.Default<String>(XMLParser.ID, name + "." + TaxaParser.TAXA)});
for (int i = 0; i < taxonList.getTaxonCount(); i++) {
final Taxon taxon = taxonList.getTaxon(i);
writer.writeIDref(TaxonParser.TAXON, taxon.getId());
}
writer.writeCloseTag(TaxaParser.TAXA);
}
/**
* *BEAST block
*
* @param writer XMLWriter
*/
private void writeStarBEAST(XMLWriter writer) {
String traitName = TraitData.TRAIT_SPECIES;
writer.writeText("");
writer.writeComment(options.starBEASTOptions.getDescription());
writer.writeOpenTag(traitName, new Attribute[]{
new Attribute.Default<String>(XMLParser.ID, traitName)});
//new Attribute.Default<String>("traitType", traitType)});
starBeastGenerator.writeMultiSpecies(options.taxonList, writer);
writer.writeCloseTag(traitName);
starBeastGenerator.writeSTARBEAST(writer);
}
/**
* Write the timer report block.
*
* @param writer the writer
*/
public void writeTimerReport(XMLWriter writer) {
writer.writeOpenTag("report");
writer.writeOpenTag("property", new Attribute.Default<String>("name", "timer"));
writer.writeIDref("mcmc", "mcmc");
writer.writeCloseTag("property");
writer.writeCloseTag("report");
}
/**
* Write the trace analysis block.
*
* @param writer the writer
*/
public void writeTraceAnalysis(XMLWriter writer) {
writer.writeTag(
"traceAnalysis",
new Attribute[]{
new Attribute.Default<String>("fileName", options.logFileName)
},
true
);
}
/**
* Write the MCMC block.
*
* @param writer XMLWriter
*/
public void writeMCMC(XMLWriter writer) {
writer.writeComment("Define MCMC");
List<Attribute> attributes = new ArrayList<Attribute>();
attributes.add(new Attribute.Default<String>(XMLParser.ID, "mcmc"));
attributes.add(new Attribute.Default<Integer>("chainLength", options.chainLength));
attributes.add(new Attribute.Default<String>("autoOptimize", options.autoOptimize ? "true" : "false"));
if (options.operatorAnalysis) {
attributes.add(new Attribute.Default<String>("operatorAnalysis", options.operatorAnalysisFileName));
}
writer.writeOpenTag("mcmc", attributes);
if (options.hasData()) {
writer.writeOpenTag(CompoundLikelihoodParser.POSTERIOR, new Attribute.Default<String>(XMLParser.ID, "posterior"));
}
// write prior block
writer.writeOpenTag(CompoundLikelihoodParser.PRIOR, new Attribute.Default<String>(XMLParser.ID, "prior"));
if (options.useStarBEAST) { // species
// coalescent prior
writer.writeIDref(MultiSpeciesCoalescentParser.SPECIES_COALESCENT, TraitData.TRAIT_SPECIES + "." + COALESCENT);
// prior on population sizes
// if (options.speciesTreePrior == TreePriorType.SPECIES_YULE) {
writer.writeIDref(MixedDistributionLikelihoodParser.DISTRIBUTION_LIKELIHOOD, SPOPS);
// } else {
// writer.writeIDref(SpeciesTreeBMPrior.STPRIOR, STP);
// prior on species tree
writer.writeIDref(SpeciationLikelihoodParser.SPECIATION_LIKELIHOOD, SPECIATION_LIKE);
}
parameterPriorGenerator.writeParameterPriors(writer);
for (PartitionTreeModel model : options.getPartitionTreeModels()) {
PartitionTreePrior prior = model.getPartitionTreePrior();
treePriorGenerator.writePriorLikelihoodReference(prior, model, writer);
writer.writeText("");
}
for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
treePriorGenerator.writeEBSPVariableDemographicReference(prior, writer);
writer.writeText("");
}
for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) {
// e.g. <svsGeneralSubstitutionModel idref="locations.model" />
// if (!(model.getLocationSubstType() == DiscreteSubstModelType.SYM_SUBST && (!model.isActivateBSSVS()))) {
if (model.isActivateBSSVS()) {
writer.writeIDref(GeneralSubstitutionModelParser.GENERAL_SUBSTITUTION_MODEL, model.getPrefix() + AbstractSubstitutionModel.MODEL);
writer.writeText("");
}
}
generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_MCMC_PRIOR, writer);
writer.writeCloseTag(CompoundLikelihoodParser.PRIOR);
if (options.hasData()) {
// write likelihood block
writer.writeOpenTag(CompoundLikelihoodParser.LIKELIHOOD, new Attribute.Default<String>(XMLParser.ID, "likelihood"));
treeLikelihoodGenerator.writeTreeLikelihoodReferences(writer);
branchRatesModelGenerator.writeClockLikelihoodReferences(writer);
generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_MCMC_LIKELIHOOD, writer);
writer.writeCloseTag(CompoundLikelihoodParser.LIKELIHOOD);
writer.writeCloseTag(CompoundLikelihoodParser.POSTERIOR);
}
writer.writeIDref(SimpleOperatorScheduleParser.OPERATOR_SCHEDULE, "operators");
// write log to screen
logGenerator.writeLogToScreen(writer, branchRatesModelGenerator, substitutionModelGenerator);
// write log to file
logGenerator.writeLogToFile(writer, treePriorGenerator, branchRatesModelGenerator,
substitutionModelGenerator, treeLikelihoodGenerator);
// write tree log to file
logGenerator.writeTreeLogToFile(writer);
writer.writeCloseTag("mcmc");
}
}
|
package dr.app.beauti.options;
import java.util.ArrayList;
import java.util.List;
import dr.app.beauti.enumTypes.OperatorType;
import dr.app.beauti.enumTypes.PriorScaleType;
import dr.app.beauti.enumTypes.TreePriorType;
import dr.app.beauti.generator.Generator;
import dr.evolution.util.Taxon;
import dr.evomodel.coalescent.GMRFFixedGridImportanceSampler;
import dr.evomodel.operators.TreeNodeSlide;
import dr.evomodel.speciation.SpeciesTreeModel;
import dr.evomodelxml.BirthDeathModelParser;
import dr.evomodelxml.YuleModelParser;
/**
* @author Walter Xie
* @version $Id$
*/
public class STARBEASTOptions extends ModelOptions {
// Instance variables
private final BeautiOptions options;
public final String POP_MEAN = "popMean";
public final String SPECIES_TREE_FILE_NAME = TraitGuesser.Traits.TRAIT_SPECIES
+ "." + GMRFFixedGridImportanceSampler.TREE_FILE_NAME; // species.trees
public STARBEASTOptions(BeautiOptions options) {
this.options = options;
initSpeciesParametersAndOperators();
}
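// Register the *BEAST-specific parameters (population size hyper-parameter, species tree
// Yule/Birth-Death rates, split population sizes) and their operators with default priors and weights.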
private void initSpeciesParametersAndOperators() {
double spWeights = 5.0;
double spTuning = 0.9;
createParameterJeffreysPrior(TraitGuesser.Traits.TRAIT_SPECIES + "." + POP_MEAN, "Species tree: population hyper-parameter operator",
PriorScaleType.TIME_SCALE, 1.0, 0.0, Double.POSITIVE_INFINITY);
// species tree Yule
createParameterJeffreysPrior(TraitGuesser.Traits.TRAIT_SPECIES + "." + YuleModelParser.YULE + "." + YuleModelParser.BIRTH_RATE,
"Speices tree: Yule process birth rate", PriorScaleType.BIRTH_RATE_SCALE, 1.0, 0.0, Double.POSITIVE_INFINITY);
// species tree Birth Death
createParameterJeffreysPrior(TraitGuesser.Traits.TRAIT_SPECIES + "." + BirthDeathModelParser.BIRTHDIFF_RATE_PARAM_NAME,
"Speices tree: Birth Death model mean growth rate", PriorScaleType.BIRTH_RATE_SCALE, 1.0, 0.0, Double.POSITIVE_INFINITY);
createParameterUniformPrior(TraitGuesser.Traits.TRAIT_SPECIES + "." + BirthDeathModelParser.RELATIVE_DEATH_RATE_PARAM_NAME,
"Speices tree: Birth Death model relative death rate", PriorScaleType.BIRTH_RATE_SCALE, 0.5, 0.0, 1.0);
createParameterJeffreysPrior(SpeciesTreeModel.SPECIES_TREE + "." + Generator.SPLIT_POPS, "Species tree: population size operator",
PriorScaleType.TIME_SCALE, 1.0, 0.0, Double.POSITIVE_INFINITY);
createParameter(TraitGuesser.Traits.TRAIT_SPECIES + "." + TreeNodeSlide.TREE_NODE_REHEIGHT, "Species tree: tree node operator");
createScaleOperator(TraitGuesser.Traits.TRAIT_SPECIES + "." + POP_MEAN, spTuning, spWeights);
createScaleOperator(TraitGuesser.Traits.TRAIT_SPECIES + "." + YuleModelParser.YULE + "." + YuleModelParser.BIRTH_RATE, demoTuning, demoWeights);
createScaleOperator(TraitGuesser.Traits.TRAIT_SPECIES + "." + BirthDeathModelParser.BIRTHDIFF_RATE_PARAM_NAME, demoTuning, demoWeights);
createScaleOperator(TraitGuesser.Traits.TRAIT_SPECIES + "." + BirthDeathModelParser.RELATIVE_DEATH_RATE_PARAM_NAME, demoTuning, demoWeights);
createScaleOperator(SpeciesTreeModel.SPECIES_TREE + "." + Generator.SPLIT_POPS, 0.5, 94);
createOperator(TraitGuesser.Traits.TRAIT_SPECIES + "." + TreeNodeSlide.TREE_NODE_REHEIGHT, OperatorType.NODE_REHIGHT, demoTuning, 94);
//TODO: more
for (PartitionClockModel model : options.getPartitionClockModels()) {
model.iniClockRateStarBEAST();
}
}
/**
* Select the parameters required by *BEAST and add them to the given list.
*
* @param params the parameter list to add to
*/
public void selectParameters(List<Parameter> params) {
params.add(getParameter(TraitGuesser.Traits.TRAIT_SPECIES + "." + POP_MEAN));
if (options.getPartitionTreePriors().get(0).getNodeHeightPrior() == TreePriorType.SPECIES_BIRTH_DEATH) {
params.add(getParameter(TraitGuesser.Traits.TRAIT_SPECIES + "." + BirthDeathModelParser.BIRTHDIFF_RATE_PARAM_NAME));
params.add(getParameter(TraitGuesser.Traits.TRAIT_SPECIES + "." + BirthDeathModelParser.RELATIVE_DEATH_RATE_PARAM_NAME));
} else if (options.getPartitionTreePriors().get(0).getNodeHeightPrior() == TreePriorType.SPECIES_YULE) {
params.add(getParameter(TraitGuesser.Traits.TRAIT_SPECIES + "." + YuleModelParser.YULE + "." + YuleModelParser.BIRTH_RATE));
}
// params.add(getParameter(SpeciesTreeModel.SPECIES_TREE + "." + Generator.SPLIT_POPS));
//TODO: more
}
/**
* Select the operators required by *BEAST and add them to the given list.
*
* @param ops the operator list to add to
*/
public void selectOperators(List<Operator> ops) {
ops.add(getOperator(TraitGuesser.Traits.TRAIT_SPECIES + "." + POP_MEAN));
if (options.getPartitionTreePriors().get(0).getNodeHeightPrior() == TreePriorType.SPECIES_BIRTH_DEATH) {
ops.add(getOperator(TraitGuesser.Traits.TRAIT_SPECIES + "." + BirthDeathModelParser.BIRTHDIFF_RATE_PARAM_NAME));
ops.add(getOperator(TraitGuesser.Traits.TRAIT_SPECIES + "." + BirthDeathModelParser.RELATIVE_DEATH_RATE_PARAM_NAME));
// ops.add(getOperator("upDownBirthDeathSpeciesTree"));
// ops.add(getOperator("upDownBirthDeathSTPop"));
// for (PartitionTreeModel tree : getPartitionTreeModels()) {
// ops.add(getOperator(tree.getPrefix() + "upDownBirthDeathGeneTree"));
} else if (options.getPartitionTreePriors().get(0).getNodeHeightPrior() == TreePriorType.SPECIES_YULE) {
ops.add(getOperator(TraitGuesser.Traits.TRAIT_SPECIES + "." + YuleModelParser.YULE + "." + YuleModelParser.BIRTH_RATE));
// ops.add(getOperator("upDownYuleSpeciesTree"));
// ops.add(getOperator("upDownYuleSTPop"));
// for (PartitionTreeModel tree : getPartitionTreeModels()) {
// ops.add(getOperator(tree.getPrefix() + "upDownYuleGeneTree"));
}
ops.add(getOperator(SpeciesTreeModel.SPECIES_TREE + "." + Generator.SPLIT_POPS));
ops.add(getOperator(TraitGuesser.Traits.TRAIT_SPECIES + "." + TreeNodeSlide.TREE_NODE_REHEIGHT));
//TODO: more
}
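// A *BEAST (species tree) analysis is active when the species trait has been defined.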
public boolean isSpeciesAnalysis() {
return options.traitOptions.containTrait(TraitGuesser.Traits.TRAIT_SPECIES.toString());
}
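/**
* Collect the distinct species names assigned to the taxa via the species trait.
*
* @return the list of species names, or null if the taxon list is missing or a taxon has no species attribute
*/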
public List<String> getSpeciesList() {
List<String> species = new ArrayList<String>();
String sp;
if (options.taxonList != null) {
for (int i = 0; i < options.taxonList.getTaxonCount(); i++) {
Taxon taxon = options.taxonList.getTaxon(i);
sp = (String) taxon.getAttribute(TraitGuesser.Traits.TRAIT_SPECIES.toString());
if (sp == null) return null;
if (!species.contains(sp)) {
species.add(sp);
}
}
return species;
} else {
return null;
}
}
}
|
package dr.evomodel.operators;
import dr.evolution.tree.MutableTree;
import dr.evolution.tree.NodeRef;
import dr.evomodel.tree.TreeModel;
import dr.inference.operators.MCMCOperator;
import dr.inference.operators.OperatorFailedException;
import dr.math.MathUtils;
import dr.xml.*;
/**
* Implements branch exchange operations. There are NARROW, WIDE and (deprecated) INTERMEDIATE varieties.
* The narrow exchange is very similar to a rooted-tree nearest-neighbour
* interchange but with the restriction that node heights must remain consistent.
* <p/>
* KNOWN BUGS: the WIDE operator cannot be used on trees with 4 or fewer tips!
*/
public class ExchangeOperator extends AbstractTreeOperator {
public static final String NARROW_EXCHANGE = "narrowExchange";
public static final String WIDE_EXCHANGE = "wideExchange";
public static final String INTERMEDIATE_EXCHANGE = "intermediateExchange";
public static final int NARROW = 0;
public static final int WIDE = 1;
public static final int INTERMEDIATE = 2;
private static final int MAX_TRIES = 100;
private int mode = NARROW;
private final TreeModel tree;
private double[] distances;
public ExchangeOperator(int mode, TreeModel tree, double weight) {
this.mode = mode;
this.tree = tree;
setWeight(weight);
}
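// Illustrative usage (a sketch; in practice the operator is normally constructed by the XML
// parsers declared below, and "treeModel" here is assumed to be an existing TreeModel instance):
// ExchangeOperator narrow = new ExchangeOperator(ExchangeOperator.NARROW, treeModel, 10.0);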
public double doOperation() throws OperatorFailedException {
final int tipCount = tree.getExternalNodeCount();
double hastingsRatio = 0;
switch( mode ) {
case NARROW:
narrow();
break;
case WIDE:
wide();
break;
case INTERMEDIATE:
hastingsRatio = intermediate();
break;
}
assert tree.getExternalNodeCount() == tipCount :
"Lost some tips in " + ((mode == NARROW) ? "NARROW mode." : "WIDE mode.");
return hastingsRatio;
}
/**
* WARNING: Assumes strictly bifurcating tree.
*/
public void narrow() throws OperatorFailedException {
final int nNodes = tree.getNodeCount();
final NodeRef root = tree.getRoot();
NodeRef i = tree.getNode(MathUtils.nextInt(nNodes));
while( root == i || tree.getParent(i) == root ) {
i = tree.getNode(MathUtils.nextInt(nNodes));
}
final NodeRef iParent = tree.getParent(i);
final NodeRef iGrandParent = tree.getParent(iParent);
NodeRef iUncle = tree.getChild(iGrandParent, 0);
if( iUncle == iParent ) {
iUncle = tree.getChild(iGrandParent, 1);
}
assert tree.getNodeHeight(i) < tree.getNodeHeight(iGrandParent);
if( tree.getNodeHeight(iUncle) < tree.getNodeHeight(iParent) ) {
eupdate(i, iUncle, iParent, iGrandParent);
// tree.pushTreeChangedEvent(iParent);
// tree.pushTreeChangedEvent(iGrandParent);
return;
}
// System.out.println("tries = " + tries);
throw new OperatorFailedException(
"Couldn't find valid narrow move on this tree!!");
}
/**
* WARNING: Assumes strictly bifurcating tree.
*/
public void wide() throws OperatorFailedException {
final int nodeCount = tree.getNodeCount();
final NodeRef root = tree.getRoot();
NodeRef i = root;
while( root == i ) {
i = tree.getNode(MathUtils.nextInt(nodeCount));
}
NodeRef j = i; // tree.getNode(MathUtils.nextInt(nodeCount));
while( j == i || j == root ) {
j = tree.getNode(MathUtils.nextInt(nodeCount));
}
final NodeRef iP = tree.getParent(i);
final NodeRef jP = tree.getParent(j);
if( (iP != jP) && (i != jP) && (j != iP)
&& (tree.getNodeHeight(j) < tree.getNodeHeight(iP))
&& (tree.getNodeHeight(i) < tree.getNodeHeight(jP)) ) {
eupdate(i, j, iP, jP);
// System.out.println("tries = " + tries+1);
return;
}
throw new OperatorFailedException(
"Couldn't find valid wide move on this tree!");
}
/**
* @deprecated WARNING: SHOULD NOT BE USED!
* WARNING: Assumes strictly bifurcating tree.
*/
public double intermediate() throws OperatorFailedException {
final int nodeCount = tree.getNodeCount();
final NodeRef root = tree.getRoot();
for(int tries = 0; tries < MAX_TRIES; ++tries) {
NodeRef i, j;
NodeRef[] possibleNodes;
do {
// get a random node
i = root; // tree.getNode(MathUtils.nextInt(nodeCount));
// if (root != i) {
// possibleNodes = tree.getNodes();
// check if we got the root
while( root == i ) {
// if so, draw another one until we no longer have the
// root
i = tree.getNode(MathUtils.nextInt(nodeCount));
// if (root != i) {
// possibleNodes = tree.getNodes();
}
possibleNodes = tree.getNodes();
// get another random node
// NodeRef j = tree.getNode(MathUtils.nextInt(nodeCount));
j = getRandomNode(possibleNodes, i);
// check if they are the same and if the new node is the root
} while( j == null || j == i || j == root );
double forward = getWinningChance(indexOf(possibleNodes, j));
// possibleNodes = getPossibleNodes(j);
calcDistances(possibleNodes, j);
forward += getWinningChance(indexOf(possibleNodes, i));
// get the parent of both of them
final NodeRef iP = tree.getParent(i);
final NodeRef jP = tree.getParent(j);
// check if both parents are equal -> we are siblings :) (this
// wouldn't effect a change in topology)
// check if I am your parent or vice versa (this would destroy the
// tree)
// check if you are younger than my father
// check if I am younger than your father
if( (iP != jP) && (i != jP) && (j != iP)
&& (tree.getNodeHeight(j) < tree.getNodeHeight(iP))
&& (tree.getNodeHeight(i) < tree.getNodeHeight(jP)) ) {
// if 1 & 2 are false and 3 & 4 are true then we found a valid
// candidate
exchangeNodes(tree, i, j, iP, jP);
// possibleNodes = getPossibleNodes(i);
calcDistances(possibleNodes, i);
double backward = getWinningChance(indexOf(possibleNodes, j));
// possibleNodes = getPossibleNodes(j);
calcDistances(possibleNodes, j);
backward += getWinningChance(indexOf(possibleNodes, i));
// System.out.println("tries = " + tries+1);
return Math.log(Math.min(1, (backward) / (forward)));
// return 0.0;
}
}
throw new OperatorFailedException(
"Couldn't find valid wide move on this tree!");
}
/* why not use Arrays.asList(a).indexOf(n) ? */
private int indexOf(NodeRef[] a, NodeRef n) {
for(int i = 0; i < a.length; i++) {
if( a[i] == n ) {
return i;
}
}
return -1;
}
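// Probability of choosing the node at the given index under inverse-distance weighting,
// i.e. (1/distance[index]) normalised by the sum of 1/distance over all candidate nodes.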
private double getWinningChance(int index) {
double sum = 0;
for(int i = 0; i < distances.length; i++) {
sum += (1.0 / distances[i]);
}
return (1.0 / distances[index]) / sum;
}
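// Cache (topological distance + 1) from the reference node to every candidate node.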
private void calcDistances(NodeRef[] nodes, NodeRef ref) {
distances = new double[nodes.length];
for(int i = 0; i < nodes.length; i++) {
distances[i] = getNodeDistance(ref, nodes[i]) + 1;
}
}
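// Draw a node at random with probability proportional to 1/distance from the reference node.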
private NodeRef getRandomNode(NodeRef[] nodes, NodeRef ref) {
calcDistances(nodes, ref);
double sum = 0;
for(int i = 0; i < distances.length; i++) {
sum += 1.0 / distances[i];
}
double randomValue = MathUtils.nextDouble() * sum;
NodeRef n = null;
for(int i = 0; i < distances.length; i++) {
randomValue -= 1.0 / distances[i];
if( randomValue <= 0 ) {
n = nodes[i];
break;
}
}
return n;
}
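// Number of edges on the path between two nodes, found by repeatedly stepping the lower node up to its parent.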
private int getNodeDistance(NodeRef i, NodeRef j) {
int count = 0;
while( i != j ) {
count++;
if( tree.getNodeHeight(i) < tree.getNodeHeight(j) ) {
i = tree.getParent(i);
} else {
j = tree.getParent(j);
}
}
return count;
}
public int getMode() {
return mode;
}
public String getOperatorName() {
return ((mode == NARROW) ? "Narrow" : "Wide") + " Exchange" + "("
+ tree.getId() + ")";
}
/* exchange subtrees whose root are i and j */
private void eupdate(NodeRef i, NodeRef j, NodeRef iP, NodeRef jP)
throws OperatorFailedException {
tree.beginTreeEdit();
tree.removeChild(iP, i);
tree.removeChild(jP, j);
tree.addChild(jP, i);
tree.addChild(iP, j);
try {
tree.endTreeEdit();
} catch( MutableTree.InvalidTreeException ite ) {
throw new OperatorFailedException(ite.toString());
}
}
public double getMinimumAcceptanceLevel() {
if( mode == NARROW ) {
return 0.05;
} else {
return 0.01;
}
}
public double getMinimumGoodAcceptanceLevel() {
if( mode == NARROW ) {
return 0.05;
} else {
return 0.01;
}
}
public String getPerformanceSuggestion() {
if( MCMCOperator.Utils.getAcceptanceProbability(this) < getMinimumAcceptanceLevel() ) {
return "";
} else if( MCMCOperator.Utils.getAcceptanceProbability(this) > getMaximumAcceptanceLevel() ) {
return "";
} else {
return "";
}
}
public static XMLObjectParser NARROW_EXCHANGE_PARSER = new AbstractXMLObjectParser() {
public String getParserName() {
return NARROW_EXCHANGE;
}
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
final double weight = xo.getDoubleAttribute("weight");
return new ExchangeOperator(NARROW, treeModel, weight);
}
// AbstractXMLObjectParser implementation
public String getParserDescription() {
return "This element represents a narrow exchange operator. "
+ "This operator swaps a random subtree with its uncle.";
}
public Class getReturnType() {
return ExchangeOperator.class;
}
public XMLSyntaxRule[] getSyntaxRules() {
return rules;
}
private final XMLSyntaxRule[] rules = {
AttributeRule.newDoubleRule("weight"),
new ElementRule(TreeModel.class)};
};
public static XMLObjectParser WIDE_EXCHANGE_PARSER = new AbstractXMLObjectParser() {
public String getParserName() {
return WIDE_EXCHANGE;
}
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
double weight = xo.getDoubleAttribute("weight");
return new ExchangeOperator(WIDE, treeModel, weight);
}
// AbstractXMLObjectParser implementation
public String getParserDescription() {
return "This element represents a wide exchange operator. "
+ "This operator swaps two random subtrees.";
}
public Class getReturnType() {
return ExchangeOperator.class;
}
public XMLSyntaxRule[] getSyntaxRules() {
return rules;
}
private final XMLSyntaxRule[] rules = {
AttributeRule.newDoubleRule("weight"),
new ElementRule(TreeModel.class)};
};
public static XMLObjectParser INTERMEDIATE_EXCHANGE_PARSER = new AbstractXMLObjectParser() {
public String getParserName() {
return INTERMEDIATE_EXCHANGE;
}
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
double weight = xo.getDoubleAttribute("weight");
return new ExchangeOperator(INTERMEDIATE, treeModel, weight);
}
// AbstractXMLObjectParser implementation
public String getParserDescription() {
return "This element represents a intermediate exchange operator. "
+ "This operator swaps two random subtrees.";
}
public Class getReturnType() {
return ExchangeOperator.class;
}
public XMLSyntaxRule[] getSyntaxRules() {
return rules;
}
private final XMLSyntaxRule[] rules = {
AttributeRule.newDoubleRule("weight"),
new ElementRule(TreeModel.class)};
};
}
|
package dr.inference.trace;
import dr.inferencexml.trace.MarginalLikelihoodAnalysisParser;
import dr.util.*;
import dr.xml.*;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.StringTokenizer;
/**
* @author Philippe Lemey
* @author Marc A. Suchard
*/
public class DnDsPerSiteAnalysis implements Citable {
public static final String DNDS_PER_SITE_ANALYSIS = "dNdSPerSiteAnalysis";
public static final String CUTOFF = "cutoff";
public static final String PROPORTION = "proportion";
public static final String INCLUDE_SIGNIFICANT_SYMBOL = "includeSymbol";
public static final String INCLUDE_SIGNIFICANCE_LEVEL = "includeLevel";
public static final String INCLUDE_SITE_CLASSIFICATION = "includeClassification";
public static final String SIGNIFICANCE_TEST = "test";
public static final String SEPARATOR_STRING = "separator";
public static final String INCLUDE_SIMULATION_OUTCOME = "simulationOutcome";
public static final String INCLUDE_HPD = "includeHPD";
public static final String SITE_SIMULATION = "siteSimulation";
public DnDsPerSiteAnalysis(TraceList traceList) {
this.traceList = traceList;
this.numSites = traceList.getTraceCount();
this.format = new OutputFormat();
fieldWidth = 14;
firstField = 10;
numberFormatter = new NumberFormatter(6);
numberFormatter.setPadding(true);
numberFormatter.setFieldWidth(fieldWidth);
}
public void setIncludeMean(boolean b) {
format.includeMean = b;
}
public void setIncludeHPD(boolean b) {
format.includeHPD = b;
}
public void setIncludeSignificanceLevel(boolean b) {
format.includeSignificanceLevel = b;
}
public void setIncludeSignificantSymbol(boolean b) {
format.includeSignificantSymbol = b;
}
public void setIncludeSimulationOutcome(boolean b) {
format.includeSimulationOutcome = b;
}
public boolean getIncludeSimulationOutcome() {
return(format.includeSimulationOutcome);
}
public void setProportion(double d) {
format.proportion = d;
}
public void setSiteSimulation(String[] d) {
format.siteSimulation = d;
}
public void setIncludeSiteClassification(boolean b) {
format.includeSiteClassification = b;
}
public void setCutoff(double d) {
format.cutoff = d;
}
public void setSeparator(String s) {
format.separator = s;
}
public void setSignificanceTest(SignificanceTest t) {
format.test = t;
}
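// Format a single site's row: mean, HPD bounds, significance level, site classification (+/-/0),
// significance symbol and, optionally, the comparison against the simulated site classification.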
private String toStringSite(int index, OutputFormat format) {
StringBuilder sb = new StringBuilder();
traceList.analyseTrace(index);
TraceDistribution distribution = traceList.getDistributionStatistics(index);
sb.append(numberFormatter.formatToFieldWidth(Integer.toString(index + 1), firstField));
if (format.includeMean) {
sb.append(format.separator);
sb.append(numberFormatter.format(distribution.getMean()));
}
if (format.includeHPD) {
sb.append(format.separator);
sb.append(numberFormatter.format(distribution.getLowerHPD()));
sb.append(format.separator);
sb.append(numberFormatter.format(distribution.getUpperHPD()));
}
if (format.includeSignificanceLevel || format.includeSignificantSymbol || format.includeSiteClassification || format.includeSimulationOutcome) {
boolean isSignificant = false;
String classification = "0";
String level;
if (format.test == SignificanceTest.NOT_EQUAL) {
double[] hpd = new double[2];
if (format.proportion == 0.95){
hpd[0] = distribution.getLowerHPD();
hpd[1] = distribution.getUpperHPD();
} else if (format.proportion == 1.0){
hpd[0] = distribution.getMinimum();
hpd[1] = distribution.getMaximum();
} else {
// the distribution statistics do not expose an arbitrary-proportion HPD, so compute the interval directly
hpd = getHPDInterval(format.proportion,traceList.getValues(index));
}
if (hpd[0] < format.cutoff && hpd[1] < format.cutoff) {
level = numberFormatter.formatToFieldWidth(">0.95", fieldWidth);
isSignificant = true;
classification = "-";
} else if (hpd[0] > format.cutoff && hpd[1] > format.cutoff) {
level = numberFormatter.formatToFieldWidth(">0.95", fieldWidth);
isSignificant = true;
classification = "+";
} else {
level = numberFormatter.formatToFieldWidth("<=0.95", fieldWidth);
}
} else {
List values = traceList.getValues(index);
double levelPosValue = 0.0;
double levelNegValue = 0.0;
int total = 0;
for (Object obj : values) {
double d = ((Number) obj).doubleValue();
// if ((format.test == SignificanceTest.LESS_THAN && d < format.cutoff) ||
// (format.test == SignificanceTest.GREATER_THAN && d > format.cutoff)) {
if (d < format.cutoff) {
if(format.test == SignificanceTest.LESS_THAN || format.test == SignificanceTest.LESS_OR_GREATER_THAN) {
levelNegValue++;
}
} else if (d > format.cutoff){
if (format.test == SignificanceTest.GREATER_THAN || format.test == SignificanceTest.LESS_OR_GREATER_THAN){
levelPosValue++;
}
}
total++;
}
levelPosValue /= total;
levelNegValue /= total;
if (levelPosValue > format.proportion) {
isSignificant = true;
classification = "+";
} else if (levelNegValue > format.proportion) {
isSignificant = true;
classification = "-";
}
if (levelPosValue > levelNegValue) {
level = numberFormatter.format(levelPosValue);
} else {
level = numberFormatter.format(levelNegValue);
}
}
if (format.includeSignificanceLevel) {
sb.append(format.separator);
sb.append(level);
}
if (format.includeSiteClassification) {
sb.append(format.separator);
sb.append(classification);
}
if (format.includeSignificantSymbol) {
sb.append(format.separator);
if (isSignificant) {
sb.append("*");
} else {
// Do nothing?
}
}
if (format.includeSimulationOutcome) {
sb.append(format.separator);
sb.append(format.siteSimulation[index]);
sb.append(format.separator);
if (format.siteSimulation[index].equals("+") || format.siteSimulation[index].equals("-")) {
if (classification.equals(format.siteSimulation[index])){
sb.append("TP"); // True Positive
} else {
sb.append("FN"); // True Negative
}
} else {
if (classification.equals(format.siteSimulation[index])){
sb.append("TN"); // True Negative
} else {
sb.append("FP"); // False Positive
}
}
}
}
sb.append("\n");
return sb.toString();
}
public String header(OutputFormat format) {
StringBuilder sb = new StringBuilder();
sb.append("# Some information here\n");
sb.append("# Please cite: " + Citable.Utils.getCitationString(this));
sb.append(numberFormatter.formatToFieldWidth("Site", firstField));
if (format.includeMean) {
sb.append(format.separator);
sb.append(numberFormatter.formatToFieldWidth("Mean", fieldWidth));
}
if (format.includeHPD) {
sb.append(format.separator);
sb.append(numberFormatter.formatToFieldWidth("Lower", fieldWidth));
sb.append(format.separator);
sb.append(numberFormatter.formatToFieldWidth("Upper", fieldWidth));
}
if (format.includeSignificanceLevel) {
sb.append(format.separator);
sb.append(numberFormatter.formatToFieldWidth("Level", fieldWidth));
}
if (format.includeSiteClassification) {
sb.append(format.separator);
sb.append(numberFormatter.formatToFieldWidth("Classification", fieldWidth));
}
if (format.includeSignificantSymbol) {
sb.append(format.separator);
sb.append(numberFormatter.formatToFieldWidth("Significant", fieldWidth));
}
if (format.includeSimulationOutcome) {
sb.append(format.separator);
sb.append(numberFormatter.formatToFieldWidth("Simulated", fieldWidth));
sb.append(format.separator);
sb.append(numberFormatter.formatToFieldWidth("Evaluation", fieldWidth));
}
sb.append("\n");
return sb.toString();
}
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(header(format));
for (int i = 0; i < numSites; ++i) {
sb.append(toStringSite(i, format));
}
return sb.toString();
}
public List<Citation> getCitations() {
List<Citation> citations = new ArrayList<Citation>();
citations.add(
new Citation(
new Author[]{
new Author("P", "Lemey"),
new Author("VN", "Minin"),
new Author("MA", "Suchard")
},
Citation.Status.IN_PREPARATION
)
);
return citations;
}
private class OutputFormat {
boolean includeMean;
boolean includeHPD;
boolean includeSignificanceLevel;
boolean includeSignificantSymbol;
boolean includeSiteClassification;
boolean includeSimulationOutcome;
String[] siteSimulation;
double cutoff;
double proportion;
SignificanceTest test;
String separator;
OutputFormat() {
this(true, true, true, true, true, false, null, 1.0, 0.95, SignificanceTest.NOT_EQUAL, "\t");
}
OutputFormat(boolean includeMean,
boolean includeHPD,
boolean includeSignificanceLevel,
boolean includeSignificantSymbol,
boolean includeSiteClassification,
boolean includeSimulationOutcome,
String[] siteSimulation,
double cutoff,
double proportion,
SignificanceTest test,
String separator) {
this.includeMean = includeMean;
this.includeHPD = includeHPD;
this.includeSignificanceLevel = includeSignificanceLevel;
this.includeSignificantSymbol = includeSignificantSymbol;
this.includeSiteClassification = includeSiteClassification;
this.includeSimulationOutcome = includeSimulationOutcome;
this.siteSimulation = siteSimulation;
this.cutoff = cutoff;
this.proportion = proportion;
this.test = test;
this.separator = separator;
}
}
public enum SignificanceTest {
GREATER_THAN("gt"),
LESS_THAN("lt"),
NOT_EQUAL("ne"),
LESS_OR_GREATER_THAN("logt");
private SignificanceTest(String text) {
this.text = text;
}
public String getText() {
return text;
}
public static SignificanceTest parseFromString(String text) {
for (SignificanceTest test : SignificanceTest.values()) {
if (test.getText().compareToIgnoreCase(text) == 0)
return test;
}
return null;
}
private final String text;
}
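// Shortest interval containing the requested proportion of the sampled values
// (a generalisation of the 95% HPD interval to an arbitrary proportion).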
private static double[] getHPDInterval(double proportion, List list) {
double returnArray[] = new double[2];
int length = list.size();
int[] indices = new int[length];
Double[] resultObjArray = (Double[]) list.toArray( new Double[0] );
double[] result = toPrimitiveDoubleArray(resultObjArray);
HeapSort.sort(result, indices);
double minRange = Double.MAX_VALUE;
int hpdIndex = 0;
int diff = (int)Math.round(proportion * (double)length);
for (int i = 0; i <= (length - diff); i++) {
double minValue = result[indices[i]];
double maxValue = result[indices[i+diff-1]];
double range = Math.abs(maxValue - minValue);
if (range < minRange) {
minRange = range;
hpdIndex = i;
}
}
returnArray[0] = result[indices[hpdIndex]];
returnArray[1] = result[indices[hpdIndex+diff-1]];
return returnArray;
}
private static double[] toPrimitiveDoubleArray(Double[] array){
double[] returnArray = new double[array.length];
for(int i = 0; i < array.length; i++ ){
returnArray[i] = array[i].doubleValue();
}
return returnArray;
}
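// Split a comma-separated string into its tokens, or return null if it contains none.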
private static String[] parseVariableLengthStringArray(String inString) {
List<String> returnList = new ArrayList<String>();
StringTokenizer st = new StringTokenizer(inString, ",");
while (st.hasMoreTokens()) {
returnList.add(st.nextToken());
}
if (returnList.size() > 0) {
String[] stringArray = new String[returnList.size()];
stringArray = returnList.toArray(stringArray);
return stringArray;
}
return null;
}
public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
public String getParserName() {
return DNDS_PER_SITE_ANALYSIS;
}
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
String fileName = xo.getStringAttribute(FileHelpers.FILE_NAME);
try {
File file = new File(fileName);
String name = file.getName();
String parent = file.getParent();
if (!file.isAbsolute()) {
parent = System.getProperty("user.dir");
}
file = new File(parent, name);
fileName = file.getAbsolutePath();
LogFileTraces traces = new LogFileTraces(fileName, file);
traces.loadTraces();
int maxState = traces.getMaxState();
// leaving the burnin attribute off will result in 10% being used
int burnin = xo.getAttribute(MarginalLikelihoodAnalysisParser.BURN_IN, maxState / 10);
//TODO: implement custom burn-in
if (burnin < 0 || burnin >= maxState) {
burnin = maxState / 5;
System.out.println("WARNING: Burn-in larger than total number of states - using to 20%");
}
traces.setBurnIn(burnin);
// TODO: Filter traces to include only dNdS columns
DnDsPerSiteAnalysis analysis = new DnDsPerSiteAnalysis(traces);
analysis.setSignificanceTest(
SignificanceTest.parseFromString(
xo.getAttribute(SIGNIFICANCE_TEST, SignificanceTest.NOT_EQUAL.getText())
)
);
analysis.setCutoff(xo.getAttribute(CUTOFF, 1.0));
analysis.setProportion(xo.getAttribute(PROPORTION, 0.95));
analysis.setSeparator(xo.getAttribute(SEPARATOR_STRING, "\t"));
analysis.setIncludeHPD(xo.getAttribute(INCLUDE_HPD, true));
analysis.setIncludeSignificanceLevel(xo.getAttribute(INCLUDE_SIGNIFICANCE_LEVEL, false));
analysis.setIncludeSignificantSymbol(xo.getAttribute(INCLUDE_SIGNIFICANT_SYMBOL, true));
analysis.setIncludeSiteClassification(xo.getAttribute(INCLUDE_SITE_CLASSIFICATION, true));
analysis.setIncludeSimulationOutcome(xo.getAttribute(INCLUDE_SIMULATION_OUTCOME, false));
if (analysis.getIncludeSimulationOutcome()){
String sites = (String)xo.getAttribute(SITE_SIMULATION, "empty");
if (sites.equals("empty")){
System.err.println("you want simulation evaluation but do not provide a site simulation string??");
} else {
String[] siteSimulation = parseVariableLengthStringArray(sites);
analysis.setSiteSimulation(siteSimulation);
}
}
return analysis;
} catch (FileNotFoundException fnfe) {
throw new XMLParseException("File '" + fileName + "' can not be opened for " + getParserName() + " element.");
} catch (java.io.IOException ioe) {
throw new XMLParseException(ioe.getMessage());
} catch (TraceException e) {
throw new XMLParseException(e.getMessage());
}
}
|
package org.apereo.cas.documentation;
import org.apereo.cas.CentralAuthenticationService;
import org.apereo.cas.audit.AuditableActions;
import org.apereo.cas.metadata.CasConfigurationMetadataCatalog;
import org.apereo.cas.metadata.CasReferenceProperty;
import org.apereo.cas.metadata.ConfigurationMetadataCatalogQuery;
import org.apereo.cas.services.RegisteredServiceProperty;
import org.apereo.cas.shell.commands.CasShellCommand;
import org.apereo.cas.util.RandomUtils;
import org.apereo.cas.util.ReflectionUtils;
import org.apereo.cas.util.RegexUtils;
import org.apereo.cas.util.spring.boot.ConditionalOnFeatureEnabled;
import io.swagger.v3.oas.annotations.Operation;
import lombok.Getter;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.RegExUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.jooq.lambda.Unchecked;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.actuate.endpoint.annotation.DeleteOperation;
import org.springframework.boot.actuate.endpoint.annotation.Endpoint;
import org.springframework.boot.actuate.endpoint.annotation.ReadOperation;
import org.springframework.boot.actuate.endpoint.annotation.Selector;
import org.springframework.boot.actuate.endpoint.annotation.WriteOperation;
import org.springframework.boot.actuate.endpoint.web.annotation.RestControllerEndpoint;
import org.springframework.boot.actuate.endpoint.web.annotation.WebEndpoint;
import org.springframework.core.LocalVariableTableParameterNameDiscoverer;
import org.springframework.core.io.ClassPathResource;
import org.springframework.shell.standard.ShellCommandGroup;
import org.springframework.shell.standard.ShellComponent;
import org.springframework.shell.standard.ShellMethod;
import org.springframework.shell.standard.ShellOption;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PatchMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
import java.util.Set;
import java.util.TreeSet;
import java.util.regex.Pattern;
/**
* This is {@link CasDocumentationApplication}.
*
* @author Misagh Moayyed
* @since 6.4.0
*/
public class CasDocumentationApplication {
private static final Logger LOGGER = LoggerFactory.getLogger(CasDocumentationApplication.class);
public static void main(final String[] args) throws Exception {
var options = new Options();
var dt = new Option("d", "data", true, "Data directory");
dt.setRequired(true);
options.addOption(dt);
var ver = new Option("v", "version", true, "Project version");
ver.setRequired(true);
options.addOption(ver);
var root = new Option("r", "root", true, "Project root directory");
root.setRequired(true);
options.addOption(root);
var ft = new Option("f", "filter", true, "Property filter pattern");
ft.setRequired(false);
options.addOption(ft);
var act = new Option("a", "actuators", true, "Generate data for actuator endpoints");
act.setRequired(false);
options.addOption(act);
var tp = new Option("tp", "thirdparty", true, "Generate data for third party");
tp.setRequired(false);
options.addOption(tp);
var sp = new Option("sp", "serviceproperties", true, "Generate data for registered services properties");
sp.setRequired(false);
options.addOption(sp);
var feats = new Option("ft", "features", true, "Generate data for feature toggles and descriptors");
feats.setRequired(false);
options.addOption(feats);
var csh = new Option("csh", "shell", true, "Generate data for CAS command-line shell commands and groups");
csh.setRequired(false);
options.addOption(csh);
var aud = new Option("aud", "audit", true, "Generate data for CAS auditable events");
aud.setRequired(false);
options.addOption(aud);
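// Illustrative invocation (a sketch; the exact classpath and task wiring depend on the build, and the
// paths/version shown are placeholders):
// java ... CasDocumentationApplication -d build/docs/data -v 6.4.0 -r /path/to/cas -f "cas.authn.+"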
new HelpFormatter().printHelp("CAS Documentation", options);
var cmd = new DefaultParser().parse(options, args);
var dataDirectory = cmd.getOptionValue("data");
var projectVersion = cmd.getOptionValue("version");
var projectRootDirectory = cmd.getOptionValue("root");
var propertyFilter = cmd.getOptionValue("filter", ".+");
var results = CasConfigurationMetadataCatalog.query(
ConfigurationMetadataCatalogQuery.builder()
.queryType(ConfigurationMetadataCatalogQuery.QueryTypes.CAS)
.queryFilter(property -> RegexUtils.find(propertyFilter, property.getName()))
.build());
var groups = new HashMap<String, Set<CasReferenceProperty>>();
results.properties()
.stream()
.filter(property -> StringUtils.isNotBlank(property.getModule()))
.peek(property -> {
var desc = cleanDescription(property);
property.setDescription(desc);
})
.forEach(property -> {
if (groups.containsKey(property.getModule())) {
groups.get(property.getModule()).add(property);
} else {
var values = new TreeSet<CasReferenceProperty>();
values.add(property);
groups.put(property.getModule(), values);
}
});
var dataPath = new File(dataDirectory, projectVersion);
if (dataPath.exists()) {
FileUtils.deleteQuietly(dataPath);
}
dataPath.mkdirs();
groups.forEach((key, value) -> {
var destination = new File(dataPath, key);
destination.mkdirs();
var configFile = new File(destination, "config.yml");
CasConfigurationMetadataCatalog.export(configFile, value);
});
var thirdparty = cmd.getOptionValue("thirdparty", "true");
if (StringUtils.equalsIgnoreCase("true", thirdparty)) {
exportThirdPartyConfiguration(dataPath, propertyFilter);
}
var registeredServicesProps = cmd.getOptionValue("serviceproperties", "true");
if (StringUtils.equalsIgnoreCase("true", registeredServicesProps)) {
exportRegisteredServiceProperties(dataPath);
}
exportTemplateViews(projectRootDirectory, dataPath);
exportThemeProperties(projectRootDirectory, dataPath);
var actuators = cmd.getOptionValue("actuators", "true");
if (StringUtils.equalsIgnoreCase("true", actuators)) {
exportActuatorEndpoints(dataPath);
}
var features = cmd.getOptionValue("features", "true");
if (StringUtils.equalsIgnoreCase("true", features)) {
exportFeatureToggles(dataPath);
}
var shell = cmd.getOptionValue("shell", "true");
if (StringUtils.equalsIgnoreCase("true", shell)) {
exportCommandlineShell(dataPath);
}
var audit = cmd.getOptionValue("audit", "true");
if (StringUtils.equalsIgnoreCase("true", audit)) {
exportAuditableEvents(dataPath);
}
}
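// Export the auditable action names declared on AuditableActions into audits/config.yml.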
private static void exportAuditableEvents(final File dataPath) {
var parentPath = new File(dataPath, "audits");
var properties = new ArrayList<Map<?, ?>>();
if (parentPath.exists()) {
FileUtils.deleteQuietly(parentPath);
}
if (!parentPath.mkdirs()) {
LOGGER.debug("Unable to create directory");
}
Arrays.stream(AuditableActions.class.getDeclaredFields())
.filter(it -> Modifier.isStatic(it.getModifiers()) && Modifier.isFinal(it.getModifiers()))
.forEach(it -> {
var event = new LinkedHashMap();
event.put("name", it.getName());
LOGGER.info("Adding audit [{}]", event);
properties.add(event);
});
if (!properties.isEmpty()) {
var configFile = new File(parentPath, "config.yml");
CasConfigurationMetadataCatalog.export(configFile, properties);
}
}
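// Export CAS command-line shell commands (name, description, group and parameters) into shell/config.yml.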
private static void exportCommandlineShell(final File dataPath) {
var parentPath = new File(dataPath, "shell");
if (parentPath.exists()) {
FileUtils.deleteQuietly(parentPath);
}
if (!parentPath.mkdirs()) {
LOGGER.debug("Unable to create directory");
}
var subTypes = ReflectionUtils.findClassesWithAnnotationsInPackage(List.of(ShellComponent.class), CasShellCommand.NAMESPACE);
var properties = new ArrayList<Map<?, ?>>();
subTypes.forEach(clazz -> {
LOGGER.info("Locating shell command group for [{}]", clazz.getSimpleName());
var group = clazz.getAnnotation(ShellCommandGroup.class);
if (group == null) {
LOGGER.warn("Shell command group is missing for {}", clazz.getName());
}
var methods = new LinkedHashMap();
for (var method : clazz.getDeclaredMethods()) {
if (method.isAnnotationPresent(ShellMethod.class)) {
var annotInstance = method.getAnnotation(ShellMethod.class);
var cmd = new ShellCommand();
cmd.parameters = new ArrayList<Map<String, String>>();
var parameterAnnotations = method.getParameterAnnotations();
for (var i = 0; i < parameterAnnotations.length; i++) {
for (var j = 0; j < parameterAnnotations[i].length; j++) {
var ann = parameterAnnotations[i][j];
// Only ShellOption annotations carry the parameter metadata; skip any other parameter annotations.
if (ann instanceof ShellOption) {
var option = (ShellOption) ann;
cmd.parameters.add(Map.of(
"name", String.join(",", option.value()),
"help", String.valueOf(option.help()),
"optOut", String.valueOf(option.optOut()),
"defaultValue", option.defaultValue()));
}
}
}
cmd.description = annotInstance.value();
cmd.name = String.join(",", annotInstance.key());
cmd.group = group == null ? "other" : group.value();
LOGGER.info("Adding shell command [{}]", cmd.name);
methods.put(cmd.name, cmd);
}
}
properties.add(methods);
});
if (!properties.isEmpty()) {
var configFile = new File(parentPath, "config.yml");
CasConfigurationMetadataCatalog.export(configFile, properties);
}
}
@Getter
private static class ShellCommand {
public String name;
public String description;
public List<Map<String, String>> parameters;
public String group;
}
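// Convert Javadoc-style tags ({@code ...}, {@value ...}, {@link ...}) in a property description into
// simple <code> markup; the temporary "[%s]" placeholder keeps one literal closing brace when a "}}" pair occurs.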
private static String cleanDescription(final CasReferenceProperty property) {
return property.getDescription()
.replace("{@code ", "<code>")
.replace("{@value ", "<code>")
.replace("{@link ", "<code>")
.replace("}}", "[%s]</code>")
.replace("}", "</code>")
.replace("[%s]", "}");
}
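/**
* Scans the CAS namespace for classes annotated with {@code @ConditionalOnFeatureEnabled},
* de-duplicates them by the computed feature property name, and exports the feature toggle
* catalog to {@code features/config.yml}.
*/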
private static void exportFeatureToggles(final File dataPath) throws Exception {
var parentPath = new File(dataPath, "features");
if (parentPath.exists()) {
FileUtils.deleteQuietly(parentPath);
}
if (!parentPath.mkdirs()) {
LOGGER.debug("Unable to create directory");
}
var subTypes = ReflectionUtils.findClassesWithAnnotationsInPackage(List.of(ConditionalOnFeatureEnabled.class), CentralAuthenticationService.NAMESPACE);
var properties = new ArrayList<Map<?, ?>>();
var allToggleProps = new HashSet<String>();
subTypes.forEach(clazz -> {
var features = clazz.getAnnotationsByType(ConditionalOnFeatureEnabled.class);
Arrays.stream(features).forEach(feature -> {
var propName = feature.feature().toProperty(feature.module());
if (!allToggleProps.contains(propName)) {
allToggleProps.add(propName);
var map = new LinkedHashMap<>();
map.put("type", clazz.getName());
map.put("feature", feature.feature());
if (StringUtils.isNotBlank(feature.module())) {
map.put("module", feature.module());
}
map.put("enabledByDefault", feature.enabledByDefault());
map.put("property", propName);
properties.add(map);
}
});
});
if (!properties.isEmpty()) {
var configFile = new File(parentPath, "config.yml");
CasConfigurationMetadataCatalog.export(configFile, properties);
}
}
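/**
* Resolves the actuator endpoint id and annotation type for a class annotated with
* {@code @Endpoint} or {@code @WebEndpoint}; returns {@code null} if neither is present.
*/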
private static Pair<String, String> getEndpoint(final Class clazz) {
var endpoint = (Endpoint) clazz.getAnnotation(Endpoint.class);
if (endpoint != null) {
return Pair.of(endpoint.id(), endpoint.annotationType().getSimpleName());
}
LOGGER.info("[{}] is not an Endpoint. Checking for WebEndpoint...", clazz.getName());
var webEndpoint = (WebEndpoint) clazz.getAnnotation(WebEndpoint.class);
if (webEndpoint != null) {
return Pair.of(webEndpoint.id(), webEndpoint.annotationType().getSimpleName());
}
LOGGER.warn("Unable to determine endpoint from " + clazz.getName());
return null;
}
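/**
* Scans for {@code @RestControllerEndpoint} classes (GET/DELETE/POST/PATCH/PUT mappings) and
* {@code @Endpoint} classes (read/write/delete operations) and exports the collected metadata
* to {@code actuators/<endpoint-id>/config.yml}, one directory per endpoint.
*/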
private static void exportActuatorEndpoints(final File dataPath) throws Exception {
var parentPath = new File(dataPath, "actuators");
if (parentPath.exists()) {
FileUtils.deleteQuietly(parentPath);
}
if (!parentPath.mkdirs()) {
LOGGER.debug("Unable to create directory");
}
var subTypes = ReflectionUtils.findClassesWithAnnotationsInPackage(List.of(RestControllerEndpoint.class), "org");
subTypes.forEach(clazz -> {
var properties = new ArrayList<Map<?, ?>>();
var endpoint = clazz.getAnnotation(RestControllerEndpoint.class);
var methods = findAnnotatedMethods(clazz, GetMapping.class);
LOGGER.info("Checking actuator endpoint (GET) for [{}]", clazz.getName());
methods.forEach(Unchecked.consumer(method -> {
var get = method.getAnnotation(GetMapping.class);
var map = new LinkedHashMap<>();
var paths = Arrays.stream(get.path())
.map(path -> StringUtils.isBlank(path) ? endpoint.id() : endpoint.id() + StringUtils.prependIfMissing(path, "/"))
.findFirst()
.orElse(null);
map.put("method", RequestMethod.GET.name());
map.put("path", paths == null ? endpoint.id() : paths);
map.put("name", endpoint.id());
map.put("endpointType", RestControllerEndpoint.class.getSimpleName());
collectActuatorEndpointMethodMetadata(method, map, endpoint.id());
if (get.produces().length > 0) {
map.put("produces", get.produces());
}
if (get.consumes().length > 0) {
map.put("consumes", get.consumes());
}
if (get.params().length > 0) {
map.put("parameters", get.params());
}
if (get.headers().length > 0) {
map.put("headers", get.headers());
}
if (get.value().length > 0) {
map.put("value", get.value());
}
properties.add(map);
}));
LOGGER.info("Checking actuator endpoint (DELETE) for [{}]", clazz.getName());
methods = findAnnotatedMethods(clazz, DeleteMapping.class);
methods.forEach(Unchecked.consumer(method -> {
var delete = method.getAnnotation(DeleteMapping.class);
var map = new LinkedHashMap<>();
var paths = Arrays.stream(delete.path())
.map(path -> StringUtils.isBlank(path) ? endpoint.id() : endpoint.id()
+ StringUtils.prependIfMissing(path, "/"))
.findFirst().orElse(null);
map.put("method", RequestMethod.DELETE.name());
map.put("path", paths == null ? endpoint.id() : paths);
map.put("name", endpoint.id());
map.put("endpointType", RestControllerEndpoint.class.getSimpleName());
collectActuatorEndpointMethodMetadata(method, map, endpoint.id());
if (delete.produces().length > 0) {
map.put("produces", delete.produces());
}
if (delete.consumes().length > 0) {
map.put("consumes", delete.consumes());
}
if (delete.params().length > 0) {
map.put("parameters", delete.params());
}
if (delete.headers().length > 0) {
map.put("headers", delete.headers());
}
if (delete.value().length > 0) {
map.put("value", delete.value());
}
properties.add(map);
}));
LOGGER.info("Checking actuator endpoint (POST) for [{}]", clazz.getName());
methods = findAnnotatedMethods(clazz, PostMapping.class);
methods.forEach(Unchecked.consumer(method -> {
var post = method.getAnnotation(PostMapping.class);
var map = new LinkedHashMap<>();
var paths = Arrays.stream(post.path())
.map(path -> StringUtils.isBlank(path) ? endpoint.id() : endpoint.id()
+ StringUtils.prependIfMissing(path, "/"))
.findFirst().orElse(null);
map.put("method", RequestMethod.POST.name());
map.put("path", paths == null ? endpoint.id() : paths);
map.put("name", endpoint.id());
map.put("endpointType", RestControllerEndpoint.class.getSimpleName());
collectActuatorEndpointMethodMetadata(method, map, endpoint.id());
if (post.produces().length > 0) {
map.put("produces", post.produces());
}
if (post.consumes().length > 0) {
map.put("consumes", post.consumes());
}
if (post.params().length > 0) {
map.put("parameters", post.params());
}
if (post.headers().length > 0) {
map.put("headers", post.headers());
}
if (post.value().length > 0) {
map.put("value", post.value());
}
properties.add(map);
}));
LOGGER.info("Checking actuator endpoint (PATCH) for [{}]", clazz.getName());
methods = findAnnotatedMethods(clazz, PatchMapping.class);
methods.forEach(Unchecked.consumer(method -> {
var patch = method.getAnnotation(PatchMapping.class);
var map = new LinkedHashMap<>();
var paths = Arrays.stream(patch.path())
.map(path -> StringUtils.isBlank(path) ? endpoint.id() : endpoint.id()
+ StringUtils.prependIfMissing(path, "/"))
.findFirst().orElse(null);
map.put("method", RequestMethod.PATCH.name());
map.put("path", paths == null ? endpoint.id() : paths);
map.put("name", endpoint.id());
map.put("endpointType", RestControllerEndpoint.class.getSimpleName());
collectActuatorEndpointMethodMetadata(method, map, endpoint.id());
if (patch.produces().length > 0) {
map.put("produces", patch.produces());
}
if (patch.consumes().length > 0) {
map.put("consumes", patch.consumes());
}
if (patch.params().length > 0) {
map.put("parameters", patch.params());
}
if (patch.headers().length > 0) {
map.put("headers", patch.headers());
}
if (patch.value().length > 0) {
map.put("value", patch.value());
}
properties.add(map);
}));
LOGGER.info("Checking actuator endpoint (PUT) for [{}]", clazz.getName());
methods = findAnnotatedMethods(clazz, PutMapping.class);
methods.forEach(Unchecked.consumer(method -> {
var put = method.getAnnotation(PutMapping.class);
var map = new LinkedHashMap<>();
var paths = Arrays.stream(put.path())
.map(path -> StringUtils.isBlank(path) ? endpoint.id() : endpoint.id()
+ StringUtils.prependIfMissing(path, "/"))
.findFirst().orElse(null);
map.put("method", RequestMethod.PUT.name());
map.put("path", paths == null ? endpoint.id() : paths);
map.put("name", endpoint.id());
map.put("endpointType", RestControllerEndpoint.class.getSimpleName());
collectActuatorEndpointMethodMetadata(method, map, endpoint.id());
if (put.produces().length > 0) {
map.put("produces", put.produces());
}
if (put.consumes().length > 0) {
map.put("consumes", put.consumes());
}
if (put.params().length > 0) {
map.put("parameters", put.params());
}
if (put.headers().length > 0) {
map.put("headers", put.headers());
}
if (put.value().length > 0) {
map.put("value", put.value());
}
properties.add(map);
}));
if (!properties.isEmpty()) {
var destination = new File(parentPath, endpoint.id());
if (!destination.mkdirs()) {
LOGGER.debug("Unable to create directory");
}
var configFile = new File(destination, "config.yml");
CasConfigurationMetadataCatalog.export(configFile, properties);
}
});
LOGGER.info("Checking endpoints...");
subTypes = ReflectionUtils.findClassesWithAnnotationsInPackage(List.of(Endpoint.class), "org");
subTypes.forEach(clazz -> {
var properties = new ArrayList<Map<?, ?>>();
var endpoint = getEndpoint(clazz);
if (endpoint != null) {
LOGGER.info("Checking endpoints (READ) for [{}]", clazz.getName());
var methods = findAnnotatedMethods(clazz, ReadOperation.class);
methods.forEach(Unchecked.consumer(method -> {
var read = method.getAnnotation(ReadOperation.class);
var map = new LinkedHashMap<>();
map.put("method", RequestMethod.GET.name());
map.put("path", endpoint.getKey());
map.put("name", endpoint.getKey());
map.put("endpointType", endpoint.getValue());
collectActuatorEndpointMethodMetadata(method, map, endpoint.getKey());
if (read.produces().length > 0) {
map.put("produces", read.produces());
}
properties.add(map);
}));
LOGGER.info("Checking endpoints (WRITE) for [{}]", clazz.getName());
methods = findAnnotatedMethods(clazz, WriteOperation.class);
methods.forEach(Unchecked.consumer(method -> {
var write = method.getAnnotation(WriteOperation.class);
var map = new LinkedHashMap<>();
map.put("method", RequestMethod.POST.name());
map.put("path", endpoint.getKey());
map.put("name", endpoint.getKey());
map.put("endpointType", Endpoint.class.getSimpleName());
collectActuatorEndpointMethodMetadata(method, map, endpoint.getKey());
if (write.produces().length > 0) {
map.put("produces", write.produces());
}
properties.add(map);
}));
LOGGER.info("Checking endpoints (DELETE) for [{}]", clazz.getName());
methods = findAnnotatedMethods(clazz, DeleteOperation.class);
methods.forEach(Unchecked.consumer(method -> {
var delete = method.getAnnotation(DeleteOperation.class);
var map = new LinkedHashMap<>();
map.put("method", RequestMethod.DELETE.name());
map.put("path", endpoint.getKey());
map.put("name", endpoint.getKey());
map.put("endpointType", endpoint.getValue());
collectActuatorEndpointMethodMetadata(method, map, endpoint.getKey());
if (delete.produces().length > 0) {
map.put("produces", delete.produces());
}
properties.add(map);
}));
}
if (!properties.isEmpty()) {
var destination = new File(parentPath, endpoint.getKey());
if (!destination.mkdirs()) {
LOGGER.debug("Unable to create directory");
}
var configFile = new File(destination, "config.yml");
CasConfigurationMetadataCatalog.export(configFile, properties);
}
});
}
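/**
* Records signature, owning class, return type, deprecation flags and path variables for an
* endpoint method. For CAS-owned endpoints the summary and parameters are read from the
* {@code @Operation} annotation; otherwise the summary is looked up in {@code actuators.properties}.
*/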
private static void collectActuatorEndpointMethodMetadata(final Method method,
final Map<Object, Object> map,
final String endpointId) throws Exception {
var actuatorProperties = new Properties();
actuatorProperties.load(new ClassPathResource("actuators.properties").getInputStream());
var clazz = method.getDeclaringClass();
var signature = method.toGenericString();
signature = signature.substring(signature.lastIndexOf(method.getDeclaringClass().getSimpleName()));
signature = RegExUtils.removePattern(signature, "throws.+");
map.put("signature", signature);
map.put("owner", clazz.getName());
var returnType = method.getReturnType().getSimpleName();
if (!StringUtils.equalsAnyIgnoreCase(returnType, "void")) {
map.put("returnType", returnType);
}
map.put("casEndpoint", isCasEndpoint(clazz));
if (clazz.getAnnotation(Deprecated.class) != null) {
map.put("deprecated", true);
}
if (method.getAnnotation(Deprecated.class) != null) {
map.put("deprecated", true);
}
var paramNames = ArrayUtils.EMPTY_STRING_ARRAY;
try {
var discovered = new LocalVariableTableParameterNameDiscoverer().getParameterNames(method);
if (discovered != null) {
paramNames = discovered;
}
} catch (final Throwable e) {
LOGGER.error(e.getMessage());
}
for (var i = 0; i < method.getParameters().length; i++) {
var param = method.getParameters()[i];
var selector = param.getAnnotation(Selector.class) != null;
selector = selector || param.getAnnotation(PathVariable.class) != null;
if (selector) {
var path = (String) map.get("path");
if (path.indexOf('{') == -1) {
var paramName = StringUtils.EMPTY;
if (param.getAnnotation(PathVariable.class) != null) {
paramName = param.getAnnotation(PathVariable.class).name();
}
if (StringUtils.isBlank(paramName) && paramNames.length > 0) {
paramName = paramNames[i];
}
path = StringUtils.appendIfMissing(path, "/")
.concat(String.format("{%s}", paramName));
map.put("path", path);
}
}
}
var parameters = new ArrayList<Map<?, ?>>();
if (isCasEndpoint(clazz)) {
var operation = Objects.requireNonNull(method.getAnnotation(Operation.class),
() -> "Unable to locate @Operation annotation for " + method.toGenericString()
+ " in declaring class " + clazz.getName());
if (!map.containsKey("deprecated") && operation.deprecated()) {
map.put("deprecated", true);
}
map.put("summary", StringUtils.appendIfMissing(operation.summary(), "."));
var paramCount = Arrays.stream(method.getParameterTypes())
.filter(type -> !type.equals(HttpServletRequest.class) && !type.equals(HttpServletResponse.class)).count();
if (operation.parameters().length == 0 && paramCount > 0) {
for (var i = 0; i < method.getParameterTypes().length; i++) {
var parameter = method.getParameters()[i];
var pathAnn = parameter.getAnnotation(PathVariable.class);
if (pathAnn != null) {
var paramData = new LinkedHashMap<String, Object>();
paramData.put("name", "path-variable" + RandomUtils.randomNumeric(4));
paramData.put("description", "Path variable selector");
paramData.put("required", pathAnn.required());
paramData.put("defaultValue", pathAnn.value());
parameters.add(paramData);
}
var requestParamAnn = parameter.getAnnotation(RequestParam.class);
if (requestParamAnn != null) {
var paramData = new LinkedHashMap<String, Object>();
var name = StringUtils.defaultIfBlank(requestParamAnn.name(), requestParamAnn.value());
name = StringUtils.defaultIfBlank(name, parameter.getName());
paramData.put("name", name);
paramData.put("description", "Request query parameter");
paramData.put("required", requestParamAnn.required());
paramData.put("defaultValue", requestParamAnn.defaultValue());
parameters.add(paramData);
}
var selectorAnn = parameter.getAnnotation(Selector.class);
if (selectorAnn != null) {
var paramData = new LinkedHashMap<String, Object>();
paramData.put("name", RandomUtils.randomNumeric(4));
paramData.put("description", "Path variable selector");
paramData.put("required", true);
parameters.add(paramData);
}
}
if (parameters.isEmpty()) {
throw new RuntimeException("Unable to locate @Parameter annotation for " + method.toGenericString()
+ " in declaring class " + clazz.getName());
}
}
for (var i = 0; i < operation.parameters().length; i++) {
var parameter = operation.parameters()[i];
var paramData = new LinkedHashMap<String, Object>();
paramData.put("name", parameter.name());
if (StringUtils.isNotBlank(parameter.description())) {
paramData.put("description", parameter.description());
}
paramData.put("required", parameter.required());
parameters.add(paramData);
}
} else {
var name = String.format("actuator.endpoint.%s.description", endpointId);
var summary = actuatorProperties.getProperty(name);
if (StringUtils.isBlank(summary)) {
throw new RuntimeException("Unable to locate undocumented endpoint summary for: " + endpointId + " found in " + clazz.getName());
}
map.put("summary", StringUtils.appendIfMissing(summary, "."));
}
if (!parameters.isEmpty()) {
map.put("parameters", parameters);
}
}
private static boolean isCasEndpoint(final Class<?> clazz) {
return clazz.getPackageName().startsWith(CentralAuthenticationService.NAMESPACE);
}
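/**
* Parses {@code cas-theme-default.properties} from the Thymeleaf module, pairing each property
* with the comment lines preceding it, and exports the result to {@code theme-properties/config.yml}.
*/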
private static void exportThemeProperties(final String projectRootDirectory, final File dataPath) throws Exception {
var themeProps = new File(dataPath, "theme-properties");
if (themeProps.exists()) {
FileUtils.deleteQuietly(themeProps);
}
themeProps.mkdirs();
var uiFile = new File(themeProps, "config.yml");
var properties = new ArrayList<Map<?, ?>>();
var root = new File(projectRootDirectory, "support/cas-server-support-thymeleaf");
var file = new File(root, "src/main/resources/cas-theme-default.properties");
var lines = FileUtils.readLines(file, StandardCharsets.UTF_8);
var it = lines.iterator();
var comments = new StringBuilder();
var pattern = Pattern.compile("#*\\s*(cas.+)=(\\S+)*");
while (it.hasNext()) {
var ln = it.next();
var matcher = pattern.matcher(ln);
if (matcher.find()) {
var prop = matcher.group(1);
var value = StringUtils.defaultString(matcher.group(2));
var comm = comments.toString().stripLeading().trim();
var map = new LinkedHashMap<String, Object>();
map.put("name", prop);
map.put("value", value);
map.put("description", comm);
properties.add(map);
comments = new StringBuilder();
} else {
ln = ln.replace("
comments.append(ln);
}
it.remove();
}
CasConfigurationMetadataCatalog.export(uiFile, properties);
}
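/**
* Lists all HTML templates under the Thymeleaf module's {@code templates} directory and exports
* their paths (relative to the module root) to {@code userinterface-templates/config.yml}.
*/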
private static void exportTemplateViews(final String projectRootDirectory, final File dataPath) {
var serviceProps = new File(dataPath, "userinterface-templates");
if (serviceProps.exists()) {
FileUtils.deleteQuietly(serviceProps);
}
serviceProps.mkdirs();
var uiFile = new File(serviceProps, "config.yml");
var properties = new ArrayList<Map<?, ?>>();
var root = new File(projectRootDirectory, "support/cas-server-support-thymeleaf");
var parent = new File(root, "src/main/resources/templates");
var files = FileUtils.listFiles(parent, new String[]{"html"}, true);
files
.stream()
.sorted()
.forEach(file -> {
var map = new LinkedHashMap<String, Object>();
var path = StringUtils.remove(file.getAbsolutePath(), root.getAbsolutePath());
map.put("name", path);
properties.add(map);
});
CasConfigurationMetadataCatalog.export(uiFile, properties);
}
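/**
* Exports every {@code RegisteredServiceProperty.RegisteredServiceProperties} entry (name,
* default value, type, group and description) to {@code registered-service-properties/config.yml}.
*/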
private static void exportRegisteredServiceProperties(final File dataPath) {
var serviceProps = new File(dataPath, "registered-service-properties");
if (serviceProps.exists()) {
FileUtils.deleteQuietly(serviceProps);
}
serviceProps.mkdirs();
var servicePropsFile = new File(serviceProps, "config.yml");
var properties = new ArrayList<Map<?, ?>>();
for (var property : RegisteredServiceProperty.RegisteredServiceProperties.values()) {
var map = new LinkedHashMap<String, Object>();
map.put("name", property.getPropertyName());
map.put("defaultValue", property.getDefaultValue());
map.put("type", property.getType().name());
map.put("group", property.getGroup().name());
map.put("description", property.getDescription());
properties.add(map);
}
CasConfigurationMetadataCatalog.export(servicePropsFile, properties);
}
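/**
* Queries the configuration metadata catalog for third-party properties matching the given
* filter, cleans up their descriptions, and exports them to {@code third-party/config.yml}.
*/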
private static void exportThirdPartyConfiguration(final File dataPath, final String propertyFilter) {
var results = CasConfigurationMetadataCatalog.query(
ConfigurationMetadataCatalogQuery.builder()
.queryType(ConfigurationMetadataCatalogQuery.QueryTypes.THIRD_PARTY)
.queryFilter(property -> RegexUtils.find(propertyFilter, property.getName()))
.build());
results.properties().forEach(property -> {
var desc = cleanDescription(property);
property.setDescription(desc);
});
var destination = new File(dataPath, "third-party");
if (destination.exists()) {
FileUtils.deleteQuietly(destination);
}
destination.mkdirs();
var configFile = new File(destination, "config.yml");
CasConfigurationMetadataCatalog.export(configFile, results.properties());
}
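/**
* Returns the public methods of the given class that carry the given annotation, logging and
* swallowing any reflection errors.
*/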
private static List<Method> findAnnotatedMethods(final Class<?> clazz,
final Class<? extends Annotation> annotationClass) {
var annotatedMethods = new ArrayList<Method>();
try {
var methods = clazz.getMethods();
for (var method : methods) {
if (method.isAnnotationPresent(annotationClass)) {
annotatedMethods.add(method);
}
}
} catch (final Throwable throwable) {
LOGGER.info("Failed to locate annotated methods: {}", throwable.getMessage());
}
return annotatedMethods;
}
}
|
package com.bignerdranch.expandablerecyclerview.Adapter;
import android.app.Activity;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v7.widget.RecyclerView;
import android.view.ViewGroup;
import com.bignerdranch.expandablerecyclerview.Model.ParentListItem;
import com.bignerdranch.expandablerecyclerview.Model.ParentWrapper;
import com.bignerdranch.expandablerecyclerview.ViewHolder.ChildViewHolder;
import com.bignerdranch.expandablerecyclerview.ViewHolder.ParentViewHolder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
/**
* RecyclerView.Adapter implementation that
* adds the ability to expand and collapse list items.
*
* Changes should be notified through:
* {@link #notifyParentItemInserted(int)}
* {@link #notifyParentItemRemoved(int)}
* {@link #notifyParentItemChanged(int)}
* {@link #notifyParentItemRangeInserted(int, int)}
* {@link #notifyChildItemInserted(int, int)}
* {@link #notifyChildItemRemoved(int, int)}
* {@link #notifyChildItemChanged(int, int)}
* methods and not the notify methods of RecyclerView.Adapter.
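*
* A minimal subclass sketch; {@code Recipe}, {@code Ingredient}, the two view holders and the
* layout ids below are illustrative placeholders (not part of this library), and {@code Recipe}
* is assumed to implement {@link ParentListItem}:
* <pre>{@code
* public class RecipeAdapter extends ExpandableRecyclerAdapter<RecipeViewHolder, IngredientViewHolder> {
*
*     private final LayoutInflater mInflater;
*
*     public RecipeAdapter(Context context, List<Recipe> recipes) {
*         super(recipes);
*         mInflater = LayoutInflater.from(context);
*     }
*
*     public RecipeViewHolder onCreateParentViewHolder(ViewGroup parentViewGroup) {
*         return new RecipeViewHolder(mInflater.inflate(R.layout.list_item_recipe, parentViewGroup, false));
*     }
*
*     public IngredientViewHolder onCreateChildViewHolder(ViewGroup childViewGroup) {
*         return new IngredientViewHolder(mInflater.inflate(R.layout.list_item_ingredient, childViewGroup, false));
*     }
*
*     public void onBindParentViewHolder(RecipeViewHolder holder, int position, ParentListItem parentListItem) {
*         holder.bind((Recipe) parentListItem);
*     }
*
*     public void onBindChildViewHolder(IngredientViewHolder holder, int position, Object childListItem) {
*         holder.bind((Ingredient) childListItem);
*     }
* }
* }</pre>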
*
* @author Ryan Brooks
* @version 1.0
* @since 5/27/2015
*/
public abstract class ExpandableRecyclerAdapter<PVH extends ParentViewHolder, CVH extends ChildViewHolder>
extends RecyclerView.Adapter<RecyclerView.ViewHolder> implements ParentViewHolder.ParentListItemExpandCollapseListener {
private static final String EXPANDED_STATE_MAP = "ExpandableRecyclerAdapter.ExpandedStateMap";
private static final int TYPE_PARENT = 0;
private static final int TYPE_CHILD = 1;
/**
* A {@link List} of all currently expanded {@link ParentListItem} objects
* and their children, in order. Changes to this list should be made through the add/remove methods
* available in {@link ExpandableRecyclerAdapter}
*/
protected List<Object> mItemList;
private List<? extends ParentListItem> mParentItemList;
private ExpandCollapseListener mExpandCollapseListener;
private List<RecyclerView> mAttachedRecyclerViewPool;
/**
* Allows objects to register themselves as expand/collapse listeners to be
* notified of change events.
* <p>
* Implement this in your {@link android.app.Activity} or {@link android.app.Fragment}
* to receive these callbacks.
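* <p>
* A minimal sketch, assuming {@code MyActivity} is your own class and {@code mAdapter} is a
* field referencing this adapter; register it with {@code mAdapter.setExpandCollapseListener(this)}:
* <pre>{@code
* public class MyActivity extends Activity implements ExpandableRecyclerAdapter.ExpandCollapseListener {
*
*     public void onListItemExpanded(int position) {
*         // e.g. log or update UI for the expanded parent
*     }
*
*     public void onListItemCollapsed(int position) {
*         // e.g. log or update UI for the collapsed parent
*     }
* }
* }</pre>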
*/
public interface ExpandCollapseListener {
/**
* Called when a list item is expanded.
*
* @param position The index of the item in the list being expanded
*/
void onListItemExpanded(int position);
/**
* Called when a list item is collapsed.
*
* @param position The index of the item in the list being collapsed
*/
void onListItemCollapsed(int position);
}
/**
* Primary constructor. Sets up {@link #mParentItemList} and {@link #mItemList}.
*
* Changes to {@link #mParentItemList} should be made through add/remove methods in
* {@link ExpandableRecyclerAdapter}
*
* @param parentItemList List of all {@link ParentListItem} objects to be
* displayed in the RecyclerView that this
* adapter is linked to
*/
public ExpandableRecyclerAdapter(@NonNull List<? extends ParentListItem> parentItemList) {
super();
mParentItemList = parentItemList;
mItemList = ExpandableRecyclerAdapterHelper.generateParentChildItemList(parentItemList);
mAttachedRecyclerViewPool = new ArrayList<>();
}
/**
* Implementation of Adapter.onCreateViewHolder(ViewGroup, int)
* that determines if the list item is a parent or a child and calls through
* to the appropriate implementation of either {@link #onCreateParentViewHolder(ViewGroup)}
* or {@link #onCreateChildViewHolder(ViewGroup)}.
*
* @param viewGroup The {@link ViewGroup} into which the new {@link android.view.View}
* will be added after it is bound to an adapter position.
* @param viewType The view type of the new {@code android.view.View}.
* @return A new RecyclerView.ViewHolder
* that holds a {@code android.view.View} of the given view type.
*/
@Override
public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup viewGroup, int viewType) {
if (viewType == TYPE_PARENT) {
PVH pvh = onCreateParentViewHolder(viewGroup);
pvh.setParentListItemExpandCollapseListener(this);
return pvh;
} else if (viewType == TYPE_CHILD) {
return onCreateChildViewHolder(viewGroup);
} else {
throw new IllegalStateException("Incorrect ViewType found");
}
}
@Override
public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) {
Object listItem = getListItem(position);
if (listItem instanceof ParentWrapper) {
PVH parentViewHolder = (PVH) holder;
if (parentViewHolder.shouldItemViewClickToggleExpansion()) {
parentViewHolder.setMainItemClickToExpand();
}
ParentWrapper parentWrapper = (ParentWrapper) listItem;
parentViewHolder.setExpanded(parentWrapper.isExpanded());
onBindParentViewHolder(parentViewHolder, position, parentWrapper.getParentListItem());
} else if (listItem == null) {
throw new IllegalStateException("Incorrect ViewHolder found");
} else {
onBindChildViewHolder((CVH) holder, position, listItem);
}
}
/**
* Callback called from {@link #onCreateViewHolder(ViewGroup, int)} when
* the list item created is a parent.
*
* @param parentViewGroup The {@link ViewGroup} in the list for which a {@link PVH}
* is being created
* @return A {@code PVH} corresponding to the {@link ParentListItem} with
* the {@code ViewGroup} parentViewGroup
*/
public abstract PVH onCreateParentViewHolder(ViewGroup parentViewGroup);
/**
* Callback called from {@link #onCreateViewHolder(ViewGroup, int)} when
* the list item created is a child.
*
* @param childViewGroup The {@link ViewGroup} in the list for which a {@link CVH}
* is being created
* @return A {@code CVH} corresponding to the child list item with the
* {@code ViewGroup} childViewGroup
*/
public abstract CVH onCreateChildViewHolder(ViewGroup childViewGroup);
/**
* Callback called from onBindViewHolder(RecyclerView.ViewHolder, int)
* when the list item bound to is a parent.
* <p>
* Bind data to the {@link PVH} here.
*
* @param parentViewHolder The {@code PVH} to bind data to
* @param position The index in the list at which to bind
* @param parentListItem The {@link ParentListItem} which holds the data to
* be bound to the {@code PVH}
*/
public abstract void onBindParentViewHolder(PVH parentViewHolder, int position, ParentListItem parentListItem);
/**
* Callback called from onBindViewHolder(RecyclerView.ViewHolder, int)
* when the list item bound to is a child.
* <p>
* Bind data to the {@link CVH} here.
*
* @param childViewHolder The {@code CVH} to bind data to
* @param position The index in the list at which to bind
* @param childListItem The child list item which holds the data to be
* bound to the {@code CVH}
*/
public abstract void onBindChildViewHolder(CVH childViewHolder, int position, Object childListItem);
/**
* Gets the number of parent and child objects currently expanded.
*
* @return The size of {@link #mItemList}
*/
@Override
public int getItemCount() {
return mItemList.size();
}
@Override
public int getItemViewType(int position) {
Object listItem = getListItem(position);
if (listItem instanceof ParentWrapper) {
return TYPE_PARENT;
} else if (listItem == null) {
throw new IllegalStateException("Null object added");
} else {
return TYPE_CHILD;
}
}
/**
* Gets the list of ParentItems that is backing this adapter.
* Changes can be made to the list and the adapter notified via the
* {@link #notifyParentItemInserted(int)}
* {@link #notifyParentItemRemoved(int)}
* {@link #notifyParentItemChanged(int)}
* {@link #notifyParentItemRangeInserted(int, int)}
* {@link #notifyChildItemInserted(int, int)}
* {@link #notifyChildItemRemoved(int, int)}
* {@link #notifyChildItemChanged(int, int)}
* methods.
*
* @return The list of ParentListItems that this adapter represents
*/
public List<? extends ParentListItem> getParentItemList() {
return mParentItemList;
}
/**
* Implementation of {@link com.bignerdranch.expandablerecyclerview.ViewHolder.ParentViewHolder.ParentListItemExpandCollapseListener#onParentListItemExpanded(int)}.
* <p>
* Called when a {@link ParentListItem} is triggered to expand.
*
* @param position The index of the item in the list being expanded
*/
@Override
public void onParentListItemExpanded(int position) {
Object listItem = getListItem(position);
if (listItem instanceof ParentWrapper) {
expandParentListItem((ParentWrapper) listItem, position, true);
}
}
/**
* Implementation of {@link com.bignerdranch.expandablerecyclerview.ViewHolder.ParentViewHolder.ParentListItemExpandCollapseListener#onParentListItemCollapsed(int)}.
* <p>
* Called when a {@link ParentListItem} is triggered to collapse.
*
* @param position The index of the item in the list being collapsed
*/
@Override
public void onParentListItemCollapsed(int position) {
Object listItem = getListItem(position);
if (listItem instanceof ParentWrapper) {
collapseParentListItem((ParentWrapper) listItem, position, true);
}
}
/**
* Implementation of Adapter#onAttachedToRecyclerView(RecyclerView).
* <p>
* Called when this {@link ExpandableRecyclerAdapter} is attached to a RecyclerView.
*
* @param recyclerView The {@code RecyclerView} this {@code ExpandableRecyclerAdapter}
* is being attached to
*/
@Override
public void onAttachedToRecyclerView(RecyclerView recyclerView) {
super.onAttachedToRecyclerView(recyclerView);
mAttachedRecyclerViewPool.add(recyclerView);
}
/**
* Implementation of Adapter.onDetachedFromRecyclerView(RecyclerView)
* <p>
* Called when this ExpandableRecyclerAdapter is detached from a RecyclerView.
*
* @param recyclerView The {@code RecyclerView} this {@code ExpandableRecyclerAdapter}
* is being detached from
*/
@Override
public void onDetachedFromRecyclerView(RecyclerView recyclerView) {
super.onDetachedFromRecyclerView(recyclerView);
mAttachedRecyclerViewPool.remove(recyclerView);
}
public void setExpandCollapseListener(ExpandCollapseListener expandCollapseListener) {
mExpandCollapseListener = expandCollapseListener;
}
// region Programmatic Expansion/Collapsing
/**
* Expands the parent with the specified index in the list of parents.
*
* @param parentIndex The index of the parent to expand
*/
public void expandParent(int parentIndex) {
int parentWrapperIndex = getParentWrapperIndex(parentIndex);
Object listItem = getListItem(parentWrapperIndex);
ParentWrapper parentWrapper;
if (listItem instanceof ParentWrapper) {
parentWrapper = (ParentWrapper) listItem;
} else {
return;
}
expandViews(parentWrapper, parentWrapperIndex);
}
/**
* Expands the parent associated with a specified {@link ParentListItem} in
* the list of parents.
*
* @param parentListItem The {@code ParentListItem} of the parent to expand
*/
public void expandParent(ParentListItem parentListItem) {
ParentWrapper parentWrapper = getParentWrapper(parentListItem);
int parentWrapperIndex = mItemList.indexOf(parentWrapper);
if (parentWrapperIndex == -1) {
return;
}
expandViews(parentWrapper, parentWrapperIndex);
}
/**
* Expands all parents in a range of indices in the list of parents.
*
* @param startParentIndex The index at which to start expanding parents
* @param parentCount The number of parents to expand
*/
public void expandParentRange(int startParentIndex, int parentCount) {
int endParentIndex = startParentIndex + parentCount;
for (int i = startParentIndex; i < endParentIndex; i++) {
expandParent(i);
}
}
/**
* Expands all parents in the list.
*/
public void expandAllParents() {
for (ParentListItem parentListItem : mParentItemList) {
expandParent(parentListItem);
}
}
/**
* Collapses the parent with the specified index in the list of parents.
*
* @param parentIndex The index of the parent to collapse
*/
public void collapseParent(int parentIndex) {
int parentWrapperIndex = getParentWrapperIndex(parentIndex);
Object listItem = getListItem(parentWrapperIndex);
ParentWrapper parentWrapper;
if (listItem instanceof ParentWrapper) {
parentWrapper = (ParentWrapper) listItem;
} else {
return;
}
collapseViews(parentWrapper, parentWrapperIndex);
}
/**
* Collapses the parent associated with a specified {@link ParentListItem} in
* the list of parents.
*
* @param parentListItem The {@code ParentListItem} of the parent to collapse
*/
public void collapseParent(ParentListItem parentListItem) {
ParentWrapper parentWrapper = getParentWrapper(parentListItem);
int parentWrapperIndex = mItemList.indexOf(parentWrapper);
if (parentWrapperIndex == -1) {
return;
}
collapseViews(parentWrapper, parentWrapperIndex);
}
/**
* Collapses all parents in a range of indices in the list of parents.
*
* @param startParentIndex The index at which to start collapsing parents
* @param parentCount The number of parents to collapse
*/
public void collapseParentRange(int startParentIndex, int parentCount) {
int endParentIndex = startParentIndex + parentCount;
for (int i = startParentIndex; i < endParentIndex; i++) {
collapseParent(i);
}
}
/**
* Collapses all parents in the list.
*/
public void collapseAllParents() {
for (ParentListItem parentListItem : mParentItemList) {
collapseParent(parentListItem);
}
}
/**
* Stores the expanded state map across state loss.
* <p>
* Should be called from {@link Activity#onSaveInstanceState(Bundle)} in
* the {@link Activity} that hosts the RecyclerView that this
* {@link ExpandableRecyclerAdapter} is attached to.
* <p>
* This will make sure to add the expanded state map as an extra to the
* instance state bundle to be used in {@link #onRestoreInstanceState(Bundle)}.
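* <p>
* Typical hookup from the hosting {@link Activity} (a sketch; {@code mAdapter} is assumed to
* be a field holding this adapter):
* <pre>{@code
* protected void onSaveInstanceState(Bundle outState) {
*     super.onSaveInstanceState(outState);
*     mAdapter.onSaveInstanceState(outState);
* }
*
* protected void onRestoreInstanceState(Bundle savedInstanceState) {
*     super.onRestoreInstanceState(savedInstanceState);
*     mAdapter.onRestoreInstanceState(savedInstanceState);
* }
* }</pre>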
*
* @param savedInstanceState The {@code Bundle} into which to store the
* expanded state map
*/
public void onSaveInstanceState(Bundle savedInstanceState) {
savedInstanceState.putSerializable(EXPANDED_STATE_MAP, generateExpandedStateMap());
}
/**
* Fetches the expandable state map from the saved instance state {@link Bundle}
* and restores the expanded states of all of the list items.
* <p>
* Should be called from {@link Activity#onRestoreInstanceState(Bundle)} in
* the {@link Activity} that hosts the RecyclerView that this
* {@link ExpandableRecyclerAdapter} is attached to.
* <p>
* Assumes that the list of parent list items is the same as when the saved
* instance state was stored.
*
* @param savedInstanceState The {@code Bundle} from which the expanded
* state map is loaded
*/
public void onRestoreInstanceState(Bundle savedInstanceState) {
if (savedInstanceState == null
|| !savedInstanceState.containsKey(EXPANDED_STATE_MAP)) {
return;
}
HashMap<Integer, Boolean> expandedStateMap = (HashMap<Integer, Boolean>) savedInstanceState.getSerializable(EXPANDED_STATE_MAP);
if (expandedStateMap == null) {
return;
}
List<Object> parentWrapperList = new ArrayList<>();
ParentListItem parentListItem;
ParentWrapper parentWrapper;
int parentListItemCount = mParentItemList.size();
for (int i = 0; i < parentListItemCount; i++) {
parentListItem = mParentItemList.get(i);
parentWrapper = new ParentWrapper(parentListItem);
parentWrapperList.add(parentWrapper);
if (expandedStateMap.containsKey(i)) {
boolean expanded = expandedStateMap.get(i);
if (expanded) {
parentWrapper.setExpanded(true);
int childListItemCount = parentWrapper.getChildItemList().size();
for (int j = 0; j < childListItemCount; j++) {
parentWrapperList.add(parentWrapper.getChildItemList().get(j));
}
}
}
}
mItemList = parentWrapperList;
notifyDataSetChanged();
}
/**
* Gets the list item held at the specified adapter position.
*
* @param position The index of the list item to return
* @return The list item at the specified position
*/
protected Object getListItem(int position) {
boolean indexInRange = position >= 0 && position < mItemList.size();
if (indexInRange) {
return mItemList.get(position);
} else {
return null;
}
}
/**
* Calls through to the ParentViewHolder to expand views for each
* RecyclerView the specified parent is a child of.
*
* These calls to the ParentViewHolder are made so that animations can be
* triggered at the ViewHolder level.
*
* @param parentWrapper The ParentWrapper of the parent to expand
* @param parentIndex The index of the parent to expand
*/
private void expandViews(ParentWrapper parentWrapper, int parentIndex) {
PVH viewHolder;
for (RecyclerView recyclerView : mAttachedRecyclerViewPool) {
viewHolder = (PVH) recyclerView.findViewHolderForAdapterPosition(parentIndex);
if (viewHolder != null && !viewHolder.isExpanded()) {
viewHolder.setExpanded(true);
viewHolder.onExpansionToggled(false);
}
expandParentListItem(parentWrapper, parentIndex, false);
}
}
/**
* Calls through to the ParentViewHolder to collapse views for each
* RecyclerView a specified parent is a child of.
*
* These calls to the ParentViewHolder are made so that animations can be
* triggered at the ViewHolder level.
*
* @param parentWrapper The ParentWrapper of the parent to collapse
* @param parentIndex The index of the parent to collapse
*/
private void collapseViews(ParentWrapper parentWrapper, int parentIndex) {
PVH viewHolder;
for (RecyclerView recyclerView : mAttachedRecyclerViewPool) {
viewHolder = (PVH) recyclerView.findViewHolderForAdapterPosition(parentIndex);
if (viewHolder != null && viewHolder.isExpanded()) {
viewHolder.setExpanded(false);
viewHolder.onExpansionToggled(true);
}
collapseParentListItem(parentWrapper, parentIndex, false);
}
}
/**
* Expands a specified parent item. Calls through to the
* ExpandCollapseListener and adds children of the specified parent to the
* total list of items.
*
* @param parentWrapper The ParentWrapper of the parent to expand
* @param parentIndex The index of the parent to expand
* @param expansionTriggeredByListItemClick true if expansion was triggered
* by a click event, false otherwise.
*/
private void expandParentListItem(ParentWrapper parentWrapper, int parentIndex, boolean expansionTriggeredByListItemClick) {
if (!parentWrapper.isExpanded()) {
parentWrapper.setExpanded(true);
List<?> childItemList = parentWrapper.getChildItemList();
if (childItemList != null) {
int childListItemCount = childItemList.size();
for (int i = 0; i < childListItemCount; i++) {
mItemList.add(parentIndex + i + 1, childItemList.get(i));
}
notifyItemRangeInserted(parentIndex + 1, childListItemCount);
}
if (expansionTriggeredByListItemClick && mExpandCollapseListener != null) {
int expandedCountBeforePosition = getExpandedItemCount(parentIndex);
mExpandCollapseListener.onListItemExpanded(parentIndex - expandedCountBeforePosition);
}
}
}
/**
* Collapses a specified parent item. Calls through to the
* ExpandCollapseListener and removes children of the specified parent from the
* total list of items.
*
* @param parentWrapper The ParentWrapper of the parent to collapse
* @param parentIndex The index of the parent to collapse
* @param collapseTriggeredByListItemClick true if the collapse was triggered
* by a click event, false otherwise.
*/
private void collapseParentListItem(ParentWrapper parentWrapper, int parentIndex, boolean collapseTriggeredByListItemClick) {
if (parentWrapper.isExpanded()) {
parentWrapper.setExpanded(false);
List<?> childItemList = parentWrapper.getChildItemList();
if (childItemList != null) {
int childListItemCount = childItemList.size();
for (int i = childListItemCount - 1; i >= 0; i--) {
mItemList.remove(parentIndex + i + 1);
}
notifyItemRangeRemoved(parentIndex + 1, childListItemCount);
}
if (collapseTriggeredByListItemClick && mExpandCollapseListener != null) {
int expandedCountBeforePosition = getExpandedItemCount(parentIndex);
mExpandCollapseListener.onListItemCollapsed(parentIndex - expandedCountBeforePosition);
}
}
}
/**
* Gets the number of expanded child list items before the specified position.
*
* @param position The index before which to return the number of expanded
* child list items
* @return The number of expanded child list items before the specified position
*/
private int getExpandedItemCount(int position) {
if (position == 0) {
return 0;
}
int expandedCount = 0;
for (int i = 0; i < position; i++) {
Object listItem = getListItem(i);
if (!(listItem instanceof ParentWrapper)) {
expandedCount++;
}
}
return expandedCount;
}
// endregion
// region Data Manipulation
/**
* Notify any registered observers that the ParentListItem reflected at {@code parentPosition}
* has been newly inserted. The ParentListItem previously at {@code parentPosition} is now at
* position {@code parentPosition + 1}.
* <p>
* This is a structural change event. Representations of other existing items in the
* data set are still considered up to date and will not be rebound, though their
* positions may be altered.
*
* @param parentPosition Position of the newly inserted ParentListItem in the data set, relative
* to list of ParentListItems only.
*
* @see #notifyParentItemRangeInserted(int, int)
*/
public void notifyParentItemInserted(int parentPosition) {
ParentListItem parentListItem = mParentItemList.get(parentPosition);
int wrapperIndex;
if (parentPosition < mParentItemList.size() - 1) {
wrapperIndex = getParentWrapperIndex(parentPosition);
} else {
wrapperIndex = mItemList.size();
}
int sizeChanged = addParentWrapper(wrapperIndex, parentListItem);
notifyItemRangeInserted(wrapperIndex, sizeChanged);
}
/**
* Notify any registered observers that the currently reflected {@code itemCount}
* ParentListItems starting at {@code parentPositionStart} have been newly inserted.
* The ParentListItems previously located at {@code parentPositionStart} and beyond
* can now be found starting at position {@code parentPositionStart + itemCount}.
* <p>
* This is a structural change event. Representations of other existing items in the
* data set are still considered up to date and will not be rebound, though their positions
* may be altered.
*
* @param parentPositionStart Position of the first ParentListItem that was inserted, relative
* to list of ParentListItems only.
* @param itemCount Number of items inserted
*
* @see #notifyParentItemInserted(int)
*/
public void notifyParentItemRangeInserted(int parentPositionStart, int itemCount) {
int initialWrapperIndex;
if (parentPositionStart < mParentItemList.size() - itemCount) {
initialWrapperIndex = getParentWrapperIndex(parentPositionStart);
} else {
initialWrapperIndex = mItemList.size();
}
int sizeChanged = 0;
int wrapperIndex = initialWrapperIndex;
int changed;
int parentPositionEnd = parentPositionStart + itemCount;
for (int i = parentPositionStart; i < parentPositionEnd; i++) {
ParentListItem parentListItem = mParentItemList.get(i);
changed = addParentWrapper(wrapperIndex, parentListItem);
wrapperIndex += changed;
sizeChanged += changed;
}
notifyItemRangeInserted(initialWrapperIndex, sizeChanged);
}
private int addParentWrapper(int wrapperIndex, ParentListItem parentListItem) {
int sizeChanged = 1;
ParentWrapper parentWrapper = new ParentWrapper(parentListItem);
mItemList.add(wrapperIndex, parentWrapper);
if (parentWrapper.isInitiallyExpanded()) {
parentWrapper.setExpanded(true);
List<?> childItemList = parentWrapper.getChildItemList();
mItemList.addAll(wrapperIndex + sizeChanged, childItemList);
sizeChanged += childItemList.size();
}
return sizeChanged;
}
/**
* Notify any registered observers that the ParentListItem previously located at {@code parentPosition}
* has been removed from the data set. The ParentListItems previously located at and after
* {@code parentPosition} may now be found at {@code oldPosition - 1}.
* <p>
* This is a structural change event. Representations of other existing items in the
* data set are still considered up to date and will not be rebound, though their positions
* may be altered.
*
* @param parentPosition Position of the ParentListItem that has now been removed, relative
* to list of ParentListItems only.
*/
public void notifyParentItemRemoved(int parentPosition) {
int sizeChanged = 1;
int wrapperIndex = getParentWrapperIndex(parentPosition);
ParentWrapper parentWrapper = (ParentWrapper) mItemList.remove(wrapperIndex);
if (parentWrapper.isExpanded()) {
int childListSize = parentWrapper.getChildItemList().size();
for (int i = 0; i < childListSize; i++) {
mItemList.remove(wrapperIndex);
sizeChanged++;
}
}
notifyItemRangeRemoved(wrapperIndex, sizeChanged);
}
/**
* Notify any registered observers that the ParentListItem at {@code parentPosition} has changed.
* This will also trigger an item changed event for the children of the specified ParentListItem.
* <p>
* This is an item change event, not a structural change event. It indicates that any
* reflection of the data at {@code parentPosition} is out of date and should be updated.
* The ParentListItem at {@code parentPosition} retains the same identity. This means
* the number of children must stay the same.
*
* @param parentPosition Position of the item that has changed
*/
public void notifyParentItemChanged(int parentPosition) {
ParentListItem parentListItem = mParentItemList.get(parentPosition);
int wrapperIndex = getParentWrapperIndex(parentPosition);
ParentWrapper parentWrapper = (ParentWrapper) mItemList.get(wrapperIndex);
parentWrapper.setParentListItem(parentListItem);
int sizeChanged = 1;
if (parentWrapper.isExpanded()) {
List<?> childItems = parentWrapper.getChildItemList();
int childListSize = childItems.size();
Object child;
for (int i = 0; i < childListSize; i++) {
child = childItems.get(i);
mItemList.set(wrapperIndex + i + 1, child);
sizeChanged++;
}
}
notifyItemRangeChanged(wrapperIndex, sizeChanged);
}
/**
* Notify any registered observers that the ParentListItem reflected at {@code parentPosition}
* has a ChildItem that has been newly inserted at {@code childPosition}.
* The ChildItem previously at {@code childPosition} is now at
* position {@code childPosition + 1}.
* <p>
* This is a structural change event. Representations of other existing items in the
* data set are still considered up to date and will not be rebound, though their
* positions may be altered.
*
* @param parentPosition Position of the ParentListItem to which a child has been added, relative
* to list of ParentListItems only.
* @param childPosition Position of the child object that has been inserted, relative to children
* of the ParentListItem specified by {@code parentPosition} only.
*
*/
public void notifyChildItemInserted(int parentPosition, int childPosition) {
ParentListItem parentListItem = mParentItemList.get(parentPosition);
Object child = parentListItem.getChildItemList().get(childPosition);
int parentWrapperIndex = getParentWrapperIndex(parentPosition);
ParentWrapper parentWrapper = (ParentWrapper) mItemList.get(parentWrapperIndex);
if (parentWrapper.isExpanded()) {
mItemList.add(parentWrapperIndex + childPosition + 1, child);
notifyItemInserted(parentWrapperIndex + childPosition + 1);
}
}
/**
* Notify any registered observers that the ParentListItem located at {@code parentPosition}
* has a ChildItem that has been removed from the data set, previously located at {@code childPosition}.
* The ChildItem previously located at and after {@code childPosition} may
* now be found at {@code childPosition - 1}.
* <p>
* This is a structural change event. Representations of other existing items in the
* data set are still considered up to date and will not be rebound, though their positions
* may be altered.
*
* @param parentPosition Position of the ParentListItem from which a child has been removed, relative
* to list of ParentListItems only.
* @param childPosition Position of the child object that has been removed, relative to children
* of the ParentListItem specified by {@code parentPosition} only.
*/
public void notifyChildItemRemoved(int parentPosition, int childPosition) {
int parentWrapperIndex = getParentWrapperIndex(parentPosition);
ParentWrapper parentWrapper = (ParentWrapper) mItemList.get(parentWrapperIndex);
if (parentWrapper.isExpanded()) {
mItemList.remove(parentWrapperIndex + childPosition + 1);
notifyItemRemoved(parentWrapperIndex + childPosition + 1);
}
}
/**
* Notify any registered observers that the ParentListItem at {@code parentPosition} has
* a child located at {@code childPosition} that has changed.
* <p>
* This is an item change event, not a structural change event. It indicates that any
* reflection of the data at {@code childPosition} is out of date and should be updated.
* The child at {@code childPosition} retains the same identity.
*
* @param parentPosition Position of the ParentListItem who has a child that has changed
* @param childPosition Position of the child that has changed
*/
public void notifyChildItemChanged(int parentPosition, int childPosition) {
ParentListItem parentListItem = mParentItemList.get(parentPosition);
int parentWrapperIndex = getParentWrapperIndex(parentPosition);
ParentWrapper parentWrapper = (ParentWrapper) mItemList.get(parentWrapperIndex);
parentWrapper.setParentListItem(parentListItem);
if (parentWrapper.isExpanded()) {
int listChildPosition = parentWrapperIndex + childPosition + 1;
Object child = parentWrapper.getChildItemList().get(childPosition);
mItemList.set(listChildPosition, child);
notifyItemChanged(listChildPosition);
}
}
// endregion
/**
* Generates a HashMap used to store expanded state for items in the list
* on configuration change or whenever onResume is called.
*
* @return A HashMap containing the expanded state of all parent list items
*/
private HashMap<Integer, Boolean> generateExpandedStateMap() {
HashMap<Integer, Boolean> parentListItemHashMap = new HashMap<>();
int childCount = 0;
Object listItem;
ParentWrapper parentWrapper;
int listItemCount = mItemList.size();
for (int i = 0; i < listItemCount; i++) {
if (mItemList.get(i) != null) {
listItem = getListItem(i);
if (listItem instanceof ParentWrapper) {
parentWrapper = (ParentWrapper) listItem;
parentListItemHashMap.put(i - childCount, parentWrapper.isExpanded());
} else {
childCount++;
}
}
}
return parentListItemHashMap;
}
/**
* Gets the index of a ParentWrapper within the full item list based on
* the index of the corresponding parent in the list of parent items.
*
* @param parentIndex The index of the parent in the list of parent items
* @return The index of the parent in the list of all views in the adapter
*/
private int getParentWrapperIndex(int parentIndex) {
int parentCount = 0;
int listItemCount = mItemList.size();
for (int i = 0; i < listItemCount; i++) {
if (mItemList.get(i) instanceof ParentWrapper) {
parentCount++;
if (parentCount > parentIndex) {
return i;
}
}
}
return -1;
}
/**
* Gets the ParentWrapper for a specified ParentListItem from the list of
* parents.
*
* @param parentListItem A ParentListItem in the list of parents
* @return If the parent exists on the list, returns its ParentWrapper.
* Otherwise, returns null.
*/
private ParentWrapper getParentWrapper(ParentListItem parentListItem) {
int listItemCount = mItemList.size();
for (int i = 0; i < listItemCount; i++) {
Object listItem = mItemList.get(i);
if (listItem instanceof ParentWrapper) {
if (((ParentWrapper) listItem).getParentListItem().equals(parentListItem)) {
return (ParentWrapper) listItem;
}
}
}
return null;
}
}
|
package org.ovirt.engine.ui.uicommonweb.models.disks;
import java.util.ArrayList;
import org.ovirt.engine.core.common.action.AddDiskParameters;
import org.ovirt.engine.core.common.action.AttachDettachVmDiskParameters;
import org.ovirt.engine.core.common.action.RemoveDiskParameters;
import org.ovirt.engine.core.common.action.UpdateVmDiskParameters;
import org.ovirt.engine.core.common.action.VdcActionParametersBase;
import org.ovirt.engine.core.common.action.VdcActionType;
import org.ovirt.engine.core.common.action.VmDiskOperatinParameterBase;
import org.ovirt.engine.core.common.businessentities.Disk;
import org.ovirt.engine.core.common.businessentities.Disk.DiskStorageType;
import org.ovirt.engine.core.common.businessentities.DiskImage;
import org.ovirt.engine.core.common.businessentities.DiskInterface;
import org.ovirt.engine.core.common.businessentities.ImageStatus;
import org.ovirt.engine.core.common.businessentities.LUNs;
import org.ovirt.engine.core.common.businessentities.LunDisk;
import org.ovirt.engine.core.common.businessentities.PropagateErrors;
import org.ovirt.engine.core.common.businessentities.Quota;
import org.ovirt.engine.core.common.businessentities.VM;
import org.ovirt.engine.core.common.businessentities.VmEntityType;
import org.ovirt.engine.core.common.businessentities.VolumeType;
import org.ovirt.engine.core.common.businessentities.storage_domains;
import org.ovirt.engine.core.common.businessentities.storage_pool;
import org.ovirt.engine.core.common.interfaces.SearchType;
import org.ovirt.engine.core.common.mode.ApplicationMode;
import org.ovirt.engine.core.common.queries.SearchParameters;
import org.ovirt.engine.core.common.queries.VdcQueryType;
import org.ovirt.engine.core.compat.Guid;
import org.ovirt.engine.core.compat.NGuid;
import org.ovirt.engine.core.compat.ObservableCollection;
import org.ovirt.engine.core.compat.StringHelper;
import org.ovirt.engine.core.searchbackend.SearchObjects;
import org.ovirt.engine.ui.frontend.AsyncQuery;
import org.ovirt.engine.ui.frontend.Frontend;
import org.ovirt.engine.ui.frontend.INewAsyncCallback;
import org.ovirt.engine.ui.uicommonweb.Linq;
import org.ovirt.engine.ui.uicommonweb.UICommand;
import org.ovirt.engine.ui.uicommonweb.dataprovider.AsyncDataProvider;
import org.ovirt.engine.ui.uicommonweb.models.EntityModel;
import org.ovirt.engine.ui.uicommonweb.models.ListModel;
import org.ovirt.engine.ui.uicommonweb.models.ListWithDetailsModel;
import org.ovirt.engine.ui.uicommonweb.models.configure.PermissionListModel;
import org.ovirt.engine.ui.uicommonweb.models.templates.CopyDiskModel;
import org.ovirt.engine.ui.uicommonweb.models.vms.DiskModel;
import org.ovirt.engine.ui.uicommonweb.models.vms.MoveDiskModel;
import org.ovirt.engine.ui.uicommonweb.models.vms.RemoveDiskModel;
import org.ovirt.engine.ui.uicompat.ConstantsManager;
import org.ovirt.engine.ui.uicompat.FrontendMultipleActionAsyncResult;
import org.ovirt.engine.ui.uicompat.IFrontendMultipleActionAsyncCallback;
@SuppressWarnings("unused")
public class DiskListModel extends ListWithDetailsModel
{
private UICommand privateNewCommand;
public UICommand getNewCommand()
{
return privateNewCommand;
}
private void setNewCommand(UICommand value)
{
privateNewCommand = value;
}
private UICommand privateEditCommand;
public UICommand getEditCommand()
{
return privateEditCommand;
}
private void setEditCommand(UICommand value)
{
privateEditCommand = value;
}
private UICommand privateRemoveCommand;
public UICommand getRemoveCommand()
{
return privateRemoveCommand;
}
private void setRemoveCommand(UICommand value)
{
privateRemoveCommand = value;
}
private UICommand privateMoveCommand;
public UICommand getMoveCommand()
{
return privateMoveCommand;
}
private void setMoveCommand(UICommand value)
{
privateMoveCommand = value;
}
private UICommand privateCopyCommand;
public UICommand getCopyCommand()
{
return privateCopyCommand;
}
private void setCopyCommand(UICommand value)
{
privateCopyCommand = value;
}
private ListModel diskVmListModel;
private ListModel diskTemplateListModel;
private ListModel diskStorageListModel;
public DiskListModel()
{
setTitle(ConstantsManager.getInstance().getConstants().disksTitle());
setDefaultSearchString("Disks:"); //$NON-NLS-1$
setSearchString(getDefaultSearchString());
setSearchObjects(new String[] { SearchObjects.DISK_OBJ_NAME, SearchObjects.DISK_PLU_OBJ_NAME });
setAvailableInModes(ApplicationMode.VirtOnly);
setNewCommand(new UICommand("New", this)); //$NON-NLS-1$
setEditCommand(new UICommand("Edit", this)); //$NON-NLS-1$
setRemoveCommand(new UICommand("Remove", this)); //$NON-NLS-1$
setMoveCommand(new UICommand("Move", this)); //$NON-NLS-1$
setCopyCommand(new UICommand("Copy", this)); //$NON-NLS-1$
UpdateActionAvailability();
getSearchNextPageCommand().setIsAvailable(true);
getSearchPreviousPageCommand().setIsAvailable(true);
}
@Override
protected void SyncSearch()
{
SearchParameters tempVar = new SearchParameters(getSearchString(), SearchType.Disk);
tempVar.setMaxCount(getSearchPageSize());
super.SyncSearch(VdcQueryType.Search, tempVar);
}
@Override
public void setItems(Iterable value)
{
if (value == null) {
super.setItems(null);
return;
}
ArrayList<Disk> disks = Linq.<Disk> Cast(value);
super.setItems(disks);
}
@Override
protected void InitDetailModels()
{
super.InitDetailModels();
diskVmListModel = new DiskVmListModel();
diskVmListModel.setIsAvailable(false);
diskTemplateListModel = new DiskTemplateListModel();
diskTemplateListModel.setIsAvailable(false);
diskStorageListModel = new DiskStorageListModel();
diskStorageListModel.setIsAvailable(false);
ObservableCollection<EntityModel> list = new ObservableCollection<EntityModel>();
list.add(new DiskGeneralModel());
list.add(diskVmListModel);
list.add(diskTemplateListModel);
list.add(diskStorageListModel);
list.add(new PermissionListModel());
setDetailModels(list);
}
@Override
protected void UpdateDetailsAvailability()
{
if (getSelectedItem() != null)
{
Disk disk = (Disk) getSelectedItem();
diskVmListModel.setIsAvailable(disk.getVmEntityType() != VmEntityType.TEMPLATE);
diskTemplateListModel.setIsAvailable(disk.getVmEntityType() == VmEntityType.TEMPLATE);
diskStorageListModel.setIsAvailable(disk.getDiskStorageType() == DiskStorageType.IMAGE);
}
}
public void Cancel()
{
setWindow(null);
}
@Override
protected void OnSelectedItemChanged()
{
super.OnSelectedItemChanged();
UpdateActionAvailability();
}
@Override
protected void SelectedItemsChanged()
{
super.SelectedItemsChanged();
UpdateActionAvailability();
}
private void New()
{
DiskModel model = new DiskModel();
setWindow(model);
model.setTitle(ConstantsManager.getInstance().getConstants().addVirtualDiskTitle());
model.setHashName("new_virtual_disk"); //$NON-NLS-1$
model.setIsNew(true);
model.getIsInVm().setEntity(false);
model.StartProgress(null);
AsyncDataProvider.GetDataCenterList(new AsyncQuery(this, new INewAsyncCallback() {
@Override
public void OnSuccess(Object target, Object returnValue) {
DiskListModel diskListModel = (DiskListModel) target;
DiskModel diskModel = (DiskModel) diskListModel.getWindow();
ArrayList<storage_pool> dataCenters = (ArrayList<storage_pool>) returnValue;
diskModel.getDataCenter().setItems(dataCenters);
diskModel.getDataCenter().setSelectedItem(Linq.FirstOrDefault(dataCenters));
ArrayList<UICommand> commands = new ArrayList<UICommand>();
UICommand tempVar2 = new UICommand("OnSave", diskListModel); //$NON-NLS-1$
tempVar2.setTitle(ConstantsManager.getInstance().getConstants().ok());
tempVar2.setIsDefault(true);
diskModel.getCommands().add(tempVar2);
UICommand tempVar3 = new UICommand("Cancel", diskListModel); //$NON-NLS-1$
tempVar3.setTitle(ConstantsManager.getInstance().getConstants().cancel());
tempVar3.setIsCancel(true);
diskModel.getCommands().add(tempVar3);
diskModel.StopProgress();
}
}));
}
private void Edit()
{
}
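// Persists the dialog state: either attaches the selected existing disks,
// or builds an image/direct LUN disk and runs AddDisk or UpdateVmDisk accordingly.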
private void OnSave()
{
DiskModel model = (DiskModel) getWindow();
if (model.getProgress() != null || !model.Validate())
{
return;
}
if ((Boolean) model.getAttachDisk().getEntity())
{
OnAttachDisks();
return;
}
// Save changes.
storage_domains storageDomain = (storage_domains) model.getStorageDomain().getSelectedItem();
Disk disk;
if ((Boolean) model.getIsInternal().getEntity()) {
DiskImage diskImage = model.getIsNew() ? new DiskImage() : (DiskImage) getSelectedItem();
diskImage.setSizeInGigabytes(Integer.parseInt(model.getSize().getEntity().toString()));
diskImage.setvolume_type((VolumeType) model.getVolumeType().getSelectedItem());
diskImage.setvolume_format(model.getVolumeFormat());
if (model.getQuota().getIsAvailable()) {
diskImage.setQuotaId(((Quota) model.getQuota().getSelectedItem()).getId());
}
disk = diskImage;
}
else {
LunDisk lunDisk = model.getIsNew() ? new LunDisk() : (LunDisk) getSelectedItem();
lunDisk.setLun((LUNs) model.getSanStorageModel().getAddedLuns().get(0).getEntity());
disk = lunDisk;
}
disk.setDiskAlias((String) model.getAlias().getEntity());
disk.setDiskDescription((String) model.getDescription().getEntity());
disk.setDiskInterface((DiskInterface) model.getInterface().getSelectedItem());
disk.setWipeAfterDelete((Boolean) model.getWipeAfterDelete().getEntity());
disk.setBoot((Boolean) model.getIsBootable().getEntity());
disk.setShareable((Boolean) model.getIsShareable().getEntity());
disk.setPlugged((Boolean) model.getIsPlugged().getEntity());
disk.setPropagateErrors(PropagateErrors.Off);
VdcActionType actionType;
VmDiskOperatinParameterBase parameters;
if (model.getIsNew())
{
parameters = new AddDiskParameters(Guid.Empty, disk);
((AddDiskParameters) parameters).setStorageDomainId(storageDomain.getId());
actionType = VdcActionType.AddDisk;
}
else
{
parameters = new UpdateVmDiskParameters(Guid.Empty, disk.getId(), disk);
actionType = VdcActionType.UpdateVmDisk;
}
model.StartProgress(null);
ArrayList<VdcActionParametersBase> paramerterList = new ArrayList<VdcActionParametersBase>();
paramerterList.add(parameters);
Frontend.RunMultipleAction(actionType, paramerterList,
new IFrontendMultipleActionAsyncCallback() {
@Override
public void Executed(FrontendMultipleActionAsyncResult result) {
DiskListModel localModel = (DiskListModel) result.getState();
localModel.getWindow().StopProgress();
Cancel();
}
},
this);
}
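// Attaches the disks chosen in the dialog to the VM (entity), one AttachDiskToVm
// action per selected disk, honoring the "plugged" checkbox.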
private void OnAttachDisks()
{
VM vm = (VM) getEntity();
DiskModel model = (DiskModel) getWindow();
ArrayList<VdcActionParametersBase> paramerterList = new ArrayList<VdcActionParametersBase>();
ArrayList<EntityModel> disksToAttach = (Boolean) model.getIsInternal().getEntity() ?
(ArrayList<EntityModel>) model.getInternalAttachableDisks().getSelectedItems() :
(ArrayList<EntityModel>) model.getExternalAttachableDisks().getSelectedItems();
for (EntityModel item : disksToAttach)
{
DiskModel disk = (DiskModel) item.getEntity();
AttachDettachVmDiskParameters parameters = new AttachDettachVmDiskParameters(
vm.getId(), disk.getDisk().getId(), (Boolean) model.getIsPlugged().getEntity());
paramerterList.add(parameters);
}
model.StartProgress(null);
Frontend.RunMultipleAction(VdcActionType.AttachDiskToVm, paramerterList,
new IFrontendMultipleActionAsyncCallback() {
@Override
public void Executed(FrontendMultipleActionAsyncResult result) {
DiskListModel localModel = (DiskListModel) result.getState();
localModel.getWindow().StopProgress();
Cancel();
}
},
this);
}
private void Move()
{
ArrayList<DiskImage> disks = (ArrayList<DiskImage>) getSelectedItems();
if (disks == null || getWindow() != null)
{
return;
}
MoveDiskModel model = new MoveDiskModel();
model.setIsSingleDiskMove(disks.size() == 1);
setWindow(model);
model.setTitle(ConstantsManager.getInstance().getConstants().moveDisksTitle());
model.setHashName("move_disks"); //$NON-NLS-1$
model.setIsSourceStorageDomainNameAvailable(true);
model.setEntity(this);
model.init(disks);
model.StartProgress(null);
}
private void Copy()
{
ArrayList<DiskImage> disks = (ArrayList<DiskImage>) getSelectedItems();
if (disks == null || getWindow() != null)
{
return;
}
CopyDiskModel model = new CopyDiskModel();
model.setIsSingleDiskMove(disks.size() == 1);
setWindow(model);
model.setTitle(ConstantsManager.getInstance().getConstants().copyDisksTitle());
model.setHashName("copy_disks"); //$NON-NLS-1$
model.setEntity(this);
model.init(disks);
model.StartProgress(null);
}
private void Remove()
{
if (getWindow() != null)
{
return;
}
RemoveDiskModel model = new RemoveDiskModel();
setWindow(model);
model.setTitle(ConstantsManager.getInstance().getConstants().removeDisksTitle());
model.setHashName("remove_disk"); //$NON-NLS-1$
model.setMessage(ConstantsManager.getInstance().getConstants().disksMsg());
model.getLatch().setIsAvailable(false);
ArrayList<String> items = new ArrayList<String>();
for (Object item : getSelectedItems())
{
Disk disk = (Disk) item;
items.add(disk.getDiskAlias());
}
model.setItems(items);
UICommand tempVar = new UICommand("OnRemove", this); //$NON-NLS-1$
tempVar.setTitle(ConstantsManager.getInstance().getConstants().ok());
tempVar.setIsDefault(true);
model.getCommands().add(tempVar);
UICommand tempVar2 = new UICommand("Cancel", this); //$NON-NLS-1$
tempVar2.setTitle(ConstantsManager.getInstance().getConstants().cancel());
tempVar2.setIsCancel(true);
model.getCommands().add(tempVar2);
}
private void OnRemove()
{
RemoveDiskModel model = (RemoveDiskModel) getWindow();
ArrayList<VdcActionParametersBase> paramerterList = new ArrayList<VdcActionParametersBase>();
for (Object item : getSelectedItems())
{
Disk disk = (Disk) item;
VdcActionParametersBase parameters = new RemoveDiskParameters(disk.getId());
paramerterList.add(parameters);
}
model.StartProgress(null);
Frontend.RunMultipleAction(VdcActionType.RemoveDisk, paramerterList,
new IFrontendMultipleActionAsyncCallback() {
@Override
public void Executed(FrontendMultipleActionAsyncResult result) {
DiskListModel localModel = (DiskListModel) result.getState();
localModel.StopProgress();
Cancel();
}
},
this);
}
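// Recomputes whether the New/Edit/Remove/Move/Copy commands are enabled for the current selection.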
private void UpdateActionAvailability()
{
Disk disk = (Disk) getSelectedItem();
ArrayList<Disk> disks = getSelectedItems() != null ? (ArrayList<Disk>) getSelectedItems() : null;
boolean isDiskLocked = disk != null && disk.getDiskStorageType() == DiskStorageType.IMAGE &&
((DiskImage) disk).getimageStatus() == ImageStatus.LOCKED;
getNewCommand().setIsExecutionAllowed(true);
getEditCommand().setIsExecutionAllowed(disk != null && disks != null && disks.size() == 1 && !isDiskLocked);
getRemoveCommand().setIsExecutionAllowed(disks != null && disks.size() > 0 && isRemoveCommandAvailable());
getMoveCommand().setIsExecutionAllowed(disks != null && disks.size() > 0 && isMoveCommandAvailable());
getCopyCommand().setIsExecutionAllowed(disks != null && disks.size() > 0 && isCopyCommandAvailable());
}
private boolean isMoveCommandAvailable() {
ArrayList<Disk> disks =
getSelectedItems() != null ? Linq.<Disk> Cast(getSelectedItems()) : new ArrayList<Disk>();
if (disks.isEmpty() || disks.get(0).getDiskStorageType() != DiskStorageType.IMAGE) {
return false;
}
NGuid datacenterId = ((DiskImage) disks.get(0)).getstorage_pool_id();
for (Disk disk : disks)
{
if (disk.getDiskStorageType() != DiskStorageType.IMAGE) {
return false;
}
DiskImage diskImage = (DiskImage) disk;
if (diskImage.getimageStatus() != ImageStatus.OK ||
disk.getVmEntityType() == VmEntityType.TEMPLATE ||
!datacenterId.equals(diskImage.getstorage_pool_id()))
{
return false;
}
}
return true;
}
private boolean isCopyCommandAvailable() {
ArrayList<Disk> disks =
getSelectedItems() != null ? Linq.<Disk> Cast(getSelectedItems()) : new ArrayList<Disk>();
if (disks.isEmpty() || disks.get(0).getDiskStorageType() != DiskStorageType.IMAGE) {
return false;
}
NGuid datacenterId = ((DiskImage) disks.get(0)).getstorage_pool_id();
for (Disk disk : disks)
{
if (disk.getDiskStorageType() != DiskStorageType.IMAGE) {
return false;
}
DiskImage diskImage = (DiskImage) disk;
if (diskImage.getimageStatus() != ImageStatus.OK || disk.getVmEntityType() != VmEntityType.TEMPLATE ||
!datacenterId.equals(diskImage.getstorage_pool_id()))
{
return false;
}
}
return true;
}
private boolean isRemoveCommandAvailable() {
ArrayList<Disk> disks =
getSelectedItems() != null ? Linq.<Disk> Cast(getSelectedItems()) : new ArrayList<Disk>();
for (Disk disk : disks)
{
boolean isTemplateDisk = disk.getVmEntityType() == VmEntityType.TEMPLATE;
boolean isImageLocked = disk.getDiskStorageType() == DiskStorageType.IMAGE
&& ((DiskImage) disk).getimageStatus() == ImageStatus.LOCKED;
if (isTemplateDisk || isImageLocked)
{
return false;
}
}
return true;
}
@Override
public void ExecuteCommand(UICommand command)
{
super.ExecuteCommand(command);
if (command == getNewCommand())
{
New();
}
else if (command == getEditCommand())
{
Edit();
}
else if (command == getRemoveCommand())
{
Remove();
}
else if (command == getMoveCommand())
{
Move();
}
else if (command == getCopyCommand())
{
Copy();
}
else if (StringHelper.stringsEqual(command.getName(), "OnSave")) //$NON-NLS-1$
{
OnSave();
}
else if (StringHelper.stringsEqual(command.getName(), "Cancel")) //$NON-NLS-1$
{
Cancel();
}
else if (StringHelper.stringsEqual(command.getName(), "OnRemove")) //$NON-NLS-1$
{
OnRemove();
}
}
@Override
public boolean IsSearchStringMatch(String searchString)
{
return searchString.trim().toLowerCase().startsWith("disk"); //$NON-NLS-1$
}
@Override
protected String getListName() {
return "DiskListModel"; //$NON-NLS-1$
}
}
|
package org.ovirt.engine.ui.uicommonweb.models.vms;
import java.util.ArrayList;
import java.util.List;
import org.ovirt.engine.core.common.action.AttachDettachVmDiskParameters;
import org.ovirt.engine.core.common.action.ChangeQuotaParameters;
import org.ovirt.engine.core.common.action.HotPlugDiskToVmParameters;
import org.ovirt.engine.core.common.action.RemoveDiskParameters;
import org.ovirt.engine.core.common.action.VdcActionParametersBase;
import org.ovirt.engine.core.common.action.VdcActionType;
import org.ovirt.engine.core.common.businessentities.Disk;
import org.ovirt.engine.core.common.businessentities.Disk.DiskStorageType;
import org.ovirt.engine.core.common.businessentities.DiskImage;
import org.ovirt.engine.core.common.businessentities.DiskInterface;
import org.ovirt.engine.core.common.businessentities.ImageStatus;
import org.ovirt.engine.core.common.businessentities.Quota;
import org.ovirt.engine.core.common.businessentities.VM;
import org.ovirt.engine.core.common.businessentities.VMStatus;
import org.ovirt.engine.core.common.businessentities.StoragePool;
import org.ovirt.engine.core.common.queries.ConfigurationValues;
import org.ovirt.engine.core.common.queries.GetAllDisksByVmIdParameters;
import org.ovirt.engine.core.common.queries.VdcQueryType;
import org.ovirt.engine.core.compat.StringHelper;
import org.ovirt.engine.core.compat.Version;
import org.ovirt.engine.ui.frontend.AsyncQuery;
import org.ovirt.engine.ui.frontend.Frontend;
import org.ovirt.engine.ui.frontend.INewAsyncCallback;
import org.ovirt.engine.ui.uicommonweb.Linq;
import org.ovirt.engine.ui.uicommonweb.Linq.DiskByAliasComparer;
import org.ovirt.engine.ui.uicommonweb.UICommand;
import org.ovirt.engine.ui.uicommonweb.dataprovider.AsyncDataProvider;
import org.ovirt.engine.ui.uicommonweb.models.ISupportSystemTreeContext;
import org.ovirt.engine.ui.uicommonweb.models.SystemTreeItemModel;
import org.ovirt.engine.ui.uicommonweb.models.quota.ChangeQuotaItemModel;
import org.ovirt.engine.ui.uicommonweb.models.quota.ChangeQuotaModel;
import org.ovirt.engine.ui.uicompat.ConstantsManager;
import org.ovirt.engine.ui.uicompat.FrontendMultipleActionAsyncResult;
import org.ovirt.engine.ui.uicompat.IFrontendMultipleActionAsyncCallback;
import org.ovirt.engine.ui.uicompat.PropertyChangedEventArgs;
@SuppressWarnings("unused")
public class VmDiskListModel extends VmDiskListModelBase
{
private UICommand privateNewCommand;
public UICommand getNewCommand()
{
return privateNewCommand;
}
private void setNewCommand(UICommand value)
{
privateNewCommand = value;
}
private UICommand privateEditCommand;
@Override
public UICommand getEditCommand()
{
return privateEditCommand;
}
private void setEditCommand(UICommand value)
{
privateEditCommand = value;
}
private UICommand privateRemoveCommand;
public UICommand getRemoveCommand()
{
return privateRemoveCommand;
}
private void setRemoveCommand(UICommand value)
{
privateRemoveCommand = value;
}
private UICommand privatePlugCommand;
public UICommand getPlugCommand()
{
return privatePlugCommand;
}
private void setPlugCommand(UICommand value)
{
privatePlugCommand = value;
}
private UICommand privateUnPlugCommand;
public UICommand getUnPlugCommand()
{
return privateUnPlugCommand;
}
private void setUnPlugCommand(UICommand value)
{
privateUnPlugCommand = value;
}
ISupportSystemTreeContext systemTreeContext;
public ISupportSystemTreeContext getSystemTreeContext() {
return systemTreeContext;
}
public void setSystemTreeContext(ISupportSystemTreeContext systemTreeContext) {
this.systemTreeContext = systemTreeContext;
}
private UICommand privateChangeQuotaCommand;
public UICommand getChangeQuotaCommand()
{
return privateChangeQuotaCommand;
}
private void setChangeQuotaCommand(UICommand value)
{
privateChangeQuotaCommand = value;
}
private UICommand privateMoveCommand;
public UICommand getMoveCommand()
{
return privateMoveCommand;
}
private void setMoveCommand(UICommand value)
{
privateMoveCommand = value;
}
private boolean privateIsDiskHotPlugSupported;
public boolean getIsDiskHotPlugSupported()
{
VM vm = getEntity();
boolean isVmStatusApplicableForHotPlug =
vm != null && (vm.getStatus() == VMStatus.Up || vm.getStatus() == VMStatus.Down ||
vm.getStatus() == VMStatus.Paused || vm.getStatus() == VMStatus.Suspended);
return privateIsDiskHotPlugSupported && isVmStatusApplicableForHotPlug;
}
private void setIsDiskHotPlugSupported(boolean value)
{
if (privateIsDiskHotPlugSupported != value)
{
privateIsDiskHotPlugSupported = value;
onPropertyChanged(new PropertyChangedEventArgs("IsDiskHotPlugSupported")); //$NON-NLS-1$
}
}
private boolean isLiveStorageMigrationEnabled;
public boolean getIsLiveStorageMigrationEnabled()
{
return isLiveStorageMigrationEnabled;
}
private void setIsLiveStorageMigrationEnabled(boolean value)
{
if (isLiveStorageMigrationEnabled != value)
{
isLiveStorageMigrationEnabled = value;
onPropertyChanged(new PropertyChangedEventArgs("IsLiveStorageMigrationEnabled")); //$NON-NLS-1$
}
}
public VmDiskListModel()
{
setTitle(ConstantsManager.getInstance().getConstants().disksTitle());
setHashName("disks"); //$NON-NLS-1$
setNewCommand(new UICommand("New", this)); //$NON-NLS-1$
setEditCommand(new UICommand("Edit", this)); //$NON-NLS-1$
setRemoveCommand(new UICommand("Remove", this)); //$NON-NLS-1$
setPlugCommand(new UICommand("Plug", this)); //$NON-NLS-1$
setUnPlugCommand(new UICommand("Unplug", this)); //$NON-NLS-1$
setMoveCommand(new UICommand("Move", this)); //$NON-NLS-1$
setChangeQuotaCommand(new UICommand("changeQuota", this)); //$NON-NLS-1$
getChangeQuotaCommand().setIsAvailable(false);
updateActionAvailability();
}
@Override
public VM getEntity()
{
return (VM) super.getEntity();
}
public void setEntity(VM value)
{
super.setEntity(value);
}
@Override
protected void onEntityChanged()
{
super.onEntityChanged();
if (getEntity() != null)
{
getSearchCommand().execute();
updateIsDiskHotPlugAvailable();
updateLiveStorageMigrationEnabled();
}
updateActionAvailability();
}
@Override
protected void syncSearch()
{
if (getEntity() == null)
{
return;
}
VM vm = getEntity();
super.syncSearch(VdcQueryType.GetAllDisksByVmId, new GetAllDisksByVmIdParameters(vm.getId()));
}
@Override
protected void asyncSearch()
{
super.asyncSearch();
VM vm = getEntity();
setAsyncResult(null);
setItems(getAsyncResult().getData());
}
@Override
public void setItems(Iterable value)
{
ArrayList<Disk> disks =
value != null ? Linq.<Disk> cast(value) : new ArrayList<Disk>();
Linq.sort(disks, new DiskByAliasComparer());
super.setItems(disks);
updateActionAvailability();
}
private void newEntity()
{
final VM vm = getEntity();
if (getWindow() != null)
{
return;
}
NewDiskModel model = new NewDiskModel();
model.setTitle(ConstantsManager.getInstance().getConstants().addVirtualDiskTitle());
model.setHashName("new_virtual_disk"); //$NON-NLS-1$
model.setVm(vm);
setWindow(model);
UICommand cancelCommand = new UICommand("Cancel", this); //$NON-NLS-1$
cancelCommand.setTitle(ConstantsManager.getInstance().getConstants().cancel());
cancelCommand.setIsCancel(true);
model.setCancelCommand(cancelCommand);
model.initialize();
}
private void changeQuota() {
ArrayList<DiskImage> disks = (ArrayList<DiskImage>) getSelectedItems();
if (disks == null || getWindow() != null)
{
return;
}
ChangeQuotaModel model = new ChangeQuotaModel();
setWindow(model);
model.setTitle(ConstantsManager.getInstance().getConstants().assignQuotaForDisk());
model.setHashName("change_quota_disks"); //$NON-NLS-1$
model.startProgress(null);
model.init(disks);
UICommand command = new UICommand("onChangeQuota", this); //$NON-NLS-1$
command.setTitle(ConstantsManager.getInstance().getConstants().ok());
command.setIsDefault(true);
model.getCommands().add(command);
command = new UICommand("Cancel", this); //$NON-NLS-1$
command.setTitle(ConstantsManager.getInstance().getConstants().cancel());
command.setIsCancel(true);
model.getCommands().add(command);
}
private void onChangeQuota() {
ChangeQuotaModel model = (ChangeQuotaModel) getWindow();
ArrayList<VdcActionParametersBase> paramerterList = new ArrayList<VdcActionParametersBase>();
for (Object item : model.getItems())
{
ChangeQuotaItemModel itemModel = (ChangeQuotaItemModel) item;
DiskImage disk = (DiskImage) itemModel.getEntity();
VdcActionParametersBase parameters =
new ChangeQuotaParameters(((Quota) itemModel.getQuota().getSelectedItem()).getId(),
disk.getId(),
disk.getStorageIds().get(0),
disk.getStoragePoolId().getValue());
paramerterList.add(parameters);
}
model.startProgress(null);
Frontend.RunMultipleAction(VdcActionType.ChangeQuotaForDisk, paramerterList,
new IFrontendMultipleActionAsyncCallback() {
@Override
public void executed(FrontendMultipleActionAsyncResult result) {
cancel();
}
},
this);
}
private void edit()
{
final Disk disk = (Disk) getSelectedItem();
if (getWindow() != null)
{
return;
}
EditDiskModel model = new EditDiskModel();
model.setTitle(ConstantsManager.getInstance().getConstants().editVirtualDiskTitle());
model.setHashName("edit_virtual_disk"); //$NON-NLS-1$
model.setVm(getEntity());
model.setDisk(disk);
setWindow(model);
UICommand cancelCommand = new UICommand("Cancel", this); //$NON-NLS-1$
cancelCommand.setTitle(ConstantsManager.getInstance().getConstants().cancel());
cancelCommand.setIsCancel(true);
model.setCancelCommand(cancelCommand);
model.initialize();
}
private void remove()
{
if (getWindow() != null)
{
return;
}
RemoveDiskModel model = new RemoveDiskModel();
setWindow(model);
model.setTitle(ConstantsManager.getInstance().getConstants().removeDisksTitle());
model.setHashName("remove_disk"); //$NON-NLS-1$
model.setMessage(ConstantsManager.getInstance().getConstants().disksMsg());
model.getLatch().setEntity(false);
ArrayList<DiskModel> items = new ArrayList<DiskModel>();
for (Object item : getSelectedItems())
{
Disk disk = (Disk) item;
DiskModel diskModel = new DiskModel();
diskModel.setDisk(disk);
diskModel.setVm(getEntity());
items.add(diskModel);
// A shared disk can only be detached
if (disk.getNumberOfVms() > 1) {
model.getLatch().setIsChangable(false);
}
}
model.setItems(items);
UICommand tempVar = new UICommand("OnRemove", this); //$NON-NLS-1$
tempVar.setTitle(ConstantsManager.getInstance().getConstants().ok());
tempVar.setIsDefault(true);
model.getCommands().add(tempVar);
UICommand tempVar2 = new UICommand("Cancel", this); //$NON-NLS-1$
tempVar2.setTitle(ConstantsManager.getInstance().getConstants().cancel());
tempVar2.setIsCancel(true);
model.getCommands().add(tempVar2);
}
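// Depending on the "remove permanently" latch, either removes the selected disks
// (RemoveDisk) or merely detaches them from the VM (DetachDiskFromVm).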
private void onRemove() {
VM vm = getEntity();
RemoveDiskModel model = (RemoveDiskModel) getWindow();
boolean removeDisk = (Boolean) model.getLatch().getEntity();
VdcActionType actionType = removeDisk ? VdcActionType.RemoveDisk : VdcActionType.DetachDiskFromVm;
ArrayList<VdcActionParametersBase> paramerterList = new ArrayList<VdcActionParametersBase>();
for (Object item : getSelectedItems()) {
Disk disk = (Disk) item;
VdcActionParametersBase parameters = removeDisk ?
new RemoveDiskParameters(disk.getId()) :
new AttachDettachVmDiskParameters(vm.getId(), disk.getId(), true);
paramerterList.add(parameters);
}
model.startProgress(null);
Frontend.RunMultipleAction(actionType, paramerterList,
new IFrontendMultipleActionAsyncCallback() {
@Override
public void executed(FrontendMultipleActionAsyncResult result) {
VmDiskListModel localModel = (VmDiskListModel) result.getState();
localModel.stopProgress();
cancel();
}
},
this);
}
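// Hot-plugs (plug == true) or hot-unplugs the selected disks to/from the VM
// via HotPlugDiskToVm / HotUnPlugDiskFromVm.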
private void plug(boolean plug) {
VM vm = getEntity();
ArrayList<VdcActionParametersBase> paramerterList = new ArrayList<VdcActionParametersBase>();
for (Object item : getSelectedItems()) {
Disk disk = (Disk) item;
disk.setPlugged(plug);
paramerterList.add(new HotPlugDiskToVmParameters(vm.getId(), disk.getId()));
}
VdcActionType plugAction = VdcActionType.HotPlugDiskToVm;
if (!plug) {
plugAction = VdcActionType.HotUnPlugDiskFromVm;
}
Frontend.RunMultipleAction(plugAction, paramerterList,
new IFrontendMultipleActionAsyncCallback() {
@Override
public void executed(FrontendMultipleActionAsyncResult result) {
}
},
this);
}
private void move()
{
ArrayList<DiskImage> disks = (ArrayList<DiskImage>) getSelectedItems();
if (disks == null)
{
return;
}
if (getWindow() != null)
{
return;
}
VM vm = getEntity();
MoveDiskModel model = new MoveDiskModel();
setWindow(model);
model.setVmId(vm.getStatus() == VMStatus.Up ? vm.getId() : null);
model.setWarningAvailable(vm.getStatus() == VMStatus.Up);
model.setMessage(vm.getStatus() == VMStatus.Up ?
ConstantsManager.getInstance().getConstants().liveStorageMigrationWarning() :
null);
model.setTitle(ConstantsManager.getInstance().getConstants().moveDisksTitle());
model.setHashName("move_disk"); //$NON-NLS-1$
model.setIsSourceStorageDomainNameAvailable(true);
model.setEntity(this);
model.init(disks);
model.startProgress(null);
}
private void cancel()
{
setWindow(null);
Frontend.Unsubscribe();
}
@Override
protected void onSelectedItemChanged()
{
super.onSelectedItemChanged();
updateActionAvailability();
}
@Override
protected void selectedItemsChanged()
{
super.selectedItemsChanged();
updateActionAvailability();
}
@Override
protected void entityPropertyChanged(Object sender, PropertyChangedEventArgs e)
{
super.entityPropertyChanged(sender, e);
if (e.PropertyName.equals("status")) //$NON-NLS-1$
{
updateActionAvailability();
}
}
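// Recomputes command availability (New/Edit/Remove/Move/Plug/Unplug/ChangeQuota)
// based on the current selection, disk state and VM status.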
private void updateActionAvailability()
{
VM vm = getEntity();
Disk disk = (Disk) getSelectedItem();
boolean isDiskLocked = disk != null && disk.getDiskStorageType() == DiskStorageType.IMAGE &&
((DiskImage) disk).getImageStatus() == ImageStatus.LOCKED;
getNewCommand().setIsExecutionAllowed(true);
getEditCommand().setIsExecutionAllowed(getSelectedItem() != null && getSelectedItems() != null
&& getSelectedItems().size() == 1 && (isVmDown() || !disk.getPlugged()) && !isDiskLocked);
getRemoveCommand().setIsExecutionAllowed(getSelectedItems() != null && getSelectedItems().size() > 0
&& isRemoveCommandAvailable());
getMoveCommand().setIsExecutionAllowed(getSelectedItems() != null && getSelectedItems().size() > 0
&& (isMoveCommandAvailable() || isLiveMoveCommandAvailable()));
getPlugCommand().setIsExecutionAllowed(isPlugCommandAvailable(true));
getUnPlugCommand().setIsExecutionAllowed(isPlugCommandAvailable(false));
ChangeQuotaModel.updateChangeQuotaActionAvailability(getItems() != null ? (List<Disk>) getItems() : null,
getSelectedItems() != null ? (List<Disk>) getSelectedItems() : null,
getSystemTreeSelectedItem(),
getChangeQuotaCommand());
}
public boolean isVmDown() {
VM vm = getEntity();
return vm != null && vm.getStatus() == VMStatus.Down;
}
public boolean isHotPlugAvailable() {
VM vm = getEntity();
return vm != null && (vm.getStatus() == VMStatus.Up ||
vm.getStatus() == VMStatus.Paused || vm.getStatus() == VMStatus.Suspended);
}
private boolean isPlugCommandAvailable(boolean plug) {
return getSelectedItems() != null && getSelectedItems().size() > 0
&& isPlugAvailableByDisks(plug) &&
(isVmDown() || (isHotPlugAvailable() && getIsDiskHotPlugSupported()));
}
private boolean isPlugAvailableByDisks(boolean plug) {
ArrayList<Disk> disks =
getSelectedItems() != null ? Linq.<Disk> cast(getSelectedItems()) : new ArrayList<Disk>();
for (Disk disk : disks)
{
boolean isLocked =
disk.getDiskStorageType() == DiskStorageType.IMAGE
&& ((DiskImage) disk).getImageStatus() == ImageStatus.LOCKED;
if (disk.getPlugged() == plug || isLocked || (disk.getDiskInterface() == DiskInterface.IDE && !isVmDown()))
{
return false;
}
}
return true;
}
private boolean isImageDiskOK(Disk disk) {
return disk.getDiskStorageType() == DiskStorageType.IMAGE &&
((DiskImage) disk).getImageStatus() == ImageStatus.OK;
}
private boolean isMoveCommandAvailable() {
ArrayList<Disk> disks =
getSelectedItems() != null ? Linq.<Disk> cast(getSelectedItems()) : new ArrayList<Disk>();
for (Disk disk : disks) {
if (!isImageDiskOK(disk) || (!isVmDown() && disk.getPlugged())) {
return false;
}
}
return true;
}
private boolean isLiveMoveCommandAvailable() {
if (!getIsLiveStorageMigrationEnabled()) {
return false;
}
VM vm = getEntity();
if (vm == null || !vm.getStatus().isUpOrPaused()) {
return false;
}
ArrayList<Disk> disks = getSelectedItems() != null ?
Linq.<Disk> cast(getSelectedItems()) : new ArrayList<Disk>();
for (Disk disk : disks) {
if (!isImageDiskOK(disk)) {
return false;
}
}
return true;
}
private boolean isRemoveCommandAvailable() {
ArrayList<Disk> disks =
getSelectedItems() != null ? Linq.<Disk> cast(getSelectedItems()) : new ArrayList<Disk>();
for (Disk disk : disks)
{
if ((disk.getDiskStorageType() == DiskStorageType.IMAGE &&
((DiskImage) disk).getImageStatus() == ImageStatus.LOCKED) || (!isVmDown() && disk.getPlugged()))
{
return false;
}
}
return true;
}
@Override
public void executeCommand(UICommand command)
{
super.executeCommand(command);
if (command == getNewCommand())
{
newEntity();
}
else if (command == getEditCommand())
{
edit();
}
else if (command == getRemoveCommand())
{
remove();
}
else if (command == getMoveCommand())
{
move();
}
else if (StringHelper.stringsEqual(command.getName(), "Cancel")) //$NON-NLS-1$
{
cancel();
}
else if (StringHelper.stringsEqual(command.getName(), "OnRemove")) //$NON-NLS-1$
{
onRemove();
}
else if (command == getPlugCommand())
{
plug(true);
}
else if (command == getUnPlugCommand())
{
plug(false);
} else if (command == getChangeQuotaCommand()) {
changeQuota();
} else if (command.getName().equals("onChangeQuota")) { //$NON-NLS-1$
onChangeQuota();
}
}
protected void updateIsDiskHotPlugAvailable()
{
VM vm = getEntity();
Version clusterCompatibilityVersion = vm.getVdsGroupCompatibilityVersion();
if (clusterCompatibilityVersion == null) {
setIsDiskHotPlugSupported(false);
} else {
setIsDiskHotPlugSupported((Boolean) AsyncDataProvider.getConfigValuePreConverted(
ConfigurationValues.HotPlugEnabled, clusterCompatibilityVersion.toString()));
}
}
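// Resolves the VM's data center and checks whether the LiveMigrateVmDisks action is
// compatible with both the cluster and data-center compatibility versions; the result
// toggles live storage migration availability.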
protected void updateLiveStorageMigrationEnabled()
{
final VM vm = getEntity();
AsyncDataProvider.getDataCenterById(new AsyncQuery(this, new INewAsyncCallback() {
@Override
public void onSuccess(Object target, Object returnValue) {
VmDiskListModel model = (VmDiskListModel) target;
StoragePool dataCenter = (StoragePool) returnValue;
Version dcCompatibilityVersion = dataCenter.getcompatibility_version() != null
? dataCenter.getcompatibility_version() : new Version();
AsyncDataProvider.isCommandCompatible(new AsyncQuery(model,
new INewAsyncCallback() {
@Override
public void onSuccess(Object target, Object returnValue) {
VmDiskListModel model = (VmDiskListModel) target;
model.setIsLiveStorageMigrationEnabled((Boolean) returnValue);
}
}),
VdcActionType.LiveMigrateVmDisks,
vm.getVdsGroupCompatibilityVersion(),
dcCompatibilityVersion);
}
}), vm.getStoragePoolId().getValue());
}
@Override
protected String getListName() {
return "VmDiskListModel"; //$NON-NLS-1$
}
public SystemTreeItemModel getSystemTreeSelectedItem() {
if (getSystemTreeContext() == null) {
return null;
}
return getSystemTreeContext().getSystemTreeSelectedItem();
}
}
|
package edu.northwestern.bioinformatics.studycalendar.grid;
import edu.northwestern.bioinformatics.studycalendar.StudyCalendarSystemException;
import edu.northwestern.bioinformatics.studycalendar.dao.SubjectDao;
import edu.northwestern.bioinformatics.studycalendar.domain.Epoch;
import edu.northwestern.bioinformatics.studycalendar.domain.Gender;
import edu.northwestern.bioinformatics.studycalendar.domain.Population;
import edu.northwestern.bioinformatics.studycalendar.domain.ScheduledCalendar;
import edu.northwestern.bioinformatics.studycalendar.domain.Study;
import edu.northwestern.bioinformatics.studycalendar.domain.StudySegment;
import edu.northwestern.bioinformatics.studycalendar.domain.StudySite;
import edu.northwestern.bioinformatics.studycalendar.domain.StudySubjectAssignment;
import edu.northwestern.bioinformatics.studycalendar.domain.Subject;
import edu.northwestern.bioinformatics.studycalendar.security.authorization.PscUser;
import edu.northwestern.bioinformatics.studycalendar.security.authorization.PscUserDetailsService;
import edu.northwestern.bioinformatics.studycalendar.service.StudyService;
import edu.northwestern.bioinformatics.studycalendar.service.SubjectService;
import gov.nih.nci.cabig.ccts.domain.IdentifierType;
import gov.nih.nci.cabig.ccts.domain.OrganizationAssignedIdentifierType;
import gov.nih.nci.cabig.ccts.domain.ParticipantType;
import gov.nih.nci.cabig.ccts.domain.Registration;
import gov.nih.nci.cabig.ccts.domain.ScheduledTreatmentEpochType;
import gov.nih.nci.cabig.ctms.audit.dao.AuditHistoryRepository;
import gov.nih.nci.cabig.ctms.suite.authorization.SuiteRole;
import gov.nih.nci.cabig.ctms.suite.authorization.SuiteRoleMembership;
import gov.nih.nci.ccts.grid.common.RegistrationConsumerI;
import gov.nih.nci.ccts.grid.stubs.types.InvalidRegistrationException;
import gov.nih.nci.ccts.grid.stubs.types.RegistrationConsumptionException;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.oasis.wsrf.properties.GetMultipleResourcePropertiesResponse;
import org.oasis.wsrf.properties.GetMultipleResourceProperties_Element;
import org.oasis.wsrf.properties.GetResourcePropertyResponse;
import org.oasis.wsrf.properties.QueryResourcePropertiesResponse;
import org.oasis.wsrf.properties.QueryResourceProperties_Element;
import org.springframework.beans.factory.annotation.Required;
import org.springframework.transaction.annotation.Transactional;
import javax.xml.namespace.QName;
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
* @author <a href="mailto:joshua.phillips@semanticbits.com">Joshua Phillips</a>
*/
@Transactional(readOnly = false)
public class PSCRegistrationConsumer implements RegistrationConsumerI {
private static final Log logger = LogFactory.getLog(PSCRegistrationConsumer.class);
public static final String SERVICE_BEAN_NAME = "scheduledCalendarService";
private static final String MRN_IDENTIFIER_TYPE = "MRN";
private static final String COORDINATING_CENTER_IDENTIFIER_TYPE = "Coordinating Center Identifier";
// private StudyDao studyDao;
private StudyService studyService;
private SubjectDao subjectDao;
private SubjectService subjectService;
private AuditHistoryRepository auditHistoryRepository;
private String registrationConsumerGridServiceUrl;
private String rollbackTimeOut;
private PscUserDetailsService pscUserDetailsService;
private RegistrationGridServiceAuthorizationHelper gridServicesAuthorizationHelper;
/**
* Retrieves the current caller's REGISTRAR role membership.
* @return the caller's {@code SuiteRoleMembership} for the REGISTRAR role, or {@code null} if the caller has no such membership
*/
private SuiteRoleMembership getUserSuiteRoleMembership(){
String userName = getGridServicesAuthorizationHelper().getCurrentUsername();
SuiteRoleMembership suiteRoleMembership;
if (userName != null){
PscUser loadedUser = pscUserDetailsService.loadUserByUsername(userName);
Map<SuiteRole, SuiteRoleMembership> memberships = loadedUser.getMemberships();
suiteRoleMembership = memberships.get(SuiteRole.REGISTRAR);
return suiteRoleMembership;
}
return null;
}
public boolean authorizedStudyIdentifier(String studyIdentifier,SuiteRoleMembership suiteRoleMembership ){
if(suiteRoleMembership.isAllStudies()){
return true;
}else {
return suiteRoleMembership.getStudyIdentifiers().contains(studyIdentifier);
}
}
public boolean authorizedSiteIdentifier(String siteidentifier,SuiteRoleMembership suiteRoleMembership){
if(suiteRoleMembership.isAllSites()){
return true;
}else {
return suiteRoleMembership.getSiteIdentifiers().contains(siteidentifier);
}
}
/**
* Does nothing, because the registration message is already committed by default.
*
* @param registration
* @throws RemoteException
* @throws InvalidRegistrationException
*/
public void commit(final Registration registration) throws RemoteException, InvalidRegistrationException {
}
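/**
* Rolls a registration back: if the subject itself was created by this grid service within
* the configured rollback window, the subject is deleted; otherwise only the matching
* study assignments created by this service within that window are removed.
*/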
public void rollback(final Registration registration) throws RemoteException, InvalidRegistrationException {
//Get the study
String ccIdentifier = findCoordinatingCenterIdentifier(registration);
Study study = fetchStudy(ccIdentifier);
if (study == null) {
String message = "Study identified by Coordinating Center Identifier '" + ccIdentifier + "' doesn't exist";
throw getInvalidRegistrationException(message);
}
//Get the studySite
String siteNCICode = registration.getStudySite().getHealthcareSite(0).getNciInstituteCode();
StudySite studySite = findStudySite(study, siteNCICode);
if (studySite == null) {
siteNCICode = registration.getStudySite().getHealthcareSite(0).getGridId();
if((siteNCICode != null) && !(siteNCICode.equals(""))){
studySite = findStudySite(study, siteNCICode);
}
if (studySite == null){
String message = "The study '" + study.getLongTitle() + "', identified by Coordinating Center Identifier '" + ccIdentifier
+ "' is not associated to a site identified by NCI code :'" + siteNCICode + "'";
throw getInvalidRegistrationException(message);
}
}
//Get the Subject
String mrn = findMedicalRecordNumber(registration.getParticipant());
Subject subject = fetchCommitedSubject(mrn);
if (subject == null) {
String message = "Exception while rollback subject..no subject found with given identifier: " + mrn;
throw getInvalidRegistrationException(message);
}
try {
boolean checkIfSubjectWasCreatedByGridService = auditHistoryRepository.checkIfEntityWasCreatedByUrl(subject.getClass(),
subject.getId(), registrationConsumerGridServiceUrl);
Calendar calendar = Calendar.getInstance();
Integer rollbackTime = 1;
try {
rollbackTime = Integer.parseInt(rollbackTimeOut);
} catch (NumberFormatException e) {
logger.error(String.format("error parsing value of rollback time out. Value of rollback time out %s must be integer.", rollbackTimeOut));
}
boolean checkIfSubjectWasCreatedOneMinuteBeforeCurrentTime = auditHistoryRepository.
checkIfEntityWasCreatedMinutesBeforeSpecificDate(subject.getClass(), subject.getId(), calendar, rollbackTime);
if (checkIfSubjectWasCreatedByGridService && checkIfSubjectWasCreatedOneMinuteBeforeCurrentTime) {
logger.info("Subject (id:" + subject.getId() + ") was created by the grid service url:" + registrationConsumerGridServiceUrl);
logger.info(String.format("Subject was created %s minute before the current time:%s", rollbackTime, calendar.getTime().toString()));
logger.info("So deleting the subject: " + subject.getId());
subjectDao.delete(subject);
}else{
removeStudyAssignments(subject, studySite, rollbackTime);
}
} catch (Exception exception) {
String message = "Error while rollback, " + exception.getMessage();
throw getRegistrationConsumerException(message);
}
}
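/**
* Removes the subject's assignments for the given study site, but only those that were
* created by this grid service within the rollback window; saves the subject if any
* assignment was removed, otherwise just logs why nothing could be rolled back.
*/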
private void removeStudyAssignments(Subject subject, StudySite studySite, Integer rollbackTime){
Calendar calendar = Calendar.getInstance();
List<StudySubjectAssignment> assignmentList = subject.getAssignments();
List<StudySubjectAssignment> tempStudySubjectAssignmentList = new ArrayList<StudySubjectAssignment>();
for (StudySubjectAssignment studySubjectAssignment: assignmentList){
if(studySubjectAssignment.getStudySite().getId().equals(studySite.getId())){
boolean checkIfAssignmentWasCreatedOneMinuteBeforeCurrentTime = auditHistoryRepository.
checkIfEntityWasCreatedMinutesBeforeSpecificDate(studySubjectAssignment.getClass(),
studySubjectAssignment.getId(), calendar, rollbackTime);
boolean checkIfAssignmentWasCreatedByGridService = auditHistoryRepository.checkIfEntityWasCreatedByUrl(studySubjectAssignment.getClass(), studySubjectAssignment.getId(), registrationConsumerGridServiceUrl);
if (checkIfAssignmentWasCreatedByGridService && checkIfAssignmentWasCreatedOneMinuteBeforeCurrentTime){
tempStudySubjectAssignmentList.add(studySubjectAssignment);
}
}
}
if(tempStudySubjectAssignmentList.size() > 0){
logger.info(String.format("SubjectAssignment was created %s minute before the current time:%s", rollbackTime, calendar.getTime().toString()));
logger.info(String.format("So deleting subjectAssignment for Subject: " + subject.getId()
+ "and StudySite: " + studySite.getId()));
for(StudySubjectAssignment studySubjectAssignmentObj: tempStudySubjectAssignmentList){
assignmentList.remove(studySubjectAssignmentObj);
}
subjectDao.save(subject);
}else{
logger.info(String.format("Subject/ SubjectAssignment was not created %s minute " +
"before the current time:%s so can not rollback this registration:%s",
rollbackTime, calendar.getTime().toString(), subject.getId()));
}
}
/*
* (non-Javadoc)
* @see gov.nih.nci.cabig.ctms.common.RegistrationConsumer#createRegistration(gov.nih.nci.cabig.ctms.grid.RegistrationType)
*/
public Registration register(final Registration registration) throws RemoteException, InvalidRegistrationException,
RegistrationConsumptionException {
// Authorization: only callers with the REGISTRAR role may process registrations; anyone else is denied.
SuiteRoleMembership suiteRoleMembership = getUserSuiteRoleMembership();
if(suiteRoleMembership == null){
String message = "Access Denied: user does not have REGISTRAR role";
throw getInvalidRegistrationException(message);
}
String ccIdentifier = findCoordinatingCenterIdentifier(registration);
// Authorization for study
if(!authorizedStudyIdentifier(ccIdentifier, suiteRoleMembership)){
String message = "Access Denied: Registrar is not authorized for the Study:" + ccIdentifier;
throw getInvalidRegistrationException(message);
}
Study study = fetchStudy(ccIdentifier);
if (study == null) {
String message = "Study identified by Coordinating Center Identifier '" + ccIdentifier + "' doesn't exist";
throw getInvalidRegistrationException(message);
}
String siteNCICode = registration.getStudySite().getHealthcareSite(0).getNciInstituteCode();
StudySite studySite = findStudySite(study, siteNCICode);
if (studySite == null) {
siteNCICode = registration.getStudySite().getHealthcareSite(0).getGridId();
if((siteNCICode != null) && !(siteNCICode.equals(""))){
studySite = findStudySite(study, siteNCICode);
}
if (studySite == null){
String message = "The study '" + study.getLongTitle() + "', identified by Coordinating Center Identifier '" + ccIdentifier
+ "' is not associated to a site identified by NCI code :'" + siteNCICode + "'";
throw getInvalidRegistrationException(message);
}
}
// Authorization for site
if(!authorizedSiteIdentifier(siteNCICode, suiteRoleMembership)){
String message = "Access Denied: Registrar is not authorized for the associated StudySite:" + siteNCICode;
throw getInvalidRegistrationException(message);
}
String mrn = findMedicalRecordNumber(registration.getParticipant());
Subject subject = fetchCommitedSubject(mrn);
if (subject == null) {
subject = createSubject(registration.getParticipant(), mrn);
subjectDao.save(subject);
} else {
StudySubjectAssignment assignment = subjectDao.getAssignment(subject, study, studySite.getSite());
if (assignment != null) {
String message = "Subject already assigned to this study. Use scheduleNextArm to change to the next arm.";
throw getInvalidRegistrationException(message);
}
}
// retrieve the arm (study segment)
StudySegment studySegment = null;
StudySegment loadedStudySegment = null;
if (registration.getScheduledEpoch() != null
&& registration.getScheduledEpoch() instanceof ScheduledTreatmentEpochType
&& ((ScheduledTreatmentEpochType) registration.getScheduledEpoch()).getScheduledArm() != null
&& ((ScheduledTreatmentEpochType) registration.getScheduledEpoch()).getScheduledArm().getArm() != null) {
studySegment = new StudySegment();
studySegment.setName(((ScheduledTreatmentEpochType) registration.getScheduledEpoch()).getScheduledArm()
.getArm().getName());
studySegment.setGridId(((ScheduledTreatmentEpochType) registration.getScheduledEpoch()).getScheduledArm()
.getArm().getGridId());
loadedStudySegment = loadAndValidateStudySegmentInStudy(study, studySegment);
} else {
try {
loadedStudySegment = study.getPlannedCalendar().getEpochs().get(0).getStudySegments().get(0);
} catch (Exception e) {
String message = "The study '" + study.getLongTitle() + "', identified by Coordinating Center Identifier '" + ccIdentifier
+ "' does not have any arm'";
throw getInvalidRegistrationException(message);
}
}
String registrationGridId = registration.getGridId();
// Using the informed consent date as the calendar start date
Date startDate = registration.getInformedConsentFormSignedDate();
if (startDate == null) {
startDate = new Date();
}
StudySubjectAssignment newAssignment = null;
try {
newAssignment = subjectService.assignSubject(studySite,
new edu.northwestern.bioinformatics.studycalendar.service.presenter.Registration.Builder().
subject(subject).firstStudySegment(loadedStudySegment).date(startDate).
desiredAssignmentId(registrationGridId).studySubjectId(registrationGridId).
populations(Collections.<Population>emptySet()).
toRegistration());
} catch (StudyCalendarSystemException exp) {
throw getRegistrationConsumerException(exp.getMessage());
}
ScheduledCalendar scheduledCalendar = newAssignment.getScheduledCalendar();
logger.info("Created assignment " + scheduledCalendar.getId());
return registration;
}
private Subject fetchCommitedSubject(String mrn) {
return subjectService.findSubjectByPersonId(mrn);
}
private StudySite findStudySite(final Study study, final String siteNCICode) {
for (StudySite studySite : study.getStudySites()) {
if (StringUtils.equals(studySite.getSite().getAssignedIdentifier(), siteNCICode)) {
return studySite;
}
}
return null;
}
/*
* Finds the coordinating center identifier for the study
*/
private String findCoordinatingCenterIdentifier(final Registration registration)
throws InvalidRegistrationException {
String ccIdentifier = findIdentifierOfType(registration.getStudyRef().getIdentifier(),
COORDINATING_CENTER_IDENTIFIER_TYPE);
if (ccIdentifier == null) {
String message = "In StudyRef-Identifiers, Coordinating Center Identifier is not available";
throw getInvalidRegistrationException(message);
}
return ccIdentifier;
}
private InvalidRegistrationException getInvalidRegistrationException(String message) {
InvalidRegistrationException invalidRegistrationException = new InvalidRegistrationException();
invalidRegistrationException.setFaultReason(message);
invalidRegistrationException.setFaultString(message);
logger.error(message);
return invalidRegistrationException;
}
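/**
* Returns the value of the first organization-assigned identifier of the requested type,
* or {@code null} if none is present.
*/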
private String findIdentifierOfType(final IdentifierType[] idTypes, final String ofType) {
if (idTypes == null) {
return null;
}
for (IdentifierType identifierType : idTypes) {
if (identifierType instanceof OrganizationAssignedIdentifierType && StringUtils.equals(identifierType.getType(), ofType)) {
return identifierType.getValue();
}
}
return null;
}
private Study fetchStudy(final String ccIdentifier) {
Study study = studyService.getStudyByAssignedIdentifier(ccIdentifier);
return study;
}
private String findMedicalRecordNumber(final ParticipantType participantType) throws InvalidRegistrationException {
String subjectIdentifier = findIdentifierOfType(participantType.getIdentifier(), MRN_IDENTIFIER_TYPE);
if (subjectIdentifier == null) {
String message = "There is no identifier associated to this subject, Medical Record Number(MRN) is needed to register this subject ";
throw getInvalidRegistrationException(message);
}
return subjectIdentifier;
}
private StudySegment loadAndValidateStudySegmentInStudy(final Study study, final StudySegment requiredStudySegment) throws InvalidRegistrationException {
for (Epoch epoch : study.getPlannedCalendar().getEpochs()) {
List<StudySegment> studySegments = epoch.getStudySegments();
for (StudySegment studySegment : studySegments) {
if (studySegment.getName().equals(requiredStudySegment.getName())) {
return studySegment;
}
}
}
String message = "Arm " + requiredStudySegment.getName() + " not part of template for study "
+ study.getLongTitle();
throw getInvalidRegistrationException(message);
}
private Subject createSubject(final ParticipantType participantType, final String mrn) {
Subject subject = new Subject();
subject.setGridId(participantType.getGridId());
if (Gender.getByCode(participantType.getAdministrativeGenderCode()) != null) {
subject.setGender(Gender.getByCode(participantType.getAdministrativeGenderCode()));
} else {
subject.setGender(Gender.MALE);
}
subject.setDateOfBirth(participantType.getBirthDate());
subject.setFirstName(participantType.getFirstName());
subject.setLastName(participantType.getLastName());
subject.setPersonId(mrn);
return subject;
}
private RegistrationConsumptionException getRegistrationConsumerException(String message) {
RegistrationConsumptionException registrationConsumptionException = new RegistrationConsumptionException();
registrationConsumptionException.setFaultReason(message);
registrationConsumptionException.setFaultString(message);
logger.error(message);
return registrationConsumptionException;
}
@Required
public void setStudyService(StudyService studyService) {
this.studyService = studyService;
}
@Required
public void setSubjectDao(SubjectDao subjectDao) {
this.subjectDao = subjectDao;
}
@Required
public void setSubjectService(SubjectService subjectService) {
this.subjectService = subjectService;
}
@Required
public void setAuditHistoryRepository(AuditHistoryRepository auditHistoryRepository) {
this.auditHistoryRepository = auditHistoryRepository;
}
@Required
public void setRegistrationConsumerGridServiceUrl(String registrationConsumerGridServiceUrl) {
this.registrationConsumerGridServiceUrl = registrationConsumerGridServiceUrl;
}
@Required
public void setRollbackTimeOut(String rollbackTimeOut) {
this.rollbackTimeOut = rollbackTimeOut;
}
public GetMultipleResourcePropertiesResponse getMultipleResourceProperties(final GetMultipleResourceProperties_Element getMultipleResourceProperties_element) throws RemoteException {
return null; // Not implemented: WSRF resource properties are not used by this registration consumer.
}
public GetResourcePropertyResponse getResourceProperty(final QName qName) throws RemoteException {
return null; // Not implemented: WSRF resource properties are not used by this registration consumer.
}
public QueryResourcePropertiesResponse queryResourceProperties(final QueryResourceProperties_Element queryResourceProperties_element) throws RemoteException {
return null; // Not implemented: WSRF resource properties are not used by this registration consumer.
}
public PscUserDetailsService getPscUserDetailsService() {
return pscUserDetailsService;
}
@Required
public void setPscUserDetailsService(PscUserDetailsService pscUserDetailsService) {
this.pscUserDetailsService = pscUserDetailsService;
}
public RegistrationGridServiceAuthorizationHelper getGridServicesAuthorizationHelper() {
if(gridServicesAuthorizationHelper==null){
gridServicesAuthorizationHelper = new RegistrationGridServiceAuthorizationHelper();
}
return gridServicesAuthorizationHelper;
}
public void setGridServicesAuthorizationHelper(
RegistrationGridServiceAuthorizationHelper gridServicesAuthorizationHelper) {
this.gridServicesAuthorizationHelper = gridServicesAuthorizationHelper;
}
}
|
package org.hibernate.ogm.datastore.infinispan.impl;
import javax.transaction.TransactionManager;
import org.hibernate.service.jta.platform.spi.JtaPlatform;
import org.infinispan.transaction.lookup.TransactionManagerLookup;
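/**
* Adapts Hibernate's {@link JtaPlatform} to Infinispan's {@link TransactionManagerLookup},
* so Infinispan resolves the same {@link TransactionManager} as Hibernate OGM.
*/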
public class TransactionManagerLookupDelegator implements TransactionManagerLookup {
private final JtaPlatform platform;
public TransactionManagerLookupDelegator(JtaPlatform platform) {
this.platform = platform;
}
@Override
public TransactionManager getTransactionManager() throws Exception {
if ( platform != null ) {
return platform.retrieveTransactionManager();
}
else {
return null;
}
}
protected boolean isValid() {
return platform != null && platform.retrieveTransactionManager() != null;
}
}
|
package com.sequenceiq.it.cloudbreak.testcase.e2e.environment;
import java.util.Map;
import javax.inject.Inject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.annotations.Test;
import com.sequenceiq.environment.api.v1.environment.model.response.EnvironmentStatus;
import com.sequenceiq.it.cloudbreak.assertion.util.CloudProviderSideTagAssertion;
import com.sequenceiq.it.cloudbreak.client.CredentialTestClient;
import com.sequenceiq.it.cloudbreak.client.DistroXTestClient;
import com.sequenceiq.it.cloudbreak.client.EnvironmentTestClient;
import com.sequenceiq.it.cloudbreak.client.SdxTestClient;
import com.sequenceiq.it.cloudbreak.context.Description;
import com.sequenceiq.it.cloudbreak.context.RunningParameter;
import com.sequenceiq.it.cloudbreak.context.TestContext;
import com.sequenceiq.it.cloudbreak.dto.credential.CredentialTestDto;
import com.sequenceiq.it.cloudbreak.dto.distrox.DistroXTestDto;
import com.sequenceiq.it.cloudbreak.dto.environment.EnvironmentTestDto;
import com.sequenceiq.it.cloudbreak.dto.sdx.SdxInternalTestDto;
import com.sequenceiq.it.cloudbreak.dto.telemetry.TelemetryTestDto;
import com.sequenceiq.it.cloudbreak.testcase.e2e.AbstractE2ETest;
import com.sequenceiq.sdx.api.model.SdxClusterStatusResponse;
public class EnvironmentStopStartTests extends AbstractE2ETest {
private static final Logger LOGGER = LoggerFactory.getLogger(EnvironmentStopStartTests.class);
private static final Map<String, String> ENV_TAGS = Map.of("envTagKey", "envTagValue");
private static final Map<String, String> SDX_TAGS = Map.of("sdxTagKey", "sdxTagValue");
private static final Map<String, String> DX1_TAGS = Map.of("distroxTagKey", "distroxTagValue");
@Inject
private EnvironmentTestClient environmentTestClient;
@Inject
private SdxTestClient sdxTestClient;
@Inject
private DistroXTestClient distroXTestClient;
@Inject
private CredentialTestClient credentialTestClient;
@Inject
private CloudProviderSideTagAssertion cloudProviderSideTagAssertion;
@Override
protected void setupTest(TestContext testContext) {
createDefaultUser(testContext);
initializeDefaultBlueprints(testContext);
}
@Test(dataProvider = TEST_CONTEXT, timeOut = 9000000)
@Description(
given = "there is a running cloudbreak",
when = "create an attached SDX and Datahub",
then = "should be stopped first and started after it")
public void testCreateStopStartEnvironment(TestContext testContext) {
LOGGER.info("Environment stop-start test execution has been started....");
testContext
.given(CredentialTestDto.class)
.when(credentialTestClient.create())
.given("telemetry", TelemetryTestDto.class)
.withLogging()
.withReportClusterLogs()
.given(EnvironmentTestDto.class)
.withNetwork()
.withTelemetry("telemetry")
.withCreateFreeIpa(Boolean.TRUE)
.addTags(ENV_TAGS)
.when(environmentTestClient.create())
.given(SdxInternalTestDto.class)
.addTags(SDX_TAGS)
.withCloudStorage(getCloudStorageRequest(testContext))
.when(sdxTestClient.createInternal())
.given(EnvironmentTestDto.class)
.await(EnvironmentStatus.AVAILABLE)
.then(cloudProviderSideTagAssertion.verifyEnvironmentTags(ENV_TAGS))
.given(SdxInternalTestDto.class)
.await(SdxClusterStatusResponse.RUNNING)
.then(cloudProviderSideTagAssertion.verifyInternalSdxTags(SDX_TAGS))
.given("dx1", DistroXTestDto.class)
.addTags(DX1_TAGS)
.when(distroXTestClient.create(), RunningParameter.key("dx1"))
.given("dx2", DistroXTestDto.class)
.when(distroXTestClient.create(), RunningParameter.key("dx2"))
.given("dx1", DistroXTestDto.class)
.await(STACK_AVAILABLE, RunningParameter.key("dx1"))
.then(cloudProviderSideTagAssertion.verifyDistroxTags(DX1_TAGS))
.given("dx2", DistroXTestDto.class)
.await(STACK_AVAILABLE, RunningParameter.key("dx2"))
.given(EnvironmentTestDto.class)
.when(environmentTestClient.stop())
.await(EnvironmentStatus.ENV_STOPPED)
.given(EnvironmentTestDto.class)
.when(environmentTestClient.start())
.await(EnvironmentStatus.AVAILABLE)
.validate();
LOGGER.info("Environment stop-start test execution has been finished....");
}
}
|
package com.atlassian.jira.plugins.dvcs.smartcommits;
import com.atlassian.activeobjects.external.ActiveObjects;
import com.atlassian.jira.plugins.dvcs.activeobjects.QueryHelper;
import com.atlassian.jira.plugins.dvcs.activeobjects.v3.ChangesetMapping;
import com.atlassian.jira.plugins.dvcs.activeobjects.v3.RepositoryMapping;
import com.atlassian.jira.plugins.dvcs.dao.ChangesetDao;
import com.atlassian.jira.plugins.dvcs.dao.impl.ChangesetDaoImpl;
import com.atlassian.jira.plugins.dvcs.model.Changeset;
import com.atlassian.jira.plugins.dvcs.model.DefaultProgress;
import com.atlassian.jira.plugins.dvcs.model.Repository;
import com.atlassian.jira.plugins.dvcs.service.ChangesetService;
import com.atlassian.jira.plugins.dvcs.smartcommits.model.CommandsResults;
import com.atlassian.jira.plugins.dvcs.smartcommits.model.CommitCommands;
import com.atlassian.jira.plugins.dvcs.sync.Synchronizer;
import com.atlassian.jira.plugins.dvcs.sync.impl.DefaultSynchronizer;
import com.atlassian.sal.api.transaction.TransactionCallback;
import net.java.ao.EntityStreamCallback;
import net.java.ao.Query;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.concurrent.Executors;
import static org.fest.assertions.api.Assertions.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Matchers.isA;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@SuppressWarnings("all")
public class SmartcommitOperationTest
{
private static final int CHANGESET_ID = 1;
private static final int REPOSITORY_ID = 100;
private CommitMessageParser commitMessageParser = new DefaultCommitMessageParser();
@Mock
private SmartcommitsService smartcommitsServiceMock;
@Mock
private SmartcommitsChangesetsProcessor changesetsProcessorMock;
@Mock
private ActiveObjects activeObjectsMock;
@Mock
private QueryHelper queryHelper;
@Mock
private Repository repositoryMock;
@Mock
private ChangesetService changesetServiceMock;
ChangesetDao changesetDao;
SmartcommitOperation operation;
private Synchronizer synchronizer;
public SmartcommitOperationTest() {
super();
}
@SuppressWarnings("unchecked")
@BeforeMethod
public void setUp()
{
MockitoAnnotations.initMocks(this);
changesetDao = new ChangesetDaoImpl(activeObjectsMock, queryHelper);
synchronizer = new DefaultSynchronizer(Executors.newSingleThreadScheduledExecutor(), changesetsProcessorMock);
operation = new SmartcommitOperation(changesetDao, commitMessageParser, smartcommitsServiceMock, synchronizer, repositoryMock, changesetServiceMock);
final ChangesetMapping sampleChangesetMapping = sampleChangesetMapping();
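// simulate ActiveObjects streaming: whenever stream() is called, feed the single sample
// ChangesetMapping into the EntityStreamCallback supplied by the code under test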
Mockito.doAnswer(new Answer<Object>()
{
@Override
public Object answer(InvocationOnMock invocation) throws Throwable
{
((EntityStreamCallback)invocation.getArguments()[2]).onRowRead(sampleChangesetMapping);
return null;
}
}).when(activeObjectsMock).stream(Mockito.isA(Class.class), Mockito.isA(Query.class), Mockito.isA(EntityStreamCallback.class));
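// run any transaction callback inline so the ChangesetDao code under test executes synchronously
// against the mocked ActiveObjects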
when(activeObjectsMock.executeInTransaction(isA(TransactionCallback.class))).thenAnswer(new Answer<Object>()
{
@Override
public Object answer(InvocationOnMock invocationOnMock) throws Throwable
{
return ((TransactionCallback) invocationOnMock.getArguments()[0]).doInTransaction();
}
});
when(activeObjectsMock.get(eq(ChangesetMapping.class), eq(CHANGESET_ID))).thenReturn(sampleChangesetMapping);
when(repositoryMock.getId()).thenReturn(REPOSITORY_ID);
}
@SuppressWarnings("unchecked")
@Test
public void testRunOperation()
{
when(smartcommitsServiceMock.doCommands(any(CommitCommands.class))).thenReturn(new CommandsResults());
operation.run();
verify(smartcommitsServiceMock).doCommands(any(CommitCommands.class));
}
@SuppressWarnings("unchecked")
@Test
public void testSmartCommitWithError()
{
final ChangesetMapping changesetMapping = sampleChangesetMapping();
when(activeObjectsMock.find(eq(ChangesetMapping.class), (Query) any())).thenReturn(new ChangesetMapping[]{changesetMapping});
final CommandsResults commandsResults = new CommandsResults();
commandsResults.addGlobalError("errorMsg");
when(smartcommitsServiceMock.doCommands(any(CommitCommands.class))).thenReturn(commandsResults);
final DefaultProgress progress = new DefaultProgress();
synchronizer.putProgress(repositoryMock, progress);
when(changesetServiceMock.getCommitUrl((Repository) any(), (Changeset) any())).thenReturn("http://host/path");
operation.run();
assertThat(progress.getSmartCommitErrors()).hasSize(1);
assertThat(progress.getSmartCommitErrors().get(0).getShortChangesetNode()).isEqualTo("abcd123");
assertThat(progress.getSmartCommitErrors().get(0).getCommitUrl()).isEqualTo("http://host/path");
assertThat(progress.getSmartCommitErrors().get(0).getError()).isEqualTo("errorMsg");
}
private ChangesetMapping sampleChangesetMapping()
{
ChangesetMapping changesetMappingMock = Mockito.mock(ChangesetMapping.class);
final RepositoryMapping repositoryMapping = sampleRepositoryMapping();
when(changesetMappingMock.getID()).thenReturn(CHANGESET_ID);
when(changesetMappingMock.getRepositories()).thenReturn(new RepositoryMapping[]{repositoryMapping});
when(changesetMappingMock.getAuthorEmail()).thenReturn("sam@example.com");
when(changesetMappingMock.getNode()).thenReturn("abcd1234efgh5678");
when(changesetMappingMock.getMessage()).thenReturn("HAD-4 #comment mighty comment");
return changesetMappingMock;
}
private RepositoryMapping sampleRepositoryMapping() {
RepositoryMapping repositoryMappingMock = Mockito.mock(RepositoryMapping.class);
when(repositoryMappingMock.getID()).thenReturn(REPOSITORY_ID);
when(repositoryMappingMock.isLinked()).thenReturn(true);
return repositoryMappingMock;
}
}
|
package org.eclipse.persistence.testing.tests.jpa.dynamic.employee;
//java eXtension imports
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
//JUnit4 imports
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeTrue;
//EclipseLink imports
import org.eclipse.persistence.descriptors.ClassDescriptor;
import org.eclipse.persistence.dynamic.DynamicType;
import org.eclipse.persistence.jpa.JpaHelper;
import org.eclipse.persistence.jpa.dynamic.JPADynamicHelper;
import org.eclipse.persistence.mappings.ManyToManyMapping;
import org.eclipse.persistence.mappings.OneToManyMapping;
import org.eclipse.persistence.mappings.OneToOneMapping;
import org.eclipse.persistence.sessions.server.Server;
//domain-specific (testing) imports
import org.eclipse.persistence.testing.tests.jpa.dynamic.DynamicTestHelper;
import static org.eclipse.persistence.testing.tests.jpa.dynamic.DynamicTestHelper.DYNAMIC_PERSISTENCE_NAME;
public class MappingConfigTestSuite {
//test fixtures
static EntityManagerFactory emf = null;
static JPADynamicHelper helper = null;
static Server serverSession = null;
@BeforeClass
public static void setUp() throws Exception {
emf = DynamicTestHelper.createEMF(DYNAMIC_PERSISTENCE_NAME);
boolean isMySQL = JpaHelper.getServerSession(emf).getDatasourcePlatform().
getClass().getName().contains("MySQLPlatform");
assumeTrue(isMySQL);
helper = new JPADynamicHelper(emf);
DynamicEmployeeSystem.buildProject(helper);
serverSession = JpaHelper.getServerSession(emf);
}
@AfterClass
public static void tearDown() {
serverSession.executeNonSelectingSQL("DROP TABLE D_SALARY");
serverSession.executeNonSelectingSQL("DROP TABLE D_PROJ_EMP");
serverSession.executeNonSelectingSQL("DROP TABLE D_PROJECT");
serverSession.executeNonSelectingSQL("DROP TABLE D_PHONE");
serverSession.executeNonSelectingSQL("DROP TABLE D_EMPLOYEE");
serverSession.executeNonSelectingSQL("DROP TABLE D_ADDRESS");
helper = null;
emf.close();
emf = null;
}
@Test
public void verifyServerSession() throws Exception {
assertNotNull(serverSession);
assertTrue(serverSession.isConnected());
assertTrue(serverSession.isServerSession());
assertEquals(DYNAMIC_PERSISTENCE_NAME, serverSession.getName());
}
@Test
public void verifyEmployeeDescriptor() throws Exception {
ClassDescriptor descriptor = serverSession.getDescriptorForAlias("Employee");
assertNotNull(descriptor);
assertEquals("Employee", descriptor.getAlias());
assertNull(descriptor.getInheritancePolicyOrNull());
// Address Mapping
OneToOneMapping addrMapping = (OneToOneMapping)descriptor.getMappingForAttributeName("address");
assertNotNull(addrMapping);
assertTrue(addrMapping.isPrivateOwned());
assertSame(serverSession.getDescriptorForAlias("Address"), addrMapping.getReferenceDescriptor());
// PhoneNumber Mapping
OneToManyMapping phoneMapping = (OneToManyMapping)descriptor.getMappingForAttributeName("phoneNumbers");
assertNotNull(phoneMapping);
assertTrue(phoneMapping.isPrivateOwned());
assertSame(serverSession.getDescriptorForAlias("PhoneNumber"), phoneMapping.getReferenceDescriptor());
// Manager Mapping
OneToOneMapping managerMapping = (OneToOneMapping)descriptor.getMappingForAttributeName("manager");
assertNotNull(managerMapping);
assertFalse(managerMapping.isPrivateOwned());
assertSame(descriptor, managerMapping.getReferenceDescriptor());
// Managed Employees Mapping
OneToManyMapping managedEmployeesMapping = (OneToManyMapping)descriptor.getMappingForAttributeName("managedEmployees");
assertNotNull(managedEmployeesMapping);
assertFalse(managedEmployeesMapping.isPrivateOwned());
assertSame(descriptor, managedEmployeesMapping.getReferenceDescriptor());
// Projects Mapping
ManyToManyMapping projectsMapping = (ManyToManyMapping)descriptor.getMappingForAttributeName("projects");
assertNotNull(projectsMapping);
assertFalse(projectsMapping.isPrivateOwned());
assertSame(serverSession.getDescriptorForAlias("Project"), projectsMapping.getReferenceDescriptor());
}
@Test
public void verifyAddressDescriptor() throws Exception {
ClassDescriptor descriptor = serverSession.getDescriptorForAlias("Address");
assertNotNull(descriptor);
assertEquals("Address", descriptor.getAlias());
assertNull(descriptor.getInheritancePolicyOrNull());
}
@Test
public void verifyPhoneNumberDescriptor() {
ClassDescriptor descriptor = serverSession.getDescriptorForAlias("PhoneNumber");
assertNotNull(descriptor);
assertEquals("PhoneNumber", descriptor.getAlias());
assertNull(descriptor.getInheritancePolicyOrNull());
}
@Test
public void verifyProjectDescriptor() {
ClassDescriptor descriptor = serverSession.getDescriptorForAlias("Project");
assertNotNull(descriptor);
assertEquals("Project", descriptor.getAlias());
assertNull(descriptor.getInheritancePolicyOrNull());
}
}
|
package com.graphhopper.jsprit.core.algorithm.recreate;
import com.graphhopper.jsprit.core.algorithm.state.InternalStates;
import com.graphhopper.jsprit.core.problem.cost.VehicleRoutingActivityCosts;
import com.graphhopper.jsprit.core.problem.cost.VehicleRoutingTransportCosts;
import com.graphhopper.jsprit.core.problem.misc.JobInsertionContext;
import com.graphhopper.jsprit.core.problem.solution.route.activity.End;
import com.graphhopper.jsprit.core.problem.solution.route.activity.TourActivity;
import com.graphhopper.jsprit.core.problem.solution.route.state.RouteAndActivityStateGetter;
import com.graphhopper.jsprit.core.problem.vehicle.Vehicle;
/**
* Calculates activity insertion costs locally, i.e. by comparing the additional costs of inserting the new activity k between
* activity i (prevAct) and activity j (nextAct).
* Additional costs are then basically calculated as delta c = c_ik + c_kj - c_ij.
* <p/>
* <p>Note that once time has an effect on costs, this class requires activity end times.
*
* @author stefan
*/
class LocalActivityInsertionCostsCalculator implements ActivityInsertionCostsCalculator {
private VehicleRoutingTransportCosts routingCosts;
private VehicleRoutingActivityCosts activityCosts;
private double activityCostsWeight = 1.;
private double solutionCompletenessRatio = 1.;
private RouteAndActivityStateGetter stateManager;
public LocalActivityInsertionCostsCalculator(VehicleRoutingTransportCosts routingCosts, VehicleRoutingActivityCosts actCosts, RouteAndActivityStateGetter stateManager) {
super();
this.routingCosts = routingCosts;
this.activityCosts = actCosts;
this.stateManager = stateManager;
}
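/*
 * Worked example for the formula in the class javadoc (illustrative numbers only, not taken from
 * the project): with transport costs c_ik = 4, c_kj = 3 and c_ij = 5, inserting activity k between
 * i and j adds delta c = c_ik + c_kj - c_ij = 4 + 3 - 5 = 2 to the route, before the activity-cost
 * and waiting-time terms computed in getCosts() below are weighted in.
 */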
@Override
public double getCosts(JobInsertionContext iFacts, TourActivity prevAct, TourActivity nextAct, TourActivity newAct, double depTimeAtPrevAct) {
double tp_costs_prevAct_newAct = routingCosts.getTransportCost(prevAct.getLocation(), newAct.getLocation(), depTimeAtPrevAct, iFacts.getNewDriver(), iFacts.getNewVehicle());
double tp_time_prevAct_newAct = routingCosts.getTransportTime(prevAct.getLocation(), newAct.getLocation(), depTimeAtPrevAct, iFacts.getNewDriver(), iFacts.getNewVehicle());
double newAct_arrTime = depTimeAtPrevAct + tp_time_prevAct_newAct;
double newAct_endTime = Math.max(newAct_arrTime, newAct.getTheoreticalEarliestOperationStartTime()) + activityCosts.getActivityDuration(newAct, newAct_arrTime, iFacts.getNewDriver(), iFacts.getNewVehicle());
double act_costs_newAct = activityCosts.getActivityCost(newAct, newAct_arrTime, iFacts.getNewDriver(), iFacts.getNewVehicle());
if (isEnd(nextAct) && !toDepot(iFacts.getNewVehicle())) return tp_costs_prevAct_newAct;
double tp_costs_newAct_nextAct = routingCosts.getTransportCost(newAct.getLocation(), nextAct.getLocation(), newAct_endTime, iFacts.getNewDriver(), iFacts.getNewVehicle());
double tp_time_newAct_nextAct = routingCosts.getTransportTime(newAct.getLocation(), nextAct.getLocation(), newAct_endTime, iFacts.getNewDriver(), iFacts.getNewVehicle());
double nextAct_arrTime = newAct_endTime + tp_time_newAct_nextAct;
double endTime_nextAct_new = Math.max(nextAct_arrTime, nextAct.getTheoreticalEarliestOperationStartTime()) + activityCosts.getActivityDuration(nextAct, nextAct_arrTime, iFacts.getNewDriver(), iFacts.getNewVehicle());
double act_costs_nextAct = activityCosts.getActivityCost(nextAct, nextAct_arrTime, iFacts.getNewDriver(), iFacts.getNewVehicle());
double totalCosts = tp_costs_prevAct_newAct + tp_costs_newAct_nextAct + solutionCompletenessRatio * activityCostsWeight * (act_costs_newAct + act_costs_nextAct);
double oldCosts = 0.;
if (iFacts.getRoute().isEmpty()) {
double tp_costs_prevAct_nextAct = routingCosts.getTransportCost(prevAct.getLocation(), nextAct.getLocation(), depTimeAtPrevAct, iFacts.getNewDriver(), iFacts.getNewVehicle());
oldCosts += tp_costs_prevAct_nextAct;
} else {
double tp_costs_prevAct_nextAct = routingCosts.getTransportCost(prevAct.getLocation(), nextAct.getLocation(), prevAct.getEndTime(), iFacts.getRoute().getDriver(), iFacts.getRoute().getVehicle());
double arrTime_nextAct = depTimeAtPrevAct + routingCosts.getTransportTime(prevAct.getLocation(), nextAct.getLocation(), prevAct.getEndTime(), iFacts.getRoute().getDriver(), iFacts.getRoute().getVehicle());
double endTime_nextAct_old = Math.max(arrTime_nextAct, nextAct.getTheoreticalEarliestOperationStartTime()) + activityCosts.getActivityDuration(nextAct, arrTime_nextAct, iFacts.getRoute().getDriver(),iFacts.getRoute().getVehicle());
double actCost_nextAct = activityCosts.getActivityCost(nextAct, arrTime_nextAct, iFacts.getRoute().getDriver(), iFacts.getRoute().getVehicle());
double endTimeDelay_nextAct = Math.max(0, endTime_nextAct_new - endTime_nextAct_old);
Double futureWaiting = stateManager.getActivityState(nextAct, iFacts.getRoute().getVehicle(), InternalStates.FUTURE_WAITING, Double.class);
if (futureWaiting == null) futureWaiting = 0.;
double waitingTime_savings_timeUnit = Math.min(futureWaiting, endTimeDelay_nextAct);
double waitingTime_savings = waitingTime_savings_timeUnit * iFacts.getRoute().getVehicle().getType().getVehicleCostParams().perWaitingTimeUnit;
oldCosts += solutionCompletenessRatio * activityCostsWeight * waitingTime_savings;
oldCosts += tp_costs_prevAct_nextAct + solutionCompletenessRatio * activityCostsWeight * actCost_nextAct;
}
return totalCosts - oldCosts;
}
private boolean toDepot(Vehicle newVehicle) {
return newVehicle.isReturnToDepot();
}
private boolean isEnd(TourActivity nextAct) {
return nextAct instanceof End;
}
public void setSolutionCompletenessRatio(double solutionCompletenessRatio) {
this.solutionCompletenessRatio = solutionCompletenessRatio;
}
}
|
package com.github.ksoichiro.android.observablescrollview.test;
import android.test.ActivityInstrumentationTestCase2;
import android.util.DisplayMetrics;
import android.util.TypedValue;
import android.view.View;
import android.widget.FrameLayout;
import android.widget.ListAdapter;
import android.widget.SimpleAdapter;
import com.github.ksoichiro.android.observablescrollview.ObservableGridView;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
public class HeaderGridViewActivityTest extends ActivityInstrumentationTestCase2<HeaderGridViewActivity> {
private HeaderGridViewActivity activity;
private ObservableGridView scrollable;
public HeaderGridViewActivityTest() {
super(HeaderGridViewActivity.class);
}
@Override
protected void setUp() throws Exception {
super.setUp();
setActivityInitialTouchMode(true);
activity = getActivity();
scrollable = (ObservableGridView) activity.findViewById(R.id.scrollable);
}
public void testScroll() throws Throwable {
UiTestUtils.swipeVertically(this, scrollable, UiTestUtils.Direction.UP);
getInstrumentation().waitForIdleSync();
UiTestUtils.swipeVertically(this, scrollable, UiTestUtils.Direction.DOWN);
getInstrumentation().waitForIdleSync();
}
public void testSaveAndRestoreInstanceState() throws Throwable {
UiTestUtils.saveAndRestoreInstanceState(this, activity);
testScroll();
}
public void testScrollVerticallyTo() throws Throwable {
final DisplayMetrics metrics = activity.getResources().getDisplayMetrics();
runTestOnUiThread(new Runnable() {
@Override
public void run() {
scrollable.scrollVerticallyTo((int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, 48, metrics));
}
});
getInstrumentation().waitForIdleSync();
runTestOnUiThread(new Runnable() {
@Override
public void run() {
scrollable.scrollVerticallyTo(0);
}
});
getInstrumentation().waitForIdleSync();
}
public void testHeaderViewFeatures() throws Throwable {
runTestOnUiThread(new Runnable() {
@Override
public void run() {
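// the activity registers one header view up front; exercise the wrapping HeaderViewGridAdapter,
// then remove that header and add a fresh one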
assertEquals(1, scrollable.getHeaderViewCount());
ListAdapter adapter = scrollable.getAdapter();
assertTrue(adapter instanceof ObservableGridView.HeaderViewGridAdapter);
ObservableGridView.HeaderViewGridAdapter hvgAdapter = (ObservableGridView.HeaderViewGridAdapter) adapter;
assertEquals(1, hvgAdapter.getHeadersCount());
assertNotNull(hvgAdapter.getWrappedAdapter());
assertTrue(hvgAdapter.areAllItemsEnabled());
assertFalse(hvgAdapter.isEmpty());
Object data = hvgAdapter.getItem(0);
assertNull(data);
assertNotNull(hvgAdapter.getView(0, null, scrollable));
assertNotNull(hvgAdapter.getFilter());
assertTrue(scrollable.removeHeaderView(activity.headerView));
assertEquals(0, scrollable.getHeaderViewCount());
assertEquals(0, hvgAdapter.getHeadersCount());
assertFalse(scrollable.removeHeaderView(activity.headerView));
activity.headerView = new View(activity);
final int flexibleSpaceImageHeight = activity.getResources().getDimensionPixelSize(R.dimen.flexible_space_image_height);
FrameLayout.LayoutParams lp = new FrameLayout.LayoutParams(FrameLayout.LayoutParams.MATCH_PARENT,
flexibleSpaceImageHeight);
activity.headerView.setLayoutParams(lp);
// This is required to disable header's list selector effect
activity.headerView.setClickable(true);
scrollable.addHeaderView(activity.headerView);
}
});
}
public void testHeaderViewGridExceptions() throws Throwable {
runTestOnUiThread(new Runnable() {
@Override
public void run() {
try {
new ObservableGridView.HeaderViewGridAdapter(null, null);
fail();
} catch (IllegalArgumentException e) {
assertEquals("headerViewInfos cannot be null", e.getMessage());
}
ListAdapter adapter = scrollable.getAdapter();
ObservableGridView.HeaderViewGridAdapter hvgAdapter = (ObservableGridView.HeaderViewGridAdapter) adapter;
try {
hvgAdapter.setNumColumns(0);
fail();
} catch (IllegalArgumentException e) {
assertEquals("Number of columns must be 1 or more", e.getMessage());
}
ArrayList<ObservableGridView.FixedViewInfo> headerViewInfos = new ArrayList<>();
ObservableGridView.HeaderViewGridAdapter adapter1 = new ObservableGridView.HeaderViewGridAdapter(headerViewInfos, null);
assertTrue(adapter1.isEmpty());
try {
adapter1.isEnabled(-1);
fail();
} catch (ArrayIndexOutOfBoundsException ignore) {
}
try {
adapter1.getItem(-1);
fail();
} catch (ArrayIndexOutOfBoundsException ignore) {
}
try {
adapter1.getView(0, null, null);
fail();
} catch (IllegalArgumentException e) {
assertEquals("Parent cannot be null", e.getMessage());
}
try {
adapter1.getView(-1, null, scrollable);
fail();
} catch (ArrayIndexOutOfBoundsException ignore) {
}
}
});
}
public void testHeaderViewGridAdapter() throws Throwable {
runTestOnUiThread(new Runnable() {
@Override
public void run() {
try {
ObservableGridView.HeaderViewGridAdapter adapter =
new ObservableGridView.HeaderViewGridAdapter(null, null);
fail();
} catch (IllegalArgumentException ignore) {
}
}
});
runTestOnUiThread(new Runnable() {
@Override
public void run() {
ArrayList<ObservableGridView.FixedViewInfo> list = new ArrayList<>();
Map<String, String> map = new LinkedHashMap<>();
map.put("text", "A");
List<Map<String, ?>> data = new ArrayList<>();
data.add(map);
ObservableGridView.HeaderViewGridAdapter adapter =
new ObservableGridView.HeaderViewGridAdapter(
list,
new SimpleAdapter(
activity,
data,
android.R.layout.simple_list_item_1,
new String[] {"text"},
new int[]{android.R.id.text1}));
assertFalse(adapter.removeHeader(null));
assertEquals(1, adapter.getCount());
}
});
runTestOnUiThread(new Runnable() {
@Override
public void run() {
ArrayList<ObservableGridView.FixedViewInfo> list = new ArrayList<>();
ObservableGridView.HeaderViewGridAdapter adapter =
new ObservableGridView.HeaderViewGridAdapter(
list,
null);
assertEquals(0, adapter.getCount());
try {
adapter.isEnabled(1);
fail();
} catch (ArrayIndexOutOfBoundsException ignore) {
}
}
});
}
}
|
package net.beaconcontroller.devicemanager.internal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import net.beaconcontroller.core.IBeaconProvider;
import net.beaconcontroller.core.IOFMessageListener;
import net.beaconcontroller.core.IOFSwitch;
import net.beaconcontroller.core.IOFSwitchListener;
import net.beaconcontroller.devicemanager.Device;
import net.beaconcontroller.devicemanager.IDeviceManager;
import net.beaconcontroller.packet.IPv4;
import net.beaconcontroller.topology.ITopology;
import net.beaconcontroller.topology.IdPortTuple;
import org.openflow.protocol.OFMatch;
import org.openflow.protocol.OFMessage;
import org.openflow.protocol.OFPacketIn;
import org.openflow.protocol.OFPhysicalPort.OFPortConfig;
import org.openflow.protocol.OFPhysicalPort.OFPortState;
import org.openflow.protocol.OFPortStatus;
import org.openflow.protocol.OFPortStatus.OFPortReason;
import org.openflow.protocol.OFType;
import org.openflow.util.HexString;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author David Erickson (daviderickson@cs.stanford.edu)
*
*/
public class DeviceManagerImpl implements IDeviceManager, IOFMessageListener, IOFSwitchListener {
protected static Logger log = LoggerFactory.getLogger(DeviceManagerImpl.class);
protected IBeaconProvider beaconProvider;
protected Map<Integer, Device> dataLayerAddressDeviceMap;
protected ReentrantReadWriteLock lock;
protected Map<Integer, Device> networkLayerAddressDeviceMap;
protected Map<Long, Set<Device>> switchDeviceMap;
protected Map<IdPortTuple, Set<Device>> switchPortDeviceMap;
protected ITopology topology;
public DeviceManagerImpl() {
this.dataLayerAddressDeviceMap = new ConcurrentHashMap<Integer, Device>();
this.networkLayerAddressDeviceMap = new ConcurrentHashMap<Integer, Device>();
this.switchDeviceMap = new ConcurrentHashMap<Long, Set<Device>>();
this.switchPortDeviceMap = new ConcurrentHashMap<IdPortTuple, Set<Device>>();
this.lock = new ReentrantReadWriteLock();
}
public void startUp() {
beaconProvider.addOFMessageListener(OFType.PACKET_IN, this);
beaconProvider.addOFMessageListener(OFType.PORT_STATUS, this);
}
public void shutDown() {
beaconProvider.removeOFMessageListener(OFType.PACKET_IN, this);
beaconProvider.removeOFMessageListener(OFType.PORT_STATUS, this);
}
@Override
public String getName() {
return "devicemanager";
}
public Command handlePortStatus(IOFSwitch sw, OFPortStatus ps) {
// if ps is a delete, or a modify where the port is down or configured down
if ((byte)OFPortReason.OFPPR_DELETE.ordinal() == ps.getReason() ||
((byte)OFPortReason.OFPPR_MODIFY.ordinal() == ps.getReason() &&
(((OFPortConfig.OFPPC_PORT_DOWN.getValue() & ps.getDesc().getConfig()) > 0) ||
((OFPortState.OFPPS_LINK_DOWN.getValue() & ps.getDesc().getState()) > 0)))) {
IdPortTuple id = new IdPortTuple(sw.getId(), ps.getDesc().getPortNumber());
lock.writeLock().lock();
try {
if (switchPortDeviceMap.containsKey(id)) {
// Remove the devices
for (Device device : switchPortDeviceMap.get(id)) {
delDevice(device);
// Remove the device from the switch->device mapping
switchDeviceMap.get(id.getId()).remove(device);
}
// Remove this switch:port mapping
switchPortDeviceMap.remove(id);
}
} finally {
lock.writeLock().unlock();
}
}
return Command.CONTINUE;
}
/**
* Removes the specified device from data layer and network layer maps.
* Does NOT remove the device from switch and switch:port level maps.
* Must be called from within a write lock.
* @param device
*/
protected void delDevice(Device device) {
dataLayerAddressDeviceMap.remove(Arrays.hashCode(device.getDataLayerAddress()));
if (!device.getNetworkAddresses().isEmpty()) {
for (Integer nwAddress : device.getNetworkAddresses()) {
networkLayerAddressDeviceMap.remove(nwAddress);
}
}
if (log.isDebugEnabled()) {
log.debug("Removed device {}", device);
}
}
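/*
 * Typical call pattern (mirrors handlePortStatus() above and removedSwitch() below): the caller
 * takes the write lock, calls delDevice(), and maintains the switch-level maps itself, e.g.
 *
 *     lock.writeLock().lock();
 *     try {
 *         delDevice(device);                        // data-layer and network-layer maps
 *         switchDeviceMap.get(swId).remove(device); // switch-level map stays the caller's job
 *     } finally {
 *         lock.writeLock().unlock();
 *     }
 */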
@Override
public Command receive(IOFSwitch sw, OFMessage msg) {
if (msg instanceof OFPortStatus) {
return handlePortStatus(sw, (OFPortStatus) msg);
}
OFPacketIn pi = (OFPacketIn) msg;
OFMatch match = new OFMatch();
match.loadFromPacket(pi.getPacketData(), pi.getInPort());
// if the source is multicast/broadcast ignore it
if ((match.getDataLayerSource()[0] & 0x1) != 0)
return Command.CONTINUE;
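// devices are indexed by the hash of the source MAC (dataLayerAddressDeviceMap) and by
// source IP (networkLayerAddressDeviceMap)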
Integer dlAddrHash = Arrays.hashCode(match.getDataLayerSource());
Integer nwSrc = match.getNetworkSource();
Device device = null;
Device nwDevice = null;
lock.readLock().lock();
try {
device = dataLayerAddressDeviceMap.get(dlAddrHash);
nwDevice = networkLayerAddressDeviceMap.get(nwSrc);
} finally {
lock.readLock().unlock();
}
IdPortTuple ipt = new IdPortTuple(sw.getId(), pi.getInPort());
if (!topology.isInternal(ipt)) {
if (device != null) {
// Write lock is expensive, check if we have an update first
boolean updateNeeded = false;
boolean movedLocation = false;
boolean addedNW = false;
boolean nwChanged = false;
if ((sw.getId() != device.getSwId().longValue())
|| (pi.getInPort() != device.getSwPort().shortValue())) {
movedLocation = true;
}
if (nwDevice == null && nwSrc != 0) {
addedNW = true;
} else if (nwDevice != null && !device.equals(nwDevice)) {
nwChanged = true;
}
if (movedLocation || addedNW || nwChanged) {
updateNeeded = true;
}
if (updateNeeded) {
// Update everything needed during one write lock
lock.writeLock().lock();
try {
// Update both mappings once so no duplicated work later
if (movedLocation) {
delSwitchDeviceMapping(device.getSwId(), device);
delSwitchPortDeviceMapping(
new IdPortTuple(device.getSwId(),
device.getSwPort()), device);
if (log.isDebugEnabled()) {
log.debug(
"Device {} moved to switch: {} port: {}",
new Object[] {
device,
HexString.toHexString(sw
.getId()),
0xffff & pi.getInPort() });
}
device.setSwId(sw.getId());
device.setSwPort(pi.getInPort());
addSwitchDeviceMapping(sw.getId(), device);
addSwitchPortDeviceMapping(
new IdPortTuple(device.getSwId(),
device.getSwPort()), device);
}
if (addedNW) {
// add the address
device.getNetworkAddresses().add(nwSrc);
this.networkLayerAddressDeviceMap.put(nwSrc, device);
if (log.isDebugEnabled()) {
log.debug("Added IP {} to MAC {}",
IPv4.fromIPv4Address(nwSrc),
HexString.toHexString(device.getDataLayerAddress()));
}
} else if (nwChanged) {
// IP changed MACs.. really rare, potentially an error
nwDevice.getNetworkAddresses().remove(nwSrc);
device.getNetworkAddresses().add(nwSrc);
this.networkLayerAddressDeviceMap.put(nwSrc, device);
if (log.isWarnEnabled()) {
log.warn(
"IP Address {} changed from MAC {} to {}",
new Object[] {
IPv4.fromIPv4Address(nwSrc),
HexString.toHexString(nwDevice
.getDataLayerAddress()),
HexString.toHexString(device
.getDataLayerAddress()) });
}
}
} finally {
lock.writeLock().unlock();
}
}
} else {
device = new Device();
device.setDataLayerAddress(match.getDataLayerSource());
device.setSwId(sw.getId());
device.setSwPort(pi.getInPort());
lock.writeLock().lock();
try {
this.dataLayerAddressDeviceMap.put(dlAddrHash, device);
if (nwSrc != 0) {
device.getNetworkAddresses().add(nwSrc);
this.networkLayerAddressDeviceMap.put(nwSrc, device);
}
addSwitchDeviceMapping(device.getSwId(), device);
addSwitchPortDeviceMapping(new IdPortTuple(
device.getSwId(), device.getSwPort()), device);
if (nwDevice != null) {
nwDevice.getNetworkAddresses().remove(nwSrc);
if (log.isWarnEnabled()) {
log.warn(
"IP Address {} changed from MAC {} to {}",
new Object[] {
IPv4.fromIPv4Address(nwSrc),
HexString.toHexString(nwDevice
.getDataLayerAddress()),
HexString.toHexString(device
.getDataLayerAddress()) });
}
}
} finally {
lock.writeLock().unlock();
}
log.debug("New Device: {}", device);
}
}
return Command.CONTINUE;
}
protected void addSwitchDeviceMapping(Long id, Device device) {
if (switchDeviceMap.get(id) == null) {
switchDeviceMap.put(id, new HashSet<Device>());
}
switchDeviceMap.get(id).add(device);
}
protected void delSwitchDeviceMapping(Long id, Device device) {
switchDeviceMap.get(id).remove(device);
if (switchDeviceMap.get(id).isEmpty()) {
switchDeviceMap.remove(id);
}
}
protected void addSwitchPortDeviceMapping(IdPortTuple id, Device device) {
if (switchPortDeviceMap.get(id) == null) {
switchPortDeviceMap.put(id, new HashSet<Device>());
}
switchPortDeviceMap.get(id).add(device);
}
protected void delSwitchPortDeviceMapping(IdPortTuple id, Device device) {
switchPortDeviceMap.get(id).remove(device);
if (switchPortDeviceMap.get(id).isEmpty()) {
switchPortDeviceMap.remove(id);
}
}
@Override
public Device getDeviceByNetworkLayerAddress(Integer address) {
lock.readLock().lock();
try {
return this.networkLayerAddressDeviceMap.get(address);
} finally {
lock.readLock().unlock();
}
}
/**
* @param beaconProvider the beaconProvider to set
*/
public void setBeaconProvider(IBeaconProvider beaconProvider) {
this.beaconProvider = beaconProvider;
}
/**
* @param topology the topology to set
*/
public void setTopology(ITopology topology) {
this.topology = topology;
}
@Override
public Device getDeviceByDataLayerAddress(Integer hashCode) {
lock.readLock().lock();
try {
return this.dataLayerAddressDeviceMap.get(hashCode);
} finally {
lock.readLock().unlock();
}
}
@Override
public Device getDeviceByDataLayerAddress(byte[] address) {
lock.readLock().lock();
try {
return this.getDeviceByDataLayerAddress(Arrays.hashCode(address));
} finally {
lock.readLock().unlock();
}
}
@Override
public List<Device> getDevices() {
lock.readLock().lock();
try {
return new ArrayList<Device>(this.dataLayerAddressDeviceMap.values());
} finally {
lock.readLock().unlock();
}
}
@Override
public void addedSwitch(IOFSwitch sw) {
}
@Override
public void removedSwitch(IOFSwitch sw) {
// remove all devices attached to this switch
lock.writeLock().lock();
try {
Long id = sw.getId();
if (switchDeviceMap.get(id) != null) {
// Remove all devices on this switch
for (Device device : switchDeviceMap.get(id)) {
delDevice(device);
}
switchDeviceMap.remove(id);
// Remove all switch:port mappings where the switch is sw
for (Iterator<Map.Entry<IdPortTuple, Set<Device>>> it = switchPortDeviceMap
.entrySet().iterator(); it.hasNext();) {
Map.Entry<IdPortTuple, Set<Device>> entry = it.next();
if (entry.getKey().getId().equals(id)) {
it.remove();
}
}
}
} finally {
lock.writeLock().unlock();
}
}
}
|
package org.mwc.debrief.limpet_integration.measured_data;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.core.commands.ExecutionException;
import org.eclipse.core.commands.operations.IUndoableOperation;
import org.eclipse.core.runtime.IAdaptable;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.january.dataset.DoubleDataset;
import org.eclipse.january.dataset.Maths;
import org.eclipse.january.metadata.AxesMetadata;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.action.IAction;
import org.eclipse.jface.action.IMenuManager;
import org.eclipse.jface.action.Separator;
import org.mwc.cmap.core.CorePlugin;
import org.mwc.cmap.core.operations.CMAPOperation;
import org.mwc.cmap.core.property_support.RightClickSupport.RightClickContextItemGenerator;
import org.mwc.debrief.core.providers.measured_data.DatasetWrapper;
import Debrief.Wrappers.Extensions.Measurements.DataFolder;
import Debrief.Wrappers.Extensions.Measurements.TimeSeriesCore;
import Debrief.Wrappers.Extensions.Measurements.TimeSeriesDatasetDouble;
import MWC.GUI.Editable;
import MWC.GUI.Layer;
import MWC.GUI.Layers;
public class MeasuredDataOperations implements RightClickContextItemGenerator
{
protected static class DatasetsOperation extends CMAPOperation
{
final private Calculate _operation;
final private List<TimeSeriesDatasetDouble> _items;
private TimeSeriesCore _newData;
private DataFolder _target;
final Layers _theLayers;
private String _units;
private List<Layer> _fParents;
public DatasetsOperation(String title, Calculate operation,
List<TimeSeriesDatasetDouble> fWrappers, List<Layer> fParents,
Layers theLayers, final String units)
{
super(title);
_operation = operation;
_items = fWrappers;
_theLayers = theLayers;
_units = units;
_fParents = fParents;
}
@Override
public IStatus execute(IProgressMonitor monitor, IAdaptable info)
throws ExecutionException
{
// calculate the dataset
_newData = calculate(_operation, _items);
if (_newData != null)
{
// sort out the destination
_target = getTarget();
// and store it
_target.add(_newData);
// share the good news
fireUpdated();
return Status.OK_STATUS;
}
else
{
CorePlugin.logError(Status.WARNING,
"Failed to perform calculation on measured data", null);
return Status.CANCEL_STATUS;
}
}
public TimeSeriesCore calculate(Calculate _operation,
List<TimeSeriesDatasetDouble> items)
{
// perform the calculation
DoubleDataset dResult = _operation.calculate(items);
// put the times back in
AxesMetadata times =
items.get(0).getDataset().getFirstMetadata(AxesMetadata.class);
dResult.addMetadata(times);
TimeSeriesDatasetDouble res =
new TimeSeriesDatasetDouble(dResult, _units);
// wrap it
return res;
}
private DataFolder getTarget()
{
DataFolder target = _items.get(0).getParent();
if (_items.size() == 2)
{
DataFolder folder2 = _items.get(1).getParent();
if (target.equals(folder2))
{
// ok, keep the target, they're both in
// the same directory
}
else
{
// in different folders, move up a level
if (target.getParent() != null)
{
target = target.getParent();
}
else
{
// ok, no parent. keep it in this folder
}
}
}
return target;
}
@Override
public boolean canRedo()
{
return _newData != null;
}
@Override
public boolean canUndo()
{
return _newData != null;
}
@Override
public IStatus undo(IProgressMonitor monitor, IAdaptable info)
throws ExecutionException
{
// ok, delete the dataset
_target.remove(_newData);
// share the good news
fireUpdated();
return Status.OK_STATUS;
}
@Override
public IStatus redo(IProgressMonitor monitor, IAdaptable info)
throws ExecutionException
{
// ok, put the dataset back into the parent
_target.add(_newData);
// share the good news
fireUpdated();
return Status.OK_STATUS;
}
void fireUpdated()
{
// work out whether all of the modified datasets share a single parent layer
Layer singleLayer = null;
for (Layer thisLayer : _fParents)
{
if (singleLayer == null)
{
singleLayer = thisLayer;
}
else if (thisLayer != singleLayer)
{
singleLayer = null;
break;
}
}
_theLayers.fireExtended(null, singleLayer);
}
}
/**
* perform an operation on a set of time series datasets
*
* @author ian
*
*/
private interface Calculate
{
DoubleDataset calculate(List<TimeSeriesDatasetDouble> items);
}
/**
* operate on two datasets
*
* @author ian
*
*/
abstract private class Operation2 implements Calculate
{
/**
* do calculation on these two datasets
*
* @param val1
* @param val2
* @return
*/
abstract DoubleDataset calc(DoubleDataset val1, DoubleDataset val2);
abstract String nameFor(String one, String two);
private String nameFor(TimeSeriesCore one, TimeSeriesCore two)
{
// are they in the same folder?
final String sOne;
final String sTwo;
if (one.getParent().equals(two.getParent()))
{
// ok, in the same folder, we don't need more metadata
sOne = one.getName();
sTwo = two.getName();
}
else
{
sOne = one.getPath();
sTwo = two.getPath();
}
return nameFor(sOne, sTwo);
}
@Override
public DoubleDataset calculate(List<TimeSeriesDatasetDouble> items)
{
final TimeSeriesDatasetDouble ts1 = items.get(0);
final TimeSeriesDatasetDouble ts2 = items.get(1);
final DoubleDataset d1 = (DoubleDataset) ts1.getDataset();
final DoubleDataset d2 = (DoubleDataset) ts2.getDataset();
final DoubleDataset first;
final DoubleDataset second;
if (d1.getSize() == d2.getSize())
{
first = d1;
second = d2;
}
else
{
// the datasets differ in length; interpolating them onto a common time base isn't
// implemented here, so give up and leave the result null
first = null;
second = null;
}
final DoubleDataset res;
if (first != null)
{
// get the new dataset
res = calc(first, second);
// and the name
res.setName(nameFor(ts1, ts2));
}
else
{
res = null;
}
return res;
}
}
/**
* operate on a single dataset
*
* @author ian
*
*/
abstract private class Operation1 implements Calculate
{
@Override
public DoubleDataset calculate(List<TimeSeriesDatasetDouble> items)
{
DoubleDataset d1 = (DoubleDataset) items.get(0).getDataset();
DoubleDataset res = calc(d1);
return res;
}
/**
* do a calculation with a single dataset
*
* @param val1
* @return
*/
abstract DoubleDataset calc(DoubleDataset val1);
}
@Override
public void generate(final IMenuManager parent, final Layers theLayers,
final Layer[] parentLayers, final Editable[] subjects)
{
List<TimeSeriesCore> timeSeries = null;
List<Editable> wrappers = null;
List<Layer> parents = null;
// ok, let's have a look
for (int i = 0; i < subjects.length; i++)
{
Editable thisE = subjects[i];
if (thisE instanceof DatasetWrapper)
{
DatasetWrapper dw = (DatasetWrapper) thisE;
TimeSeriesCore core = dw.getDataset();
if (timeSeries == null)
{
timeSeries = new ArrayList<TimeSeriesCore>();
wrappers = new ArrayList<Editable>();
parents = new ArrayList<Layer>();
}
timeSeries.add(core);
wrappers.add(dw);
parents.add(parentLayers[i]);
}
}
// success?
if (timeSeries != null)
{
List<IAction> items = new ArrayList<IAction>();
// extract the datasets
final List<TimeSeriesDatasetDouble> fWrappers =
new ArrayList<TimeSeriesDatasetDouble>();
final List<Editable> fEditables = new ArrayList<Editable>();
final List<Layer> fParents = new ArrayList<Layer>();
for (int i = 0; i < timeSeries.size(); i++)
{
TimeSeriesCore dataset = timeSeries.get(i);
if (dataset instanceof TimeSeriesDatasetDouble)
{
fWrappers.add((TimeSeriesDatasetDouble) dataset);
fEditables.add(wrappers.get(i));
fParents.add(parents.get(i));
}
}
// ok, let's have a go.
if (fWrappers.size() == 2)
{
// ok, generate addition and subtraction
Operation2 doAdd = new Operation2()
{
@Override
public DoubleDataset calc(DoubleDataset val1, DoubleDataset val2)
{
final DoubleDataset res =
(DoubleDataset) Maths.add(val1, val2, null);
return res;
}
@Override
String nameFor(String one, String two)
{
return "Sum of " + one + " and " + two;
}
};
items
.add(new DoAction("Add datasets", new DatasetsOperation("Do add",
doAdd, fWrappers, fParents, theLayers, fWrappers.get(0)
.getUnits())));
Operation2 doSubtract = new Operation2()
{
@Override
public DoubleDataset calc(DoubleDataset val1, DoubleDataset val2)
{
final DoubleDataset res =
(DoubleDataset) Maths.subtract(val1, val2, null);
return res;
}
@Override
String nameFor(String one, String two)
{
return one + " minus " + two;
}
};
items.add(new DoAction("Subtract datasets", new DatasetsOperation(
"Do add", doSubtract, fWrappers, fParents, theLayers, fWrappers
.get(0).getUnits())));
// multiply and divide
Operation2 doMultiply = new Operation2()
{
@Override
public DoubleDataset calc(DoubleDataset val1, DoubleDataset val2)
{
final DoubleDataset res =
(DoubleDataset) Maths.multiply(val1, val2, null);
return res;
}
@Override
String nameFor(String one, String two)
{
return "Product of " + one + " and " + two;
}
};
items.add(new DoAction("Multiply datasets", new DatasetsOperation(
"Do add", doMultiply, fWrappers, fParents, theLayers, fWrappers
.get(0).getUnits()
+ "x" + fWrappers.get(1).getUnits())));
// divide
Operation2 doDivide = new Operation2()
{
@Override
public DoubleDataset calc(DoubleDataset val1, DoubleDataset val2)
{
final DoubleDataset res =
(DoubleDataset) Maths.divide(val1, val2, null);
return res;
}
@Override
String nameFor(String one, String two)
{
return one + " / " + two;
}
};
items.add(new DoAction("Divide datasets", new DatasetsOperation(
"Do add", doDivide, fWrappers, fParents, theLayers, fWrappers
.get(0).getUnits()
+ "/" + fWrappers.get(1).getUnits())));
}
// ok, let's have a go.
if (fWrappers.size() == 1)
{
// abs
Operation1 doAbs = new Operation1()
{
@Override
public DoubleDataset calc(DoubleDataset val1)
{
final DoubleDataset res = (DoubleDataset) Maths.abs(val1, null);
res.setName("Absolute of " + val1.getName());
return res;
}
};
items.add(new DoAction("Calculate Absolute", new DatasetsOperation(
"Do add", doAbs, fWrappers, fParents, theLayers, fWrappers.get(0)
.getUnits())));
// inv
Operation1 doInverse = new Operation1()
{
@Override
public DoubleDataset calc(DoubleDataset val1)
{
final DoubleDataset res =
(DoubleDataset) Maths.divide(1, val1, null);
res.setName("Inverse of " + val1.getName());
return res;
}
};
items.add(new DoAction("Calculate inverse", new DatasetsOperation(
"Do add", doInverse, fWrappers, fParents, theLayers, "1 / "
+ fWrappers.get(0).getUnits())));
// sqrt
Operation1 doSqrt = new Operation1()
{
@Override
public DoubleDataset calc(DoubleDataset val1)
{
final DoubleDataset res = (DoubleDataset) Maths.sqrt(val1, null);
res.setName("Square root of " + val1.getName());
return res;
}
};
items.add(new DoAction("Calculate square root", new DatasetsOperation(
"Do add", doSqrt, fWrappers, fParents, theLayers, "sqrt "
+ fWrappers.get(0).getUnits())));
// cbrt
Operation1 doCbrt = new Operation1()
{
@Override
public DoubleDataset calc(DoubleDataset val1)
{
final DoubleDataset res = (DoubleDataset) Maths.cbrt(val1, null);
res.setName("Cube Root of " + val1.getName());
return res;
}
};
items.add(new DoAction("Calculate cube root", new DatasetsOperation(
"Do add", doCbrt, fWrappers, fParents, theLayers, "cbrt "
+ fWrappers.get(0).getUnits())));
}
// create any?
if (!items.isEmpty())
{
parent.add(new Separator("Calculations"));
// and add them all
for (IAction item : items)
{
parent.add(item);
}
}
}
}
/**
* wrap the process of calling an action
*
* @author ian
*
*/
protected class DoAction extends Action
{
final private IUndoableOperation _theAction;
public DoAction(final String title, final IUndoableOperation theAction)
{
super(title);
_theAction = theAction;
}
public void run()
{
CorePlugin.run(_theAction);
}
}
}
|
package dk.statsbiblioteket.util.xml;
import dk.statsbiblioteket.util.qa.QAInfo;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.w3c.dom.Document;
import javax.xml.transform.*;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
import java.io.*;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
/**
* Helpers for transforming XML using XSLTs. All methods are Thread-safe,
* as long as Threads do not share the same Transformer.
* </p><p>
* Most of the helpers have an option for ignoring XML namespaces. Setting this
* to true strips namespaces from the input by doing a full DOM parse.
* Besides being fairly expensive in terms of processing time and temporary
* memory allocation, this is also a bad practice with regard to QA of the
* input.
* </p><p>
* Note: Transformer errors and exceptions are thrown when they occur, while
* warnings are logged to {@link #warnlog}.
*/
@QAInfo(level = QAInfo.Level.NORMAL,
state = QAInfo.State.IN_DEVELOPMENT,
author = "te")
public class XSLT {
private static Log warnlog = LogFactory.getLog(XSLT.class.getName() + "#warnings");
private static Log log = LogFactory.getLog(XSLT.class);
/**
* Creates a new transformer based on the given XSLTLocation.
*
* @param xslt the location of the XSLT.
* @return a Transformer based on the given XSLT.
* @throws javax.xml.transform.TransformerException thrown if for some
* reason a Transformer could not be instantiated.
* This is normally due to problems with the {@code xslt} URL
* @see {@link #getLocalTransformer} for reusing Transformers.
*/
public static Transformer createTransformer(URL xslt) throws TransformerException {
log.debug("Requesting and compiling XSLT from '" + xslt + "'");
TransformerFactory tfactory = TransformerFactory.newInstance();
InputStream in = null;
Transformer transformer;
try {
if (xslt == null) {
throw new NullPointerException("xslt URL is null");
}
in = xslt.openStream();
transformer = tfactory.newTransformer(new StreamSource(in, xslt.toString()));
transformer.setErrorListener(getErrorListener());
} catch (TransformerException e) {
throw new TransformerException(String.format(
"Unable to instantiate Transformer, a system configuration error for XSLT at '%s'", xslt), e);
} catch (MalformedURLException e) {
throw new TransformerException(String.format("The URL to the XSLT is not a valid URL: '%s'", xslt), e);
} catch (IOException e) {
throw new TransformerException(String.format("Unable to open the XSLT resource due to IOException '%s'",
xslt), e);
} catch (Exception e) {
throw new TransformerException(String.format("Unable to open the XSLT resource '%s'", xslt), e);
} finally {
try {
if (in != null) {
in.close();
}
} catch (IOException e) {
log.warn("Non-fatal IOException while closing stream to '" + xslt + "'");
}
}
return transformer;
}
private static ErrorListener ERRORLISTENER; // Singleton
private static ErrorListener getErrorListener() {
if (ERRORLISTENER == null) {
ERRORLISTENER = new ErrorListener() {
@Override
public void warning(TransformerException exception)
throws TransformerException {
warnlog.debug("A transformer warning occured", exception);
}
@Override
public void error(TransformerException exception)
throws TransformerException {
throw new TransformerException("A Transformer error occured", exception);
}
@Override
public void fatalError(TransformerException exception)
throws TransformerException {
throw new TransformerException("A Transformer exception occurred", exception);
}
};
}
return ERRORLISTENER;
}
private static ThreadLocal<Map<String, Transformer>> localMapCache = createLocalMapCache();
private static ThreadLocal<Map<String, Transformer>> createLocalMapCache() {
return new ThreadLocal<Map<String, Transformer>>() {
@Override
protected Map<String, Transformer> initialValue() {
return new HashMap<String, Transformer>();
}
};
}
/**
* Create or re-use a Transformer for the given xsltLocation.
* The Transformer is {@link ThreadLocal}, so the method is thread-safe.
* </p><p>
* Warning: A list is maintained for all XSLTs so changes to the xslt will
* not be reflected. Call {@link #clearTransformerCache} to clear
* the list.
*
* @param xslt the location of the XSLT.
* @return a Transformer using the given XSLT.
* @throws TransformerException if the Transformer could not be constructed.
*/
public static Transformer getLocalTransformer(URL xslt) throws TransformerException {
return getLocalTransformer(xslt, null);
}
/**
* Create or re-use a Transformer for the given xsltLocation.
* The Transformer is {@link ThreadLocal}, so the method is thread-safe.
* </p><p>
* Warning: A list is maintained for all XSLTs so changes to the xslt will
* not be reflected. Call {@link #clearTransformerCache} to clear
* the list.
*
* @param xslt the location of the XSLT.
* @param parameters for the Transformer. The keys must be Strings. If the map is null, it will be ignored.
* @return a Transformer using the given XSLT.
* @throws TransformerException if the Transformer could not be constructed.
*/
public static Transformer getLocalTransformer(URL xslt, Map parameters) throws TransformerException {
if (xslt == null) {
throw new NullPointerException("The xslt was null");
}
Map<String, Transformer> map = localMapCache.get();
Transformer transformer = map.get(xslt.toString());
if (transformer == null) {
transformer = createTransformer(xslt);
map.put(xslt.toString(), transformer);
}
transformer.clearParameters(); // Is this safe? Any defaults lost?
if (parameters != null) {
for (Object entryObject : parameters.entrySet()) {
Map.Entry entry = (Map.Entry) entryObject;
transformer.setParameter((String) entry.getKey(), entry.getValue());
}
}
return transformer;
}
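/**
 * Illustrative sketch only, not part of the original API: shows one way a caller might reuse the
 * cached, thread-local Transformer for a plain Reader-to-Writer transformation. The stylesheet
 * path is a hypothetical classpath resource, not something this class ships.
 */
private static void cachedTransformerSketch(Reader in, Writer out) throws TransformerException {
URL xslt = XSLT.class.getResource("/xslt/identity.xslt"); // hypothetical resource
Transformer transformer = getLocalTransformer(xslt); // cached per thread and per XSLT URL
transform(transformer, in, out); // plain Reader-to-Writer overload defined below
}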
/**
* Clears the cache used by {@link #getLocalTransformer(java.net.URL)}.
* This is safe to call as it only affects performance. Clearing the cache
* means that changes to underlying XSLTs will be reflected and that any
* memory allocated for caching is freed.
* </p><p>
* Except for special cases, such as a huge number of different XSLTs,
* the cache should only be cleared when the underlying XSLTs are changed.
*/
public static void clearTransformerCache() {
localMapCache = createLocalMapCache();
}
/**
* Requests a cached ThreadLocal Transformer and performs the
* transformation.
*
* @param xslt the location of the XSLT to use.
* @param in the content to transform.
* @return the transformed content.
* @throws TransformerException if the transformation failed.
*/
public static String transform(URL xslt, String in) throws TransformerException {
return transform(xslt, in, null, false);
}
/**
* Requests a cached ThreadLocal Transformer and performs the
* transformation.
*
* @param xslt the location of the XSLT to use.
* @param in the content to transform.
* @param ignoreXMLNamespaces if true, namespaces in the input content will be stripped. This is not recommended,
* but a lot of XML and XSLTs do not match namespaces correctly. Setting this to true
* will have an impact on performance.
* @return the transformed content.
* @throws TransformerException if the transformation failed.
*/
public static String transform(URL xslt, String in, boolean ignoreXMLNamespaces) throws TransformerException {
return transform(xslt, in, null, ignoreXMLNamespaces);
}
/**
* Requests a cached ThreadLocal Transformer and performs the
* transformation.
*
* @param xslt the location of the XSLT to use.
* @param in the content to transform.
* @param parameters for the Transformer. The keys must be Strings. If the map is null, it will be ignored.
* @return the transformed content.
* @throws TransformerException if the transformation failed.
*/
public static String transform(URL xslt, String in, Map parameters) throws TransformerException {
return transform(xslt, in, parameters, false);
}
/**
* Requests a cached ThreadLocal Transformer and performs the transformation.
*
* @param xslt the location of the XSLT to use.
* @param in the content to transform.
* @param parameters for the Transformer. The keys must be Strings.
* If the map is null, it will be ignored.
* @param ignoreXMLNamespaces if true, namespaces in the input content will
* be stripped. This is not recommended, but a lot of XML and XSLTs
* do not match namespaces correctly. Setting this to true will
* have an impact on performance.
* @return the transformed content.
* @throws TransformerException if the transformation failed.
*/
public static String transform(URL xslt, String in, Map parameters, boolean ignoreXMLNamespaces)
throws TransformerException {
StringWriter sw = new StringWriter();
if (!ignoreXMLNamespaces) {
transform(xslt, new StringReader(in), sw, parameters);
} else {
// Slowest
//Reader noNamespace = removeNamespaces(new StringReader(in));
//transform(getLocalTransformer(xslt, parameters), noNamespace, sw);
// Slow
//Document dom = DOM.stringToDOM(in);
//transform(getLocalTransformer(xslt, parameters), dom, sw);
// Roughly 30% faster than DOM-based NS stripping
/*try {
XMLFilter filter = new ParsingNamespaceRemover(
XMLReaderFactory.createXMLReader());
Source source =
new SAXSource(filter, new InputSource(new StringReader(in)));
getLocalTransformer(xslt, parameters).transform(
source, new StreamResult(sw));
} catch (SAXException e) {
// The Java runtime doesn't provide an XMLReader,
// so we are doomed
throw new RuntimeException(
"Failed to load default XMLReader implementation", e);
}*/
// More than twice as fast as DOM-based NS stripping
Reader noNamespace = new NamespaceRemover(new StringReader(in));
transform(getLocalTransformer(xslt, parameters), noNamespace, sw);
}
return sw.toString();
}
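/**
 * Minimal usage sketch, illustrative only: the stylesheet path and the "uppercase" parameter are
 * assumptions made for the example, not part of this class. It transforms a small inline document
 * and asks for namespaces to be stripped first, accepting the performance cost described above.
 */
private static String namespaceStrippingSketch() throws TransformerException {
URL xslt = XSLT.class.getResource("/xslt/extract-title.xslt"); // hypothetical classpath resource
Map<String, String> parameters = new HashMap<String, String>();
parameters.put("uppercase", "true"); // hypothetical XSLT parameter
String input = "<doc xmlns=\"urn:example\"><title>demo</title></doc>";
return transform(xslt, input, parameters, true); // true = strip namespaces before transforming
}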
/**
* Requests a cached ThreadLocal Transformer and performs the
* transformation.
*
* @param xslt the location of the XSLT to use.
* @param in the content to transform.
* @param parameters for the Transformer. The keys must be Strings.
* If the map is null, it will be ignored.
* @return the transformed content.
* @throws TransformerException if the transformation failed.
*/
public static String transform(URL xslt, Reader in, Map parameters) throws TransformerException {
return transform(xslt, in, parameters, false);
}
/**
* Requests a cached ThreadLocal Transformer and performs the transformation.
*
* @param xslt the location of the XSLT to use.
* @param in the content to transform.
* @param parameters for the Transformer. The keys must be Strings.
* If the map is null, it will be ignored.
* @param ignoreXMLNamespaces if true, namespaces in the input content will be stripped. This is not recommended,
* but a lot of XML and XSLTs do not match namespaces correctly. Setting this to true
* will have an impact on performance.
* @return the transformed content.
* @throws TransformerException if the transformation failed.
*/
public static String transform(URL xslt, Reader in, Map parameters, boolean ignoreXMLNamespaces)
throws TransformerException {
StringWriter sw = new StringWriter();
if (!ignoreXMLNamespaces) {
transform(xslt, in, sw, parameters);
} else {
transform(getLocalTransformer(xslt, parameters), new NamespaceRemover(in), sw);
}
return sw.toString();
}
/**
* Requests a cached ThreadLocal Transformer and performs the transformation.
*
* @param xslt the location of the XSLT to use.
* @param in the content to transform.
* @param parameters for the Transformer. The keys must be Strings. If the map is null, it will be ignored.
* @return the transformed content. Note that the correct charset must be
* supplied to toString("charset") to get proper String results.
* The charset is specified by the XSLT.
* @throws TransformerException if the transformation failed.
*/
public static ByteArrayOutputStream transform(URL xslt, byte[] in, Map parameters) throws TransformerException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
transform(xslt, new ByteArrayInputStream(in), out, parameters);
return out;
}
/**
* Requests a cached ThreadLocal Transformer and performs the transformation.
*
* @param xslt the location of the XSLT to use.
* @param in the content to transform.
* @param parameters for the Transformer. The keys must be Strings.
* If the map is null, it will be ignored.
* @param ignoreXMLNamespaces if true, namespaces in the input content will be stripped. This is not recommended,
* but a lot of XML and XSLTs do not match namespaces correctly. Setting this to true
* will have an impact on performance.
* @return the transformed content. Note that the correct charset must be
* supplied to toString("charset") to get proper String results.
* The charset is specified by the XSLT.
* @throws TransformerException if the transformation failed.
*/
public static ByteArrayOutputStream transform(URL xslt, byte[] in, Map parameters, boolean ignoreXMLNamespaces)
throws TransformerException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
if (!ignoreXMLNamespaces) {
transform(xslt, new ByteArrayInputStream(in), out, parameters);
} else {
Writer writer = new OutputStreamWriter(out);
Reader reader = new NamespaceRemover(new InputStreamReader(new ByteArrayInputStream(in)));
transform(getLocalTransformer(xslt, parameters), reader, writer);
}
return out;
}
/**
* Requests a cached ThreadLocal Transformer and performs the transformation.
*
* @param xslt the location of the XSLT to use.
* @param in the content to transform.
* @param parameters for the Transformer. The keys must be Strings. If the map is null, it will be ignored.
* @return the transformed content. Note that the correct charset must be
* supplied to toString("charset") to get proper String results.
* The charset is specified by the XSLT.
* @throws TransformerException if the transformation failed.
*/
public static ByteArrayOutputStream transform(URL xslt, InputStream in, Map parameters)
throws TransformerException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
transform(getLocalTransformer(xslt, parameters), in, out);
return out;
}
/**
* Requests a cached ThreadLocal Transformer and performs the
* transformation.
*
* @param xslt the location of the XSLT to use.
* @param in the content to transform.
* @param parameters for the Transformer. The keys must be Strings. If the map is null, it will be ignored.
* @param ignoreXMLNamespaces if true, namespaces in the input content will be stripped. This is not recommended,
* but a lot of XML and XSLTs do not match namespaces correctly. Setting this to true
* will have an impact on performance.
* @return the transformed content. Note that the correct charset must be
* supplied to toString("charset") to get proper String results.
* The charset is specified by the XSLT.
* @throws TransformerException if the transformation failed.
*/
public static ByteArrayOutputStream transform(URL xslt, InputStream in, Map parameters,
boolean ignoreXMLNamespaces) throws TransformerException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
if (!ignoreXMLNamespaces) {
transform(getLocalTransformer(xslt, parameters), in, out);
} else {
Writer writer = new OutputStreamWriter(out);
Reader reader = new NamespaceRemover(new InputStreamReader(in));
transform(getLocalTransformer(xslt, parameters), reader, writer);
}
return out;
}
/**
* Requests a cached ThreadLocal Transformer and performs the transformation.
*
* @param xslt the location of the XSLT to use.
* @param dom the content to transform.
* @param parameters for the Transformer. The keys must be Strings. If the map is null, it will be ignored.
* @return the transformed content. Note that the correct charset must be
* supplied to toString("charset") to get proper String results.
* The charset is specified by the XSLT.
* @throws TransformerException if the transformation failed.
*/
public static ByteArrayOutputStream transform(URL xslt, Document dom, Map parameters) throws TransformerException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
transform(getLocalTransformer(xslt, parameters), dom, out);
return out;
}
/**
* Requests a cached ThreadLocal Transformer and performs a transformation from Stream to Stream.
*
* @param xslt the location of the XSLT to use.
* @param in input Stream.
* @param out output Stream.
* @param parameters for the Transformer. The keys must be Strings.
* If the map is null, it will be ignored.
* @throws TransformerException if the transformation failed.
*/
public static void transform(URL xslt, InputStream in, OutputStream out, Map parameters)
throws TransformerException {
transform(getLocalTransformer(xslt, parameters), in, out);
}
/**
* Requests a cached ThreadLocal Transformer and performs a transformation from Reader to Writer.
*
* @param xslt the location of the XSLT to use.
* @param in input.
* @param out output.
* @param parameters for the Transformer. The keys must be Strings.
* If the map is null, it will be ignored.
* @throws TransformerException if the transformation failed.
*/
public static void transform(URL xslt, Reader in, Writer out, Map parameters) throws TransformerException {
transform(getLocalTransformer(xslt, parameters), in, out);
}
/**
* Performs a transformation from Document to Stream with the transformer.
*
* @param transformer probably retrieved by {@link #getLocalTransformer}.
* @param dom input.
* @param out output.
* @throws TransformerException if the transformation failed.
*/
public static void transform(Transformer transformer, Document dom, OutputStream out) throws TransformerException {
transformer.transform(new DOMSource(dom), new StreamResult(out));
}
/**
* Performs a transformation from Stream to Stream with the transformer.
*
* @param transformer probably retrieved by {@link #getLocalTransformer}.
* @param in input Stream.
* @param out output Stream.
* @throws TransformerException if the transformation failed.
*/
public static void transform(Transformer transformer, InputStream in, OutputStream out)
throws TransformerException {
transformer.transform(new StreamSource(in), new StreamResult(out));
}
/**
* Performs a transformation from Reader to Writer with the transformer.
*
* @param transformer probably retrieved by {@link #getLocalTransformer}.
* @param in input.
* @param out output.
* @throws TransformerException if the transformation failed.
*/
public static void transform(Transformer transformer, Reader in, Writer out) throws TransformerException {
transformer.transform(new StreamSource(in), new StreamResult(out));
}
/**
* Performs a transformation from DOM to Writer with the transformer.
*
* @param transformer probably retrieved by {@link #getLocalTransformer}.
* @param dom input.
* @param out output.
* @throws TransformerException if the transformation failed.
*/
public static void transform(Transformer transformer, Document dom, Writer out) throws TransformerException {
transformer.transform(new DOMSource(dom), new StreamResult(out));
}
    /* Using XSLTs to remove the namespaces is slower than DOM parsing.
Memory-usage has not been tested.
*/
static URL NAMESPACE_XSLT;
static {
NAMESPACE_XSLT = Thread.currentThread().getContextClassLoader().getResource(
"dk/statsbiblioteket/util/xml/namespace_remover.xslt");
}
/*static ByteArrayOutputStream removeNamespaces(InputStream in) throws
TransformerException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
transform(NAMESPACE_XSLT, in, out, null);
return out;
}
static Reader removeNamespaces(Reader in) throws TransformerException {
StringWriter sw = new StringWriter();
transform(NAMESPACE_XSLT, in, sw, null);
return new StringReader(sw.toString());
}*/
}
|
package org.yakindu.sct.ui.editor.providers;
import org.eclipse.draw2d.Connection;
import org.eclipse.draw2d.ConnectionLocator;
import org.eclipse.draw2d.Label;
import org.eclipse.draw2d.geometry.Point;
import org.eclipse.draw2d.geometry.PointList;
import org.eclipse.gef.EditDomain;
import org.eclipse.gef.EditPart;
import org.eclipse.gef.editparts.AbstractConnectionEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editparts.GraphicalEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editparts.IGraphicalEditPart;
import org.eclipse.gmf.runtime.diagram.ui.parts.DiagramEditDomain;
import org.eclipse.gmf.runtime.diagram.ui.services.decorator.IDecorator;
import org.eclipse.gmf.runtime.diagram.ui.services.decorator.IDecoratorTarget;
import org.eclipse.gmf.runtime.draw2d.ui.geometry.LineSeg.KeyPoint;
import org.eclipse.gmf.runtime.draw2d.ui.geometry.PointListUtilities;
import org.eclipse.gmf.runtime.draw2d.ui.mapmode.MapModeUtil;
import org.yakindu.sct.model.sgraph.SGraphPackage;
import org.yakindu.sct.model.sgraph.Transition;
import org.yakindu.sct.model.sgraph.Vertex;
import org.yakindu.sct.ui.editor.editor.figures.PriorityFigure;
import org.yakindu.sct.ui.editor.editparts.TransitionEditPart;
/**
* @author andreas muelder - Initial contribution and API
*/
public class TransitionPriorityDecorationProvider extends AbstractPriorityDecorationProvider {
public static final int DISTANCE_TO_SOURCE = 6;
@Override
public void createDecorators(IDecoratorTarget decoratorTarget) {
EditPart editPart = (EditPart) decoratorTarget.getAdapter(EditPart.class);
if (editPart instanceof GraphicalEditPart || editPart instanceof AbstractConnectionEditPart) {
EditDomain ed = editPart.getViewer().getEditDomain();
if (!(ed instanceof DiagramEditDomain)) {
return;
}
if (shouldInstall(((DiagramEditDomain) ed).getEditorPart()) && editPart instanceof TransitionEditPart) {
IDecorator decorator = createStatusDecorator(decoratorTarget);
decorators.add(decorator);
decoratorTarget.installDecorator(getDecoratorKey(), decorator);
}
}
}
protected IDecorator createStatusDecorator(IDecoratorTarget decoratorTarget) {
return new TransitionPriorityDecorator(decoratorTarget);
}
public static class TransitionPriorityDecorator extends AbstractPriorityDecorator {
public TransitionPriorityDecorator(IDecoratorTarget decoratorTarget) {
super(decoratorTarget, SGraphPackage.Literals.VERTEX__OUTGOING_TRANSITIONS);
}
@Override
public void activate() {
if (!(semanticElement instanceof Transition)
|| !(((Transition) semanticElement).eContainer() instanceof Vertex)) {
return;
}
if (((Transition) semanticElement).eContainer() != null) {
owningElement = (Vertex) ((Transition) semanticElement).eContainer();
}
super.activate();
}
@Override
public void deactivate() {
if (!(semanticElement instanceof Transition)) {
return;
}
owningElement = null;
super.deactivate();
}
public boolean needsDecoration(IGraphicalEditPart editPart) {
Transition transition = (Transition) editPart.resolveSemanticElement();
Vertex container = (Vertex) transition.eContainer();
return container.getOutgoingTransitions().size() > 1;
}
public void createDecorators(IGraphicalEditPart editPart) {
PriorityFigure figure = new PriorityFigure(MapModeUtil.getMapMode(), getPriority(editPart));
figure.setSize(12, 13);
setDecoration(getDecoratorTarget().addDecoration(figure,
new ConnectionLocator((Connection) editPart.getFigure(), ConnectionLocator.TARGET) {
protected Point getLocation(PointList points) {
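                        // Place the priority figure a fixed offset (DISTANCE_TO_SOURCE) along the
                        // connection, measured from its origin key point.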
Point p = PointListUtilities.pointOn(PointListUtilities.copyPoints(points),
DISTANCE_TO_SOURCE, KeyPoint.ORIGIN, new Point());
return p;
}
}, false));
figure.setToolTip(new Label("Transition Priority " + getPriority(editPart)));
}
public int getPriority(IGraphicalEditPart editPart) {
Transition transition = ((Transition) editPart.resolveSemanticElement());
Vertex container = (Vertex) transition.eContainer();
int indexOf = container.getOutgoingTransitions().indexOf(transition);
// visible priorities should start with 1
return indexOf + 1;
}
}
}
|
package org.csstudio.alarm.dal2jms;
import javax.annotation.Nonnull;
import org.csstudio.platform.management.CommandParameters;
import org.csstudio.platform.management.CommandResult;
import org.csstudio.platform.management.IManagementCommand;
/**
* Remote command registered at the management command extension point.
*
* @author jpenning
* @author $Author$
* @version $Revision$
* @since 10.06.2010
*/
public class StopDal2JmsAction implements IManagementCommand {
/**
* {@inheritDoc}
*/
@Nonnull
public final CommandResult execute(@Nonnull final CommandParameters parameters) {
Dal2JmsApplication.getRunningServer().stop();
return CommandResult.createSuccessResult();
}
}
|
package uk.ac.ebi.quickgo.client.presets.read.assignedby;
import org.junit.Before;
import org.junit.Test;
import org.springframework.batch.item.validator.ValidationException;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.IsNull.notNullValue;
/**
* Created 05/09/16
* @author Edd
*/
public class RawAssignedByPresetValidatorTest {
private RawAssignedByPresetValidator validator;
@Before
public void setUp() {
this.validator = new RawAssignedByPresetValidator();
}
@Test(expected = ValidationException.class)
public void nullRawPresetIsInvalid() throws Exception {
validator.process(null);
}
@Test(expected = ValidationException.class)
public void nullNameIsInvalid() throws Exception {
RawAssignedByPreset value = new RawAssignedByPreset();
value.name = null;
validator.process(value);
}
@Test(expected = ValidationException.class)
public void emptyNameIsInvalid() throws Exception {
RawAssignedByPreset value = new RawAssignedByPreset();
value.name = "";
validator.process(value);
}
@Test
public void nonEmptyNameIsValid() throws Exception {
RawAssignedByPreset value = new RawAssignedByPreset();
value.name = "valid name";
RawAssignedByPreset processedValue = validator.process(value);
assertThat(processedValue, is(notNullValue()));
}
}
|
package com.wonderpush.sdk;
import android.annotation.TargetApi;
import android.app.Activity;
import android.app.Application;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import java.util.WeakHashMap;
class ActivityLifecycleMonitor {
private static final Monitor sSingleton;
private static boolean sActivityLifecycleCallbacksRegistered;
private static WeakHashMap<Activity, Object> sTrackedActivities = new WeakHashMap<>();
static {
Monitor monitor = null;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
try {
monitor = new Monitor();
} catch (Throwable ex) { // java.lang.NoClassDefFoundError: com.wonderpush.sdk.ActivityLifecycleMonitor$Monitor
                Log.d(WonderPush.TAG, "Cannot instantiate the ActivityLifecycleMonitor, pre API 14 (Ice Cream Sandwich) version?", ex);
}
}
sSingleton = monitor;
}
private static boolean isSupported() {
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH;
}
protected static void monitorActivitiesLifecycle() {
if (isSupported() && !sActivityLifecycleCallbacksRegistered && WonderPush.sApplication != null) {
WonderPushCompatibilityHelper.ApplicationRegisterActivityLifecycleCallbacks(WonderPush.sApplication, sSingleton);
sActivityLifecycleCallbacksRegistered = true;
}
}
protected static void addTrackedActivity(Activity activity) {
sTrackedActivities.put(activity, null);
}
protected static Activity getCurrentActivity() {
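        // Strategy: prefer the last activity reported as resumed by the lifecycle callbacks;
        // otherwise fall back to a manually tracked activity that currently has window focus
        // and is not finishing.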
Activity candidate = null;
if (sActivityLifecycleCallbacksRegistered
&& sSingleton != null
&& sSingleton.hasResumedActivities()) {
candidate = sSingleton.getLastResumedActivity();
}
if (candidate == null) {
for (Activity activity : sTrackedActivities.keySet()) {
if (activity.hasWindowFocus() && !activity.isFinishing()) {
candidate = activity;
break;
}
}
}
return candidate;
}
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
static class Monitor implements Application.ActivityLifecycleCallbacks {
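        // The paired counters below (create/destroy, start/stop, resume/pause) are compared in
        // hasCreatedActivities()/hasStartedActivities()/hasResumedActivities() to infer whether
        // the app currently has an activity in the corresponding state.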
private int createCount;
private int startCount;
private int resumeCount;
private int pausedCount;
private int stopCount;
private int destroyCount;
private long createFirstDate;
private long startFirstDate;
private long resumeFirstDate;
private long pausedLastDate;
private long stopLastDate;
private long destroyLastDate;
private Activity lastResumedActivity;
@Override
public void onActivityCreated(Activity activity, Bundle savedInstanceState) {
if (!hasCreatedActivities()) {
createFirstDate = TimeSync.getTime();
}
++createCount;
WonderPush.showPotentialNotification(activity, activity.getIntent());
}
@Override
public void onActivityStarted(Activity activity) {
if (createCount == 0) {
                // The monitor was probably set up inside an Activity.onCreate() call
this.onActivityCreated(activity, null);
}
if (!hasStartedActivities()) {
startFirstDate = TimeSync.getTime();
}
++startCount;
WonderPush.onInteraction();
}
@Override
public void onActivityResumed(Activity activity) {
if (!hasResumedActivities()) {
resumeFirstDate = TimeSync.getTime();
}
lastResumedActivity = activity;
++resumeCount;
WonderPush.onInteraction();
}
@Override
public void onActivityPaused(Activity activity) {
++pausedCount;
if (!hasResumedActivities()) {
pausedLastDate = TimeSync.getTime();
}
WonderPush.onInteraction();
}
@Override
public void onActivityStopped(Activity activity) {
++stopCount;
if (!hasStartedActivities()) {
stopLastDate = TimeSync.getTime();
}
WonderPush.onInteraction();
}
@Override
public void onActivitySaveInstanceState(Activity activity, Bundle outState) {
}
@Override
public void onActivityDestroyed(Activity activity) {
++destroyCount;
if (!hasCreatedActivities()) {
destroyLastDate = TimeSync.getTime();
}
WonderPush.onInteraction();
}
protected boolean hasResumedActivities() {
return resumeCount > pausedCount;
}
protected boolean hasStartedActivities() {
return startCount > stopCount;
}
protected boolean hasCreatedActivities() {
return createCount > destroyCount;
}
protected Activity getLastResumedActivity() {
return lastResumedActivity;
}
protected long getCreateFirstDate() {
return createFirstDate;
}
protected long getStartFirstDate() {
return startFirstDate;
}
protected long getResumeFirstDate() {
return resumeFirstDate;
}
protected long getPausedLastDate() {
return pausedLastDate;
}
protected long getStopLastDate() {
return stopLastDate;
}
protected long getDestroyLastDate() {
return destroyLastDate;
}
}
}
|
package org.spine3.server.command;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Function;
import com.google.common.collect.Iterators;
import com.google.protobuf.Message;
import org.spine3.SPI;
import org.spine3.base.Command;
import org.spine3.base.CommandContext;
import org.spine3.base.CommandId;
import org.spine3.base.CommandStatus;
import org.spine3.base.Commands;
import org.spine3.base.Error;
import org.spine3.base.Failure;
import org.spine3.protobuf.TypeUrl;
import org.spine3.server.storage.AbstractStorage;
import javax.annotation.Nullable;
import java.util.Iterator;
import static org.spine3.base.CommandStatus.ERROR;
import static org.spine3.base.CommandStatus.RECEIVED;
import static org.spine3.base.Commands.generateId;
import static org.spine3.base.Commands.getId;
import static org.spine3.base.Stringifiers.EMPTY_ID;
import static org.spine3.base.Stringifiers.idToString;
import static org.spine3.protobuf.Timestamps2.getCurrentTime;
import static org.spine3.validate.Validate.checkNotDefault;
/**
* A storage used by {@link CommandStore} for keeping command data.
*
* @author Alexander Yevsyukov
*/
@SPI
public abstract class CommandStorage extends AbstractStorage<CommandId, CommandRecord> {
private static final Function<CommandRecord, Command> TO_COMMAND =
new Function<CommandRecord, Command>() {
@Override
public Command apply(@Nullable CommandRecord record) {
if (record == null) {
return Command.getDefaultInstance();
}
final Command cmd = record.getCommand();
return cmd;
}
};
protected CommandStorage(boolean multitenant) {
super(multitenant);
}
/**
* Obtains or generates a {@code CommandId} from the passed command.
*
     * <p>If the passed command does not carry a command ID, an ID is still needed to store
     * the error in the record associated with this command, so one is generated.
*
* <p>We pass this ID to the record, so that it has an identity.
* But this ID does not belong to the command.
*
* <p>Therefore, commands without ID can be found by records
* where `command.context.command_id` field is empty.
*/
private static CommandId getOrGenerateCommandId(Command command) {
CommandId id = getId(command);
if (idToString(id).equals(EMPTY_ID)) {
id = generateId();
}
return id;
}
/**
     * Creates a command storage record builder based on the passed parameters.
*
* <p>{@code targetId} and {@code targetIdType} are set to empty strings if
* the command is not for an entity.
*
* @param command a command to convert to a record. This includes instances of faulty
commands. An example of such a fault is a missing command ID.
* @param status a command status to set in the record
     * @param generatedCommandId a command ID to use when the passed command does not have its own
ID. If the command has its own ID, this parameter is {@code null}.
* @return a storage record
*/
@VisibleForTesting
static CommandRecord.Builder newRecordBuilder(Command command,
CommandStatus status,
@Nullable CommandId generatedCommandId) {
final CommandContext context = command.getContext();
final CommandId commandId = generatedCommandId != null
? generatedCommandId
: context.getCommandId();
final Message commandMessage = Commands.getMessage(command);
final String commandType = TypeUrl.of(commandMessage)
.getSimpleName();
final CommandRecord.Builder builder =
CommandRecord.newBuilder()
.setCommandId(commandId)
.setCommandType(commandType)
.setCommand(command)
.setTimestamp(getCurrentTime())
.setStatus(ProcessingStatus.newBuilder()
.setCode(status));
return builder;
}
    /** Converts {@code CommandRecord}s to {@code Command}s. */
@VisibleForTesting
static Iterator<Command> toCommandIterator(Iterator<CommandRecord> records) {
return Iterators.transform(records, TO_COMMAND);
}
protected void store(Command command) {
store(command, RECEIVED);
}
protected void store(Command command, CommandStatus status) {
checkNotClosed();
final CommandRecord record = newRecordBuilder(command, status, null).build();
final CommandId commandId = getId(command);
write(commandId, record);
}
protected void store(Command command, Error error) {
checkNotClosed();
checkNotDefault(error);
final CommandId id = getOrGenerateCommandId(command);
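        // For a command that arrived without an ID, getOrGenerateCommandId() generates one here
        // so the error can still be recorded; the stored command keeps its empty
        // context.command_id, which is how such records can be found later.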
final CommandRecord.Builder builder = newRecordBuilder(command, ERROR, id);
builder.getStatusBuilder()
.setError(error);
final CommandRecord record = builder.build();
write(id, record);
}
protected Iterator<Command> iterator(CommandStatus status) {
checkNotClosed();
final Iterator<CommandRecord> recordIterator = read(status);
final Iterator<Command> commandIterator = toCommandIterator(recordIterator);
return commandIterator;
}
/**
* Reads all command records with the given status.
*
* @param status a command status to search by
* @return records with the given status
*/
protected abstract Iterator<CommandRecord> read(CommandStatus status);
/** Updates the status of the command to {@link CommandStatus#OK}. */
protected abstract void setOkStatus(CommandId commandId);
/** Updates the status of the command with the error. */
protected abstract void updateStatus(CommandId commandId, Error error);
/** Updates the status of the command with the business failure. */
protected abstract void updateStatus(CommandId commandId, Failure failure);
}
|
package org.sagebionetworks.repo.manager.file.preview;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import javax.imageio.ImageIO;
import javax.imageio.ImageReadParam;
import javax.imageio.ImageReader;
import javax.imageio.stream.ImageInputStream;
import org.imgscalr.Scalr;
import org.imgscalr.Scalr.Mode;
import org.sagebionetworks.StackConfiguration;
import org.sagebionetworks.StackConfigurationSingleton;
/**
* Generates previews for image content types.
*
* @author John
*
*/
public class ImagePreviewGenerator implements PreviewGenerator {
public static final String IMAGE_BMP = "image/bmp";
public static final String IMAGE_PJPEG = "image/pjpeg";
public static final String IMAGE_JPEG = "image/jpeg";
public static final String IMAGE_GIF = "image/gif";
public static final String IMAGE_PNG = "image/png";
/**
	 * The supported content types for this generator, and the memory multiplier that should be used.
*/
private static Map<String, Float> SUPPORTED_CONTENT_TYPES;
static{
SUPPORTED_CONTENT_TYPES = new HashMap<String, Float>();
// Map the types to the memory requirements.
		// Since it is better to err on the high side, we multiply the calculated
		// memory use for each type by a fudge factor.
float fudgeFactor = 1.2f;
SUPPORTED_CONTENT_TYPES.put(IMAGE_BMP, 4.05f*fudgeFactor);
SUPPORTED_CONTENT_TYPES.put(IMAGE_PJPEG, 23.38f*fudgeFactor);
SUPPORTED_CONTENT_TYPES.put(IMAGE_JPEG, 23.38f*fudgeFactor);
SUPPORTED_CONTENT_TYPES.put(IMAGE_GIF, 19.98f*fudgeFactor);
SUPPORTED_CONTENT_TYPES.put(IMAGE_PNG, 46.28f*fudgeFactor);
}
@Override
public PreviewOutputMetadata generatePreview(InputStream from, OutputStream to) throws IOException {
// Determine the size of the image
// First load the image
BufferedImage image = loadImageWithSizeCheck(from);
if(image == null){
throw new IllegalArgumentException("The passed input stream was not an image");
}
		// Let imgscalr (Scalr) do the heavy lifting!
int maxWidthPixels = StackConfigurationSingleton.singleton().getMaximumPreviewWidthPixels();
int maxHeightPixels = StackConfigurationSingleton.singleton().getMaximumPreviewHeightPixels();
//only resize if original image is bigger than our preview max size
int height = image.getHeight();
int width = image.getWidth();
if (height > maxHeightPixels || width > maxWidthPixels) {
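			// Scale to the maximum width first; if the result is still too tall, scale again to
			// the maximum height, so the preview fits within both limits.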
if (width > maxWidthPixels ) {
image = Scalr.resize(image, Mode.FIT_TO_WIDTH, maxWidthPixels);
height = image.getHeight();
}
if (height > maxHeightPixels)
image = Scalr.resize(image, Mode.FIT_TO_HEIGHT, maxHeightPixels);
}
ImageIO.write(image, "png", to);
// the resulting image is a png
return new PreviewOutputMetadata(IMAGE_PNG, ".png");
}
@Override
public boolean supportsContentType(String contentType, String extension) {
		return SUPPORTED_CONTENT_TYPES.containsKey(contentType.toLowerCase());
}
public static void main(String[] args) throws IOException, InterruptedException, InstantiationException, IllegalAccessException{
for(String filePath: args){
File toRead = new File(filePath);
PreviewGeneratorUtils.calculateMemoryRequirments(toRead, ImagePreviewGenerator.class);
}
}
@Override
public long calculateNeededMemoryBytesForPreview(String mimeType, long contentSize) {
double multiplier = SUPPORTED_CONTENT_TYPES.get(mimeType);
long memoryNeededBytes = (long) Math.ceil((((double) contentSize) * multiplier));
return memoryNeededBytes;
}
/**
* Load an image from the given input stream with a size check before the load is finished.
* @param from
* @return
* @throws IOException
*/
public static BufferedImage loadImageWithSizeCheck(InputStream from) throws IOException {
ImageInputStream stream = ImageIO.createImageInputStream(from);
		Iterator<ImageReader> iter = ImageIO.getImageReaders(stream);
if (!iter.hasNext()) {
return null;
}
		ImageReader reader = iter.next();
ImageReadParam param = reader.getDefaultReadParam();
reader.setInput(stream, true, true);
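		// The dimensions are read from the image header before any pixel data is decoded,
		// so oversized images are rejected before their pixels are allocated in memory.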
long width = reader.getWidth(0);
long height = reader.getHeight(0);
long size = width*height;
if(size > 1000) {
			throw new IllegalArgumentException("Image is too large to load into memory.");
}
BufferedImage image;
try {
image = reader.read(0, param);
} finally {
reader.dispose();
stream.close();
}
if (image == null) {
stream.close();
}
return image;
}
}
|
package io.flutter.embedding.engine;
import android.content.Context;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.SurfaceTexture;
import android.os.Build;
import android.os.Looper;
import android.view.Surface;
import android.view.SurfaceHolder;
import androidx.annotation.Keep;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.UiThread;
import androidx.annotation.VisibleForTesting;
import io.flutter.Log;
import io.flutter.embedding.engine.FlutterEngine.EngineLifecycleListener;
import io.flutter.embedding.engine.dart.PlatformMessageHandler;
import io.flutter.embedding.engine.deferredcomponents.DeferredComponentManager;
import io.flutter.embedding.engine.mutatorsstack.FlutterMutatorsStack;
import io.flutter.embedding.engine.renderer.FlutterUiDisplayListener;
import io.flutter.embedding.engine.renderer.RenderSurface;
import io.flutter.embedding.engine.renderer.SurfaceTextureWrapper;
import io.flutter.plugin.common.StandardMessageCodec;
import io.flutter.plugin.localization.LocalizationPlugin;
import io.flutter.plugin.platform.PlatformViewsController;
import io.flutter.util.Preconditions;
import io.flutter.view.AccessibilityBridge;
import io.flutter.view.FlutterCallbackInformation;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArraySet;
/**
* Interface between Flutter embedding's Java code and Flutter engine's C/C++ code.
*
* <p>Flutter's engine is built with C/C++. The Android Flutter embedding is responsible for
* coordinating Android OS events and app user interactions with the C/C++ engine. Such coordination
* requires messaging from an Android app in Java code to the C/C++ engine code. This communication
* requires a JNI (Java Native Interface) API to cross the Java/native boundary.
*
* <p>The entirety of Flutter's JNI API is codified in {@code FlutterJNI}. There are multiple
* reasons that all such calls are centralized in one class. First, JNI calls are inherently static
* and contain no Java implementation, therefore there is little reason to associate calls with
* different classes. Second, every JNI call must be registered in C/C++ code and this registration
* becomes more complicated with every additional Java class that contains JNI calls. Third, most
* Android developers are not familiar with native development or JNI intricacies, therefore it is
* in the interest of future maintenance to reduce the API surface that includes JNI declarations.
* Thus, all Flutter JNI calls are centralized in {@code FlutterJNI}.
*
* <p>Despite the fact that individual JNI calls are inherently static, there is state that exists
* within {@code FlutterJNI}. Most calls within {@code FlutterJNI} correspond to a specific
* "platform view", of which there may be many. Therefore, each {@code FlutterJNI} instance holds
* onto a "native platform view ID" after {@link #attachToNative(boolean)}, which is shared with the
* native C/C++ engine code. That ID is passed to every platform-view-specific native method. ID
* management is handled within {@code FlutterJNI} so that developers don't have to hold onto that
* ID.
*
* <p>To connect part of an Android app to Flutter's C/C++ engine, instantiate a {@code FlutterJNI}
* and then attach it to the native side:
*
* <pre>{@code
* // Instantiate FlutterJNI and attach to the native side.
* FlutterJNI flutterJNI = new FlutterJNI();
* flutterJNI.attachToNative();
*
* // Use FlutterJNI as desired. flutterJNI.dispatchPointerDataPacket(...);
*
* // Destroy the connection to the native side and cleanup.
* flutterJNI.detachFromNativeAndReleaseResources();
* }</pre>
*
* <p>To provide a visual, interactive surface for Flutter rendering and touch events, register a
* {@link RenderSurface} with {@link #setRenderSurface(RenderSurface)}
*
* <p>To receive callbacks for certain events that occur on the native side, register listeners:
*
* <ol>
* <li>{@link #addEngineLifecycleListener(EngineLifecycleListener)}
* <li>{@link #addIsDisplayingFlutterUiListener(FlutterUiDisplayListener)}
* </ol>
*
* To facilitate platform messages between Java and Dart running in Flutter, register a handler:
*
* <p>{@link #setPlatformMessageHandler(PlatformMessageHandler)}
*
* <p>To invoke a native method that is not associated with a platform view, invoke it statically:
*
* <p>{@code bool enabled = FlutterJNI.getIsSoftwareRenderingEnabled(); }
*/
@Keep
public class FlutterJNI {
private static final String TAG = "FlutterJNI";
// BEGIN Methods related to loading for FlutterLoader.
/**
* Loads the libflutter.so C++ library.
*
* <p>This must be called before any other native methods, and can be overridden by tests to avoid
* loading native libraries.
*
* <p>This method should only be called once across all FlutterJNI instances.
*/
public void loadLibrary() {
if (FlutterJNI.loadLibraryCalled) {
Log.w(TAG, "FlutterJNI.loadLibrary called more than once");
}
System.loadLibrary("flutter");
FlutterJNI.loadLibraryCalled = true;
}
private static boolean loadLibraryCalled = false;
/**
* Prefetch the default font manager provided by SkFontMgr::RefDefault() which is a process-wide
   * singleton owned by Skia. Note that the first call to SkFontMgr::RefDefault() will take
* noticeable time, but later calls will return a reference to the preexisting font manager.
*
* <p>This method should only be called once across all FlutterJNI instances.
*/
public void prefetchDefaultFontManager() {
if (FlutterJNI.prefetchDefaultFontManagerCalled) {
Log.w(TAG, "FlutterJNI.prefetchDefaultFontManager called more than once");
}
FlutterJNI.nativePrefetchDefaultFontManager();
FlutterJNI.prefetchDefaultFontManagerCalled = true;
}
private static boolean prefetchDefaultFontManagerCalled = false;
/**
* Perform one time initialization of the Dart VM and Flutter engine.
*
* <p>This method must be called only once. Calling more than once will cause an exception.
*
* @param context The application context.
* @param args Arguments to the Dart VM/Flutter engine.
* @param bundlePath For JIT runtimes, the path to the Dart kernel file for the application.
* @param appStoragePath The path to the application data directory.
* @param engineCachesPath The path to the application cache directory.
* @param initTimeMillis The time, in milliseconds, taken for initialization.
*/
public void init(
@NonNull Context context,
@NonNull String[] args,
@Nullable String bundlePath,
@NonNull String appStoragePath,
@NonNull String engineCachesPath,
long initTimeMillis) {
if (FlutterJNI.initCalled) {
Log.w(TAG, "FlutterJNI.init called more than once");
}
FlutterJNI.nativeInit(
context, args, bundlePath, appStoragePath, engineCachesPath, initTimeMillis);
FlutterJNI.initCalled = true;
}
private static boolean initCalled = false;
// END methods related to FlutterLoader
@Nullable private static AsyncWaitForVsyncDelegate asyncWaitForVsyncDelegate;
// This should also be updated by FlutterView when it is attached to a Display.
// The initial value of 0.0 indicates unknown refresh rate.
private static float refreshRateFPS = 0.0f;
// This is set from native code via JNI.
@Nullable private static String observatoryUri;
/** @deprecated Use {@link #init(Context, String[], String, String, String, long)} instead. */
@Deprecated
public static native void nativeInit(
@NonNull Context context,
@NonNull String[] args,
@Nullable String bundlePath,
@NonNull String appStoragePath,
@NonNull String engineCachesPath,
long initTimeMillis);
/** @deprecated Use {@link #prefetchDefaultFontManager()} instead. */
@Deprecated
public static native void nativePrefetchDefaultFontManager();
private native boolean nativeGetIsSoftwareRenderingEnabled();
  /**
   * Checks launch settings for whether software rendering is requested.
   *
   * <p>The value is the same per program.
   */
  @UiThread
  public boolean getIsSoftwareRenderingEnabled() {
return nativeGetIsSoftwareRenderingEnabled();
}
  /**
   * Observatory URI for the VM instance.
   *
   * <p>Its value is set by the native engine once {@link #init(Context, String[], String, String,
   * String, long)} is run.
   */
  @Nullable
  public static String getObservatoryUri() {
return observatoryUri;
}
public static void setRefreshRateFPS(float refreshRateFPS) {
if (FlutterJNI.setRefreshRateFPSCalled) {
Log.w(TAG, "FlutterJNI.setRefreshRateFPS called more than once");
}
FlutterJNI.refreshRateFPS = refreshRateFPS;
FlutterJNI.setRefreshRateFPSCalled = true;
}
private static boolean setRefreshRateFPSCalled = false;
// TODO(mattcarroll): add javadocs
public static void setAsyncWaitForVsyncDelegate(@Nullable AsyncWaitForVsyncDelegate delegate) {
asyncWaitForVsyncDelegate = delegate;
}
// TODO(mattcarroll): add javadocs
// Called by native.
private static void asyncWaitForVsync(final long cookie) {
if (asyncWaitForVsyncDelegate != null) {
asyncWaitForVsyncDelegate.asyncWaitForVsync(cookie);
} else {
throw new IllegalStateException(
"An AsyncWaitForVsyncDelegate must be registered with FlutterJNI before asyncWaitForVsync() is invoked.");
}
}
// TODO(mattcarroll): add javadocs
public static native void nativeOnVsync(
long frameTimeNanos, long frameTargetTimeNanos, long cookie);
// TODO(mattcarroll): add javadocs
@NonNull
public static native FlutterCallbackInformation nativeLookupCallbackInformation(long handle);
public native boolean nativeFlutterTextUtilsIsEmoji(int codePoint);
public native boolean nativeFlutterTextUtilsIsEmojiModifier(int codePoint);
public native boolean nativeFlutterTextUtilsIsEmojiModifierBase(int codePoint);
public native boolean nativeFlutterTextUtilsIsVariationSelector(int codePoint);
public native boolean nativeFlutterTextUtilsIsRegionalIndicator(int codePoint);
// Below represents the stateful part of the FlutterJNI instances that aren't static per program.
// Conceptually, it represents a native shell instance.
@Nullable private Long nativeShellHolderId;
@Nullable private AccessibilityDelegate accessibilityDelegate;
@Nullable private PlatformMessageHandler platformMessageHandler;
@Nullable private LocalizationPlugin localizationPlugin;
@Nullable private PlatformViewsController platformViewsController;
@Nullable private DeferredComponentManager deferredComponentManager;
@NonNull
private final Set<EngineLifecycleListener> engineLifecycleListeners = new CopyOnWriteArraySet<>();
@NonNull
private final Set<FlutterUiDisplayListener> flutterUiDisplayListeners =
new CopyOnWriteArraySet<>();
@NonNull private final Looper mainLooper; // cached to avoid synchronization on repeat access.
public FlutterJNI() {
// We cache the main looper so that we can ensure calls are made on the main thread
// without consistently paying the synchronization cost of getMainLooper().
mainLooper = Looper.getMainLooper();
}
/**
* Returns true if this instance of {@code FlutterJNI} is connected to Flutter's native engine via
* a Java Native Interface (JNI).
*/
public boolean isAttached() {
return nativeShellHolderId != null;
}
/**
* Attaches this {@code FlutterJNI} instance to Flutter's native engine, which allows for
* communication between Android code and Flutter's platform agnostic engine.
*
* <p>This method must not be invoked if {@code FlutterJNI} is already attached to native.
*/
@UiThread
public void attachToNative(boolean isBackgroundView) {
ensureRunningOnMainThread();
ensureNotAttachedToNative();
nativeShellHolderId = performNativeAttach(this, isBackgroundView);
}
@VisibleForTesting
public long performNativeAttach(@NonNull FlutterJNI flutterJNI, boolean isBackgroundView) {
return nativeAttach(flutterJNI, isBackgroundView);
}
private native long nativeAttach(@NonNull FlutterJNI flutterJNI, boolean isBackgroundView);
/**
* Spawns a new FlutterJNI instance from the current instance.
*
* <p>This creates another native shell from the current shell. This causes the 2 shells to re-use
* some of the shared resources, reducing the total memory consumption versus creating a new
* FlutterJNI by calling its standard constructor.
*
* <p>This can only be called once the current FlutterJNI instance is attached by calling {@link
* #attachToNative(boolean)}.
*
* <p>Static methods that should be only called once such as {@link #init(Context, String[],
* String, String, String, long)} or {@link #setRefreshRateFPS(float)} shouldn't be called again
* on the spawned FlutterJNI instance.
*/
@UiThread
@NonNull
public FlutterJNI spawn(
@Nullable String entrypointFunctionName, @Nullable String pathToEntrypointFunction) {
ensureRunningOnMainThread();
ensureAttachedToNative();
FlutterJNI spawnedJNI =
nativeSpawn(nativeShellHolderId, entrypointFunctionName, pathToEntrypointFunction);
Preconditions.checkState(
spawnedJNI.nativeShellHolderId != null && spawnedJNI.nativeShellHolderId != 0,
"Failed to spawn new JNI connected shell from existing shell.");
return spawnedJNI;
}
private native FlutterJNI nativeSpawn(
long nativeSpawningShellId,
@Nullable String entrypointFunctionName,
@Nullable String pathToEntrypointFunction);
/**
* Detaches this {@code FlutterJNI} instance from Flutter's native engine, which precludes any
* further communication between Android code and Flutter's platform agnostic engine.
*
* <p>This method must not be invoked if {@code FlutterJNI} is not already attached to native.
*
* <p>Invoking this method will result in the release of all native-side resources that were set
* up during {@link #attachToNative(boolean)} or {@link #spawn(String, String)}, or accumulated
* thereafter.
*
   * <p>It is permissible to re-attach this instance to native after detaching it from native.
*/
@UiThread
public void detachFromNativeAndReleaseResources() {
ensureRunningOnMainThread();
ensureAttachedToNative();
nativeDestroy(nativeShellHolderId);
nativeShellHolderId = null;
}
private native void nativeDestroy(long nativeShellHolderId);
private void ensureNotAttachedToNative() {
if (nativeShellHolderId != null) {
throw new RuntimeException(
"Cannot execute operation because FlutterJNI is attached to native.");
}
}
private void ensureAttachedToNative() {
if (nativeShellHolderId == null) {
throw new RuntimeException(
"Cannot execute operation because FlutterJNI is not attached to native.");
}
}
/**
* Adds a {@link FlutterUiDisplayListener}, which receives a callback when Flutter's engine
* notifies {@code FlutterJNI} that Flutter is painting pixels to the {@link Surface} that was
* provided to Flutter.
*/
@UiThread
public void addIsDisplayingFlutterUiListener(@NonNull FlutterUiDisplayListener listener) {
ensureRunningOnMainThread();
flutterUiDisplayListeners.add(listener);
}
/**
* Removes a {@link FlutterUiDisplayListener} that was added with {@link
* #addIsDisplayingFlutterUiListener(FlutterUiDisplayListener)}.
*/
@UiThread
public void removeIsDisplayingFlutterUiListener(@NonNull FlutterUiDisplayListener listener) {
ensureRunningOnMainThread();
flutterUiDisplayListeners.remove(listener);
}
// Called by native to notify first Flutter frame rendered.
@SuppressWarnings("unused")
@VisibleForTesting
@UiThread
public void onFirstFrame() {
ensureRunningOnMainThread();
for (FlutterUiDisplayListener listener : flutterUiDisplayListeners) {
listener.onFlutterUiDisplayed();
}
}
// TODO(mattcarroll): get native to call this when rendering stops.
@VisibleForTesting
@UiThread
void onRenderingStopped() {
ensureRunningOnMainThread();
for (FlutterUiDisplayListener listener : flutterUiDisplayListeners) {
listener.onFlutterUiNoLongerDisplayed();
}
}
@UiThread
public void onSurfaceCreated(@NonNull Surface surface) {
ensureRunningOnMainThread();
ensureAttachedToNative();
nativeSurfaceCreated(nativeShellHolderId, surface);
}
private native void nativeSurfaceCreated(long nativeShellHolderId, @NonNull Surface surface);
/**
* In hybrid composition, call this method when the {@link Surface} has changed.
*
   * <p>In hybrid composition, the root surface changes from {@link
* android.view.SurfaceHolder#getSurface()} to {@link android.media.ImageReader#getSurface()} when
* a platform view is in the current frame.
*/
@UiThread
public void onSurfaceWindowChanged(@NonNull Surface surface) {
ensureRunningOnMainThread();
ensureAttachedToNative();
nativeSurfaceWindowChanged(nativeShellHolderId, surface);
}
private native void nativeSurfaceWindowChanged(
long nativeShellHolderId, @NonNull Surface surface);
/**
* Call this method when the {@link Surface} changes that was previously registered with {@link
* #onSurfaceCreated(Surface)}.
*
* <p>See {@link android.view.SurfaceHolder.Callback#surfaceChanged(SurfaceHolder, int, int, int)}
* for an example of where this call might originate.
*/
@UiThread
public void onSurfaceChanged(int width, int height) {
ensureRunningOnMainThread();
ensureAttachedToNative();
nativeSurfaceChanged(nativeShellHolderId, width, height);
}
private native void nativeSurfaceChanged(long nativeShellHolderId, int width, int height);
/**
* Call this method when the {@link Surface} is destroyed that was previously registered with
* {@link #onSurfaceCreated(Surface)}.
*
* <p>See {@link android.view.SurfaceHolder.Callback#surfaceDestroyed(SurfaceHolder)} for an
* example of where this call might originate.
*/
@UiThread
public void onSurfaceDestroyed() {
ensureRunningOnMainThread();
ensureAttachedToNative();
onRenderingStopped();
nativeSurfaceDestroyed(nativeShellHolderId);
}
private native void nativeSurfaceDestroyed(long nativeShellHolderId);
/**
   * Call this method to notify Flutter of the current device viewport metrics that apply to
* the Flutter UI that is being rendered.
*
* <p>This method should be invoked with initial values upon attaching to native. Then, it should
* be invoked any time those metrics change while {@code FlutterJNI} is attached to native.
*/
@UiThread
public void setViewportMetrics(
float devicePixelRatio,
int physicalWidth,
int physicalHeight,
int physicalPaddingTop,
int physicalPaddingRight,
int physicalPaddingBottom,
int physicalPaddingLeft,
int physicalViewInsetTop,
int physicalViewInsetRight,
int physicalViewInsetBottom,
int physicalViewInsetLeft,
int systemGestureInsetTop,
int systemGestureInsetRight,
int systemGestureInsetBottom,
int systemGestureInsetLeft) {
ensureRunningOnMainThread();
ensureAttachedToNative();
nativeSetViewportMetrics(
nativeShellHolderId,
devicePixelRatio,
physicalWidth,
physicalHeight,
physicalPaddingTop,
physicalPaddingRight,
physicalPaddingBottom,
physicalPaddingLeft,
physicalViewInsetTop,
physicalViewInsetRight,
physicalViewInsetBottom,
physicalViewInsetLeft,
systemGestureInsetTop,
systemGestureInsetRight,
systemGestureInsetBottom,
systemGestureInsetLeft);
}
private native void nativeSetViewportMetrics(
long nativeShellHolderId,
float devicePixelRatio,
int physicalWidth,
int physicalHeight,
int physicalPaddingTop,
int physicalPaddingRight,
int physicalPaddingBottom,
int physicalPaddingLeft,
int physicalViewInsetTop,
int physicalViewInsetRight,
int physicalViewInsetBottom,
int physicalViewInsetLeft,
int systemGestureInsetTop,
int systemGestureInsetRight,
int systemGestureInsetBottom,
int systemGestureInsetLeft);
/** Sends a packet of pointer data to Flutter's engine. */
@UiThread
public void dispatchPointerDataPacket(@NonNull ByteBuffer buffer, int position) {
ensureRunningOnMainThread();
ensureAttachedToNative();
nativeDispatchPointerDataPacket(nativeShellHolderId, buffer, position);
}
private native void nativeDispatchPointerDataPacket(
long nativeShellHolderId, @NonNull ByteBuffer buffer, int position);
@UiThread
public void setPlatformViewsController(@NonNull PlatformViewsController platformViewsController) {
ensureRunningOnMainThread();
this.platformViewsController = platformViewsController;
}
/**
* Sets the {@link AccessibilityDelegate} for the attached Flutter context.
*
* <p>The {@link AccessibilityDelegate} is responsible for maintaining an Android-side cache of
* Flutter's semantics tree and custom accessibility actions. This cache should be hooked up to
* Android's accessibility system.
*
* <p>See {@link AccessibilityBridge} for an example of an {@link AccessibilityDelegate} and the
* surrounding responsibilities.
*/
@UiThread
public void setAccessibilityDelegate(@Nullable AccessibilityDelegate accessibilityDelegate) {
ensureRunningOnMainThread();
this.accessibilityDelegate = accessibilityDelegate;
}
@SuppressWarnings("unused")
@UiThread
private void updateSemantics(@NonNull ByteBuffer buffer, @NonNull String[] strings) {
ensureRunningOnMainThread();
if (accessibilityDelegate != null) {
accessibilityDelegate.updateSemantics(buffer, strings);
}
// TODO(mattcarroll): log dropped messages when in debug mode
}
@SuppressWarnings("unused")
@UiThread
private void updateCustomAccessibilityActions(
@NonNull ByteBuffer buffer, @NonNull String[] strings) {
ensureRunningOnMainThread();
if (accessibilityDelegate != null) {
accessibilityDelegate.updateCustomAccessibilityActions(buffer, strings);
}
// TODO(mattcarroll): log dropped messages when in debug mode
}
/** Sends a semantics action to Flutter's engine, without any additional arguments. */
public void dispatchSemanticsAction(int id, @NonNull AccessibilityBridge.Action action) {
dispatchSemanticsAction(id, action, null);
}
/** Sends a semantics action to Flutter's engine, with additional arguments. */
public void dispatchSemanticsAction(
int id, @NonNull AccessibilityBridge.Action action, @Nullable Object args) {
ensureAttachedToNative();
ByteBuffer encodedArgs = null;
int position = 0;
if (args != null) {
encodedArgs = StandardMessageCodec.INSTANCE.encodeMessage(args);
position = encodedArgs.position();
}
dispatchSemanticsAction(id, action.value, encodedArgs, position);
}
/**
* Sends a semantics action to Flutter's engine, given arguments that are already encoded for the
* engine.
*
* <p>To send a semantics action that has not already been encoded, see {@link
* #dispatchSemanticsAction(int, AccessibilityBridge.Action)} and {@link
* #dispatchSemanticsAction(int, AccessibilityBridge.Action, Object)}.
*/
@UiThread
public void dispatchSemanticsAction(
int id, int action, @Nullable ByteBuffer args, int argsPosition) {
ensureRunningOnMainThread();
ensureAttachedToNative();
nativeDispatchSemanticsAction(nativeShellHolderId, id, action, args, argsPosition);
}
private native void nativeDispatchSemanticsAction(
long nativeShellHolderId, int id, int action, @Nullable ByteBuffer args, int argsPosition);
/**
* Instructs Flutter to enable/disable its semantics tree, which is used by Flutter to support
* accessibility and related behaviors.
*/
@UiThread
public void setSemanticsEnabled(boolean enabled) {
ensureRunningOnMainThread();
ensureAttachedToNative();
nativeSetSemanticsEnabled(nativeShellHolderId, enabled);
}
private native void nativeSetSemanticsEnabled(long nativeShellHolderId, boolean enabled);
// TODO(mattcarroll): figure out what flags are supported and add javadoc about when/why/where to
// use this.
@UiThread
public void setAccessibilityFeatures(int flags) {
ensureRunningOnMainThread();
ensureAttachedToNative();
nativeSetAccessibilityFeatures(nativeShellHolderId, flags);
}
private native void nativeSetAccessibilityFeatures(long nativeShellHolderId, int flags);
/**
* Gives control of a {@link SurfaceTexture} to Flutter so that Flutter can display that texture
* within Flutter's UI.
*/
@UiThread
public void registerTexture(long textureId, @NonNull SurfaceTextureWrapper textureWrapper) {
ensureRunningOnMainThread();
ensureAttachedToNative();
nativeRegisterTexture(nativeShellHolderId, textureId, textureWrapper);
}
private native void nativeRegisterTexture(
long nativeShellHolderId, long textureId, @NonNull SurfaceTextureWrapper textureWrapper);
/**
* Call this method to inform Flutter that a texture previously registered with {@link
   * #registerTexture(long, SurfaceTextureWrapper)} has a new frame available.
*
* <p>Invoking this method instructs Flutter to update its presentation of the given texture so
* that the new frame is displayed.
*/
@UiThread
public void markTextureFrameAvailable(long textureId) {
ensureRunningOnMainThread();
ensureAttachedToNative();
nativeMarkTextureFrameAvailable(nativeShellHolderId, textureId);
}
private native void nativeMarkTextureFrameAvailable(long nativeShellHolderId, long textureId);
/**
   * Unregisters a texture that was registered with {@link #registerTexture(long, SurfaceTextureWrapper)}.
*/
@UiThread
public void unregisterTexture(long textureId) {
ensureRunningOnMainThread();
ensureAttachedToNative();
nativeUnregisterTexture(nativeShellHolderId, textureId);
}
private native void nativeUnregisterTexture(long nativeShellHolderId, long textureId);
/**
* Executes a Dart entrypoint.
*
* <p>This can only be done once per JNI attachment because a Dart isolate can only be entered
* once.
*/
@UiThread
public void runBundleAndSnapshotFromLibrary(
@NonNull String bundlePath,
@Nullable String entrypointFunctionName,
@Nullable String pathToEntrypointFunction,
@NonNull AssetManager assetManager) {
ensureRunningOnMainThread();
ensureAttachedToNative();
nativeRunBundleAndSnapshotFromLibrary(
nativeShellHolderId,
bundlePath,
entrypointFunctionName,
pathToEntrypointFunction,
assetManager);
}
private native void nativeRunBundleAndSnapshotFromLibrary(
long nativeShellHolderId,
@NonNull String bundlePath,
@Nullable String entrypointFunctionName,
@Nullable String pathToEntrypointFunction,
@NonNull AssetManager manager);
/**
* Sets the handler for all platform messages that come from the attached platform view to Java.
*
* <p>Communication between a specific Flutter context (Dart) and the host platform (Java) is
* accomplished by passing messages. Messages can be sent from Java to Dart with the corresponding
* {@code FlutterJNI} methods:
*
* <ul>
* <li>{@link #dispatchPlatformMessage(String, ByteBuffer, int, int)}
* <li>{@link #dispatchEmptyPlatformMessage(String, int)}
* </ul>
*
* <p>{@code FlutterJNI} is also the recipient of all platform messages sent from its attached
* Flutter context. {@code FlutterJNI} does not know what to do with these messages, so a handler
* is exposed to allow these messages to be processed in whatever manner is desired:
*
* <p>{@code setPlatformMessageHandler(PlatformMessageHandler)}
*
* <p>If a message is received but no {@link PlatformMessageHandler} is registered, that message
* will be dropped (ignored). Therefore, when using {@code FlutterJNI} to integrate a Flutter
* context in an app, a {@link PlatformMessageHandler} must be registered for 2-way Java/Dart
* communication to operate correctly. Moreover, the handler must be implemented such that
* fundamental platform messages are handled as expected. See {@link FlutterNativeView} for an
* example implementation.
*/
@UiThread
public void setPlatformMessageHandler(@Nullable PlatformMessageHandler platformMessageHandler) {
ensureRunningOnMainThread();
this.platformMessageHandler = platformMessageHandler;
}
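  /* Illustrative registration sketch (comment only). It assumes a PlatformMessageHandler whose
     methods mirror the calls made from this class (handleMessageFromDart /
     handlePlatformMessageResponse); the handler body is a made-up example:

     flutterJNI.setPlatformMessageHandler(new PlatformMessageHandler() {
       @Override
       public void handleMessageFromDart(String channel, byte[] message, int replyId) {
         // Inspect channel/message, then answer Dart, e.g. with an empty reply.
         flutterJNI.invokePlatformMessageEmptyResponseCallback(replyId);
       }

       @Override
       public void handlePlatformMessageResponse(int replyId, byte[] reply) {
         // Match replyId against a platform message previously dispatched to Dart.
       }
     });
  */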
// Called by native.
// TODO(mattcarroll): determine if message is nonull or nullable
@SuppressWarnings("unused")
@VisibleForTesting
public void handlePlatformMessage(
@NonNull final String channel, byte[] message, final int replyId) {
if (platformMessageHandler != null) {
platformMessageHandler.handleMessageFromDart(channel, message, replyId);
}
// TODO(mattcarroll): log dropped messages when in debug mode
}
// Called by native to respond to a platform message that we sent.
// TODO(mattcarroll): determine if reply is nonull or nullable
@SuppressWarnings("unused")
private void handlePlatformMessageResponse(int replyId, byte[] reply) {
if (platformMessageHandler != null) {
platformMessageHandler.handlePlatformMessageResponse(replyId, reply);
}
// TODO(mattcarroll): log dropped messages when in debug mode
}
/**
* Sends an empty reply (identified by {@code responseId}) from Android to Flutter over the given
* {@code channel}.
*/
@UiThread
public void dispatchEmptyPlatformMessage(@NonNull String channel, int responseId) {
ensureRunningOnMainThread();
if (isAttached()) {
nativeDispatchEmptyPlatformMessage(nativeShellHolderId, channel, responseId);
} else {
Log.w(
TAG,
"Tried to send a platform message to Flutter, but FlutterJNI was detached from native C++. Could not send. Channel: "
+ channel
+ ". Response ID: "
+ responseId);
}
}
// Send an empty platform message to Dart.
private native void nativeDispatchEmptyPlatformMessage(
long nativeShellHolderId, @NonNull String channel, int responseId);
/** Sends a reply {@code message} from Android to Flutter over the given {@code channel}. */
@UiThread
public void dispatchPlatformMessage(
@NonNull String channel, @Nullable ByteBuffer message, int position, int responseId) {
ensureRunningOnMainThread();
if (isAttached()) {
nativeDispatchPlatformMessage(nativeShellHolderId, channel, message, position, responseId);
} else {
Log.w(
TAG,
"Tried to send a platform message to Flutter, but FlutterJNI was detached from native C++. Could not send. Channel: "
+ channel
+ ". Response ID: "
+ responseId);
}
}
// Send a data-carrying platform message to Dart.
private native void nativeDispatchPlatformMessage(
long nativeShellHolderId,
@NonNull String channel,
@Nullable ByteBuffer message,
int position,
int responseId);
// TODO(mattcarroll): differentiate between channel responses and platform responses.
@UiThread
public void invokePlatformMessageEmptyResponseCallback(int responseId) {
ensureRunningOnMainThread();
if (isAttached()) {
nativeInvokePlatformMessageEmptyResponseCallback(nativeShellHolderId, responseId);
} else {
Log.w(
TAG,
"Tried to send a platform message response, but FlutterJNI was detached from native C++. Could not send. Response ID: "
+ responseId);
}
}
// Send an empty response to a platform message received from Dart.
private native void nativeInvokePlatformMessageEmptyResponseCallback(
long nativeShellHolderId, int responseId);
// TODO(mattcarroll): differentiate between channel responses and platform responses.
@UiThread
public void invokePlatformMessageResponseCallback(
int responseId, @Nullable ByteBuffer message, int position) {
ensureRunningOnMainThread();
if (isAttached()) {
nativeInvokePlatformMessageResponseCallback(
nativeShellHolderId, responseId, message, position);
} else {
Log.w(
TAG,
"Tried to send a platform message response, but FlutterJNI was detached from native C++. Could not send. Response ID: "
+ responseId);
}
}
// Send a data-carrying response to a platform message received from Dart.
private native void nativeInvokePlatformMessageResponseCallback(
long nativeShellHolderId, int responseId, @Nullable ByteBuffer message, int position);
/**
* Adds the given {@code engineLifecycleListener} to be notified of Flutter engine lifecycle
* events, e.g., {@link EngineLifecycleListener#onPreEngineRestart()}.
*/
@UiThread
public void addEngineLifecycleListener(@NonNull EngineLifecycleListener engineLifecycleListener) {
ensureRunningOnMainThread();
engineLifecycleListeners.add(engineLifecycleListener);
}
/**
* Removes the given {@code engineLifecycleListener}, which was previously added using {@link
* #addIsDisplayingFlutterUiListener(FlutterUiDisplayListener)}.
*/
@UiThread
public void removeEngineLifecycleListener(
@NonNull EngineLifecycleListener engineLifecycleListener) {
ensureRunningOnMainThread();
engineLifecycleListeners.remove(engineLifecycleListener);
}
// Called by native.
@SuppressWarnings("unused")
private void onPreEngineRestart() {
for (EngineLifecycleListener listener : engineLifecycleListeners) {
listener.onPreEngineRestart();
}
}
@SuppressWarnings("unused")
@UiThread
public void onDisplayOverlaySurface(int id, int x, int y, int width, int height) {
ensureRunningOnMainThread();
if (platformViewsController == null) {
throw new RuntimeException(
"platformViewsController must be set before attempting to position an overlay surface");
}
platformViewsController.onDisplayOverlaySurface(id, x, y, width, height);
}
@SuppressWarnings("unused")
@UiThread
public void onBeginFrame() {
ensureRunningOnMainThread();
if (platformViewsController == null) {
throw new RuntimeException(
"platformViewsController must be set before attempting to begin the frame");
}
platformViewsController.onBeginFrame();
}
@SuppressWarnings("unused")
@UiThread
public void onEndFrame() {
ensureRunningOnMainThread();
if (platformViewsController == null) {
throw new RuntimeException(
"platformViewsController must be set before attempting to end the frame");
}
platformViewsController.onEndFrame();
}
@SuppressWarnings("unused")
@UiThread
public FlutterOverlaySurface createOverlaySurface() {
ensureRunningOnMainThread();
if (platformViewsController == null) {
throw new RuntimeException(
"platformViewsController must be set before attempting to position an overlay surface");
}
return platformViewsController.createOverlaySurface();
}
@SuppressWarnings("unused")
@UiThread
public void destroyOverlaySurfaces() {
ensureRunningOnMainThread();
if (platformViewsController == null) {
throw new RuntimeException(
"platformViewsController must be set before attempting to destroy an overlay surface");
}
platformViewsController.destroyOverlaySurfaces();
}
/** Sets the localization plugin that is used in various localization methods. */
@UiThread
public void setLocalizationPlugin(@Nullable LocalizationPlugin localizationPlugin) {
ensureRunningOnMainThread();
this.localizationPlugin = localizationPlugin;
}
/** Invoked by native to obtain the results of Android's locale resolution algorithm. */
@SuppressWarnings("unused")
@VisibleForTesting
String[] computePlatformResolvedLocale(@NonNull String[] strings) {
if (localizationPlugin == null) {
return new String[0];
}
List<Locale> supportedLocales = new ArrayList<Locale>();
final int localeDataLength = 3;
for (int i = 0; i < strings.length; i += localeDataLength) {
String languageCode = strings[i + 0];
String countryCode = strings[i + 1];
String scriptCode = strings[i + 2];
// Convert to Locales via LocaleBuilder if available (API 21+) to include scriptCode.
if (Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
Locale.Builder localeBuilder = new Locale.Builder();
if (!languageCode.isEmpty()) {
localeBuilder.setLanguage(languageCode);
}
if (!countryCode.isEmpty()) {
localeBuilder.setRegion(countryCode);
}
if (!scriptCode.isEmpty()) {
localeBuilder.setScript(scriptCode);
}
supportedLocales.add(localeBuilder.build());
} else {
// Pre-API 21, we fall back on scriptCode-less locales.
supportedLocales.add(new Locale(languageCode, countryCode));
}
}
Locale result = localizationPlugin.resolveNativeLocale(supportedLocales);
if (result == null) {
return new String[0];
}
String[] output = new String[localeDataLength];
output[0] = result.getLanguage();
output[1] = result.getCountry();
if (Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
output[2] = result.getScript();
} else {
output[2] = "";
}
return output;
}
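// Illustrative note (not part of the original source): the `strings` argument above is a flat
// array of [language, country, script] triples, e.g. {"en", "US", "", "zh", "CN", "Hans"}
// (example values only), and the returned array uses the same [language, country, script]
// layout for the single resolved locale.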
/** Sets the deferred component manager that is used to download and install split features. */
@UiThread
public void setDeferredComponentManager(
@Nullable DeferredComponentManager deferredComponentManager) {
ensureRunningOnMainThread();
this.deferredComponentManager = deferredComponentManager;
if (deferredComponentManager != null) {
deferredComponentManager.setJNI(this);
}
}
/**
* Called by dart to request that a Dart deferred library corresponding to loadingUnitId be
* downloaded (if necessary) and loaded into the dart vm.
*
* <p>This method delegates the task to DeferredComponentManager, which handles the download and
* loading of the dart library and any assets.
*
* @param loadingUnitId The loadingUnitId is assigned during compile time by gen_snapshot and is
* automatically retrieved when loadLibrary() is called on a dart deferred library.
*/
@SuppressWarnings("unused")
@UiThread
public void requestDartDeferredLibrary(int loadingUnitId) {
if (deferredComponentManager != null) {
deferredComponentManager.installDeferredComponent(loadingUnitId, null);
} else {
// TODO(garyq): Add link to setup/instructions guide wiki.
Log.e(
TAG,
"No DeferredComponentManager found. Android setup must be completed before using split AOT deferred components.");
}
}
/**
* Searches each of the provided paths for a valid Dart shared library .so file and resolves
* symbols to load into the dart VM.
*
* <p>Successful loading of the dart library completes the future returned by loadLibrary() that
* triggered the install/load process.
*
* @param loadingUnitId The loadingUnitId is assigned during compile time by gen_snapshot and is
* automatically retrieved when loadLibrary() is called on a dart deferred library. This is
* used to identify which Dart deferred library the resolved symbols correspond to.
* @param searchPaths An array of paths in which to look for valid dart shared libraries. This
* supports paths within zipped apks, as long as the apks are not compressed, using the
* `path/to/apk.apk!path/inside/apk/lib.so` format. Paths are tried from first to last, and the
* search ends when a library is successfully found. When the found library is invalid, no
* additional paths will be attempted.
*/
@UiThread
public void loadDartDeferredLibrary(int loadingUnitId, @NonNull String[] searchPaths) {
ensureRunningOnMainThread();
ensureAttachedToNative();
nativeLoadDartDeferredLibrary(nativeShellHolderId, loadingUnitId, searchPaths);
}
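// Illustrative note (not part of the original source): per the javadoc above, a searchPaths entry
// may be an ordinary file path (e.g. a hypothetical "/data/app/<package>/lib/arm64/lib.so") or a
// path inside an uncompressed APK in the "path/to/apk.apk!path/inside/apk/lib.so" format.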
private native void nativeLoadDartDeferredLibrary(
long nativeShellHolderId, int loadingUnitId, @NonNull String[] searchPaths);
/**
* Adds the specified AssetManager as an APKAssetResolver in the Flutter Engine's AssetManager.
*
* <p>This may be used to update the engine AssetManager when a new deferred component is
* installed and a new Android AssetManager is created with access to new assets.
*
* @param assetManager An android AssetManager that is able to access the newly downloaded assets.
* @param assetBundlePath The subdirectory that the flutter assets are stored in. The typical
* value is `flutter_assets`.
*/
@UiThread
public void updateJavaAssetManager(
@NonNull AssetManager assetManager, @NonNull String assetBundlePath) {
ensureRunningOnMainThread();
ensureAttachedToNative();
nativeUpdateJavaAssetManager(nativeShellHolderId, assetManager, assetBundlePath);
}
private native void nativeUpdateJavaAssetManager(
long nativeShellHolderId,
@NonNull AssetManager assetManager,
@NonNull String assetBundlePath);
/**
* Indicates that a failure was encountered during the Android portion of downloading a dynamic
* feature module and loading a dart deferred library, which is typically done by
* DeferredComponentManager.
*
* <p>This will inform dart that the future returned by loadLibrary() should complete with an
* error.
*
* @param loadingUnitId The loadingUnitId that corresponds to the dart deferred library that
* failed to install.
* @param error The error message to display.
* @param isTransient When isTransient is false, new attempts to install will automatically result
* in the same error in Dart before the request is passed to Android.
*/
@SuppressWarnings("unused")
@UiThread
public void deferredComponentInstallFailure(
int loadingUnitId, @NonNull String error, boolean isTransient) {
ensureRunningOnMainThread();
nativeDeferredComponentInstallFailure(loadingUnitId, error, isTransient);
}
private native void nativeDeferredComponentInstallFailure(
int loadingUnitId, @NonNull String error, boolean isTransient);
@SuppressWarnings("unused")
@UiThread
public void onDisplayPlatformView(
int viewId,
int x,
int y,
int width,
int height,
int viewWidth,
int viewHeight,
FlutterMutatorsStack mutatorsStack) {
ensureRunningOnMainThread();
if (platformViewsController == null) {
throw new RuntimeException(
"platformViewsController must be set before attempting to position a platform view");
}
platformViewsController.onDisplayPlatformView(
viewId, x, y, width, height, viewWidth, viewHeight, mutatorsStack);
}
// TODO(mattcarroll): determine if this is nonnull or nullable
@UiThread
public Bitmap getBitmap() {
ensureRunningOnMainThread();
ensureAttachedToNative();
return nativeGetBitmap(nativeShellHolderId);
}
// TODO(mattcarroll): determine if this is nonnull or nullable
private native Bitmap nativeGetBitmap(long nativeShellHolderId);
/**
* Notifies the Dart VM of a low memory event, or that the application is in a state such that now
* is an appropriate time to free resources, such as going to the background.
*
* <p>This is distinct from sending a SystemChannel message about low memory, which only notifies
* the running Flutter application.
*/
@UiThread
public void notifyLowMemoryWarning() {
ensureRunningOnMainThread();
ensureAttachedToNative();
nativeNotifyLowMemoryWarning(nativeShellHolderId);
}
private native void nativeNotifyLowMemoryWarning(long nativeShellHolderId);
private void ensureRunningOnMainThread() {
if (Looper.myLooper() != mainLooper) {
throw new RuntimeException(
"Methods marked with @UiThread must be executed on the main thread. Current thread: "
+ Thread.currentThread().getName());
}
}
/**
* Delegate responsible for creating and updating Android-side caches of Flutter's semantics tree
* and custom accessibility actions.
*
* <p>{@link AccessibilityBridge} is an example of an {@code AccessibilityDelegate}.
*/
public interface AccessibilityDelegate {
/**
* Sends new custom accessibility actions from Flutter to Android.
*
* <p>Implementers are expected to maintain an Android-side cache of custom accessibility
* actions. This method provides new actions to add to that cache.
*/
void updateCustomAccessibilityActions(@NonNull ByteBuffer buffer, @NonNull String[] strings);
/**
* Sends new {@code SemanticsNode} information from Flutter to Android.
*
* <p>Implementers are expected to maintain an Android-side cache of Flutter's semantics tree.
* This method provides updates from Flutter for the Android-side semantics tree cache.
*/
void updateSemantics(@NonNull ByteBuffer buffer, @NonNull String[] strings);
}
public interface AsyncWaitForVsyncDelegate {
void asyncWaitForVsync(final long cookie);
}
}
|
package edu.dynamic.dynamiz.parser;
import java.util.ArrayList;
import java.util.List;
import edu.dynamic.dynamiz.controller.Command;
import edu.dynamic.dynamiz.controller.CommandAdd;
import edu.dynamic.dynamiz.controller.CommandDelete;
import edu.dynamic.dynamiz.controller.CommandDo;
import edu.dynamic.dynamiz.controller.CommandHelp;
import edu.dynamic.dynamiz.controller.CommandList;
import edu.dynamic.dynamiz.controller.CommandRedo;
import edu.dynamic.dynamiz.controller.CommandSearch;
import edu.dynamic.dynamiz.controller.CommandType;
import edu.dynamic.dynamiz.controller.CommandUndo;
import edu.dynamic.dynamiz.controller.CommandUpdate;
import edu.dynamic.dynamiz.structure.MyDate;
import edu.dynamic.dynamiz.structure.EventItem;
import edu.dynamic.dynamiz.structure.TaskItem;
import edu.dynamic.dynamiz.structure.ToDoItem;
/**
* This is a class which stores the information of the parsed command line given by the user.
*
* Represents the list of arguments parsed against the applicable options, together with the
* Command constructed from them.
*
* This allows querying whether an option is present via hasOption(String opt) and retrieving the
* value of an option via getOptionValue(String opt).
*
* @author nhan
*
*/
public class CommandLine {
private Parser parser;
private CommandType commandType;
private Options options;
private String param;
private Command command;
public CommandLine() {
this.commandType = null;
this.options = null;
this.param = null;
}
public CommandLine(CommandType cmdType, Options options, String param) {
this.parser = Parser.getInstance();
this.commandType = cmdType;
this.options = options;
this.param = param;
if (!initialiseCommand()) {
System.out.println("Something is not right");
}
}
private boolean initialiseCommand() {
switch (this.commandType) {
case ADD:
this.command = parseAdd();
break;
case DELETE:
this.command = parseDelete();
break;
case UPDATE:
this.command = parseUpdate();
break;
case LIST:
this.command = parseList();
break;
case SEARCH:
this.command = parseSearch();
break;
case UNDO:
this.command = parseUndo();
break;
case REDO:
this.command = parseRedo();
break;
case HELP:
this.command = parseHelp();
break;
case DO:
this.command = parseDo();
break;
case EXIT:
this.command = parseExit();
break;
default:
}
return true;
}
private String getFirstOptionValue(Options commandOptions, OptionType optionType) {
Option option = commandOptions.getOptions(optionType).get(0);
String optionStr = option.getValues().get(0);
return optionStr;
}
private List<String> getFirstOptionValues(Options commandOptions, OptionType optionType) {
Option option = commandOptions.getOptions(optionType).get(0);
return option.getValues();
}
private Command parseAdd() {
Options commandOptions = extractOptions(this.options);
ToDoItem commandItem = null;
// Handling date
boolean hasStart = commandOptions.hasOption(OptionType.START_TIME);
boolean hasEnd = commandOptions.hasOption(OptionType.END_TIME);
boolean hasBoth = hasStart && hasEnd;
try {
MyDate startDate = null;
MyDate endDate = null;
if (hasStart) {
startDate = parser.parseDate(getFirstOptionValue(commandOptions, OptionType.START_TIME));
}
if (hasEnd) {
endDate = parser.parseDate(getFirstOptionValue(commandOptions, OptionType.END_TIME));
}
if (hasBoth) {
// TODO: Handle ambiguity here
commandItem = new EventItem(this.param, startDate, endDate);
} else if (hasEnd) {
commandItem = new TaskItem(this.param, endDate);
} else {
commandItem = new ToDoItem(this.param);
}
} catch (IllegalArgumentException e) {
// TODO: Implement Exception handling here
}
// Handling Priority (if applicable)
if (commandOptions.hasOption(OptionType.PRIORITY)) {
int priority = Integer.parseInt(getFirstOptionValue(commandOptions, OptionType.PRIORITY));
commandItem.setPriority(priority);
}
return new CommandAdd(commandItem);
}
private Command parseDelete() {
// TODO: Implement ability to delete tasks having the same option
return new CommandDelete(param);
}
private Command parseList() {
Options commandOptions = extractOptions(this.options);
// Parse Start and End Date
List<MyDate> commandStartDateList = new ArrayList<MyDate>();
List<MyDate> commandEndDateList = new ArrayList<MyDate>();
List<Integer> commandPriorityList = new ArrayList<Integer>();
List<OptionType> commandOrderList = new ArrayList<OptionType>();
if (commandOptions.hasOption(OptionType.START_TIME)) {
commandStartDateList = extractDateList(commandOptions, OptionType.START_TIME);
}
if (commandOptions.hasOption(OptionType.END_TIME)) {
commandEndDateList = extractDateList(commandOptions, OptionType.END_TIME);
}
if (commandOptions.hasOption(OptionType.PRIORITY)) {
commandPriorityList = extractPriorityList(commandOptions);
}
if (commandOptions.hasOption(OptionType.ORDER_BY)) {
commandOrderList = extractOptionTypeList(commandOptions);
}
return new CommandList(commandOrderList.toArray(new OptionType[commandOrderList.size()]));
}
private Command parseSearch() {
// TODO: Implement ability to search with keywords and options
Options commandOptions = extractOptions(this.options);
// Parse Start and End Date
MyDate commandStartDate = null;
MyDate commandEndDate = null;
int commandPriority = OptionType.PRIORITY_UNCHANGED;
List<OptionType> commandOrderList= new ArrayList<OptionType>();
if (commandOptions.hasOption(OptionType.START_TIME)) {
commandStartDate = parser.parseDate(getFirstOptionValue(commandOptions, OptionType.START_TIME));
}
if (commandOptions.hasOption(OptionType.END_TIME)) {
commandEndDate = parser.parseDate(getFirstOptionValue(commandOptions, OptionType.END_TIME));
}
if (commandOptions.hasOption(OptionType.PRIORITY)) {
commandPriority = Integer.parseInt(getFirstOptionValue(commandOptions, OptionType.PRIORITY));
}
if (commandOptions.hasOption(OptionType.ORDER_BY)) {
List<String> commandOrderStrList = getFirstOptionValues(commandOptions, OptionType.ORDER_BY);
for (String s: commandOrderStrList) {
commandOrderList.add(OptionType.fromString(s));
}
}
return new CommandSearch(param, commandPriority, commandStartDate, commandEndDate,
commandOrderList.toArray(new OptionType[commandOrderList.size()]));
}
private Command parseUndo() {
return new CommandUndo();
}
private Command parseRedo() {
return new CommandRedo();
}
private Command parseUpdate() {
// Check param. If it contains more than just the item ID, update the description.
String itemID = Util.getFirstWord(this.param);
String extraDescription = Util.stripFirstWord(this.param);
Options commandOptions = extractOptions(this.options);
// Parse Start and End Date
MyDate commandStartDate = null;
MyDate commandEndDate = null;
if (commandOptions.hasOption(OptionType.START_TIME)) {
commandStartDate = parser.parseDate(getFirstOptionValue(commandOptions, OptionType.START_TIME));
}
if (commandOptions.hasOption(OptionType.END_TIME)) {
commandEndDate = parser.parseDate(getFirstOptionValue(commandOptions, OptionType.END_TIME));
}
// Parse Priority
int commandPriority = OptionType.PRIORITY_UNCHANGED;
if (commandOptions.hasOption(OptionType.PRIORITY)) {
commandPriority = Integer.parseInt(getFirstOptionValue(commandOptions, OptionType.PRIORITY));
}
return new CommandUpdate(itemID, extraDescription, commandPriority,
commandStartDate, commandEndDate);
}
private Command parseHelp() {
// TODO: Implement command Help
return new CommandHelp();
}
private Command parseDo() {
return new CommandDo(param);
}
private Command parseExit() {
return null;
}
/**
* Get the list of applicable options from all the options given
*
* @param options
* The unchecked collection of Option
* @return the checked collection of Option
*/
public Options extractOptions(Options options) {
Options opts = new Options();
for (OptionType optType : CommandType.ADD.getApplicableOptions()) {
if (options.hasOption(optType)) {
List<Option> optList = options.getOptions(optType);
opts.add(optList);
}
}
return opts;
}
public List<MyDate> extractDateList(Options options, OptionType dateType) {
List<String> values = options.getOptions(dateType).get(0).getValues();
List<MyDate> dateList = new ArrayList<MyDate>();
for (String value: values) {
MyDate date = parser.parseDate(value);
if (date != null) {
dateList.add(date);
}
}
return dateList;
}
public List<Integer> extractPriorityList(Options options) {
List<String> values = options.getOptions(OptionType.PRIORITY).get(0).getValues();
List<Integer> priorityList = new ArrayList<Integer>();
for (String value: values) {
Integer priority = Integer.parseInt(value);
priorityList.add(priority);
}
return priorityList;
}
public List<OptionType> extractOptionTypeList(Options options) {
List<String> values = options.getOptions(OptionType.ORDER_BY).get(0).getValues();
List<OptionType> typeList = new ArrayList<OptionType>();
for (String value: values) {
typeList.add(OptionType.fromString(value));
}
return typeList;
}
// TODO: Needs testing after refactoring
public List<Object> extractValueList(Options options, OptionType type) {
List<String> values = options.getOptions(type).get(0).getValues();
List<Object> valueList = new ArrayList<Object>();
for (String value: values) {
switch (type) {
case START_TIME : // Fall through
case END_TIME :
MyDate date = parser.parseDate(value);
if (date != null) {
valueList.add(date);
}
break;
case PRIORITY :
Integer priority = Integer.parseInt(value);
if (priority != null) {
valueList.add(priority);
}
break;
case ORDER_BY :
OptionType optType = OptionType.fromString(value);
if (optType != null) {
valueList.add(optType);
}
break;
default: throw new IllegalArgumentException();
}
}
return valueList;
}
public CommandType getCommandType() {
return commandType;
}
public void setCommandType(CommandType command) {
this.commandType = command;
}
public Options getOptions() {
return options;
}
public void setOptions(Options options) {
this.options = options;
}
public String getParam() {
return param;
}
public void setParam(String param) {
this.param = param;
}
public Command getCommand() {
return command;
}
public void setCommand(Command command) {
this.command = command;
}
public int getNumberOfOptions() {
return options.getNumOfOptions();
}
@Override
public String toString() {
StringBuffer sb = new StringBuffer();
sb.append("Command Type: " + commandType.toString() + "\n");
sb.append("Value: " + param + "\n");
sb.append("Options: \n" + options.toString());
return sb.toString();
}
/**
* Calls execute() on the corresponding Command. For example, if the parsed
* CommandLine has the CommandType ADD, it will call CommandAdd's execute().
*/
public void execute() {
if (command != null) {
command.execute();
} else {
throw new IllegalArgumentException("Null command");
}
}
}
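// Illustrative usage sketch (not part of the original source), assuming an Options instance has
// already been built by the parsing stage for the ADD command:
//
//   CommandLine cmdLine = new CommandLine(CommandType.ADD, options, "Buy groceries");
//   cmdLine.execute();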
|
package gov.nih.nci.integration.catissue.client;
import edu.wustl.catissuecore.domain.CollectionProtocol;
import edu.wustl.catissuecore.domain.DisposalEventParameters;
import edu.wustl.catissuecore.domain.FluidSpecimen;
import edu.wustl.catissuecore.domain.Participant;
import edu.wustl.catissuecore.domain.Specimen;
import edu.wustl.catissuecore.domain.SpecimenCharacteristics;
import edu.wustl.catissuecore.domain.SpecimenCollectionGroup;
import edu.wustl.catissuecore.domain.TissueSpecimen;
import edu.wustl.catissuecore.domain.User;
import gov.nih.nci.integration.catissue.domain.SpecimenDetail;
import gov.nih.nci.integration.catissue.domain.Specimens;
import gov.nih.nci.system.applicationservice.ApplicationException;
import java.io.StringReader;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.thoughtworks.xstream.XStream;
import com.thoughtworks.xstream.io.xml.StaxDriver;
/**
* This is the client class for the Specimen flow. It provides operations like CreateSpecimen, UpdateSpecimen and RollbackSpecimen.
* @author Rohit Gupta
*/
public class CaTissueSpecimenClient {
private static final Logger LOG = LoggerFactory.getLogger(CaTissueSpecimenClient.class);
private final CaTissueAPIClientWithRegularAuthentication caTissueAPIClient;
private XStream xStream = new XStream(new StaxDriver());
private static final String ACTIVITY_STATUS_DISABLED = "Disabled";
public CaTissueSpecimenClient(String loginName, String password) throws Exception
{
super();
Thread.currentThread().setContextClassLoader(CaTissueSpecimenClient.class.getClassLoader());
this.caTissueAPIClient = new CaTissueAPIClientWithRegularAuthentication(loginName, password);
xStream.alias("specimens", Specimens.class);
xStream.alias("participant", Participant.class);
xStream.alias("specimenDetail", SpecimenDetail.class);
xStream.alias("specimen", Specimen.class);
xStream.alias("TissueSpecimen", TissueSpecimen.class);
xStream.alias("FluidSpecimen", FluidSpecimen.class);
xStream.alias("collectionProtocol", CollectionProtocol.class);
xStream.addImplicitCollection(Specimens.class, "specimenDetailList");
}
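// Illustrative note (not part of the original source): with the aliases and implicit collection
// registered above, the incoming XML is expected to have a <specimens> root containing one
// <specimenDetail> element per specimen; the exact nested elements mirror the SpecimenDetail
// and Specimen fields.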
/**
* This method is used to check if Specimen(s) already exist in caTissue
* @throws ApplicationException - Throws exception if Specimen already exist
*/
public String isSpecimensExist(String specimenListXMLStr) throws ApplicationException{
LOG.debug("Inside isSpecimensExist...The Incoming XML for isSpecimensExist() is --> " + specimenListXMLStr);
// System.out.println("Inside isSpecimensExist...The Incoming XML for isSpecimensExist() is --> " + specimenListXMLStr);
// Parse the incoming XML String. The returned object will contain data from the incoming specimens XML
Specimens specimens = parseSpecimenListXML(specimenListXMLStr);
return isSpecimensAlreadyExist(specimens);
}
/**
* This method is used to fetch the Specimen(s) details for given specimen XMLString
* @throws ApplicationException
*/
public String getExistingSpecimens(String specimenListXMLStr) throws ApplicationException{
LOG.debug("Inside getExistingSpecimens...The Incoming XML for getExistingSpecimens() is --> " + specimenListXMLStr);
// System.out.println("Inside getExistingSpecimens...The Incoming XML for getExistingSpecimens() is --> " + specimenListXMLStr);
// Parse the incoming XML String. The returned object will contain data from the incoming specimens XML
Specimens incomingSpecimens = parseSpecimenListXML(specimenListXMLStr);
// Fetch the existing Specimens
Specimens existingSpecimens = fetchExistingSpecimens(incomingSpecimens);
return xStream.toXML(existingSpecimens);
}
/**
* Creates specimens in caTissue
* @param specimenListXMLStr -- The XML string for creating the bio-specimen which may contain multiple specimens.
*/
public String createSpecimens(String specimenListXMLStr) throws ApplicationException{
LOG.debug("Inside CaTissueSpecimenClient...The Incoming XML for createSpecimens() is --> " + specimenListXMLStr);
// System.out.println("Inside CaTissueSpecimenClient...The Incoming XML for createSpecimens() is --> " + specimenListXMLStr);
// Parse the incoming XML String. The returned object will contain data from the incoming specimens XML
Specimens specimens = parseSpecimenListXML(specimenListXMLStr);
// perform the actual logic to create the Specimens.. Also do the rollback, if required
performCreateSpecimens(specimens);
//Returning NULL here as we don't need the returned values at the moment.
// We can return the list of Created Specimen, if required.
return null;
}
/**
* Updates specimens in caTissue
* @param specimenListXMLStr -- The XML string for creating the specimens which may contain multiple specimens.
*/
public String updateSpecimens(String specimenListXMLStr) throws ApplicationException{
LOG.debug("Inside CaTissueSpecimenClient... updateSpecimens()..The Incoming XML is --> " + specimenListXMLStr);
// System.out.println("Inside CaTissueSpecimenClient... updateSpecimens()..The Incoming XML is --> " + specimenListXMLStr);
// This object contain data from the incoming specimens xml
Specimens specimens = parseSpecimenListXML(specimenListXMLStr);
// perform the actual logic to Updating the Specimens..
List<Specimen> existingSpecimenList = performUpdateSpecimens(specimens);
// Copy the existing Specimens and return them in the form of XML
return xStream.toXML(copyFromExistingSpecimen(existingSpecimenList));
}
/**
* This method is used to Rollback the specimen changes for createSpecimen flow
*/
public String rollbackCreatedSpecimens(String specimenListXMLStr) throws ApplicationException{
LOG.debug("Inside CaTissueSpecimenClient... rollbackCreatedSpecimens()..The Incoming XML is --> " + specimenListXMLStr);
// System.out.println("Inside CaTissueSpecimenClient Impl Class... rollbackSpecimens()..The Incoming XML is --> " + specimenListXMLStr);
// This object contain data from the incoming specimens xml
Specimens specimens = parseSpecimenListXML(specimenListXMLStr);
// Roll back the created specimens
performRollbackCreatedSpecimens(specimens);
return null;
}
/**
* This method is used to Rollback the specimen changes for updateSpecimen flow
* @throws ApplicationException
*/
public String rollbackUpdatedSpecimens(String specimenListXMLStr) throws ApplicationException{
LOG.debug("Inside CaTissueSpecimenClient... rollbackUpdatedSpecimens()..The Incoming XML is --> " + specimenListXMLStr);
// System.out.println("Inside CaTissueSpecimenClient Impl Class... rollbackUpdatedSpecimens()..The Incoming XML is --> " + specimenListXMLStr);
// This object contain data from the incoming specimens xml
Specimens specimens = parseSpecimenListXML(specimenListXMLStr);
// Roll back the updated specimens
performRollbackUpdatedSpecimens(specimens);
return null;
}
/**
* This method is used to parse the incoming XML string and populate the 'Specimens' object
* @param specimenListXMLStr the incoming specimen list XML string
* @return the populated Specimens object
*/
private Specimens parseSpecimenListXML(String specimenListXMLStr) {
Specimens specimens = (Specimens) xStream.fromXML(new StringReader(specimenListXMLStr));
return specimens;
}
private String isSpecimensAlreadyExist(Specimens specimens) throws ApplicationException{
List<SpecimenDetail> specimenDetailList = specimens.getSpecimenDetailList();
Iterator<SpecimenDetail> specimenDetailItr = specimenDetailList.iterator();
while(specimenDetailItr.hasNext()){
SpecimenDetail specimenDetail = specimenDetailItr.next();
String specimenLabel = specimenDetail.getSpecimen().getLabel();
Specimen existingSpecimen= getExistingSpecimen(specimenLabel);
if(existingSpecimen!=null){
throw new ApplicationException( "Submission failed since a Specimen with the same LABEL already exists.");
}
}
return null;
}
private Specimens fetchExistingSpecimens(Specimens incomingSpecimens ) throws ApplicationException{
List<Specimen> existingSpecimenList = new ArrayList<Specimen>();
List<SpecimenDetail> specimenDetailList = incomingSpecimens.getSpecimenDetailList();
Iterator<SpecimenDetail> specimenDetailItr = specimenDetailList.iterator();
while(specimenDetailItr.hasNext()){
SpecimenDetail specimenDetail = specimenDetailItr.next();
Specimen existingSpecimen= getExistingSpecimen(specimenDetail.getSpecimen().getLabel());
// check if the request data is correct by doing validation checks
if(! isUpdateSpecimenRequestDataValid(specimenDetail, existingSpecimen)){
throw new ApplicationException("UpdateSpecimen Request Failed for Label"+ specimenDetail.getSpecimen().getLabel() +" and exception is COLLECTION_PROTOCOL or COLLECTION_PROTOCOL_EVENT NOT MATCHING with Existing Specimen");
}
existingSpecimenList.add(existingSpecimen);
}
return copyFromExistingSpecimen(existingSpecimenList);
}
/**
* This method has the code/logic to call the createSpecimen.
* @param specimens to be created
*/
private void performCreateSpecimens(Specimens specimens) throws ApplicationException{
List<SpecimenDetail> specimenDetailList = specimens.getSpecimenDetailList();
Iterator<SpecimenDetail> specimenDetailItr = null;
Specimen specimen = null;
for(specimenDetailItr = specimenDetailList.iterator(); specimenDetailItr.hasNext();)
{
SpecimenDetail specimenDetail = null;
specimenDetail = (SpecimenDetail)specimenDetailItr.next();
CollectionProtocol cp = specimenDetail.getCollectionProtocol();
specimen= specimenDetail.getSpecimen();
boolean scgFound = false;
List<SpecimenCollectionGroup> scgList = getSpecimenCollectionGroupList(specimenDetail);
if ((scgList != null) && (scgList.size() > 0)) {
for (SpecimenCollectionGroup scg : scgList) {
CollectionProtocol cpObj = scg.getCollectionProtocolRegistration().getCollectionProtocol();
if (cpObj.getTitle().equals(cp.getTitle())) {
scgFound= true; // Participant has a SpecimenCollectionGroup under the given protocol
specimen.setSpecimenCollectionGroup(scg);
break;
}
}
}
if (!scgFound) {
// throw exception if no matching Specimen Collection Group exists
LOG.error("Specimen Collection Group was not found in caTissue for Label " + specimen.getLabel());
throw new ApplicationException( "Specimen_Collection_Group_Not_Found");
}
try{
// method call to createSpecimen
createSpecimen(specimen);
}catch(Exception e){
throw new ApplicationException("CreateSpecimen Failed for Label"+ specimen.getLabel() +" and exception is " +e.getCause());
}
}
}
private Specimen createSpecimen(Specimen specimen) throws ApplicationException{
return caTissueAPIClient.insert(specimen);
}
/**
* This method has the code/logic to Update the Specimens.
* @throws Exception
*/
private List<Specimen> performUpdateSpecimens(Specimens specimens) throws ApplicationException {
List<Specimen> existSpecimenList = new ArrayList<Specimen>();
List<SpecimenDetail> specimenDetailList = specimens.getSpecimenDetailList();
Iterator<SpecimenDetail> specimenDetailItr = null;
SpecimenDetail specimenDetail = null;
try{
for(specimenDetailItr = specimenDetailList.iterator(); specimenDetailItr.hasNext();)
{
specimenDetail = specimenDetailItr.next();
Specimen incomingSpecimen = specimenDetail.getSpecimen();
// Get the corresponding existing Specimen using the Label
Specimen existingSpecimen = getExistingSpecimen(incomingSpecimen.getLabel());
incomingSpecimen.setId(existingSpecimen.getId());
incomingSpecimen.setSpecimenCollectionGroup(existingSpecimen.getSpecimenCollectionGroup());//Specimen Collection Group is required.
incomingSpecimen.setLineage(existingSpecimen.getLineage());//Lineage should not be changed while updating the specimen
incomingSpecimen.getSpecimenCharacteristics().setId(existingSpecimen.getSpecimenCharacteristics().getId()); // The given object has a null identifier: SpecimenCharacteristics
updateSpecimen(incomingSpecimen);
// setting the existing Specimen, which will be required in case of rollback
existSpecimenList.add(existingSpecimen);
}
}catch(Exception e){
throw new ApplicationException("UpdateSpecimen Failed for Label"+ specimenDetail.getSpecimen().getLabel() +" and exception is " +e.getCause());
}
return existSpecimenList;
}
private boolean isUpdateSpecimenRequestDataValid(SpecimenDetail inSpecimenDetail, Specimen existingSpecimen){
boolean hasValidData = true;
String inCPE= inSpecimenDetail.getCollectionProtocolEvent();
String existCPE= existingSpecimen.getSpecimenCollectionGroup().getCollectionProtocolEvent().getCollectionPointLabel();
String inCP = inSpecimenDetail.getCollectionProtocol().getTitle();
String existCP = existingSpecimen.getSpecimenCollectionGroup().getCollectionProtocolEvent().getCollectionProtocol().getTitle();
if( ! inCPE.equalsIgnoreCase(existCPE) || !inCP.equalsIgnoreCase(existCP)){
hasValidData = false;
}
return hasValidData;
}
private void performRollbackUpdatedSpecimens(Specimens specimens){
// System.out.println("Inside Specimen Client Impl... Rollback...");
List<SpecimenDetail> specimenDetailList = specimens.getSpecimenDetailList();
Iterator<SpecimenDetail> specimenDetailItr = specimenDetailList.iterator();
while(specimenDetailItr.hasNext()){
SpecimenDetail specimenDetail = specimenDetailItr.next();
Specimen existingSpecimen = specimenDetail.getSpecimen();
try {
updateSpecimen(existingSpecimen);
} catch (ApplicationException e) {
LOG.error("Rollback of updated Specimen failed for Label " + existingSpecimen.getLabel(), e);
}
}
}
/**
* This method is used to get a specimen on the basis of the Label
* @param label the label of the specimen to fetch
* @return the existing Specimen with the given label
* @throws ApplicationException
*/
private Specimen getExistingSpecimen(String label) throws ApplicationException{
Specimen specimen = new Specimen();
specimen.setLabel(label);// set the cdmsSpecimenId
// get the specimen, corresponding to the cdmsSpecimenId
specimen = caTissueAPIClient.searchById(Specimen.class, specimen);
return specimen;
}
private Specimen updateSpecimen(Specimen specimen) throws ApplicationException{
return caTissueAPIClient.update(specimen);
}
private Specimens copyFromExistingSpecimen(List<Specimen> existingSpecimenList){
Specimens existingSpecimens = new Specimens();
Iterator<Specimen> specimenItr = null;
for(specimenItr = existingSpecimenList.iterator(); specimenItr.hasNext();)
{
Specimen existingSpecimen =specimenItr.next();
Specimen specimen = null;
if("Tissue".equalsIgnoreCase(existingSpecimen.getSpecimenClass())){
specimen= new TissueSpecimen();
}else if("Fluid".equalsIgnoreCase(existingSpecimen.getSpecimenClass())){
specimen = new FluidSpecimen();
}
specimen.setId(existingSpecimen.getId());
specimen.setInitialQuantity(existingSpecimen.getInitialQuantity());
specimen.setPathologicalStatus(existingSpecimen.getPathologicalStatus());
specimen.setSpecimenClass(existingSpecimen.getSpecimenClass().trim());
specimen.setSpecimenType(existingSpecimen.getSpecimenType());
specimen.setActivityStatus(existingSpecimen.getActivityStatus());
specimen.setAvailableQuantity(existingSpecimen.getAvailableQuantity());
specimen.setBarcode(existingSpecimen.getBarcode());
specimen.setLabel(existingSpecimen.getLabel());
SpecimenCollectionGroup specimenCollectionGroup = new SpecimenCollectionGroup();
specimenCollectionGroup.setId(existingSpecimen.getSpecimenCollectionGroup().getId());
specimen.setSpecimenCollectionGroup(specimenCollectionGroup);
SpecimenCharacteristics chars = new SpecimenCharacteristics();
chars.setTissueSide(existingSpecimen.getSpecimenCharacteristics().getTissueSide());
chars.setTissueSite(existingSpecimen.getSpecimenCharacteristics().getTissueSite());
specimen.setSpecimenCharacteristics(chars);
specimen.getSpecimenCharacteristics().setId(existingSpecimen.getSpecimenCharacteristics().getId());
specimen.setLineage(existingSpecimen.getLineage());
specimen.setIsAvailable(existingSpecimen.getIsAvailable());
specimen.setCollectionStatus(existingSpecimen.getCollectionStatus());
SpecimenDetail specimenDetail = new SpecimenDetail();
specimenDetail.setSpecimen(specimen);
existingSpecimens.add(specimenDetail);
}
return existingSpecimens;
}
private void performRollbackCreatedSpecimens(Specimens specimens) throws ApplicationException{
List<SpecimenDetail> specimenDetailList = specimens.getSpecimenDetailList();
Iterator<SpecimenDetail> specimenDetailItr = specimenDetailList.iterator();
while(specimenDetailItr.hasNext()){
SpecimenDetail specimenDetail = specimenDetailItr.next();
String specimenLabel = specimenDetail.getSpecimen().getLabel();
Specimen existingSpecimen;
try {
existingSpecimen = getExistingSpecimen(specimenLabel);
} catch (ApplicationException e) {
return;
}
softDeleteSpecimen(existingSpecimen);
}
}
private void softDeleteSpecimen(Specimen existingSpecimen) throws ApplicationException{
// First change the Label of the Specimen to some dummy value.. like "DELETED_Label_+Timestamp"
Specimen updatedSpecimen = updateSpecimenLabel(existingSpecimen);
// Then set the Specimen to Disabled
DisposalEventParameters disposalEventParameters = new DisposalEventParameters();
updatedSpecimen.setActivityStatus(ACTIVITY_STATUS_DISABLED);
disposalEventParameters.setSpecimen(updatedSpecimen);
disposalEventParameters.setActivityStatus(ACTIVITY_STATUS_DISABLED);
disposalEventParameters.setComment("Rollback the Specimen");
disposalEventParameters.setReason("Rollback the Specimen");
disposalEventParameters.setTimestamp(new Date());
disposalEventParameters.setUser(getCaTissueAdminUser());
caTissueAPIClient.insert(disposalEventParameters);
}
private Specimen updateSpecimenLabel(Specimen specimen) throws ApplicationException{
specimen.setLabel("DELETED_Label_"+getCurrentDateTime() );
specimen.setBarcode("DELETED_Barcode_"+getCurrentDateTime());
Specimen updatedSpecimen = updateSpecimen(specimen);
return updatedSpecimen;
}
private String getCurrentDateTime() {
Calendar cal = Calendar.getInstance();
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
return sdf.format(cal.getTime());
}
/**
* This method is used to get a User on the basis of the LoginName
* @return the caTissue admin User
* @throws ApplicationException
*/
private User getCaTissueAdminUser() throws ApplicationException{
User user = new User();
user.setLoginName("admin@admin.com");
user = caTissueAPIClient.searchById(User.class, user);
return user;
}
private List<SpecimenCollectionGroup> getSpecimenCollectionGroupList(SpecimenDetail specimenDetail) throws ApplicationException{
String title = specimenDetail.getCollectionProtocol().getTitle() ;
String label = specimenDetail.getCollectionProtocolEvent();
return caTissueAPIClient.getApplicationService().query(CqlUtility.getSpecimenCollectionGroupListQuery(title, label));
}
}
|
package edu.uw.easysrl.syntax.parser;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;
import edu.uw.easysrl.dependencies.DependencyGenerator;
import edu.uw.easysrl.dependencies.ResolvedDependency;
import edu.uw.easysrl.dependencies.UnlabelledDependency;
import edu.uw.easysrl.main.InputReader.InputToParser;
import edu.uw.easysrl.main.InputReader.InputWord;
import edu.uw.easysrl.semantics.lexicon.Lexicon;
import edu.uw.easysrl.syntax.grammar.Category;
import edu.uw.easysrl.syntax.grammar.SyntaxTreeNode;
import edu.uw.easysrl.syntax.grammar.SyntaxTreeNode.SyntaxTreeNodeLeaf;
import edu.uw.easysrl.syntax.tagger.POSTagger;
import edu.uw.easysrl.syntax.training.PipelineTrainer.LabelClassifier;
import edu.uw.easysrl.util.Util.Scored;
public abstract class SRLParser {
private final POSTagger tagger;
private SRLParser(final POSTagger tagger) {
this.tagger = tagger;
}
public final List<CCGandSRLparse> parseTokens(final InputToParser tokens) {
return parseTokens2(tagger.tag(tokens));
}
protected abstract List<CCGandSRLparse> parseTokens2(InputToParser tokens);
public static class BackoffSRLParser extends SRLParser {
private final SRLParser[] parsers;
private final AtomicInteger backoffs = new AtomicInteger();
public BackoffSRLParser(final SRLParser... parsers) {
super(parsers[0].tagger);
this.parsers = parsers;
}
@Override
protected List<CCGandSRLparse> parseTokens2(final InputToParser tokens) {
for (final SRLParser parser : parsers) {
final List<CCGandSRLparse> parses = parser.parseTokens(tokens);
if (parses != null) {
return parses;
} else {
backoffs.getAndIncrement();
}
}
return null;
}
@Override
public int getMaxSentenceLength() {
return parsers[parsers.length - 1].getMaxSentenceLength();
}
}
public abstract int getMaxSentenceLength();
public static class SemanticParser extends SRLParser {
private final SRLParser parser;
private final Lexicon lexicon;
public SemanticParser(final SRLParser parser, final Lexicon lexicon) {
super(parser.tagger);
this.parser = parser;
this.lexicon = lexicon;
}
@Override
protected List<CCGandSRLparse> parseTokens2(final InputToParser tokens) {
List<CCGandSRLparse> parse = parser.parseTokens(tokens);
if (parse != null) {
parse = parse.stream().map(x -> x.addSemantics(lexicon)).collect(Collectors.toList());
}
return parse;
}
@Override
public int getMaxSentenceLength() {
return parser.getMaxSentenceLength();
}
}
public static class JointSRLParser extends SRLParser {
private final Parser parser;
public JointSRLParser(final Parser parser, final POSTagger tagger) {
super(tagger);
this.parser = parser;
}
@Override
protected List<CCGandSRLparse> parseTokens2(final InputToParser tokens) {
final List<Scored<SyntaxTreeNode>> parses = parser.doParsing(tokens);
if (parses == null) {
return null;
} else {
return parses
.stream()
.map(x -> new CCGandSRLparse(x.getObject(), x.getObject().getAllLabelledDependencies(), tokens
.getInputWords())).collect(Collectors.toList());
}
}
@Override
public int getMaxSentenceLength() {
return parser.getMaxSentenceLength();
}
}
public static class CCGandSRLparse implements Serializable {
private static final long serialVersionUID = 1L;
private final SyntaxTreeNode ccgParse;
private final Collection<ResolvedDependency> dependencyParse;
private final List<InputWord> words;
private final Table<Integer, Integer, ResolvedDependency> headToArgNumberToDependency = HashBasedTable.create();
private final List<SyntaxTreeNodeLeaf> leaves;
private CCGandSRLparse(final SyntaxTreeNode ccgParse, final Collection<ResolvedDependency> dependencyParse,
final List<InputWord> words) {
super();
this.ccgParse = ccgParse;
this.dependencyParse = dependencyParse;
this.words = words;
for (final ResolvedDependency dep : dependencyParse) {
headToArgNumberToDependency.put(dep.getHead(), dep.getArgNumber(), dep);
}
this.leaves = ccgParse.getLeaves();
}
public SyntaxTreeNode getCcgParse() {
return ccgParse;
}
public Collection<ResolvedDependency> getDependencyParse() {
return dependencyParse;
}
public SyntaxTreeNodeLeaf getLeaf(final int wordIndex) {
return leaves.get(wordIndex);
}
public List<ResolvedDependency> getOrderedDependenciesAtPredicateIndex(final int wordIndex) {
final Category c = getLeaf(wordIndex).getCategory();
final List<ResolvedDependency> result = new ArrayList<>();
for (int i = 1; i <= c.getNumberOfArguments(); i++) {
result.add(headToArgNumberToDependency.get(wordIndex, i));
}
return result;
}
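// Note (not part of the original source): HashBasedTable.get returns null for missing cells, so
// the list returned above may contain null entries for argument slots that have no resolved dependency.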
public CCGandSRLparse addSemantics(final Lexicon lexicon) {
return new CCGandSRLparse(ccgParse.addSemantics(lexicon, this), dependencyParse, words);
}
}
public static class PipelineSRLParser extends JointSRLParser {
private final DependencyGenerator dependencyGenerator;
public PipelineSRLParser(final Parser parser, final LabelClassifier classifier, final POSTagger tagger)
throws IOException {
super(parser, tagger);
this.dependencyGenerator = new DependencyGenerator(parser.getUnaryRules());
this.classifier = classifier;
}
private final LabelClassifier classifier;
@Override
public List<CCGandSRLparse> parseTokens2(final InputToParser tokens) {
final List<CCGandSRLparse> parses = super.parseTokens2(tokens);
if (parses == null) {
return null;
}
return parses.stream().map(x -> addDependencies(tokens.getInputWords(), x)).collect(Collectors.toList());
}
private CCGandSRLparse addDependencies(final List<InputWord> tokens, final CCGandSRLparse parse) {
final Collection<UnlabelledDependency> unlabelledDependencies = new ArrayList<>();
// Get the dependencies in this parse.
final SyntaxTreeNode annotatedSyntaxTree = dependencyGenerator.generateDependencies(parse.getCcgParse(),
unlabelledDependencies);
final Collection<ResolvedDependency> result = new ArrayList<>();
for (final UnlabelledDependency dep : unlabelledDependencies) {
// Add labels to the dependencies using the classifier.
result.addAll(dep.setLabel(classifier.classify(dep, tokens)).stream()
.filter(x -> x.getHead() != x.getArgument()).collect(Collectors.toList()));
}
return new CCGandSRLparse(annotatedSyntaxTree, result, tokens);
}
}
public List<CCGandSRLparse> parseTokens(final List<InputWord> words) {
return parseTokens(new InputToParser(words, null, null, false));
}
}
|
package edu.yalestc.yalepublic.news;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.net.ConnectivityManager;
import android.net.Uri;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ListView;
import android.widget.TextView;
import org.xml.sax.SAXException;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import edu.yalestc.yalepublic.R;
import edu.yalestc.yalepublic.news.RssReader;
import edu.yalestc.yalepublic.news.RssFeed;
public class NewsReader extends Activity {
TextView tRSSTitle;
RssFeed feed;
long time, timediff, hourdiff, daydiff;
String downloadurl;
ArrayList<String> rssTitles = new ArrayList<String>();
ArrayList<String> rssLinks = new ArrayList<String>();
ArrayList<String> rssDescription = new ArrayList<String>();
ArrayList<String> rssContent = new ArrayList<String>();
ArrayList<String> rssTimediff = new ArrayList<String>();
// Check for connectivity, return true if connected or connecting.
public boolean isOnline() {
ConnectivityManager cm =
(ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE);
return cm.getActiveNetworkInfo() != null &&
cm.getActiveNetworkInfo().isConnectedOrConnecting();
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.news_items);
tRSSTitle = (TextView) findViewById(R.id.tvRSSTitle);
tRSSTitle.setVisibility(View.GONE); // Hide the top textview
downloadurl = this.getIntent().getStringExtra("rssfeed");
Log.d("NewsReader passed", downloadurl);
// If we're online, downloads the RSS Feed and returns it as `feed`
if (isOnline()) {
NewsDownload start = new NewsDownload();
try {
feed = start.execute(downloadurl).get();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (ExecutionException e) {
e.printStackTrace();
}
} else {
Log.d("NewsReader", "Please connect to Internet");
}
time = System.currentTimeMillis();
if (feed != null) { // EHOSTUNREACH: No route to host
ArrayList<RssItem> rssItems = feed.getRssItems();
/*private String title;
private String link;
private Date pubDate;
private String description;
private String content;*/
for (RssItem rssItem : rssItems) {
//Log.d("RSS Reader", rssItem.getPubDate().toString());
timediff = time - rssItem.getPubDate().getTime(); // difference in milliseconds
//Log.d("Timediff", String.valueOf(timediff));
daydiff = TimeUnit.MILLISECONDS.toDays(timediff); // difference in days
hourdiff = TimeUnit.MILLISECONDS.toHours(timediff); // difference in hours
if (0L == daydiff) { // 0 days ago
rssTimediff.add(String.valueOf(hourdiff) + " hours ago");
} else if (1L == daydiff) {
rssTimediff.add("Yesterday"); // In the same style as the original app
} else {
rssTimediff.add(String.valueOf(daydiff) + " days ago");
}
rssTitles.add(rssItem.getTitle());
rssLinks.add(rssItem.getLink());
rssDescription.add(rssItem.getDescription());
//rssContent.add(rssItem.getContent());
}
/*String[] video_arrays = {"video1", "video2"};
List<String> videos = new ArrayList<String>(Arrays.asList(video_arrays)); */
// Parameters: Activity (Context), Layout file, Id of TextView, Array that's adapted
/*final ArrayAdapter<String> mNewsAdapter;
mNewsAdapter = new ArrayAdapter<String>(
this, R.layout.news_tab, R.id.tvTitle, rssTitles);*/
//ArrayList<RssItem> rssItems = feed.getRssItems();
//List<String> rssData = rssItems;
// TODO: Convert ArrayList<rssItem> into an array of strings
ListView listView = (ListView) findViewById(R.id.listNews);
listView.setAdapter(new NewsAdapter(this, R.layout.news_tab, rssItems));
//listView.setAdapter(mNewsAdapter);
}
}
public class NewsAdapter extends ArrayAdapter<RssItem> {
private final Context context;
private final ArrayList<RssItem> data;
private final int layoutResourceId;
// fix here
public NewsAdapter(Context context, int layoutResourceId, ArrayList<RssItem> data) {
super(context, layoutResourceId, data);
this.context = context;
this.data = data;
this.layoutResourceId = layoutResourceId;
}
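// Note (not part of the original source): getView below reuses recycled rows via the ViewHolder
// pattern, inflating a new row and caching its TextViews only when convertView is null.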
@Override
public View getView(int position, View convertView, ViewGroup parent) {
View row = convertView;
ViewHolder holder = null;
if(row == null) {
LayoutInflater inflater = ((Activity)context).getLayoutInflater();
row = inflater.inflate(layoutResourceId, parent, false);
holder = new ViewHolder();
holder.textView1 = (TextView) row.findViewById(R.id.tvTitle);
holder.textView2 = (TextView) row.findViewById(R.id.tvDate);
holder.textView3 = (TextView) row.findViewById(R.id.tvDescription);
row.setTag(holder);
} else {
holder = (ViewHolder) row.getTag();
}
//RssItem rItem = data.get(position);
holder.textView1.setText(rssTitles.get(position));
holder.textView2.setText(rssTimediff.get(position));
holder.textView3.setText(rssDescription.get(position));
return row;
}
private class ViewHolder {
TextView textView1;
TextView textView2;
TextView textView3;
}
}
}
|
package it.reply.workflowManager.spring.orchestrator.bpm.ejbcommands;
import it.reply.workflowManager.orchestrator.bpm.ejbcommands.AbstractBaseCommand;
import org.apache.logging.log4j.Logger;
import org.kie.api.executor.CommandContext;
import org.kie.api.executor.ExecutionResults;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import javax.annotation.PostConstruct;
import org.apache.logging.log4j.LogManager;
/**
* Abstract base class for command implementations.<br/>
* This sets up the logger and some variables from the Executor command context. It also manages
* logging at the start and end of a command and provides helper methods for error and result handling.
*
* @author l.biava
*
*/
// @ManageEntities
public abstract class BaseCommand extends AbstractBaseCommand {
private static final Logger LOG = LogManager.getLogger(BaseCommand.class);
@Autowired
private ApplicationContext applicationContext;
private BaseCommand self;
@PostConstruct
private void init() throws Exception {
self = applicationContext.getBean(this.getClass());
}
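// Note (not part of the original source): the bean is looked up from the ApplicationContext so
// that getFacade() returns the Spring-managed (potentially proxied) instance of this command;
// invoking methods through that facade, rather than through plain `this`, lets proxy-based
// annotations such as @Transactional take effect.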
@Override
protected AbstractBaseCommand getFacade() {
return self;
}
@Override
@Transactional(propagation = Propagation.NOT_SUPPORTED)
public ExecutionResults execute(CommandContext ctx) throws Exception {
return super.execute(ctx);
}
}
|
package org.xwiki.observation.remote.test;
import java.io.File;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import org.junit.Before;
import org.xwiki.component.embed.EmbeddableComponentManager;
import org.xwiki.container.ApplicationContext;
import org.xwiki.container.Container;
import org.xwiki.observation.ObservationManager;
import org.xwiki.observation.remote.internal.jgroups.JGroupsNetworkAdapter;
import org.xwiki.test.MockConfigurationSource;
import org.xwiki.test.XWikiComponentInitializer;
/**
* Base class to easily emulate two observation manager instances communicating with each other over the network.
*
* @version $Id$
*/
public abstract class AbstractROMTestCase
{
private XWikiComponentInitializer initializer1 = new XWikiComponentInitializer();
private XWikiComponentInitializer initializer2 = new XWikiComponentInitializer();
private ObservationManager observationManager1;
private ObservationManager observationManager2;
@Before
public void setUp() throws Exception
{
this.initializer1.initializeContainer();
this.initializer1.initializeConfigurationSource();
this.initializer1.initializeExecution();
this.initializer2.initializeContainer();
this.initializer2.initializeConfigurationSource();
this.initializer2.initializeExecution();
ApplicationContext applicationContext = new ApplicationContext()
{
public File getTemporaryDirectory()
{
throw new RuntimeException("Not implemented");
}
public InputStream getResourceAsStream(String resourceName)
{
return this.getClass().getClassLoader().getResourceAsStream(
resourceName.substring(("/WEB-INF/" + JGroupsNetworkAdapter.CONFIGURATION_PATH).length()));
}
public URL getResource(String resourceName) throws MalformedURLException
{
throw new RuntimeException("Not implemented");
}
};
getComponentManager1().lookup(Container.class).setApplicationContext(applicationContext);
getComponentManager2().lookup(Container.class).setApplicationContext(applicationContext);
getConfigurationSource1().setProperty("observation.remote.enabled", Boolean.TRUE);
getConfigurationSource2().setProperty("observation.remote.enabled", Boolean.TRUE);
this.observationManager1 = getComponentManager1().lookup(ObservationManager.class);
this.observationManager2 = getComponentManager2().lookup(ObservationManager.class);
}
public EmbeddableComponentManager getComponentManager1() throws Exception
{
return this.initializer1.getComponentManager();
}
public EmbeddableComponentManager getComponentManager2() throws Exception
{
return this.initializer2.getComponentManager();
}
/**
* @return a modifiable mock configuration source
*/
public MockConfigurationSource getConfigurationSource1()
{
return this.initializer1.getConfigurationSource();
}
/**
* @return a modifiable mock configuration source
*/
public MockConfigurationSource getConfigurationSource2()
{
return this.initializer2.getConfigurationSource();
}
public ObservationManager getObservationManager1()
{
return this.observationManager1;
}
public ObservationManager getObservationManager2()
{
return this.observationManager2;
}
}
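// Illustrative usage sketch (not part of the original source): a concrete test case would extend
// this class and exercise both managers, e.g. register a listener on getObservationManager2(),
// fire an event through getObservationManager1(), and then assert that the listener on the
// second instance received the event over the network.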
|
package com.google.android.libraries.motionphotoreader;
import android.graphics.Bitmap;
import android.media.MediaExtractor;
import android.os.Bundle;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.adobe.internal.xmp.XMPException;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.stubbing.Answer;
import java.io.IOException;
import java.lang.reflect.Field;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
/**
* Instrumented test for MotionPhotoReader class.
*/
@RunWith(AndroidJUnit4.class)
public class MotionPhotoReaderTest {
private static final String filename = "/sdcard/MVIMG_20200621_200240.jpg";
private static final int NUM_FRAMES = 44;
private static final long SEEK_AMOUNT_US = 10_000L;
// TODO: close motion photo readers
@Before
public void setUp() {
}
@Test(expected = IOException.class)
public void openMotionPhotoReader_invalidFile_throwsIOException() throws IOException, XMPException {
MotionPhotoReader.open("/sdcard/MVIMG_20200621_200241.jpg", null);
}
@Test
public void numberOfFramesPlayed_isCorrect() throws IOException, XMPException {
MotionPhotoReader reader = MotionPhotoReader.open(filename, null);
int frameCount = 0;
while (reader.hasNextFrame()) {
reader.nextFrame();
frameCount++;
}
assertEquals(NUM_FRAMES, frameCount);
}
@Test
public void getCurrentTimestamp_onStart_isCorrect() throws IOException, XMPException {
MotionPhotoReader reader = MotionPhotoReader.open(filename, null);
assertEquals(0, reader.getCurrentTimestamp());
}
@Test
public void getCurrentTimestamp_nextFrame_isCorrect() throws IOException, XMPException {
MotionPhotoReader reader = MotionPhotoReader.open(filename, null);
long currentTimestampUs = -1L;
while (reader.hasNextFrame()) {
long newTimestampUs = reader.getCurrentTimestamp();
reader.nextFrame();
boolean flag = currentTimestampUs < newTimestampUs;
assertTrue("Timestamp did not increase: " + currentTimestampUs + " vs. " + newTimestampUs, flag);
currentTimestampUs = newTimestampUs;
}
}
@Test
public void getCurrentTimestamp_seekTo_isCorrect() throws IOException, XMPException {
MotionPhotoReader reader = MotionPhotoReader.open(filename, null);
long currentTimestampUs = -1L;
while (reader.hasNextFrame()) {
long newTimestampUs = reader.getCurrentTimestamp();
reader.seekTo(reader.getCurrentTimestamp() + SEEK_AMOUNT_US, MediaExtractor.SEEK_TO_NEXT_SYNC);
boolean flag = currentTimestampUs < newTimestampUs;
assertTrue("Timestamp did not increase: " + currentTimestampUs + " vs. " + newTimestampUs, flag);
currentTimestampUs = newTimestampUs;
}
}
@Test
public void hasNextFrame_atBeginningOfVideo_returnsTrue() throws IOException, XMPException {
MotionPhotoReader reader = MotionPhotoReader.open(filename, null);
boolean flag = reader.hasNextFrame();
assertTrue("No next frame", flag);
}
@Test
public void hasNextFrame_atLastFrame_returnsFalse() throws IOException, XMPException {
MotionPhotoReader reader = MotionPhotoReader.open(filename, null);
long timestampUs = reader.getMotionPhotoInfo().getDuration();
reader.seekTo(timestampUs, MediaExtractor.SEEK_TO_NEXT_SYNC);
boolean flag = reader.hasNextFrame();
assertFalse("Did not seek to end of video", flag);
}
@Test
public void availableInputBufferQueue_isNotEmpty()
throws IOException, XMPException, NoSuchFieldException, IllegalAccessException {
MotionPhotoReader reader = MotionPhotoReader.open(filename, null);
Field inputBufferQueueField = reader.getClass().getDeclaredField("availableInputBuffers");
inputBufferQueueField.setAccessible(true);
BlockingQueue<Integer> availableInputBufferQueue = (BlockingQueue<Integer>) inputBufferQueueField.get(reader);
boolean flag = availableInputBufferQueue.size() > 0;
assertTrue("Available input buffer queue is empty", flag);
}
@Test
public void availableOutputBufferQueue_isQueried() throws IOException, XMPException, InterruptedException {
BlockingQueue<Integer> availableInputBuffers = new LinkedBlockingQueue<>();
BlockingQueue<Bundle> availableOutputBuffers = new LinkedBlockingQueue<>();
BlockingQueue<Integer> fakeAvailableInputBuffers = mock(LinkedBlockingQueue.class);
BlockingQueue<Bundle> fakeAvailableOutputBuffers = mock(LinkedBlockingQueue.class);
doAnswer((Answer<Void>) invocation -> {
int index = invocation.getArgument(0);
availableInputBuffers.offer(index);
return null;
}).when(fakeAvailableInputBuffers).offer(anyInt());
doAnswer((Answer<Void>) invocation -> {
Bundle bufferData = invocation.getArgument(0);
availableOutputBuffers.offer(bufferData);
return null;
}).when(fakeAvailableOutputBuffers).offer(any(Bundle.class));
doAnswer((Answer<Integer>) invocation -> {
long timeout = invocation.getArgument(0);
TimeUnit timeUnit = invocation.getArgument(1);
int index = availableInputBuffers.poll(timeout, timeUnit);
return index;
}).when(fakeAvailableInputBuffers).poll(anyLong(), any(TimeUnit.class));
doAnswer((Answer<Bundle>) invocation -> {
long timeout = invocation.getArgument(0);
TimeUnit timeUnit = invocation.getArgument(1);
Bundle bufferData = availableOutputBuffers.poll(timeout, timeUnit);
return bufferData;
}).when(fakeAvailableOutputBuffers).poll(anyLong(), any(TimeUnit.class));
MotionPhotoReader reader = MotionPhotoReader.open(filename, null, fakeAvailableInputBuffers, fakeAvailableOutputBuffers);
while (reader.hasNextFrame()) {
reader.nextFrame();
}
verify(fakeAvailableOutputBuffers, times(NUM_FRAMES)).offer(any(Bundle.class));
verify(fakeAvailableOutputBuffers, times(NUM_FRAMES)).poll(anyLong(), eq(TimeUnit.MILLISECONDS));
}
@Test
public void getMotionPhotoImage_isNotNull() throws IOException, XMPException {
MotionPhotoReader reader = MotionPhotoReader.open(filename, null);
Bitmap bmp = reader.getMotionPhotoImageBitmap();
assertNotNull(bmp);
}
}
|
package net.malisis.demo.fontdemo;
import net.malisis.core.renderer.MalisisRenderer;
import net.malisis.core.renderer.RenderParameters;
import net.malisis.core.renderer.animation.Animation;
import net.malisis.core.renderer.animation.AnimationRenderer;
import net.malisis.core.renderer.animation.transformation.ChainedTransformation;
import net.malisis.core.renderer.animation.transformation.Transformation;
import net.malisis.core.renderer.animation.transformation.Translation;
import net.malisis.core.renderer.element.Face;
import net.malisis.core.renderer.element.Shape;
import net.malisis.core.renderer.font.FontRenderOptions;
import net.malisis.core.renderer.font.MalisisFont;
import net.malisis.core.util.EntityUtils;
import net.malisis.demo.MalisisDemos;
import net.minecraft.client.Minecraft;
import net.minecraft.client.entity.EntityPlayerSP;
import net.minecraft.util.BlockPos;
import net.minecraft.util.EnumChatFormatting;
import net.minecraft.util.ResourceLocation;
import net.minecraft.world.IBlockAccess;
import net.minecraftforge.client.event.RenderWorldLastEvent;
/**
* @author Ordinastie
*
*/
public class FontDemoRenderer extends MalisisRenderer
{
//The font that will be used
MalisisFont font;
//The shape holding the text : "Custom font with cool animations!"
Shape shapeString;
//Parameters
RenderParameters rp;
//The Animation renderer that handles the animations for the shape
AnimationRenderer ar = new AnimationRenderer();
@Override
protected void initialize()
{
rp = new RenderParameters();
rp.colorMultiplier.set(0x9999FF);
loadFont();
loadShapeAndAnimations();
}
@Override
public boolean shouldRender(RenderWorldLastEvent event, IBlockAccess world)
{
//render only if the player is within 64 blocks of the world origin (64 * 64 = 4096)
return Minecraft.getMinecraft().thePlayer.getPosition().distanceSq(0, 0, 0) < 4096;
}
private void loadFont()
{
//font is loaded from a regular ResourceLocation
//keep in mind that the process takes time, especially the first time when the font sheet is generated
ResourceLocation rl = new ResourceLocation(MalisisDemos.modid + ":fonts/HoboStd.otf");
font = new MalisisFont(rl);
}
private void loadShapeAndAnimations()
{
String text = "Custom font with cool animations!";
//create the Shape from the text with the font where each letter is a Face
//(the position is hard coded to be placed against a wall in my test world)
shapeString = font.getShape(text, 1f);
shapeString.translate(-20, 4, -19.8F);
shapeString.storeState();
//create the animations : letters will move up and down in succession (increased delay for each letter)
int delay = 0;
float amount = 0.75F;
int ticks = 8;
for (Face f : shapeString.getFaces())
{
Translation t = new Translation(0, 0, 0, 0, amount, 0).forTicks(ticks).movement(Transformation.SINUSOIDAL);
//down
Translation t2 = new Translation(0, 0, 0, 0, -amount, 0).forTicks(ticks).movement(Transformation.SINUSOIDAL);
//chain the up and down movements, with a slightly greater delay for each successive letter
ChainedTransformation chain = new ChainedTransformation(t, t2).delay(delay++ * 2);
//make the animation for the face (letter) and add it to the AnimationRenderer
ar.addAnimation(new Animation(f, chain));
}
}
public void animate()
{
//setting a new start time on the AnimationRenderer restarts the animations
ar.setStartTime();
}
@Override
public void render()
{
//bind the font texture
bindTexture(font.getResourceLocation());
//reset the shape state
shapeString.resetState();
//animate
ar.animate();
//draw the shape
drawShape(shapeString, rp);
next();
//for direct rendering, you can specify options
FontRenderOptions fro = new FontRenderOptions();
fro.color = 0x339966;
fro.fontScale = 0.25f;
fro.shadow = true;
fro.underline = true;
//hard coded position
float fx = -11;
float fy = 3.5F;
float fz = 0;
//some text with EnumChatFormatting inside
String str = "Just an " + EnumChatFormatting.GOLD + "example with " + EnumChatFormatting.RED + "custom color"
+ EnumChatFormatting.RESET + " and reset after";
//render the text at the position with the options
font.render(this, str, fx, fy, fz, fro);
//change options
fro.color = 0x666666;
fro.underline = false;
//defaults need to be saved again if the same object is reused with different options
fro.saveDefault();
//set the position lower
fy -= 0.4F;
//some "dynamic text"
EntityPlayerSP player = Minecraft.getMinecraft().thePlayer;
BlockPos p = player.getPosition();
str = "Player position : " + EnumChatFormatting.DARK_AQUA + p.getX() + ", " + p.getY() + ", " + p.getZ();
//render the text at the position with the options
font.render(this, str, fx, fy, fz, fro);
fy -= 0.3F;
str = "Facing : " + EnumChatFormatting.DARK_AQUA + EntityUtils.getEntityFacing(player) + " ("
+ EntityUtils.getEntityRotation(player) + ")";
font.render(this, str, fx, fy, fz, fro);
}
}
|
package com.opera.core.systems;
import java.io.File;
import junit.framework.Assert;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import com.opera.core.systems.runner.OperaRunner;
import com.opera.core.systems.settings.OperaDriverSettings;
abstract public class TestBase {
protected static TestOperaDriver driver;
protected static OperaDriverSettings settings;
private static String fixture_dir;
@BeforeClass
public static void setUpBeforeClass() throws Exception {
driver = new TestOperaDriver();
Assert.assertNotNull(driver);
// Setup the fixture directory
String separator = System.getProperty("file.separator");
fixture_dir = System.getProperty("user.dir");
fixture_dir = separator + fixture_dir + separator + separator + "test" +
separator + "fixtures" + separator;
Assert.assertTrue(new File(fixture_dir).isDirectory());
}
@AfterClass
public static void tearDownAfterClass() throws Exception {
driver.shutdown();
}
// Easy access to fixtures
/// Get the URL of the given fixture file
protected String fixture(String file) {
return "file://localhost"+fixture_dir + file;
}
/// Navigate to the given fixture file
protected void getFixture(String file) {
driver.get(fixture(file));
}
}
// Provides access to the Opera Runner, so we can detect crashes
class TestOperaDriver extends OperaDriver {
public TestOperaDriver() {
super();
}
public TestOperaDriver(OperaDriverSettings settings) {
super(settings);
}
public OperaRunner getRunner() {
return operaRunner;
}
public OperaDriverSettings getSettings() {
return settings;
}
}
|
package mandelbrot;
import mandelbrot.ui.Window;
import javax.swing.*;
/**
* Provides the main entry point of the application.
*
* Creates a new {@code Window} within the UI thread and shows it.
*/
public class Main {
public static void main(String[] args) {
// try to use system specific look and feel
try {
UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
} catch (Exception e) {
System.err.println("Unable to set system specific look and feel");
}
// create the window within the UI-thread
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
final Window window = new Window();
window.setVisible(true);
window.setExtendedState(window.getExtendedState() | JFrame.MAXIMIZED_BOTH);
}
});
}
}
|
package org.eclipse.che.api.project.server;
import org.eclipse.che.api.project.shared.dto.ProjectTemplateDescriptor;
import javax.inject.Singleton;
import javax.validation.constraints.NotNull;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
/**
* @author Vitaly Parfonov
*/
@Singleton
public class ProjectTemplateRegistry {
private final Map<List<String>, List<ProjectTemplateDescriptor>> templates = new ConcurrentHashMap<>();
public void register(List<String> tags, ProjectTemplateDescriptor template) {
List<ProjectTemplateDescriptor> templateList = templates.get(tags);
if (templateList == null) {
templates.put(tags, templateList = new CopyOnWriteArrayList<>());
}
templateList.add(template);
}
public void register(List<String> tags, List<ProjectTemplateDescriptor> templates) {
List<ProjectTemplateDescriptor> templateList = this.templates.get(tags);
if (templateList == null) {
this.templates.put(tags, new CopyOnWriteArrayList<>(templates));
} else {
templateList.addAll(templates);
}
}
@NotNull
public List<ProjectTemplateDescriptor> getTemplates(List<String> tags) {
if (tags == null || tags.isEmpty()) {
return Arrays.asList();
}
List<ProjectTemplateDescriptor> templateDescriptors = new ArrayList<>();
templates.entrySet()
.stream()
.filter(entry -> entry.getKey().containsAll(tags))
.forEach(entry -> templateDescriptors.addAll(entry.getValue()));
return templateDescriptors;
}
public List<ProjectTemplateDescriptor> getAllTemplates() {
List<ProjectTemplateDescriptor> allTemplates = new ArrayList<>();
for (Map.Entry<List<String>, List<ProjectTemplateDescriptor>> entry : templates.entrySet()) {
allTemplates.addAll(entry.getValue());
}
return allTemplates;
}
}
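// A minimal usage sketch, not part of the original sources: it illustrates that getTemplates(tags)
// returns the templates of every entry whose registered tag list contains all of the requested tags.
// The class name is hypothetical and the descriptors are Mockito mocks (assumes a static import of
// org.mockito.Mockito.mock), used here only to stand in for the ProjectTemplateDescriptor DTOs.
class ProjectTemplateRegistryExample {
    public static void main(String[] args) {
        ProjectTemplateRegistry registry = new ProjectTemplateRegistry();
        ProjectTemplateDescriptor mavenTemplate = mock(ProjectTemplateDescriptor.class);
        ProjectTemplateDescriptor antTemplate = mock(ProjectTemplateDescriptor.class);
        // Each template is keyed by the full tag list it was registered with.
        registry.register(Arrays.asList("java", "maven"), mavenTemplate);
        registry.register(Arrays.asList("java", "ant"), antTemplate);
        // Both entries' tag lists contain "java", so both templates are returned.
        List<ProjectTemplateDescriptor> javaTemplates = registry.getTemplates(Arrays.asList("java"));
        System.out.println(javaTemplates.size()); // 2
        // Only the first entry's tag list contains both "java" and "maven".
        List<ProjectTemplateDescriptor> mavenTemplates = registry.getTemplates(Arrays.asList("java", "maven"));
        System.out.println(mavenTemplates.size()); // 1
    }
}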
|
package org.deviceconnect.android.deviceplugin.host.profile;
import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.deviceconnect.android.deviceplugin.host.BuildConfig;
import org.deviceconnect.android.message.MessageUtils;
import org.deviceconnect.android.profile.FileProfile;
import org.deviceconnect.android.provider.FileManager;
import org.deviceconnect.message.DConnectMessage;
import org.deviceconnect.message.intent.message.IntentDConnectMessage;
import android.content.ContentResolver;
import android.content.ContentValues;
import android.content.Intent;
import android.media.MediaMetadataRetriever;
import android.os.Bundle;
import android.provider.MediaStore;
import android.provider.MediaStore.Audio;
import android.provider.MediaStore.Video;
import android.util.Log;
import android.webkit.MimeTypeMap;
/**
* File Profile.
*
* @author NTT DOCOMO, INC.
*/
public class HostFileProfile extends FileProfile {
/** Debug Tag. */
private static final String TAG = "HOST";
/** FileManager. */
private FileManager mFileManager;
/** SimpleDateFormat used to format file timestamps. */
private SimpleDateFormat mDataFormat = new SimpleDateFormat(
"yyyy-MM-dd HH:mm:ss", Locale.getDefault());
/**
* Constructor.
*
* @param fileMgr FileManager used to resolve and store files.
*/
public HostFileProfile(final FileManager fileMgr) {
super(fileMgr);
mFileManager = fileMgr;
}
@Override
protected boolean onGetReceive(final Intent request, final Intent response,
final String serviceId, final String path) {
if (serviceId == null) {
createEmptyServiceId(response);
} else if (!checkServiceId(serviceId)) {
createNotFoundService(response);
} else if (path == null) {
MessageUtils.setInvalidRequestParameterError(response);
} else {
File mFile = null;
String filePath = "";
if (path.indexOf("/") == 0) {
mFile = new File(getFileManager().getBasePath() + path);
filePath = getFileManager().getContentUri() + path;
} else {
mFile = new File(getFileManager().getBasePath() + "/" + path);
filePath = getFileManager().getContentUri() + path;
}
if (mFile.isFile()) {
setResult(response, IntentDConnectMessage.RESULT_OK);
response.putExtra(FileProfile.PARAM_MIME_TYPE, getMIMEType(path));
response.putExtra(FileProfile.PARAM_URI, filePath);
} else {
MessageUtils.setInvalidRequestParameterError(response, "not found:" + path);
}
}
return true;
}
/**
* Handle a GET request for the file list.
*
* @param request request intent
* @param response response intent
* @param serviceId service ID
* @param path directory path
* @param mimeType MIME type filter
* @param order sort order
* @param offset offset into the list
* @param limit maximum number of entries to return
* @return true if the response is returned synchronously, false if it is sent later via sendBroadcast
*/
@Override
protected boolean onGetList(final Intent request, final Intent response, final String serviceId, final String path,
final String mimeType, final String order, final Integer offset, final Integer limit) {
if (serviceId == null) {
createEmptyServiceId(response);
return true;
} else if (!checkServiceId(serviceId)) {
createNotFoundService(response);
return true;
} else {
new Thread(new Runnable() {
@Override
public void run() {
File tmpDir = null;
String mPath = null;
Boolean currentTop = false;
if (path == null) {
// path is null: list the top directory
tmpDir = getFileManager().getBasePath();
currentTop = true;
} else if (path.equals("/")) {
// path is "/": list the top directory
tmpDir = getFileManager().getBasePath();
currentTop = true;
} else if (path.endsWith("..")) {
String[] mDirs = path.split("/", 0);
mPath = "/";
int mCount = 0;
if (mDirs[0].equals("")) {
mCount = 1;
}
for (int i = mCount; i < mDirs.length - 2; i++) {
mPath += mDirs[i] + "/";
}
if (mDirs.length == 1 || mPath.equals("/")) {
currentTop = true;
}
tmpDir = new File(getFileManager().getBasePath(), mPath);
} else {
tmpDir = new File(getFileManager().getBasePath() + "/" + path);
currentTop = false;
}
File[] respFileList = tmpDir.listFiles();
if (respFileList == null) {
setResult(response, DConnectMessage.RESULT_ERROR);
MessageUtils.setInvalidRequestParameterError(response, "Dir does not exist: " + tmpDir);
getContext().sendBroadcast(response);
} else if (order != null && !order.endsWith("desc") && !order.endsWith("asc")) {
MessageUtils.setInvalidRequestParameterError(response);
getContext().sendBroadcast(response);
} else {
// Set arraylist from respFileList
ArrayList<FileAttribute> filelist = new ArrayList<FileAttribute>();
filelist = setArryList(respFileList, filelist);
// Sort
filelist = sortFilelist(order, filelist);
List<Bundle> resp = new ArrayList<Bundle>();
Bundle respParam = new Bundle();
if (!currentTop) {
String tmpPath = path;
if (mPath != null) {
tmpPath = mPath;
}
File parentDir = new File(tmpPath + "/..");
String path = parentDir.getPath().replaceAll("" + mFileManager.getBasePath(), "");
String name = parentDir.getName();
Long size = parentDir.length();
String mineType = "folder/dir";
int filetype = 1;
String date = mDataFormat.format(parentDir.lastModified());
FileAttribute fa = new FileAttribute(path, name, mineType, filetype, size, date);
respParam = addResponseParamToArray(fa, respParam);
resp.add((Bundle) respParam.clone());
}
ArrayList<FileAttribute> tmpfilelist = new ArrayList<FileAttribute>();
if (order != null && order.endsWith("desc")) {
int last = filelist.size();
for (int i = last - 1; i >= 0; i--) {
tmpfilelist.add(filelist.get(i));
}
filelist = tmpfilelist;
}
int counter = 0;
int tmpLimit = 0;
int tmpOffset = 0;
if (limit != null) {
if (limit >= 0) {
tmpLimit = limit;
} else {
MessageUtils.setInvalidRequestParameterError(response);
getContext().sendBroadcast(response);
return;
}
} else {
if (request.getStringExtra(PARAM_LIMIT) != null) {
MessageUtils.setInvalidRequestParameterError(response);
getContext().sendBroadcast(response);
return;
}
}
if (offset != null) {
if (offset >= 0) {
tmpOffset = offset;
} else {
MessageUtils.setInvalidRequestParameterError(response);
getContext().sendBroadcast(response);
return;
}
} else {
if (request.getStringExtra(PARAM_OFFSET) != null) {
MessageUtils.setInvalidRequestParameterError(response);
getContext().sendBroadcast(response);
return;
}
}
if (tmpOffset > filelist.size()) {
MessageUtils.setInvalidRequestParameterError(response);
getContext().sendBroadcast(response);
return;
}
int limitCounter = tmpLimit + tmpOffset;
for (FileAttribute fa : filelist) {
if (limit == null || (limit != null && limitCounter > counter)) {
respParam = addResponseParamToArray(fa, respParam);
if (offset == null || (offset != null && counter >= offset)) {
resp.add((Bundle) respParam.clone());
}
}
counter++;
}
setResult(response, IntentDConnectMessage.RESULT_OK);
response.putExtra(PARAM_COUNT, filelist.size());
response.putExtra(PARAM_FILES, resp.toArray(new Bundle[resp.size()]));
getContext().sendBroadcast(response);
}
}
}).start();
}
return false;
}
/**
* Sort File list.
*
* @param order Sort order.
* @param filelist file list to sort.
* @return Sorted filelist.
*/
protected ArrayList<FileAttribute> sortFilelist(final String order, final ArrayList<FileAttribute> filelist) {
if (order != null) {
if (order.startsWith(PARAM_PATH)) {
Collections.sort(filelist, new Comparator<FileAttribute>() {
public int compare(final FileAttribute fa1, final FileAttribute fa2) {
return fa1.getPath().compareTo(fa2.getPath());
}
});
} else if (order.startsWith(PARAM_FILE_NAME)) {
Collections.sort(filelist, new Comparator<FileAttribute>() {
public int compare(final FileAttribute fa1, final FileAttribute fa2) {
return fa1.getName().compareTo(fa2.getName());
}
});
} else if (order.startsWith(PARAM_MIME_TYPE)) {
Collections.sort(filelist, new Comparator<FileAttribute>() {
public int compare(final FileAttribute fa1, final FileAttribute fa2) {
return fa1.getMimeType().compareTo(fa2.getMimeType());
}
});
} else if (order.startsWith(PARAM_FILE_TYPE)) {
Collections.sort(filelist, new Comparator<FileAttribute>() {
public int compare(final FileAttribute fa1, final FileAttribute fa2) {
return fa1.getFileType() - fa2.getFileType();
}
});
} else if (order.startsWith(PARAM_FILE_SIZE)) {
Collections.sort(filelist, new Comparator<FileAttribute>() {
public int compare(final FileAttribute fa1, final FileAttribute fa2) {
return (int) (fa1.getFileSize() - fa2.getFileSize());
}
});
} else if (order.startsWith(PARAM_UPDATE_DATE)) {
Collections.sort(filelist, new Comparator<FileAttribute>() {
public int compare(final FileAttribute fa1, final FileAttribute fa2) {
return fa1.getUpdateDate().compareTo(fa2.getUpdateDate());
}
});
}
}
return filelist;
}
/**
* Populate a FileAttribute list from the given file array.
*
* @param respFileList File list information.
* @param filelist FileAttribute list.
* @return FileAttribute list.
*/
protected ArrayList<FileAttribute> setArryList(final File[] respFileList, final ArrayList<FileAttribute> filelist) {
for (File file : respFileList) {
String path = file.getPath().replaceAll("" + mFileManager.getBasePath(), "");
if (path == null) {
path = "unknown";
}
String name = file.getName();
if (name == null) {
name = "unknown";
}
Long size = file.length();
String date = mDataFormat.format(file.lastModified());
int filetype = 0;
String mimetype = null;
if (file.isFile()) {
filetype = 0;
mimetype = getMIMEType(file.getPath() + file.getName());
if (mimetype == null) {
mimetype = "unknown";
}
} else {
filetype = 1;
mimetype = "dir/folder";
}
FileAttribute fileAttr = new FileAttribute(path, name, mimetype, filetype, size, date);
filelist.add(fileAttr);
}
return filelist;
}
@Override
protected boolean onPostSend(final Intent request, final Intent response, final String serviceId, final String path,
final String mimeType, final byte[] data) {
if (serviceId == null) {
createEmptyServiceId(response);
} else if (!checkServiceId(serviceId)) {
createNotFoundService(response);
} else if (path == null) {
MessageUtils.setInvalidRequestParameterError(response);
} else if (data == null) {
MessageUtils.setInvalidRequestParameterError(response, "data is null.");
return true;
} else {
String mUri = null;
try {
mUri = getFileManager().saveFile(path, data);
} catch (IOException e) {
mUri = null;
}
if (mUri == null) {
setResult(response, DConnectMessage.RESULT_ERROR);
MessageUtils.setInvalidRequestParameterError(response, "Could not save file to the given path.");
} else {
String mMineType = getMIMEType(getFileManager().getBasePath() + "/" + path);
if (BuildConfig.DEBUG) {
Log.i(TAG, "mMineType:" + mMineType);
}
// MimeType
if (mMineType == null) {
MessageUtils.setInvalidRequestParameterError(response, "Unsupported format");
setResult(response, DConnectMessage.RESULT_ERROR);
return true;
}
// Contents Provider
if (mMineType.endsWith("audio/mpeg")
|| mMineType.endsWith("audio/x-wav")
|| mMineType.endsWith("audio/mp4")
|| mMineType.endsWith("audio/ogg")
|| mMineType.endsWith("audio/mp3")
|| mMineType.endsWith("audio/x-ms-wma")
) {
MediaMetadataRetriever mMediaMeta = new MediaMetadataRetriever();
mMediaMeta.setDataSource(getFileManager().getBasePath() + "/" + path);
String mTitle = mMediaMeta.extractMetadata(MediaMetadataRetriever.METADATA_KEY_TITLE);
String mComposer = mMediaMeta.extractMetadata(MediaMetadataRetriever.METADATA_KEY_COMPOSER);
String mArtist = mMediaMeta.extractMetadata(MediaMetadataRetriever.METADATA_KEY_ARTIST);
String mDuration = mMediaMeta.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION);
ContentResolver mContentResolver = this.getContext().getApplicationContext().getContentResolver();
ContentValues mValues = new ContentValues();
if (mTitle == null) {
String[] array = path.split("/");
mTitle = array[array.length - 1];
}
mValues.put(Audio.Media.TITLE, mTitle);
mValues.put(Audio.Media.DISPLAY_NAME, mTitle);
mValues.put(Audio.Media.COMPOSER, mComposer);
mValues.put(Audio.Media.ARTIST, mArtist);
mValues.put(Audio.Media.DURATION, mDuration);
mValues.put(Audio.Media.MIME_TYPE, mMineType);
mValues.put(Audio.Media.DATA, getFileManager().getBasePath() + "/" + path);
mContentResolver.insert(MediaStore.Audio.Media.EXTERNAL_CONTENT_URI, mValues);
} else if (mMineType.endsWith("video/mp4") || mMineType.endsWith("video/3gpp")
|| mMineType.endsWith("video/3gpp2") || mMineType.endsWith("video/mpeg")
|| mMineType.endsWith("video/m4v")
) {
MediaMetadataRetriever mMediaMeta = new MediaMetadataRetriever();
mMediaMeta.setDataSource(getFileManager().getBasePath() + "/" + path);
String mTitle = mMediaMeta.extractMetadata(MediaMetadataRetriever.METADATA_KEY_TITLE);
String mArtist = mMediaMeta.extractMetadata(MediaMetadataRetriever.METADATA_KEY_ARTIST);
String mDuration = mMediaMeta.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION);
ContentResolver mContentResolver = this.getContext().getApplicationContext().getContentResolver();
ContentValues mValues = new ContentValues();
mValues.put(Video.Media.TITLE, mTitle);
mValues.put(Video.Media.DISPLAY_NAME, mTitle);
mValues.put(Video.Media.ARTIST, mArtist);
mValues.put(Video.Media.DURATION, mDuration);
mValues.put(Video.Media.MIME_TYPE, mMineType);
mValues.put(Video.Media.DATA, getFileManager().getBasePath() + "/" + path);
mContentResolver.insert(MediaStore.Video.Media.EXTERNAL_CONTENT_URI, mValues);
}
setResult(response, DConnectMessage.RESULT_OK);
}
}
return true;
}
@Override
protected boolean onDeleteRemove(final Intent request, final Intent response, final String serviceId,
final String path) {
if (serviceId == null) {
createEmptyServiceId(response);
} else if (!checkServiceId(serviceId)) {
createNotFoundService(response);
} else if (path == null) {
MessageUtils.setInvalidRequestParameterError(response);
} else {
Boolean result = getFileManager().removeFile(path);
if (result) {
setResult(response, DConnectMessage.RESULT_OK);
} else {
setResult(response, DConnectMessage.RESULT_ERROR);
MessageUtils.setInvalidRequestParameterError(response, "not found:" + path);
}
}
return true;
}
@Override
protected boolean onPostMkdir(final Intent request, final Intent response, final String serviceId,
final String path) {
if (serviceId == null) {
createEmptyServiceId(response);
} else if (!checkServiceId(serviceId)) {
createNotFoundService(response);
} else if (path == null) {
MessageUtils.setInvalidRequestParameterError(response);
} else {
File mBaseDir = mFileManager.getBasePath();
File mMakeDir = new File(mBaseDir, path);
if (mMakeDir.isDirectory()) {
setResult(response, DConnectMessage.RESULT_ERROR);
MessageUtils.setInvalidRequestParameterError(response,
"can not make dir, \"" + mMakeDir + "\" already exist.");
} else {
boolean isMakeDir = mMakeDir.mkdir();
if (isMakeDir) {
setResult(response, DConnectMessage.RESULT_OK);
} else {
setResult(response, DConnectMessage.RESULT_ERROR);
MessageUtils.setInvalidRequestParameterError(response, "can not make dir :" + mMakeDir);
}
}
}
return true;
}
@Override
protected boolean onDeleteRmdir(final Intent request, final Intent response, final String serviceId,
final String path, final boolean force) {
if (serviceId == null) {
createEmptyServiceId(response);
} else if (!checkServiceId(serviceId)) {
createNotFoundService(response);
} else if (path == null) {
MessageUtils.setInvalidRequestParameterError(response);
} else {
File mBaseDir = mFileManager.getBasePath();
File mDeleteDir = new File(mBaseDir, path);
if (mDeleteDir.isFile()) {
setResult(response, DConnectMessage.RESULT_ERROR);
MessageUtils.setInvalidRequestParameterError(response, mDeleteDir + " is a file");
} else {
boolean isDelete = mDeleteDir.delete();
if (isDelete) {
setResult(response, DConnectMessage.RESULT_OK);
} else {
setResult(response, DConnectMessage.RESULT_ERROR);
MessageUtils.setUnknownError(response, "can not delete dir :" + mDeleteDir);
}
}
}
return true;
}
/**
* Add a file attribute to the response parameter bundle.
*
* @param fa FileAttribute to add.
* @param respParam response parameter Bundle.
* @return the updated Bundle
*/
protected Bundle addResponseParamToArray(final FileAttribute fa, final Bundle respParam) {
respParam.putString(PARAM_PATH, fa.getPath());
respParam.putString(PARAM_FILE_NAME, fa.getName());
respParam.putString(PARAM_MIME_TYPE, fa.getMimeType());
respParam.putString(PARAM_FILE_TYPE, String.valueOf(fa.getFileType()));
respParam.putLong(PARAM_FILE_SIZE, fa.getFileSize());
respParam.putString(PARAM_UPDATE_DATE, fa.getUpdateDate());
return respParam;
}
/**
* Get the MIME type for a file path.
*
* @param path file path
* @return MIME type, or null if it cannot be determined
*/
public String getMIMEType(final String path) {
String ext = MimeTypeMap.getFileExtensionFromUrl(path);
ext = ext.toLowerCase(Locale.getDefault());
// MIME Type
return MimeTypeMap.getSingleton().getMimeTypeFromExtension(ext);
}
/**
* Check whether the service ID belongs to this plug-in.
*
* @param serviceId service ID
* @return true if <code>serviceId</code> matches this plug-in's service ID, false otherwise
*/
private boolean checkServiceId(final String serviceId) {
String regex = HostServiceDiscoveryProfile.SERVICE_ID;
Pattern mPattern = Pattern.compile(regex);
Matcher match = mPattern.matcher(serviceId);
return match.find();
}
/**
* Set an empty-service-ID error on the response.
*
* @param response response Intent
*/
private void createEmptyServiceId(final Intent response) {
MessageUtils.setEmptyServiceIdError(response);
}
/**
* Set a service-not-found error on the response.
*
* @param response response Intent
*/
private void createNotFoundService(final Intent response) {
MessageUtils.setNotFoundServiceError(response);
}
/**
* File Attribute Class.
*
*/
public class FileAttribute {
/** File Path. */
private String mPath;
/** File Name. */
private String mName;
/** MIME Type. */
private String mMimeType;
/** File Type. */
private int mFileType;
/** File Size. */
private long mSize;
/** Update Date. */
private String mUpdateDate;
/**
* Constructor.
*
* @param path File Path.
* @param name File Name.
* @param mimetype MIME Type.
* @param filetype File Type.
* @param size File Size.
* @param date Update Date.
*/
public FileAttribute(final String path, final String name, final String mimetype, final int filetype,
final long size, final String date) {
this.mPath = path;
this.mName = name;
this.mMimeType = mimetype;
this.mFileType = filetype;
this.mSize = size;
this.mUpdateDate = date;
}
/**
* Get path.
*
* @return path
*/
public String getPath() {
return this.mPath;
}
/**
* Get name.
*
* @return File Name
*/
public String getName() {
return this.mName;
}
/**
* Get MIME Type.
*
* @return MIME Type
*/
public String getMimeType() {
return this.mMimeType;
}
/**
* Get File Type.
*
* @return File Type
*/
public int getFileType() {
return this.mFileType;
}
/**
* Get File Size.
*
* @return File Size
*/
public long getFileSize() {
return this.mSize;
}
/**
* Get Update Date.
*
* @return Update Date
*/
public String getUpdateDate() {
return this.mUpdateDate;
}
}
}
|
package gov.nih.nci.security.cgmm.beans;
import java.io.Serializable;
import org.globus.gsi.GlobusCredential;
/**
* This class represents the CGMM User. It is a transient object used to provide or receive User details in CGMM.
* The User details can include CSM as well as Grid user details.
*
* @author Vijay Parmar
*
*/
public class CGMMUser implements Serializable{
private static final long serialVersionUID = 1L;
/**
* The loginName for Grid
*/
private String loginIDGrid;
/**
* The loginName for CSM
*/
private String loginIDCSM;
/**
* The password used to log into CSM.
*
*/
private String passwordCSM;
/**
* The password used to log into the Grid.
*
*/
private String passwordGrid;
/**
* The first name of the user
*/
private String firstName;
/**
* The last name of the user
*/
private String lastName;
/**
* The name of the organization that this user belongs to.
*/
private String organization;
/**
* The work phone number of the user.
*/
private String phoneNumber;
/**
* Email id for this user.
*/
private String emailId;
private byte migratedFlag;
private GlobusCredential globusCredential;
private String address1;
private String address2;
private String city;
private String country;
private String state;
private String zipcode;
public CGMMUser(){
}
/**
* @exception Throwable Throwable
*/
public void finalize()
throws Throwable {
}
/**
* Email id for this user.
*/
public String getEmailId(){
return emailId;
}
/**
* The first name of the user
*/
public String getFirstName(){
return firstName;
}
/**
* The last name of the user
*/
public String getLastName(){
return lastName;
}
/**
* The name of the organization that this user belongs to.
*/
public String getOrganization(){
return organization;
}
/**
* The password used to log into CSM.
*/
public String getPasswordCSM(){
return passwordCSM;
}
/**
* The password used to log into the Grid.
*/
public String getPasswordGrid(){
return passwordGrid;
}
/**
* This is the work phone of the user.
*/
public String getPhoneNumber(){
return phoneNumber;
}
/**
* Email id for this user.
* @param newVal
*
*/
public void setEmailId(final String newVal){
emailId = newVal;
}
/**
* The first name of the user
* @param newVal
*
*/
public void setFirstName(final String newVal){
firstName = newVal;
}
/**
* The last name of the user
* @param newVal
*
*/
public void setLastName(final String newVal){
lastName = newVal;
}
/**
* The name of the organization that this user belongs to.
* @param newVal
*
*/
public void setOrganization(final String newVal){
organization = newVal;
}
/**
* The password used to log into CSM.
* @param newVal
*
*/
public void setPasswordCSM(final String newVal){
passwordCSM = newVal;
}
/**
* The password used to log into the Grid.
* @param newVal
*
*/
public void setPasswordGrid(final String newVal){
passwordGrid = newVal;
}
/**
* This is the work phone of the user.
* @param newVal
*
*/
public void setPhoneNumber(final String newVal){
phoneNumber = newVal;
}
public byte getMigratedFlag() {
return migratedFlag;
}
public void setMigratedFlag(final byte migratedFlag) {
this.migratedFlag = migratedFlag;
}
public String getLoginIDCSM() {
return loginIDCSM;
}
public void setLoginIDCSM(String loginIDCSM) {
this.loginIDCSM = loginIDCSM;
}
public String getLoginIDGrid() {
return loginIDGrid;
}
public void setLoginIDGrid(String loginIDGrid) {
this.loginIDGrid = loginIDGrid;
}
public GlobusCredential getGlobusCredential() {
return globusCredential;
}
public void setGlobusCredential(GlobusCredential globusCredential) {
this.globusCredential = globusCredential;
}
public String getAddress1() {
return address1;
}
public void setAddress1(String address1) {
this.address1 = address1;
}
public String getAddress2() {
return address2;
}
public void setAddress2(String address2) {
this.address2 = address2;
}
public String getCity() {
return city;
}
public void setCity(String city) {
this.city = city;
}
public String getCountry() {
return country;
}
public void setCountry(String country) {
this.country = country;
}
public String getState() {
return state;
}
public void setState(String state) {
this.state = state;
}
public String getZipcode() {
return zipcode;
}
public void setZipcode(String zipcode) {
this.zipcode = zipcode;
}
public boolean equals(Object obj){
if(this == obj)
return true;
if((obj == null) || (obj.getClass() != this.getClass()))
return false;
CGMMUser test = (CGMMUser)obj;
return (loginIDGrid == test.loginIDGrid || (loginIDGrid != null && loginIDGrid.equals(test.loginIDGrid))) &&
(loginIDCSM == test.loginIDCSM || (loginIDCSM != null && loginIDCSM.equals(test.loginIDCSM)));
}
public int hashCode(){
int intNumber = 57 * 5;
intNumber = intNumber + ( (null==loginIDCSM?0:loginIDCSM.hashCode()) + (null==loginIDGrid?0:loginIDGrid.hashCode()) );
return intNumber;
}
}
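// A minimal sketch (hypothetical class name CGMMUserEqualityExample) illustrating the identity
// semantics of CGMMUser above: equals() and hashCode() are derived only from the CSM and Grid
// login IDs, so two instances with the same login IDs compare equal even when other fields differ.
class CGMMUserEqualityExample {
    public static void main(String[] args) {
        CGMMUser a = new CGMMUser();
        a.setLoginIDCSM("jdoe");
        a.setLoginIDGrid("/O=Grid/CN=John Doe");
        a.setFirstName("John");
        CGMMUser b = new CGMMUser();
        b.setLoginIDCSM("jdoe");
        b.setLoginIDGrid("/O=Grid/CN=John Doe");
        b.setFirstName("Johnny"); // ignored by equals()/hashCode()
        System.out.println(a.equals(b));                  // true
        System.out.println(a.hashCode() == b.hashCode()); // true
    }
}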
|
package org.terracotta.nomad.entity.common;
/**
* @author Mathieu Carbou
*/
public interface NomadEntityConstants {
String ENTITY_TYPE = "org.terracotta.nomad.entity.client.NomadEntity";
String ENTITY_NAME = "nomad-entity";
}
|
package org.eclipse.birt.report.engine.emitter.pptx.writer;
import java.awt.Color;
import java.io.IOException;
import java.util.Collection;
import java.util.Iterator;
import java.util.Stack;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.eclipse.birt.report.engine.emitter.EmitterUtil;
import org.eclipse.birt.report.engine.emitter.ppt.util.PPTUtil.HyperlinkDef;
import org.eclipse.birt.report.engine.emitter.pptx.util.PPTXUtil;
import org.eclipse.birt.report.engine.layout.emitter.Image;
import org.eclipse.birt.report.engine.layout.emitter.util.BackgroundImageLayout;
import org.eclipse.birt.report.engine.layout.emitter.util.Position;
import org.eclipse.birt.report.engine.layout.pdf.font.FontInfo;
import org.eclipse.birt.report.engine.nLayout.area.style.BorderInfo;
import org.eclipse.birt.report.engine.nLayout.area.style.TextStyle;
import org.eclipse.birt.report.engine.ooxml.IPart;
import org.eclipse.birt.report.engine.ooxml.ImageManager;
import org.eclipse.birt.report.engine.ooxml.ImageManager.ImagePart;
import org.eclipse.birt.report.engine.ooxml.constants.ContentTypes;
import org.eclipse.birt.report.engine.ooxml.constants.NameSpaces;
import org.eclipse.birt.report.engine.ooxml.constants.RelationshipTypes;
import org.eclipse.birt.report.engine.ooxml.util.OOXmlUtil;
import org.eclipse.birt.report.engine.ooxml.writer.OOXmlWriter;
import com.lowagie.text.Font;
public class Slide extends Component
{
private static final String TAG_SLIDE = "p:sld";
private static Logger logger = Logger.getLogger( Slide.class.getName( ) );
private int index;
private Presentation presentation;
private int shapeCount = 1;
private boolean isClosed = false;
private ImageManager imageManager;
public Slide( Presentation presentation, int slideIndex )
throws IOException
{
this( presentation, slideIndex, null );
}
public Slide( Presentation presentation, int slideIndex, Color bgColor )
throws IOException
{
this.index = slideIndex;
this.presentation = presentation;
String uri = getSlideUri( index );
String relationShipType = RelationshipTypes.SLIDE;
String type = ContentTypes.SLIDE;
initialize( presentation.getPart( ), uri, type, relationShipType );
new SlideLayout( this, index );
this.imageManager = (ImageManager) part.getPackage( )
.getExtensionData( );
writer.startWriter( );
writer.openTag( TAG_SLIDE );
writer.nameSpace( "a", NameSpaces.DRAWINGML );
writer.nameSpace( "r", NameSpaces.RELATIONSHIPS );
writer.nameSpace( "p", NameSpaces.PRESENTATIONML );
writer.openTag( "p:cSld" );
if ( bgColor != null )
drawSlideBackgroundColor( bgColor );
writer.openTag( "p:spTree" );
writer.openTag( "p:nvGrpSpPr" );
writer.openTag( "p:cNvPr" );
writer.attribute( "id", nextShapeId( ) );
writer.attribute( "name", "" );
writer.closeTag( "p:cNvPr" );
writer.openTag( "p:cNvGrpSpPr" );
writer.closeTag( "p:cNvGrpSpPr" );
writer.openTag( "p:nvPr" );
writer.closeTag( "p:nvPr" );
writer.closeTag( "p:nvGrpSpPr" );
writer.openTag( "p:grpSpPr" );
writer.closeTag( "p:grpSpPr" );
}
private void drawSlideBackgroundColor( Color color )
{
writer.openTag( "p:bg" );
writer.openTag( "p:bgPr" );
setColor( color );
writer.openTag( "a:effectLst" );
writer.closeTag( "a:effectLst" );
writer.closeTag( "p:bgPr" );
writer.closeTag( "p:bg" );
}
public void drawLine( int startX, int startY, int endX, int endY,
int width, Color color, int lineStyle )
{
if ( color == null || width == 0f
|| lineStyle==BorderInfo.BORDER_STYLE_NONE )
{
return;
}
writer.openTag( "p:cxnSp" );
writer.openTag( "p:nvCxnSpPr" );
writer.openTag( "p:cNvPr" );
int shapeId = nextShapeId( );
writer.attribute( "id", shapeId );
writer.attribute( "name", "Line " + shapeId );
writer.closeTag( "p:cNvPr" );
writer.openTag( "p:cNvCxnSpPr" );
writer.closeTag( "p:cNvCxnSpPr" );
writer.openTag( "p:nvPr" );
writer.closeTag( "p:nvPr" );
writer.closeTag( "p:nvCxnSpPr" );
writer.openTag( "p:spPr" );
setPosition( startX, startY, endX - startX, endY - startY );
writer.openTag( "a:prstGeom" );
writer.attribute( "prst", "line" );
writer.closeTag( "a:prstGeom" );
setProperty( color, width, lineStyle );
writer.closeTag( "p:spPr" );
writer.closeTag( "p:cxnSp" );
}
public void drawText( String text, int textX, int textY, int width,
int height, String fontName, float fontSize, int fontStyle,
Color color, boolean isUnderline, boolean isLineThrough,
HyperlinkDef link )
{
writer.openTag( "p:sp" );
writer.openTag( "p:nvSpPr" );
writer.openTag( "p:cNvPr" );
int shapeId = nextShapeId( );
writer.attribute( "id", shapeId );
writer.attribute( "name", "TextBox " + shapeId );
writer.closeTag( "p:cNvPr" );
writer.openTag( "p:cNvSpPr" );
writer.attribute( "txBox", "1" );
writer.closeTag( "p:cNvSpPr" );
writer.openTag( "p:nvPr" );
writer.closeTag( "p:nvPr" );
writer.closeTag( "p:nvSpPr" );
writer.openTag( "p:spPr" );
setPosition( textX, textY, width + 1, height );
writer.openTag( "a:prstGeom" );
writer.attribute( "prst", "rect" );
writer.closeTag( "a:prstGeom" );
writer.closeTag( "p:spPr" );
writer.openTag( "p:txBody" );
writer.openTag( "a:bodyPr" );
writer.attribute( "wrap", "none" );
writer.attribute( "lIns", "0" );
writer.attribute( "tIns", "0" );
writer.attribute( "rIns", "0" );
writer.attribute( "bIns", "0" );
writer.attribute( "rtlCol", "0" );
writer.closeTag( "a:bodyPr" );
writer.openTag( "a:p" );
writer.openTag( "a:r" );
setTextProperty( fontName, fontSize, fontStyle, color, isUnderline,
isLineThrough, link );
writer.openTag( "a:t" );
writeText(text);
writer.closeTag( "a:t" );
writer.closeTag( "a:r" );
writer.closeTag( "a:p" );
writer.closeTag( "p:txBody" );
writer.closeTag( "p:sp" );
}
/**
* OOXML puts extra limitations on the text inside a run: (a) it must be
* well-formed XML and (b) it must not contain "]]>", so all '&', '<' and '>'
* characters in the text are escaped before writing.
*
* @param text text to write
*/
private void writeText(String text) {
int length = text.length();
StringBuilder sb = new StringBuilder(length * 2);
for (int i = 0; i < length; i++) {
char ch = text.charAt(i);
switch (ch) {
case '&':
sb.append("&amp;");
break;
case '>':
sb.append("&gt;");
break;
case '<':
sb.append("&lt;");
break;
default:
sb.append(ch);
}
}
writer.cdata(sb.toString());
}
public void drawImage( String uri, String extension, int imageX,
int imageY, int height, int width, String helpText, HyperlinkDef link )
throws IOException
{
byte[] imageData = EmitterUtil.getImageData( uri );
IPart imagePart = imageManager.getImagePart( part, uri, imageData )
.getPart( );
drawImage( imagePart, imageX, imageY, height, width, helpText, true,
link );
}
public void drawImage( String imageId, byte[] imageData, String extension,
int imageX, int imageY, int height, int width, String helpText,
HyperlinkDef link ) throws IOException
{
drawImage( imageId, imageData, extension, imageX, imageY, height,
width, helpText, true, link );
}
private void drawImage( String imageId, byte[] imageData, String extension,
int imageX, int imageY, int height, int width, String helpText,
boolean stretch, HyperlinkDef link ) throws IOException
{
IPart imagePart = imageManager.getImagePart( part, imageId, imageData )
.getPart( );
drawImage( imagePart, imageX, imageY, height, width, helpText,
stretch, link );
}
private Crop checkCrop( int x, int y, int width, int height )
{
if ( clipStack.isEmpty( ) )
{
return null;
}
ClipArea clip = clipStack.peek( );
int left = 0, right = 0, top = 0, bottom = 0;
if ( x < clip.x )
{
left = (int) ( ( clip.x - x ) / (float) width * 100000 );
}
if ( y < clip.y )
{
top = (int) ( ( clip.y - y ) / (float) height * 100000 );
}
if ( x + width > clip.x + clip.width )
{
right = (int) ( ( ( x + width ) - ( clip.x + clip.width ) )
/ (float) width * 100000 );
}
if ( y + height > clip.y + clip.height )
{
bottom = (int) ( ( ( y + height ) - ( clip.y + clip.height ) )
/ (float) height * 100000 );
}
if ( left != 0 || right != 0 || top != 0 || bottom != 0 )
{
return new Crop( left, right, top, bottom );
}
return null;
}
private class Crop
{
int left, right, top, bottom;
Crop( int left, int right, int top, int bottom )
{
this.left = left;
this.right = right;
this.top = top;
this.bottom = bottom;
}
}
private void drawImage( IPart imagePart, int imageX, int imageY,
int height, int width, String helpText, boolean stretch,
HyperlinkDef link )
{
String relationshipId = imagePart.getRelationshipId( );
writer.openTag( "p:pic" );
writer.openTag( "p:nvPicPr" );
writer.openTag( "p:cNvPr" );
int shapeId = nextShapeId( );
writer.attribute( "id", shapeId );
writer.attribute( "name", "Image " + shapeId );
writer.attribute( "descr", helpText );
setHyperlink( link );
writer.closeTag( "p:cNvPr" );
writer.openTag( "p:cNvPicPr" );
writer.openTag( "a:picLocks" );
writer.attribute( "noChangeAspect", "1" );
writer.closeTag( "a:picLocks" );
writer.closeTag( "p:cNvPicPr" );
writer.openTag( "p:nvPr" );
writer.closeTag( "p:nvPr" );
writer.closeTag( "p:nvPicPr" );
writer.openTag( "p:blipFill" );
Crop crop = checkCrop( imageX, imageY, width, height );
if ( crop != null )
{
writer.attribute( "rotWithShape", "1" );
}
writer.openTag( "a:blip" );
writer.attribute( "r:embed", relationshipId );
writer.closeTag( "a:blip" );
if ( crop != null )
{
writer.openTag( "a:srcRect" );
if ( crop.top != 0 )
writer.attribute( "t", crop.top );
if ( crop.left != 0 )
writer.attribute( "l", crop.left );
if ( crop.right != 0 )
writer.attribute( "r", crop.right );
if ( crop.bottom != 0 )
writer.attribute( "b", crop.bottom );
writer.closeTag( "a:srcRect" );
}
if ( stretch )
{
writer.openTag( "a:stretch" );
// writer.openTag("a:fillRect");
// writer.closeTag("a:fillRect");
writer.closeTag( "a:stretch" );
}
writer.closeTag( "p:blipFill" );
writer.openTag( "p:spPr" );
if ( crop == null )
{
setPosition( imageX, imageY, width, height );
}
else
{
ClipArea clip = clipStack.peek( );
int pX = Math.max( clip.x, imageX );
int pY = Math.max( clip.y, imageY );
int pWidth = Math.min( imageX + width, clip.x + clip.width ) - pX;
int pHeight = Math.min( imageY + height, clip.y + clip.height )
- pY;
setPosition( pX, pY, pWidth, pHeight );
}
writer.openTag( "a:prstGeom" );
writer.attribute( "prst", "rect" );
writer.closeTag( "a:prstGeom" );
writer.closeTag( "p:spPr" );
writer.closeTag( "p:pic" );
}
public void drawBackgroundColor( Color color, int x, int y, int width,
int height )
{
if ( color != null )
{
writer.openTag( "p:sp" );
writer.openTag( "p:nvSpPr" );
writer.openTag( "p:cNvPr" );
int shapeId = nextShapeId( );
writer.attribute( "id", shapeId );
writer.attribute( "name", "Rectangle " + shapeId );
writer.closeTag( "p:cNvPr" );
writer.openTag( "p:cNvSpPr" );
writer.closeTag( "p:cNvSpPr" );
writer.openTag( "p:nvPr" );
writer.closeTag( "p:nvPr" );
writer.closeTag( "p:nvSpPr" );
writer.openTag( "p:spPr" );
setPosition( x, y, width, height );
writer.openTag( "a:prstGeom" );
writer.attribute( "prst", "rect" );
writer.closeTag( "a:prstGeom" );
setColor( color );
writer.closeTag( "p:spPr" );
writer.closeTag( "p:sp" );
}
}
public void drawBackgroundImage( int x, int y, int width, int height,
int imageWidth, int imageHeight, int repeat, String imageURI,
byte[] imageData, int offsetX, int offsetY )
{
if ( imageURI == null || imageURI.length( ) == 0 )
{
return;
}
if ( imageData == null || imageData.length == 0 )
{
return;
}
try
{
if ( !imageManager.hasImage( imageURI ) )
{
org.eclipse.birt.report.engine.layout.emitter.Image image = EmitterUtil
.parseImage( imageData, null, null );
imageData = image.getData( );
}
ImagePart imagePartInfo = imageManager.getImagePart( part,
imageURI, imageData );
Image imageInfo = imagePartInfo.getImageInfo( );
float originalImageWidth = imageWidth != 0 ? imageWidth : imageInfo
.getWidth( );
float originalImageHeight = imageHeight != 0
? imageHeight
: imageInfo.getHeight( );
Position areaPosition = new Position( x, y );
Position areaSize = new Position( width, height );
Position imagePosition = new Position( x + offsetX, y + offsetY );
Position imageSize = new Position( originalImageWidth, originalImageHeight );
BackgroundImageLayout layout = new BackgroundImageLayout(
areaPosition, areaSize, imagePosition, imageSize );
Collection positions = layout.getImagePositions( repeat );
Iterator iterator = positions.iterator( );
while ( iterator.hasNext( ) )
{
Position position = (Position) iterator.next( );
fillRectangleWithImage( imagePartInfo, (int) OOXmlUtil
.convertPointerToEmus( position.getX( ) ), (int) OOXmlUtil
.convertPointerToEmus( position.getY( ) ), (int) OOXmlUtil
.convertPointerToEmus( originalImageWidth ), (int) OOXmlUtil
.convertPointerToEmus( originalImageHeight ), 0, 0 );
}
}
catch ( IOException e )
{
logger.log( Level.SEVERE, e.getLocalizedMessage( ), e );
}
}
private float getImageRange( float maxRange, float offset, float imageSize )
{
float result = imageSize;
if ( offset < 0 )
{
result = Math.max( 0, imageSize + offset );
}
else if ( offset + imageSize > maxRange )
{
result = Math.max( 0, maxRange - offset );
}
return result;
}
private boolean isOutOfRange( float maxRange, float offset, float imageSize )
{
return offset <= 0 - imageSize || offset >= maxRange;
}
private void fillRectangleWithImage( ImagePart imageInfo, int x, int y,
int width, int height, int offsetX, int offsetY )
{
writer.openTag( "p:sp" );
writer.openTag( "p:nvSpPr" );
writer.openTag( "p:cNvPr" );
int shapeId = nextShapeId( );
writer.attribute( "id", shapeId );
writer.attribute( "name", "Rectangle " + shapeId );
writer.closeTag( "p:cNvPr" );
writer.openTag( "p:cNvSpPr" );
writer.closeTag( "p:cNvSpPr" );
writer.openTag( "p:nvPr" );
writer.closeTag( "p:nvPr" );
writer.closeTag( "p:nvSpPr" );
writer.openTag( "p:spPr" );
setPosition( x, y, width, height );
writer.openTag( "a:prstGeom" );
writer.attribute( "prst", "rect" );
writer.closeTag( "a:prstGeom" );
writer.openTag( "a:blipFill" );
writer.attribute( "dpi", "0" );
writer.attribute( "rotWithShape", "1" );
writer.openTag( "a:blip" );
writer.attribute( "r:embed", imageInfo.getPart( ).getRelationshipId( ) );
writer.closeTag( "a:blip" );
//To stretch
writer.openTag( "a:stretch" );
writer.openTag( "a:fillRect" );
writer.closeTag( "a:fillRect" );
writer.closeTag( "a:stretch" );
//To tile
// writer.openTag( "a:tile" );
// writer.attribute( "tx", offsetX );
// writer.attribute( "ty", offsetY );
// writer.closeTag( "a:tile" );
writer.closeTag( "a:blipFill" );
writer.openTag( "a:ln" );
writer.openTag( "a:noFill" );
writer.closeTag( "a:noFill" );
writer.closeTag( "a:ln" );
writer.closeTag( "p:spPr" );
writer.closeTag( "p:sp" );
}
private void setTextProperty( String fontName, float fontSize,
int fontStyle, Color color, boolean isUnderline,
boolean isLineThrough, HyperlinkDef link )
{
writer.openTag( "a:rPr" );
writer.attribute( "lang", "en-US" );
writer.attribute( "altLang", "zh-CN" );
writer.attribute( "dirty", "0" );
writer.attribute( "smtClean", "0" );
if ( isLineThrough )
{
writer.attribute( "strike", "sngStrike" );
}
if ( isUnderline )
{
writer.attribute( "u", "sng" );
}
writer.attribute( "sz", (int) ( fontSize * 100 ) );
boolean isItalic = ( fontStyle & Font.ITALIC ) != 0;
boolean isBold = ( fontStyle & Font.BOLD ) != 0;
if ( isItalic )
{
writer.attribute( "i", 1 );
}
if ( isBold )
{
writer.attribute( "b", 1 );
}
setColor( color );
setTextFont( fontName );
setHyperlink( link );
writer.closeTag( "a:rPr" );
}
private void setHyperlink( HyperlinkDef link )
{
if ( link != null )
{
String hyperlink = link.getLink( );
if ( hyperlink != null )
{
if ( hyperlink.startsWith( "\"" ) && hyperlink.endsWith( "\"" ) )
{
hyperlink = hyperlink
.substring( 1, hyperlink.length( ) - 1 );
}
writer.openTag( "a:hlinkClick" );
writer.attribute( "r:id", part.getHyperlinkId( hyperlink ) );
if ( link.getTooltip( ) != null )
{
writer.attribute( "tooltip", link.getTooltip( ) );
}
writer.closeTag( "a:hlinkClick" );
}
}
}
private void setTextFont( String fontName )
{
writer.openTag( "a:latin" );
writer.attribute( "typeface", fontName );
writer.attribute( "pitchFamily", "18" );
writer.attribute( "charset", "0" );
writer.closeTag( "a:latin" );
writer.openTag( "a:ea" );
writer.attribute( "typeface", fontName );
writer.attribute( "pitchFamily", "18" );
writer.attribute( "charset", "0" );
writer.closeTag( "a:ea" );
writer.openTag( "a:cs" );
writer.attribute( "typeface", fontName );
writer.attribute( "pitchFamily", "18" );
writer.attribute( "charset", "0" );
writer.closeTag( "a:cs" );
}
private void setColor( Color color )
{
if ( color != null )
{
writer.openTag( "a:solidFill" );
writer.openTag( "a:srgbClr" );
writer.attribute( "val", EmitterUtil.getColorString( color ) );
writer.closeTag( "a:srgbClr" );
writer.closeTag( "a:solidFill" );
}
}
private void setPosition( int startX, int startY, int width, int height )
{
writer.openTag( "a:xfrm" );
writer.openTag( "a:off" );
writer.attribute( "x", startX );
writer.attribute( "y", startY );
writer.closeTag( "a:off" );
writer.openTag( "a:ext" );
writer.attribute( "cx", width );
writer.attribute( "cy", height );
writer.closeTag( "a:ext" );
writer.closeTag( "a:xfrm" );
}
private void setProperty( Color color, int width, int style )
{
writer.openTag( "a:ln" );
writer.attribute( "w", width );
setColor( color );
// the other line styles, e.g. 'ridge', 'outset', 'groove', 'inset',
// are NOT supported yet and are all rendered with the default style, i.e. solid.
if ( style == BorderInfo.BORDER_STYLE_DOTTED )
{
setStyle( "sysDash" );
}
else if ( style == BorderInfo.BORDER_STYLE_DASHED )
{
setStyle( "dash" );
}
writer.closeTag( "a:ln" );
}
private void setStyle( String lineStyle )
{
writer.openTag( "a:prstDash" );
writer.attribute( "val", lineStyle );
writer.closeTag( "a:prstDash" );
}
public Presentation getPresentation( )
{
return presentation;
}
private String getSlideUri( int slideIndex )
{
return "slides/slide" + slideIndex + ".xml";
}
public int getSlideId( )
{
return 255 + index;
}
public void dispose( )
{
if ( !isClosed )
{
writer.closeTag( "p:spTree" );
writer.closeTag( "p:cSld" );
writer.openTag( "p:clrMapOvr" );
writer.openTag( "a:masterClrMapping" );
writer.closeTag( "a:masterClrMapping" );
writer.closeTag( "p:clrMapOvr" );
writer.closeTag( TAG_SLIDE );
writer.endWriter( );
writer.close( );
writer = null;
isClosed = true;
}
}
private int nextShapeId( )
{
return shapeCount++;
}
public void drawText( String text, int textX, int textY, int width,
int height, TextStyle textStyle, HyperlinkDef link )
{
FontInfo fontInfo = textStyle.getFontInfo( );
String fontName = fontInfo.getFontName( );
float fontSize = fontInfo.getFontSize( );
int fontStyle = fontInfo.getFontStyle( );
Color color = textStyle.getColor( );
drawText( text, textX, textY, width, height, fontName, fontSize,
fontStyle, color, textStyle.isUnderline( ), textStyle
.isLinethrough( ), link );
}
private Stack<ClipArea> clipStack = new Stack<ClipArea>( );
private class ClipArea
{
int x, y, width, height;
ClipArea( int x, int y, int width, int height )
{
this.x = x;
this.y = y;
this.width = width;
this.height = height;
}
}
public void startClip( int startX, int startY, int width, int height )
{
startX = PPTXUtil.convertToEnums( startX );
startY = PPTXUtil.convertToEnums( startY );
width = PPTXUtil.convertToEnums( width );
height = PPTXUtil.convertToEnums( height );
if ( clipStack.isEmpty( ) )
{
clipStack.push( new ClipArea( startX, startY, width, height ) );
}
else
{
ClipArea parent = clipStack.peek( );
int newX = Math.max( parent.x, startX );
int newY = Math.max( parent.y, startY );
int newWidth = Math.min( startX + width, parent.x + parent.width )
- newX;
int newHeight = Math
.min( startY + height, parent.y + parent.height ) - newY;
clipStack
.push( new ClipArea( newX, newY, newWidth, newHeight ) );
}
}
public void endClip( )
{
clipStack.pop( );
}
public OOXmlWriter getWriter( )
{
return writer;
}
}
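// A standalone sketch (hypothetical class name SrcRectCropExample) reproducing the crop arithmetic
// used by checkCrop() above: when an image overhangs the current clip area, each overhanging side
// is expressed as a fraction of the image size in units of 1/100000 (the per-cent-mille values
// written to the <a:srcRect> l/t/r/b attributes).
class SrcRectCropExample {
    public static void main(String[] args) {
        // Clip area and image position/size, all in EMUs.
        int clipX = 0, clipY = 0, clipW = 1000000, clipH = 1000000;
        int imgX = -250000, imgY = 500000, imgW = 1000000, imgH = 1000000;
        int left = 0, right = 0, top = 0, bottom = 0;
        if (imgX < clipX) {
            left = (int) ((clipX - imgX) / (float) imgW * 100000); // 25000 -> crop 25% from the left
        }
        if (imgY < clipY) {
            top = (int) ((clipY - imgY) / (float) imgH * 100000);
        }
        if (imgX + imgW > clipX + clipW) {
            right = (int) (((imgX + imgW) - (clipX + clipW)) / (float) imgW * 100000);
        }
        if (imgY + imgH > clipY + clipH) {
            bottom = (int) (((imgY + imgH) - (clipY + clipH)) / (float) imgH * 100000); // 50000 -> crop 50% from the bottom
        }
        System.out.println("l=" + left + " t=" + top + " r=" + right + " b=" + bottom);
        // Prints: l=25000 t=0 r=0 b=50000
    }
}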
|
package io.spine.gradle.compiler.lookup.enrichments;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import static org.junit.Assert.assertFalse;
/**
* @author Dmytro Dashenkov
*/
public class EnrichmentLookupPluginShould {
private static final Properties prop = new Properties();
@BeforeClass
public static void setUp() {
try {
// Get the properties file generated by the plugin
final File propFile = new File("generated/test/resources/enrichments.properties");
InputStream input = new FileInputStream(propFile);
prop.load(input);
input.close();
} catch (IOException e) {
throw new IllegalStateException(e);
}
}
@Test
public void find_enrichments() {
assertFalse(prop.isEmpty());
}
}
|
package org.eclipse.persistence.tools.schemaframework;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import org.eclipse.persistence.descriptors.ClassDescriptor;
import org.eclipse.persistence.eis.EISDescriptor;
import org.eclipse.persistence.exceptions.DatabaseException;
import org.eclipse.persistence.exceptions.ValidationException;
import org.eclipse.persistence.internal.databaseaccess.DatabasePlatform;
import org.eclipse.persistence.internal.descriptors.FieldTransformation;
import org.eclipse.persistence.internal.descriptors.MethodBasedFieldTransformation;
import org.eclipse.persistence.internal.descriptors.TransformerBasedFieldTransformation;
import org.eclipse.persistence.internal.helper.ClassConstants;
import org.eclipse.persistence.internal.helper.ConversionManager;
import org.eclipse.persistence.internal.helper.DatabaseField;
import org.eclipse.persistence.internal.helper.DatabaseTable;
import org.eclipse.persistence.internal.helper.Helper;
import org.eclipse.persistence.internal.queries.ContainerPolicy;
import org.eclipse.persistence.internal.queries.MappedKeyMapContainerPolicy;
import org.eclipse.persistence.internal.sessions.AbstractSession;
import org.eclipse.persistence.internal.sessions.DatabaseSessionImpl;
import org.eclipse.persistence.logging.AbstractSessionLog;
import org.eclipse.persistence.logging.SessionLog;
import org.eclipse.persistence.mappings.AggregateCollectionMapping;
import org.eclipse.persistence.mappings.AggregateObjectMapping;
import org.eclipse.persistence.mappings.DatabaseMapping;
import org.eclipse.persistence.mappings.DirectCollectionMapping;
import org.eclipse.persistence.mappings.DirectMapMapping;
import org.eclipse.persistence.mappings.ManyToManyMapping;
import org.eclipse.persistence.mappings.OneToManyMapping;
import org.eclipse.persistence.mappings.OneToOneMapping;
import org.eclipse.persistence.mappings.RelationTableMechanism;
import org.eclipse.persistence.mappings.TransformationMapping;
import org.eclipse.persistence.mappings.structures.ObjectRelationalDataTypeDescriptor;
import org.eclipse.persistence.oxm.XMLDescriptor;
import org.eclipse.persistence.sequencing.DefaultSequence;
import org.eclipse.persistence.sequencing.NativeSequence;
import org.eclipse.persistence.sequencing.Sequence;
import org.eclipse.persistence.sessions.DatabaseLogin;
import org.eclipse.persistence.sessions.Project;
import org.eclipse.persistence.sessions.Session;
import org.eclipse.persistence.sessions.server.ServerSession;
import org.eclipse.persistence.mappings.DirectToFieldMapping;
import org.eclipse.persistence.mappings.converters.Converter;
import org.eclipse.persistence.mappings.converters.SerializedObjectConverter;
import org.eclipse.persistence.mappings.converters.TypeConversionConverter;
/**
* DefaultTableGenerator is a utility class used to generate a default table schema for an EclipseLink project object.
*
* The utility can be used in EclipseLink CMP for OC4J to perform the table auto-creation process, which can be triggered
* at deployment time whether the EclipseLink project descriptor is absent (default mapping) or present.
*
* The utility can also be used by any EclipseLink application to perform the table drop/creation at runtime.
*
* The utility handles all direct/relational mappings, inheritance, multiple tables, interfaces with/without tables,
* optimistic version/timestamp locking, nested relationships, and BLOB/CLOB generation.
*
* The utility is platform-agnostic.
*
* Usage:
* - CMP
* 1. set "autocreate-tables=true|false, autodelete-tables=true|false" in oc4j application deployment
* descriptor files (config/system-application.xml, config/application.xml, or orion-application.xml in an .ear)
*
* 2. Default Mapping: the same as CMP, plus system properties setting -Declipselink.defaultmapping.autocreate-tables='true|false'
* and -Declipselink.defaultmapping.autodelete-tables='true|false'
*
* - Non-CMP:
* 1. Configuration: through sessions.xml
* 2. Directly runtime call through schema framework:
* SchemaManager mgr = new SchemaManager(session);
* mgr.replaceDefaultTables(); //drop and create
* mgr.createDefaultTables(); //create only
*
* The utility currently only supports relational projects.
*
* @author King Wang
* @since Oracle TopLink 10.1.3
*/
public class DefaultTableGenerator {
//the project object used to generate the default data schema.
Project project = null;
//the target database platform
private DatabasePlatform databasePlatform;
//used to track the table definition: keyed by the table name, and valued
//by the table definition object
private Map<String, TableDefinition> tableMap = null;
//used to track the field definition: keyed by the database field object, and
//valued by the field definition.
private Map<DatabaseField, FieldDefinition> fieldMap = null;
//DatabaseField pool (synchronized with above 'fieldMap')
private Map<DatabaseField, DatabaseField> databaseFields;
//When this flag is 'false' EclipseLink will not attempt to create fk constraints
protected boolean generateFKConstraints = true;
/**
* Default constructor
*/
public DefaultTableGenerator(Project project) {
this.project = project;
if (project.getDatasourceLogin().getDatasourcePlatform() instanceof DatabasePlatform){
databasePlatform = (DatabasePlatform)project.getDatasourceLogin().getDatasourcePlatform();
}
tableMap = new HashMap();
fieldMap = new HashMap();
databaseFields = new HashMap();
}
/**
* This constructor will create a DefaultTableGenerator that can be set to create fk
* constraints
*/
public DefaultTableGenerator(Project project, boolean generateFKConstraints){
this(project);
this.generateFKConstraints = generateFKConstraints;
}
/**
* Generate a default TableCreator object from the EclipseLink project object.
*/
public TableCreator generateDefaultTableCreator() {
TableCreator tblCreator = new TableCreator();
//go through each descriptor and build the table/field definitions out of mappings
Iterator descIter = project.getDescriptors().values().iterator();
while (descIter.hasNext()) {
ClassDescriptor desc = (ClassDescriptor)descIter.next();
if ((desc instanceof XMLDescriptor) || (desc instanceof EISDescriptor) || (desc instanceof ObjectRelationalDataTypeDescriptor)) {
//default table generator does not support ox, eis and object-relational descriptor
AbstractSessionLog.getLog().log(SessionLog.WARNING, "relational_descriptor_support_only", (Object[])null, true);
return tblCreator;
}
// Aggregate descriptors do not contain table/field data and are
// processed through their owning entities. Aggregate descriptors
// can not exist on their own.
if (!desc.isAggregateDescriptor() && !desc.isAggregateCollectionDescriptor()) {
initTableSchema(desc);
}
}
//Post-init the schema for relation tables and direct collection/map tables, and handle several special mappings.
descIter = project.getOrderedDescriptors().iterator();
while (descIter.hasNext()) {
ClassDescriptor desc = (ClassDescriptor) descIter.next();
// Aggregate descriptors do not contain table/field data and are
// processed through their owning entities. Aggregate descriptors
// can not exist on their own.
if (!desc.isAggregateDescriptor() && !desc.isAggregateCollectionDescriptor()) {
postInitTableSchema(desc);
}
}
tblCreator.addTableDefinitions(tableMap.values());
return tblCreator;
}
/**
* Generate a default TableCreator object from the EclipseLink project object,
* perform a table existence check through JDBC table metadata, and filter out
* tables which are already in the database.
*/
public TableCreator generateFilteredDefaultTableCreator(AbstractSession session) throws DatabaseException {
TableCreator tblCreator = generateDefaultTableCreator();
try {
//table existence check.
java.sql.Connection conn = null;
if (session.isServerSession()) {
//acquire a connection from the pool
conn = ((ServerSession)session).getDefaultConnectionPool().acquireConnection().getConnection();
} else if (session.isDatabaseSession()) {
conn = ((DatabaseSessionImpl)session).getAccessor().getConnection();
}
if (conn == null) {
//TODO: this is not pretty; a connection could not be obtained for some reason.
return tblCreator;
}
DatabaseMetaData dbMetaData = conn.getMetaData();
ResultSet resultSet = dbMetaData.getTables(null, dbMetaData.getUserName(), null, new String[] { "TABLE" });
java.util.List tablesInDatabase = new java.util.ArrayList();
while (resultSet.next()) {
//save all tables from the database
tablesInDatabase.add(resultSet.getString("TABLE_NAME"));
}
resultSet.close();
java.util.List existedTables = new java.util.ArrayList();
java.util.List existedTableNames = new java.util.ArrayList();
Iterator tblDefIter = tblCreator.getTableDefinitions().iterator();
while (tblDefIter.hasNext()) {
TableDefinition tblDef = (TableDefinition) tblDefIter.next();
//check if the to-be-created table is already in the database
if (tablesInDatabase.contains(tblDef.getFullName())) {
existedTables.add(tblDef);
existedTableNames.add(tblDef.getFullName());
}
}
if (!existedTableNames.isEmpty()) {
session.getSessionLog().log(SessionLog.FINEST, "skip_create_existing_tables", existedTableNames);
//remove the existing tables; they won't be created.
tblCreator.getTableDefinitions().removeAll(existedTables);
}
} catch (SQLException sqlEx) {
throw DatabaseException.errorRetrieveDbMetadataThroughJDBCConnection();
}
return tblCreator;
}
/**
* Build table/field information into the table creator object from an EclipseLink descriptor.
* This should handle most of the direct/relational mappings except many-to-many and direct
* collection/map mappings, which must be done in the postInit method.
*/
protected void initTableSchema(ClassDescriptor desc) {
TableDefinition tblDef = null;
DatabaseTable dbTbl = null;
Iterator dbTblIter = desc.getTables().iterator();
//create a table definition for each mapped database table
while (dbTblIter.hasNext()) {
dbTbl = (DatabaseTable) dbTblIter.next();
tblDef = getTableDefFromDBTable(dbTbl);
}
//build each field definition and figure out which table it belongs to
Iterator fieldIter = desc.getFields().iterator();
DatabaseField dbField = null;
while (fieldIter.hasNext()) {
dbField = (DatabaseField) fieldIter.next();
boolean isPKField = false;
//first check if the field is a pk field in the default table.
isPKField = desc.getPrimaryKeyFields().contains(dbField);
//then check if the field is a pk field in the secondary table(s), this is only applied to the multiple tables case.
Map secondaryKeyMap = desc.getAdditionalTablePrimaryKeyFields().get(dbField.getTable());
if (secondaryKeyMap != null) {
isPKField = isPKField || secondaryKeyMap.containsValue(dbField);
}
//build or retrieve the field definition.
FieldDefinition fieldDef = getFieldDefFromDBField(dbField, isPKField);
if (isPKField) {
// Check if the generation strategy is IDENTITY
String sequenceName = desc.getSequenceNumberName();
DatabaseLogin login = project.getLogin();
Sequence seq = login.getSequence(sequenceName);
if(seq instanceof DefaultSequence) {
seq = login.getDefaultSequence();
}
//A native sequence whose value should be acquired after insert is an identity sequence
boolean isIdentity = seq instanceof NativeSequence && seq.shouldAcquireValueAfterInsert();
fieldDef.setIsIdentity(isIdentity);
}
//find the table the field belongs to, and add it to the table, only if not already added.
tblDef = tableMap.get(dbField.getTableName());
if (!tblDef.getFields().contains(fieldDef)) {
tblDef.addField(fieldDef);
}
}
}
/**
* Build additional table/field definitions for the descriptor, like relation tables
* and direct-collection/direct-map tables, and reset the LOB type for serialized
* object mappings and type conversion mappings used with LOBs
*/
private void postInitTableSchema(ClassDescriptor desc) {
for (DatabaseMapping mapping : desc.getMappings()) {
if (desc.isChildDescriptor() && desc.getInheritancePolicy().getParentDescriptor().getMappingForAttributeName(mapping.getAttributeName()) != null) {
// If we are an inheritance subclass, do nothing. That is, don't
// generate mappings that will be generated by our parent,
// otherwise the fields for that mapping will be generated n
// times for the same table.
continue;
} else if (mapping.isManyToManyMapping()) {
buildRelationTableDefinition(((ManyToManyMapping)mapping).getRelationTableMechanism(), ((ManyToManyMapping)mapping).getListOrderField(), mapping.getContainerPolicy());
} else if (mapping.isDirectCollectionMapping()) {
buildDirectCollectionTableDefinition((DirectCollectionMapping) mapping, desc);
} else if (mapping.isDirectToFieldMapping()) {
Converter converter = ((DirectToFieldMapping)mapping).getConverter();
if (converter != null) {
if (converter instanceof TypeConversionConverter) {
resetFieldTypeForLOB((DirectToFieldMapping)mapping);
}
if (converter instanceof SerializedObjectConverter) {
//serialized object mapping field should be BLOB/IMAGE
getFieldDefFromDBField(mapping.getField(), false).setType(Byte[].class);
}
}
} else if (mapping.isAggregateCollectionMapping()) {
//need to figure out the target foreign key field and add it into the aggregate target table
createAggregateTargetTable((AggregateCollectionMapping) mapping);
} else if (mapping.isForeignReferenceMapping()) {
if (mapping.isOneToOneMapping()) {
RelationTableMechanism relationTableMechanism = ((OneToOneMapping)mapping).getRelationTableMechanism();
if(relationTableMechanism == null) {
addForeignKeyFieldToSourceTargetTable((OneToOneMapping) mapping);
} else {
buildRelationTableDefinition(relationTableMechanism, null, null);
}
} else if (mapping.isOneToManyMapping()) {
addForeignKeyFieldToSourceTargetTable((OneToManyMapping) mapping);
TableDefinition targTblDef = getTableDefFromDBTable(((OneToManyMapping)mapping).getReferenceDescriptor().getDefaultTable());
addFieldsForMappedKeyMapContainerPolicy(mapping.getContainerPolicy(), targTblDef);
}
} else if (mapping.isTransformationMapping()) {
resetTransformedFieldType((TransformationMapping) mapping);
} else if (mapping.isAggregateObjectMapping()){
postInitTableSchema(((AggregateObjectMapping)mapping).getReferenceDescriptor());
}
}
processAdditionalTablePkFields(desc);
}
/**
* The ContainerPolicy may contain some additional fields that should be added to the table
*
* @see MappedKeyMapContainerPolicy
* @param cp
* @param tblDef
*/
private void addFieldsForMappedKeyMapContainerPolicy(ContainerPolicy cp, TableDefinition tblDef){
if (cp.isMappedKeyMapPolicy()){
List<DatabaseField> keyFields = cp.getIdentityFieldsForMapKey();
Iterator<DatabaseField> i = keyFields.iterator();
while (i.hasNext()){
DatabaseField foreignKey = i.next();
FieldDefinition fieldDef = getFieldDefFromDBField(foreignKey, false);
if (!tblDef.getFields().contains(fieldDef)) {
tblDef.addField(getFieldDefFromDBField(foreignKey, false));
}
}
Map<DatabaseField, DatabaseField> foreignKeys = ((MappedKeyMapContainerPolicy)cp).getForeignKeyFieldsForMapKey();
if (foreignKeys != null){
addForeignMappingFkConstraint(foreignKeys);
}
}
}
/**
* Build relation table definitions for all many-to-many relationships in an EclipseLink descriptor.
*/
private void buildRelationTableDefinition(RelationTableMechanism relationTableMechanism, DatabaseField listOrderField, ContainerPolicy cp) {
//first create relation table
TableDefinition tblDef = getTableDefFromDBTable(relationTableMechanism.getRelationTable());
//add source foreign key fields into the relation table
Vector srcFkFields = relationTableMechanism.getSourceRelationKeyFields();
Vector srcKeyFields = relationTableMechanism.getSourceKeyFields();
buildRelationTableFields(tblDef, srcFkFields, srcKeyFields);
//add target foreign key fields into the relation table
Vector targFkFields = relationTableMechanism.getTargetRelationKeyFields();
Vector targKeyFields = relationTableMechanism.getTargetKeyFields();
buildRelationTableFields(tblDef, targFkFields, targKeyFields);
if (cp != null){
addFieldsForMappedKeyMapContainerPolicy(cp, tblDef);
}
if(listOrderField != null) {
tblDef.addField(getFieldDefFromDBField(listOrderField, false));
}
}
/**
* Build field definitions and foreign key constraints for a many-to-many relation table.
*/
private void buildRelationTableFields(TableDefinition tblDef, Vector fkFields, Vector targetFields) {
assert fkFields.size() > 0 && fkFields.size() == targetFields.size();
DatabaseField fkField = null;
DatabaseField targetField = null;
Vector<String> fkFieldNames = new Vector();
Vector<String> targetFieldNames = new Vector();
for (int index = 0; index < fkFields.size(); index++) {
fkField = (DatabaseField) fkFields.get(index);
targetField = (DatabaseField) targetFields.get(index);
fkFieldNames.add(fkField.getNameDelimited(databasePlatform));
targetFieldNames.add(targetField.getNameDelimited(databasePlatform));
fkField = resolveDatabaseField(fkField, targetField);
setFieldToRelationTable(fkField, tblDef);
}
// add a foreign key constraint from fk field to target field
DatabaseTable targetTable = targetField.getTable();
TableDefinition targetTblDef = getTableDefFromDBTable(targetTable);
addForeignKeyConstraint(tblDef, targetTblDef, fkFieldNames, targetFieldNames);
}
/**
* Build direct collection table definitions in an EclipseLink descriptor
*/
private void buildDirectCollectionTableDefinition(DirectCollectionMapping mapping, ClassDescriptor desc) {
//first create direct collection table
TableDefinition tblDef = getTableDefFromDBTable(mapping.getReferenceTable());
DatabaseField dbField = null;
//add the table reference key(s)
Vector refPkFields = mapping.getReferenceKeyFields();
for (int index = 0; index < refPkFields.size(); index++) {
dbField = resolveDatabaseField((DatabaseField) refPkFields.get(index), mapping.getSourceKeyFields().get(index));
tblDef.addField(getDirectCollectionReferenceKeyFieldDefFromDBField(dbField));
}
//add the direct collection field to the table.
tblDef.addField(getFieldDefFromDBField(mapping.getDirectField(), false));
//if the mapping is a direct-map mapping, add the direct key field to the table as well.
// TODO: avoid generating DDL for map key mappings for the time being.
// Bug: 270814
if (mapping.isDirectMapMapping() && ! mapping.getContainerPolicy().isMappedKeyMapPolicy() ) {
dbField = ((DirectMapMapping) mapping).getDirectKeyField();
tblDef.addField(getFieldDefFromDBField(dbField, false));
} else {
addFieldsForMappedKeyMapContainerPolicy(mapping.getContainerPolicy(), tblDef);
if(mapping.getListOrderField() != null) {
tblDef.addField(getFieldDefFromDBField(mapping.getListOrderField(), false));
}
}
}
/**
* Reset the field type to use BLOB/CLOB with a type conversion mapping; fix for the 4k Oracle thin driver bug.
*/
private void resetFieldTypeForLOB(DirectToFieldMapping mapping) {
if (mapping.getFieldClassification().getName().equals("java.sql.Blob")) {
//allow the platform to figure out what database field type is going to be used.
//For example, Oracle9 will generate a BLOB type, SQL Server generates IMAGE.
getFieldDefFromDBField(mapping.getField(), false).setType(Byte[].class);
} else if (mapping.getFieldClassification().getName().equals("java.sql.Clob")) {
//allow the platform to figure out what database field type is going to be used.
//For example, Oracle9 will generate a CLOB type, SQL Server generates TEXT.
getFieldDefFromDBField(mapping.getField(), false).setType(Character[].class);
}
}
/**
* Reset the transformation mapping field types
*/
private void resetTransformedFieldType(TransformationMapping mapping) {
Iterator transIter = mapping.getFieldTransformations().iterator();
while (transIter.hasNext()) {
FieldTransformation transformation = (FieldTransformation) transIter.next();
if (transformation instanceof MethodBasedFieldTransformation) {
MethodBasedFieldTransformation methodTransformation = (MethodBasedFieldTransformation) transformation;
try {
Class returnType = Helper.getDeclaredMethod(mapping.getDescriptor().getJavaClass(), methodTransformation.getMethodName(), null).getReturnType();
getFieldDefFromDBField(methodTransformation.getField(), false).setType(returnType);
} catch (NoSuchMethodException ex) {
// For some reason, the method type could not be retrieved,
// use the default java.lang.String type
}
} else {
// Must be a TransformerBasedFieldTransformation
TransformerBasedFieldTransformation classTransformation = (TransformerBasedFieldTransformation) transformation;
String methodName = "buildFieldValue";
Class[] params = new Class[] {Object.class, String.class, Session.class};
try {
Class returnType = Helper.getDeclaredMethod(classTransformation.getTransformerClass(), methodName, params).getReturnType();
if (returnType.equals(Object.class)) {
// User needs to be more specific with their class
// transformer return type if they are using DDL. Throw
// an exception.
throw ValidationException.missingFieldTypeForDDLGenerationOfClassTransformation(mapping.getDescriptor(), mapping.getAttributeName(), methodName);
}
getFieldDefFromDBField(classTransformation.getField(), false).setType(returnType);
} catch (NoSuchMethodException ex) {
// For some reason, the method type could not be retrieved.
// Did the interface method change? Throw an exception.
throw ValidationException.missingTransformerMethodForDDLGenerationOfClassTransformation(mapping.getDescriptor(), mapping.getAttributeName(), methodName);
}
}
}
}
/**
* Add the foreign key to the aggregate collection mapping target table.
* Also add listOrderField if specified.
*/
private void createAggregateTargetTable(AggregateCollectionMapping mapping) {
TableDefinition targTblDef = getTableDefFromDBTable(mapping.getReferenceDescriptor().getDefaultTable());
addFieldsForMappedKeyMapContainerPolicy(mapping.getContainerPolicy(), targTblDef);
Iterator aggregateFieldIterator = mapping.getReferenceDescriptor().getFields().iterator();
while (aggregateFieldIterator.hasNext()) {
DatabaseField dbField = (DatabaseField) aggregateFieldIterator.next();
//add the target foreign key field definition to the table definition
targTblDef.addField(getFieldDefFromDBField(dbField, false));
}
//unlike a normal one-to-many mapping, an aggregate collection mapping does not have a 1:1 back reference
//mapping, so the target foreign key fields are not stored in the target descriptor.
Iterator targFKIter = mapping.getTargetForeignKeyFields().iterator();
while (targFKIter.hasNext()) {
DatabaseField dbField = (DatabaseField) targFKIter.next();
//add the target foreign key field definition to the table definition
targTblDef.addField(getFieldDefFromDBField(dbField, false));
}
if(mapping.getListOrderField() != null) {
getTableDefFromDBTable(mapping.getListOrderField().getTable()).addField(getFieldDefFromDBField(mapping.getListOrderField(), false));
}
}
private void addForeignKeyFieldToSourceTargetTable(OneToOneMapping mapping) {
if (!mapping.isForeignKeyRelationship()) {
return;
}
addForeignMappingFkConstraint(mapping.getSourceToTargetKeyFields());
}
private void addForeignKeyFieldToSourceTargetTable(OneToManyMapping mapping) {
addForeignMappingFkConstraint(mapping.getTargetForeignKeysToSourceKeys());
if(mapping.getListOrderField() != null) {
getTableDefFromDBTable(mapping.getListOrderField().getTable()).addField(getFieldDefFromDBField(mapping.getListOrderField(), false));
}
}
private void addForeignMappingFkConstraint(final Map<DatabaseField, DatabaseField> srcFields) {
// srcFields maps the foreign key fields to the target key fields
if(srcFields.size() == 0) {
return;
}
List<DatabaseField> fkFields = new Vector<DatabaseField>();
List<DatabaseField> targetFields = new Vector<DatabaseField>();
for (DatabaseField fkField : srcFields.keySet()) {
fkFields.add(fkField);
targetFields.add(srcFields.get(fkField));
}
addJoinColumnsFkConstraint(fkFields, targetFields);
}
/**
* Build a table definition object from a database table object
*/
private TableDefinition getTableDefFromDBTable(DatabaseTable dbTbl) {
TableDefinition tblDef = this.tableMap.get(dbTbl.getName());
if (tblDef == null) {
//table not built yet, simply build it
tblDef = new TableDefinition();
tblDef.setName(dbTbl.getNameDelimited(databasePlatform));
tblDef.setQualifier(dbTbl.getTableQualifier());
addUniqueKeyConstraints(tblDef, dbTbl.getUniqueConstraints());
tableMap.put(dbTbl.getName(), tblDef);
}
return tblDef;
}
/**
* Resolve the foreign key database field metadata in a relation table or direct collection/map table.
* The metadata includes the type, and possibly dbtype/size/subsize if the DatabaseField carries that info.
*/
private DatabaseField resolveDatabaseField(DatabaseField childField, DatabaseField parentField) {
//carry the type over from the source table key field to the relation or direct collection table key field.
DatabaseField resolvedDatabaseField = new DatabaseField();
// find original field in the parent table, which contains actual type definitions
// if 'resolvedParentField' is null, there is no corresponding field definition (typo?)
DatabaseField resolvedParentField = databaseFields.get(parentField);
resolvedDatabaseField.setName(childField.getName());
//The table should be set, otherwise another field with the same name could be used wrongly because equals() returns true.
//Fix for GF#1392: the same column name on the entity and the many-to-many table caused a wrong pk constraint.
resolvedDatabaseField.setTable(childField.getTable());
// type definitions from parent field definition
if(resolvedParentField != null) {
resolvedDatabaseField.setType(resolvedParentField.getType());
resolvedDatabaseField.setScale(resolvedParentField.getScale());
resolvedDatabaseField.setLength(resolvedParentField.getLength());
resolvedDatabaseField.setPrecision(resolvedParentField.getPrecision());
}
// these are defined in the childField definition (see @JoinColumn)
resolvedDatabaseField.setUnique(childField.isUnique());
resolvedDatabaseField.setNullable(childField.isNullable());
resolvedDatabaseField.setUpdatable(childField.isUpdatable());
resolvedDatabaseField.setInsertable(childField.isInsertable());
resolvedDatabaseField.setUseDelimiters(childField.shouldUseDelimiters());
String columnDef = childField.getColumnDefinition();
if(columnDef == null || columnDef.trim().equals("")) {
// if childField has no column definition, follow the definition of the parent field
if(resolvedParentField != null) {
resolvedDatabaseField.setColumnDefinition(resolvedParentField.getColumnDefinition());
}
} else {
resolvedDatabaseField.setColumnDefinition(columnDef);
}
return resolvedDatabaseField;
}
/**
* Build a field definition object from a database field.
*/
private FieldDefinition getFieldDefFromDBField(DatabaseField dbField, boolean isPrimaryKey) {
FieldDefinition fieldDef = this.fieldMap.get(dbField);
if (fieldDef == null) {
//not built yet, build one
fieldDef = new FieldDefinition();
fieldDef.setName(dbField.getNameDelimited(databasePlatform));
if (dbField.getColumnDefinition() != null && dbField.getColumnDefinition().length() > 0) {
// This column definition would include the complete definition of the
// column like type, size, "NULL/NOT NULL" clause, unique key clause
fieldDef.setTypeDefinition(dbField.getColumnDefinition());
} else {
Class fieldType = dbField.getType();
// Check if the user field is a String and only then allow the length specified
// in the @Column annotation to be set on the field.
if ((fieldType != null)) {
if (fieldType.equals(ClassConstants.STRING) ||
fieldType.equals(ClassConstants.APCHAR) ||
fieldType.equals(ClassConstants.ACHAR)) {
// The field size defaults to "255" unless the user supplied a length
fieldDef.setSize(dbField.getLength());
} else {
if (dbField.getPrecision() > 0) {
fieldDef.setSize(dbField.getPrecision());
fieldDef.setSubSize(dbField.getScale());
}
}
}
if ((fieldType == null) || (!fieldType.isPrimitive() &&
(databasePlatform.getFieldTypeDefinition(fieldType) == null))) {
//TODO: log a warning for inaccessible or non-convertible types.
AbstractSessionLog.getLog().log(SessionLog.FINEST, "field_type_set_to_java_lang_string", dbField.getQualifiedName(), fieldType);
//set the default type (java.lang.String) for all unresolved java types (like null, Number, util.Date, NChar/NType, Calendar,
//sql.Blob/Clob, Object, or unknown types). Please refer to bug 4352820.
fieldDef.setType(ClassConstants.STRING);
} else {
//need to convert the primitive type if applicable.
fieldDef.setType(ConversionManager.getObjectClass(fieldType));
}
fieldDef.setShouldAllowNull(dbField.isNullable());
fieldDef.setUnique(dbField.isUnique());
}
fieldDef.setIsPrimaryKey(isPrimaryKey);
fieldMap.put(dbField, fieldDef);
databaseFields.put(dbField, dbField);
}
return fieldDef;
}
/**
* Build a direct collection reference key field definition object from a database field.
*/
private FieldDefinition getDirectCollectionReferenceKeyFieldDefFromDBField(DatabaseField dbField) {
FieldDefinition fieldDef = (FieldDefinition)getFieldDefFromDBField(dbField, true).clone();
//direct collection/map table reference key field is not unique, need to set it as non-pk.
fieldDef.setIsPrimaryKey(false);
return fieldDef;
}
/**
* Build and add a field definition object to relation table
*/
private void setFieldToRelationTable(DatabaseField dbField, TableDefinition tblDef) {
FieldDefinition fieldDef = getFieldDefFromDBField(dbField, false);
if (!tblDef.getFields().contains(fieldDef)) {
//only add the field once, to avoid adding it twice if the m:m is bi-directional.
tblDef.addField(getFieldDefFromDBField(dbField, false));
fieldDef.setIsPrimaryKey(true); // make this a PK as we will be creating constraints later
}
}
private void processAdditionalTablePkFields(ClassDescriptor desc) {
// only if there are additional tables
if (!desc.hasMultipleTables()) {
return;
}
DatabaseTable dbTbl = null;
Iterator dbTblIter = desc.getTables().iterator();
while (dbTblIter.hasNext()) {
dbTbl = (DatabaseTable) dbTblIter.next();
Map<DatabaseField, DatabaseField> srcFields = desc.getAdditionalTablePrimaryKeyFields().get(dbTbl);
if ((null != srcFields) && srcFields.size() > 0) {
// srcFields is from the secondary field to the primary key field
// Let's make fk constraint from the secondary field to the primary key field
List<DatabaseField> fkFields = new Vector<DatabaseField>();
List<DatabaseField> pkFields = new Vector<DatabaseField>();
for (DatabaseField pkField : srcFields.keySet()) {
pkFields.add(pkField);
fkFields.add(srcFields.get(pkField));
}
addJoinColumnsFkConstraint(fkFields, pkFields);
}
}
}
private void addJoinColumnsFkConstraint(List<DatabaseField> fkFields, List<DatabaseField> targetFields) {
assert fkFields.size() == targetFields.size();
if (fkFields.size() == 0) {
return;
}
DatabaseField fkField = null;
DatabaseField targetField = null;
Vector<String> fkFieldNames = new Vector();
Vector<String> targetFieldNames = new Vector();
DatabaseTable sourceTable = fkFields.get(0).getTable();
TableDefinition sourceTableDef = getTableDefFromDBTable(sourceTable);
for (int i=0; i < fkFields.size(); i++) {
fkField = fkFields.get(i);
targetField = targetFields.get(i);
fkFieldNames.add(fkField.getName());
targetFieldNames.add(targetField.getName());
FieldDefinition fkFieldDef = fieldMap.get(fkField);
FieldDefinition targetFieldDef = fieldMap.get(targetField);
if (targetFieldDef != null) {
// UnidirectionalOneToOneMapping case
if (fkFieldDef == null) {
fkFieldDef = getFieldDefFromDBField(fkField, false);
if (!sourceTableDef.getFields().contains(fkFieldDef)) {
sourceTableDef.addField(fkFieldDef);
}
}
// Also ensure that the type, size and subsize of the foreign key field are
// the same as those of the original field.
fkFieldDef.setType(targetFieldDef.getType());
fkFieldDef.setSize(targetFieldDef.getSize());
fkFieldDef.setSubSize(targetFieldDef.getSubSize());
}
}
// add a foreign key constraint
DatabaseTable targetTable = targetField.getTable();
TableDefinition targetTableDef = getTableDefFromDBTable(targetTable);
addForeignKeyConstraint(sourceTableDef, targetTableDef, fkFieldNames, targetFieldNames);
}
/**
* Add a foreign key constraint to the source table.
*/
private void addForeignKeyConstraint(TableDefinition sourceTableDef, TableDefinition targetTableDef,
Vector<String> fkFields, Vector<String> targetFields) {
// Only generate FK constraints if instructed to
if (! this.generateFKConstraints){
return;
}
assert fkFields.size() > 0 && fkFields.size() == targetFields.size();
// target keys could be primary keys or candidate(unique) keys of the target table
Vector<String> fkFieldNames = fkFields;
Vector<String> targetFieldNames = targetFields;
if (fkFields.size() > 1) {
// if composite key, we should consider the order of keys.
// Foreign Key constraint should follow the primary/unique key order of the target table.
// e.g. if the primary key constraint of the target table is (p2, p1),
// foreign key constraint should be "(f2, f1) REFERENCES TARGET (p2, p1)".
// we try to reorder the keys using the primary key or unique key order of the target table,
// but if we cannot resolve it due to an incorrect field name, we leave it as it is.
// This will trigger an underlying database exception so users can recognize the error.
boolean resolved = false;
boolean error = false;
Map<String, String> targetToFkField = new HashMap<String, String>();
for (int index = 0; index < fkFields.size(); index++) {
String targetField = targetFields.get(index);
if (targetToFkField.containsKey(targetField)) {
//target key column appears more than once
error = true;
break;
}
targetToFkField.put(targetField, fkFields.get(index));
}
Vector<String> orderedFkFields = new Vector<String>(fkFields.size());
Vector<String> orderedTargetFields = new Vector<String>(targetFields.size());
if (!error) {
// if target fields are primary keys
resolved = true;
for (String pkField : (Vector<String>)targetTableDef.getPrimaryKeyFieldNames()) {
String fkField = targetToFkField.get(pkField);
if (fkField == null) {
//primary key column not found
resolved = false;
break;
}
orderedFkFields.add(fkField);
orderedTargetFields.add(pkField);
}
}
if (!error && !resolved) {
// if target fields are unique keys
for (UniqueKeyConstraint uniqueConstraint : targetTableDef.getUniqueKeys()) {
orderedFkFields.setSize(0);
orderedTargetFields.setSize(0);
resolved = true;
for (String ukField : uniqueConstraint.getSourceFields()) {
String fkField = targetToFkField.get(ukField);
if (fkField == null) {
//unique key column not found
resolved = false;
break;
}
orderedFkFields.add(fkField);
orderedTargetFields.add(ukField);
}
if (resolved) {
break;
}
}
}
if (resolved) {
fkFieldNames = orderedFkFields;
targetFieldNames = orderedTargetFields;
}
}
// For bidirectional relationships both sides of the mapping will build the same FK constraint twice.
// TableDefinition.addForeignKeyConstraint() will ignore the duplicate FK constraint.
ForeignKeyConstraint fkc = sourceTableDef.buildForeignKeyConstraint(fkFieldNames, targetFieldNames,
targetTableDef, databasePlatform);
sourceTableDef.addForeignKeyConstraint(fkc);
}
private void addUniqueKeyConstraints(TableDefinition sourceTableDef, Map<String, Vector<List<String>>> uniqueConstraintsMap) {
UniqueKeyConstraint uniqueKeyConstraint;
int serialNumber = -1;
for (String name : uniqueConstraintsMap.keySet()) {
Vector<List<String>> uniqueConstraints = uniqueConstraintsMap.get(name);
for (List<String> uniqueConstraint : uniqueConstraints) {
if (uniqueConstraint != null) {
// To keep the serialNumber consecutive, increment it only
// if the name is not specified.
if (name == null || name.equals("")) {
serialNumber++;
}
sourceTableDef.addUniqueKeyConstraint(sourceTableDef.buildUniqueKeyConstraint(name, uniqueConstraint, serialNumber, databasePlatform));
}
}
}
}
}
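/**
* A minimal usage sketch, not part of EclipseLink itself: it shows how the generator above is
* typically driven to create only the tables that are missing from the database. The
* createTables(DatabaseSession) call on TableCreator is assumed to be the standard schema
* framework API; the class and method names of this sketch are hypothetical.
*/
class DefaultTableGeneratorUsageSketch {
void createMissingTables(DatabaseSessionImpl session, Project project) {
// build the full schema from the project's descriptors, with FK constraint generation enabled
DefaultTableGenerator generator = new DefaultTableGenerator(project, true);
// drop the definitions of tables that already exist in the database (checked via JDBC metadata)
TableCreator creator = generator.generateFilteredDefaultTableCreator(session);
// create the remaining tables on the connected session
creator.createTables(session);
}
}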
|
package org.sklsft.generator.bc.metadata.impl;
import org.sklsft.generator.bc.metadata.interfaces.BeanFactory;
import org.sklsft.generator.model.domain.Model;
import org.sklsft.generator.model.domain.business.Bean;
import org.sklsft.generator.model.domain.business.OneToMany;
import org.sklsft.generator.model.domain.business.OneToManyComponent;
import org.sklsft.generator.model.domain.business.OneToOne;
import org.sklsft.generator.model.domain.business.OneToOneComponent;
import org.sklsft.generator.model.domain.business.Property;
import org.sklsft.generator.model.domain.database.Column;
import org.sklsft.generator.model.domain.database.Table;
import org.sklsft.generator.model.metadata.RelationType;
import org.sklsft.generator.model.metadata.TableMetaData;
import org.sklsft.generator.util.naming.JavaClassNaming;
import org.springframework.stereotype.Component;
@Component("javaBeanFactory")
public class JavaBeanFactory implements BeanFactory {
@Override
public Bean scanBean(TableMetaData tableMetaData, Table table) {
Bean bean = new Bean();
bean.table = table;
bean.myPackage = table.myPackage;
bean.isComponent = false;
bean.cardinality = table.cardinality;
bean.detailMode = tableMetaData.getDetailMode();
bean.interfaces = tableMetaData.getInterfaces();
bean.annotations = tableMetaData.getAnnotations();
bean.createEnabled = tableMetaData.getCreateEnabled();
bean.updateEnabled = tableMetaData.getUpdateEnabled();
bean.deleteEnabled = tableMetaData.getDeleteEnabled();
bean.hasComboBox = tableMetaData.getComboxable();
bean.detailRendering = tableMetaData.getDetailRendering();
bean.listRendering = tableMetaData.getListRendering();
bean.className = JavaClassNaming.getClassName(table.originalName);
bean.objectName = JavaClassNaming.getObjectName(table.originalName);
bean.baseDaoClassName = bean.className + "BaseDaoImpl";
bean.daoClassName = bean.className + "DaoImpl";
bean.baseDaoInterfaceName = bean.className + "BaseDao";
bean.daoInterfaceName = bean.className + "Dao";
bean.daoObjectName = bean.objectName + "Dao";
bean.baseServiceClassName = bean.className + "BaseServiceImpl";
bean.serviceClassName = bean.className + "ServiceImpl";
bean.baseServiceInterfaceName = bean.className + "BaseService";
bean.serviceInterfaceName = bean.className + "Service";
bean.serviceObjectName = bean.objectName + "Service";
bean.baseStateManagerClassName = bean.className + "BaseStateManager";
bean.stateManagerClassName = bean.className + "StateManager";
bean.stateManagerObjectName = bean.objectName + "StateManager";
bean.baseRightsManagerClassName = bean.className + "BaseRightsManager";
bean.rightsManagerClassName = bean.className + "RightsManager";
bean.rightsManagerObjectName = bean.objectName + "RightsManager";
bean.baseProcessorClassName = bean.className + "BaseProcessor";
bean.processorClassName = bean.className + "Processor";
bean.processorObjectName = bean.objectName + "Processor";
bean.baseListControllerClassName = bean.className + "BaseListController";
bean.listControllerClassName = bean.className + "ListController";
bean.listControllerObjectName = bean.objectName + "ListController";
bean.baseDetailControllerClassName = bean.className + "BaseDetailController";
bean.detailControllerClassName = bean.className + "DetailController";
bean.detailControllerObjectName = bean.objectName + "DetailController";
bean.detailViewClassName = bean.className + "DetailView";
bean.detailViewObjectName = bean.objectName + "DetailView";
bean.listViewClassName = bean.className + "ListView";
bean.listViewObjectName = bean.objectName + "ListView";
return bean;
}
@Override
public Bean fillBean(Table table, Model model) {
Bean bean = model.findBean(table.originalName);
for (Column column : table.columns) {
Property property = new Property();
property.column = column;
if (column.referenceTable != null) {
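// reference (foreign key) columns are assumed to follow the _ID/_id suffix convention;
// the suffix is stripped to derive the property name, e.g. a CUSTOMER_ID column would
// typically yield a "customer" property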
property.name = JavaClassNaming
.getObjectName(column.originalName.replaceAll("_ID$", "").replaceAll("_id$", ""));
property.capName = JavaClassNaming
.getClassName(column.originalName.replaceAll("_ID$", "").replaceAll("_id$", ""));
property.referenceBean = bean.myPackage.model.findBean(column.referenceTable.originalName);
property.beanDataType = property.referenceBean.className;
} else {
property.name = JavaClassNaming.getObjectName(column.originalName);
property.capName = JavaClassNaming.getClassName(column.originalName);
property.beanDataType = column.dataType.getJavaType();
}
property.getterName = "get" + property.capName;
property.setterName = "set" + property.capName;
property.dataType = column.dataType;
property.nullable = column.nullable;
property.relation = column.relation;
property.embedded = property.relation.isEmbedded();
property.unique = column.unique;
property.format = column.format;
property.visibility = column.visibility;
property.editable = column.editable;
property.rendering = column.rendering;
property.annotations = column.annotations;
bean.properties.add(property);
if (column.relation.equals(RelationType.MANY_TO_ONE)) {
OneToMany oneToMany = new OneToMany();
oneToMany.referenceBean = bean;
oneToMany.referenceProperty = property;
oneToMany.collectionName = bean.objectName + "Collection";
oneToMany.collectionGetterName = "get" + bean.className + "Collection";
oneToMany.collectionSetterName = "set" + bean.className + "Collection";
Bean parentBean = bean.myPackage.model.findBean(column.referenceTable.originalName);
bean.parentBean = parentBean;
parentBean.oneToManyList.add(oneToMany);
oneToMany.parentBean = parentBean;
}
if (column.relation.equals(RelationType.MANY_TO_ONE_COMPONENT)) {
bean.isComponent = true;
OneToManyComponent oneToManyComponent = new OneToManyComponent();
oneToManyComponent.referenceBean = bean;
oneToManyComponent.referenceProperty = property;
oneToManyComponent.collectionName = bean.objectName + "Collection";
oneToManyComponent.collectionGetterName = "get" + bean.className + "Collection";
oneToManyComponent.collectionSetterName = "set" + bean.className + "Collection";
Bean parentBean = bean.myPackage.model.findBean(column.referenceTable.originalName);
bean.parentBean = parentBean;
oneToManyComponent.parentBean = parentBean;
parentBean.oneToManyComponentList.add(oneToManyComponent);
}
if (column.relation.equals(RelationType.ONE_TO_ONE)) {
OneToOne oneToOne = new OneToOne();
oneToOne.referenceBean = bean;
oneToOne.referenceProperty = property;
oneToOne.getterName = "get" + bean.className;
oneToOne.setterName = "set" + bean.className;
Bean targetBean = bean.myPackage.model.findBean(column.referenceTable.originalName);
targetBean.oneToOneList.add(oneToOne);
}
if (column.relation.equals(RelationType.ONE_TO_ONE_COMPONENT)) {
bean.isComponent = true;
bean.isOneToOneComponent = true;
OneToOneComponent oneToOneComponent = new OneToOneComponent();
oneToOneComponent.referenceBean = bean;
oneToOneComponent.referenceProperty = property;
oneToOneComponent.getterName = "get" + bean.className;
oneToOneComponent.setterName = "set" + bean.className;
Bean parentBean = bean.myPackage.model.findBean(column.referenceTable.originalName);
bean.parentBean = parentBean;
oneToOneComponent.parentBean = parentBean;
parentBean.oneToOneComponentList.add(oneToOneComponent);
}
if (column.relation.equals(RelationType.EMBEDDED)) {
Bean targetBean = bean.myPackage.model.findBean(column.referenceTable.originalName);
targetBean.isEmbedded = true;
targetBean.isComponent = true;
}
}
return bean;
}
}
|
package edu.kit.iti.formal.pse.worthwhile.model.ast.util;
import java.util.Iterator;
import java.util.List;
import edu.kit.iti.formal.pse.worthwhile.model.ast.ASTNode;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Addition;
import edu.kit.iti.formal.pse.worthwhile.model.ast.ArrayLiteral;
import edu.kit.iti.formal.pse.worthwhile.model.ast.ArrayType;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Assertion;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Assignment;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Assumption;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Axiom;
import edu.kit.iti.formal.pse.worthwhile.model.ast.BinaryExpression;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Block;
import edu.kit.iti.formal.pse.worthwhile.model.ast.BooleanLiteral;
import edu.kit.iti.formal.pse.worthwhile.model.ast.BooleanType;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Conditional;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Conjunction;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Disjunction;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Equal;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Expression;
import edu.kit.iti.formal.pse.worthwhile.model.ast.ForAllQuantifier;
import edu.kit.iti.formal.pse.worthwhile.model.ast.FunctionCall;
import edu.kit.iti.formal.pse.worthwhile.model.ast.FunctionDeclaration;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Greater;
import edu.kit.iti.formal.pse.worthwhile.model.ast.GreaterOrEqual;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Implication;
import edu.kit.iti.formal.pse.worthwhile.model.ast.IntegerLiteral;
import edu.kit.iti.formal.pse.worthwhile.model.ast.IntegerType;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Invariant;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Less;
import edu.kit.iti.formal.pse.worthwhile.model.ast.LessOrEqual;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Loop;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Minus;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Multiplication;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Negation;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Postcondition;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Precondition;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Program;
import edu.kit.iti.formal.pse.worthwhile.model.ast.ReturnStatement;
import edu.kit.iti.formal.pse.worthwhile.model.ast.ReturnValueReference;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Statement;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Subtraction;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Unequal;
import edu.kit.iti.formal.pse.worthwhile.model.ast.VariableDeclaration;
import edu.kit.iti.formal.pse.worthwhile.model.ast.VariableReference;
import edu.kit.iti.formal.pse.worthwhile.model.ast.visitor.HierarchialASTNodeVisitor;
/**
* Implements toString methods for {@link ASTNode}s.
*
* @author fabian
*
*/
public final class AstNodeToStringHelper extends HierarchialASTNodeVisitor {
/**
* A single instance of {@link AstNodeToStringHelper}.
*/
private static AstNodeToStringHelper singleton = new AstNodeToStringHelper();
/**
* The buffer holding the {@link String} result returned by {@link #toString(ASTNode)}.
*/
private StringBuffer buf;
/**
* Private default constructor.
*/
private AstNodeToStringHelper() {
super();
}
/**
* Returns a {@link String} representing an {@link ASTNode}.
*
* @param node
* the ASTNode that is represented by the returned String
* @return a String representing <code>node</code>
*/
public static String toString(final ASTNode node) {
AstNodeToStringHelper.singleton.buf = new StringBuffer();
node.accept(AstNodeToStringHelper.singleton);
return AstNodeToStringHelper.singleton.buf.toString();
}
/**
* Appends <code>`(' binaryExpression.left operatorString binaryExpression.right `)'</code> to the buffer.
*
* @param binaryExpression
* the {@link BinaryExpression} that is to be represented as {@link String}
* @param operatorString
* the <code>String</code> representation for the <code>binaryExpression</code>'s operator
*/
private void appendBinaryExpression(final BinaryExpression binaryExpression, final String operatorString) {
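// e.g. an Addition whose operands are the integer literals 1 and 2 is rendered as "(1 + 2)"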
this.buf.append("(");
binaryExpression.getLeft().accept(this);
this.buf.append(" " + operatorString + " ");
binaryExpression.getRight().accept(this);
this.buf.append(")");
}
@Override
public void visit(final ASTNode aSTNode) {
this.buf.append(aSTNode.toString());
}
@Override
public void visit(final Addition addition) {
this.appendBinaryExpression(addition, "+");
}
@Override
public void visit(final Subtraction subtraction) {
this.appendBinaryExpression(subtraction, "-");
}
@Override
public void visit(final ArrayLiteral arrayLiteral) {
this.buf.append("{ ");
final Iterator<Expression> i = arrayLiteral.getValues().iterator();
if (i.hasNext()) {
i.next().accept(this);
while (i.hasNext()) {
this.buf.append(", ");
i.next().accept(this);
}
}
this.buf.append(" }");
}
@Override
public void visit(final ArrayType arrayType) {
arrayType.getBaseType().accept(this);
this.buf.append("[]");
}
@Override
public void visit(final Assertion assertion) {
this.buf.append("_assert ");
assertion.getExpression().accept(this);
}
@Override
public void visit(final Assignment assignment) {
assignment.getVariable().accept(this);
this.buf.append(" := ");
assignment.getValue().accept(this);
}
@Override
public void visit(final Assumption assumption) {
this.buf.append("_assume ");
assumption.getExpression().accept(this);
}
@Override
public void visit(final Block block) {
this.buf.append("{\n");
for (Statement stmt : block.getStatements()) {
stmt.accept(this);
this.buf.append("\n");
}
this.buf.append("}\n");
}
@Override
public void visit(final BooleanLiteral booleanLiteral) {
this.buf.append(booleanLiteral.getValue());
}
@Override
public void visit(final BooleanType booleanType) {
this.buf.append("Boolean");
}
@Override
public void visit(final Conjunction conjunction) {
this.appendBinaryExpression(conjunction, "&&");
}
@Override
public void visit(final Equal equal) {
this.appendBinaryExpression(equal, "=");
}
@Override
public void visit(final FunctionCall functionCall) {
this.buf.append(functionCall.getFunction().getName());
this.buf.append("(");
final List<Expression> actuals = functionCall.getActuals();
if (!actuals.isEmpty()) {
final Iterator<Expression> i = actuals.iterator();
i.next().accept(this);
while (i.hasNext()) {
this.buf.append(", ");
i.next().accept(this);
}
}
this.buf.append(")");
}
@Override
public void visit(final IntegerLiteral integerLiteral) {
this.buf.append(integerLiteral.getValue());
}
@Override
public void visit(final IntegerType integerType) {
this.buf.append("Integer");
}
@Override
public void visit(final Less less) {
this.appendBinaryExpression(less, "<");
}
@Override
public void visit(final LessOrEqual lessOrEqual) {
this.appendBinaryExpression(lessOrEqual, "<=");
}
@Override
public void visit(final Loop loop) {
this.buf.append("while ");
loop.getCondition().accept(this);
for (final Invariant i : loop.getInvariants()) {
this.buf.append("\n_invariant ");
i.getExpression().accept(this);
}
this.buf.append(" ");
loop.getBody().accept(this);
}
@Override
public void visit(final Conditional conditional) {
this.buf.append("if ");
conditional.getCondition().accept(this);
conditional.getTrueBlock().accept(this);
if (conditional.getFalseBlock() != null) {
this.buf.append("else ");
conditional.getFalseBlock().accept(this);
}
}
@Override
public void visit(final Minus minus) {
this.buf.append("-");
minus.getOperand().accept(this);
}
@Override
public void visit(final Program program) {
for (final Axiom a : program.getAxioms()) {
a.accept(this);
this.buf.append("\n");
}
for (final FunctionDeclaration f : program.getFunctionDeclarations()) {
f.accept(this);
}
program.getMainBlock().accept(this);
}
@Override
public void visit(final VariableDeclaration variableDeclaration) {
variableDeclaration.getType().accept(this);
this.buf.append(" ");
this.buf.append(variableDeclaration.getName());
Expression initialValue = variableDeclaration.getInitialValue();
if (initialValue != null) {
this.buf.append(" := ");
initialValue.accept(this);
}
}
@Override
public void visit(final VariableReference variableReference) {
this.buf.append(variableReference.getVariable().getName());
if (variableReference.getIndex() != null) {
this.buf.append("[");
variableReference.getIndex().accept(this);
this.buf.append("]");
}
}
@Override
public void visit(final Implication implication) {
this.appendBinaryExpression(implication, "=>");
}
@Override
public void visit(final Negation negation) {
this.buf.append("!");
negation.getOperand().accept(this);
}
@Override
public void visit(final Multiplication multiplication) {
this.appendBinaryExpression(multiplication, "*");
}
@Override
public void visit(final Axiom axiom) {
this.buf.append("_axiom ");
axiom.getExpression().accept(this);
}
@Override
public void visit(final ForAllQuantifier forAllQuantifier) {
this.buf.append("forall ");
forAllQuantifier.getParameter().accept(this);
Expression condition = forAllQuantifier.getCondition();
if (condition != null) {
this.buf.append(", ");
condition.accept(this);
}
this.buf.append(" : ");
forAllQuantifier.getExpression().accept(this);
}
@Override
public void visit(final FunctionDeclaration functionDeclaration) {
this.buf.append("function ");
this.buf.append(functionDeclaration.getName());
this.buf.append("(");
List<VariableDeclaration> params = functionDeclaration.getParameters();
if (!params.isEmpty()) {
Iterator<VariableDeclaration> i = functionDeclaration.getParameters().iterator();
// params is not empty, i has next
i.next().accept(this);
while (i.hasNext()) {
this.buf.append(", ");
i.next().accept(this);
}
}
this.buf.append(") ");
for (final Precondition p : functionDeclaration.getPreconditions()) {
this.buf.append("\n");
p.accept(this);
}
for (final Postcondition p : functionDeclaration.getPostconditions()) {
this.buf.append("\n");
p.accept(this);
}
functionDeclaration.getBody().accept(this);
}
@Override
public void visit(final Postcondition postcondition) {
this.buf.append("_ensures ");
postcondition.getExpression().accept(this);
}
@Override
public void visit(final Precondition precondition) {
this.buf.append("_requires ");
precondition.getExpression().accept(this);
}
@Override
public void visit(final ReturnStatement returnStatement) {
this.buf.append("return ");
returnStatement.getReturnValue().accept(this);
}
@Override
public void visit(final ReturnValueReference node) {
this.buf.append("_return");
if (node.getIndex() != null) {
this.buf.append("[");
node.getIndex().accept(this);
this.buf.append("]");
}
}
@Override
public void visit(final Unequal unequal) {
this.appendBinaryExpression(unequal, "!=");
}
@Override
public void visit(final GreaterOrEqual greaterOrEqual) {
this.appendBinaryExpression(greaterOrEqual, ">=");
}
@Override
public void visit(final Greater greater) {
this.appendBinaryExpression(greater, ">");
}
@Override
public void visit(final Disjunction disjunction) {
this.appendBinaryExpression(disjunction, "||");
}
}
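/**
* A minimal usage sketch, not part of the original code base: the node parameter stands for any
* AST node obtained elsewhere (for example from the Worthwhile parser); the class and method
* names of the sketch itself are hypothetical.
*/
final class AstNodeToStringHelperUsageSketch {
static String describe(final ASTNode node) {
// a single static call per node; the helper walks the subtree and builds the string
return AstNodeToStringHelper.toString(node);
}
}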
|
package edu.kit.iti.formal.pse.worthwhile.prover;
import java.util.LinkedHashSet;
import java.util.Set;
import edu.kit.iti.formal.pse.worthwhile.model.ast.ArrayFunction;
import edu.kit.iti.formal.pse.worthwhile.model.ast.ArrayFunctionAccess;
import edu.kit.iti.formal.pse.worthwhile.model.ast.BinaryExpression;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Expression;
import edu.kit.iti.formal.pse.worthwhile.model.ast.Literal;
import edu.kit.iti.formal.pse.worthwhile.model.ast.QuantifiedExpression;
import edu.kit.iti.formal.pse.worthwhile.model.ast.UnaryExpression;
import edu.kit.iti.formal.pse.worthwhile.model.ast.VariableDeclaration;
import edu.kit.iti.formal.pse.worthwhile.model.ast.VariableReference;
import edu.kit.iti.formal.pse.worthwhile.model.ast.visitor.HierarchialASTNodeVisitor;
/**
* A visitor to find variables that are unbound and have to be declared.
*
* @author Leon Handreke
*
*/
public class UnboundVariableFinderVisitor extends HierarchialASTNodeVisitor {
/**
* The set of variables that are bound in the current visitor state.
*/
private Set<VariableDeclaration> boundVariables = new LinkedHashSet<VariableDeclaration>();
/**
* The set of unbound variables that have to be declared.
*/
private Set<VariableDeclaration> unboundVariables = new LinkedHashSet<VariableDeclaration>();
/**
* @return the set of unbound variables in the visited {@link Expression}s
*/
public final Set<VariableDeclaration> getUnboundVariables() {
return this.unboundVariables;
}
@Override
public final void visit(final ArrayFunction arrayFunction) {
final Expression index = arrayFunction.getIndex();
if (index != null) {
index.accept(this);
}
arrayFunction.getValue().accept(this);
final ArrayFunction chainedFunction = arrayFunction.getChainedFunction();
if (chainedFunction != null) {
chainedFunction.accept(this);
}
}
@Override
public final void visit(final ArrayFunctionAccess arrayFunctionAccess) {
arrayFunctionAccess.getFunction().accept(this);
arrayFunctionAccess.getIndex().accept(this);
}
@Override
public final void visit(final BinaryExpression binaryExpression) {
binaryExpression.getLeft().accept(this);
binaryExpression.getRight().accept(this);
}
@Override
public final void visit(final UnaryExpression unaryExpression) {
unaryExpression.getOperand().accept(this);
}
@Override
public final void visit(final Literal literal) {
// literals can't really have any unbound variables...
}
@Override
public final void visit(final QuantifiedExpression quantifiedExpression) {
// a quantifiedExpression binds a variable
this.boundVariables.add(quantifiedExpression.getParameter());
if (quantifiedExpression.getCondition() != null) {
quantifiedExpression.getCondition().accept(this);
}
quantifiedExpression.getExpression().accept(this);
this.boundVariables.remove(quantifiedExpression.getParameter());
}
@Override
public final void visit(final VariableReference variableReference) {
variableReference.getVariable().accept(this);
}
@Override
public final void visit(final VariableDeclaration variableDeclaration) {
if (!this.boundVariables.contains(variableDeclaration)) {
this.unboundVariables.add(variableDeclaration);
}
}
}
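/**
* A minimal usage sketch, not part of the original prover code: the formula parameter is any
* expression obtained elsewhere (for example a generated verification condition); the class and
* method names of the sketch are hypothetical.
*/
final class UnboundVariableFinderUsageSketch {
static Set<VariableDeclaration> freeVariablesOf(final Expression formula) {
// one visitor instance per query; it collects free variables while walking the expression tree
final UnboundVariableFinderVisitor finder = new UnboundVariableFinderVisitor();
formula.accept(finder);
return finder.getUnboundVariables();
}
}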
|
package org.opennms.netmgt.provision;
import java.net.InetAddress;
import java.util.Date;
import java.util.Set;
import org.apache.log4j.Category;
import org.opennms.core.utils.ThreadCategory;
import org.opennms.netmgt.EventConstants;
import org.opennms.netmgt.dao.NodeDao;
import org.opennms.netmgt.model.OnmsIpInterface;
import org.opennms.netmgt.model.OnmsNode;
import org.opennms.netmgt.model.events.EventBuilder;
import org.opennms.netmgt.model.events.EventForwarder;
import org.springframework.transaction.annotation.Transactional;
/**
* A Dynamic DNS provisioning adapter for integration with the OpenNMS Provisioning daemon API.
*
* @author <a href="mailto:david@opennms.org">David Hustace</a>
*
*/
public class DnsProvisioningAdapter implements ProvisioningAdapter {
/*
* A read-only DAO will be set by the Provisioning Daemon.
*/
private NodeDao m_nodeDao;
private EventForwarder m_eventForwarder;
private static final String MESSAGE_PREFIX = "Dynamic DNS provisioning failed: ";
/* (non-Javadoc)
* @see org.opennms.netmgt.provision.ProvisioningAdapter#addNode(int)
*/
@Transactional
public void addNode(int nodeId) throws ProvisioningAdapterException {
OnmsNode node = null;
try {
node = m_nodeDao.get(nodeId);
DnsRecord record = new DnsRecord(node);
DynamicDnsAdapter.add(record);
} catch (Exception e) {
log().error("addNode: Error handling node added event.", e);
sendAndThrow(nodeId, e);
}
}
/* (non-Javadoc)
* @see org.opennms.netmgt.provision.ProvisioningAdapter#updateNode(int)
*/
@Transactional
public void updateNode(int nodeId) throws ProvisioningAdapterException {
try {
OnmsNode node = m_nodeDao.get(nodeId);
DnsRecord record = new DnsRecord(node);
DynamicDnsAdapter.update(record);
} catch (Exception e) {
log().error("updateNode: Error handling node added event.", e);
sendAndThrow(nodeId, e);
}
}
/* (non-Javadoc)
* @see org.opennms.netmgt.provision.ProvisioningAdapter#deleteNode(int)
*/
@Transactional
public void deleteNode(int nodeId) throws ProvisioningAdapterException {
try {
OnmsNode node = m_nodeDao.get(nodeId);
DnsRecord record = new DnsRecord(node);
DynamicDnsAdapter.delete(record);
} catch (Exception e) {
log().error("deleteNode: Error handling node deleted event.", e);
sendAndThrow(nodeId, e);
}
}
private void sendAndThrow(int nodeId, Exception e) {
m_eventForwarder.sendNow(buildEvent(EventConstants.PROVISIONING_ADAPTER_FAILED, nodeId).addParam("reason", MESSAGE_PREFIX+e.getLocalizedMessage()).getEvent());
throw new ProvisioningAdapterException(MESSAGE_PREFIX, e);
}
private EventBuilder buildEvent(String uei, int nodeId) {
EventBuilder builder = new EventBuilder(uei, "Provisioner", new Date());
builder.setNodeid(nodeId);
return builder;
}
public NodeDao getNodeDao() {
return m_nodeDao;
}
public void setNodeDao(NodeDao dao) {
m_nodeDao = dao;
}
public void setEventForwarder(EventForwarder eventForwarder) {
m_eventForwarder = eventForwarder;
}
public EventForwarder getEventForwarder() {
return m_eventForwarder;
}
class DnsRecord {
private InetAddress m_ip;
private String m_hostname;
DnsRecord(OnmsNode node) {
OnmsIpInterface primaryInterface = node.getPrimaryInterface();
if (primaryInterface == null) {
Set<OnmsIpInterface> ipInterfaces = node.getIpInterfaces();
for (OnmsIpInterface onmsIpInterface : ipInterfaces) {
m_ip = onmsIpInterface.getInetAddress();
}
} else {
m_ip = primaryInterface.getInetAddress();
}
m_hostname = node.getLabel();
}
public InetAddress getIp() {
return m_ip;
}
public String getHostname() {
return m_hostname;
}
}
static class DynamicDnsAdapter {
static boolean add(DnsRecord record) {
log().error("DNS Adapter not Implemented.");
throw new UnsupportedOperationException("method not yet implemented.");
}
static boolean update(DnsRecord record) {
log().error("DNS Adapter not Implemented.");
throw new UnsupportedOperationException("method not yet implemented.");
}
static boolean delete(DnsRecord record) {
log().error("DNS Adapter not Implemented.");
throw new UnsupportedOperationException("method not yet implemented.");
}
public static DnsRecord getRecord(DnsRecord record) {
log().error("DNS Adapter not Implemented.");
throw new UnsupportedOperationException("method not yet implemented.");
}
}
private static Category log() {
return ThreadCategory.getInstance(DnsProvisioningAdapter.class);
}
}
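/*
 * Wiring sketch, not part of the original adapter sources: shows how the setters above are
 * expected to be used (normally this is done by the Provisioning daemon's Spring context).
 * The NodeDao and EventForwarder instances are assumed to be supplied elsewhere, and the
 * node id 1 is an arbitrary illustrative value.
 */
final class DnsProvisioningAdapterExample {
    static void wire(NodeDao nodeDao, EventForwarder eventForwarder) throws ProvisioningAdapterException {
        DnsProvisioningAdapter adapter = new DnsProvisioningAdapter();
        adapter.setNodeDao(nodeDao);
        adapter.setEventForwarder(eventForwarder);
        // addNode() currently delegates to the unimplemented DynamicDnsAdapter, so this call
        // logs the failure, forwards a PROVISIONING_ADAPTER_FAILED event and rethrows the
        // cause wrapped in a ProvisioningAdapterException.
        adapter.addNode(1);
    }
}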
|
package org.opennms.netmgt.provision;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Date;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.apache.log4j.Category;
import org.opennms.core.utils.ThreadCategory;
import org.opennms.netmgt.EventConstants;
import org.opennms.netmgt.config.MapsAdapterConfig;
import org.opennms.netmgt.config.map.adapter.Celement;
import org.opennms.netmgt.config.map.adapter.Cmap;
import org.opennms.netmgt.config.map.adapter.Csubmap;
import org.opennms.netmgt.dao.NodeDao;
import org.opennms.netmgt.dao.OnmsMapDao;
import org.opennms.netmgt.dao.OnmsMapElementDao;
import org.opennms.netmgt.model.OnmsIpInterface;
import org.opennms.netmgt.model.OnmsMap;
import org.opennms.netmgt.model.OnmsMapElement;
import org.opennms.netmgt.model.OnmsNode;
import org.opennms.netmgt.model.events.EventBuilder;
import org.opennms.netmgt.model.events.EventForwarder;
import org.opennms.netmgt.xml.event.Event;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionTemplate;
import org.springframework.util.Assert;
/**
* A Dynamic Map provisioning adapter for integration with the OpenNMS Provisioning daemon API.
*
* @author <a href="mailto:antonio@opennms.it">Antonio Russo</a>
*
*/
public class MapProvisioningAdapter extends SimpleQueuedProvisioningAdapter implements InitializingBean {
private class XY {
int x;
int y;
protected XY(){
}
public int getX() {
return x;
}
public void setX(int x) {
this.x = x;
}
public int getY() {
return y;
}
public void setY(int y) {
this.y = y;
}
}
private NodeDao m_onmsNodeDao;
private OnmsMapDao m_onmsMapDao;
private OnmsMapElementDao m_onmsMapElementDao;
private EventForwarder m_eventForwarder;
private MapsAdapterConfig m_mapsAdapterConfig;
private TransactionTemplate m_template;
private int m_nodeId;
private static final String MESSAGE_PREFIX = "Dynamic Map provisioning failed: ";
private static final String ADAPTER_NAME="MAP Provisioning Adapter";
private volatile static ConcurrentMap<Integer, List<OnmsMapElement>> m_onmsNodeMapElementListMap;
private volatile static ConcurrentMap<String,Integer> m_mapNameMapSizeListMap;
private List<OnmsMapElement> m_onmsNodeMapElementToDelete;
public OnmsMapDao getOnmsMapDao() {
return m_onmsMapDao;
}
public void setOnmsMapDao(OnmsMapDao onmsMapDao) {
m_onmsMapDao = onmsMapDao;
}
public OnmsMapElementDao getOnmsMapElementDao() {
return m_onmsMapElementDao;
}
public void setOnmsMapElementDao(OnmsMapElementDao onmsMapElementDao) {
m_onmsMapElementDao = onmsMapElementDao;
}
public MapsAdapterConfig getMapsAdapterConfig() {
return m_mapsAdapterConfig;
}
public void setMapsAdapterConfig(MapsAdapterConfig mapsAdapterConfig) {
m_mapsAdapterConfig = mapsAdapterConfig;
}
public void setEventForwarder(EventForwarder eventForwarder) {
m_eventForwarder = eventForwarder;
}
public EventForwarder getEventForwarder() {
return m_eventForwarder;
}
public NodeDao getOnmsNodeDao() {
return m_onmsNodeDao;
}
public void setOnmsNodeDao(NodeDao onmsNodeDao) {
m_onmsNodeDao = onmsNodeDao;
}
public TransactionTemplate getTemplate() {
return m_template;
}
public void setTemplate(TransactionTemplate template) {
m_template = template;
}
private static Category log() {
return ThreadCategory.getInstance(MapProvisioningAdapter.class);
}
public String getName() {
return ADAPTER_NAME;
}
@Override
public boolean isNodeReady(AdapterOperation op) {
return true;
}
@Override
public void processPendingOperationForNode(AdapterOperation op)
throws ProvisioningAdapterException {
log().debug("processPendingOperationForNode: processing operation: " + op.getType().name() + " for node with Id: #" + op.getNodeId());
m_mapsAdapterConfig.rebuildPackageIpListMap();
if (op.getType() == AdapterOperationType.ADD) {
m_onmsNodeMapElementListMap.put(op.getNodeId(), new ArrayList<OnmsMapElement>());
doAddOrUpdate(op.getNodeId());
} else if (op.getType() == AdapterOperationType.UPDATE) {
doAddOrUpdate(op.getNodeId());
} else if (op.getType() == AdapterOperationType.DELETE) {
doDelete(op.getNodeId());
}
}
public void afterPropertiesSet() throws Exception {
Assert.notNull(m_onmsNodeDao, "Map Provisioning Adapter requires nodeDao property to be set.");
Assert.notNull(m_onmsMapDao, "Map Provisioning Adapter requires OnmsMapDao property to be set.");
Assert.notNull(m_onmsMapElementDao, "Map Provisioning Adapter requires OnmsMapElementDao property to be set.");
Assert.notNull(m_mapsAdapterConfig, "Map Provisioning Adapter requires MapsAdapterConfig property to be set.");
Assert.notNull(m_eventForwarder, "Map Provisioning Adapter requires EventForwarder property to be set.");
m_template.execute(new TransactionCallback() {
public Object doInTransaction(TransactionStatus arg0) {
List<Cmap> cmaps = m_mapsAdapterConfig.getAllMaps();
m_mapNameMapSizeListMap = new ConcurrentHashMap<String, Integer>(cmaps.size());
List<OnmsNode> nodes = m_onmsNodeDao.findAllProvisionedNodes();
m_onmsNodeMapElementListMap = new ConcurrentHashMap<Integer, List<OnmsMapElement>>(nodes.size());
syncMaps(cmaps, nodes);
return null;
}
});
}
private void syncMaps(List<Cmap> cmaps,List<OnmsNode> nodes) {
Date now = new Date();
log().debug("syncMaps: sync automated maps in database with configuration");
for (OnmsMap onmsMap : m_onmsMapDao.findAutoMaps()) {
log().debug("syncMaps: deleting old automated map: " + onmsMap.getName());
m_onmsMapDao.delete(onmsMap);
m_onmsMapDao.flush();
}
for (Cmap cmap: cmaps) {
OnmsMap onmsMap = getSuitableMap(cmap.getMapName());
log().debug("syncMaps: adding new automated map: " + onmsMap.getName());
onmsMap.setOwner(cmap.getMapOwner());
onmsMap.setUserLastModifies(cmap.getMapOwner());
onmsMap.setMapGroup(cmap.getMapGroup());
onmsMap.setAccessMode(cmap.getMapAccess());
onmsMap.setBackground(cmap.getMapBG());
onmsMap.setHeight(cmap.getMapHeight());
onmsMap.setWidth(cmap.getMapWidth());
onmsMap.setLastModifiedTime(now);
m_onmsMapDao.saveOrUpdate(onmsMap);
m_mapNameMapSizeListMap.put(cmap.getMapName(),0);
}
m_onmsMapDao.flush();
m_onmsMapDao.clear();
for(OnmsNode node: nodes) {
log().debug("syncMaps: try to add to automated maps node element: '" + node.getLabel() +"'");
m_onmsNodeMapElementListMap.put(node.getId(), new ArrayList<OnmsMapElement>());
doAddOrUpdate(node.getId());
}
Map<String,List<Csubmap>> mapnameSubmapMap = m_mapsAdapterConfig.getsubMaps();
for (String mapName : mapnameSubmapMap.keySet()) {
log().debug("syncMaps: adding automated submap: " + mapName);
OnmsMap onmsMap = getSuitableMap(mapName);
for (Csubmap csubmap : mapnameSubmapMap.get(mapName)) {
OnmsMap onmsSubMap = getSuitableMap(csubmap.getName());
if (onmsSubMap.isNew()) {
log().error("syncMap: add SubMaps: the submap does not exist: " + csubmap.getName());
continue;
}
if (onmsSubMap.getMapElements().size() > 0 || csubmap.getAddwithoutelements()) {
addSubMap(onmsMap,csubmap,onmsSubMap);
onmsMap.setLastModifiedTime(new Date());
m_onmsMapDao.update(onmsMap);
m_onmsMapDao.flush();
}
}
m_onmsMapDao.clear();
}
}
private void addSubMap(OnmsMap onmsMap, Csubmap csubmap, OnmsMap onmsSubMap) {
log().debug("addSubMap: adding automated submap: " + onmsSubMap.getName() + " to map: " + onmsMap.getName());
OnmsMapElement mapElement = null;
if (!onmsMap.getMapElements().isEmpty()) {
log().debug("addSubMap: looping on elements of not empty map: " + onmsMap.getName());
for (OnmsMapElement elem: onmsMap.getMapElements()) {
log().debug("addSubMap: checking element with id: " + elem.getElementId() + " and type" + elem.getType());
if (elem.getType().equals(OnmsMapElement.MAP_TYPE) && elem.getElementId() == onmsSubMap.getId()) {
log().debug("addSubMap: still exists in map updating");
mapElement = elem;
mapElement.setLabel(csubmap.getLabel());
mapElement.setIconName(csubmap.getIcon());
mapElement.setX(csubmap.getX());
mapElement.setY(csubmap.getY());
break;
}
}
}
if (mapElement == null) {
mapElement =
new OnmsMapElement(onmsMap,onmsSubMap.getId(),OnmsMapElement.MAP_TYPE,csubmap.getLabel(),csubmap.getIcon(),csubmap.getX(),csubmap.getY());
m_onmsMapElementDao.saveOrUpdate(mapElement);
} else {
m_onmsMapElementDao.update(mapElement);
}
log().debug("added map element with id: " + mapElement.getId());
log().debug(" with label: " + mapElement.getLabel());
log().debug(" with icon: " + mapElement.getIconName());
log().debug(" with X: " + mapElement.getX());
log().debug(" with Y: " + mapElement.getY());
m_onmsMapElementDao.flush();
m_onmsMapElementDao.clear();
}
private void doDelete(Integer nodeid) {
m_nodeId = nodeid;
log().debug("doDelete: deleting mapElements from the automated maps for the node with nodeid:" + m_nodeId);
try {
m_template.execute(new TransactionCallback() {
public Object doInTransaction(TransactionStatus arg0) {
try {
for (OnmsMapElement elem: m_onmsNodeMapElementListMap.remove(Integer.valueOf(m_nodeId))) {
log().debug("doDelete: deleting element with label: '" + elem.getLabel() + "' from automated map: '" + elem.getMap().getName()+ "'");
Integer mapId = elem.getMap().getId();
m_onmsMapElementDao.delete(elem);
m_onmsMapElementDao.flush();
OnmsMap onmsMap = m_onmsMapDao.findMapById(mapId);
onmsMap.setLastModifiedTime(new Date());
m_onmsMapDao.update(onmsMap);
m_onmsMapDao.flush();
if (onmsMap.getMapElements().size() == 0)
removeEmptySubmap(onmsMap);
}
m_onmsMapDao.clear();
m_onmsMapElementDao.clear();
} catch (Exception e) {
log().error(e.getMessage());
}
return null;
}
});
} catch (Exception e) {
sendAndThrow(m_nodeId, e);
}
}
protected void removeEmptySubmap(OnmsMap submap) {
log().debug("removeEmptySubmap: verify delete map element which correspond to empty submap: " + submap.getName());
Map<String,List<Csubmap>> submaps = m_mapsAdapterConfig.getsubMaps();
for (String mapName : submaps.keySet()) {
for (Csubmap csubmap : submaps.get(mapName)) {
if (csubmap.getName().equals(submap.getName())) {
if (csubmap.getAddwithoutelements()) continue;
log().debug("removeEmptySubmap: delete from container map: '" + mapName +"' empty submap '" + submap.getName() );
OnmsMap onmsMap = getSuitableMap(mapName);
Integer mapid = onmsMap.getId();
OnmsMapElement mapElement = m_onmsMapElementDao.findMapElement(submap.getId(), OnmsMapElement.MAP_TYPE, onmsMap);
if (mapElement != null) {
m_onmsMapElementDao.delete(mapElement);
m_onmsMapElementDao.flush();
onmsMap = m_onmsMapDao.findMapById(mapid);
onmsMap.setLastModifiedTime(new Date());
m_onmsMapDao.update(onmsMap);
m_onmsMapDao.flush();
}
}
}
}
}
private void doAddOrUpdate(Integer nodeid) throws ProvisioningAdapterException {
m_nodeId = nodeid;
try {
m_template.execute(new TransactionCallback() {
public Object doInTransaction(TransactionStatus arg0) {
try {
addOrUpdate();
} catch (Exception e) {
log().error(e.getMessage());
}
return null;
}
});
} catch (Exception e) {
sendAndThrow(m_nodeId, e);
}
}
private void addOrUpdate() throws Exception {
log().debug("addOrUpdate: adding or updating the automated maps for the node with nodeid:" + m_nodeId);
OnmsNode node = m_onmsNodeDao.get(m_nodeId);
if (node == null) throw new Exception("Error Adding element. Node does not exist: nodeid: " + m_nodeId);
m_onmsNodeMapElementToDelete = m_onmsNodeMapElementListMap.get(m_nodeId);
log().debug("addOrUpdate: found #" + m_onmsNodeMapElementToDelete.size() + " mapElements in automated maps for the nodeid: " +m_nodeId);
// This is the array with the new elements
List<OnmsMapElement> elems = new ArrayList<OnmsMapElement>();
m_onmsNodeMapElementListMap.replace(m_nodeId, elems);
Map<String, Celement> celements = m_mapsAdapterConfig.getElementByAddress((getSuitableIp(node)));
if (celements.isEmpty()) {
log().info("addOrUpdate: Element is not managed in the adapter configuration file: no package match nodeid: "+m_nodeId);
} else {
log().debug("addOrUpdate: found #" + celements.size() + " container automated maps for the nodeid: " +m_nodeId);
for (String mapName: celements.keySet()) {
log().debug("addOrUpdate: found mapName: " + mapName + " container automated map for the nodeid: " +m_nodeId);
Celement celement = celements.get(mapName);
OnmsMap onmsMap = getSuitableMap(mapName);
if (onmsMap.isNew()) {
throw new Exception("Error adding element. Automated map does not exist in database: " + mapName);
} else {
log().debug("addOrUpdate: container automated map: " + mapName + " has mapId: " + onmsMap.getId() );
if (onmsMap.getMapElements().size() == 0) {
log().debug("addOrUpdate: automated map: " + mapName + " has no elements");
addAsSubMap(mapName);
}
OnmsMapElement mapElement = m_onmsMapElementDao.findMapElement(m_nodeId, OnmsMapElement.NODE_TYPE,onmsMap);
if (mapElement == null) {
int elementsize = m_mapNameMapSizeListMap.get(mapName);
log().debug("addOrUpdate: mapElement is new: found last mapElement at position #" + elementsize + " on map: " + mapName);
XY xy=getXY(onmsMap, elementsize);
mapElement = new OnmsMapElement(onmsMap,m_nodeId,OnmsMapElement.NODE_TYPE,node.getLabel(),celement.getIcon(),xy.getX(),xy.getY());
m_mapNameMapSizeListMap.replace(mapName, ++elementsize);
log().debug("doAddOrUpdate: adding node: " + node.getLabel() + " to map: " + mapName);
} else {
mapElement.setIconName(celement.getIcon());
mapElement.setLabel(node.getLabel());
log().debug("doAddOrUpdate: updating node: " + node.getLabel() + " to map: " + mapName);
List<OnmsMapElement> tempElems = new ArrayList<OnmsMapElement>();
for (OnmsMapElement oldElem : m_onmsNodeMapElementToDelete) {
log().debug("doAddOrUpdate: removing the old mapElement from the deleting list parsing element with mapId: " + oldElem.getMap().getId());
if ( oldElem.getMap().getId() != onmsMap.getId()) {
tempElems.add(oldElem);
log().debug("doAddOrUpdate: leaving the old mapElement in deleting list: ");
}
}
m_onmsNodeMapElementToDelete = tempElems;
}
m_onmsMapElementDao.saveOrUpdate(mapElement);
m_onmsMapElementDao.flush();
onmsMap.setLastModifiedTime(new Date());
m_onmsMapDao.update(onmsMap);
m_onmsMapDao.flush();
elems.add(mapElement);
}
}
m_onmsMapElementDao.clear();
m_onmsMapDao.clear();
m_onmsNodeMapElementListMap.replace(m_nodeId, elems);
}
log().debug("doAddOrUpdate: deleting moved element from automated maps: size #" + m_onmsNodeMapElementToDelete.size());
try {
m_template.execute(new TransactionCallback() {
public Object doInTransaction(TransactionStatus arg0) {
try {
for (OnmsMapElement elem : m_onmsNodeMapElementToDelete) {
log().debug("doAddOrUpdate: deleting element with label: '" + elem.getLabel() + "' from automated map: '" + elem.getMap().getName()+ "'");
Integer mapId = elem.getMap().getId();
m_onmsMapElementDao.delete(elem);
m_onmsMapElementDao.flush();
OnmsMap onmsMap = m_onmsMapDao.findMapById(mapId);
onmsMap.setLastModifiedTime(new Date());
m_onmsMapDao.update(onmsMap);
m_onmsMapDao.flush();
if (onmsMap.getMapElements().size() == 0)
removeEmptySubmap(onmsMap);
}
m_onmsMapElementDao.clear();
m_onmsMapDao.clear();
} catch (Exception e) {
log().error(e.getMessage());
}
return null;
}
});
} catch (Exception e) {
sendAndThrow(m_nodeId, e);
}
m_onmsNodeMapElementToDelete = null;
}
private void addAsSubMap(String submapName) {
Map<String,Csubmap> csubmaps = m_mapsAdapterConfig.getContainerMaps(submapName);
for(String mapName:csubmaps.keySet()) {
OnmsMap onmsMap = getSuitableMap(mapName);
Csubmap csubmap = csubmaps.get(mapName);
OnmsMap onmsSubMap = getSuitableMap(csubmap.getName());
if (onmsSubMap.isNew()) {
log().error("add SubMaps: the submap doen not exists: " + csubmap.getName());
continue;
}
if (!csubmap.getAddwithoutelements()) {
addSubMap(onmsMap, csubmap, onmsSubMap);
onmsMap.setLastModifiedTime(new Date());
m_onmsMapDao.update(onmsMap);
m_onmsMapDao.flush();
}
}
m_onmsMapDao.clear();
}
private void sendAndThrow(int nodeId, Exception e) {
Event event = buildEvent(EventConstants.PROVISIONING_ADAPTER_FAILED, nodeId).addParam("reason", MESSAGE_PREFIX+e.getLocalizedMessage()).getEvent();
m_eventForwarder.sendNow(event);
log().error(e);
throw new ProvisioningAdapterException(MESSAGE_PREFIX, e);
}
private EventBuilder buildEvent(String uei, int nodeId) {
EventBuilder builder = new EventBuilder(uei, "Provisioner", new Date());
builder.setNodeid(nodeId);
return builder;
}
private String getSuitableIp(OnmsNode node){
OnmsIpInterface primaryInterface = node.getPrimaryInterface();
if (primaryInterface == null) {
Set<OnmsIpInterface> ipInterfaces = node.getIpInterfaces();
for (OnmsIpInterface onmsIpInterface : ipInterfaces) {
return onmsIpInterface.getIpAddress();
}
}
return primaryInterface.getIpAddress();
}
private OnmsMap getSuitableMap(String mapName){
OnmsMap onmsMap = null;
Collection<OnmsMap> maps = m_onmsMapDao.findMapsByNameAndType(mapName, OnmsMap.AUTOMATICALLY_GENERATED_MAP);
if (maps.size()>0) {
onmsMap = maps.iterator().next();
onmsMap.setNew(false);
log().debug("getSuitableMap: found map with mapid #" + onmsMap.getMapId() + " for map name:" + mapName );
} else {
log().debug("getSuitableMap: no map found for name:" + mapName + ". Creating a new map.");
onmsMap = new OnmsMap();
onmsMap.setName(mapName);
onmsMap.setType(OnmsMap.AUTOMATICALLY_GENERATED_MAP);
}
return onmsMap;
}
private XY getXY(OnmsMap map, int mapElementSize) {
int deltaX = m_mapsAdapterConfig.getMapElementDimension();
int deltaY = deltaX/2;
int maxNumberofelementsonX=map.getWidth()/(2*deltaX);
log().debug("getXY: max number of elements on a row: " +maxNumberofelementsonX);
int numberofexistingelement = mapElementSize;
log().debug("getXY: number of existing elements on map: " + mapElementSize);
int positiononX = 1;
int positiononY = 1;
boolean addoffset = true;
while (maxNumberofelementsonX <= numberofexistingelement){
numberofexistingelement = numberofexistingelement - maxNumberofelementsonX;
log().debug("getXY: entering the loop: element found on the row: " + numberofexistingelement);
positiononY++;
if (addoffset) {
maxNumberofelementsonX--; // offset rows hold one element fewer (mirrors the increment in the else branch)
} else {
maxNumberofelementsonX++;
}
addoffset = !addoffset;
}
positiononX = positiononX + numberofexistingelement;
XY xy = new XY();
if (addoffset) {
xy.setX(2*deltaX*positiononX-deltaX);
} else {
xy.setX(2*deltaX*positiononX);
}
xy.setY(deltaY*positiononY);
return xy;
}
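/*
 * Worked example for getXY (illustrative, and assuming the previously truncated statement in
 * the while loop decrements maxNumberofelementsonX): with a map element dimension of 40
 * (deltaX = 40, deltaY = 20) and a map width of 400, a full row holds 400 / (2 * 40) = 5
 * elements. For the 8th element (mapElementSize = 7) the first row of 5 is consumed and 2
 * elements remain on the staggered second row of 4, so positiononX = 3 and positiononY = 2,
 * giving x = 2 * 40 * 3 = 240 and y = 20 * 2 = 40.
 */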
}
|
package org.languagetool.tagging.disambiguation.rules;
import org.languagetool.AnalyzedSentence;
import org.languagetool.AnalyzedToken;
import org.languagetool.AnalyzedTokenReadings;
import org.languagetool.chunking.ChunkTag;
import org.languagetool.rules.patterns.*;
import org.languagetool.tools.StringTools;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* @since 2.3
*/
class DisambiguationPatternRuleReplacer extends AbstractPatternRulePerformer {
List<Boolean> elementsMatched;
public DisambiguationPatternRuleReplacer(DisambiguationPatternRule rule) {
super(rule, rule.getLanguage().getDisambiguationUnifier());
elementsMatched = new ArrayList<>(rule.getPatternElements().size());
}
public final AnalyzedSentence replace(final AnalyzedSentence sentence)
throws IOException {
List<ElementMatcher> elementMatchers = createElementMatchers();
final AnalyzedTokenReadings[] tokens = sentence.getTokensWithoutWhitespace();
AnalyzedTokenReadings[] whTokens = sentence.getTokens();
final int[] tokenPositions = new int[tokens.length + 1];
final int patternSize = elementMatchers.size();
final int limit = Math.max(0, tokens.length - patternSize + 1);
ElementMatcher elem = null;
boolean changed = false;
elementsMatched.clear();
for (ElementMatcher elementMatcher : elementMatchers) { //the list has exactly the same number
// of elements as the list of ElementMatchers
elementsMatched.add(false);
}
int i = 0;
int minOccurCorrection = getMinOccurrenceCorrection();
while (i < limit + minOccurCorrection && !(rule.isSentStart() && i > 0)) {
boolean allElementsMatch = false;
unifiedTokens = null;
int matchingTokens = 0;
int skipShiftTotal = 0;
int firstMatchToken = -1;
int lastMatchToken = -1;
int firstMarkerMatchToken = -1;
int lastMarkerMatchToken = -1;
int prevSkipNext = 0;
if (rule.isTestUnification()) {
unifier.reset();
}
int minOccurSkip = 0;
for (int k = 0; k < patternSize; k++) {
final ElementMatcher prevElement = elem;
elem = elementMatchers.get(k);
elem.resolveReference(firstMatchToken, tokens, rule.getLanguage());
final int nextPos = i + k + skipShiftTotal - minOccurSkip;
prevMatched = false;
if (prevSkipNext + nextPos >= tokens.length || prevSkipNext < 0) { // SENT_END?
prevSkipNext = tokens.length - (nextPos + 1);
}
final int maxTok = Math.min(nextPos + prevSkipNext, tokens.length - (patternSize - k) + minOccurCorrection);
for (int m = nextPos; m <= maxTok; m++) {
allElementsMatch = testAllReadings(tokens, elem, prevElement, m, firstMatchToken, prevSkipNext);
if (elem.getElement().getMinOccurrence() == 0) {
final ElementMatcher nextElement = elementMatchers.get(k + 1);
final boolean nextElementMatch = testAllReadings(tokens, nextElement, elem, m,
firstMatchToken, prevSkipNext);
if (nextElementMatch) {
// this element doesn't match, but it's optional so accept this and continue
allElementsMatch = true;
minOccurSkip++;
elementsMatched.set(k, false);
break;
}
}
if (allElementsMatch) {
elementsMatched.set(k, true);
int skipForMax = skipMaxTokens(tokens, elem, firstMatchToken, prevSkipNext,
prevElement, m, patternSize - k -1);
lastMatchToken = m + skipForMax;
final int skipShift = lastMatchToken - nextPos;
tokenPositions[matchingTokens] = skipShift + 1;
prevSkipNext = elem.getElement().getSkipNext();
matchingTokens++;
skipShiftTotal += skipShift;
if (firstMatchToken == -1) {
firstMatchToken = lastMatchToken - skipForMax;
}
if (firstMarkerMatchToken == -1 && elem.getElement().isInsideMarker()) {
firstMarkerMatchToken = lastMatchToken - skipForMax;
}
if (elem.getElement().isInsideMarker()) {
lastMarkerMatchToken = lastMatchToken;
}
break;
}
}
if (!allElementsMatch) {
break;
}
}
if (allElementsMatch && matchingTokens == patternSize || matchingTokens == patternSize - minOccurSkip && firstMatchToken != -1) {
int lastMatchTokenTmp = lastMarkerMatchToken != -1 ? lastMarkerMatchToken : lastMatchToken;
whTokens = executeAction(sentence, whTokens, unifiedTokens, firstMatchToken, lastMatchTokenTmp, matchingTokens, tokenPositions);
changed = true;
}
i++;
}
if (changed) {
return new AnalyzedSentence(whTokens, sentence.getWhPositions());
}
return sentence;
}
@Override
protected int skipMaxTokens(AnalyzedTokenReadings[] tokens, ElementMatcher elem, int firstMatchToken, int prevSkipNext, ElementMatcher prevElement, int m, int remainingElems) throws IOException {
int maxSkip = 0;
int maxOccurrences = elem.getElement().getMaxOccurrence() == -1 ? Integer.MAX_VALUE : elem.getElement().getMaxOccurrence();
for (int j = 1; j < maxOccurrences && m+j < tokens.length - remainingElems; j++) {
boolean nextAllElementsMatch = testAllReadings(tokens, elem, prevElement, m+j, firstMatchToken, prevSkipNext);
if (nextAllElementsMatch) {
maxSkip++;
} else {
break;
}
}
return maxSkip;
}
private AnalyzedTokenReadings[] executeAction(final AnalyzedSentence sentence,
final AnalyzedTokenReadings[] whiteTokens,
final AnalyzedTokenReadings[] unifiedTokens,
final int firstMatchToken, int lastMatchToken,
final int matchingTokens, final int[] tokenPositions) {
final AnalyzedTokenReadings[] whTokens = whiteTokens.clone();
final DisambiguationPatternRule rule = (DisambiguationPatternRule) this.rule;
int correctedStPos = 0;
int startPositionCorrection = rule.getStartPositionCorrection();
int endPositionCorrection = rule.getEndPositionCorrection();
int matchingTokensWithCorrection = matchingTokens;
List<Integer> tokenPositionList = new ArrayList<>();
for (int i : tokenPositions) {
tokenPositionList.add(i);
}
if (startPositionCorrection > 0) {
correctedStPos--; //token positions are shifted by 1
for (int j = 0; j < elementsMatched.size(); j++) {
if (!elementsMatched.get(j)) {
tokenPositionList.add(j, 0); // add zero-length token corresponding to the non-matching pattern element so that position count is fine
}
}
for (int l = 0; l <= startPositionCorrection; l++) {
correctedStPos += tokenPositionList.get(l);
}
int w = startPositionCorrection; // adjust to make sure the token count is fine as it's checked later
for (int j = 0; j <= w; j++) {
if (j < elementsMatched.size() && !elementsMatched.get(j)) {
startPositionCorrection--;
}
}
}
if (endPositionCorrection < 0) { // adjust the end position correction if one of the elements has not been matched
for (int d = startPositionCorrection; d < elementsMatched.size(); d++) {
if (!elementsMatched.get(d)) {
endPositionCorrection++;
}
}
}
int maxPosCorrection = Math.max((lastMatchToken + 1 - (firstMatchToken + correctedStPos)) - matchingTokens, 0);
matchingTokensWithCorrection += maxPosCorrection;
final int fromPos = sentence.getOriginalPosition(firstMatchToken + correctedStPos);
final boolean spaceBefore = whTokens[fromPos].isWhitespaceBefore();
final DisambiguationPatternRule.DisambiguatorAction disAction = rule.getAction();
final AnalyzedToken[] newTokenReadings = rule.getNewTokenReadings();
final Match matchElement = rule.getMatchElement();
final String disambiguatedPOS = rule.getDisambiguatedPOS();
switch (disAction) {
case UNIFY:
if (unifiedTokens != null) {
//TODO: unifiedTokens.length is larger > matchingTokensWithCorrection in cases where there are no markers...
if (unifiedTokens.length == matchingTokensWithCorrection - startPositionCorrection + endPositionCorrection) {
if (whTokens[sentence.getOriginalPosition(firstMatchToken
+ correctedStPos + unifiedTokens.length - 1)].isSentenceEnd()) {
unifiedTokens[unifiedTokens.length - 1].setSentEnd();
}
for (int i = 0; i < unifiedTokens.length; i++) {
final int position = sentence.getOriginalPosition(firstMatchToken + correctedStPos + i);
unifiedTokens[i].setStartPos(whTokens[position].getStartPos());
final String prevValue = whTokens[position].toString();
final String prevAnot = whTokens[position].getHistoricalAnnotations();
List<ChunkTag> chTags = whTokens[position].getChunkTags();
whTokens[position] = unifiedTokens[i];
whTokens[position].setChunkTags(chTags);
annotateChange(whTokens[position], prevValue, prevAnot);
}
}
}
break;
case REMOVE:
if (newTokenReadings != null && newTokenReadings.length > 0) {
if (newTokenReadings.length == matchingTokensWithCorrection
- startPositionCorrection + endPositionCorrection) {
for (int i = 0; i < newTokenReadings.length; i++) {
final int position = sentence.getOriginalPosition(firstMatchToken + correctedStPos + i);
final String prevValue = whTokens[position].toString();
final String prevAnot = whTokens[position].getHistoricalAnnotations();
whTokens[position].removeReading(newTokenReadings[i]);
annotateChange(whTokens[position], prevValue, prevAnot);
}
}
} else if (!StringTools.isEmpty(disambiguatedPOS)) { // negative filtering
Pattern p = Pattern.compile(disambiguatedPOS);
AnalyzedTokenReadings tmp = new AnalyzedTokenReadings(whTokens[fromPos].getReadings(),
whTokens[fromPos].getStartPos());
for (AnalyzedToken analyzedToken : tmp) {
if (analyzedToken.getPOSTag() != null) {
final Matcher mPos = p.matcher(analyzedToken.getPOSTag());
if (mPos.matches()) {
final int position = sentence.getOriginalPosition(firstMatchToken + correctedStPos);
final String prevValue = whTokens[position].toString();
final String prevAnot = whTokens[position].getHistoricalAnnotations();
whTokens[position].removeReading(analyzedToken);
annotateChange(whTokens[position], prevValue, prevAnot);
}
}
}
}
break;
case ADD:
if (newTokenReadings != null) {
if (newTokenReadings.length == matchingTokensWithCorrection
- startPositionCorrection + endPositionCorrection) {
for (int i = 0; i < newTokenReadings.length; i++) {
final String token;
final int position = sentence.getOriginalPosition(firstMatchToken + correctedStPos + i);
if ("".equals(newTokenReadings[i].getToken())) { // empty token
token = whTokens[position].getToken();
} else {
token = newTokenReadings[i].getToken();
}
final String lemma;
if (newTokenReadings[i].getLemma() == null) { // empty lemma
lemma = token;
} else {
lemma = newTokenReadings[i].getLemma();
}
final AnalyzedToken newTok = new AnalyzedToken(token,
newTokenReadings[i].getPOSTag(), lemma);
final String prevValue = whTokens[position].toString();
final String prevAnot = whTokens[position].getHistoricalAnnotations();
whTokens[position].addReading(newTok);
annotateChange(whTokens[position], prevValue, prevAnot);
}
}
}
break;
case FILTERALL:
for (int i = 0; i < matchingTokensWithCorrection - startPositionCorrection + endPositionCorrection; i++) {
final int position = sentence.getOriginalPosition(firstMatchToken + correctedStPos + i);
Element myEl;
if (elementsMatched.get(i + startPositionCorrection)) {
myEl = rule.getPatternElements().get(i + startPositionCorrection);
} else {
int k = 1;
while (i + startPositionCorrection + k < rule.getPatternElements().size() + endPositionCorrection &&
!elementsMatched.get(i + startPositionCorrection + k)) {
k++;
}
//FIXME: this is left to see whether this fails anywhere
assert(i + k + startPositionCorrection < rule.getPatternElements().size());
myEl = rule.getPatternElements().get(i + k + startPositionCorrection);
}
final Match tmpMatchToken = new Match(myEl.getPOStag(), null,
true,
myEl.getPOStag(),
null, Match.CaseConversion.NONE, false, false,
Match.IncludeRange.NONE);
MatchState matchState = tmpMatchToken.createState(rule.getLanguage().getSynthesizer(), whTokens[position]);
final String prevValue = whTokens[position].toString();
final String prevAnot = whTokens[position].getHistoricalAnnotations();
whTokens[position] = matchState.filterReadings();
annotateChange(whTokens[position], prevValue, prevAnot);
}
break;
case IMMUNIZE:
for (int i = 0; i < matchingTokensWithCorrection - startPositionCorrection + endPositionCorrection; i++) {
whTokens[sentence.getOriginalPosition(firstMatchToken + correctedStPos + i)].immunize();
}
break;
case IGNORE_SPELLING:
for (int i = 0; i < matchingTokensWithCorrection - startPositionCorrection + endPositionCorrection; i++) {
whTokens[sentence.getOriginalPosition(firstMatchToken + correctedStPos + i)].ignoreSpelling();
}
break;
case FILTER:
if (matchElement == null) { // same as REPLACE if using <match>
final Match tmpMatchToken = new Match(disambiguatedPOS, null,
true, disambiguatedPOS, null,
Match.CaseConversion.NONE, false, false,
Match.IncludeRange.NONE);
boolean newPOSmatches = false;
// only apply filter rule when it matches previous tags:
for (int i = 0; i < whTokens[fromPos].getReadingsLength(); i++) {
if (!whTokens[fromPos].getAnalyzedToken(i).hasNoTag() &&
whTokens[fromPos].getAnalyzedToken(i).getPOSTag().matches(disambiguatedPOS)) {
newPOSmatches = true;
break;
}
}
if (newPOSmatches) {
final MatchState matchState = tmpMatchToken.createState(rule.getLanguage().getSynthesizer(), whTokens[fromPos]);
final String prevValue = whTokens[fromPos].toString();
final String prevAnot = whTokens[fromPos].getHistoricalAnnotations();
whTokens[fromPos] = matchState.filterReadings();
annotateChange(whTokens[fromPos], prevValue, prevAnot);
}
break;
}
//fallthrough
case REPLACE:
default:
if (newTokenReadings != null && newTokenReadings.length > 0) {
if (newTokenReadings.length == matchingTokensWithCorrection - startPositionCorrection + endPositionCorrection) {
for (int i = 0; i < newTokenReadings.length; i++) {
final String token;
final int position = sentence.getOriginalPosition(firstMatchToken + correctedStPos + i);
if ("".equals(newTokenReadings[i].getToken())) { // empty token
token = whTokens[position].getToken();
} else {
token = newTokenReadings[i].getToken();
}
final String lemma;
if (newTokenReadings[i].getLemma() == null) { // empty lemma
lemma = token;
} else {
lemma = newTokenReadings[i].getLemma();
}
final AnalyzedToken analyzedToken = new AnalyzedToken(token, newTokenReadings[i].getPOSTag(), lemma);
final AnalyzedTokenReadings toReplace = new AnalyzedTokenReadings(
analyzedToken,
whTokens[fromPos].getStartPos());
whTokens[position] = replaceTokens(
whTokens[position], toReplace);
}
}
} else if (matchElement == null) {
String lemma = "";
for (AnalyzedToken analyzedToken : whTokens[fromPos]) {
if (analyzedToken.getPOSTag() != null
&& analyzedToken.getPOSTag().equals(disambiguatedPOS) && analyzedToken.getLemma() != null) {
lemma = analyzedToken.getLemma();
}
}
if (StringTools.isEmpty(lemma)) {
lemma = whTokens[fromPos].getAnalyzedToken(0).getLemma();
}
final AnalyzedToken analyzedToken = new AnalyzedToken(whTokens[fromPos].getToken(), disambiguatedPOS, lemma);
final AnalyzedTokenReadings toReplace = new AnalyzedTokenReadings(
analyzedToken, whTokens[fromPos].getStartPos());
whTokens[fromPos] = replaceTokens(whTokens[fromPos], toReplace);
} else {
// using the match element
final MatchState matchElementState = matchElement.createState(rule.getLanguage().getSynthesizer(), whTokens[fromPos]);
final String prevValue = whTokens[fromPos].toString();
final String prevAnot = whTokens[fromPos].getHistoricalAnnotations();
whTokens[fromPos] = matchElementState.filterReadings();
whTokens[fromPos].setWhitespaceBefore(spaceBefore);
annotateChange(whTokens[fromPos], prevValue, prevAnot);
}
}
return whTokens;
}
private void annotateChange(AnalyzedTokenReadings atr,
final String prevValue, String prevAnot) {
atr.setHistoricalAnnotations(prevAnot + "\n" + rule.getId() + ":"
+ rule.getSubId() + " " + prevValue + " -> " + atr.toString());
}
private AnalyzedTokenReadings replaceTokens(AnalyzedTokenReadings oldAtr,
final AnalyzedTokenReadings newAtr) {
final String prevValue = oldAtr.toString();
final String prevAnot = oldAtr.getHistoricalAnnotations();
final boolean isSentEnd = oldAtr.isSentenceEnd();
final boolean isParaEnd = oldAtr.isParagraphEnd();
final boolean spaceBefore = oldAtr.isWhitespaceBefore();
final int startPosition = oldAtr.getStartPos();
final List<ChunkTag> chunkTags = oldAtr.getChunkTags();
if (isSentEnd) {
newAtr.setSentEnd();
}
if (isParaEnd) {
newAtr.setParagraphEnd();
}
newAtr.setWhitespaceBefore(spaceBefore);
newAtr.setStartPos(startPosition);
newAtr.setChunkTags(chunkTags);
if (oldAtr.isImmunized()) {
newAtr.immunize();
}
annotateChange(newAtr, prevValue, prevAnot);
return newAtr;
}
}
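/*
 * Illustrative helper, not part of the original sources: the minimal call sequence for applying
 * a single disambiguation rule. The rule and the analyzed sentence are assumed to come from the
 * usual LanguageTool pipeline (rules parsed from disambiguation.xml, the sentence from the
 * tagger/chunker).
 */
final class DisambiguationPatternRuleReplacerExample {
    static AnalyzedSentence applyRule(DisambiguationPatternRule rule, AnalyzedSentence sentence) throws IOException {
        DisambiguationPatternRuleReplacer replacer = new DisambiguationPatternRuleReplacer(rule);
        // replace() returns the very same AnalyzedSentence instance when the pattern does not match
        return replacer.replace(sentence);
    }
}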
|
package io.subutai.core.network.impl;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import io.subutai.common.command.RequestBuilder;
import io.subutai.common.network.LogLevel;
import io.subutai.common.network.ProxyLoadBalanceStrategy;
import io.subutai.common.protocol.LoadBalancing;
import io.subutai.common.protocol.Protocol;
/**
* Networking commands
*/
public class Commands
{
private static final String TUNNEL_BINDING = "subutai tunnel";
private static final String VXLAN_BINDING = "subutai vxlan";
private static final String P2P_BINDING = "p2p";
private static final String PROXY_BINDING = "subutai proxy";
private static final String INFO_BINDING = "subutai info";
private static final String MAP_BINDING = "subutai map";
private static final String LOG_BINDING = "subutai log";
private final SimpleDateFormat p2pDateFormat = new SimpleDateFormat( "yyyy-MM-dd HH:mm:ss" );
private static final String NETWORK_IFACE_REMOVAL = "ip link delete";
RequestBuilder getGetReservedPortsCommand()
{
return new RequestBuilder( INFO_BINDING ).withCmdArgs( "ports" );
}
RequestBuilder getGetP2pVersionCommand()
{
return new RequestBuilder( P2P_BINDING ).withCmdArgs( "-v" );
}
RequestBuilder getP2PConnectionsCommand()
{
return new RequestBuilder( P2P_BINDING ).withCmdArgs( "show" );
}
RequestBuilder getJoinP2PSwarmCommand( String interfaceName, String localIp, String p2pHash, String secretKey,
long secretKeyTtlSec, String portRange )
{
return new RequestBuilder( P2P_BINDING )
.withCmdArgs( "start", "-dev", interfaceName, "-hash", p2pHash, "-key", secretKey, "-ttl",
String.valueOf( secretKeyTtlSec ), "-ip", localIp, "-ports", portRange ).withTimeout( 90 );
}
RequestBuilder getJoinP2PSwarmDHCPCommand( String interfaceName, String p2pHash, String secretKey,
long secretKeyTtlSec, String portRange )
{
return new RequestBuilder( P2P_BINDING )
.withCmdArgs( "start", "-dev", interfaceName, "-hash", p2pHash, "-key", secretKey, "-ttl",
String.valueOf( secretKeyTtlSec ), "-ports", portRange ).withTimeout( 90 );
}
RequestBuilder getRemoveP2PSwarmCommand( String p2pHash )
{
return new RequestBuilder( P2P_BINDING ).withCmdArgs( "stop", "-hash", p2pHash ).withTimeout( 90 );
}
RequestBuilder getResetP2PSecretKey( String p2pHash, String newSecretKey, long ttlSeconds )
{
return new RequestBuilder( P2P_BINDING )
.withCmdArgs( "set", "-key", newSecretKey, "-ttl", String.valueOf( ttlSeconds ), "-hash", p2pHash );
}
RequestBuilder getGetUsedP2pIfaceNamesCommand()
{
return new RequestBuilder( P2P_BINDING ).withCmdArgs( "show", "--interfaces", "--all" );
}
RequestBuilder getRemoveP2PIfaceCommand( String interfaceName )
{
return new RequestBuilder( NETWORK_IFACE_REMOVAL ).withCmdArgs( interfaceName ).withTimeout( 90 );
}
RequestBuilder getGetP2pLogsCommand( Date from, Date till, LogLevel logLevel )
{
List<String> args =
Lists.newArrayList( "p2p", "-s", p2pDateFormat.format( from ), "-e", p2pDateFormat.format( till ) );
if ( logLevel != LogLevel.ALL )
{
args.add( "-l" );
args.add( logLevel.getCliParam() );
}
return new RequestBuilder( LOG_BINDING ).withCmdArgs( args.toArray( new String[args.size()] ) );
}
RequestBuilder getCreateTunnelCommand( String tunnelName, String tunnelIp, int vlan, long vni )
{
return new RequestBuilder( VXLAN_BINDING )
.withCmdArgs( "-create", tunnelName, "-remoteip", tunnelIp, "-vlan", String.valueOf( vlan ), "-vni",
String.valueOf( vni ) );
}
RequestBuilder getDeleteTunnelCommand( final String tunnelName )
{
return new RequestBuilder( VXLAN_BINDING ).withCmdArgs( "-delete", tunnelName );
}
RequestBuilder getGetTunnelsCommand()
{
return new RequestBuilder( VXLAN_BINDING ).withCmdArgs( "-list" );
}
RequestBuilder getGetVlanDomainCommand( int vLanId )
{
return new RequestBuilder( PROXY_BINDING ).withCmdArgs( "check", String.valueOf( vLanId ), "-d" );
}
RequestBuilder getRemoveVlanDomainCommand( final String vLanId )
{
return new RequestBuilder( PROXY_BINDING ).withCmdArgs( "del", vLanId, "-d" );
}
RequestBuilder getSetVlanDomainCommand( final String vLanId, final String domain,
final ProxyLoadBalanceStrategy proxyLoadBalanceStrategy,
final String sslCertPath )
{
List<String> args = Lists.newArrayList( "add", vLanId, "-d", domain );
if ( proxyLoadBalanceStrategy != ProxyLoadBalanceStrategy.NONE )
{
args.add( "-p" );
args.add( proxyLoadBalanceStrategy.getValue() );
}
if ( !Strings.isNullOrEmpty( sslCertPath ) )
{
args.add( "-f" );
args.add( sslCertPath );
}
return new RequestBuilder( PROXY_BINDING ).withCmdArgs( args.toArray( new String[args.size()] ) );
}
RequestBuilder getCheckIpInVlanDomainCommand( final String hostIp, final int vLanId )
{
return new RequestBuilder( PROXY_BINDING ).withCmdArgs( "check", String.valueOf( vLanId ), "-h", hostIp );
}
RequestBuilder getAddIpToVlanDomainCommand( final String hostIp, final String vLanId )
{
return new RequestBuilder( PROXY_BINDING ).withCmdArgs( "add", vLanId, "-h", hostIp );
}
RequestBuilder getRemoveIpFromVlanDomainCommand( final String hostIp, final int vLanId )
{
return new RequestBuilder( PROXY_BINDING ).withCmdArgs( "del", String.valueOf( vLanId ), "-h", hostIp );
}
RequestBuilder getSetupContainerSshTunnelCommand( final String containerIp, final int sshIdleTimeout )
{
return new RequestBuilder( TUNNEL_BINDING ).withCmdArgs( "add", containerIp, String.valueOf( sshIdleTimeout ) );
}
RequestBuilder getMapContainerPortToRandomPortCommand( final Protocol protocol, final String containerIp,
final int containerPort )
{
return new RequestBuilder( MAP_BINDING ).withCmdArgs( protocol.name().toLowerCase(), "-i",
String.format( "%s:%s", containerIp, containerPort ) );
}
RequestBuilder getMapContainerPortToSpecificPortCommand( final Protocol protocol, final String containerIp,
final int containerPort, final int rhPort )
{
return new RequestBuilder( MAP_BINDING )
.withCmdArgs( protocol.name().toLowerCase(), "-i", String.format( "%s:%s", containerIp, containerPort ),
"-e", String.valueOf( rhPort ) );
}
RequestBuilder getRemoveContainerPortMappingCommand( final Protocol protocol, final String containerIp,
final int containerPort, final int rhPort )
{
return new RequestBuilder( MAP_BINDING )
.withCmdArgs( protocol.name().toLowerCase(), "-i", String.format( "%s:%s", containerIp, containerPort ),
"-e", String.valueOf( rhPort ), "-r" );
}
RequestBuilder getMapContainerPortToDomainCommand( final Protocol protocol, final String containerIp,
final int containerPort, final int rhPort, final String domain,
final String sslCertPath, final LoadBalancing loadBalancing,
final boolean sslBackend )
{
List<String> args = Lists.newArrayList( protocol.name().toLowerCase(), "-i",
String.format( "%s:%s", containerIp, containerPort ), "-e", String.valueOf( rhPort ), "-d", domain );
if ( !Strings.isNullOrEmpty( sslCertPath ) )
{
args.add( "-c" );
args.add( sslCertPath );
}
if ( sslBackend )
{
args.add( "--sslbackend" );
}
if ( loadBalancing != null )
{
args.add( "-p" );
args.add( loadBalancing.name().toLowerCase() );
}
return new RequestBuilder( MAP_BINDING ).withCmdArgs( args.toArray( new String[args.size()] ) );
}
RequestBuilder getRemoveContainerPortDomainMappingCommand( final Protocol protocol, final String containerIp,
final int containerPort, final int rhPort,
final String domain )
{
return new RequestBuilder( MAP_BINDING )
.withCmdArgs( protocol.name().toLowerCase(), "-i", String.format( "%s:%s", containerIp, containerPort ),
"-e", String.valueOf( rhPort ), "-d", domain, "-r" );
}
RequestBuilder getListOfReservedPortMappingCommand()
{
return new RequestBuilder( MAP_BINDING ).withCmdArgs( "-l" );
}
RequestBuilder getListPortMappingsCommand( final Protocol protocol )
{
List<String> args = Lists.newArrayList( "-l" );
if ( protocol != null )
{
args.add( protocol.name().toLowerCase() );
}
return new RequestBuilder( MAP_BINDING ).withCmdArgs( args.toArray( new String[args.size()] ) );
}
RequestBuilder getGetIPAddressCommand()
{
return new RequestBuilder( INFO_BINDING ).withCmdArgs( "ipaddr" );
}
}
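/*
 * Illustrative helper, not part of the original sources: shows how one of the builders above is
 * composed. Executing the command is left out because it depends on the peer/host API; the tunnel
 * name, remote ip, vlan and vni values are arbitrary examples. The resulting command is roughly
 * "subutai vxlan -create tunnel1 -remoteip 10.10.10.2 -vlan 100 -vni 1000100".
 */
class CommandsExample
{
    static RequestBuilder buildSampleTunnelCommand()
    {
        Commands commands = new Commands();
        return commands.getCreateTunnelCommand( "tunnel1", "10.10.10.2", 100, 1000100L );
    }
}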
|
package org.csstudio.display.builder.model;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.sameInstance;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.csstudio.display.builder.model.StructuredWidgetProperty.Descriptor;
import org.csstudio.display.builder.model.persist.ModelReader;
import org.csstudio.display.builder.model.persist.ModelWriter;
import org.csstudio.display.builder.model.properties.CommonWidgetProperties;
import org.csstudio.display.builder.model.properties.WidgetColor;
import org.junit.Test;
/** JUnit test of structured widget property
* @author Kay Kasemir
*/
@SuppressWarnings("nls")
public class StructuredWidgetPropertyUnitTest
{
/** Demo structured property */
private final static StructuredWidgetProperty.Descriptor propTrace =
new Descriptor(WidgetPropertyCategory.BEHAVIOR, "trace", "Trace");
/** Demo widget that has a structured property */
private static class PlotWidget extends Widget
{
public static final WidgetDescriptor WIDGET_DESCRIPTOR
= new WidgetDescriptor("plot", WidgetCategory.GRAPHIC, "Plot", "no-icon.png", "Demo widget")
{
@Override
public Widget createWidget()
{
return new PlotWidget();
}
};
private StructuredWidgetProperty trace;
public PlotWidget()
{
super("plot");
}
@Override
protected void defineProperties(List<WidgetProperty<?>> properties)
{
super.defineProperties(properties);
properties.add( trace = propTrace.createProperty(this, Arrays.asList(
CommonWidgetProperties.propPVName.createProperty(this, ""),
CommonWidgetProperties.propForegroundColor.createProperty(this, new WidgetColor(0, 0, 255))
)));
}
public StructuredWidgetProperty propTrace()
{
return trace;
}
};
@Test
public void testStructuredWidgetProperty() throws Exception
{
final PlotWidget widget = new PlotWidget();
System.out.println(widget + " trace:");
for (WidgetProperty<?> trace_element : widget.getPropertyValue(propTrace))
System.out.println(trace_element);
// Structure elements are always in XML, even with default value
widget.propTrace().getValue().get(0).setValueFromObject("position");
String xml = ModelWriter.getXML(Arrays.asList(widget));
System.out.println(xml);
assertThat(xml, containsString("<trace>"));
assertThat(xml, containsString("position"));
assertThat(xml, containsString("color"));
final WidgetProperty<WidgetColor> color = widget.propTrace().getElement(1);
color.setValue(new WidgetColor(255, 255, 0));
xml = ModelWriter.getXML(Arrays.asList(widget));
System.out.println(xml);
assertThat(xml, containsString("color"));
// Read back from XML
WidgetFactory.getInstance().addWidgetType(PlotWidget.WIDGET_DESCRIPTOR);
final DisplayModel model = ModelReader.parseXML(xml);
System.out.println(model);
assertThat(model.getChildren().size(), equalTo(1));
assertThat(model.getChildren().get(0).getType(), equalTo("plot"));
final PlotWidget copy = (PlotWidget)model.getChildren().get(0);
System.out.println(copy);
System.out.println(copy.getProperties());
final WidgetProperty<String> pv_name = copy.propTrace().getElement(0);
System.out.println(pv_name);
assertThat(pv_name.getValue(), equalTo("position"));
}
@Test
public void testUnmodifiable() throws Exception
{
final PlotWidget widget = new PlotWidget();
try
{
widget.propTrace().setValue(Collections.emptyList());
fail("Structure allowed modification");
}
catch (IllegalAccessError ex)
{
assertThat(ex.getMessage(), containsString("cannot"));
}
try
{
widget.propTrace().setValueFromObject(Collections.emptyList());
fail("Structure allowed modification");
}
catch (Exception ex)
{
assertThat(ex.getMessage(), containsString("must provide 2 elements"));
}
}
@Test
public void testElementAccess() throws Exception
{
final PlotWidget widget = new PlotWidget();
final WidgetProperty<String> name1 = widget.propTrace().getElement(0);
final WidgetProperty<String> name2 = widget.propTrace().getElement("pv_name");
assertThat(name1, sameInstance(name2));
WidgetProperty<WidgetColor> color_prop = widget.propTrace().getElement(1);
WidgetColor color = color_prop.getValue();
System.out.println(color);
color_prop = widget.propTrace().getElement(0);
try
{
color = color_prop.getValue();
System.out.println(color);
fail("Allowed access to String property as color");
}
catch (ClassCastException ex)
{
assertThat(ex.getMessage(), containsString("String cannot"));
}
}
@Test
public void testStructureAccess() throws Exception
{
final PlotWidget widget = new PlotWidget();
System.out.println(widget.getProperties());
widget.setPropertyValue("trace.pv_name", "fred");
final WidgetProperty<?> item = widget.getProperty("trace.pv_name");
System.out.println("trace.pv_name: " + item);
assertThat(item.getValue(), equalTo("fred"));
}
}
|
package org.eclipse.dawnsci.analysis.dataset.roi.handler;
import org.eclipse.dawnsci.analysis.dataset.roi.RectangularROI;
/**
* Wrapper class for a RectangularROI that adds handles
*/
public class RectangularROIHandler extends ROIHandler<RectangularROI> {
/**
* Number of handle areas
*/
private final static int NHANDLE = 9;
/**
* Handler for RectangularROI
* @param roi
*/
public RectangularROIHandler(RectangularROI roi) {
super();
for (int h = 0; h < NHANDLE; h++) {
add(-1);
}
this.roi = roi;
}
@Override
public int getCentreHandle() {
return 4;
}
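/*
 * Handle layout used by getHandlePoint and getAnchorPoint below, in fractional (x, y)
 * coordinates of the rectangle:
 *
 *   0 (0, 0)      1 (0.5, 0)      2 (1, 0)
 *   3 (0, 0.5)    4 (0.5, 0.5)    5 (1, 0.5)
 *   6 (0, 1)      7 (0.5, 1)      8 (1, 1)
 *
 * i.e. corners 0/2/6/8, edge midpoints 1/3/5/7 and the centre handle 4.
 */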
@Override
public double[] getHandlePoint(int handle, int size) {
double[] pt = null;
switch (handle) {
case 0:
pt = roi.getPoint();
break;
case 1:
pt = roi.getPoint(0.5, 0);
pt[0] -= size/2;
break;
case 2:
pt = roi.getPoint(1.0, 0);
pt[0] -= size;
break;
case 3:
pt = roi.getPoint(0.0, 0.5);
pt[1] -= size/2;
break;
case 4:
pt = roi.getPoint(0.5, 0.5);
pt[0] -= size/2;
pt[1] -= size/2;
break;
case 5:
pt = roi.getPoint(1.0, 0.5);
pt[0] -= size;
pt[1] -= size/2;
break;
case 6:
pt = roi.getPoint(0.0, 1.0);
pt[1] -= size;
break;
case 7:
pt = roi.getPoint(0.5, 1.0);
pt[0] -= size/2;
pt[1] -= size;
break;
case 8:
pt = roi.getPoint(1.0, 1.0);
pt[0] -= size;
pt[1] -= size;
break;
}
return pt;
}
@Override
public double[] getAnchorPoint(int handle, int size) {
double[] pt = null;
switch (handle) {
case 0:
pt = roi.getPoint();
break;
case 1:
pt = roi.getPoint(0.5, 0);
break;
case 2:
pt = roi.getPoint(1.0, 0);
break;
case 3:
pt = roi.getPoint(0.0, 0.5);
break;
case 4:
pt = roi.getPoint(0.5, 0.5);
break;
case 5:
pt = roi.getPoint(1.0, 0.5);
break;
case 6:
pt = roi.getPoint(0.0, 1.0);
break;
case 7:
pt = roi.getPoint(0.5, 1.0);
break;
case 8:
pt = roi.getPoint(1.0, 1.0);
break;
}
return pt;
}
/**
* @param spt starting point
* @param pt
* @return resized ROI
*/
public RectangularROI resize(double[] spt, double[] pt) {
RectangularROI rroi = null;
double[] ept;
if (handle == 4)
return rroi;
rroi = roi.copy();
ept = rroi.getEndPoint();
switch (handle) {
case -1: // new definition
rroi.setPoint(spt);
rroi.setEndPoint(pt);
break;
case 0:
pt[0] -= spt[0];
pt[1] -= spt[1];
rroi.setPointKeepEndPoint(pt, true, true);
break;
case 1:
pt[0] -= spt[0];
pt[1] -= spt[1];
rroi.setPointKeepEndPoint(pt, false, true);
break;
case 2:
rroi.adjustKeepDiagonalPoint(spt, ept, pt, true);
break;
case 3:
pt[0] -= spt[0];
pt[1] -= spt[1];
rroi.setPointKeepEndPoint(pt, true, false);
break;
case 5:
pt[0] += ept[0] - spt[0];
pt[1] += ept[1] - spt[1];
rroi.setEndPoint(pt, true, false);
break;
case 6:
rroi.adjustKeepDiagonalPoint(spt, ept, pt, false);
break;
case 7:
pt[0] += ept[0] - spt[0];
pt[1] += ept[1] - spt[1];
rroi.setEndPoint(pt, false, true);
break;
case 8:
pt[0] += ept[0] - spt[0];
pt[1] += ept[1] - spt[1];
rroi.setEndPoint(pt, true, true);
break;
default:
break;
}
return rroi;
}
/**
* @param pt
* @return reoriented ROI
*/
public RectangularROI reorient(double[] pt) {
RectangularROI rroi = null;
rroi = roi.copy();
double nang, oang;
switch (handle) {
case 0: // keep end point
oang = roi.getAngleRelativeToPoint(1.0, 1.0, roi.getPoint());
nang = roi.getAngleRelativeToPoint(1.0, 1.0, pt);
rroi.addAngle(nang-oang);
rroi.setEndPointKeepLengths(roi.getEndPoint());
break;
case 1:
oang = roi.getAngleRelativeToPoint(0.5, 1.0, roi.getPoint(0.5, 0.0));
nang = roi.getAngleRelativeToPoint(0.5, 1.0, pt);
rroi.translate(0.5, 1.0);
rroi.addAngle(nang-oang);
rroi.translate(-0.5, -1.0);
break;
case 2:
oang = roi.getAngleRelativeToPoint(0.0, 1.0, roi.getPoint(1.0, 0.0));
nang = roi.getAngleRelativeToPoint(0.0, 1.0, pt);
rroi.translate(0.0, 1.0);
rroi.addAngle(nang-oang);
rroi.translate(0.0, -1.0);
break;
case 3:
oang = roi.getAngleRelativeToPoint(1.0, 0.5, roi.getPoint(0.0, 0.5));
nang = roi.getAngleRelativeToPoint(1.0, 0.5, pt);
rroi.translate(1.0, 0.5);
rroi.addAngle(nang-oang);
rroi.translate(-1.0, -0.5);
break;
case 5:
oang = roi.getAngleRelativeToPoint(0.0, 0.5, roi.getPoint(1.0, 0.5));
nang = roi.getAngleRelativeToPoint(0.0, 0.5, pt);
rroi.translate(0.0, 0.5);
rroi.addAngle(nang-oang);
rroi.translate(0.0, -0.5);
break;
case 6:
oang = roi.getAngleRelativeToPoint(1.0, 0.0, roi.getPoint(0.0, 1.0));
nang = roi.getAngleRelativeToPoint(1.0, 0.0, pt);
rroi.translate(1.0, 0.0);
rroi.addAngle(nang-oang);
rroi.translate(-1.0, 0.0);
break;
case 7:
oang = roi.getAngleRelativeToPoint(0.5, 0.0, roi.getPoint(0.5, 1.0));
nang = roi.getAngleRelativeToPoint(0.5, 0.0, pt);
rroi.translate(0.5, 0.0);
rroi.addAngle(nang-oang);
rroi.translate(-0.5, 0.0);
break;
case 8: // keep start point
oang = roi.getAngleRelativeToPoint(0, 0, roi.getPoint(1.0, 1.0));
nang = roi.getAngleRelativeToPoint(0, 0, pt);
rroi.addAngle(nang-oang);
break;
default:
rroi = null;
}
return rroi;
}
@Override
public RectangularROI interpretMouseDragging(double[] spt, double[] ept) {
RectangularROI croi = null; // return null if not a valid event
switch (status) {
case RMOVE:
croi = roi.copy();
ept[0] -= spt[0];
ept[1] -= spt[1];
croi.addPoint(ept);
break;
case NONE:
croi = roi.copy();
croi.setEndPoint(ept);
break;
case REORIENT:
croi = reorient(ept);
break;
case RESIZE:
croi = resize(spt, ept);
break;
case ROTATE:
croi = roi.copy();
double ang = croi.getAngleRelativeToMidPoint(ept);
double[] mpt = croi.getMidPoint();
croi.setAngle(ang);
croi.setMidPoint(mpt);
break;
case CMOVE:
break;
case CRMOVE:
break;
}
return croi;
}
}
|
package com.intellij.internal.statistic.collectors.fus.actions.persistence;
import com.intellij.internal.statistic.beans.ConvertUsagesUtil;
import com.intellij.internal.statistic.eventLog.FeatureUsageDataBuilder;
import com.intellij.internal.statistic.eventLog.FeatureUsageGroup;
import com.intellij.internal.statistic.persistence.UsageStatisticsPersistenceComponent;
import com.intellij.internal.statistic.service.fus.collectors.FUSCounterUsageLogger;
import com.intellij.internal.statistic.utils.PluginInfo;
import com.intellij.internal.statistic.utils.PluginInfoDetectorKt;
import com.intellij.openapi.actionSystem.ActionManager;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.impl.ActionMenu;
import com.intellij.openapi.components.*;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.xmlb.annotations.MapAnnotation;
import com.intellij.util.xmlb.annotations.Tag;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ItemEvent;
import java.awt.event.MouseEvent;
import java.util.List;
import java.util.*;
import java.util.stream.Collectors;
import static com.intellij.internal.statistic.service.fus.collectors.FUSUsageContext.OS_CONTEXT;
/**
* @author Konstantin Bulenkov
*/
@State(
name = "MainMenuCollector",
storages = {
@Storage(value = UsageStatisticsPersistenceComponent.USAGE_STATISTICS_XML, roamingType = RoamingType.DISABLED, deprecated = true),
@Storage(value = "statistics.main_menu.xml", roamingType = RoamingType.DISABLED, deprecated = true)
}
)
public class MainMenuCollector implements PersistentStateComponent<MainMenuCollector.State> {
private static final FeatureUsageGroup GROUP = new FeatureUsageGroup("main.menu", 1);
private static final String GENERATED_ON_RUNTIME_ITEM = "generated.on.runtime";
private State myState = new State();
@Nullable
@Override
public State getState() {
return myState;
}
@Override
public void loadState(@NotNull State state) {
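		// previously persisted counts are not restored; the storages declared above are deprecated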
}
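	/**
	 * Records the invocation of a main menu action as a feature-usage event. Only actions
	 * developed by JetBrains are reported; the menu path is reconstructed from the current
	 * AWT event (menu item or mouse selection) and escaped before logging.
	 */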
public void record(@NotNull AnAction action) {
try {
final PluginInfo info = PluginInfoDetectorKt.getPluginInfo(action.getClass());
if (!info.isDevelopedByJetBrains()) {
return;
}
AWTEvent e = EventQueue.getCurrentEvent();
String path = null;
if (e instanceof ItemEvent) {
path = getPathFromMenuItem(e, action);
}
else if (e instanceof MouseEvent) {
path = getPathFromMenuSelectionManager(action);
}
if (!StringUtil.isEmpty(path)) {
final FeatureUsageDataBuilder data = new FeatureUsageDataBuilder().addFeatureContext(OS_CONTEXT).addPluginInfo(info);
FUSCounterUsageLogger.logEvent(GROUP, ConvertUsagesUtil.escapeDescriptorName(path), data);
}
}
catch (Exception ignore) {
}
}
protected String getPathFromMenuSelectionManager(@NotNull AnAction action) {
List<String> groups = Arrays.stream(MenuSelectionManager.defaultManager().getSelectedPath())
.filter(o -> o instanceof ActionMenu)
.map(o -> ((ActionMenu)o).getText())
.collect(Collectors.toList());
if (groups.size() > 0) {
String text = getActionText(action);
groups.add(text);
return convertMenuItemsToKey(groups);
}
return null;
}
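	// Actions whose presentation text may contain user-specific data (project names, tool names,
	// macro names) are reported under a fixed generic label instead.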
private static final HashMap<String, String> ourBlackList = new HashMap<>();
static {
ourBlackList.put("com.intellij.ide.ReopenProjectAction", "Reopen Project");
ourBlackList.put("com.intellij.openapi.wm.impl.ProjectWindowAction", "Switch Project");
ourBlackList.put("com.intellij.tools.ToolAction", "External Tool");
ourBlackList.put("com.intellij.ide.actionMacro.ActionMacroManager$InvokeMacroAction", "Invoke Macro");
}
private static String getActionText(@NotNull AnAction action) {
String text = ourBlackList.get(action.getClass().getName());
if (text != null) {
return text;
}
final String actionId = ActionManager.getInstance().getId(action);
if (StringUtil.isEmpty(actionId)) {
return GENERATED_ON_RUNTIME_ITEM;
}
return action.getTemplatePresentation().getText(); //avoid user data in Action Presentation
}
@NotNull
private static String convertMenuItemsToKey(List<String> menuItems) {
return StringUtil.join(menuItems, " -> ");
}
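	/**
	 * Builds the menu path for an AWT menu event by walking up the {@link MenuItem} hierarchy;
	 * when the path contains more than one element, the deepest (invoked) item is replaced
	 * with the sanitized action text.
	 */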
@NotNull
protected String getPathFromMenuItem(AWTEvent e, AnAction action) {
Object src = e.getSource();
ArrayList<String> items = new ArrayList<>();
while (src instanceof MenuItem) {
items.add(0, ((MenuItem)src).getLabel());
src = ((MenuItem)src).getParent();
}
if (items.size() > 1) {
items.set(items.size() - 1, getActionText(action));
}
return convertMenuItemsToKey(items);
}
public static MainMenuCollector getInstance() {
return ServiceManager.getService(MainMenuCollector.class);
}
public final static class State {
@Tag("counts")
@MapAnnotation(surroundWithTag = false, keyAttributeName = "path", valueAttributeName = "count")
public Map<String, Integer> myValues = new HashMap<>();
}
}
|
package org.ow2.proactive.resourcemanager.gui.table;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.jface.viewers.ISelectionChangedListener;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.viewers.SelectionChangedEvent;
import org.ow2.proactive.resourcemanager.gui.data.RMStore;
import org.ow2.proactive.resourcemanager.gui.data.model.Node;
import org.ow2.proactive.resourcemanager.gui.data.model.Selectable;
import org.ow2.proactive.resourcemanager.gui.handlers.LockNodesHandler;
import org.ow2.proactive.resourcemanager.gui.handlers.UnlockNodesHandler;
import org.ow2.proactive.resourcemanager.gui.handlers.RemoveNodesHandler;
import org.ow2.proactive.resourcemanager.gui.views.NodeInfoView;
import org.ow2.proactive.resourcemanager.gui.views.ResourceExplorerView;
import org.ow2.proactive.resourcemanager.gui.views.ResourcesCompactView;
public class TableSelectionListener implements ISelectionChangedListener {
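    /**
     * Propagates the current table selection to the node handlers (remove/lock/unlock),
     * the node info view, the tree viewer and the compact viewer.
     */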
public void selectionChanged(SelectionChangedEvent event) {
final Object[] arr = ((IStructuredSelection) event.getSelectionProvider().getSelection()).toArray();
if (arr.length != 0) {
final List<Selectable> list = new ArrayList<Selectable>(arr.length);
for (final Object o : arr) {
list.add((Selectable) o);
}
// normally RM is connected if I can select something...
if (RMStore.isConnected()) {
//check for null in order to fix SCHEDULING-1383
//FIXME: Ugly design. getInstance() method should never return null
if (RemoveNodesHandler.getInstance() != null)
RemoveNodesHandler.getInstance().setSelectedNodes(list);
if (LockNodesHandler.getInstance() != null)
LockNodesHandler.getInstance().setSelectedNodes(list);
if (UnlockNodesHandler.getInstance() != null)
UnlockNodesHandler.getInstance().setSelectedNodes(list);
}
if (list.size() > 0) {
Node n = (Node) list.get(0);
NodeInfoView.setNode(n);
if (ResourceExplorerView.getTreeViewer() != null) {
ResourceExplorerView.getTreeViewer().select(n);
}
}
if (ResourcesCompactView.getCompactViewer() != null) {
ResourcesCompactView.getCompactViewer().getSelectionManager().select(list);
}
}
}
}
|
package org.motechproject.ScheduleTrackingDemo.listeners;
import org.motechproject.ScheduleTrackingDemo.OpenMrsClient;
import org.motechproject.ScheduleTrackingDemo.OpenMrsConceptConverter;
import org.motechproject.ScheduleTrackingDemo.PatientScheduler;
import org.motechproject.ScheduleTrackingDemo.beans.PatientEncounterBean;
import org.motechproject.ScheduleTrackingDemo.beans.PatientEnrollmentBean;
import org.motechproject.ScheduleTrackingDemo.beans.PatientRegistrationBean;
import org.motechproject.mobileforms.api.callbacks.FormPublisher;
import org.motechproject.model.MotechEvent;
import org.motechproject.mrs.model.MRSFacility;
import org.motechproject.mrs.model.MRSPatient;
import org.motechproject.mrs.model.MRSPerson;
import org.motechproject.server.event.annotations.MotechListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
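/**
 * Listens for successfully validated mobile form submissions (patient registration,
 * enrollment and encounter forms) and forwards the data to OpenMRS and the demo
 * schedule tracker.
 */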
public class MobileFormListener {
private static final String DEMO_SCHEDULE_NAME = "Demo Concept Schedule";
Logger logger = LoggerFactory.getLogger(MobileFormListener.class);
@Autowired
OpenMrsClient openmrsClient;
@Autowired
PatientScheduler patientScheduler;
@MotechListener(subjects = { FormPublisher.FORM_VALIDATION_SUCCESSFUL + ".DemoGroup.DemoPatientRegistration" })
public void handlePatientRegistrationForm(MotechEvent event) {
PatientRegistrationBean bean = (PatientRegistrationBean)event.getParameters().get(FormPublisher.FORM_BEAN);
MRSPerson person = new MRSPerson().firstName(bean.getFirstName())
.lastName(bean.getLastName())
.dateOfBirth(bean.getDateOfBirth())
.birthDateEstimated(false)
.gender(bean.getGender());
MRSFacility facility = new MRSFacility("1");
MRSPatient patient = new MRSPatient(bean.getMotechId(), person, facility);
openmrsClient.savePatient(patient);
patientScheduler.saveMotechPatient(bean.getMotechId(), stripDashFromPhoneNumber(bean.getPhoneNumber()));
if (bean.isEnrollPatient()) {
patientScheduler.enrollIntoSchedule(bean.getMotechId(), DEMO_SCHEDULE_NAME);
}
}
private String stripDashFromPhoneNumber(String phoneNum) {
return phoneNum.replaceAll("-", "");
}
@MotechListener(subjects = { FormPublisher.FORM_VALIDATION_SUCCESSFUL + ".DemoGroup.DemoPatientEnrollment" })
public void handlePatientEnrollment(MotechEvent event) {
PatientEnrollmentBean bean = (PatientEnrollmentBean)event.getParameters().get(FormPublisher.FORM_BEAN);
patientScheduler.saveMotechPatient(bean.getMotechId(), stripDashFromPhoneNumber(bean.getPhoneNumber()));
patientScheduler.enrollIntoSchedule(bean.getMotechId(), DEMO_SCHEDULE_NAME);
}
@MotechListener(subjects = { FormPublisher.FORM_VALIDATION_SUCCESSFUL + ".DemoGroup.DemoPatientEncounter" })
public void handlePatientEncounter(MotechEvent event) {
PatientEncounterBean bean = (PatientEncounterBean)event.getParameters().get(FormPublisher.FORM_BEAN);
String conceptName = OpenMrsConceptConverter.convertToNameFromIndex(bean.getObservedConcept());
openmrsClient.addEncounterForPatient(bean.getMotechId(), conceptName, bean.getObservedDate());
}
}
|
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions: The above copyright notice and this permission
// notice shall be included in all copies or substantial portions of the
// Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
package phasereditor.inspect.core.tests;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.junit.Assert;
import org.junit.Test;
import phasereditor.inspect.core.InspectCore;
import phasereditor.inspect.core.jsdoc.PhaserJSDoc;
/**
* @author arian
*
*/
public class Canvas_Phaser_Help_Test {
@SuppressWarnings("static-method")
@Test
public void test() throws IOException {
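		// Verify that the generated Phaser JSDoc metadata provides help text for a
		// representative set of members and for selected constructor/method arguments.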
Path wsPath = Paths.get(".").toAbsolutePath().getParent().getParent();
Path sourceProjectPath = wsPath.resolve(InspectCore.RESOURCES_PHASER_CODE_PLUGIN);
Path metadataProjectPath = wsPath.resolve(InspectCore.RESOURCES_METADATA_PLUGIN);
PhaserJSDoc jsDoc = new PhaserJSDoc(sourceProjectPath.resolve("phaser-master/src"),
metadataProjectPath.resolve("phaser-custom/jsdoc/docs.json"));
// @formatter:off
String[] memberNames = {
"Phaser.Sprite.frameName",
"Phaser.Sprite.x",
"Phaser.Sprite.y",
"Phaser.Sprite.angle",
"Phaser.Sprite.scale",
"Phaser.Sprite.pivot",
"Phaser.Sprite.anchor",
"Phaser.Sprite.anchor",
"Phaser.Sprite.tint",
"Phaser.Sprite.animations",
"Phaser.GameObjectFactory.physicsGroup",
"Phaser.Sprite.frame",
"Phaser.TileSprite.tilePosition",
"Phaser.TileSprite.tileScale",
"Phaser.TileSprite.width",
"Phaser.TileSprite.height",
"Phaser.Group.physicsBodyType",
"Phaser.Group.physicsSortDirection",
"Phaser.Animation.loop",
"Phaser.Animation.killOnComplete",
"Phaser.ScaleManager.scaleMode",
"Phaser.ScaleManager.pageAlignHorizontally",
"Phaser.ScaleManager.pageAlignVertically",
"Phaser.Physics.startSystem",
"Phaser.Stage.backgroundColor"
};
// @formatter:on
for (String name : memberNames) {
String doc = jsDoc.getMemberHelp(name);
Assert.assertTrue(name, !doc.equals("<No help available>"));
}
// @formatter:off
		String[][] memberArgTuples = {
				{ "Phaser.Button", "callback" },
				{ "Phaser.Button", "callbackContext" },
				{ "Phaser.Button", "overFrame" },
				{ "Phaser.Button", "outFrame" },
				{ "Phaser.Button", "downFrame" },
				{ "Phaser.Button", "upFrame" },
				{ "Phaser.AnimationManager.add", "name" },
				{ "Phaser.AnimationManager.add", "frameRate" },
				{ "Phaser.Sprite", "game" },
				{ "Phaser.Sprite", "x" },
				{ "Phaser.Sprite", "y" },
				{ "Phaser.Sprite", "key" },
				{ "Phaser.Sprite", "frame" },
				{ "Phaser.Group", "game" },
				{ "Phaser.Group", "parent" },
				{ "Phaser.Group", "name" },
				{ "Phaser.Group", "addToStage" },
				{ "Phaser.Group", "enableBody" },
				{ "Phaser.Group", "physicsBodyType" },
		};
		// @formatter:on
		for (String[] tuple : memberArgTuples) {
String doc = jsDoc.getMethodArgHelp(tuple[0], tuple[1]);
Assert.assertTrue(tuple[0] + "#" + tuple[1], !doc.equals("<No help available>"));
}
}
}
|