// NOTE(review): removed four stray tokens ("answer" / "stringlengths" / "17" / "10.2M")
// that preceded the package declaration — dataset/extraction residue, not valid Java.
package com.facebook.litho;

import static com.facebook.litho.ComponentLifecycle.StateUpdate;
import static com.facebook.litho.FrameworkLogEvents.EVENT_LAYOUT_CALCULATE;
import static com.facebook.litho.FrameworkLogEvents.EVENT_PRE_ALLOCATE_MOUNT_CONTENT;
import static com.facebook.litho.FrameworkLogEvents.PARAM_IS_BACKGROUND_LAYOUT;
import static com.facebook.litho.FrameworkLogEvents.PARAM_LOG_TAG;
import static com.facebook.litho.FrameworkLogEvents.PARAM_TREE_DIFF_ENABLED;
import static com.facebook.litho.ThreadUtils.assertHoldsLock;
import static com.facebook.litho.ThreadUtils.assertMainThread;
import static com.facebook.litho.ThreadUtils.isMainThread;

import android.content.Context;
import android.graphics.Rect;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import android.os.Process;
import android.support.annotation.IntDef;
import android.support.annotation.Keep;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.annotation.VisibleForTesting;
import android.util.Log;
import android.view.View;
import android.view.ViewParent;
import com.facebook.infer.annotation.ReturnsOwnership;
import com.facebook.infer.annotation.ThreadConfined;
import com.facebook.infer.annotation.ThreadSafe;
import com.facebook.litho.annotations.MountSpec;
import com.facebook.litho.config.ComponentsConfiguration;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import javax.annotation.CheckReturnValue;
import javax.annotation.concurrent.GuardedBy;

/**
 * Represents a tree of components and controls their life cycle. ComponentTree takes in a single
 * root component and recursively invokes its OnCreateLayout to create a tree of components.
 * ComponentTree is responsible for refreshing the mounted state of a component with new props.
 *
 * <p>The usual use case for {@link ComponentTree} is:
 * <code>
 * ComponentTree component = ComponentTree.create(context, MyComponent.create());
 * myHostView.setRoot(component);
 * </code>
 */
@ThreadSafe
public class ComponentTree {

  // Sentinel id used before a real component-tree id has been generated/assigned.
  public static final int INVALID_ID = -1;
  private static final String TAG = ComponentTree.class.getSimpleName();
  // Sentinel for "no width/height spec set yet" (see mWidthSpec/mHeightSpec).
  private static final int SIZE_UNINITIALIZED = -1;

  // MainThread Looper messages:
  private static final int MESSAGE_WHAT_BACKGROUND_LAYOUT_STATE_UPDATED = 1;

  private static final String DEFAULT_LAYOUT_THREAD_NAME = "ComponentLayoutThread";
  private static final String DEFAULT_PMC_THREAD_NAME = "PreallocateMountContentThread";
  private static final int DEFAULT_LAYOUT_THREAD_PRIORITY = Process.THREAD_PRIORITY_BACKGROUND;

  // Values for mScheduleLayoutAfterMeasure: what (if any) layout to schedule once an
  // in-flight measure() finishes.
  private static final int SCHEDULE_NONE = 0;
  private static final int SCHEDULE_LAYOUT_ASYNC = 1;
  private static final int SCHEDULE_LAYOUT_SYNC = 2;

  // Optional callback notified when a new root/size-spec has been applied.
  private final MeasureListener mMeasureListener;

  // Release bookkeeping; mReleasedComponent records the root's name for error messages
  // thrown when a released tree is used again.
  private boolean mReleased;
  private String mReleasedComponent;

  @IntDef({SCHEDULE_NONE, SCHEDULE_LAYOUT_ASYNC, SCHEDULE_LAYOUT_SYNC})
  @Retention(RetentionPolicy.SOURCE)
  private @interface PendingLayoutCalculation {}

  /** Listener invoked when a root and size spec have been set on the tree. */
  public interface MeasureListener {
    void onSetRootAndSizeSpec(int width, int height);
  }

  // Source of unique ComponentTree ids (process-wide).
  private static final AtomicInteger sIdGenerator = new AtomicInteger(0);
  private static final Handler sMainThreadHandler = new ComponentMainThreadHandler();
  // Do not access sDefaultLayoutThreadLooper directly, use getDefaultLayoutThreadLooper().
  // Loopers for the shared default background threads; created lazily under the class lock.
  @GuardedBy("ComponentTree.class")
  private static volatile Looper sDefaultLayoutThreadLooper;

  @GuardedBy("ComponentTree.class")
  private static volatile Looper sDefaultPreallocateMountContentThreadLooper;

  // Per-thread cached Handler used by updateStateSync() to post back onto the calling
  // thread's Looper; held weakly so idle threads don't pin their Handler.
  private static final ThreadLocal<WeakReference<Handler>> sSyncStateUpdatesHandler =
      new ThreadLocal<>();

  // Helpers to track view visibility when we are incrementally
  // mounting and partially invalidating. Scratch buffers — main-thread only.
  private static final int[] sCurrentLocation = new int[2];
  private static final int[] sParentLocation = new int[2];
  private static final Rect sParentBounds = new Rect();

  @Nullable private final IncrementalMountHelper mIncrementalMountHelper;

  // Posted to the layout handler to compute a layout asynchronously (no Size output).
  private final Runnable mCalculateLayoutRunnable =
      new Runnable() {
        @Override
        public void run() {
          calculateLayout(null);
        }
      };

  private final boolean mCanPreallocateOnDefaultHandler;
  private final boolean mShouldPreallocatePerMountSpec;

  // Posted to the preallocation handler to warm up mount content off the main thread.
  private final Runnable mPreAllocateMountContentRunnable =
      new Runnable() {
        @Override
        public void run() {
          preAllocateMountContent(mShouldPreallocatePerMountSpec);
        }
      };

  // Runs a synchronous (non-async) state-update layout pass; see updateStateSync().
  private final Runnable mUpdateStateSyncRunnable =
      new Runnable() {
        @Override
        public void run() {
          updateStateInternal(false);
        }
      };

  private final ComponentContext mContext;
  private final boolean mCanPrefetchDisplayLists;
  private final boolean mCanCacheDrawingDisplayLists;
  private final boolean mShouldClipChildren;

  @Nullable private LayoutHandler mPreAllocateMountContentHandler;

  // These variables are only accessed from the main thread.
  @ThreadConfined(ThreadConfined.UI)
  private boolean mIsMounting;

  @ThreadConfined(ThreadConfined.UI)
  private final boolean mIncrementalMountEnabled;

  @ThreadConfined(ThreadConfined.UI)
  private final boolean mIsLayoutDiffingEnabled;

  @ThreadConfined(ThreadConfined.UI)
  private boolean mIsAttached;

  @ThreadConfined(ThreadConfined.UI)
  private final boolean mIsAsyncUpdateStateEnabled;

  @ThreadConfined(ThreadConfined.UI)
  private LithoView mLithoView;

  @ThreadConfined(ThreadConfined.UI)
  private LayoutHandler mLayoutThreadHandler;

  // True once a host view's measure specs have been recorded; cleared on detach().
  @GuardedBy("this")
  private boolean mHasViewMeasureSpec;

  // True after the first mount of this tree has completed.
  private boolean mHasMounted = false;

  // TODO(6606683): Enable recycling of mComponent.
  // We will need to ensure there are no background threads referencing mComponent. We'll need
  // to keep a reference count or something. :-/
  @GuardedBy("this")
  private @Nullable Component mRoot;

  @GuardedBy("this")
  private int mWidthSpec = SIZE_UNINITIALIZED;

  @GuardedBy("this")
  private int mHeightSpec = SIZE_UNINITIALIZED;

  // This is written to only by the main thread with the lock held, read from the main thread with
  // no lock held, or read from any other thread with the lock held.
  @Nullable private LayoutState mMainThreadLayoutState;

  // The semantics here are tricky. Whenever you transfer mBackgroundLayoutState to a local that
  // will be accessed outside of the lock, you must set mBackgroundLayoutState to null to ensure
  // that the current thread alone has access to the LayoutState, which is single-threaded.
  @GuardedBy("this")
  @Nullable
  private LayoutState mBackgroundLayoutState;

  @GuardedBy("this")
  private StateHandler mStateHandler;

  @ThreadConfined(ThreadConfined.UI)
  private RenderState mPreviousRenderState;

  @ThreadConfined(ThreadConfined.UI)
  private boolean mPreviousRenderStateSetFromBuilder = false;

  // Optional external lock serializing layout calculations (may be null).
  private final Object mLayoutLock;

  protected final int mId;

  // True while measure() is executing; used to defer state-update layouts (see
  // mScheduleLayoutAfterMeasure).
  @GuardedBy("this")
  private boolean mIsMeasuring;

  @PendingLayoutCalculation
  @GuardedBy("this")
  private int mScheduleLayoutAfterMeasure;

  // Event handlers recorded per component global key; entries are pruned after each
  // layout via clearUnusedEventHandlers().
  @GuardedBy("mEventHandlers")
  public final Map<String, EventHandlersWrapper> mEventHandlers = new LinkedHashMap<>();

  @GuardedBy("mEventTriggersContainer")
  private final EventTriggersContainer mEventTriggersContainer = new EventTriggersContainer();

  /** Creates a pooled {@link Builder}, building the root from the given component builder. */
  public static Builder create(ComponentContext context, Component.Builder<?> root) {
    return create(context, root.build());
  }

  /** Creates a pooled {@link Builder} for a tree rooted at {@code root}. */
  public static Builder create(ComponentContext context, Component root) {
    return ComponentsPools.acquireComponentTreeBuilder(context, root);
  }

  /** Copies the builder's configuration; falls back to shared default handlers when unset. */
  protected ComponentTree(Builder builder) {
    mContext = ComponentContext.withComponentTree(builder.context, this);
    mRoot = builder.root;
    mIncrementalMountEnabled = builder.incrementalMountEnabled;
    mIsLayoutDiffingEnabled = builder.isLayoutDiffingEnabled;
    mLayoutThreadHandler = builder.layoutThreadHandler;
    mShouldPreallocatePerMountSpec = builder.shouldPreallocatePerMountSpec;
    mPreAllocateMountContentHandler = builder.preAllocateMountContentHandler;
    mCanPreallocateOnDefaultHandler = builder.canPreallocateOnDefaultHandler;
    mLayoutLock = builder.layoutLock;
    mIsAsyncUpdateStateEnabled = builder.asyncStateUpdates;
    mCanPrefetchDisplayLists = builder.canPrefetchDisplayLists;
    mCanCacheDrawingDisplayLists = builder.canCacheDrawingDisplayLists;
    mShouldClipChildren = builder.shouldClipChildren;
    mHasMounted = builder.hasMounted;
    mMeasureListener = builder.mMeasureListener;

    // No custom layout handler provided: use the shared default background layout thread.
    if (mLayoutThreadHandler == null) {
      mLayoutThreadHandler = new DefaultLayoutHandler(getDefaultLayoutThreadLooper());
    }
    // Preallocation was requested on the default handler but none was supplied: create one
    // backed by the shared preallocate-mount-content thread.
    if (mPreAllocateMountContentHandler == null && mCanPreallocateOnDefaultHandler) {
      mPreAllocateMountContentHandler =
          new DefaultPreallocateMountContentHandler(
              getDefaultPreallocateMountContentThreadLooper());
    }

    final StateHandler builderStateHandler = builder.stateHandler;
    mStateHandler =
        builderStateHandler == null ? StateHandler.acquireNewInstance(null) : builderStateHandler;

    if (builder.previousRenderState != null) {
      mPreviousRenderState = builder.previousRenderState;
      mPreviousRenderStateSetFromBuilder = true;
    }

    // An explicit id override (e.g. for tests / restoration) wins over a generated one.
    if (builder.overrideComponentTreeId != -1) {
      mId = builder.overrideComponentTreeId;
    } else {
      mId = generateComponentTreeId();
    }

    mIncrementalMountHelper =
        ComponentsConfiguration.USE_INCREMENTAL_MOUNT_HELPER
            ? new IncrementalMountHelper(this)
            : null;
  }

  @ThreadConfined(ThreadConfined.UI)
  LayoutState getMainThreadLayoutState() {
    return mMainThreadLayoutState;
  }

  @VisibleForTesting
  @GuardedBy("this")
  protected LayoutState getBackgroundLayoutState() {
    return mBackgroundLayoutState;
  }

  /**
   * Picks the best LayoutState and sets it in mMainThreadLayoutState. The return value is a
   * LayoutState that must be released (after the lock is released). This awkward contract is
   * necessary to ensure thread-safety.
   */
  @CheckReturnValue
  @ReturnsOwnership
  @ThreadConfined(ThreadConfined.UI)
  @GuardedBy("this")
  private LayoutState setBestMainThreadLayoutAndReturnOldLayout() {
    assertHoldsLock(this);

    // If everything matches perfectly then we prefer mMainThreadLayoutState
    // because that means we don't need to remount.
    boolean isMainThreadLayoutBest;
    if (isCompatibleComponentAndSpec(mMainThreadLayoutState)) {
      isMainThreadLayoutBest = true;
    } else if (isCompatibleSpec(mBackgroundLayoutState, mWidthSpec, mHeightSpec)
        || !isCompatibleSpec(mMainThreadLayoutState, mWidthSpec, mHeightSpec)) {
      // If mMainThreadLayoutState isn't a perfect match, we'll prefer
      // mBackgroundLayoutState since it will have the more recent create.
      isMainThreadLayoutBest = false;
    } else {
      // If the main thread layout is still compatible size-wise, and the
      // background one is not, then we'll do nothing. We want to keep the same
      // main thread layout so that we don't force main thread re-layout.
      isMainThreadLayoutBest = true;
    }

    if (isMainThreadLayoutBest) {
      // We don't want to hold onto mBackgroundLayoutState since it's unlikely
      // to ever be used again. We return mBackgroundLayoutState to indicate it
      // should be released after exiting the lock.
      final LayoutState toRelease = mBackgroundLayoutState;
      mBackgroundLayoutState = null;
      return toRelease;
    } else {
      // Since we are changing layout states we'll need to remount.
      if (mLithoView != null) {
        mLithoView.setMountStateDirty();
      }

      // Promote the background layout; the displaced main-thread layout is handed back to
      // the caller for release once the lock is dropped.
      final LayoutState toRelease = mMainThreadLayoutState;
      mMainThreadLayoutState = mBackgroundLayoutState;
      mBackgroundLayoutState = null;

      return toRelease;
    }
  }

  /**
   * Called on the main thread after a background layout finished; promotes the new layout
   * (if better) and triggers a re-layout/remount of the host view as needed.
   */
  private void backgroundLayoutStateUpdated() {
    assertMainThread();

    // If we aren't attached, then we have nothing to do. We'll handle
    // everything in onAttach.
    if (!mIsAttached) {
      return;
    }

    LayoutState toRelease;
    boolean layoutStateUpdated;
    int componentRootId;
    synchronized (this) {
      if (mRoot == null) {
        // We have been released. Abort.
        return;
      }

      LayoutState oldMainThreadLayoutState = mMainThreadLayoutState;
      toRelease = setBestMainThreadLayoutAndReturnOldLayout();
      layoutStateUpdated = (mMainThreadLayoutState != oldMainThreadLayoutState);
      componentRootId = mRoot.getId();
    }

    // Release the displaced layout outside the lock (see the contract of
    // setBestMainThreadLayoutAndReturnOldLayout()).
    if (toRelease != null) {
      toRelease.releaseRef();
      toRelease = null;
    }

    if (!layoutStateUpdated) {
      return;
    }

    // We defer until measure if we don't yet have a width/height
    int viewWidth = mLithoView.getMeasuredWidth();
    int viewHeight = mLithoView.getMeasuredHeight();
    if (viewWidth == 0 && viewHeight == 0) {
      // The host view has not been measured yet.
      return;
    }

    final boolean needsAndroidLayout =
        !isCompatibleComponentAndSize(
            mMainThreadLayoutState, componentRootId, viewWidth, viewHeight);

    if (needsAndroidLayout) {
      mLithoView.requestLayout();
    } else {
      mountComponentIfDirty();
    }
  }

  /** Main-thread attach hook: promotes the best layout and schedules layout/rebind. */
  void attach() {
    assertMainThread();

    if (mLithoView == null) {
      throw new IllegalStateException("Trying to attach a ComponentTree without a set View");
    }

    if (mIncrementalMountHelper != null) {
      mIncrementalMountHelper.onAttach(mLithoView);
    }

    LayoutState toRelease;
    int componentRootId;
    synchronized (this) {
      // We need to track that we are attached regardless...
      mIsAttached = true;

      // ... and then we do state transfer
      toRelease = setBestMainThreadLayoutAndReturnOldLayout();

      if (mRoot == null) {
        throw new IllegalStateException(
            "Trying to attach a ComponentTree with a null root. Is released: "
                + mReleased
                + ", Released Component name is: "
                + mReleasedComponent);
      }
      componentRootId = mRoot.getId();
    }

    if (toRelease != null) {
      toRelease.releaseRef();
      toRelease = null;
    }

    // We defer until measure if we don't yet have a width/height
    int viewWidth = mLithoView.getMeasuredWidth();
    int viewHeight = mLithoView.getMeasuredHeight();
    if (viewWidth == 0 && viewHeight == 0) {
      // The host view has not been measured yet.
      return;
    }

    final boolean needsAndroidLayout =
        !isCompatibleComponentAndSize(
            mMainThreadLayoutState, componentRootId, viewWidth, viewHeight);

    if (needsAndroidLayout || mLithoView.isMountStateDirty()) {
      mLithoView.requestLayout();
    } else {
      mLithoView.rebind();
    }
  }

  // Two contexts belong together if they unwrap to the same root (e.g. Activity) context.
  private static boolean hasSameRootContext(Context context1, Context context2) {
    return ContextUtils.getRootContext(context1) == ContextUtils.getRootContext(context2);
  }

  @ThreadConfined(ThreadConfined.UI)
  boolean isMounting() {
    return mIsMounting;
  }

  /** Mounts (incrementally when enabled) iff the view's mount state is dirty. */
  private boolean mountComponentIfDirty() {
    if (mLithoView.isMountStateDirty()) {
      if (mIncrementalMountEnabled) {
        incrementalMountComponent();
      } else {
        mountComponent(null, true);
      }

      return true;
    }

    return false;
  }

  /** Mounts only the currently visible region of the tree. Requires incremental mount. */
  void incrementalMountComponent() {
    assertMainThread();

    if (!mIncrementalMountEnabled) {
      throw new IllegalStateException("Calling incrementalMountComponent() but incremental mount"
          + " is not enabled");
    }

    if (mLithoView == null || mLithoView.doesOwnIncrementalMount()) {
      return;
    }

    // Per ComponentTree visible area. Because LithoViews can be nested and mounted
    // not in "depth order", this variable cannot be static.
    final Rect currentVisibleArea = ComponentsPools.acquireRect();

    if (getVisibleRect(currentVisibleArea)) {
      mountComponent(currentVisibleArea, true);
    }
    // if false: no-op, doesn't have visible area, is not ready or not attached
    ComponentsPools.release(currentVisibleArea);
  }

  /**
   * Computes the view's visible rect (in view-local coordinates). Returns false when the
   * view is entirely clipped by its parent.
   */
  private boolean getVisibleRect(Rect visibleBounds) {
    assertMainThread();

    if (ComponentsConfiguration.incrementalMountUsesLocalVisibleBounds) {
      return mLithoView.getLocalVisibleRect(visibleBounds);
    }

    getLocationAndBoundsOnScreen(mLithoView, sCurrentLocation, visibleBounds);

    final ViewParent viewParent = mLithoView.getParent();
    if (viewParent instanceof View) {
      View parent = (View) viewParent;
      getLocationAndBoundsOnScreen(parent, sParentLocation, sParentBounds);
      if (!visibleBounds.setIntersect(visibleBounds, sParentBounds)) {
        return false;
      }
    }

    // Convert from screen coordinates back into this view's coordinate space.
    visibleBounds.offset(-sCurrentLocation[0], -sCurrentLocation[1]);

    return true;
  }

  // Fills 'location' with the view's screen position and 'bounds' with its screen rect.
  private static void getLocationAndBoundsOnScreen(View view, int[] location, Rect bounds) {
    assertMainThread();

    view.getLocationOnScreen(location);
    bounds.set(
        location[0],
        location[1],
        location[0] + view.getWidth(),
        location[1] + view.getHeight());
  }

  /** @see LayoutState#hasLithoViewWidthAnimation() */
  @ThreadConfined(ThreadConfined.UI)
  boolean hasLithoViewWidthAnimation() {
    assertMainThread();
    return mMainThreadLayoutState != null && mMainThreadLayoutState.hasLithoViewWidthAnimation();
  }

  /** @see LayoutState#hasLithoViewHeightAnimation() */
  @ThreadConfined(ThreadConfined.UI)
  boolean hasLithoViewHeightAnimation() {
    assertMainThread();
    return mMainThreadLayoutState != null && mMainThreadLayoutState.hasLithoViewHeightAnimation();
  }

  /**
   * Mounts the current main-thread layout into the LithoView.
   *
   * @param currentVisibleArea region to mount; null or empty mounts everything
   * @param processVisibilityOutputs whether visibility events should be dispatched
   */
  void mountComponent(Rect currentVisibleArea, boolean processVisibilityOutputs) {
    assertMainThread();

    if (mMainThreadLayoutState == null) {
      Log.w(TAG, "Main Thread Layout state is not found");
      return;
    }

    final boolean isDirtyMount = mLithoView.isMountStateDirty();

    mIsMounting = true;

    if (isDirtyMount) {
      applyPreviousRenderData(mMainThreadLayoutState);
    }

    if
    (!mHasMounted) {
      // First-ever mount of this tree: let the mount state run first-mount animations etc.
      mLithoView.getMountState().setIsFirstMountOfComponentTree();
      mHasMounted = true;
    }

    // currentVisibleArea null or empty => mount all
    mLithoView.mount(mMainThreadLayoutState, currentVisibleArea, processVisibilityOutputs);

    if (isDirtyMount) {
      recordRenderData(mMainThreadLayoutState);
    }

    mIsMounting = false;

    if (isDirtyMount) {
      mLithoView.onDirtyMountComplete();
    }
  }

  // Feeds saved render data (from the previous layout) to components that asked for it.
  private void applyPreviousRenderData(LayoutState layoutState) {
    final List<Component> components = layoutState.getComponentsNeedingPreviousRenderData();
    if (components == null || components.isEmpty()) {
      return;
    }

    if (mPreviousRenderState == null) {
      return;
    }

    mPreviousRenderState.applyPreviousRenderData(components);
  }

  // Snapshots render data after a dirty mount so the next layout can consume it.
  private void recordRenderData(LayoutState layoutState) {
    final List<Component> components = layoutState.getComponentsNeedingPreviousRenderData();
    if (components == null || components.isEmpty()) {
      return;
    }

    if (mPreviousRenderState == null) {
      mPreviousRenderState = ComponentsPools.acquireRenderState();
    }

    mPreviousRenderState.recordRenderData(components);
  }

  /** Main-thread detach hook; clears attach state and the recorded view measure specs. */
  void detach() {
    assertMainThread();

    if (mIncrementalMountHelper != null) {
      mIncrementalMountHelper.onDetach(mLithoView);
    }

    synchronized (this) {
      mIsAttached = false;
      mHasViewMeasureSpec = false;
    }
  }

  /**
   * Set a new LithoView to this ComponentTree checking that they have the same context and
   * clear the ComponentTree reference from the previous LithoView if any.
   * Be sure this ComponentTree is detached first.
   */
  void setLithoView(@NonNull LithoView view) {
    assertMainThread();

    // It's possible that the view associated with this ComponentTree was recycled but was
    // never detached. In all cases we have to make sure that the old references between
    // lithoView and componentTree are reset.
    if (mIsAttached) {
      if (mLithoView != null) {
        mLithoView.setComponentTree(null);
      } else {
        detach();
      }
    } else if (mLithoView != null) {
      // Remove the ComponentTree reference from a previous view if any.
      mLithoView.clearComponentTree();
    }

    if (!hasSameRootContext(view.getContext(), mContext)) {
      // This would indicate bad things happening, like leaking a context.
      throw new IllegalArgumentException(
          "Base view context differs, view context is: "
              + view.getContext()
              + ", ComponentTree context is: "
              + mContext);
    }

    mLithoView = view;
  }

  /** Detaches the LithoView reference; must not be called while still attached. */
  void clearLithoView() {
    assertMainThread();

    // Crash if the ComponentTree is mounted to a view.
    if (mIsAttached) {
      throw new IllegalStateException(
          "Clearing the LithoView while the ComponentTree is attached");
    }

    mLithoView = null;
  }

  /**
   * Main-thread measure pass: records the host view's specs and, when no compatible layout
   * exists (or forceLayout is set), computes one synchronously. The measured width/height are
   * written into {@code measureOutput} ([0]=width, [1]=height).
   */
  void measure(int widthSpec, int heightSpec, int[] measureOutput, boolean forceLayout) {
    assertMainThread();

    Component component = null;
    LayoutState toRelease;
    synchronized (this) {
      mIsMeasuring = true;

      // This widthSpec/heightSpec is fixed until the view gets detached.
      mWidthSpec = widthSpec;
      mHeightSpec = heightSpec;
      mHasViewMeasureSpec = true;

      toRelease = setBestMainThreadLayoutAndReturnOldLayout();

      if (forceLayout || !isCompatibleComponentAndSpec(mMainThreadLayoutState)) {
        // Neither layout was compatible and we have to perform a layout.
        // Since outputs get set on the same object during the lifecycle calls,
        // we need to copy it in order to use it concurrently.
        component = mRoot.makeShallowCopy();
      }
    }

    if (toRelease != null) {
      toRelease.releaseRef();
      toRelease = null;
    }

    if (component != null) {
      // TODO: We should re-use the existing CSSNodeDEPRECATED tree instead of re-creating it.
      if (mMainThreadLayoutState != null) {
        // It's beneficial to delete the old layout state before we start creating a new one since
        // we'll be able to re-use some of the layout nodes.
        LayoutState localLayoutState;
        synchronized (this) {
          localLayoutState = mMainThreadLayoutState;
          mMainThreadLayoutState = null;
        }
        localLayoutState.releaseRef();
      }

      // We have no layout that matches the given spec, so we need to compute it on the main thread.
      LayoutState localLayoutState =
          calculateLayoutState(
              mLayoutLock,
              mContext,
              component,
              widthSpec,
              heightSpec,
              mIsLayoutDiffingEnabled,
              null);

      final StateHandler layoutStateStateHandler = localLayoutState.consumeStateHandler();
      final List<Component> components = new ArrayList<>(localLayoutState.getComponents());
      synchronized (this) {
        // Fold the layout pass's state changes back into the tree's StateHandler, then
        // publish the new layout as the main-thread layout.
        if (layoutStateStateHandler != null) {
          mStateHandler.commit(layoutStateStateHandler);
        }

        localLayoutState.clearComponents();
        mMainThreadLayoutState = localLayoutState;
        localLayoutState = null;
      }

      // Re-register event/trigger handlers for the components of the fresh layout and drop
      // any handlers that no longer correspond to a live component.
      clearUnusedTriggerHandlers();

      for (Component layoutComponent : components) {
        bindEventHandler(layoutComponent);
        bindTriggerHandler(layoutComponent);
      }

      clearUnusedEventHandlers();

      // We need to force remount on layout
      mLithoView.setMountStateDirty();
    }

    // NOTE(review): mMainThreadLayoutState is assumed non-null here (either it was already
    // compatible or was just computed above) — confirm against callers.
    measureOutput[0] = mMainThreadLayoutState.getWidth();
    measureOutput[1] = mMainThreadLayoutState.getHeight();

    // A state update may have arrived while we were measuring; run the deferred layout now.
    int layoutScheduleType = SCHEDULE_NONE;
    Component root = null;

    synchronized (this) {
      mIsMeasuring = false;

      if (mScheduleLayoutAfterMeasure != SCHEDULE_NONE) {
        layoutScheduleType = mScheduleLayoutAfterMeasure;
        mScheduleLayoutAfterMeasure = SCHEDULE_NONE;
        root = mRoot.makeShallowCopy();
      }
    }

    if (layoutScheduleType != SCHEDULE_NONE) {
      setRootAndSizeSpecInternal(
          root,
          SIZE_UNINITIALIZED,
          SIZE_UNINITIALIZED,
          layoutScheduleType == SCHEDULE_LAYOUT_ASYNC,
          null /* output */);
    }
  }

  /**
   * Returns {@code true} if the layout call mounted the component.
   */
  boolean layout() {
    assertMainThread();

    return mountComponentIfDirty();
  }

  /**
   * Returns whether incremental mount is enabled or not in this component.
   */
  public boolean isIncrementalMountEnabled() {
    return mIncrementalMountEnabled;
  }

  synchronized Component getRoot() {
    return mRoot;
  }

  /**
   * Update the root component. This can happen in both attached and detached states. In each case
   * we will run a layout and then proxy a message to the main thread to cause a
   * relayout/invalidate.
   */
  public void setRoot(Component rootComponent) {
    if (rootComponent == null) {
      throw new IllegalArgumentException("Root component can't be null");
    }

    setRootAndSizeSpecInternal(
        rootComponent,
        SIZE_UNINITIALIZED,
        SIZE_UNINITIALIZED,
        false /* isAsync */,
        null /* output */);
  }

  /**
   * Pre-allocate the mount content of all MountSpec in this tree. Must be called after layout is
   * created.
   */
  @ThreadSafe(enableChecks = false)
  private void preAllocateMountContent(boolean shouldPreallocatePerMountSpec) {
    final LayoutState toPrePopulate;

    // Hold a ref on whichever layout state is available so it can't be released under us.
    synchronized (this) {
      if (mMainThreadLayoutState != null) {
        toPrePopulate = mMainThreadLayoutState.acquireRef();
      } else if (mBackgroundLayoutState != null) {
        toPrePopulate = mBackgroundLayoutState.acquireRef();
      } else {
        return;
      }
    }

    final ComponentsLogger logger = mContext.getLogger();
    LogEvent event = null;
    if (logger != null) {
      event = logger.newPerformanceEvent(EVENT_PRE_ALLOCATE_MOUNT_CONTENT);
      event.addParam(PARAM_LOG_TAG, mContext.getLogTag());
    }

    toPrePopulate.preAllocateMountContent(shouldPreallocatePerMountSpec);

    if (logger != null) {
      logger.log(event);
    }

    toPrePopulate.releaseRef();
  }

  /** Async variant of {@link #setRoot(Component)}. */
  public void setRootAsync(Component rootComponent) {
    if (rootComponent == null) {
      throw new IllegalArgumentException("Root component can't be null");
    }

    setRootAndSizeSpecInternal(
        rootComponent,
        SIZE_UNINITIALIZED,
        SIZE_UNINITIALIZED,
        true /* isAsync */,
        null /* output */);
  }

  /** Queues a state update without triggering a layout calculation. */
  synchronized void updateStateLazy(String componentKey, StateUpdate stateUpdate) {
    if (mRoot == null) {
      return;
    }

    mStateHandler.queueStateUpdate(componentKey, stateUpdate);
  }

  /**
   * Queues a state update and schedules a synchronous layout pass on the calling thread's
   * Looper (or on the default layout thread if the caller has no Looper).
   */
  void updateStateSync(String componentKey, StateUpdate stateUpdate) {
    synchronized (this) {
      if (mRoot == null) {
        return;
      }

      mStateHandler.queueStateUpdate(componentKey, stateUpdate);
    }

    Looper looper = Looper.myLooper();
    if (looper == null) {
      Log.w(
          TAG,
          "You cannot update state synchronously from a thread without a looper, "
              + "using the default background layout thread instead");
mLayoutThreadHandler.removeCallbacks(mUpdateStateSyncRunnable); mLayoutThreadHandler.post(mUpdateStateSyncRunnable); return; } Handler handler; synchronized (this) { final WeakReference<Handler> handlerWr = sSyncStateUpdatesHandler.get(); if (handlerWr != null && handlerWr.get() != null) { handler = handlerWr.get(); handler.removeCallbacks(mUpdateStateSyncRunnable); } else { handler = new Handler(looper); sSyncStateUpdatesHandler.set(new WeakReference<>(handler)); } } handler.post(mUpdateStateSyncRunnable); } void updateStateAsync(String componentKey, StateUpdate stateUpdate) { if (!mIsAsyncUpdateStateEnabled) { throw new RuntimeException("Triggering async state updates on this component tree is " + "disabled, use sync state updates."); } synchronized (this) { if (mRoot == null) { return; } mStateHandler.queueStateUpdate(componentKey, stateUpdate); } updateStateInternal(true); } void updateStateInternal(boolean isAsync) { final Component root; synchronized (this) { if (mRoot == null) { return; } if (mIsMeasuring) { // If the layout calculation was already scheduled to happen synchronously let's just go // with a sync layout calculation. if (mScheduleLayoutAfterMeasure == SCHEDULE_LAYOUT_SYNC) { return; } mScheduleLayoutAfterMeasure = isAsync ? SCHEDULE_LAYOUT_ASYNC : SCHEDULE_LAYOUT_SYNC; return; } root = mRoot.makeShallowCopy(); } setRootAndSizeSpecInternal( root, SIZE_UNINITIALIZED, SIZE_UNINITIALIZED, isAsync, null /*output */); } private void bindEventHandler(Component component) { final String key = component.getGlobalKey(); if (key == null) { return; } synchronized (mEventHandlers) { final EventHandlersWrapper eventHandlers = mEventHandlers.get(key); if (eventHandlers == null) { return; } // Mark that the list of event handlers for this component is still needed. 
eventHandlers.boundInCurrentLayout = true; eventHandlers.bindToDispatcherComponent(component); } } private void clearUnusedEventHandlers() { synchronized (mEventHandlers) { final Iterator iterator = mEventHandlers.keySet().iterator(); while (iterator.hasNext()) { if (!mEventHandlers.get(iterator.next()).boundInCurrentLayout) { iterator.remove(); } } } } void recordEventHandler(Component component, EventHandler eventHandler) { final String key = component.getGlobalKey(); if (key == null) { return; } synchronized (mEventHandlers) { EventHandlersWrapper eventHandlers = mEventHandlers.get(key); if (eventHandlers == null) { eventHandlers = new EventHandlersWrapper(); mEventHandlers.put(key, eventHandlers); } eventHandlers.addEventHandler(eventHandler); } } private void bindTriggerHandler(Component component) { synchronized (mEventTriggersContainer) { component.recordEventTrigger(mEventTriggersContainer); } } private void clearUnusedTriggerHandlers() { synchronized (mEventTriggersContainer) { mEventTriggersContainer.clear(); } } @Nullable EventTrigger getEventTrigger(String triggerKey) { synchronized (mEventTriggersContainer) { return mEventTriggersContainer.getEventTrigger(triggerKey); } } /** * Update the width/height spec. This is useful if you are currently detached and are responding * to a configuration change. If you are currently attached then the HostView is the source of * truth for width/height, so this call will be ignored. */ public void setSizeSpec(int widthSpec, int heightSpec) { setSizeSpec(widthSpec, heightSpec, null); } /** * Same as {@link #setSizeSpec(int, int)} but fetches the resulting width/height * in the given {@link Size}. 
   */
  public void setSizeSpec(int widthSpec, int heightSpec, Size output) {
    setRootAndSizeSpecInternal(
        null, widthSpec, heightSpec, false /* isAsync */, output /* output */);
  }

  /** Async variant of {@link #setSizeSpec(int, int)}. */
  public void setSizeSpecAsync(int widthSpec, int heightSpec) {
    setRootAndSizeSpecInternal(
        null, widthSpec, heightSpec, true /* isAsync */, null /* output */);
  }

  /**
   * Compute asynchronously a new layout with the given component root and sizes
   */
  public void setRootAndSizeSpecAsync(Component root, int widthSpec, int heightSpec) {
    if (root == null) {
      throw new IllegalArgumentException("Root component can't be null");
    }

    setRootAndSizeSpecInternal(
        root, widthSpec, heightSpec, true /* isAsync */, null /* output */);
  }

  /**
   * Compute a new layout with the given component root and sizes
   */
  public void setRootAndSizeSpec(Component root, int widthSpec, int heightSpec) {
    if (root == null) {
      throw new IllegalArgumentException("Root component can't be null");
    }

    setRootAndSizeSpecInternal(
        root, widthSpec, heightSpec, false /* isAsync */, null /* output */);
  }

  /** Same as {@link #setRootAndSizeSpec(Component, int, int)} but also reports the size. */
  public void setRootAndSizeSpec(Component root, int widthSpec, int heightSpec, Size output) {
    if (root == null) {
      throw new IllegalArgumentException("Root component can't be null");
    }

    setRootAndSizeSpecInternal(
        root, widthSpec, heightSpec, false /* isAsync */, output);
  }

  /**
   * @return the {@link LithoView} associated with this ComponentTree if any.
   */
  @Keep
  @Nullable
  public LithoView getLithoView() {
    assertMainThread();
    return mLithoView;
  }

  /**
   * Provides a new instance from the StateHandler pool that is initialized with the information
   * from the StateHandler currently held by the ComponentTree. Once the state updates have been
   * applied and we are back in the main thread the state handler gets released to the pool.
   *
   * @return a copy of the state handler instance held by ComponentTree.
   */
  public synchronized StateHandler getStateHandler() {
    return StateHandler.acquireNewInstance(mStateHandler);
  }

  /**
   * Takes ownership of the {@link RenderState} object from this ComponentTree - this allows the
   * RenderState to be persisted somewhere and then set back on another ComponentTree using the
   * {@link Builder}. See {@link RenderState} for more information on the purpose of this object.
   */
  @ThreadConfined(ThreadConfined.UI)
  public RenderState consumePreviousRenderState() {
    final RenderState previousRenderState = mPreviousRenderState;

    mPreviousRenderState = null;
    mPreviousRenderStateSetFromBuilder = false;
    return previousRenderState;
  }

  /**
   * Shows a tooltip anchored to the component with the given global key.
   *
   * @deprecated use {@link #showTooltip(LithoTooltip, String, int, int)} instead.
   */
  @Deprecated
  void showTooltip(
      DeprecatedLithoTooltip tooltip,
      String anchorGlobalKey,
      TooltipPosition tooltipPosition,
      int xOffset,
      int yOffset) {
    assertMainThread();

    final Map<String, Rect> componentKeysToBounds;
    synchronized (this) {
      componentKeysToBounds = mMainThreadLayoutState.getComponentKeyToBounds();
    }

    if (!componentKeysToBounds.containsKey(anchorGlobalKey)) {
      throw new IllegalArgumentException(
          "Cannot find a component with key " + anchorGlobalKey + " to use as anchor.");
    }

    final Rect anchorBounds = componentKeysToBounds.get(anchorGlobalKey);
    LithoTooltipController.showOnAnchor(
        tooltip, anchorBounds, mLithoView, tooltipPosition, xOffset, yOffset);
  }

  /** Shows a tooltip anchored to the bounds of the component with the given global key. */
  void showTooltip(LithoTooltip lithoTooltip, String anchorGlobalKey, int xOffset, int yOffset) {
    assertMainThread();

    final Map<String, Rect> componentKeysToBounds;
    synchronized (this) {
      componentKeysToBounds = mMainThreadLayoutState.getComponentKeyToBounds();
    }

    if (!componentKeysToBounds.containsKey(anchorGlobalKey)) {
      throw new IllegalArgumentException(
          "Cannot find a component with key " + anchorGlobalKey + " to use as anchor.");
    }

    final Rect anchorBounds = componentKeysToBounds.get(anchorGlobalKey);
    lithoTooltip.showLithoTooltip(mLithoView, anchorBounds, xOffset, yOffset);
  }

  // Central entry point for all setRoot/setSizeSpec variants: records the new root and/or
  // specs and decides whether (and how) a layout calculation must run.
  private void
      setRootAndSizeSpecInternal(
          Component root, int widthSpec, int heightSpec, boolean isAsync, Size output) {
    synchronized (this) {
      final Map<String, List<StateUpdate>> pendingStateUpdates =
          mStateHandler.getPendingStateUpdates();
      // A pending state update must produce a new component id so the layout is not
      // considered "unchanged" below.
      if (pendingStateUpdates != null && pendingStateUpdates.size() > 0 && root != null) {
        root = root.makeShallowCopyWithNewId();
      }

      final boolean rootInitialized = root != null;
      final boolean widthSpecInitialized = widthSpec != SIZE_UNINITIALIZED;
      final boolean heightSpecInitialized = heightSpec != SIZE_UNINITIALIZED;

      if (mHasViewMeasureSpec && !rootInitialized) {
        // It doesn't make sense to specify the width/height while the HostView is attached and it
        // has been measured. We do not throw an Exception only because there can be race conditions
        // that can cause this to happen. In such race conditions, ignoring the setSizeSpec call is
        // the right thing to do.
        return;
      }

      final boolean widthSpecDidntChange = !widthSpecInitialized || widthSpec == mWidthSpec;
      final boolean heightSpecDidntChange = !heightSpecInitialized || heightSpec == mHeightSpec;
      final boolean sizeSpecDidntChange = widthSpecDidntChange && heightSpecDidntChange;
      final LayoutState mostRecentLayoutState =
          mBackgroundLayoutState != null ? mBackgroundLayoutState : mMainThreadLayoutState;
      final boolean allSpecsWereInitialized =
          widthSpecInitialized
              && heightSpecInitialized
              && mWidthSpec != SIZE_UNINITIALIZED
              && mHeightSpec != SIZE_UNINITIALIZED;
      final boolean sizeSpecsAreCompatible =
          sizeSpecDidntChange
              || (allSpecsWereInitialized
                  && mostRecentLayoutState != null
                  && LayoutState.hasCompatibleSizeSpec(
                      mWidthSpec,
                      mHeightSpec,
                      widthSpec,
                      heightSpec,
                      mostRecentLayoutState.getWidth(),
                      mostRecentLayoutState.getHeight()));
      final boolean rootDidntChange = !rootInitialized || root.getId() == mRoot.getId();

      if (rootDidntChange && sizeSpecsAreCompatible) {
        // The spec and the root haven't changed. Either we have a layout already, or we're
        // currently computing one on another thread.
        if (output == null) {
          return;
        }

        // Set the output if we have a LayoutState, otherwise we need to compute one synchronously
        // below to get the correct output.
        if (mostRecentLayoutState != null) {
          output.height = mostRecentLayoutState.getHeight();
          output.width = mostRecentLayoutState.getWidth();
          return;
        }
      }

      if (widthSpecInitialized) {
        mWidthSpec = widthSpec;
      }

      if (heightSpecInitialized) {
        mHeightSpec = heightSpec;
      }

      if (rootInitialized) {
        mRoot = root;
      }
    }

    if (isAsync && output != null) {
      throw new IllegalArgumentException("The layout can't be calculated asynchronously if"
          + " we need the Size back");
    } else if (isAsync) {
      // Coalesce with any already-queued async layout request.
      mLayoutThreadHandler.removeCallbacks(mCalculateLayoutRunnable);
      mLayoutThreadHandler.post(mCalculateLayoutRunnable);
    } else {
      calculateLayout(output);
    }
  }

  /**
   * Calculates the layout.
   *
   * @param output a destination where the size information should be saved
   */
  private void calculateLayout(Size output) {
    int widthSpec;
    int heightSpec;
    Component root;
    LayoutState previousLayoutState = null;

    // Cancel any scheduled layout requests we might have in the background queue
    // since we are starting a new layout computation.
    mLayoutThreadHandler.removeCallbacks(mCalculateLayoutRunnable);

    synchronized (this) {
      // Can't compute a layout if specs or root are missing
      if (!hasSizeSpec() || mRoot == null) {
        return;
      }

      // Check if we already have a compatible layout.
      if (hasCompatibleComponentAndSpec()) {
        if (output != null) {
          final LayoutState mostRecentLayoutState =
              mBackgroundLayoutState != null ? mBackgroundLayoutState : mMainThreadLayoutState;
          output.width = mostRecentLayoutState.getWidth();
          output.height = mostRecentLayoutState.getHeight();
        }
        return;
      }

      widthSpec = mWidthSpec;
      heightSpec = mHeightSpec;
      root = mRoot.makeShallowCopy();

      // Keep a ref on the current main-thread layout so its diff tree stays valid while we
      // compute the new layout against it.
      if (mMainThreadLayoutState != null) {
        previousLayoutState = mMainThreadLayoutState.acquireRef();
      }
    }

    final ComponentsLogger logger = mContext.getLogger();
    LogEvent layoutEvent = null;
    if (logger != null) {
      layoutEvent = logger.newPerformanceEvent(EVENT_LAYOUT_CALCULATE);
      layoutEvent.addParam(PARAM_LOG_TAG, mContext.getLogTag());
      layoutEvent.addParam(PARAM_TREE_DIFF_ENABLED, String.valueOf(mIsLayoutDiffingEnabled));
      layoutEvent.addParam(
          PARAM_IS_BACKGROUND_LAYOUT, String.valueOf(!ThreadUtils.isMainThread()));
    }

    LayoutState localLayoutState =
        calculateLayoutState(
            mLayoutLock,
            mContext,
            root,
            widthSpec,
            heightSpec,
            mIsLayoutDiffingEnabled,
            previousLayoutState != null ? previousLayoutState.getDiffTree() : null);

    if (output != null) {
      output.width = localLayoutState.getWidth();
      output.height = localLayoutState.getHeight();
    }

    if (previousLayoutState != null) {
      previousLayoutState.releaseRef();
      previousLayoutState = null;
    }

    List<Component> components = null;
    boolean layoutStateUpdated = false;
    synchronized (this) {
      // Make sure some other thread hasn't computed a compatible layout in the meantime.
if (!hasCompatibleComponentAndSpec() && isCompatibleSpec(localLayoutState, mWidthSpec, mHeightSpec)) { if (localLayoutState != null) { final StateHandler layoutStateStateHandler = localLayoutState.consumeStateHandler(); if (layoutStateStateHandler != null) { if (mStateHandler != null) { // we could have been released mStateHandler.commit(layoutStateStateHandler); } } if (mMeasureListener != null) { mMeasureListener.onSetRootAndSizeSpec( localLayoutState.getWidth(), localLayoutState.getHeight()); } components = new ArrayList<>(localLayoutState.getComponents()); localLayoutState.clearComponents(); } // Set the new layout state, and remember the old layout state so we // can release it. LayoutState tmp = mBackgroundLayoutState; mBackgroundLayoutState = localLayoutState; localLayoutState = tmp; layoutStateUpdated = true; } } if (components != null) { clearUnusedTriggerHandlers(); for (Component component : components) { bindEventHandler(component); bindTriggerHandler(component); } clearUnusedEventHandlers(); } if (localLayoutState != null) { localLayoutState.releaseRef(); localLayoutState = null; } if (layoutStateUpdated) { postBackgroundLayoutStateUpdated(); } if (mPreAllocateMountContentHandler != null) { mPreAllocateMountContentHandler.removeCallbacks(mPreAllocateMountContentRunnable); mPreAllocateMountContentHandler.post(mPreAllocateMountContentRunnable); } if (logger != null) { logger.log(layoutEvent); } } /** * Transfer mBackgroundLayoutState to mMainThreadLayoutState. This will proxy * to the main thread if necessary. If the component/size-spec changes in the * meantime, then the transfer will be aborted. */ private void postBackgroundLayoutStateUpdated() { if (isMainThread()) { // We need to possibly update mMainThreadLayoutState. This call will // cause the host view to be invalidated and re-laid out, if necessary. 
backgroundLayoutStateUpdated(); } else { // If we aren't on the main thread, we send a message to the main thread // to invoke backgroundLayoutStateUpdated. sMainThreadHandler.obtainMessage(MESSAGE_WHAT_BACKGROUND_LAYOUT_STATE_UPDATED, this) .sendToTarget(); } } /** * The contract is that in order to release a ComponentTree, you must do so from the main * thread, or guarantee that it will never be accessed from the main thread again. Usually * HostView will handle releasing, but if you never attach to a host view, then you should call * release yourself. */ public void release() { if (mIsMounting) { throw new IllegalStateException("Releasing a ComponentTree that is currently being mounted"); } LayoutState mainThreadLayoutState; LayoutState backgroundLayoutState; synchronized (this) { sMainThreadHandler.removeMessages(MESSAGE_WHAT_BACKGROUND_LAYOUT_STATE_UPDATED, this); mLayoutThreadHandler.removeCallbacks(mCalculateLayoutRunnable); mLayoutThreadHandler.removeCallbacks(mUpdateStateSyncRunnable); if (mPreAllocateMountContentHandler != null) { mPreAllocateMountContentHandler.removeCallbacks(mPreAllocateMountContentRunnable); } mReleased = true; mReleasedComponent = mRoot.getSimpleName(); if (mLithoView != null) { mLithoView.setComponentTree(null); } mRoot = null; mainThreadLayoutState = mMainThreadLayoutState; mMainThreadLayoutState = null; backgroundLayoutState = mBackgroundLayoutState; mBackgroundLayoutState = null; // TODO t15532529 mStateHandler = null; if (mPreviousRenderState != null && !mPreviousRenderStateSetFromBuilder) { ComponentsPools.release(mPreviousRenderState); } mPreviousRenderState = null; mPreviousRenderStateSetFromBuilder = false; } if (mainThreadLayoutState != null) { mainThreadLayoutState.releaseRef(); mainThreadLayoutState = null; } if (backgroundLayoutState != null) { backgroundLayoutState.releaseRef(); backgroundLayoutState = null; } synchronized (mEventTriggersContainer) { clearUnusedTriggerHandlers(); } } @GuardedBy("this") private boolean 
isCompatibleComponentAndSpec(LayoutState layoutState) { assertHoldsLock(this); return mRoot != null && isCompatibleComponentAndSpec( layoutState, mRoot.getId(), mWidthSpec, mHeightSpec); } // Either the MainThreadLayout or the BackgroundThreadLayout is compatible with the current state. @GuardedBy("this") private boolean hasCompatibleComponentAndSpec() { assertHoldsLock(this); return isCompatibleComponentAndSpec(mMainThreadLayoutState) || isCompatibleComponentAndSpec(mBackgroundLayoutState); } @GuardedBy("this") private boolean hasSizeSpec() { assertHoldsLock(this); return mWidthSpec != SIZE_UNINITIALIZED && mHeightSpec != SIZE_UNINITIALIZED; } private static synchronized Looper getDefaultLayoutThreadLooper() { if (sDefaultLayoutThreadLooper == null) { final HandlerThread defaultThread = new HandlerThread(DEFAULT_LAYOUT_THREAD_NAME, DEFAULT_LAYOUT_THREAD_PRIORITY); defaultThread.start(); sDefaultLayoutThreadLooper = defaultThread.getLooper(); } return sDefaultLayoutThreadLooper; } private static synchronized Looper getDefaultPreallocateMountContentThreadLooper() { if (sDefaultPreallocateMountContentThreadLooper == null) { HandlerThread defaultThread = new HandlerThread(DEFAULT_PMC_THREAD_NAME); defaultThread.start(); sDefaultPreallocateMountContentThreadLooper = defaultThread.getLooper(); } return sDefaultPreallocateMountContentThreadLooper; } private static boolean isCompatibleSpec( LayoutState layoutState, int widthSpec, int heightSpec) { return layoutState != null && layoutState.isCompatibleSpec(widthSpec, heightSpec) && layoutState.isCompatibleAccessibility(); } private static boolean isCompatibleComponentAndSpec( LayoutState layoutState, int componentId, int widthSpec, int heightSpec) { return layoutState != null && layoutState.isCompatibleComponentAndSpec(componentId, widthSpec, heightSpec) && layoutState.isCompatibleAccessibility(); } private static boolean isCompatibleComponentAndSize( LayoutState layoutState, int componentId, int width, int height) { 
return layoutState != null && layoutState.isComponentId(componentId) && layoutState.isCompatibleSize(width, height) && layoutState.isCompatibleAccessibility(); } public synchronized boolean isReleased() { return mReleased; } synchronized String getReleasedComponent() { return mReleasedComponent; } public ComponentContext getContext() { return mContext; } private static class ComponentMainThreadHandler extends Handler { private ComponentMainThreadHandler() { super(Looper.getMainLooper()); } @Override public void handleMessage(Message msg) { switch (msg.what) { case MESSAGE_WHAT_BACKGROUND_LAYOUT_STATE_UPDATED: ComponentTree that = (ComponentTree) msg.obj; that.backgroundLayoutStateUpdated(); break; default: throw new IllegalArgumentException(); } } } protected LayoutState calculateLayoutState( @Nullable Object lock, ComponentContext context, Component root, int widthSpec, int heightSpec, boolean diffingEnabled, @Nullable DiffNode diffNode) { final ComponentContext contextWithStateHandler; synchronized (this) { contextWithStateHandler = new ComponentContext( context, StateHandler.acquireNewInstance(mStateHandler), new KeyHandler(mContext.getLogger())); } if (lock != null) { synchronized (lock) { return LayoutState.calculate( contextWithStateHandler, root, mId, widthSpec, heightSpec, diffingEnabled, diffNode, mCanPrefetchDisplayLists, mCanCacheDrawingDisplayLists, mShouldClipChildren); } } else { return LayoutState.calculate( contextWithStateHandler, root, mId, widthSpec, heightSpec, diffingEnabled, diffNode, mCanPrefetchDisplayLists, mCanCacheDrawingDisplayLists, mShouldClipChildren); } } /** * A default {@link LayoutHandler} that will use a {@link Handler} with a {@link Thread}'s * {@link Looper}. 
*/ private static class DefaultLayoutHandler extends Handler implements LayoutHandler { private DefaultLayoutHandler(Looper threadLooper) { super(threadLooper); } } private static class DefaultPreallocateMountContentHandler extends Handler implements LayoutHandler { private DefaultPreallocateMountContentHandler(Looper threadLooper) { super(threadLooper); } } public static int generateComponentTreeId() { return sIdGenerator.getAndIncrement(); } /** * A builder class that can be used to create a {@link ComponentTree}. */ public static class Builder { // required private ComponentContext context; private Component root; // optional private boolean incrementalMountEnabled = true; private boolean isLayoutDiffingEnabled = true; private LayoutHandler layoutThreadHandler; private LayoutHandler preAllocateMountContentHandler; private Object layoutLock; private StateHandler stateHandler; private RenderState previousRenderState; private boolean asyncStateUpdates = true; private int overrideComponentTreeId = -1; private boolean canPrefetchDisplayLists = false; private boolean canCacheDrawingDisplayLists = false; private boolean shouldClipChildren = true; private boolean hasMounted = false; private MeasureListener mMeasureListener; private boolean shouldPreallocatePerMountSpec; private boolean canPreallocateOnDefaultHandler; protected Builder() { } protected Builder(ComponentContext context, Component root) { init(context, root); } protected void init(ComponentContext context, Component root) { this.context = context; this.root = root; } protected void release() { context = null; root = null; incrementalMountEnabled = true; isLayoutDiffingEnabled = true; layoutThreadHandler = null; layoutLock = null; stateHandler = null; previousRenderState = null; asyncStateUpdates = true; overrideComponentTreeId = -1; canPrefetchDisplayLists = false; canCacheDrawingDisplayLists = false; shouldClipChildren = true; hasMounted = false; preAllocateMountContentHandler = null; } /** * Whether or 
not to enable the incremental mount optimization. True by default. * In order to use incremental mount you should disable mount diffing. * * @Deprecated We will remove this option soon, please consider turning it on (which is on by * default) */ public Builder incrementalMount(boolean isEnabled) { incrementalMountEnabled = isEnabled; return this; } /** * Whether or not to enable layout tree diffing. This will reduce the cost of * updates at the expense of using extra memory. True by default. * * @Deprecated We will remove this option soon, please consider turning it on (which is on by * default) */ public Builder layoutDiffing(boolean enabled) { isLayoutDiffingEnabled = enabled; return this; } /** * Specify the looper to use for running layouts on. Note that in rare cases * layout must run on the UI thread. For example, if you rotate the screen, * we must measure on the UI thread. If you don't specify a Looper here, the * Components default Looper will be used. */ public Builder layoutThreadLooper(Looper looper) { if (looper != null) { layoutThreadHandler = new DefaultLayoutHandler(looper); } return this; } /** Specify the handler for to preAllocateMountContent */ public Builder preAllocateMountContentHandler(LayoutHandler handler) { preAllocateMountContentHandler = handler; return this; } /** * If true, this ComponentTree will only preallocate mount specs that are enabled for * preallocation with {@link MountSpec#canPreallocate()}. If false, it preallocates all mount * content. */ public Builder shouldPreallocateMountContentPerMountSpec(boolean preallocatePerMountSpec) { shouldPreallocatePerMountSpec = preallocatePerMountSpec; return this; } /** * If true, mount content preallocation will use a default layout handler to preallocate mount * content on a background thread if no other layout handler is provided through {@link * ComponentTree.Builder#preAllocateMountContentHandler(LayoutHandler)}. 
*/ public Builder preallocateOnDefaultHandler(boolean preallocateOnDefaultHandler) { canPreallocateOnDefaultHandler = preallocateOnDefaultHandler; return this; } /** * Specify the looper to use for running layouts on. Note that in rare cases layout must run on * the UI thread. For example, if you rotate the screen, we must measure on the UI thread. If * you don't specify a Looper here, the Components default Looper will be used. */ public Builder layoutThreadHandler(LayoutHandler handler) { layoutThreadHandler = handler; return this; } /** * Specify a lock to be acquired during layout. This is an advanced feature * that can lead to deadlock if you don't know what you are doing. */ public Builder layoutLock(Object layoutLock) { this.layoutLock = layoutLock; return this; } /** * Specify an initial state handler object that the ComponentTree can use to set the current * values for states. */ public Builder stateHandler(StateHandler stateHandler) { this.stateHandler = stateHandler; return this; } /** * Specify an existing previous render state that the ComponentTree can use to set the current * values for providing previous versions of @Prop/@State variables. */ public Builder previousRenderState(RenderState previousRenderState) { this.previousRenderState = previousRenderState; return this; } /** * Specify whether the ComponentTree allows async state updates. This is enabled by default. */ public Builder asyncStateUpdates(boolean enabled) { this.asyncStateUpdates = enabled; return this; } /** * Gives the ability to override the auto-generated ComponentTree id: this is generally not * useful in the majority of circumstances, so don't use it unless you really know what you're * doing. */ public Builder overrideComponentTreeId(int overrideComponentTreeId) { this.overrideComponentTreeId = overrideComponentTreeId; return this; } /** * Specify whether the ComponentTree allows to prefetch display lists of its components * on idle time of UI thread. 
* * NOTE: To make display lists prefetching work, besides setting this flag * {@link com.facebook.litho.utils.DisplayListUtils#prefetchDisplayLists(View)} * should be called on scrollable surfaces like {@link android.support.v7.widget.RecyclerView} * during scrolling. */ public Builder canPrefetchDisplayLists(boolean canPrefetch) { this.canPrefetchDisplayLists = canPrefetch; return this; } /** * Specify whether the ComponentTree allows to cache display lists of the components after it * was first drawng. * * NOTE: To make display lists caching work, {@link #canPrefetchDisplayLists(boolean)} should * be set to true. */ public Builder canCacheDrawingDisplayLists(boolean canCacheDrawingDisplayLists) { this.canCacheDrawingDisplayLists = canCacheDrawingDisplayLists; return this; } public Builder shouldClipChildren(boolean shouldClipChildren) { this.shouldClipChildren = shouldClipChildren; return this; } /** * Sets whether the 'hasMounted' flag should be set on this ComponentTree (for use with appear * animations). */ public Builder hasMounted(boolean hasMounted) { this.hasMounted = hasMounted; return this; } public Builder measureListener(MeasureListener measureListener) { this.mMeasureListener = measureListener; return this; } /** Builds a {@link ComponentTree} using the parameters specified in this builder. */ public ComponentTree build() { ComponentTree componentTree = new ComponentTree(this); ComponentsPools.release(this); return componentTree; } } }
package org.fusesource.lmdbjni;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

import static org.fusesource.lmdbjni.JNI.mdb_strerror;

/**
 * A cursor over an LMDB database that reads/writes keys and values through
 * {@link DirectBuffer}s wrapping native memory. Write methods accumulate data at a
 * write index; positioning/put operations then use the written prefix of the buffer.
 */
public class BufferCursor implements AutoCloseable {
  private final Cursor cursor;
  private final ByteBuffer keyByteBuffer;
  private ByteBuffer valueByteBuffer;
  private final boolean isReadOnly;
  private DirectBuffer key;
  private DirectBuffer value;
  // Whether key/value currently point into database-owned (read-only) memory.
  // (Field names carry a historical "Datbase" typo; kept as-is — possibly
  // referenced elsewhere in this file.)
  private boolean keyDatbaseMemoryLocation = false;
  private boolean valDatbaseMemoryLocation = false;
  // Next write offsets within the key/value buffers; reset to 0 by put/overwrite/append.
  private int keyWriteIndex = 0;
  private int valWriteIndex = 0;
  // True only after a successful positioning operation; guards all read accessors.
  private boolean validPosition = false;

  BufferCursor(Cursor cursor, DirectBuffer key, DirectBuffer value) {
    this.cursor = cursor;
    this.isReadOnly = cursor.isReadOnly();
    // Both buffers must be backed by *direct* ByteBuffers: their native
    // addresses are handed to LMDB through JNI.
    if (key.byteBuffer() == null) {
      throw new IllegalArgumentException("No ByteBuffer available for key.");
    }
    if (!key.byteBuffer().isDirect()) {
      throw new IllegalArgumentException("ByteBuffer for key must be direct.");
    }
    this.keyByteBuffer = key.byteBuffer();
    this.key = key;
    if (value.byteBuffer() == null) {
      throw new IllegalArgumentException("No ByteBuffer available for value.");
    }
    if (!value.byteBuffer().isDirect()) {
      throw new IllegalArgumentException("ByteBuffer for value must be direct.");
    }
    this.value = value;
    this.valueByteBuffer = value.byteBuffer();
  }

  BufferCursor(Cursor cursor, int maxValueSize) {
    this(cursor, new DirectBuffer(), new DirectBuffer(ByteBuffer.allocateDirect(maxValueSize)));
  }

  /**
   * @return the write position of the key
   */
  public int keyWriteIndex() {
    return keyWriteIndex;
  }

  /**
   * @return the write position of the value
   */
  public int valWriteIndex() {
    return valWriteIndex;
  }

  /**
   * Position at the exact provided key.
   *
   * @return true if a key was found.
   */
  public boolean seekKey() {
    // If data has been written into the key buffer, restrict the lookup to the
    // written prefix rather than the buffer's full capacity.
    if (keyWriteIndex != 0) {
      this.key.wrap(this.key.addressOffset(), keyWriteIndex);
    }
    int rc = cursor.seekPosition(this.key, value, SeekOp.KEY);
    setDatabaseMemoryLocation(rc);
    return rc == 0;
  }

  /**
   * Position at first key greater than or equal to provided key.
   *
   * @return true if a key was found.
   */
  public boolean seekRange() {
    if (keyWriteIndex != 0) {
      this.key.wrap(this.key.addressOffset(), keyWriteIndex);
    }
    int rc = cursor.seekPosition(this.key, value, SeekOp.RANGE);
    setDatabaseMemoryLocation(rc);
    return rc == 0;
  }

  /**
   * Position at first key greater than or equal to specified key.
   *
   * @param key key to seek for.
   * @return true if a key was found.
   */
  public boolean seekRange(byte[] key) {
    keyWriteBytes(key);
    return seekRange();
  }

  /**
   * Position at first key greater than or equal to specified key.
   *
   * @param key key to seek for.
   * @return true if a key was found.
   * @deprecated use {@link BufferCursor#seekRange(byte[])}
   */
  @Deprecated
  public boolean seek(byte[] key) {
    return seekRange(key);
  }

  /**
   * Position at first key/data item.
   *
   * @return true if found
   */
  public boolean first() {
    int rc = cursor.position(key, value, GetOp.FIRST);
    setDatabaseMemoryLocation(rc);
    return rc == 0;
  }

  /**
   * Position at first data item of current key. Only for
   * {@link org.fusesource.lmdbjni.Constants#DUPSORT}.
   *
   * @return true if found
   */
  public boolean firstDup() {
    int rc = cursor.position(key, value, GetOp.FIRST_DUP);
    setDatabaseMemoryLocation(rc);
    return rc == 0;
  }

  /**
   * Position at last key/data item.
   *
   * @return true if found
   */
  public boolean last() {
    int rc = cursor.position(key, value, GetOp.LAST);
    setDatabaseMemoryLocation(rc);
    return rc == 0;
  }

  /**
   * Position at last data item of current key. Only for
   * {@link org.fusesource.lmdbjni.Constants#DUPSORT}.
   *
   * @return true if found
   */
  public boolean lastDup() {
    int rc = cursor.position(key, value, GetOp.LAST_DUP);
    setDatabaseMemoryLocation(rc);
    return rc == 0;
  }

  /**
   * Position at next data item.
   *
   * @return true if found
   */
  public boolean next() {
    int rc = cursor.position(key, value, GetOp.NEXT);
    setDatabaseMemoryLocation(rc);
    return rc == 0;
  }

  /**
   * Position at next data item of current key. Only for
   * {@link org.fusesource.lmdbjni.Constants#DUPSORT}.
   *
   * @return true if found
   */
  public boolean nextDup() {
    int rc = cursor.position(key, value, GetOp.NEXT_DUP);
    setDatabaseMemoryLocation(rc);
    return rc == 0;
  }

  /**
   * Position at previous data item.
   *
   * @return true if found
   */
  public boolean prev() {
    int rc = cursor.position(key, value, GetOp.PREV);
    setDatabaseMemoryLocation(rc);
    return rc == 0;
  }

  /**
   * Position at previous data item of current key. Only for
   * {@link org.fusesource.lmdbjni.Constants#DUPSORT}.
   *
   * @return true if found
   */
  public boolean prevDup() {
    int rc = cursor.position(key, value, GetOp.PREV_DUP);
    setDatabaseMemoryLocation(rc);
    return rc == 0;
  }

  /**
   * <p>
   * Delete key/data pair at current cursor position.
   * </p>
   */
  public void delete() {
    cursor.delete();
  }

  /**
   * Close the cursor and the transaction.
   */
  @Override
  public void close() {
    cursor.close();
  }

  /**
   * Stores the written key/data pair in the database only if the key does not
   * already exist (uses {@link Constants#NOOVERWRITE} — it does NOT replace).
   *
   * @return true if stored, false if the key already existed.
   */
  public boolean put() {
    // Wrap only the written prefixes; an index of 0 means the whole buffer
    // (as positioned by a previous seek) is used as-is.
    DirectBuffer k = (keyWriteIndex != 0) ?
        new DirectBuffer(key.addressOffset(), keyWriteIndex) : key;
    DirectBuffer v = (valWriteIndex != 0) ?
        new DirectBuffer(value.addressOffset(), valWriteIndex) : value;
    keyWriteIndex = 0;
    valWriteIndex = 0;
    int rc = cursor.put(k, v, Constants.NOOVERWRITE);
    if (rc == 0) {
      return true;
    } else if (rc == LMDBException.KEYEXIST) {
      return false;
    } else {
      String msg = Util.string(mdb_strerror(rc));
      throw new LMDBException(msg, rc);
    }
  }

  /**
   * Stores key/data pairs in the database replacing any
   * previously existing key. Also used for adding duplicates.
   */
  public boolean overwrite() {
    DirectBuffer k = (keyWriteIndex != 0) ?
        new DirectBuffer(key.addressOffset(), keyWriteIndex) : key;
    DirectBuffer v = (valWriteIndex != 0) ?
        new DirectBuffer(value.addressOffset(), valWriteIndex) : value;
    keyWriteIndex = 0;
    valWriteIndex = 0;
    int rc = cursor.put(k, v, 0);
    if (rc == 0) {
      return true;
    } else if (rc == LMDBException.KEYEXIST) {
      return false;
    } else {
      String msg = Util.string(mdb_strerror(rc));
      throw new LMDBException(msg, rc);
    }
  }

  /**
   * Append the given key/data pair to the end of the database.
   * No key comparisons are performed. This option allows
   * fast bulk loading when keys are already known to be in the
   * correct order. Loading unsorted keys with this flag will cause
   * data corruption.
   */
  public void append() {
    DirectBuffer k = (keyWriteIndex != 0) ?
        new DirectBuffer(key.addressOffset(), keyWriteIndex) : key;
    DirectBuffer v = (valWriteIndex != 0) ?
        new DirectBuffer(value.addressOffset(), valWriteIndex) : value;
    keyWriteIndex = 0;
    valWriteIndex = 0;
    int rc = cursor.put(k, v, Constants.APPEND);
    if (rc != 0) {
      String msg = Util.string(mdb_strerror(rc));
      throw new LMDBException(msg, rc);
    }
  }

  /**
   * Write data to key at current cursor position and
   * move write index forward.
   *
   * NOTE(review): unlike the valWrite* methods, key writers perform no read-only
   * check and no capacity check (there is no ensureKeyWritableBytes call here) —
   * confirm the key buffer is always caller-sized appropriately.
   *
   * @param data boolean
   * @return this
   */
  public BufferCursor keyWriteBoolean(boolean data) {
    setSafeKeyMemoryLocation();
    this.key.putByte(keyWriteIndex, data ? (byte)1 : (byte)0);
    keyWriteIndex += 1;
    return this;
  }

  /**
   * Write data to key at current cursor position and
   * move write index forward.
   *
   * @param data byte value (passed as int; only the low 8 bits are written)
   * @return this
   */
  public BufferCursor keyWriteByte(int data) {
    setSafeKeyMemoryLocation();
    this.key.putByte(keyWriteIndex, (byte) data);
    keyWriteIndex += 1;
    return this;
  }

  /**
   * Write data to key at current cursor position and
   * move write index forward.
   *
   * @param data int
   * @return this
   */
  public BufferCursor keyWriteInt(int data) {
    setSafeKeyMemoryLocation();
    this.key.putInt(keyWriteIndex, data, ByteOrder.BIG_ENDIAN);
    keyWriteIndex += 4;
    return this;
  }

  /**
   * Write data to key at current cursor position and
   * move write index forward.
* * @param data long * @return this */ public BufferCursor keyWriteLong(long data) { setSafeKeyMemoryLocation(); this.key.putLong(keyWriteIndex, data, ByteOrder.BIG_ENDIAN); keyWriteIndex += 8; return this; } /** * Write data to key at current cursor position and * move write index forward. * * @param data float * @return this */ public BufferCursor keyWriteFloat(float data) { setSafeValMemoryLocation(); this.key.putFloat(keyWriteIndex, data, ByteOrder.BIG_ENDIAN); keyWriteIndex += 4; return this; } /** * Write data to key at current cursor position and * move write index forward. * * @param data double * @return this */ public BufferCursor keyWriteDouble(double data) { setSafeValMemoryLocation(); this.key.putDouble(keyWriteIndex, data, ByteOrder.BIG_ENDIAN); keyWriteIndex += 8; return this; } /** * Write data to key at current cursor position and * move write index forward. * * @param data string * @return this */ public BufferCursor keyWriteUtf8(ByteString data) { setSafeKeyMemoryLocation(); this.key.putString(keyWriteIndex, data); keyWriteIndex += data.size() + 1; return this; } /** * Write data to key at current cursor position and * move write index forward. * * @param data string * @return this */ public BufferCursor keyWriteUtf8(String data) { setSafeKeyMemoryLocation(); ByteString bytes = new ByteString(data); this.key.putString(keyWriteIndex, bytes); keyWriteIndex += bytes.size() + 1; return this; } /** * Write data to key at current cursor position and * move write index forward. * * @param data byte array * @return this */ public BufferCursor keyWriteBytes(byte[] data) { setSafeKeyMemoryLocation(); this.key.putBytes(keyWriteIndex, data); keyWriteIndex += data.length; return this; } /** * Write data to key at current cursor position and * move write index forward. 
   * @param data byte array
   * @param offset the start offset in the data
   * @param length the number of bytes to write
   * @return this
   */
  public BufferCursor keyWriteBytes(byte[] data, int offset, int length) {
    setSafeKeyMemoryLocation();
    this.key.putBytes(keyWriteIndex, data, offset, length);
    keyWriteIndex += length;
    return this;
  }

  /**
   * Write data to key at current cursor position and
   * move write index forward.
   *
   * @param buffer buffer
   * @param capacity capacity
   * @return this
   */
  public BufferCursor keyWrite(DirectBuffer buffer, int capacity) {
    setSafeKeyMemoryLocation();
    this.key.putBytes(keyWriteIndex, buffer, 0, capacity);
    keyWriteIndex += capacity;
    return this;
  }

  /**
   * @see org.fusesource.lmdbjni.BufferCursor#keyWrite(DirectBuffer, int)
   */
  public BufferCursor keyWrite(DirectBuffer buffer) {
    keyWrite(buffer, buffer.capacity());
    return this;
  }

  /**
   * Get key length at current cursor position.
   *
   * @return length of key or <code>0</code> if cursor is in an unpositioned state.
   */
  public int keyLength() {
    if (validPosition) {
      return this.key.capacity();
    } else {
      return 0;
    }
  }

  /**
   * Get data from key at current cursor position.
   *
   * @param pos byte position
   * @return boolean
   */
  public boolean keyBoolean(int pos) {
    checkForValidPosition();
    return this.key.getByte(pos) == (byte)1;
  }

  /**
   * Get data from key at current cursor position.
   *
   * @param pos byte position
   * @return byte
   */
  public byte keyByte(int pos) {
    checkForValidPosition();
    return this.key.getByte(pos);
  }

  /**
   * Get data from key at current cursor position.
   *
   * @param pos byte position
   * @return int
   */
  public int keyInt(int pos) {
    checkForValidPosition();
    return this.key.getInt(pos, ByteOrder.BIG_ENDIAN);
  }

  /**
   * Get data from key at current cursor position.
   *
   * @param pos byte position
   * @return long
   */
  public long keyLong(int pos) {
    checkForValidPosition();
    return this.key.getLong(pos, ByteOrder.BIG_ENDIAN);
  }

  /**
   * Get data from key at current cursor position.
   *
   * @param pos byte position
   * @return float
   */
  public float keyFloat(int pos) {
    checkForValidPosition();
    return this.key.getFloat(pos, ByteOrder.BIG_ENDIAN);
  }

  /**
   * Get data from key at current cursor position.
   *
   * @param pos byte position
   * @return double
   */
  public double keyDouble(int pos) {
    checkForValidPosition();
    return this.key.getDouble(pos, ByteOrder.BIG_ENDIAN);
  }

  /**
   * Get string data from key (ending with NULL byte)
   * at current cursor position.
   *
   * @param pos byte position
   * @return ByteString
   */
  public ByteString keyUtf8(int pos) {
    checkForValidPosition();
    return this.key.getString(pos);
  }

  /**
   * Get data from key at current cursor position.
   *
   * @param pos byte position
   * @param length number of bytes to copy
   * @return byte array
   */
  public byte[] keyBytes(int pos, int length) {
    checkForValidPosition();
    byte[] k = new byte[length];
    key.getBytes(pos, k);
    return k;
  }

  /**
   * @return copy of key data
   */
  public byte[] keyBytes() {
    checkForValidPosition();
    byte[] k = new byte[key.capacity()];
    key.getBytes(0, k);
    return k;
  }

  /**
   * Here be dragons, use with caution!
   *
   * @return underlying buffer
   */
  public DirectBuffer keyBuffer() {
    return key;
  }

  /**
   * @return the key direct buffer at current position.
   */
  public DirectBuffer keyDirectBuffer() {
    checkForValidPosition();
    return key;
  }

  /**
   * Write data to value at current cursor position and
   * move write index forward.
   *
   * @param data boolean
   * @return this
   */
  public BufferCursor valWriteBoolean(boolean data) {
    if (isReadOnly) {
      throw new LMDBException("Read only transaction", LMDBException.EACCES);
    }
    setSafeValMemoryLocation();
    // Grow the value buffer if needed before writing (key writers have no such check).
    ensureValueWritableBytes(1);
    this.value.putByte(valWriteIndex, data ? (byte)1 : (byte)0);
    valWriteIndex += 1;
    return this;
  }

  /**
   * Write data to value at current cursor position and
   * move write index forward.
   *
   * @param data byte value (passed as int; only the low 8 bits are written)
   * @return this
   */
  public BufferCursor valWriteByte(int data) {
    if (isReadOnly) {
      throw new LMDBException("Read only transaction", LMDBException.EACCES);
    }
    setSafeValMemoryLocation();
    ensureValueWritableBytes(1);
    this.value.putByte(valWriteIndex, (byte) data);
    valWriteIndex += 1;
    return this;
  }

  /**
   * Write data to value at current cursor position and
   * move write index forward.
   *
   * @param data int
   * @return this
   */
  public BufferCursor valWriteInt(int data) {
    if (isReadOnly) {
      throw new LMDBException("Read only transaction", LMDBException.EACCES);
    }
    setSafeValMemoryLocation();
    ensureValueWritableBytes(4);
    this.value.putInt(valWriteIndex, data, ByteOrder.BIG_ENDIAN);
    valWriteIndex += 4;
    return this;
  }

  /**
   * Write data to value at current cursor position and
   * move write index forward.
   *
   * @param data long
   * @return this
   */
  public BufferCursor valWriteLong(long data) {
    if (isReadOnly) {
      throw new LMDBException("Read only transaction", LMDBException.EACCES);
    }
    setSafeValMemoryLocation();
    ensureValueWritableBytes(8);
    this.value.putLong(valWriteIndex, data, ByteOrder.BIG_ENDIAN);
    valWriteIndex += 8;
    return this;
  }

  /**
   * Write data to value at current cursor position and
   * move write index forward.
   *
   * @param data float
   * @return this
   */
  public BufferCursor valWriteFloat(float data) {
    if (isReadOnly) {
      throw new LMDBException("Read only transaction", LMDBException.EACCES);
    }
    setSafeValMemoryLocation();
    ensureValueWritableBytes(4);
    this.value.putFloat(valWriteIndex, data, ByteOrder.BIG_ENDIAN);
    valWriteIndex += 4;
    return this;
  }

  /**
   * Write data to value at current cursor position and
   * move write index forward.
   *
   * @param data double
   * @return this
   */
  public BufferCursor valWriteDouble(double data) {
    if (isReadOnly) {
      throw new LMDBException("Read only transaction", LMDBException.EACCES);
    }
    setSafeValMemoryLocation();
    ensureValueWritableBytes(8);
    this.value.putDouble(valWriteIndex, data, ByteOrder.BIG_ENDIAN);
    valWriteIndex += 8;
    return this;
  }

  /**
   * Write data to value at current cursor position and
   * move write index forward.
   *
   * @param data string (written with a trailing NULL byte)
   * @return this
   */
  public BufferCursor valWriteUtf8(String data) {
    if (isReadOnly) {
      throw new LMDBException("Read only transaction", LMDBException.EACCES);
    }
    setSafeValMemoryLocation();
    ByteString bytes = new ByteString(data);
    ensureValueWritableBytes(bytes.size() + 1);
    this.value.putString(valWriteIndex, bytes);
    valWriteIndex += bytes.size() + 1;
    return this;
  }

  /**
   * Write data to value at current cursor position and
   * move write index forward.
   *
   * @param data string (written with a trailing NULL byte)
   * @return this
   */
  public BufferCursor valWriteUtf8(ByteString data) {
    if (isReadOnly) {
      throw new LMDBException("Read only transaction", LMDBException.EACCES);
    }
    setSafeValMemoryLocation();
    ensureValueWritableBytes(data.size() + 1);
    this.value.putString(valWriteIndex, data);
    valWriteIndex += data.size() + 1;
    return this;
  }

  /**
   * Write data to value at current cursor position and
   * move write index forward.
   *
   * @param data byte array
   * @return this
   */
  public BufferCursor valWriteBytes(byte[] data) {
    if (isReadOnly) {
      throw new LMDBException("Read only transaction", LMDBException.EACCES);
    }
    setSafeValMemoryLocation();
    ensureValueWritableBytes(data.length);
    this.value.putBytes(valWriteIndex, data);
    valWriteIndex += data.length;
    return this;
  }

  /**
   * Write data to value at current cursor position and
   * move write index forward.
   *
   * @param data byte array
   * @param offset the start offset in the data
   * @param length the number of bytes to write
   * @return this
   */
  public BufferCursor valWriteBytes(byte[] data, int offset, int length) {
    if (isReadOnly) {
      throw new LMDBException("Read only transaction", LMDBException.EACCES);
    }
    setSafeValMemoryLocation();
    ensureValueWritableBytes(length);
    this.value.putBytes(valWriteIndex, data, offset, length);
    valWriteIndex += length;
    return this;
  }

  /**
   * Write data to value at current cursor position and
   * move write index forward.
   *
   * @param buffer buffer
   * @param capacity capacity
   * @return this
   */
  public BufferCursor valWrite(DirectBuffer buffer, int capacity) {
    if (isReadOnly) {
      throw new LMDBException("Read only transaction", LMDBException.EACCES);
    }
    setSafeValMemoryLocation();
    ensureValueWritableBytes(capacity);
    this.value.putBytes(valWriteIndex, buffer, 0, capacity);
    valWriteIndex += capacity;
    return this;
  }

  /**
   * @see org.fusesource.lmdbjni.BufferCursor#valWrite(DirectBuffer, int)
   */
  public BufferCursor valWrite(DirectBuffer buffer) {
    valWrite(buffer, buffer.capacity());
    return this;
  }

  /**
   * Get value length at current cursor position.
   *
   * @return length of value or <code>0</code> if cursor is in an unpositioned state.
   */
  public int valLength() {
    if (validPosition) {
      return this.value.capacity();
    } else {
      return 0;
    }
  }

  /**
   * Get data from value at current cursor position.
   *
   * @param pos byte position
   * @return boolean
   */
  public boolean valBoolean(int pos) {
    checkForValidPosition();
    return this.value.getByte(pos) == (byte)1;
  }

  /**
   * Get data from value at current cursor position.
   *
   * @param pos byte position
   * @return byte
   */
  public byte valByte(int pos) {
    checkForValidPosition();
    return this.value.getByte(pos);
  }

  /** Throws if no positioning operation has succeeded yet. */
  private void checkForValidPosition() {
    if (!validPosition) {
      throw new IndexOutOfBoundsException("Cursor is in an unpositioned state");
    }
  }

  /**
   * Get data from value at current cursor position.
* * @param pos byte position * @return int */ public int valInt(int pos) { checkForValidPosition(); return this.value.getInt(pos, ByteOrder.BIG_ENDIAN); } /** * Get data from value at current cursor position. * * @param pos byte position * @return long */ public long valLong(int pos) { checkForValidPosition(); return this.value.getLong(pos, ByteOrder.BIG_ENDIAN); } /** * Get data from key at current cursor position. * * @param pos byte position * @return byte array */ public byte[] valBytes(int pos, int length) { checkForValidPosition(); byte[] v = new byte[length]; value.getBytes(pos, v); return v; } /** * @return copy of value data */ public byte[] valBytes() { checkForValidPosition(); byte[] v = new byte[value.capacity()]; value.getBytes(0, v); return v; } /** * Here be dragons, use with caution! * * @return underlying buffer */ public DirectBuffer valBuffer() { return value; } /** * Get data from value at current cursor position. * * @param pos byte position * @return float */ public float valFloat(int pos) { checkForValidPosition(); return this.value.getFloat(pos, ByteOrder.BIG_ENDIAN); } /** * Get data from value at current cursor position. * * @param pos byte position * @return double */ public double valDouble(int pos) { checkForValidPosition(); return this.value.getDouble(pos, ByteOrder.BIG_ENDIAN); } /** * Get string data from key (ending with NULL byte) * at current cursor position. * * @param pos byte position * @return String */ public ByteString valUtf8(int pos) { checkForValidPosition(); return this.value.getString(pos); } /** * @return the direct buffer at the current position. */ public DirectBuffer valDirectBuffer() { checkForValidPosition(); return this.value; } /** * Prepare cursor for write. * <p/> * Only needed by users that manage DirectBuffer on their own. 
*/ public void setWriteMode() { setSafeKeyMemoryLocation(); setSafeValMemoryLocation(); } private void setSafeKeyMemoryLocation() { if (keyDatbaseMemoryLocation) { this.key.wrap(keyByteBuffer); keyDatbaseMemoryLocation = false; } } private void setSafeValMemoryLocation() { if (valDatbaseMemoryLocation) { this.value.wrap(valueByteBuffer); valDatbaseMemoryLocation = false; } } private void setDatabaseMemoryLocation(int rc) { validPosition = rc == 0; this.valDatbaseMemoryLocation = true; this.keyDatbaseMemoryLocation = true; keyWriteIndex = 0; valWriteIndex = 0; } private void ensureValueWritableBytes(int minWritableBytes) { if (minWritableBytes <= (valueByteBuffer.capacity() - valWriteIndex)) { return; } int newCapacity; if (valueByteBuffer.capacity() == 0) { newCapacity = 1; } else { newCapacity = valueByteBuffer.capacity(); } int minNewCapacity = valWriteIndex + minWritableBytes; while (newCapacity < minNewCapacity) { newCapacity <<= 1; // exceeded maximum size of 2gb, then newCapacity == 0 if (newCapacity == 0) { throw new IllegalStateException("Maximum size of 2gb exceeded"); } } ByteBuffer newBuffer = ByteBuffer.allocateDirect(newCapacity).order(valueByteBuffer.order()); valueByteBuffer.position(0); newBuffer.put(valueByteBuffer); valueByteBuffer = newBuffer; this.value.wrap(valueByteBuffer); } }
package org.jgroups.protocols.pbcast;

import org.jgroups.*;
import org.jgroups.annotations.*;
import org.jgroups.conf.PropertyConverters;
import org.jgroups.stack.*;
import org.jgroups.util.*;

import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;


/**
 * Negative AcKnowledgement layer (NAKs). Messages are assigned a monotonically
 * increasing sequence number (seqno). Receivers deliver messages ordered
 * according to seqno and request retransmission of missing messages.<br/>
 * Retransmit requests are usually sent to the original sender of a message, but
 * this can be changed by xmit_from_random_member (send to random member) or
 * use_mcast_xmit_req (send to everyone). Responses can also be sent to everyone
 * instead of the requester by setting use_mcast_xmit to true.
 *
 * @author Bela Ban
 * @version $Id: NAKACK.java,v 1.216 2009/04/28 07:53:46 belaban Exp $
 */
@MBean(description="Reliable transmission multipoint FIFO protocol")
@DeprecatedProperty(names={"max_xmit_size", "eager_lock_release"})
public class NAKACK extends Protocol implements Retransmitter.RetransmitCommand, NakReceiverWindow.Listener {

    private static final long INITIAL_SEQNO=0;
    private static final String name="NAKACK";

    /**
     * the weight with which we take the previous smoothed average into account,
     * WEIGHT should be >0 and <= 1
     */
    private static final double WEIGHT=0.9;

    private static final double INITIAL_SMOOTHED_AVG=30.0;

    private static final int NUM_REBROADCAST_MSGS=3;

    @Property(name="retransmit_timeout", converter=PropertyConverters.LongArray.class, description="Timeout before requesting retransmissions. Default is 600, 1200, 2400, 4800")
    private long[] retransmit_timeouts= { 600, 1200, 2400, 4800 }; // time(s) to wait before requesting retransmission

    @Property(description="If true, retransmissions stats will be captured. Default is false")
    boolean enable_xmit_time_stats=false;

    @ManagedAttribute(description="Garbage collection lag", writable=true)
    @Property(description="Garbage collection lag. Default is 20 msec")
    private int gc_lag=20; // number of msgs garbage collection lags behind

    /**
     * Retransmit messages using multicast rather than unicast. This has the
     * advantage that, if many receivers lost a message, the sender only
     * retransmits once.
     */
    @Property(description="Retransmit messages using multicast rather than unicast. Default is true")
    @ManagedAttribute(description="Retransmit messages using multicast rather than unicast", writable=true)
    private boolean use_mcast_xmit=true;

    /**
     * Use a multicast to request retransmission of missing messages. This may
     * be costly as every member in the cluster will send a response
     */
    @Property(description="Use a multicast to request retransmission of missing messages. Default is false")
    private boolean use_mcast_xmit_req=false;

    /**
     * Ask a random member for retransmission of a missing message. If set to
     * true, discard_delivered_msgs will be set to false
     */
    @Property(description="Ask a random member for retransmission of a missing message. Default is false")
    @ManagedAttribute(description="Ask a random member for retransmission of a missing message", writable=true)
    private boolean xmit_from_random_member=false;

    /**
     * The first value (in milliseconds) to use in the exponential backoff
     * retransmission mechanism. Only enabled if the value is > 0
     */
    @Property(description="The first value (in milliseconds) to use in the exponential backoff. Enabled if greater than 0. Default is 0")
    private long exponential_backoff=0;

    /**
     * If enabled, we use statistics gathered from actual retransmission times
     * to compute the new retransmission times
     */
    @Property(description="Use statistics gathered from actual retransmission times to compute new retransmission times. Default is false")
    private boolean use_stats_for_retransmission=false;

    /**
     * Messages that have been received in order are sent up the stack (=
     * delivered to the application). Delivered messages are removed from
     * NakReceiverWindow.xmit_table and moved to
     * NakReceiverWindow.delivered_msgs, where they are later garbage collected
     * (by STABLE). Since we do retransmits only from sent messages, never
     * received or delivered messages, we can turn the moving to delivered_msgs
     * off, so we don't keep the message around, and don't need to wait for
     * garbage collection to remove them.
     */
    @Property(description="Should messages delivered to application be discarded. Default is false")
    @ManagedAttribute(description="Discard delivered messages", writable=true)
    private boolean discard_delivered_msgs=false;

    // NOTE(review): the description says "Default is true" but the field
    // initializes to false; this property is deprecated (see
    // @DeprecatedProperty above) — verify before relying on it.
    @Property(description="See http://jira.jboss.com/jira/browse/JGRP-656. Default is true")
    private boolean eager_lock_release=false;

    /**
     * If value is > 0, the retransmit buffer is bounded: only the
     * max_xmit_buf_size latest messages are kept, older ones are discarded when
     * the buffer size is exceeded. A value <= 0 means unbounded buffers
     */
    @Property(description="If value is > 0, the retransmit buffer is bounded. If value <= 0 unbounded buffers are used. Default is 0")
    @ManagedAttribute(description="If value is > 0, the retransmit buffer is bounded. If value <= 0 unbounded buffers are used", writable=true)
    private int max_xmit_buf_size=0;

    @Property(description="Size of retransmission history. Default is 50 entries")
    private int xmit_history_max_size=50;

    @Property(description="Timeout to rebroadcast messages. Default is 2000 msec")
    private long max_rebroadcast_timeout=2000;

    /**
     * When not finding a message on an XMIT request, include the last N
     * stability messages in the error message
     */
    @Property(description="Should stability history be printed if we fail in retransmission. Default is false")
    protected boolean print_stability_history_on_failed_xmit=false;

    @Property(description="Size of send and receive history. Default is 20 entries")
    private int stats_list_size=20;

    @ManagedAttribute(description="Number of retransmit requests received")
    private long xmit_reqs_received;
    @ManagedAttribute(description="Number of retransmit requests sent")
    private long xmit_reqs_sent;
    @ManagedAttribute(description="Number of retransmit responses received")
    private long xmit_rsps_received;
    @ManagedAttribute(description="Number of retransmit responses sent")
    private long xmit_rsps_sent;
    @ManagedAttribute(description="Number of missing messages received")
    private long missing_msgs_received;

    /**
     * Maintains retransmission related data across a time. Only used if enable_xmit_time_stats is set to true.
     * At program termination, accumulated data is dumped to a file named by the address of the member.
     * Careful, don't enable this in production as the data in this hashmap are
     * never reaped ! Really only meant for diagnostics !
     */
    private ConcurrentMap<Long,XmitTimeStat> xmit_time_stats=null;

    private long xmit_time_stats_start;

    /**
     * BoundedList<MissingMessage>. Keeps track of the last stats_list_size
     * XMIT requests
     */
    private BoundedList<MissingMessage> receive_history;

    /**
     * BoundedList<XmitRequest>. Keeps track of the last stats_list_size
     * missing messages received
     */
    private BoundedList<XmitRequest> send_history;

    /** Captures stats on XMIT_REQS, XMIT_RSPS per sender */
    private ConcurrentMap<Address,StatsEntry> sent=new ConcurrentHashMap<Address,StatsEntry>();

    /** Captures stats on XMIT_REQS, XMIT_RSPS per receiver */
    private ConcurrentMap<Address,StatsEntry> received=new ConcurrentHashMap<Address,StatsEntry>();

    /**
     * Per-sender map of seqnos and timestamps, to keep track of avg times for retransmission of messages
     */
    private final ConcurrentMap<Address,ConcurrentMap<Long,Long>> xmit_stats=new ConcurrentHashMap<Address,ConcurrentMap<Long,Long>>();

    /**
     * Maintains a list of the last N retransmission times (duration it took to
     * retransmit a message) for all members
     */
    private final ConcurrentMap<Address,BoundedList<Long>> xmit_times_history=new ConcurrentHashMap<Address,BoundedList<Long>>();

    /**
     * Maintains a smoothed average of the retransmission times per sender,
     * these are the actual values that are used for new retransmission requests
     */
    private final Map<Address,Double> smoothed_avg_xmit_times=new HashMap<Address,Double>();

    private boolean is_server=false;
    private Address local_addr=null;
    private final List<Address> members=new CopyOnWriteArrayList<Address>();
    private View view;

    @GuardedBy("seqno_lock")
    private long seqno=0; // current message sequence number (starts with 1)
    private final Lock seqno_lock=new ReentrantLock();

    /** Map to store sent and received messages (keyed by sender) */
    private final ConcurrentMap<Address,NakReceiverWindow> xmit_table=new ConcurrentHashMap<Address,NakReceiverWindow>(11);

    private volatile boolean leaving=false;
    private volatile boolean started=false;
    private TimeScheduler timer=null;

    /**
     * Keeps track of OOB messages sent by myself, needed by
     * {@link #handleMessage(org.jgroups.Message, NakAckHeader)}
     */
    private final Set<Long> oob_loopback_msgs=Collections.synchronizedSet(new HashSet<Long>());

    private final Lock rebroadcast_lock=new ReentrantLock();
    private final Condition rebroadcast_done=rebroadcast_lock.newCondition();

    // set during processing of a rebroadcast event
    private volatile boolean rebroadcasting=false;

    private final Lock rebroadcast_digest_lock=new ReentrantLock();
    @GuardedBy("rebroadcast_digest_lock")
    private Digest rebroadcast_digest=null;

    /** BoundedList<Digest>, keeps the last 10 stability messages */
    protected final BoundedList<Digest> stability_msgs=new BoundedList<Digest>(10);

    /** Keeps a bounded list of the last N merges */
    protected final BoundedList<String> merge_history=new BoundedList<String>(10);

    /** If true, logs messages discarded because received from other members */
    @ManagedAttribute(description="If true, logs messages discarded because received from other members", writable=true)
    private boolean log_discard_msgs=true;

    /** <em>Regular</em> messages which have been added, but not removed */
    private final AtomicInteger undelivered_msgs=new AtomicInteger(0);


    public NAKACK() {
    }

    public String getName() {
        return name;
    }

    @ManagedAttribute
    public int getUndeliveredMessages() {
        return undelivered_msgs.get();
    }

    public long getXmitRequestsReceived() {return xmit_reqs_received;}
    public long getXmitRequestsSent() {return xmit_reqs_sent;}
    public long getXmitResponsesReceived() {return xmit_rsps_received;}
    public long getXmitResponsesSent() {return xmit_rsps_sent;}
    public long getMissingMessagesReceived() {return missing_msgs_received;}

    /** Sums the pending retransmission requests over all receiver windows. */
    @ManagedAttribute
    public int getPendingRetransmissionRequests() {
        int num=0;
        for(NakReceiverWindow win: xmit_table.values()) {
            num+=win.getPendingXmits();
        }
        return num;
    }

    @ManagedAttribute
    public int getXmitTableSize() {
        int num=0;
        for(NakReceiverWindow win: xmit_table.values()) {
            num+=win.size();
        }
        return num;
    }

    public int getReceivedTableSize() {
        return getPendingRetransmissionRequests();
    }

    /** Resets all retransmission counters and bounded histories. */
    public void resetStats() {
        xmit_reqs_received=xmit_reqs_sent=xmit_rsps_received=xmit_rsps_sent=missing_msgs_received=0;
        sent.clear();
        received.clear();
        if(receive_history !=null)
            receive_history.clear();
        if(send_history != null)
            send_history.clear();
        stability_msgs.clear();
        merge_history.clear();
    }

    public void init() throws Exception {
        if(enable_xmit_time_stats) {
            if(log.isWarnEnabled())
                log.warn("enable_xmit_time_stats is experimental, and may be removed in any release");
            xmit_time_stats=new ConcurrentHashMap<Long,XmitTimeStat>();
            xmit_time_stats_start=System.currentTimeMillis();
        }

        // xmit_from_random_member requires keeping delivered messages around,
        // so it forces discard_delivered_msgs off
        if(xmit_from_random_member) {
            if(discard_delivered_msgs) {
                discard_delivered_msgs=false;
                log.warn("xmit_from_random_member set to true: changed discard_delivered_msgs to false");
            }
        }

        if(stats) {
            send_history=new BoundedList<XmitRequest>(stats_list_size);
            receive_history=new BoundedList<MissingMessage>(stats_list_size);
        }
    }

    public int getGcLag() {
        return gc_lag;
    }

    public void setGcLag(int gc_lag) {
        this.gc_lag=gc_lag;
    }

    public boolean isUseMcastXmit() {
        return use_mcast_xmit;
    }

    public void setUseMcastXmit(boolean use_mcast_xmit) {
        this.use_mcast_xmit=use_mcast_xmit;
    }

    public boolean isXmitFromRandomMember() {
        return xmit_from_random_member;
    }

    public void setXmitFromRandomMember(boolean xmit_from_random_member) {
        this.xmit_from_random_member=xmit_from_random_member;
    }

    public boolean isDiscardDeliveredMsgs() {
        return discard_delivered_msgs;
    }

    /** Propagates a change of discard_delivered_msgs to all existing windows. */
    public void setDiscardDeliveredMsgs(boolean discard_delivered_msgs) {
        boolean old=this.discard_delivered_msgs;
        this.discard_delivered_msgs=discard_delivered_msgs;
        if(old != this.discard_delivered_msgs) {
            for(NakReceiverWindow win: xmit_table.values()) {
                win.setDiscardDeliveredMessages(this.discard_delivered_msgs);
            }
        }
    }

    public int getMaxXmitBufSize() {
        return max_xmit_buf_size;
    }

    public void setMaxXmitBufSize(int max_xmit_buf_size) {
        this.max_xmit_buf_size=max_xmit_buf_size;
    }

    /**
     *
     * @return
     * @deprecated removed in 2.6
     */
    public long getMaxXmitSize() {
        return -1;
    }

    /**
     *
     * @param max_xmit_size
     * @deprecated removed in 2.6
     */
    public void setMaxXmitSize(long max_xmit_size) {
    }

    public void setLogDiscardMessages(boolean flag) {
        log_discard_msgs=flag;
    }

    public void setLogDiscardMsgs(boolean flag) {
        setLogDiscardMessages(flag);
    }

    public boolean getLogDiscardMessages() {
        return log_discard_msgs;
    }

    public Map<String,Object> dumpStats() {
        Map<String,Object> retval=super.dumpStats();
        retval.put("msgs", printMessages());
        return retval;
    }

    /** Renders sent/received stats and bounded histories for diagnostics. */
    public String printStats() {
        Map.Entry entry;
        Object key, val;
        StringBuilder sb=new StringBuilder();
        sb.append("sent:\n");
        for(Iterator it=sent.entrySet().iterator(); it.hasNext();) {
            entry=(Map.Entry)it.next();
            key=entry.getKey();
            if(key == null || key == Global.NULL)
                key="<mcast dest>";
            val=entry.getValue();
            sb.append(key).append(": ").append(val).append("\n");
        }
        sb.append("\nreceived:\n");
        for(Iterator it=received.entrySet().iterator(); it.hasNext();) {
            entry=(Map.Entry)it.next();
            key=entry.getKey();
            if(key == null || key == Global.NULL)
                key="<mcast dest>";
            val=entry.getValue();
            sb.append(key).append(": ").append(val).append("\n");
        }

        sb.append("\nXMIT_REQS sent:\n");
        for(XmitRequest tmp: send_history) {
            sb.append(tmp).append("\n");
        }

        sb.append("\nMissing messages received\n");
        for(MissingMessage missing: receive_history) {
            sb.append(missing).append("\n");
        }

        sb.append("\nStability messages received\n");
        sb.append(printStabilityMessages()).append("\n");

        return sb.toString();
    }

    @ManagedOperation(description="TODO")
    public String printStabilityMessages() {
        StringBuilder sb=new StringBuilder();
        sb.append(Util.printListWithDelimiter(stability_msgs, "\n"));
        return sb.toString();
    }

    public
String printStabilityHistory() {
        StringBuilder sb=new StringBuilder();
        int i=1;
        for(Digest digest: stability_msgs) {
            sb.append(i++).append(": ").append(digest).append("\n");
        }
        return sb.toString();
    }

    @ManagedOperation(description="Keeps information about the last N merges")
    public String printMergeHistory() {
        StringBuilder sb=new StringBuilder();
        for(String tmp: merge_history)
            sb.append(tmp).append("\n");
        return sb.toString();
    }

    @ManagedOperation(description="TODO")
    public String printLossRates() {
        StringBuilder sb=new StringBuilder();
        NakReceiverWindow win;
        for(Map.Entry<Address,NakReceiverWindow> entry: xmit_table.entrySet()) {
            win=entry.getValue();
            sb.append(entry.getKey()).append(": ").append(win.printLossRate()).append("\n");
        }
        return sb.toString();
    }

    @ManagedAttribute
    public double getAverageLossRate() {
        double retval=0.0;
        int count=0;
        if(xmit_table.isEmpty())
            return 0.0;
        for(NakReceiverWindow win: xmit_table.values()) {
            retval+=win.getLossRate();
            count++;
        }
        return retval / (double)count;
    }

    @ManagedAttribute
    public double getAverageSmoothedLossRate() {
        double retval=0.0;
        int count=0;
        if(xmit_table.isEmpty())
            return 0.0;
        for(NakReceiverWindow win: xmit_table.values()) {
            retval+=win.getSmoothedLossRate();
            count++;
        }
        return retval / (double)count;
    }

    public Vector<Integer> providedUpServices() {
        Vector<Integer> retval=new Vector<Integer>(5);
        retval.addElement(new Integer(Event.GET_DIGEST));
        retval.addElement(new Integer(Event.SET_DIGEST));
        retval.addElement(new Integer(Event.MERGE_DIGEST));
        return retval;
    }

    public void start() throws Exception {
        timer=getTransport().getTimer();
        if(timer == null)
            throw new Exception("timer is null");
        started=true;
        leaving=false;

        if(xmit_time_stats != null) {
            // dump accumulated retransmission time stats to a per-member file at JVM exit
            Runtime.getRuntime().addShutdownHook(new Thread() {
                public void run() {
                    String filename="xmit-stats-" + local_addr + ".log";
                    try {
                        dumpXmitStats(filename);
                    }
                    catch(IOException e) {
                        e.printStackTrace();
                    }
                }
            });
        }
    }

    public void stop() {
        started=false;
        reset(); // clears sent_msgs and destroys all NakReceiverWindows
        oob_loopback_msgs.clear();
    }

    /**
     * <b>Callback</b>. Called by superclass when event may be handled.<p> <b>Do not use <code>down_prot.down()</code> in this
     * method as the event is passed down by default by the superclass after this method returns !</b>
     */
    public Object down(Event evt) {
        switch(evt.getType()) {
            case Event.MSG:
                Message msg=(Message)evt.getArg();
                Address dest=msg.getDest();
                if(dest != null && !dest.isMulticastAddress()) {
                    break; // unicast address: not null and not mcast, pass down unchanged
                }
                send(evt, msg);
                return null; // don't pass down the stack

            case Event.STABLE: // generated by STABLE layer. Delete stable messages passed in arg
                stable((Digest)evt.getArg());
                return null; // do not pass down further (Bela Aug 7 2001)

            case Event.GET_DIGEST:
                return getDigest();

            case Event.SET_DIGEST:
                setDigest((Digest)evt.getArg());
                return null;

            case Event.MERGE_DIGEST:
                mergeDigest((Digest)evt.getArg());
                return null;

            case Event.TMP_VIEW:
                View tmp_view=(View)evt.getArg();
                Vector<Address> mbrs=tmp_view.getMembers();
                members.clear();
                members.addAll(mbrs);
                // adjustReceivers(false);
                break;

            case Event.VIEW_CHANGE:
                tmp_view=(View)evt.getArg();
                mbrs=tmp_view.getMembers();
                members.clear();
                members.addAll(mbrs);
                view=tmp_view;
                adjustReceivers(members);
                is_server=true; // check vids from now on
                Set<Address> tmp=new LinkedHashSet<Address>(members);
                tmp.add(null); // for null destination (= mcast)
                sent.keySet().retainAll(tmp);
                received.keySet().retainAll(tmp);
                xmit_stats.keySet().retainAll(tmp);
                // in_progress.keySet().retainAll(mbrs); // remove elements which are not in the membership
                break;

            case Event.BECOME_SERVER:
                is_server=true;
                break;

            case Event.SET_LOCAL_ADDRESS:
                local_addr=(Address)evt.getArg();
                break;

            case Event.DISCONNECT:
                leaving=true;
                reset();
                break;

            case Event.REBROADCAST:
                rebroadcasting=true;
                rebroadcast_digest=(Digest)evt.getArg();
                try {
                    rebroadcastMessages();
                }
                finally {
                    // always clear rebroadcast state, even if rebroadcastMessages() throws
                    rebroadcasting=false;
                    rebroadcast_digest_lock.lock();
                    try {
                        rebroadcast_digest=null;
                    }
                    finally {
                        rebroadcast_digest_lock.unlock();
                    }
                }
                return null;
        }

        return down_prot.down(evt);
    }

    /**
     * <b>Callback</b>. Called by superclass when event may be handled.<p> <b>Do not use <code>PassUp</code> in this
     * method as the event is passed up by default by the superclass after this method returns !</b>
     */
    public Object up(Event evt) {
        switch(evt.getType()) {
            case Event.MSG:
                Message msg=(Message)evt.getArg();
                NakAckHeader hdr=(NakAckHeader)msg.getHeader(name);
                if(hdr == null)
                    break; // pass up (e.g. unicast msg)

                // discard messages while not yet server (i.e., until JOIN has returned)
                if(!is_server) {
                    if(log.isTraceEnabled())
                        log.trace("message was discarded (not yet server)");
                    return null;
                }

                // Changed by bela Jan 29 2003: we must not remove the header, otherwise
                // further xmit requests will fail !
                //hdr=(NakAckHeader)msg.removeHeader(getName());

                switch(hdr.type) {

                    case NakAckHeader.MSG:
                        handleMessage(msg, hdr);
                        return null; // transmitter passes message up for us !

                    case NakAckHeader.XMIT_REQ:
                        if(hdr.range == null) {
                            if(log.isErrorEnabled()) {
                                log.error("XMIT_REQ: range of xmit msg is null; discarding request from " + msg.getSrc());
                            }
                            return null;
                        }
                        handleXmitReq(msg.getSrc(), hdr.range.low, hdr.range.high, hdr.sender);
                        return null;

                    case NakAckHeader.XMIT_RSP:
                        if(log.isTraceEnabled())
                            log.trace("received missing message " + msg.getSrc() + ":" + hdr.seqno);
                        handleXmitRsp(msg);
                        return null;

                    default:
                        if(log.isErrorEnabled()) {
                            log.error("NakAck header type " + hdr.type + " not known !");
                        }
                        return null;
                }

            case Event.STABLE: // generated by STABLE layer. Delete stable messages passed in arg
                stable((Digest)evt.getArg());
                return null; // do not pass up further (Bela Aug 7 2001)

            case Event.SUSPECT:
                // release the promise if rebroadcasting is in progress... otherwise we wait forever. there will be a new
                // flush round anyway
                if(rebroadcasting) {
                    cancelRebroadcasting();
                }
                break;
        }

        return up_prot.up(evt);
    }

    /**
     * Assigns the next seqno to the multicast message, stores it in the local
     * retransmit window and passes it down the stack.
     */
    private void send(Event evt, Message msg) {
        if(msg == null)
            throw new NullPointerException("msg is null; event is " + evt);

        if(!started) {
            if(log.isTraceEnabled())
                log.trace("[" + local_addr + "] discarded message as start() has not been called, message: " + msg);
            return;
        }

        long msg_id;
        NakReceiverWindow win=xmit_table.get(local_addr);
        msg.setSrc(local_addr); // this needs to be done so we can check whether the message sender is the local_addr

        seqno_lock.lock();
        try {
            try { // incrementing seqno and adding the msg to sent_msgs needs to be atomic
                msg_id=seqno +1;
                msg.putHeader(name, new NakAckHeader(NakAckHeader.MSG, msg_id));
                if(win.add(msg_id, msg) && !msg.isFlagSet(Message.OOB))
                    undelivered_msgs.incrementAndGet();
                seqno=msg_id;
            }
            catch(Throwable t) {
                throw new RuntimeException("failure adding msg " + msg + " to the retransmit table for " + local_addr, t);
            }
        }
        finally {
            seqno_lock.unlock();
        }

        try {
            if(msg.isFlagSet(Message.OOB))
                oob_loopback_msgs.add(msg_id);
            if(log.isTraceEnabled())
                log.trace("sending " + local_addr + "#" + msg_id);
            down_prot.down(evt); // if this fails, since msg is in sent_msgs, it can be retransmitted
        }
        catch(Throwable t) { // eat the exception, don't pass it up the stack
            if(log.isWarnEnabled()) {
                log.warn("failure passing message down", t);
            }
        }
    }

    /**
     * Finds the corresponding NakReceiverWindow and adds the message to it (according to seqno). Then removes as many
     * messages as possible from the NRW and passes them up the stack. Discards messages from non-members.
*/ private void handleMessage(Message msg, NakAckHeader hdr) { Address sender=msg.getSrc(); if(sender == null) { if(log.isErrorEnabled()) log.error("sender of message is null"); return; } if(log.isTraceEnabled()) log.trace(new StringBuilder().append('[').append(local_addr).append(": received ").append(sender).append('#').append(hdr.seqno)); NakReceiverWindow win=xmit_table.get(sender); if(win == null) { // discard message if there is no entry for sender if(leaving) return; if(log.isWarnEnabled() && log_discard_msgs) log.warn(local_addr + "] discarded message from non-member " + sender + ", my view is " + view); return; } boolean loopback=local_addr.equals(sender); boolean added_to_window=false; boolean added=loopback || (added_to_window=win.add(hdr.seqno, msg)); if(added_to_window) undelivered_msgs.incrementAndGet(); // message is passed up if OOB. Later, when remove() is called, we discard it. This affects ordering ! if(added && msg.isFlagSet(Message.OOB)) { if(!loopback || oob_loopback_msgs.remove(hdr.seqno)) { up_prot.up(new Event(Event.MSG, msg)); win.removeOOBMessage(); if(!(win.hasMessagesToRemove() && undelivered_msgs.get() > 0)) return; } } // Efficient way of checking whether another thread is already processing messages from 'sender'. // If that's the case, we return immediately and let the exiting thread process our message // can be returned to the thread pool final AtomicBoolean processing=win.getProcessing(); if(!processing.compareAndSet(false, true)) { return; } // where lots of threads can come up to this point concurrently, but only 1 is allowed to pass at a time // We *can* deliver messages from *different* senders concurrently, e.g. 
reception of P1, Q1, P2, Q2 can result in // delivery of P1, Q1, Q2, P2: FIFO (implemented by NAKACK) says messages need to be delivered in the // order in which they were sent by the sender int num_regular_msgs_removed=0; // 2nd line of defense: in case of an exception, remove() might not be called, therefore processing would never // be set back to false. If we get an exception and released_processing is not true, then we set // processing to false in the finally clause boolean released_processing=false; try { while(true) { // we're removing a msg and set processing to false (if null) *atomically* (wrt to add()) Message msg_to_deliver=win.remove(processing); if(msg_to_deliver == null) { released_processing=true; return; // processing will be set to false now } if(msg_to_deliver.isFlagSet(Message.OOB)) { continue; } num_regular_msgs_removed++; // System.out.println("removed regular #" + ((NakAckHeader)msg_to_deliver.getHeader(name)).seqno); // Changed by bela Jan 29 2003: not needed (see above) //msg_to_deliver.removeHeader(getName()); up_prot.up(new Event(Event.MSG, msg_to_deliver)); } } finally { // We keep track of regular messages that we added, but couldn't remove (because of ordering). // When we have such messages pending, then even OOB threads will remove and process them undelivered_msgs.addAndGet(-num_regular_msgs_removed); // processing is always set in win.remove(processing) above and never here ! This code is just a // 2nd line of defense should there be an exception before win.remove(processing) sets processing if(!released_processing) processing.set(false); } } /** * Retransmits messsages first_seqno to last_seqno from original_sender from xmit_table to xmit_requester, * called when XMIT_REQ is received. 
* @param xmit_requester The sender of the XMIT_REQ, we have to send the requested copy of the message to this address
 * @param first_seqno The first sequence number to be retransmitted (<= last_seqno)
 * @param last_seqno The last sequence number to be retransmitted (>= first_seqno)
 * @param original_sender The member who originally sent the message. Guaranteed to be non-null
 */
private void handleXmitReq(Address xmit_requester, long first_seqno, long last_seqno, Address original_sender) {
    Message msg;

    if(log.isTraceEnabled()) {
        StringBuilder sb=new StringBuilder();
        sb.append(local_addr).append(": received xmit request from ").append(xmit_requester).append(" for ");
        sb.append(original_sender).append(" [").append(first_seqno).append(" - ").append(last_seqno).append("]");
        log.trace(sb.toString());
    }

    // A reversed range is a protocol error on the requester's side; nothing sensible can be sent back
    if(first_seqno > last_seqno) {
        if(log.isErrorEnabled())
            log.error("first_seqno (" + first_seqno + ") > last_seqno (" + last_seqno + "): not able to retransmit");
        return;
    }

    if(stats) {
        xmit_reqs_received+=last_seqno - first_seqno +1;
        updateStats(received, xmit_requester, 1, 0, 0);
    }

    // Optional per-second histogram of retransmission activity (active when xmit_time_stats != null)
    if(xmit_time_stats != null) {
        long key=(System.currentTimeMillis() - xmit_time_stats_start) / 1000;
        XmitTimeStat stat=xmit_time_stats.get(key);
        if(stat == null) {
            stat=new XmitTimeStat();
            XmitTimeStat stat2=xmit_time_stats.putIfAbsent(key, stat);
            if(stat2 != null)
                stat=stat2;
        }
        stat.xmit_reqs_received.addAndGet((int)(last_seqno - first_seqno +1));
        stat.xmit_rsps_sent.addAndGet((int)(last_seqno - first_seqno +1));
    }

    NakReceiverWindow win=xmit_table.get(original_sender);
    if(win == null) {
        if(log.isErrorEnabled()) {
            StringBuilder sb=new StringBuilder();
            sb.append("(requester=").append(xmit_requester).append(", local_addr=").append(this.local_addr);
            sb.append(") ").append(original_sender).append(" not found in retransmission table:\n").append(printMessages());
            if(print_stability_history_on_failed_xmit) {
                sb.append(" (stability history:\n").append(printStabilityHistory());
            }
            log.error(sb);
        }
        return;
    }
    // Inclusive range: send one XMIT_RSP per requested seqno that is still in the window
    for(long i=first_seqno; i <= last_seqno; i++) {
        msg=win.get(i);
        if(msg == null) {
            // Don't warn when we requested from ourselves: the message may simply have been garbage collected
            if(log.isWarnEnabled() && !local_addr.equals(xmit_requester)) {
                StringBuilder sb=new StringBuilder();
                sb.append("(requester=").append(xmit_requester).append(", local_addr=").append(this.local_addr);
                sb.append(") message ").append(original_sender).append("::").append(i);
                sb.append(" not found in retransmission table of ").append(original_sender).append(":\n").append(win);
                if(print_stability_history_on_failed_xmit) {
                    sb.append(" (stability history:\n").append(printStabilityHistory());
                }
                log.warn(sb);
            }
            continue;
        }
        sendXmitRsp(xmit_requester, msg, i);
    }
}


/** Stops an in-progress rebroadcast phase and wakes up all threads blocked in rebroadcastMessages() */
private void cancelRebroadcasting() {
    rebroadcast_lock.lock();
    try {
        rebroadcasting=false;
        rebroadcast_done.signalAll();
    }
    finally {
        rebroadcast_lock.unlock();
    }
}


/** Accumulates per-member xmit request/response/missing counters (used only when stats=true) */
private static void updateStats(ConcurrentMap<Address,StatsEntry> map, Address key, int req, int rsp, int missing) {
    StatsEntry entry=map.get(key);
    if(entry == null) {
        entry=new StatsEntry();
        StatsEntry tmp=map.putIfAbsent(key, entry);
        if(tmp != null)
            entry=tmp;
    }
    // NOTE(review): these += updates are not atomic; concurrent callers may lose increments.
    // Presumably acceptable since the entries are diagnostics only - confirm
    entry.xmit_reqs+=req;
    entry.xmit_rsps+=rsp;
    entry.missing_msgs_rcvd+=missing;
}


/**
 * Sends a message msg to the requester. We have to wrap the original message into a retransmit message, as we need
 * to preserve the original message's properties, such as src, headers etc.
* @param dest * @param msg * @param seqno */ private void sendXmitRsp(Address dest, Message msg, long seqno) { Buffer buf; if(msg == null) { if(log.isErrorEnabled()) log.error("message is null, cannot send retransmission"); return; } if(stats) { xmit_rsps_sent++; updateStats(sent, dest, 0, 1, 0); } if(use_mcast_xmit) dest=null; if(msg.getSrc() == null) msg.setSrc(local_addr); try { buf=Util.messageToByteBuffer(msg); Message xmit_msg=new Message(dest, null, buf.getBuf(), buf.getOffset(), buf.getLength()); // changed Bela Jan 4 2007: we should not use OOB for retransmitted messages, otherwise we tax the // OOB thread pool too much // msg.setFlag(Message.OOB); xmit_msg.putHeader(name, new NakAckHeader(NakAckHeader.XMIT_RSP, seqno)); down_prot.down(new Event(Event.MSG, xmit_msg)); } catch(IOException ex) { log.error("failed marshalling xmit list", ex); } } private void handleXmitRsp(Message msg) { if(msg == null) { if(log.isWarnEnabled()) log.warn("message is null"); return; } try { Message wrapped_msg=Util.byteBufferToMessage(msg.getRawBuffer(), msg.getOffset(), msg.getLength()); if(xmit_time_stats != null) { long key=(System.currentTimeMillis() - xmit_time_stats_start) / 1000; XmitTimeStat stat=xmit_time_stats.get(key); if(stat == null) { stat=new XmitTimeStat(); XmitTimeStat stat2=xmit_time_stats.putIfAbsent(key, stat); if(stat2 != null) stat=stat2; } stat.xmit_rsps_received.incrementAndGet(); } if(stats) { xmit_rsps_received++; updateStats(received, msg.getSrc(), 0, 1, 0); } up(new Event(Event.MSG, wrapped_msg)); if(rebroadcasting) { Digest tmp=getDigest(); boolean cancel_rebroadcasting; rebroadcast_digest_lock.lock(); try { cancel_rebroadcasting=tmp.isGreaterThanOrEqual(rebroadcast_digest); } finally { rebroadcast_digest_lock.unlock(); } if(cancel_rebroadcasting) { cancelRebroadcasting(); } } } catch(Exception ex) { if(log.isErrorEnabled()) { log.error("failed reading retransmitted message", ex); } } } /** * Takes the argument highest_seqnos and compares it to the 
current digest. If the current digest has fewer messages, * then send retransmit messages for the missing messages. Return when all missing messages have been received. If * we're waiting for a missing message from P, and P crashes while waiting, we need to exclude P from the wait set. */ private void rebroadcastMessages() { Digest my_digest; Map<Address,Digest.Entry> their_digest; Address sender; Digest.Entry their_entry, my_entry; long their_high, my_high; long sleep=max_rebroadcast_timeout / NUM_REBROADCAST_MSGS; long wait_time=max_rebroadcast_timeout, start=System.currentTimeMillis(); while(wait_time > 0) { rebroadcast_digest_lock.lock(); try { if(rebroadcast_digest == null) break; their_digest=rebroadcast_digest.getSenders(); } finally { rebroadcast_digest_lock.unlock(); } my_digest=getDigest(); boolean xmitted=false; for(Map.Entry<Address,Digest.Entry> entry: their_digest.entrySet()) { sender=entry.getKey(); their_entry=entry.getValue(); my_entry=my_digest.get(sender); if(my_entry == null) continue; their_high=their_entry.getHighest(); my_high=my_entry.getHighest(); if(their_high > my_high) { if(log.isTraceEnabled()) log.trace("sending XMIT request to " + sender + " for messages " + my_high + " - " + their_high); retransmit(my_high, their_high, sender, true); // use multicast to send retransmit request xmitted=true; } } if(!xmitted) return; // we're done; no retransmissions are needed anymore. our digest is >= rebroadcast_digest rebroadcast_lock.lock(); try { try { my_digest=getDigest(); rebroadcast_digest_lock.lock(); try { if(!rebroadcasting || my_digest.isGreaterThanOrEqual(rebroadcast_digest)) return; } finally { rebroadcast_digest_lock.unlock(); } rebroadcast_done.await(sleep, TimeUnit.MILLISECONDS); wait_time-=(System.currentTimeMillis() - start); } catch(InterruptedException e) { } } finally { rebroadcast_lock.unlock(); } } } /** * Remove old members from NakReceiverWindows and add new members (starting seqno=0). 
Essentially removes all * entries from xmit_table that are not in <code>members</code>. This method is not called concurrently * multiple times */ private void adjustReceivers(List<Address> new_members) { NakReceiverWindow win; // 1. Remove all senders in xmit_table that are not members anymore for(Iterator<Address> it=xmit_table.keySet().iterator(); it.hasNext();) { Address sender=it.next(); if(!new_members.contains(sender)) { if(local_addr != null && local_addr.equals(sender)) { if(log.isErrorEnabled()) log.error("will not remove myself (" + sender + ") from xmit_table, received incorrect new membership of " + new_members); continue; } win=xmit_table.get(sender); win.reset(); if(log.isDebugEnabled()) { log.debug("removing " + sender + " from xmit_table (not member anymore)"); } it.remove(); } } // 2. Add newly joined members to xmit_table (starting seqno=0) for(Address sender: new_members) { if(!xmit_table.containsKey(sender)) { win=createNakReceiverWindow(sender, INITIAL_SEQNO, 0); xmit_table.put(sender, win); } } } /** * Returns a message digest: for each member P the lowest, highest delivered and highest received seqno is added */ public Digest getDigest() { final Map<Address,Digest.Entry> map=new HashMap<Address,Digest.Entry>(); for(Map.Entry<Address,NakReceiverWindow> entry: xmit_table.entrySet()) { Address sender=entry.getKey(); // guaranteed to be non-null (CCHM) NakReceiverWindow win=entry.getValue(); // guaranteed to be non-null (CCHM) long low=win.getLowestSeen(), highest_delivered=win.getHighestDelivered(), highest_received=win.getHighestReceived(); map.put(sender, new Digest.Entry(low, highest_delivered, highest_received)); } return new Digest(map); } /** * Creates a NakReceiverWindow for each sender in the digest according to the sender's seqno. If NRW already exists, * reset it. 
*/
private void setDigest(Digest digest) {
    if(digest == null) {
        if(log.isErrorEnabled()) {
            log.error("digest or digest.senders is null");
        }
        return;
    }

    // If the digest contains ourselves it is authoritative: drop every existing window.
    // Otherwise keep only our own window and rebuild the rest from the digest
    if(local_addr != null && digest.contains(local_addr)) {
        clear();
    }
    else { // remove all but local_addr (if not null)
        for(Iterator<Address> it=xmit_table.keySet().iterator(); it.hasNext();) {
            Address key=it.next();
            if(local_addr != null && local_addr.equals(key)) {
                ; // keep our own entry
            }
            else {
                it.remove();
            }
        }
    }

    Address sender;
    Digest.Entry val;
    long initial_seqno;
    NakReceiverWindow win;
    for(Map.Entry<Address, Digest.Entry> entry: digest.getSenders().entrySet()) {
        sender=entry.getKey();
        val=entry.getValue();
        if(sender == null || val == null) {
            if(log.isWarnEnabled()) {
                log.warn("sender or value is null");
            }
            continue;
        }
        // Each new window starts at the sender's highest delivered seqno from the digest
        initial_seqno=val.getHighestDeliveredSeqno();
        win=createNakReceiverWindow(sender, initial_seqno, val.getLow());
        xmit_table.put(sender, win);
    }
    if(!xmit_table.containsKey(local_addr)) {
        if(log.isWarnEnabled()) {
            log.warn("digest does not contain local address (local_addr=" + local_addr + ", digest=" + digest);
        }
    }
}


/**
 * Merges the given digest into the existing windows: every sender except ourselves gets a fresh
 * NakReceiverWindow seeded from its digest entry. Before/after state is recorded in merge_history.
 */
private void mergeDigest(Digest digest) {
    if(digest == null) {
        if(log.isErrorEnabled()) {
            log.error("digest or digest.senders is null");
        }
        return;
    }
    StringBuilder sb=new StringBuilder();
    sb.append("existing digest: " + getDigest()).append("\nnew digest: " + digest);

    for(Map.Entry<Address, Digest.Entry> entry: digest.getSenders().entrySet()) {
        Address sender=entry.getKey();
        Digest.Entry val=entry.getValue();
        if(sender == null || val == null) {
            if(log.isWarnEnabled()) {
                log.warn("sender or value is null");
            }
            continue;
        }
        long highest_delivered_seqno=val.getHighestDeliveredSeqno();
        long low_seqno=val.getLow();

        // except for myself: our own window is never replaced
        NakReceiverWindow win=xmit_table.get(sender);
        if(win != null) {
            if(local_addr != null && local_addr.equals(sender)) {
                continue;
            }
            else {
                win.reset(); // stops retransmission
                xmit_table.remove(sender);
            }
        }
        win=createNakReceiverWindow(sender, highest_delivered_seqno, low_seqno);
        xmit_table.put(sender, win);
    }
    sb.append("\n").append("resulting digest: " + getDigest());
    merge_history.add(sb.toString());
    if(log.isDebugEnabled())
        log.debug(sb);
    if(!xmit_table.containsKey(local_addr)) {
        if(log.isWarnEnabled()) {
            log.warn("digest does not contain local address (local_addr=" + local_addr + ", digest=" + digest);
        }
    }
}


/** Creates and configures a NakReceiverWindow for sender, selecting the retransmit-interval strategy */
private NakReceiverWindow createNakReceiverWindow(Address sender, long initial_seqno, long lowest_seqno) {
    NakReceiverWindow win=new NakReceiverWindow(local_addr, sender, this, initial_seqno, lowest_seqno, timer);

    // Strategy precedence: measured averages > exponential backoff > static timeout schedule
    if(use_stats_for_retransmission) {
        win.setRetransmitTimeouts(new ActualInterval(sender));
    }
    else if(exponential_backoff > 0) {
        win.setRetransmitTimeouts(new ExponentialInterval(exponential_backoff));
    }
    else {
        win.setRetransmitTimeouts(new StaticInterval(retransmit_timeouts));
    }

    win.setDiscardDeliveredMessages(discard_delivered_msgs);
    win.setMaxXmitBufSize(this.max_xmit_buf_size);
    if(stats)
        win.setListener(this);
    return win;
}


/** Writes the per-second retransmission statistics (xmit_time_stats) as a table to the given file */
private void dumpXmitStats(String filename) throws IOException {
    Writer out=new FileWriter(filename);
    try {
        // Copy into a TreeMap so the seconds are written in ascending order
        TreeMap<Long,XmitTimeStat> map=new TreeMap<Long,XmitTimeStat>(xmit_time_stats);
        StringBuilder sb;
        XmitTimeStat stat;
        out.write("time (secs) gaps-detected xmit-reqs-sent xmit-reqs-received xmit-rsps-sent xmit-rsps-received missing-msgs-received\n\n");
        for(Map.Entry<Long,XmitTimeStat> entry: map.entrySet()) {
            sb=new StringBuilder();
            stat=entry.getValue();
            sb.append(entry.getKey()).append(" ");
            sb.append(stat.gaps_detected).append(" ");
            sb.append(stat.xmit_reqs_sent).append(" ");
            sb.append(stat.xmit_reqs_received).append(" ");
            sb.append(stat.xmit_rsps_sent).append(" ");
            sb.append(stat.xmit_rsps_received).append(" ");
            sb.append(stat.missing_msgs_received).append("\n");
            out.write(sb.toString());
        }
    }
    finally {
        out.close();
    }
}


/**
 * Garbage collect messages that have been seen by all members.
Update sent_msgs: for the sender P in the digest * which is equal to the local address, garbage collect all messages <= seqno at digest[P]. Update xmit_table: * for each sender P in the digest and its highest seqno seen SEQ, garbage collect all delivered_msgs in the * NakReceiverWindow corresponding to P which are <= seqno at digest[P]. */ private void stable(Digest digest) { NakReceiverWindow recv_win; long my_highest_rcvd; // highest seqno received in my digest for a sender P long stability_highest_rcvd; // highest seqno received in the stability vector for a sender P if(members == null || local_addr == null || digest == null) { if(log.isWarnEnabled()) log.warn("members, local_addr or digest are null !"); return; } if(log.isTraceEnabled()) { log.trace("received stable digest " + digest); } stability_msgs.add(digest); Address sender; Digest.Entry val; long high_seqno_delivered, high_seqno_received; for(Map.Entry<Address, Digest.Entry> entry: digest.getSenders().entrySet()) { sender=entry.getKey(); if(sender == null) continue; val=entry.getValue(); high_seqno_delivered=val.getHighestDeliveredSeqno(); high_seqno_received=val.getHighestReceivedSeqno(); // check whether the last seqno received for a sender P in the stability vector is > last seqno // received for P in my digest. 
if yes, request retransmission (see "Last Message Dropped" topic // in DESIGN) recv_win=xmit_table.get(sender); if(recv_win != null) { my_highest_rcvd=recv_win.getHighestReceived(); stability_highest_rcvd=high_seqno_received; if(stability_highest_rcvd >= 0 && stability_highest_rcvd > my_highest_rcvd) { if(log.isTraceEnabled()) { log.trace("my_highest_rcvd (" + my_highest_rcvd + ") < stability_highest_rcvd (" + stability_highest_rcvd + "): requesting retransmission of " + sender + '#' + stability_highest_rcvd); } retransmit(stability_highest_rcvd, stability_highest_rcvd, sender); } } high_seqno_delivered-=gc_lag; if(high_seqno_delivered < 0) { continue; } if(log.isTraceEnabled()) log.trace("deleting msgs <= " + high_seqno_delivered + " from " + sender); // delete *delivered* msgs that are stable if(recv_win != null) { recv_win.stable(high_seqno_delivered); // delete all messages with seqnos <= seqno } } } /** * Implementation of Retransmitter.RetransmitCommand. Called by retransmission thread when gap is detected. */ public void retransmit(long first_seqno, long last_seqno, Address sender) { retransmit(first_seqno, last_seqno, sender, false); } protected void retransmit(long first_seqno, long last_seqno, final Address sender, boolean multicast_xmit_request) { NakAckHeader hdr; Message retransmit_msg; Address dest=sender; // to whom do we send the XMIT request ? 
if(multicast_xmit_request || this.use_mcast_xmit_req) { dest=null; } else { if(xmit_from_random_member && !local_addr.equals(sender)) { Address random_member=(Address)Util.pickRandomElement(members); if(random_member != null && !local_addr.equals(random_member)) { dest=random_member; if(log.isTraceEnabled()) log.trace("picked random member " + dest + " to send XMIT request to"); } } } hdr=new NakAckHeader(NakAckHeader.XMIT_REQ, first_seqno, last_seqno, sender); retransmit_msg=new Message(dest, null, null); retransmit_msg.setFlag(Message.OOB); if(log.isTraceEnabled()) log.trace(local_addr + ": sending XMIT_REQ ([" + first_seqno + ", " + last_seqno + "]) to " + dest); retransmit_msg.putHeader(name, hdr); ConcurrentMap<Long,Long> tmp=xmit_stats.get(sender); if(tmp == null) { tmp=new ConcurrentHashMap<Long,Long>(); ConcurrentMap<Long,Long> tmp2=xmit_stats.putIfAbsent(sender, tmp); if(tmp2 != null) tmp=tmp2; } for(long seq=first_seqno; seq < last_seqno; seq++) { tmp.putIfAbsent(seq, System.currentTimeMillis()); } if(xmit_time_stats != null) { long key=(System.currentTimeMillis() - xmit_time_stats_start) / 1000; XmitTimeStat stat=xmit_time_stats.get(key); if(stat == null) { stat=new XmitTimeStat(); XmitTimeStat stat2=xmit_time_stats.putIfAbsent(key, stat); if(stat2 != null) stat=stat2; } stat.xmit_reqs_sent.addAndGet((int)(last_seqno - first_seqno +1)); } down_prot.down(new Event(Event.MSG, retransmit_msg)); if(stats) { xmit_reqs_sent+=last_seqno - first_seqno +1; updateStats(sent, sender, 1, 0, 0); XmitRequest req=new XmitRequest(sender, first_seqno, last_seqno, sender); send_history.add(req); } } public void missingMessageReceived(long seqno, final Address original_sender) { ConcurrentMap<Long,Long> tmp=xmit_stats.get(original_sender); if(tmp != null) { Long timestamp=tmp.remove(seqno); if(timestamp != null) { long diff=System.currentTimeMillis() - timestamp; BoundedList<Long> list=xmit_times_history.get(original_sender); if(list == null) { list=new 
BoundedList<Long>(xmit_history_max_size); BoundedList<Long> list2=xmit_times_history.putIfAbsent(original_sender, list); if(list2 != null) list=list2; } list.add(diff); // compute the smoothed average for retransmission times for original_sender // needs to be synchronized because we rely on the previous value for computation of the next value synchronized(smoothed_avg_xmit_times) { Double smoothed_avg=smoothed_avg_xmit_times.get(original_sender); if(smoothed_avg == null) smoothed_avg=INITIAL_SMOOTHED_AVG; // the smoothed avg takes 90% of the previous value, 100% of the new value and averages them // then, we add 10% to be on the safe side (an xmit value should rather err on the higher than lower side) smoothed_avg=((smoothed_avg * WEIGHT) + diff) / 2; smoothed_avg=smoothed_avg * (2 - WEIGHT); smoothed_avg_xmit_times.put(original_sender, smoothed_avg); } } } if(xmit_time_stats != null) { long key=(System.currentTimeMillis() - xmit_time_stats_start) / 1000; XmitTimeStat stat=xmit_time_stats.get(key); if(stat == null) { stat=new XmitTimeStat(); XmitTimeStat stat2=xmit_time_stats.putIfAbsent(key, stat); if(stat2 != null) stat=stat2; } stat.missing_msgs_received.incrementAndGet(); } if(stats) { missing_msgs_received++; updateStats(received, original_sender, 0, 0, 1); MissingMessage missing=new MissingMessage(original_sender, seqno); receive_history.add(missing); } } /** Called when a message gap is detected */ public void messageGapDetected(long from, long to, Address src) { if(xmit_time_stats != null) { long key=(System.currentTimeMillis() - xmit_time_stats_start) / 1000; XmitTimeStat stat=xmit_time_stats.get(key); if(stat == null) { stat=new XmitTimeStat(); XmitTimeStat stat2=xmit_time_stats.putIfAbsent(key, stat); if(stat2 != null) stat=stat2; } stat.gaps_detected.addAndGet((int)(to - from +1)); } } private void clear() { // changed April 21 2004 (bela): SourceForge bug# 938584. We cannot delete our own messages sent between // a join() and a getState(). 
// Otherwise retransmission requests from members who missed those msgs might
    // fail. Not to worry though: those msgs will be cleared by STABLE (message garbage collection)

    // sent_msgs.clear();

    for(NakReceiverWindow win: xmit_table.values()) {
        win.reset(); // stops retransmission tasks but keeps the window usable
    }
    xmit_table.clear();
    undelivered_msgs.set(0);
}


/** Resets the protocol to its initial state: seqno back to 0, all receiver windows destroyed */
private void reset() {
    seqno_lock.lock();
    try {
        seqno=0;
    }
    finally {
        seqno_lock.unlock();
    }

    for(NakReceiverWindow win: xmit_table.values()) {
        win.destroy();
    }
    xmit_table.clear();
    undelivered_msgs.set(0);
}


/** Dumps each sender's receiver window, one per line (JMX diagnostic) */
@ManagedOperation(description="TODO")
public String printMessages() {
    StringBuilder ret=new StringBuilder();
    Map.Entry<Address,NakReceiverWindow> entry;
    Address addr;
    Object w;

    for(Iterator<Map.Entry<Address,NakReceiverWindow>> it=xmit_table.entrySet().iterator(); it.hasNext();) {
        entry=it.next();
        addr=entry.getKey();
        w=entry.getValue();
        ret.append(addr).append(": ").append(w.toString()).append('\n');
    }
    return ret.toString();
}


/** Prints the average recorded retransmission time per sender (-1 when no samples exist) */
@ManagedOperation(description="TODO")
public String printRetransmissionAvgs() {
    StringBuilder sb=new StringBuilder();

    for(Map.Entry<Address,BoundedList<Long>> entry: xmit_times_history.entrySet()) {
        Address sender=entry.getKey();
        BoundedList<Long> list=entry.getValue();
        long tmp=0;
        int i=0;
        for(Long val: list) {
            tmp+=val;
            i++;
        }
        // FIX: 'tmp / i' was an integer division, truncating the fractional part of the average before
        // it was widened to double; cast first so the reported average keeps its precision
        double avg=i > 0? (double)tmp / i: -1;
        sb.append(sender).append(": ").append(avg).append("\n");
    }
    return sb.toString();
}


/** Prints the smoothed average retransmission time per sender */
@ManagedOperation(description="TODO")
public String printSmoothedRetransmissionAvgs() {
    StringBuilder sb=new StringBuilder();
    for(Map.Entry<Address,Double> entry: smoothed_avg_xmit_times.entrySet()) {
        sb.append(entry.getKey()).append(": ").append(entry.getValue()).append("\n");
    }
    return sb.toString();
}


/** Prints the raw history of retransmission times per sender */
@ManagedOperation(description="TODO")
public String printRetransmissionTimes() {
    StringBuilder sb=new StringBuilder();

    for(Map.Entry<Address,BoundedList<Long>> entry: xmit_times_history.entrySet()) {
        Address sender=entry.getKey();
        BoundedList<Long> list=entry.getValue();
        sb.append(sender).append(": ").append(list).append("\n");
    }
    return sb.toString();
}


/** Average retransmission time over all senders and samples; -1 when no samples exist */
@ManagedAttribute
public double getTotalAverageRetransmissionTime() {
    long total=0;
    int i=0;

    for(BoundedList<Long> list: xmit_times_history.values()) {
        for(Long val: list) {
            total+=val;
            i++;
        }
    }
    // FIX: was 'total / i' (integer division), which truncated the average despite the double return type
    return i > 0? (double)total / i: -1;
}


/** Average of the per-sender smoothed retransmission times; -1 when none are recorded */
@ManagedAttribute
public double getTotalAverageSmoothedRetransmissionTime() {
    double total=0.0;
    int cnt=0;
    synchronized(smoothed_avg_xmit_times) {
        for(Double val: smoothed_avg_xmit_times.values()) {
            if(val != null) {
                total+=val;
                cnt++;
            }
        }
    }
    return cnt > 0? total / cnt : -1;
}


/** Returns the smoothed average retransmission time for a given sender */
public double getSmoothedAverageRetransmissionTime(Address sender) {
    synchronized(smoothed_avg_xmit_times) {
        Double retval=smoothed_avg_xmit_times.get(sender);
        if(retval == null) {
            retval=INITIAL_SMOOTHED_AVG;
            smoothed_avg_xmit_times.put(sender, retval);
        }
        return retval;
    }
}

// Commented-out experimental loss-rate tracker, kept for reference:
//    public static final class LossRate {
//        private final Set<Long> received=new HashSet<Long>();
//        private final Set<Long> missing=new HashSet<Long>();
//        private double smoothed_loss_rate=0.0;
//        public synchronized void addReceived(long seqno) {
//            received.add(seqno);
//            missing.remove(seqno);
//            setSmoothedLossRate();
//        public synchronized void addReceived(Long ...
//        seqnos) {
//            for(int i=0; i < seqnos.length; i++) {
//                Long seqno=seqnos[i];
//                received.add(seqno);
//                missing.remove(seqno);
//            setSmoothedLossRate();
//        public synchronized void addMissing(long from, long to) {
//            for(long i=from; i <= to; i++) {
//                if(!received.contains(i))
//                    missing.add(i);
//            setSmoothedLossRate();
//        public synchronized double computeLossRate() {
//            int num_missing=missing.size();
//            if(num_missing == 0)
//                return 0.0;
//            int num_received=received.size();
//            int total=num_missing + num_received;
//            return num_missing / (double)total;
//        public synchronized double getSmoothedLossRate() {
//            return smoothed_loss_rate;
//        public synchronized String toString() {
//            StringBuilder sb=new StringBuilder();
//            int num_missing=missing.size();
//            int num_received=received.size();
//            int total=num_missing + num_received;
//            sb.append("total=").append(total).append(" (received=").append(received.size()).append(", missing=")
//                    .append(missing.size()).append(", loss rate=").append(computeLossRate())
//                    .append(", smoothed loss rate=").append(smoothed_loss_rate).append(")");
//            return sb.toString();
//        /** Set the new smoothed_loss_rate value to 70% of the new value and 30% of the old value */
//        private void setSmoothedLossRate() {
//            double new_loss_rate=computeLossRate();
//            if(smoothed_loss_rate == 0) {
//                smoothed_loss_rate=new_loss_rate;
//            else {
//                smoothed_loss_rate=smoothed_loss_rate * .3 + new_loss_rate * .7;

/** Per-second counters of retransmission activity; one row of the table written by dumpXmitStats() */
private static class XmitTimeStat {
    final AtomicInteger gaps_detected=new AtomicInteger(0);
    final AtomicInteger xmit_reqs_sent=new AtomicInteger(0);
    final AtomicInteger xmit_reqs_received=new AtomicInteger(0);
    final AtomicInteger xmit_rsps_sent=new AtomicInteger(0);
    final AtomicInteger xmit_rsps_received=new AtomicInteger(0);
    final AtomicInteger missing_msgs_received=new AtomicInteger(0);
}

/** Retransmit interval driven by the measured smoothed average retransmission time for a sender */
private class ActualInterval implements Interval {
    private final Address sender;

    public ActualInterval(Address sender) {
        this.sender=sender;
    }

    public long next() {
        return (long)getSmoothedAverageRetransmissionTime(sender);
    }

    // Holds no mutable state, so the same instance can be shared
    public Interval copy() {
        return this;
    }
}

/** Per-member counters for xmit requests/responses and missing messages (used when stats=true) */
static class StatsEntry {
    long xmit_reqs, xmit_rsps, missing_msgs_rcvd;

    public String toString() {
        StringBuilder sb=new StringBuilder();
        sb.append(xmit_reqs).append(" xmit_reqs").append(", ").append(xmit_rsps).append(" xmit_rsps");
        sb.append(", ").append(missing_msgs_rcvd).append(" missing msgs");
        return sb.toString();
    }
}

/** History record of one XMIT_REQ we sent (kept in send_history for diagnostics) */
static class XmitRequest {
    final Address original_sender; // original sender of message
    final long low, high, timestamp=System.currentTimeMillis();
    final Address xmit_dest; // destination to which XMIT_REQ is sent, usually the original sender

    XmitRequest(Address original_sender, long low, long high, Address xmit_dest) {
        this.original_sender=original_sender;
        this.xmit_dest=xmit_dest;
        this.low=low;
        this.high=high;
    }

    public String toString() {
        StringBuilder sb=new StringBuilder();
        sb.append(new Date(timestamp)).append(": ").append(original_sender).append(" #[").append(low);
        sb.append("-").append(high).append("]");
        sb.append(" (XMIT_REQ sent to ").append(xmit_dest).append(")");
        return sb.toString();
    }
}

/** History record of one missing message that eventually arrived (kept in receive_history) */
static class MissingMessage {
    final Address original_sender;
    final long seq, timestamp=System.currentTimeMillis();

    MissingMessage(Address original_sender, long seqno) {
        this.original_sender=original_sender;
        this.seq=seqno;
    }

    public String toString() {
        StringBuilder sb=new StringBuilder();
        sb.append(new Date(timestamp)).append(": ").append(original_sender).append(" #").append(seq);
        return sb.toString();
    }
}
}
package org.jkiss.dbeaver.ui.views.navigator.database;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.jface.viewers.CheckboxTreeViewer;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.viewers.StructuredSelection;
import org.eclipse.jface.viewers.TreeViewer;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.TreeEditor;
import org.eclipse.swt.events.*;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.layout.FillLayout;
import org.eclipse.swt.widgets.*;
import org.eclipse.ui.IWorkbenchCommandConstants;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.core.DBeaverCore;
import org.jkiss.dbeaver.core.DBeaverUI;
import org.jkiss.dbeaver.model.navigator.*;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.runtime.DBRRunnableWithResult;
import org.jkiss.dbeaver.runtime.AbstractUIJob;
import org.jkiss.dbeaver.ui.ActionUtils;
import org.jkiss.dbeaver.ui.NavigatorUtils;
import org.jkiss.dbeaver.ui.UIUtils;
import org.jkiss.dbeaver.ui.actions.navigator.NavigatorHandlerObjectRename;
import org.jkiss.dbeaver.ui.preferences.PrefConstants;
import org.jkiss.utils.CommonUtils;

import java.lang.reflect.InvocationTargetException;
import java.util.List;

/**
 * Tree control over the DBNModel navigator model. Listens for model events (IDBNListener) and
 * refreshes/expands the wrapped JFace TreeViewer accordingly on the UI thread. With the SWT.CHECK
 * style it renders checkboxes; otherwise it supports Explorer-style slow-second-click inline rename.
 */
public class DatabaseNavigatorTree extends Composite implements IDBNListener {
    static final Log log = LogFactory.getLog(DatabaseNavigatorTree.class);

    private TreeViewer viewer;      // the wrapped viewer (a CheckboxTreeViewer when checkEnabled)
    private DBNModel model;         // navigator model; unsubscribed and nulled on dispose
    private TreeEditor treeEditor;  // in-place editor used by renameItem()
    private boolean checkEnabled;   // true when created with the SWT.CHECK style bit

    public DatabaseNavigatorTree(Composite parent, DBNNode rootNode, int style) {
        this(parent, rootNode, style, false);
    }

    public DatabaseNavigatorTree(Composite parent, DBNNode rootNode, int style, boolean showRoot) {
        super(parent, SWT.NONE);
        this.setLayout(new FillLayout());
        this.model = DBNModel.getInstance();
        this.model.addListener(this);
        // Unsubscribe from the model when the widget goes away to avoid leaking this listener
        addDisposeListener(new DisposeListener() {
            @Override
            public void widgetDisposed(DisposeEvent e) {
                if (model != null) {
                    model.removeListener(DatabaseNavigatorTree.this);
                    model = null;
                }
            }
        });
        checkEnabled = (style & SWT.CHECK) != 0;

        // Create tree
        // Fallback selection: when nothing is selected, report the root node as selected
        final ISelection defaultSelection = new StructuredSelection(rootNode);
        // TODO: there are problems with this tree when we have a lot of items.
        // TODO: I may set SWT.SINGLE style and it'll solve the problem at least when traversing tree
        // TODO: But we need multiple selection (to copy, export, etc)
        // TODO: need to do something with it
        int treeStyle = SWT.H_SCROLL | SWT.V_SCROLL | style;
        if (checkEnabled) {
            this.viewer = new CheckboxTreeViewer(this, treeStyle);
        } else {
            this.viewer = new TreeViewer(this, treeStyle) {
                @Override
                public ISelection getSelection() {
                    ISelection selection = super.getSelection();
                    return selection.isEmpty() ? defaultSelection : selection;
                }
            };
        }
        this.viewer.getTree().setCursor(getDisplay().getSystemCursor(SWT.CURSOR_ARROW));
        this.viewer.setUseHashlookup(true);
        if (rootNode.getParentNode() == null) {
            //this.viewer.setAutoExpandLevel(2);
        }
        this.viewer.setLabelProvider(new DatabaseNavigatorLabelProvider(this.viewer));
        this.viewer.setContentProvider(new DatabaseNavigatorContentProvider(this.viewer, showRoot));
        this.viewer.setInput(new DatabaseNavigatorContent(rootNode));

        initEditor();
    }

    // Sets up the TreeEditor used for inline rename and hooks the slow-click rename listener
    private void initEditor() {
        Tree treeControl = this.viewer.getTree();

        treeEditor = new TreeEditor(treeControl);
        treeEditor.horizontalAlignment = SWT.LEFT;
        treeEditor.verticalAlignment = SWT.TOP;
        treeEditor.grabHorizontal = false;
        treeEditor.minimumWidth = 50;

        //treeControl.addSelectionListener(new TreeSelectionAdapter());
        if (!checkEnabled) {
            // Add rename listener only for non CHECK trees
            treeControl.addMouseListener(new TreeSelectionAdapter());
        }
    }

    public TreeViewer getViewer() {
        return viewer;
    }

    /**
     * IDBNListener callback: reflects a model change in the viewer. All viewer access is marshalled
     * onto the UI thread via UIUtils.runInUI since model events may arrive from background threads.
     */
    @Override
    public void nodeChanged(final DBNEvent event) {
        switch (event.getAction()) {
            case ADD:
            case REMOVE:
                // Child set changed: refresh the parent's subtree
                final DBNNode parentNode = event.getNode().getParentNode();
                if (parentNode != null) {
                    UIUtils.runInUI(null, new Runnable() {
                        @Override
                        public void run() {
                            if (!viewer.getControl().isDisposed()) {
                                if (!parentNode.isDisposed()) {
                                    viewer.refresh(getViewerObject(parentNode));
                                }
                            }
                        }
                    });
                }
                break;
            case UPDATE:
                UIUtils.runInUI(null, new Runnable() {
                    @Override
                    public void run() {
                        if (!viewer.getControl().isDisposed() && !viewer.isBusy()) {
                            if (event.getNode() != null) {
                                switch (event.getNodeChange()) {
                                    case LOAD:
                                        viewer.refresh(getViewerObject(event.getNode()));
                                        expandNodeOnLoad(event.getNode());
                                        break;
                                    case UNLOAD:
                                        viewer.collapseToLevel(event.getNode(), -1);
                                        viewer.refresh(getViewerObject(event.getNode()));
                                        break;
                                    case REFRESH:
                                        viewer.update(getViewerObject(event.getNode()), null);
                                        break;
                                    case LOCK:
                                    case UNLOCK:
                                        viewer.refresh(getViewerObject(event.getNode()));
                                        break;
                                }
                            } else {
                                log.warn("Null node object");
                            }
                        }
                    }
                });
                break;
            default:
                break;
        }
    }

    // After a data source connects, optionally expand it down to its default (active) child node
    private void expandNodeOnLoad(final DBNNode node) {
        if (node instanceof DBNDataSource && DBeaverCore.getGlobalPreferenceStore().getBoolean(PrefConstants.NAVIGATOR_EXPAND_ON_CONNECT)) {
            try {
                // The child lookup may hit the database, so run it in the progress service
                DBRRunnableWithResult<DBNNode> runnable = new DBRRunnableWithResult<DBNNode>() {
                    @Override
                    public void run(DBRProgressMonitor monitor) throws InvocationTargetException, InterruptedException {
                        try {
                            result = finaActiveNode(monitor, node);
                        } catch (DBException e) {
                            throw new InvocationTargetException(e);
                        }
                    }
                };
                DBeaverUI.runInProgressService(runnable);
                if (runnable.getResult() != null) {
                    showNode(runnable.getResult());
                    viewer.expandToLevel(runnable.getResult(), 1);
                }
            } catch (InvocationTargetException e) {
                log.error("Can't expand node", e.getTargetException());
            } catch (InterruptedException e) {
                // skip it
            }
        }
    }

    // Descends from node to the "default" child element (e.g. the active catalog/schema).
    // NOTE(review): name looks like a typo for findActiveNode; kept as-is since it is referenced above
    private DBNNode finaActiveNode(DBRProgressMonitor monitor, DBNNode node) throws DBException {
        List<? extends DBNNode> children = node.getChildren(monitor);
        if (!CommonUtils.isEmpty(children)) {
            if (children.get(0) instanceof DBNContainer) {
                // Use only first folder to search
                return finaActiveNode(monitor, children.get(0));
            }
            for (DBNNode child : children) {
                if (NavigatorUtils.isDefaultElement(child)) {
                    return child;
                }
            }
        }
        return node;
    }

    // The root node is represented in the viewer by its input wrapper, not by the node itself
    Object getViewerObject(DBNNode node) {
        if (((DatabaseNavigatorContent) viewer.getInput()).getRootNode() == node) {
            return viewer.getInput();
        } else {
            return node;
        }
    }

    public void showNode(DBNNode node) {
        viewer.reveal(node);
        viewer.setSelection(new StructuredSelection(node));
    }

    public void reloadTree(final DBNNode rootNode) {
        DatabaseNavigatorTree.this.viewer.setInput(new DatabaseNavigatorContent(rootNode));
    }

    /**
     * Explorer-style rename trigger: a second single click on an already-selected item schedules an
     * inline rename after a 1s delay; a double click cancels it. Fields are volatile because the
     * scheduled RenameJob reads them from a non-UI thread.
     */
    private class TreeSelectionAdapter implements MouseListener {

        private volatile TreeItem curSelection;
        private volatile RenameJob renameJob = new RenameJob();
        // NOTE(review): appears unused in this file - candidate for removal, confirm no reflective use
        private volatile boolean doubleClick = false;

        @Override
        public synchronized void mouseDoubleClick(MouseEvent e) {
            // A double click means "open", not "rename": drop the pending rename
            curSelection = null;
            renameJob.canceled = true;
        }

        @Override
        public void mouseDown(MouseEvent e) {
        }

        @Override
        public void mouseUp(MouseEvent e) {
            if ((e.stateMask & SWT.BUTTON1) == 0) {
                curSelection = null;
                return;
            }
            changeSelection(e);
        }

        public void changeSelection(MouseEvent e) {
            disposeOldEditor();
            final TreeItem newSelection = viewer.getTree().getItem(new Point(e.x, e.y));
            if (newSelection == null) {
                //curSelection = null;
                return;
            }

            // Only real nodes with an enabled rename command participate in click-to-rename
            if (!(newSelection.getData() instanceof DBNNode) ||
                !(ActionUtils.isCommandEnabled(IWorkbenchCommandConstants.FILE_RENAME, DBeaverUI.getActiveWorkbenchWindow().getActivePage().getActivePart()))) {
                curSelection = null;
                return;
            }
            // Second click on the same item: schedule the delayed rename
            if (curSelection != null && curSelection == newSelection) {
                renameJob.schedule(1000);
            }
            curSelection = newSelection;
        }

        private class RenameJob extends AbstractUIJob {
            private volatile boolean canceled = false;
            public RenameJob() {
                super("Rename ");
            }

            @Override
            protected IStatus runInUIThread(DBRProgressMonitor monitor) {
                try {
                    // Only rename when the tree still has focus and nothing cancelled us meanwhile
                    if (!viewer.getTree().isDisposed() && viewer.getTree().isFocusControl() &&
                        curSelection != null && !canceled) {
                        getDisplay().asyncExec(new Runnable() {
                            @Override
                            public void run() {
                                if (curSelection != null) {
                                    renameItem(curSelection);
                                }
                            }
                        });
                    }
                } finally {
                    canceled = false;
                }
                return Status.OK_STATUS;
            }
        }
    }

    // Opens an inline Text editor over the item; Enter commits the rename, Esc/focus-loss cancels
    private void renameItem(final TreeItem item) {
        // Clean up any previous editor control
        disposeOldEditor();
        if (item.isDisposed()) {
            return;
        }
        final DBNNode node = (DBNNode) item.getData();

        Text text = new Text(viewer.getTree(), SWT.BORDER);
        text.setText(node.getNodeName());
        text.selectAll();
        text.setFocus();
        text.addFocusListener(new FocusAdapter() {
            @Override
            public void focusLost(FocusEvent e) {
                disposeOldEditor();
            }
        });
        text.addKeyListener(new KeyAdapter() {
            @Override
            public void keyPressed(KeyEvent e) {
                if (e.keyCode == SWT.CR) {
                    Text text = (Text) treeEditor.getEditor();
                    final String newName = text.getText();
                    disposeOldEditor();
                    viewer.getTree().setFocus();
                    if (!CommonUtils.isEmpty(newName) && !newName.equals(node.getNodeName())) {
                        NavigatorHandlerObjectRename.renameNode(DBeaverUI.getActiveWorkbenchWindow(), node, newName);
                    }
                } else if (e.keyCode == SWT.ESC) {
                    disposeOldEditor();
                    viewer.getTree().setFocus();
                }
            }
        });
        // Size the editor to the item but never let it overflow the tree's client area
        final Rectangle itemBounds = item.getBounds(0);
        final Rectangle treeBounds = viewer.getTree().getBounds();
        treeEditor.minimumWidth = Math.max(itemBounds.width, 50);
        treeEditor.minimumWidth = Math.min(treeEditor.minimumWidth, treeBounds.width - (itemBounds.x - treeBounds.x) - item.getImageBounds(0).width - 4);

        treeEditor.setEditor(text, item, 0);
    }

    private void disposeOldEditor() {
        Control oldEditor = treeEditor.getEditor();
        if (oldEditor != null) oldEditor.dispose();
    }
}
// $Id: STABLE.java,v 1.11 2004/04/28 20:19:17 belaban Exp $

package org.jgroups.protocols.pbcast;

import org.jgroups.*;
import org.jgroups.stack.Protocol;
import org.jgroups.util.Promise;
import org.jgroups.util.TimeScheduler;
import org.jgroups.util.Util;

import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.Properties;
import java.util.Vector;


/**
 * Computes the broadcast messages that are stable, i.e. have been received by all members. Sends
 * STABLE events up the stack when this is the case. This allows NAKACK to garbage collect messages that
 * have been seen by all members.<p>
 * Works as follows: periodically we mcast our highest seqnos (seen for each member) to the group.
 * A stability vector, which maintains the highest seqno for each member and initially contains no data,
 * is updated when such a message is received. The entry for a member P is computed set to
 * min(entry[P], digest[P]). When messages from all members have been received, a stability
 * message is mcast, which causes all members to send a STABLE event up the stack (triggering garbage collection
 * in the NAKACK layer).<p>
 * The stable task now terminates after max_num_gossips if no messages or view changes have been sent or received
 * in the meantime. It will resume when messages are received. This effectively suspends sending superfluous
 * STABLE messages in the face of no activity.<br/>
 * New: when <code>max_bytes</code> is exceeded (unless disabled by setting it to 0),
 * a STABLE task will be started (unless it is already running).
 * @author Bela Ban
 */
public class STABLE extends Protocol {
    Address local_addr=null;
    Vector mbrs=new Vector();
    Digest digest=new Digest();             // keeps track of the highest seqnos from all members
    Promise digest_promise=new Promise();   // for fetching digest (from NAKACK layer)
    Vector heard_from=new Vector();         // keeps track of who we already heard from (STABLE_GOSSIP msgs)
    long digest_timeout=60000;              // time to wait until digest is received (from NAKACK)

    /** Sends a STABLE gossip every 20 seconds on average. 0 disables gossipping of STABLE messages */
    long desired_avg_gossip=20000;

    /** delay before we send STABILITY msg (give others a change to send first). This should be set to a very
     * small number (> 0 !) if <code>max_bytes</code> is used */
    long stability_delay=6000;
    StabilitySendTask stability_task=null;
    Object stability_mutex=new Object();    // to synchronize on stability_task
    StableTask stable_task=null;            // bcasts periodic STABLE message (added to timer below)
    Object stable_task_mutex=new Object();  // to sync on stable_task
    TimeScheduler timer=null;               // to send periodic STABLE msgs (and STABILITY messages)
    int max_gossip_runs=3;                  // max. number of times the StableTask runs before terminating
    int num_gossip_runs=3;                  // this number is decremented (max_gossip_runs doesn't change)
    static final String name="STABLE";

    /** Total amount of bytes from incoming messages (default = 0 = disabled). When exceeded, a STABLE
     * message will be broadcast and <code>num_bytes_received</code> reset to 0 . If this is > 0, then ideally
     * <code>stability_delay</code> should be set to a low number as well */
    long max_bytes=0;

    /** The total number of bytes received from unicast and multicast messages */
    long num_bytes_received=0;

    /** When true, don't take part in garbage collection protocol: neither send STABLE messages nor
     * handle STABILITY messages */
    boolean suspended=false;

    /** Max time we should hold off on message garbage collection. This is a second line of defense in case
     * we get a SUSPEND_STABLE, but forget to send a corresponding RESUME_STABLE (which should never happen !)
     * The consequence of a missing RESUME_STABLE would be that the group doesn't garbage collect stable
     * messages anymore, eventually, with a lot of traffic, every member would accumulate messages and run
     * out of memory ! */
    long max_suspend_time=600000;

    SuspendTask suspend_task=null;


    public String getName() {
        return name;
    }

    public Vector requiredDownServices() {
        Vector retval=new Vector();
        retval.addElement(new Integer(Event.GET_DIGEST_STABLE));  // NAKACK layer
        return retval;
    }

    /** Parses and removes the properties this protocol understands; returns false on unknown props. */
    public boolean setProperties(Properties props) {
        String str;

        super.setProperties(props);
        str=props.getProperty("digest_timeout");
        if(str != null) {
            digest_timeout=Long.parseLong(str);
            props.remove("digest_timeout");
        }

        str=props.getProperty("desired_avg_gossip");
        if(str != null) {
            desired_avg_gossip=new Long(str).longValue();
            props.remove("desired_avg_gossip");
        }

        str=props.getProperty("stability_delay");
        if(str != null) {
            stability_delay=new Long(str).longValue();
            props.remove("stability_delay");
        }

        str=props.getProperty("max_gossip_runs");
        if(str != null) {
            max_gossip_runs=new Integer(str).intValue();
            num_gossip_runs=max_gossip_runs;
            props.remove("max_gossip_runs");
        }

        str=props.getProperty("max_bytes");
        if(str != null) {
            max_bytes=new Long(str).longValue();
            props.remove("max_bytes");
        }

        str=props.getProperty("max_suspend_time");
        if(str != null) {
            max_suspend_time=new Long(str).longValue();
            props.remove("max_suspend_time");
        }

        if(props.size() > 0) {
            System.err.println("STABLE.setProperties(): these properties are not recognized:");
            props.list(System.out);
            return false;
        }
        return true;
    }

    /** Stops participating in garbage collection; (re)arms the watchdog that auto-resumes. */
    void suspend() {
        if(!suspended) {
            suspended=true;
            if(log.isDebugEnabled())
                log.debug("suspending message garbage collection");
        }
        // re-arm the safety-net task on every SUSPEND_STABLE
        if(suspend_task == null || suspend_task.cancelled()) {
            suspend_task=new SuspendTask();
            timer.add(suspend_task);
        }
    }

    /** Resumes garbage collection and cancels the watchdog task. */
    void resume() {
        if(suspended) {
            suspended=false;
            if(log.isDebugEnabled())
                log.debug("resuming message garbage collection");
        }
        if(suspend_task != null) {
            suspend_task.stop();
            suspend_task=null;
        }
    }

    public void start() throws Exception {
        if(stack != null && stack.timer != null)
            timer=stack.timer;
        else
            throw new Exception("STABLE.up(): timer cannot be retrieved from protocol stack");
    }

    public void stop() {
        stopStableTask();
    }

    public void up(Event evt) {
        Message msg;
        StableHeader hdr;
        Header obj;
        int type=evt.getType();

        switch(evt.getType()) {

        case Event.MSG:
            msg=(Message)evt.getArg();

            if(max_bytes > 0) {  // message counting is enabled
                long size=Math.max(msg.getLength(), 24);
                num_bytes_received+=size;
                if(log.isTraceEnabled())
                    log.trace("received message of " + size + " bytes, total bytes received=" + num_bytes_received);
                if(num_bytes_received >= max_bytes) {
                    if(log.isDebugEnabled())
                        log.debug("max_bytes has been exceeded (max_bytes=" + max_bytes +
                                  ", number of bytes received=" + num_bytes_received + "): sending STABLE message");
                    // send on a separate thread: sendStableMessage() blocks fetching the digest
                    new Thread() {
                        public void run() {
                            initialize();
                            sendStableMessage();
                        }
                    }.start();
                    num_bytes_received=0;
                }
            }

            obj=msg.getHeader(getName());
            if(obj == null || !(obj instanceof StableHeader))
                break;
            hdr=(StableHeader)msg.removeHeader(getName());
            switch(hdr.type) {
            case StableHeader.STABLE_GOSSIP:
                handleStableGossip(msg.getSrc(), hdr.digest);
                break;
            case StableHeader.STABILITY:
                handleStabilityMessage(hdr.digest);
                break;
            default:
                if(log.isErrorEnabled()) log.error("StableHeader type " + hdr.type + " not known");
            }
            return;  // don't pass STABLE or STABILITY messages up the stack

        case Event.SET_LOCAL_ADDRESS:
            local_addr=(Address)evt.getArg();
            break;
        }

        passUp(evt);
        if(desired_avg_gossip > 0) {
            if(type == Event.VIEW_CHANGE || type == Event.MSG)
                startStableTask();  // only start if not yet running
        }
    }

    /**
     * We need to receive this event out-of-band, otherwise we would block. The use case is
     * <ol>
     * <li>To send a STABLE_GOSSIP message we need the digest (from NAKACK below)
     * <li>We send a GET_DIGEST_STABLE event down <em>from the up() method</em>
     * <li>NAKACK sends the GET_DIGEST_STABLE_OK backup. <em>However, we may have other messages in the
     * up queue ahead of this event !</em> Therefore the event cannot be processed until all messages ahead of
     * the event have been processed. These can't be processed, however, because the up() call waits for
     * GET_DIGEST_STABLE_OK ! The up() call would always run into the timeout.<be/>
     * Having out-of-band reception of just this one event eliminates the problem.
     * </ol>
     * @param evt
     */
    protected void receiveUpEvent(Event evt) {
        if(evt.getType() == Event.GET_DIGEST_STABLE_OK) {
            digest_promise.setResult(evt.getArg());
            return;
        }
        super.receiveUpEvent(evt);
    }

    public void down(Event evt) {
        int type=evt.getType();

        switch(evt.getType()) {
        case Event.VIEW_CHANGE:
            View v=(View)evt.getArg();
            Vector tmp=v.getMembers();
            mbrs.removeAllElements();
            mbrs.addAll(tmp);
            heard_from.retainAll(tmp);  // removes all elements from heard_from that are not in new view
            stopStableTask();
            break;

        case Event.SUSPEND_STABLE:
            stopStableTask();
            suspend();
            break;

        case Event.RESUME_STABLE:
            resume();
            break;
        }

        if(desired_avg_gossip > 0) {
            if(type == Event.VIEW_CHANGE || type == Event.MSG)
                startStableTask();  // only start if not yet running
        }
        passDown(evt);
    }

    /** Resets the local stability digest to "unknown" (-1) for every current member. */
    void initialize() {
        synchronized(digest) {
            digest.reset(mbrs.size());
            for(int i=0; i < mbrs.size(); i++)
                digest.add((Address)mbrs.elementAt(i), -1, -1);
            heard_from.removeAllElements();
            heard_from.addAll(mbrs);
        }
    }

    void startStableTask() {
        num_gossip_runs=max_gossip_runs;  // fresh activity: grant the task a full run budget again
        synchronized(stable_task_mutex) {
            if(stable_task != null && !stable_task.cancelled()) {
                return;  // already running
            }
            stable_task=new StableTask();
            timer.add(stable_task, true);  // fixed-rate scheduling
        }
        if(log.isDebugEnabled())
            log.debug("stable task started; num_gossip_runs=" + num_gossip_runs +
                      ", max_gossip_runs=" + max_gossip_runs);
    }

    void stopStableTask() {
        synchronized(stable_task_mutex) {
            if(stable_task != null) {
                stable_task.stop();
                stable_task=null;
            }
        }
    }

    /**
     Digest d contains (a) the highest seqnos <em>deliverable</em> for each sender and (b) the highest seqnos
     <em>seen</em> for each member. (Difference: with 1,2,4,5, the highest seqno seen is 5, whereas the highest
     seqno deliverable is 2). The minimum of all highest seqnos deliverable will be taken to send a stability
     message, which results in garbage collection of messages lower than the ones in the stability vector. The
     maximum of all seqnos will be taken to trigger possible retransmission of last missing seqno (see DESIGN
     for details).
     */
    void handleStableGossip(Address sender, Digest d) {
        Address mbr;
        long highest_seqno, my_highest_seqno;
        long highest_seen_seqno, my_highest_seen_seqno;

        if(d == null || sender == null) {
            if(log.isErrorEnabled()) log.error("digest or sender is null");
            return;
        }

        if(suspended) {
            if(log.isDebugEnabled()) {
                log.debug("STABLE message will not be handled as suspened=" + suspended);
            }
            return;
        }

        if(log.isDebugEnabled()) log.debug("received digest " + printStabilityDigest(d) + " from " + sender);

        if(!heard_from.contains(sender)) {  // already received gossip from sender; discard it
            if(log.isDebugEnabled()) log.debug("already received gossip from " + sender);
            return;
        }

        // we won't handle the gossip d, if d's members don't match the membership in my own digest,
        // this is part of the fix for the NAKACK problem (bugs #943480 and #938584)
        if(!this.digest.sameSenders(d)) {
            if(log.isDebugEnabled()) {
                log.debug("received digest from " + sender + " (digest=" + d +
                          ") which does not match my own digest ("+ this.digest +
                          "): ignoring digest and re-initializing own digest");
            }
            initialize();
            return;
        }

        for(int i=0; i < d.size(); i++) {
            mbr=d.senderAt(i);
            highest_seqno=d.highSeqnoAt(i);
            highest_seen_seqno=d.highSeqnoSeenAt(i);
            if(digest.getIndex(mbr) == -1) {
                if(log.isDebugEnabled())
                    log.debug("sender " + mbr + " not found in stability vector");
                continue;
            }

            // compute the minimum of the highest seqnos deliverable (for garbage collection)
            my_highest_seqno=digest.highSeqnoAt(mbr);
            if(my_highest_seqno < 0) {
                if(highest_seqno >= 0)
                    digest.setHighSeqnoAt(mbr, highest_seqno);
            }
            else {
                digest.setHighSeqnoAt(mbr, Math.min(my_highest_seqno, highest_seqno));
            }

            // compute the maximum of the highest seqnos seen (for retransmission of last missing message)
            my_highest_seen_seqno=digest.highSeqnoSeenAt(mbr);
            if(my_highest_seen_seqno < 0) {
                if(highest_seen_seqno >= 0)
                    digest.setHighSeqnoSeenAt(mbr, highest_seen_seqno);
            }
            else {
                digest.setHighSeqnoSeenAt(mbr, Math.max(my_highest_seen_seqno, highest_seen_seqno));
            }
        }

        heard_from.removeElement(sender);
        if(heard_from.size() == 0) {
            if(log.isDebugEnabled()) log.debug("sending stability msg " + printStabilityDigest(digest));
            sendStabilityMessage(digest.copy());
            initialize();
        }
    }

    /**
     * Bcasts a STABLE message to all group members. Message contains highest seqnos of all members
     * seen by this member. Highest seqnos are retrieved from the NAKACK layer above.
     */
    synchronized void sendStableMessage() {
        Digest d=null;
        Message msg=new Message();  // mcast message
        StableHeader hdr;

        d=getDigest();
        if(d != null && d.size() > 0) {
            if(log.isDebugEnabled())
                log.debug("mcasting digest " + d + " (num_gossip_runs=" + num_gossip_runs +
                          ", max_gossip_runs=" + max_gossip_runs + ")");
            hdr=new StableHeader(StableHeader.STABLE_GOSSIP, d);
            msg.putHeader(getName(), hdr);
            passDown(new Event(Event.MSG, msg));
        }
    }

    /** Fetches the current digest from the NAKACK layer below (blocks up to digest_timeout msecs). */
    Digest getDigest() {
        Digest ret=null;
        passDown(new Event(Event.GET_DIGEST_STABLE));
        ret=(Digest)digest_promise.getResult(digest_timeout);
        if(ret == null) {
            if(log.isErrorEnabled())
                log.error("digest could not be fetched from below " + "(timeout was " + digest_timeout + " msecs)");
        }
        return ret;
    }

    /**
     Schedules a stability message to be mcast after a random number of milliseconds (range 1-5 secs).
     The reason for waiting a random amount of time is that, in the worst case, all members receive a
     STABLE_GOSSIP message from the last outstanding member at the same time and would therefore mcast the
     STABILITY message at the same time too. To avoid this, each member waits random N msecs. If, before N
     elapses, some other member sent the STABILITY message, we just cancel our own message. If, during
     waiting for N msecs to send STABILITY message S1, another STABILITY message S2 is to be sent, we just
     discard S2.
     @param tmp A copy of te stability digest, so we don't need to copy it again
     */
    void sendStabilityMessage(Digest tmp) {
        long delay;

        if(timer == null) {
            if(log.isErrorEnabled())
                log.error("timer is null, cannot schedule stability message to be sent");
            timer=stack != null ? stack.timer : null;
            return;
        }

        // give other members a chance to mcast STABILITY message. if we receive STABILITY by the end of
        // our random sleep, we will not send the STABILITY msg. this prevents that all mbrs mcast a
        // STABILITY msg at the same time
        delay=Util.random(stability_delay);
        if(log.isDebugEnabled()) log.debug("stability_task=" + stability_task + ", delay is " + delay);
        synchronized(stability_mutex) {
            if(stability_task != null && !stability_task.cancelled())  // schedule only if not yet running
                return;
            stability_task=new StabilitySendTask(this, tmp, delay);
            timer.add(stability_task, true);  // run it 1x after delay msecs. use fixed-rate scheduling
        }
    }

    void handleStabilityMessage(Digest d) {
        if(d == null) {
            if(log.isErrorEnabled()) log.error("stability vector is null");
            return;
        }

        if(suspended) {
            if(log.isDebugEnabled()) {
                log.debug("STABILITY message will not be handled as suspened=" + suspended);
            }
            return;
        }

        if(log.isDebugEnabled()) log.debug("stability vector is " + d.printHighSeqnos());

        synchronized(stability_mutex) {
            if(stability_task != null) {
                if(log.isDebugEnabled())
                    log.debug("cancelling stability task (running=" + !stability_task.cancelled() + ")");
                stability_task.stop();
                stability_task=null;
            }
        }

        // we won't handle the gossip d, if d's members don't match the membership in my own digest,
        // this is part of the fix for the NAKACK problem (bugs #943480 and #938584)
        if(!this.digest.sameSenders(d)) {
            if(log.isDebugEnabled()) {
                log.debug("received digest (digest=" + d + ") which does not match my own digest ("+
                          this.digest + "): ignoring digest and re-initializing own digest");
            }
            initialize();
            return;
        }

        // pass STABLE event down the stack, so NAKACK can garbage collect old messages
        passDown(new Event(Event.STABLE, d));
    }

    String printStabilityDigest(Digest d) {
        StringBuffer sb=new StringBuffer();
        boolean first=true;
        if(d != null) {
            for(int i=0; i < d.size(); i++) {
                if(!first)
                    sb.append(", ");
                else
                    first=false;
                sb.append(d.senderAt(i) + "#" + d.highSeqnoAt(i) + " (" + d.highSeqnoSeenAt(i) + ")");
            }
        }
        return sb.toString();
    }


    /* ------------------------------------ Headers and inner classes ------------------------------------ */

    public static class StableHeader extends Header {
        static final int STABLE_GOSSIP=1;
        static final int STABILITY=2;

        int type=0;
        // Digest digest=new Digest();  // used for both STABLE_GOSSIP and STABILITY message
        Digest digest=null;  // changed by Bela April 4 2004

        public StableHeader() {
        }  // used for externalizable

        StableHeader(int type, Digest digest) {
            this.type=type;
            this.digest=digest;
        }

        static String type2String(int t) {
            switch(t) {
            case STABLE_GOSSIP:
                return "STABLE_GOSSIP";
            case STABILITY:
                return "STABILITY";
            default:
                return "<unknown>";
            }
        }

        public String toString() {
            StringBuffer sb=new StringBuffer();
            sb.append("[");
            sb.append(type2String(type));
            sb.append("]: digest is ");
            sb.append(digest);
            return sb.toString();
        }

        public void writeExternal(ObjectOutput out) throws IOException {
            out.writeInt(type);
            digest.writeExternal(out);
        }

        public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
            type=in.readInt();
            digest=new Digest();
            digest.readExternal(in);
        }
    }


    /**
     Mcast periodic STABLE message. Interval between sends varies. Terminates after num_gossip_runs is 0.
     However, UP or DOWN messages will reset num_gossip_runs to max_gossip_runs. This has the effect that the
     stable_send task terminates only after a period of time within which no messages were either sent or received
     */
    private class StableTask implements TimeScheduler.Task {
        boolean stopped=false;

        public void reset() {
            stopped=false;
        }

        public void stop() {
            stopped=true;
        }

        public boolean cancelled() {
            return stopped;
        }

        public long nextInterval() {
            long interval=computeSleepTime();
            if(interval <= 0)
                return 10000;
            else
                return interval;
        }

        public void run() {
            if(suspended) {
                log.debug("stable task will not run as suspended=" + suspended);
                return;
            }
            initialize();
            sendStableMessage();
            num_gossip_runs--;  // FIX: decrement was garbled/missing; without it the task never terminated
            if(num_gossip_runs <= 0) {
                if(log.isDebugEnabled())
                    log.debug("stable task terminating (num_gossip_runs=" + num_gossip_runs +
                              ", max_gossip_runs=" + max_gossip_runs + ")");
                stop();
            }
        }

        long computeSleepTime() {
            return getRandom((mbrs.size() * desired_avg_gossip * 2));
        }

        long getRandom(long range) {
            return (long)((Math.random() * range) % range);
        }
    }


    /**
     * Multicasts a STABILITY message.
     */
    private class StabilitySendTask implements TimeScheduler.Task {
        Digest d=null;
        Protocol stable_prot=null;
        boolean stopped=false;
        long delay=2000;

        public StabilitySendTask(Protocol stable_prot, Digest d, long delay) {
            this.stable_prot=stable_prot;
            this.d=d;
            this.delay=delay;
        }

        public void stop() {
            stopped=true;
        }

        public boolean cancelled() {
            return stopped;
        }

        /** wait a random number of msecs (to give other a chance to send the STABILITY msg first) */
        public long nextInterval() {
            return delay;
        }

        public void run() {
            Message msg;
            StableHeader hdr;

            if(suspended) {
                if(log.isDebugEnabled()) {
                    log.debug("STABILITY message will not be sent as suspened=" + suspended);
                }
                stopped=true;
                return;
            }

            if(d != null && !stopped) {
                msg=new Message();
                hdr=new StableHeader(StableHeader.STABILITY, d);
                msg.putHeader(STABLE.name, hdr);
                stable_prot.passDown(new Event(Event.MSG, msg));
                d=null;
            }
            stopped=true;  // run only once
        }
    }


    /** Watchdog: forcibly clears the suspended flag after max_suspend_time, in case RESUME_STABLE was lost. */
    private class SuspendTask implements TimeScheduler.Task {
        boolean running=true;

        SuspendTask() {
        }

        void stop() {
            running=false;
        }

        public boolean cancelled() {
            return running == false;
        }

        public long nextInterval() {
            return max_suspend_time;
        }

        public void run() {
            if(suspended) {
                suspended=false;
                // FIX: message said "to true" while the code resets the flag to false
                log.warn("Reset suspended flag to false, this should never happen: " +
                         "check why RESUME_STABLE has not been received");
            }
            stop();
        }
    }
}
package org.jkiss.dbeaver.ui.views.navigator.database;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.jface.viewers.*;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.TreeEditor;
import org.eclipse.swt.events.*;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.layout.FillLayout;
import org.eclipse.swt.widgets.*;
import org.eclipse.ui.IWorkbenchCommandConstants;
import org.jkiss.code.NotNull;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.DBeaverPreferences;
import org.jkiss.dbeaver.core.DBeaverCore;
import org.jkiss.dbeaver.core.DBeaverUI;
import org.jkiss.dbeaver.model.navigator.*;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.runtime.DBRRunnableWithResult;
import org.jkiss.dbeaver.runtime.AbstractUIJob;
import org.jkiss.dbeaver.ui.ActionUtils;
import org.jkiss.dbeaver.ui.NavigatorUtils;
import org.jkiss.dbeaver.ui.UIUtils;
import org.jkiss.dbeaver.ui.actions.navigator.NavigatorHandlerObjectRename;
import org.jkiss.utils.CommonUtils;

import java.lang.reflect.InvocationTargetException;
import java.util.List;

/**
 * Tree control over the database navigator model. Wraps a {@link TreeViewer}
 * (or a {@link CheckboxTreeViewer} when the SWT.CHECK style bit is set),
 * listens to {@link DBNModel} events to keep the tree in sync, and supports
 * in-place renaming of nodes via a {@link TreeEditor}.
 */
public class DatabaseNavigatorTree extends Composite implements IDBNListener {
    static final Log log = LogFactory.getLog(DatabaseNavigatorTree.class);

    private TreeViewer viewer;
    private DBNModel model;
    private TreeEditor treeEditor;
    // true when the tree was created with SWT.CHECK (checkbox tree); disables inline rename
    private boolean checkEnabled;

    public DatabaseNavigatorTree(Composite parent, DBNNode rootNode, int style) {
        this(parent, rootNode, style, false);
    }

    public DatabaseNavigatorTree(Composite parent, DBNNode rootNode, int style, boolean showRoot) {
        super(parent, SWT.NONE);
        this.setLayout(new FillLayout());
        this.model = DBNModel.getInstance();
        this.model.addListener(this);
        // Unsubscribe from model events when the widget dies to avoid leaks
        addDisposeListener(new DisposeListener() {
            @Override
            public void widgetDisposed(DisposeEvent e) {
                if (model != null) {
                    model.removeListener(DatabaseNavigatorTree.this);
                    model = null;
                }
            }
        });

        checkEnabled = (style & SWT.CHECK) != 0;

        // Create tree
        final ISelection defaultSelection = new StructuredSelection(rootNode);
        // TODO: there are problems with this tree when we have a lot of items.
        // TODO: I may set SWT.SINGLE style and it'll solve the problem at least when traversing tree
        // TODO: But we need multiple selection (to copy, export, etc)
        // TODO: need to do something with it
        int treeStyle = SWT.H_SCROLL | SWT.V_SCROLL | style;
        if (checkEnabled) {
            this.viewer = new CheckboxTreeViewer(this, treeStyle);
            ((CheckboxTreeViewer)this.viewer).setCheckStateProvider(new ICheckStateProvider() {
                @Override
                public boolean isChecked(Object element) {
                    return false;
                }

                @Override
                public boolean isGrayed(Object element) {
                    return element instanceof DBNContainer;
                }
            });
        } else {
            this.viewer = new TreeViewer(this, treeStyle) {
                @Override
                public ISelection getSelection() {
                    // Fall back to the root node when nothing is selected
                    ISelection selection = super.getSelection();
                    return selection.isEmpty() ? defaultSelection : selection;
                }
            };
        }
        this.viewer.getTree().setCursor(getDisplay().getSystemCursor(SWT.CURSOR_ARROW));
        this.viewer.setUseHashlookup(true);
        if (rootNode.getParentNode() == null) {
            //this.viewer.setAutoExpandLevel(2);
        }
        this.viewer.setLabelProvider(new DatabaseNavigatorLabelProvider(this.viewer));
        this.viewer.setContentProvider(new DatabaseNavigatorContentProvider(this.viewer, showRoot));
        this.viewer.setInput(new DatabaseNavigatorContent(rootNode));

        initEditor();
    }

    /** Returns the root node of the navigator content currently shown by the viewer. */
    public DBNNode getModel() {
        DatabaseNavigatorContent content = (DatabaseNavigatorContent) this.viewer.getInput();
        return content.getRootNode();
    }

    /** Sets up the inline rename editor; rename is only enabled for non-checkbox trees. */
    private void initEditor() {
        Tree treeControl = this.viewer.getTree();

        treeEditor = new TreeEditor(treeControl);
        treeEditor.horizontalAlignment = SWT.LEFT;
        treeEditor.verticalAlignment = SWT.TOP;
        treeEditor.grabHorizontal = false;
        treeEditor.minimumWidth = 50;

        //treeControl.addSelectionListener(new TreeSelectionAdapter());
        if (!checkEnabled) {
            // Add rename listener only for non CHECK trees
            treeControl.addMouseListener(new TreeSelectionAdapter());
        }
    }

    @NotNull
    public TreeViewer getViewer() {
        return viewer;
    }

    /**
     * Model event callback: refreshes/updates the corresponding viewer elements
     * on the UI thread depending on the kind of model change.
     */
    @Override
    public void nodeChanged(final DBNEvent event) {
        switch (event.getAction()) {
            case ADD:
            case REMOVE:
                // Structure changed under the parent: refresh the parent subtree
                final DBNNode parentNode = event.getNode().getParentNode();
                if (parentNode != null) {
                    UIUtils.runInUI(null, new Runnable() {
                        @Override
                        public void run() {
                            if (!viewer.getControl().isDisposed()) {
                                if (!parentNode.isDisposed()) {
                                    viewer.refresh(getViewerObject(parentNode));
                                }
                            }
                        }
                    });
                }
                break;
            case UPDATE:
                UIUtils.runInUI(null, new Runnable() {
                    @Override
                    public void run() {
                        if (!viewer.getControl().isDisposed() && !viewer.isBusy()) {
                            if (event.getNode() != null) {
                                switch (event.getNodeChange()) {
                                    case LOAD:
                                        viewer.refresh(getViewerObject(event.getNode()));
                                        expandNodeOnLoad(event.getNode());
                                        break;
                                    case UNLOAD:
                                        viewer.collapseToLevel(event.getNode(), -1);
                                        viewer.refresh(getViewerObject(event.getNode()));
                                        break;
                                    case REFRESH:
                                        // Only labels/images changed - no structural refresh needed
                                        viewer.update(getViewerObject(event.getNode()), null);
                                        break;
                                    case LOCK:
                                    case UNLOCK:
                                    case STRUCT_REFRESH:
                                        viewer.refresh(getViewerObject(event.getNode()));
                                        break;
                                }
                            } else {
                                log.warn("Null node object");
                            }
                        }
                    }
                });
                break;
            default:
                break;
        }
    }

    /**
     * When a data source node finishes loading and the "expand on connect" preference
     * is set, locates the default child node (via a progress service) and expands to it.
     */
    private void expandNodeOnLoad(final DBNNode node) {
        if (node instanceof DBNDataSource &&
            DBeaverCore.getGlobalPreferenceStore().getBoolean(DBeaverPreferences.NAVIGATOR_EXPAND_ON_CONNECT))
        {
            try {
                DBRRunnableWithResult<DBNNode> runnable = new DBRRunnableWithResult<DBNNode>() {
                    @Override
                    public void run(DBRProgressMonitor monitor) throws InvocationTargetException, InterruptedException {
                        try {
                            result = findActiveNode(monitor, node);
                        } catch (DBException e) {
                            throw new InvocationTargetException(e);
                        }
                    }
                };
                DBeaverUI.runInProgressService(runnable);
                if (runnable.getResult() != null) {
                    showNode(runnable.getResult());
                    viewer.expandToLevel(runnable.getResult(), 1);
                }
            } catch (InvocationTargetException e) {
                log.error("Can't expand node", e.getTargetException());
            } catch (InterruptedException e) {
                // skip it
            }
        }
    }

    /**
     * Descends from {@code node} looking for the "default" element to reveal.
     * FIX: renamed from the typo {@code finaActiveNode}; the method is private and
     * both call sites are inside this class.
     */
    private DBNNode findActiveNode(DBRProgressMonitor monitor, DBNNode node) throws DBException {
        List<? extends DBNNode> children = node.getChildren(monitor);
        if (!CommonUtils.isEmpty(children)) {
            if (children.get(0) instanceof DBNContainer) {
                // Use only first folder to search
                return findActiveNode(monitor, children.get(0));
            }
            for (DBNNode child : children) {
                if (NavigatorUtils.isDefaultElement(child)) {
                    return child;
                }
            }
        }
        return node;
    }

    /** Maps a model node to the object actually held by the viewer (root node is wrapped by the input). */
    Object getViewerObject(DBNNode node) {
        if (((DatabaseNavigatorContent) viewer.getInput()).getRootNode() == node) {
            return viewer.getInput();
        } else {
            return node;
        }
    }

    public void showNode(DBNNode node) {
        viewer.reveal(node);
        viewer.setSelection(new StructuredSelection(node));
    }

    public void reloadTree(final DBNNode rootNode) {
        DatabaseNavigatorTree.this.viewer.setInput(new DatabaseNavigatorContent(rootNode));
    }

    /**
     * Mouse listener implementing "slow double click to rename": a second click on an
     * already-selected item (after a delay) opens the inline rename editor; a real
     * double-click cancels the pending rename.
     */
    private class TreeSelectionAdapter implements MouseListener {

        private volatile TreeItem curSelection;
        private volatile RenameJob renameJob = new RenameJob();
        // FIX: removed unused field 'doubleClick' (written once, never read)

        @Override
        public synchronized void mouseDoubleClick(MouseEvent e) {
            curSelection = null;
            renameJob.canceled = true;
        }

        @Override
        public void mouseDown(MouseEvent e) {
        }

        @Override
        public void mouseUp(MouseEvent e) {
            if ((e.stateMask & SWT.BUTTON1) == 0) {
                curSelection = null;
                return;
            }
            changeSelection(e);
        }

        public void changeSelection(MouseEvent e) {
            disposeOldEditor();
            final TreeItem newSelection = viewer.getTree().getItem(new Point(e.x, e.y));
            if (newSelection == null) {
                //curSelection = null;
                return;
            }
            // Only DBN nodes with an enabled rename command may be edited inline
            if (!(newSelection.getData() instanceof DBNNode) ||
                !(ActionUtils.isCommandEnabled(IWorkbenchCommandConstants.FILE_RENAME,
                    DBeaverUI.getActiveWorkbenchWindow().getActivePage().getActivePart()))) {
                curSelection = null;
                return;
            }
            if (curSelection != null && curSelection == newSelection) {
                // Second click on the same item: schedule delayed rename
                renameJob.schedule(1000);
            }
            curSelection = newSelection;
        }

        private class RenameJob extends AbstractUIJob {
            private volatile boolean canceled = false;

            public RenameJob() {
                super("Rename ");
            }

            @Override
            protected IStatus runInUIThread(DBRProgressMonitor monitor) {
                try {
                    if (!viewer.getTree().isDisposed() && viewer.getTree().isFocusControl() &&
                        curSelection != null && !canceled) {
                        getDisplay().asyncExec(new Runnable() {
                            @Override
                            public void run() {
                                if (curSelection != null) {
                                    renameItem(curSelection);
                                }
                            }
                        });
                    }
                } finally {
                    canceled = false;
                }
                return Status.OK_STATUS;
            }
        }
    }

    /**
     * Opens an inline text editor over the given tree item. ENTER commits the rename
     * (delegated to {@link NavigatorHandlerObjectRename}), ESC or focus loss cancels.
     */
    private void renameItem(final TreeItem item) {
        // Clean up any previous editor control
        disposeOldEditor();
        if (item.isDisposed()) {
            return;
        }
        final DBNNode node = (DBNNode) item.getData();

        Text text = new Text(viewer.getTree(), SWT.BORDER);
        text.setText(node.getNodeName());
        text.selectAll();
        text.setFocus();
        text.addFocusListener(new FocusAdapter() {
            @Override
            public void focusLost(FocusEvent e) {
                disposeOldEditor();
            }
        });
        text.addKeyListener(new KeyAdapter() {
            @Override
            public void keyPressed(KeyEvent e) {
                if (e.keyCode == SWT.CR) {
                    Text text = (Text) treeEditor.getEditor();
                    final String newName = text.getText();
                    disposeOldEditor();
                    viewer.getTree().setFocus();
                    if (!CommonUtils.isEmpty(newName) && !newName.equals(node.getNodeName())) {
                        NavigatorHandlerObjectRename.renameNode(DBeaverUI.getActiveWorkbenchWindow(), node, newName);
                    }
                } else if (e.keyCode == SWT.ESC) {
                    disposeOldEditor();
                    viewer.getTree().setFocus();
                }
            }
        });
        final Rectangle itemBounds = item.getBounds(0);
        final Rectangle treeBounds = viewer.getTree().getBounds();
        treeEditor.minimumWidth = Math.max(itemBounds.width, 50);
        // Clamp the editor width so it never overflows the visible tree area
        treeEditor.minimumWidth = Math.min(treeEditor.minimumWidth,
            treeBounds.width - (itemBounds.x - treeBounds.x) - item.getImageBounds(0).width - 4);
        treeEditor.setEditor(text, item, 0);
    }

    private void disposeOldEditor() {
        Control oldEditor = treeEditor.getEditor();
        if (oldEditor != null) oldEditor.dispose();
    }
}
package org.jgroups.protocols.pbcast;

import org.jgroups.*;
import org.jgroups.annotations.GuardedBy;
import org.jgroups.stack.Protocol;
import org.jgroups.util.*;

import java.io.*;
import java.util.*;
import java.util.List;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

/**
 * Computes the broadcast messages that are stable; i.e., have been received by all members. Sends
 * STABLE events up the stack when this is the case. This allows NAKACK to garbage collect messages that
 * have been seen by all members.<p>
 * Works as follows: periodically we mcast our highest seqnos (seen for each member) to the group.
 * A stability vector, which maintains the highest seqno for each member and initially contains no data,
 * is updated when such a message is received. The entry for a member P is set to
 * min(entry[P], digest[P]). When messages from all members have been received, a stability
 * message is mcast, which causes all members to send a STABLE event up the stack (triggering garbage collection
 * in the NAKACK layer).<p>
 * New: when <code>max_bytes</code> is exceeded (unless disabled by setting it to 0),
 * a STABLE task will be started (unless it is already running).
 * @author Bela Ban
 * @version $Id: STABLE.java,v 1.81 2007/05/31 07:50:27 belaban Exp $
 */
public class STABLE extends Protocol {

    /** Our own address, learned via the SET_LOCAL_ADDRESS event; used only for trace output */
    private Address local_addr=null;

    /** Current group membership, replaced wholesale on every view change */
    private final Vector<Address> mbrs=new Vector<Address>();

    private final MutableDigest digest=new MutableDigest(10); // keeps track of the highest seqnos from all members

    private final MutableDigest latest_local_digest=new MutableDigest(10); // keeps track of the latest digests received from NAKACK

    /** Keeps track of who we already heard from (STABLE_GOSSIP msgs). This is initialized with all members, and we
     * remove the sender when a STABLE message is received. When the list is empty, we send a STABILITY message */
    private final List<Address> votes=new ArrayList<Address>();

    /** Sends a STABLE gossip every 20 seconds on average. 0 disables gossipping of STABLE messages */
    private long desired_avg_gossip=20000;

    /** delay before we send STABILITY msg (give others a chance to send first). This should be set to a very
     * small number (> 0 !) if <code>max_bytes</code> is used */
    private long stability_delay=6000;

    @GuardedBy("stability_lock")
    private Future stability_task_future=null;

    private final Lock stability_lock=new ReentrantLock(); // to synchronize on stability_task

    @GuardedBy("stable_task_lock")
    private Future stable_task_future=null; // bcasts periodic STABLE message (added to timer below)

    private final Lock stable_task_lock=new ReentrantLock(); // to sync on stable_task

    private TimeScheduler timer=null; // to send periodic STABLE msgs (and STABILITY messages)

    /** Header key under which STABLE/STABILITY headers are stored in messages */
    private static final String name="STABLE";

    /** Total amount of bytes from incoming messages (default = 0 = disabled). When exceeded, a STABLE
     * message will be broadcast and <code>num_bytes_received</code> reset to 0. If this is > 0, then ideally
     * <code>stability_delay</code> should be set to a low number as well */
    private long max_bytes=0;

    /** The total number of bytes received from unicast and multicast messages */
    @GuardedBy("received")
    private long num_bytes_received=0;

    private final Lock received=new ReentrantLock();

    /** When true, don't take part in garbage collection protocol: neither send STABLE messages nor
     * handle STABILITY messages.
     * NOTE(review): read from timer threads without synchronization or volatile — presumably a stale
     * read is tolerable here; confirm against the JGroups memory-model conventions */
    private boolean suspended=false;

    /** Set after the first view change; STABLE/STABILITY handling is refused until then */
    private boolean initialized=false;

    @GuardedBy("resume_task_mutex")
    private Future resume_task_future=null;

    private final Object resume_task_mutex=new Object();

    // statistics, exposed via the getters below and reset in resetStats()
    private int num_stable_msgs_sent=0;
    private int num_stable_msgs_received=0;
    private int num_stability_msgs_sent=0;
    private int num_stability_msgs_received=0;

    /** Fallback delay for the resume task when SUSPEND_STABLE carried no (or a non-positive) timeout */
    private static final long MAX_SUSPEND_TIME=200000;


    public String getName() {
        return name;
    }

    public long getDesiredAverageGossip() {
        return desired_avg_gossip;
    }

    public void setDesiredAverageGossip(long gossip_interval) {
        desired_avg_gossip=gossip_interval;
    }

    public long getMaxBytes() {
        return max_bytes;
    }

    public void setMaxBytes(long max_bytes) {
        this.max_bytes=max_bytes;
    }

    // NOTE(review): reads num_bytes_received without acquiring 'received' — monitoring-only accessor
    public long getBytes() {return num_bytes_received;}
    public int getStableSent() {return num_stable_msgs_sent;}
    public int getStableReceived() {return num_stable_msgs_received;}
    public int getStabilitySent() {return num_stability_msgs_sent;}
    public int getStabilityReceived() {return num_stability_msgs_received;}

    public void resetStats() {
        super.resetStats();
        num_stability_msgs_received=num_stability_msgs_sent=num_stable_msgs_sent=num_stable_msgs_received=0;
    }

    /** We need the NAKACK layer below us to answer GET_DIGEST_STABLE events */
    public Vector<Integer> requiredDownServices() {
        Vector<Integer> retval=new Vector<Integer>();
        retval.addElement(Event.GET_DIGEST_STABLE);  // NAKACK layer
        return retval;
    }

    /**
     * Parses and removes this protocol's properties. Deprecated/unsupported keys are logged and
     * dropped; any leftover key is an error.
     * @return false if unrecognized properties remain
     */
    public boolean setProperties(Properties props) {
        String str;

        super.setProperties(props);
        str=props.getProperty("digest_timeout");
        if(str != null) {
            props.remove("digest_timeout");
            log.error("digest_timeout has been deprecated; it will be ignored");
        }

        str=props.getProperty("desired_avg_gossip");
        if(str != null) {
            desired_avg_gossip=Long.parseLong(str);
            props.remove("desired_avg_gossip");
        }

        str=props.getProperty("stability_delay");
        if(str != null) {
            stability_delay=Long.parseLong(str);
            props.remove("stability_delay");
        }

        str=props.getProperty("max_gossip_runs");
        if(str != null) {
            props.remove("max_gossip_runs");
            log.error("max_gossip_runs has been deprecated and will be ignored");
        }

        str=props.getProperty("max_bytes");
        if(str != null) {
            max_bytes=Long.parseLong(str);
            props.remove("max_bytes");
        }

        str=props.getProperty("max_suspend_time");
        if(str != null) {
            log.error("max_suspend_time is not supported any longer; please remove it (ignoring it)");
            props.remove("max_suspend_time");
        }

        Util.checkBufferSize("STABLE.max_bytes", max_bytes);

        if(!props.isEmpty()) {
            log.error("these properties are not recognized: " + props);
            return false;
        }
        return true;
    }

    /** Suspends garbage collection and schedules a safety-net task to resume it after the timeout */
    private void suspend(long timeout) {
        if(!suspended) {
            suspended=true;
            if(log.isDebugEnabled())
                log.debug("suspending message garbage collection");
        }
        startResumeTask(timeout); // will not start task if already running
    }

    private void resume() {
        resetDigest(mbrs); // start from scratch
        suspended=false;
        if(log.isDebugEnabled())
            log.debug("resuming message garbage collection");
        stopResumeTask();
    }

    public void start() throws Exception {
        if(stack != null && stack.timer != null)
            timer=stack.timer;
        else
            throw new Exception("timer cannot be retrieved from protocol stack");
        if(desired_avg_gossip > 0)
            startStableTask();
    }

    public void stop() {
        stopStableTask();
        clearDigest();
    }

    /**
     * Consumes STABLE_GOSSIP/STABILITY messages (they are not passed further up); counts the size of
     * other multicast messages; tracks view changes and our local address.
     */
    public Object up(Event evt) {
        Message msg;
        StableHeader hdr;
        int type=evt.getType();

        switch(type) {

            case Event.MSG:
                msg=(Message)evt.getArg();
                hdr=(StableHeader)msg.getHeader(name);
                if(hdr == null) {
                    handleRegularMessage(msg);
                    return up_prot.up(evt);
                }
                switch(hdr.type) {
                    case StableHeader.STABLE_GOSSIP:
                        handleStableMessage(msg.getSrc(), hdr.stableDigest);
                        break;
                    case StableHeader.STABILITY:
                        handleStabilityMessage(hdr.stableDigest, msg.getSrc());
                        break;
                    default:
                        if(log.isErrorEnabled()) log.error("StableHeader type " + hdr.type + " not known");
                }
                return null;  // don't pass STABLE or STABILITY messages up the stack

            case Event.VIEW_CHANGE:
                Object retval=up_prot.up(evt);
                View view=(View)evt.getArg();
                handleViewChange(view);
                return retval;

            case Event.SET_LOCAL_ADDRESS:
                local_addr=(Address)evt.getArg();
                break;
        }
        return up_prot.up(evt);
    }

    /**
     * Adds the message's length to num_bytes_received (multicasts only); when max_bytes is reached,
     * resets the counter and triggers an immediate STABLE message with a fresh digest from NAKACK.
     */
    private void handleRegularMessage(Message msg) {
        // only if message counting is enabled, and only for multicast messages
        if(max_bytes <= 0)
            return;
        Address dest=msg.getDest();
        if(dest == null || dest.isMulticastAddress()) {
            received.lock();
            boolean locked=true; // tracks whether 'received' is still held, so finally can release it exactly once
            try {
                num_bytes_received+=(long)msg.getLength();
                if(num_bytes_received >= max_bytes) {
                    if(log.isTraceEnabled()) {
                        log.trace(new StringBuilder("max_bytes has been reached (").append(max_bytes).
                                append(", bytes received=").append(num_bytes_received).append("): triggers stable msg"));
                    }
                    num_bytes_received=0;
                    // deliberately release the lock BEFORE calling down the stack, to avoid holding
                    // 'received' across alien code
                    received.unlock();
                    locked=false;
                    // asks the NAKACK protocol for the current digest
                    Digest my_digest=(Digest)down_prot.down(Event.GET_DIGEST_STABLE_EVT);
                    synchronized(latest_local_digest) {
                        latest_local_digest.replace(my_digest);
                    }
                    if(log.isTraceEnabled())
                        log.trace("setting latest_local_digest from NAKACK: " + my_digest.printHighestDeliveredSeqnos());
                    sendStableMessage(my_digest);
                }
            }
            finally {
                if(locked)
                    received.unlock();
            }
        }
    }

    /** Handles view changes and the SUSPEND_STABLE/RESUME_STABLE control events */
    public Object down(Event evt) {
        switch(evt.getType()) {
            case Event.VIEW_CHANGE:
                Object retval=down_prot.down(evt);
                View v=(View)evt.getArg();
                handleViewChange(v);
                return retval;

            case Event.SUSPEND_STABLE:
                long timeout=0;
                Object t=evt.getArg();
                if(t != null && t instanceof Long)
                    timeout=(Long)t;
                suspend(timeout);
                break;

            case Event.RESUME_STABLE:
                resume();
                break;
        }
        return down_prot.down(evt);
    }

    /** JMX/debug hook: broadcasts a STABLE message with a copy of the current digest */
    public void runMessageGarbageCollection() {
        Digest copy;
        synchronized(digest) {
            copy=digest.copy();
        }
        sendStableMessage(copy);
    }

    /**
     * Adjusts membership, both digests and the vote list to the new view, refreshes the local digest
     * from NAKACK, and marks the protocol as initialized.
     */
    private void handleViewChange(View v) {
        Vector<Address> tmp=v.getMembers();
        mbrs.clear();
        mbrs.addAll(tmp);

        adjustSenders(digest, tmp);
        adjustSenders(latest_local_digest, tmp);

        // asks the NAKACK protocol for the current digest
        Digest my_digest=(Digest)down_prot.down(Event.GET_DIGEST_STABLE_EVT);
        if(my_digest != null) {
            synchronized(latest_local_digest) {
                latest_local_digest.replace(my_digest);
            }
        }
        resetDigest(tmp);
        if(!initialized)
            initialized=true;
    }

    /** Digest and members are guaranteed to be non-null. Removes left members from the digest and
     * adds new members with (low=-1, highest=-1) placeholder seqnos */
    private static void adjustSenders(MutableDigest digest, Vector<Address> members) {
        synchronized(digest) {
            // 1. remove all members from digest who are not in the view
            Iterator<Address> it=digest.getSenders().keySet().iterator();
            Address mbr;
            while(it.hasNext()) {
                mbr=it.next();
                if(!members.contains(mbr))
                    it.remove();
            }
            // 2. add members to digest which are in the new view but not in the digest
            for(Address member : members) {
                mbr=member;
                if(!digest.contains(mbr))
                    digest.add(mbr, -1, -1);
            }
        }
    }

    private void clearDigest() {
        synchronized(digest) {
            digest.clear();
        }
    }

    /** Update my own digest from a digest received by somebody else. Returns whether the update was successful.
     * Needs to be called with a lock on digest */
    private boolean updateLocalDigest(Digest d, Address sender) {
        if(d == null || d.size() == 0)
            return false;

        if(!initialized) {
            if(log.isTraceEnabled())
                log.trace("STABLE message will not be handled as I'm not yet initialized");
            return false;
        }

        if(!digest.sameSenders(d)) {
            // to avoid sending incorrect stability/stable msgs, we simply reset our heard_from list, see DESIGN
            resetDigest(mbrs);
            return false;
        }

        StringBuilder sb=null;
        if(log.isTraceEnabled()) {
            sb=new StringBuilder("[").append(local_addr).append("] handling digest from ").append(sender).append(" (").
                    append(votes.size()).append(" pending):\nmine: ").append(digest.printHighestDeliveredSeqnos())
                    .append("\nother: ").append(d.printHighestDeliveredSeqnos());
        }

        Address mbr;
        long highest_seqno, my_highest_seqno, new_highest_seqno, my_low, low, new_low;
        long highest_seen_seqno, my_highest_seen_seqno, new_highest_seen_seqno;
        Digest.Entry val;
        for(Map.Entry<Address, Digest.Entry> entry: d.getSenders().entrySet()) {
            mbr=entry.getKey();
            val=entry.getValue();
            low=val.getLow();
            highest_seqno=val.getHighestDeliveredSeqno();   // highest *delivered* seqno
            highest_seen_seqno=val.getHighestReceivedSeqno(); // highest *received* seqno

            my_low=digest.lowSeqnoAt(mbr);
            new_low=Math.min(my_low, low);

            // compute the minimum of the highest seqnos deliverable (for garbage collection)
            my_highest_seqno=digest.highestDeliveredSeqnoAt(mbr);
            // compute the maximum of the highest seqnos seen (for retransmission of last missing message)
            my_highest_seen_seqno=digest.highestReceivedSeqnoAt(mbr);

            new_highest_seqno=Math.min(my_highest_seqno, highest_seqno);
            new_highest_seen_seqno=Math.max(my_highest_seen_seqno, highest_seen_seqno);
            digest.setHighestDeliveredAndSeenSeqnos(mbr, new_low, new_highest_seqno, new_highest_seen_seqno);
        }
        if(log.isTraceEnabled()) {
            sb.append("\nresult: ").append(digest.printHighestDeliveredSeqnos()).append("\n");
            log.trace(sb);
        }
        return true;
    }

    /** Repopulates the vote list with the given membership and re-seeds 'digest' from the latest
     * digest NAKACK gave us */
    private void resetDigest(Vector<Address> new_members) {
        if(new_members == null || new_members.isEmpty())
            return;
        synchronized(votes) {
            votes.clear();
            votes.addAll(new_members);
        }
        Digest copy_of_latest;
        synchronized(latest_local_digest) {
            copy_of_latest=latest_local_digest.copy();
        }
        synchronized(digest) {
            digest.replace(copy_of_latest);
            if(log.isTraceEnabled())
                log.trace("resetting digest from NAKACK: " + copy_of_latest.printHighestDeliveredSeqnos());
        }
    }

    /**
     * Removes mbr from heard_from and returns true if this was the last member, otherwise false.
     * Resets the heard_from list (populates with membership)
     * @param mbr the member whose STABLE gossip was just processed
     */
    private boolean removeVote(Address mbr) {
        synchronized(votes) {
            boolean removed=votes.remove(mbr);
            if(removed && votes.isEmpty()) {
                resetDigest(this.mbrs);
                return true;
            }
        }
        return false;
    }

    private void startStableTask() {
        stable_task_lock.lock();
        try {
            if(stable_task_future == null || stable_task_future.isDone()) {
                StableTask stable_task=new StableTask();
                stable_task_future=timer.scheduleWithDynamicInterval(stable_task, true);
                if(log.isTraceEnabled())
                    log.trace("stable task started");
            }
        }
        finally {
            stable_task_lock.unlock();
        }
    }

    private void stopStableTask() {
        stable_task_lock.lock();
        try {
            if(stable_task_future != null) {
                stable_task_future.cancel(false);
                stable_task_future=null;
            }
        }
        finally {
            stable_task_lock.unlock();
        }
    }

    private void startResumeTask(long max_suspend_time) {
        max_suspend_time=(long)(max_suspend_time * 1.1); // little slack
        if(max_suspend_time <= 0)
            max_suspend_time=MAX_SUSPEND_TIME;

        synchronized(resume_task_mutex) {
            if(resume_task_future == null || resume_task_future.isDone()) {
                ResumeTask resume_task=new ResumeTask();
                resume_task_future=timer.schedule(resume_task, max_suspend_time, TimeUnit.MILLISECONDS); // fixed-rate scheduling
                if(log.isDebugEnabled())
                    log.debug("resume task started, max_suspend_time=" + max_suspend_time);
            }
        }
    }

    private void stopResumeTask() {
        synchronized(resume_task_mutex) {
            if(resume_task_future != null) {
                resume_task_future.cancel(false);
                resume_task_future=null;
            }
        }
    }

    private void startStabilityTask(Digest d, long delay) {
        stability_lock.lock();
        try {
            if(stability_task_future == null || stability_task_future.isDone()) {
                StabilitySendTask stability_task=new StabilitySendTask(d); // runs only once
                stability_task_future=timer.schedule(stability_task, delay, TimeUnit.MILLISECONDS);
            }
        }
        finally {
            stability_lock.unlock();
        }
    }

    private void stopStabilityTask() {
        stability_lock.lock();
        try {
            if(stability_task_future != null) {
                stability_task_future.cancel(false);
                stability_task_future=null;
            }
        }
        finally {
            stability_lock.unlock();
        }
    }

    /**
     Digest d contains (a) the highest seqnos <em>deliverable</em> for each sender and (b) the highest seqnos
     <em>seen</em> for each member. (Difference: with 1,2,4,5, the highest seqno seen is 5, whereas the highest
     seqno deliverable is 2). The minimum of all highest seqnos deliverable will be taken to send a stability
     message, which results in garbage collection of messages lower than the ones in the stability vector. The
     maximum of all seqnos will be taken to trigger possible retransmission of last missing seqno (see DESIGN
     for details).
     */
    private void handleStableMessage(Address sender, Digest d) {
        if(d == null || sender == null) {
            if(log.isErrorEnabled()) log.error("digest or sender is null");
            return;
        }

        if(!initialized) {
            if(log.isTraceEnabled())
                log.trace("STABLE message will not be handled as I'm not yet initialized");
            return;
        }

        if(suspended) {
            if(log.isTraceEnabled())
                log.trace("STABLE message will not be handled as I'm suspended");
            return;
        }

        if(!votes.contains(sender)) { // already received gossip from sender; discard it
            return;
        }

        num_stable_msgs_received++;
        Digest copy;
        synchronized(digest) {
            boolean success=updateLocalDigest(d, sender);
            if(!success) // we can only remove the sender from heard_from if *all* elements of my digest were updated
                return;
            copy=digest.copy();
        }

        // when the last vote comes in, mcast the STABILITY message (after a random delay)
        boolean was_last=removeVote(sender);
        if(was_last) {
            sendStabilityMessage(copy);
        }
    }

    private void handleStabilityMessage(Digest d, Address sender) {
        if(d == null) {
            if(log.isErrorEnabled()) log.error("stability digest is null");
            return;
        }

        if(!initialized) {
            if(log.isTraceEnabled())
                log.trace("STABLE message will not be handled as I'm not yet initialized");
            return;
        }

        if(suspended) {
            if(log.isDebugEnabled()) {
                log.debug("stability message will not be handled as I'm suspended");
            }
            return;
        }

        if(log.isTraceEnabled())
            log.trace(new StringBuffer("received stability msg from ").append(sender).append(": ").
                    append(d.printHighestDeliveredSeqnos()));
        // somebody else sent a STABILITY message first: cancel our own pending one
        stopStabilityTask();

        // we won't handle the gossip d, if d's members don't match the membership in my own digest,
        // this is part of the fix for the NAKACK problem (bugs #943480 and #938584)
        if(!this.digest.sameSenders(d)) {
            if(log.isDebugEnabled()) {
                log.debug("received digest (digest=" + d + ") which does not match my own digest ("+
                        this.digest + "): ignoring digest and re-initializing own digest");
            }
            return;
        }

        num_stability_msgs_received++;
        resetDigest(mbrs);

        // pass STABLE event down the stack, so NAKACK can garbage collect old messages
        down_prot.down(new Event(Event.STABLE, d));
    }

    /**
     * Bcasts a STABLE message of the current digest to all members. Message contains highest seqnos of all members
     * seen by this member. Highest seqnos are retrieved from the NAKACK layer below.
     * @param d A <em>copy</em> of this.digest
     */
    private void sendStableMessage(Digest d) {
        if(suspended) {
            if(log.isTraceEnabled())
                log.trace("will not send STABLE message as I'm suspended");
            return;
        }

        if(d != null && d.size() > 0) {
            if(log.isTraceEnabled())
                log.trace("sending stable msg " + d.printHighestDeliveredSeqnos());
            num_stable_msgs_sent++;
            Message msg=new Message(); // mcast message
            msg.setFlag(Message.OOB);
            StableHeader hdr=new StableHeader(StableHeader.STABLE_GOSSIP, d);
            msg.putHeader(name, hdr);
            down_prot.down(new Event(Event.MSG, msg));
        }
    }

    /**
     Schedules a stability message to be mcast after a random number of milliseconds (range 1-5 secs).
     The reason for waiting a random amount of time is that, in the worst case, all members receive a
     STABLE_GOSSIP message from the last outstanding member at the same time and would therefore mcast the
     STABILITY message at the same time too. To avoid this, each member waits random N msecs. If, before N
     elapses, some other member sent the STABILITY message, we just cancel our own message. If, during
     waiting for N msecs to send STABILITY message S1, another STABILITY message S2 is to be sent, we just
     discard S2.
     @param tmp A copy of the stability digest, so we don't need to copy it again
     */
    private void sendStabilityMessage(Digest tmp) {
        long delay;

        if(suspended) {
            if(log.isTraceEnabled())
                log.trace("STABILITY message will not be sent as I'm suspended");
            return;
        }

        // give other members a chance to mcast STABILITY message. if we receive STABILITY by the end of
        // our random sleep, we will not send the STABILITY msg. this prevents that all mbrs mcast a
        // STABILITY msg at the same time
        delay=Util.random(stability_delay);
        if(log.isTraceEnabled())
            log.trace("sending stability msg (in " + delay + " ms) " + tmp.printHighestDeliveredSeqnos() +
                    " (copy=" + tmp.hashCode() + ")");
        startStabilityTask(tmp, delay);
    }


    /** Protocol header carrying the digest for both STABLE_GOSSIP and STABILITY messages */
    public static class StableHeader extends Header implements Streamable {
        public static final int STABLE_GOSSIP=1;
        public static final int STABILITY=2;

        int type=0;
        // Digest digest=new Digest(); // used for both STABLE_GOSSIP and STABILITY message
        Digest stableDigest=null; // changed by Bela April 4 2004

        public StableHeader() {
        } // used for externalizable

        public StableHeader(int type, Digest digest) {
            this.type=type;
            this.stableDigest=digest;
        }

        static String type2String(int t) {
            switch(t) {
                case STABLE_GOSSIP: return "STABLE_GOSSIP";
                case STABILITY: return "STABILITY";
                default: return "<unknown>";
            }
        }

        public String toString() {
            StringBuilder sb=new StringBuilder();
            sb.append('[');
            sb.append(type2String(type));
            sb.append("]: digest is ");
            sb.append(stableDigest);
            return sb.toString();
        }

        public void writeExternal(ObjectOutput out) throws IOException {
            out.writeInt(type);
            // a boolean presence marker precedes the digest, mirrored in readExternal()
            if(stableDigest == null) {
                out.writeBoolean(false);
                return;
            }
            out.writeBoolean(true);
            stableDigest.writeExternal(out);
        }

        public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
            type=in.readInt();
            boolean digest_not_null=in.readBoolean();
            if(digest_not_null) {
                stableDigest=new Digest();
                stableDigest.readExternal(in);
            }
        }

        public int size() {
            int retval=Global.INT_SIZE + Global.BYTE_SIZE; // type + presence for digest
            if(stableDigest != null)
                retval+=stableDigest.serializedSize();
            return retval;
        }

        public void writeTo(DataOutputStream out) throws IOException {
            out.writeInt(type);
            Util.writeStreamable(stableDigest, out);
        }

        public void readFrom(DataInputStream in) throws IOException, IllegalAccessException, InstantiationException {
            type=in.readInt();
            stableDigest=(Digest)Util.readStreamable(Digest.class, in);
        }
    }


    /**
     Mcast periodic STABLE message. Interval between sends varies.
     */
    private class StableTask implements TimeScheduler.Task {

        /** Random interval scaled by group size; falls back to 10s when the computed value is not positive */
        public long nextInterval() {
            long interval=computeSleepTime();
            if(interval <= 0)
                return 10000;
            else
                return interval;
        }

        public void run() {
            if(suspended) {
                if(log.isTraceEnabled())
                    log.trace("stable task will not run as suspended=" + suspended);
                return;
            }
            // asks the NAKACK protocol for the current digest
            Digest my_digest=(Digest)down_prot.down(Event.GET_DIGEST_STABLE_EVT);
            if(my_digest == null) {
                if(log.isWarnEnabled())
                    log.warn("received null digest, skipped sending of stable message");
                return;
            }
            synchronized(latest_local_digest) {
                latest_local_digest.replace(my_digest);
            }
            if(log.isTraceEnabled())
                log.trace("setting latest_local_digest from NAKACK: " + my_digest.printHighestDeliveredSeqnos());
            sendStableMessage(my_digest);
        }

        long computeSleepTime() {
            return getRandom((mbrs.size() * desired_avg_gossip * 2));
        }

        long getRandom(long range) {
            return (long)((Math.random() * range) % range);
        }
    }


    /**
     * Multicasts a STABILITY message.
     */
    private class StabilitySendTask implements Runnable {
        Digest stability_digest=null;

        StabilitySendTask(Digest d) {
            this.stability_digest=d;
        }

        public void run() {
            Message msg;
            StableHeader hdr;

            if(suspended) {
                if(log.isDebugEnabled()) {
                    log.debug("STABILITY message will not be sent as suspended=" + suspended);
                }
                return;
            }

            if(stability_digest != null) {
                msg=new Message();
                msg.setFlag(Message.OOB);
                hdr=new StableHeader(StableHeader.STABILITY, stability_digest);
                msg.putHeader(STABLE.name, hdr);
                if(log.isTraceEnabled())
                    log.trace("sending stability msg " + stability_digest.printHighestDeliveredSeqnos() +
                            " (copy=" + stability_digest.hashCode() + ")");
                num_stability_msgs_sent++;
                down_prot.down(new Event(Event.MSG, msg));
            }
        }
    }


    /** Safety net: resumes garbage collection if no RESUME_STABLE event arrived within max_suspend_time */
    private class ResumeTask implements Runnable {
        ResumeTask() {
        }

        public void run() {
            if(suspended)
                log.warn("ResumeTask resumed message garbage collection - this should be done by a RESUME_STABLE event; " +
                        "check why this event was not received (or increase max_suspend_time for large state transfers)");
            resume();
        }
    }

}
/*
 * $Id: CacheExceptionMap.java,v 1.3 2004-02-09 22:54:27 troberts Exp $
 */

package org.lockss.plugin;

import java.util.HashMap;
import java.net.HttpURLConnection;
import java.io.*;

/**
 * Maps HTTP response codes to the name of the {@link CacheException}
 * subclass (or {@link CacheExceptionHandler}) used to report them.
 * Codes mapped to {@link #SUCCESS_STRING} are treated as successful
 * fetches and produce no exception.
 * <p>@author Claire Griffin</p>
 * <p>@version 1.0</p>
 */
public class CacheExceptionMap {
  /** Codes treated as successful fetches (no exception) */
  int[] SuccessCodes = {200, 203, 304};
  /** Transient server problems: retry the same URL */
  int[] SameUrlCodes = { 408, 413, 500, 502, 503, 504};
  /** Permanent redirect: retry against the new URL permanently */
  int[] MovePermCodes = { 301};
  /** Temporary redirect: retry against the new URL for this fetch only */
  int[] MoveTempCodes = { 307, 303, 302};
  /** Codes we recognize but do not yet handle */
  int[] UnimplementedCodes = { 300, 204};
  /** Expected failures (auth, not-found, etc.): no retry */
  int[] ExpectedCodes = { 401, 402, 403, 404, 405, 406, 407, 410, 305};
  /** Codes a well-behaved server should not return for our requests: no retry */
  int[] UnexpectedCodes = {
      201, 202, 205, 206, 306, 400, 409,
      411, 412, 414, 415, 416, 417, 501, 505};

  static final String EXCEPTION_CLASS_ROOT = "org.lockss.plugin.CacheException$";
  static final String SUCCESS_STRING = "OK";

  /** response code (Integer) -> exception class name (String), or SUCCESS_STRING */
  HashMap exceptionTable = new HashMap();

  public CacheExceptionMap() {
    initExceptionTable();
  }

  /** Populates the code->class-name table from the code arrays above */
  protected void initExceptionTable() {
    storeArrayEntries(SuccessCodes, SUCCESS_STRING);
    storeArrayEntries(SameUrlCodes,
                      EXCEPTION_CLASS_ROOT + "RetrySameUrlException");
    storeArrayEntries(MovePermCodes,
                      EXCEPTION_CLASS_ROOT + "RetryPermUrlException");
    storeArrayEntries(MoveTempCodes,
                      EXCEPTION_CLASS_ROOT + "RetryTempUrlException");
    storeArrayEntries(UnimplementedCodes,
                      EXCEPTION_CLASS_ROOT + "UnimplementedCodeException");
    storeArrayEntries(ExpectedCodes,
                      EXCEPTION_CLASS_ROOT + "ExpectedNoRetryException");
    storeArrayEntries(UnexpectedCodes,
                      EXCEPTION_CLASS_ROOT + "UnexpectedNoRetryException");
  }

  /** Maps every code in codeArray to exceptionClassName */
  public void storeArrayEntries(int[] codeArray, String exceptionClassName) {
    for(int i=0; i< codeArray.length; i++) {
      storeMapEntry(codeArray[i], exceptionClassName);
    }
  }

  /** Maps a single code to exceptionClassName, replacing any previous mapping */
  public void storeMapEntry(int code, String exceptionClassName) {
    exceptionTable.put(new Integer(code), exceptionClassName);
  }

  /**
   * @param resultCode an HTTP response code
   * @return the mapped exception class name (or SUCCESS_STRING), or null if
   * the code is not in the table
   */
  protected String getExceptionClassName(int resultCode) {
    return (String) exceptionTable.get(new Integer(resultCode));
  }

  public CacheException getHostException(String message) {
    return new CacheException.HostException(message);
  }

  public CacheException getRepositoryException(String message) {
    return new CacheException.RepositoryException(message);
  }

  /**
   * Examines the connection's response code and maps it to the appropriate
   * CacheException.
   * @return null on success codes, a HostException if the response could not
   * be read, otherwise the mapped exception
   */
  public CacheException checkException(HttpURLConnection connection) {
    try {
      int code = connection.getResponseCode();
      String msg = connection.getResponseMessage();
      return mapException(connection, code, "response " + code + ": " + msg);
    }
    catch (IOException ex) {
      return getHostException(ex.getMessage());
    }
  }

  /**
   * Maps a result code to a CacheException instance via the table.
   * @param connection the connection, passed to any CacheExceptionHandler
   * @param resultCode the HTTP response code
   * @param message message used to initialize the exception
   * @return null for success codes, otherwise a CacheException (never null)
   */
  protected CacheException mapException(HttpURLConnection connection,
                                        int resultCode, String message) {
    String exceptionClassName = getExceptionClassName(resultCode);
    if (exceptionClassName == null) {
      // Code absent from the table.  Previously this fell into the
      // NullPointerException path below, producing the misleading message
      // "Unable to make exception:null"; report the code explicitly instead.
      return new CacheException.UnknownCodeException(
          "Unknown result code: " + resultCode);
    }
    // codes mapped to SUCCESS_STRING are not errors
    if (exceptionClassName.equals(SUCCESS_STRING)) {
      return null;
    }
    try {
      Class exceptionClass = Class.forName(exceptionClassName);
      Object exception = exceptionClass.newInstance();
      CacheException cacheException;
      // a handler class builds its own exception from the live connection
      if (exception instanceof CacheExceptionHandler) {
        cacheException = ((CacheExceptionHandler) exception).handleException(
            resultCode, connection);
      }
      else {
        cacheException = (CacheException) exception;
        cacheException.initMessage(message);
      }
      return cacheException;
    }
    catch (Exception ex) {
      // reflection failed: bad class name, missing no-arg ctor, bad cast, ...
      return new CacheException.UnknownCodeException(
          "Unable to make exception:" + ex.getMessage());
    }
  }
}
// NOTE(review): package segment "comon" looks like a typo for "common" (compare the
// lexer import below) — renaming would move the file and break references, so it is
// left untouched; confirm with the project owners before fixing.
package org.python.pydev.django_templates.comon.parsing;

import java.io.IOException;

import org.python.pydev.django_templates.common.parsing.lexer.DjangoTemplatesTokens;

import beaver.Symbol;

import com.aptana.editor.common.parsing.CompositeParser;
import com.aptana.editor.common.parsing.CompositeParserScanner;
import com.aptana.parsing.IParseState;
import com.aptana.parsing.ast.IParseNode;
import com.aptana.parsing.ast.ParseNode;
import com.aptana.parsing.ast.ParseRootNode;

/**
 * Composite parser that scans a source for Django-template regions
 * (DJ_START .. DJ_END token pairs) and records each region as a
 * {@link DjangoTemplatesNode} child of the parse root.
 */
public abstract class DjParser extends CompositeParser {

    /** Language id attached to every parse node created by this parser */
    private String language;

    /**
     * @param defaultScanner scanner used for the embedded-language pass
     * @param primaryParserLanguage language handled by the primary parser
     * @param language language id for the nodes this parser creates
     */
    public DjParser(CompositeParserScanner defaultScanner, String primaryParserLanguage, String language) {
        super(defaultScanner, primaryParserLanguage);
        this.language = language;
    }

    /**
     * Walks the token stream until EOF, delegating every DJ_START token to
     * {@link #processDjBlock(IParseNode)}; all other tokens are skipped.
     *
     * @return the root node containing one child per Django-template block found
     */
    @Override
    protected IParseNode processEmbeddedlanguage(IParseState parseState) throws Exception {
        String source = new String(parseState.getSource());
        int startingOffset = parseState.getStartingOffset();
        IParseNode root = new ParseRootNode(language, new ParseNode[0], startingOffset, startingOffset
                + source.length() - 1);
        advance();
        short id = getCurrentSymbol().getId();
        while (id != DjangoTemplatesTokens.EOF) {
            // only cares about django templates tokens
            switch (id) {
                case DjangoTemplatesTokens.DJ_START:
                    processDjBlock(root);
                    break;
            }
            advance();
            id = getCurrentSymbol().getId();
        }
        return root;
    }

    /**
     * Consumes one Django-template block: everything between the current
     * DJ_START symbol and the next DJ_END (or EOF), and adds it to root.
     * (Was declared "throws IOException, Exception" — IOException is already
     * covered by Exception, so the redundant clause was dropped.)
     */
    private void processDjBlock(IParseNode root) throws Exception {
        Symbol startTag = getCurrentSymbol();
        advance();
        // finds the entire django templates block
        int start = getCurrentSymbol().getStart();
        int end = start;
        short id = getCurrentSymbol().getId();
        while (id != DjangoTemplatesTokens.DJ_END && id != DjangoTemplatesTokens.EOF) {
            end = getCurrentSymbol().getEnd();
            advance();
            id = getCurrentSymbol().getId();
        }
        ParseNode parseNode = new ParseNode(language);
        parseNode.setLocation(start, end);
        // NOTE(review): if the loop exits on EOF (unterminated block), endTag is the EOF
        // symbol and endTag.value may be null, which would NPE below — presumably the
        // lexer guarantees a value for EOF; confirm before hardening.
        Symbol endTag = getCurrentSymbol();
        DjangoTemplatesNode node = new DjangoTemplatesNode(language, parseNode, startTag.value.toString(),
                endTag.value.toString());
        node.setLocation(startTag.getStart(), endTag.getEnd());
        root.addChild(node);
    }
}
/*
 * $Id: ArchivalUnitStatus.java,v 1.40 2006-07-19 05:55:45 tlipkis Exp $
 */

package org.lockss.state;

import java.util.*;
import java.net.MalformedURLException;
import org.lockss.config.Configuration;
import org.lockss.daemon.*;
import org.lockss.daemon.status.*;
import org.lockss.plugin.*;
import org.lockss.util.*;
import org.lockss.app.*;
import org.lockss.poller.*;
import org.lockss.poller.v3.*;
import org.lockss.protocol.*;
import org.lockss.repository.*;
import org.lockss.servlet.LockssServlet;

/**
 * Collect and report the status of the ArchivalUnits.
 *
 * Registers several status tables with the daemon's StatusService:
 * an AU summary table, an AU-id table, a per-AU node table, and two
 * per-AU peer-agreement tables.
 */
public class ArchivalUnitStatus
    extends BaseLockssDaemonManager implements ConfigurableManager {

  public static final String PREFIX = Configuration.PREFIX + "auStatus.";

  /**
   * The default maximum number of nodes to display in a single page of the ui.
   */
  public static final String PARAM_MAX_NODES_TO_DISPLAY =
      PREFIX + "nodesPerPage";
  static final int DEFAULT_MAX_NODES_TO_DISPLAY = 100;

  /**
   * Node URLs are links to cached content page if true
   */
  public static final String PARAM_CONTENT_IS_LINK =
      PREFIX + "contentUrlIsLink";
  static final boolean DEFAULT_CONTENT_IS_LINK = true;

  public static final String SERVICE_STATUS_TABLE_NAME =
      "ArchivalUnitStatusTable";
  public static final String AUIDS_TABLE_NAME = "AuIds";
  public static final String AU_STATUS_TABLE_NAME = "ArchivalUnitTable";
  public static final String PEERS_VOTE_TABLE_NAME = "PeerVoteSummary";
  public static final String PEERS_REPAIR_TABLE_NAME = "PeerRepair";

  // Placeholder cell value; sorts below any real (non-negative) number.
  static final OrderedObject DASH = new OrderedObject("-", new Long(-1));

  private static Logger logger = Logger.getLogger("AuStatus");
  private static int defaultNumRows = DEFAULT_MAX_NODES_TO_DISPLAY;
  private static boolean isContentIsLink = DEFAULT_CONTENT_IS_LINK;

  public void startService() {
    super.startService();
    StatusService statusServ = theDaemon.getStatusService();
    statusServ.registerStatusAccessor(SERVICE_STATUS_TABLE_NAME,
                                      new AuSummary(theDaemon));
    statusServ.registerStatusAccessor(AUIDS_TABLE_NAME,
                                      new AuIds(theDaemon));
    statusServ.registerStatusAccessor(AU_STATUS_TABLE_NAME,
                                      new AuStatus(theDaemon));
    statusServ.registerStatusAccessor(PEERS_VOTE_TABLE_NAME,
                                      new PeerVoteSummary(theDaemon));
    statusServ.registerStatusAccessor(PEERS_REPAIR_TABLE_NAME,
                                      new PeerRepair(theDaemon));
    logger.debug2("Status accessors registered.");
  }

  public void stopService() {
    // unregister our status accessors
    StatusService statusServ = theDaemon.getStatusService();
    statusServ.unregisterStatusAccessor(SERVICE_STATUS_TABLE_NAME);
    // Fix: AUIDS_TABLE_NAME is registered in startService() but was never
    // unregistered here, leaking the accessor across service restarts.
    statusServ.unregisterStatusAccessor(AUIDS_TABLE_NAME);
    statusServ.unregisterStatusAccessor(AU_STATUS_TABLE_NAME);
    statusServ.unregisterStatusAccessor(PEERS_VOTE_TABLE_NAME);
    statusServ.unregisterStatusAccessor(PEERS_REPAIR_TABLE_NAME);
    logger.debug2("Status accessors unregistered.");
    super.stopService();
  }

  public void setConfig(Configuration config, Configuration oldConfig,
                        Configuration.Differences changedKeys) {
    defaultNumRows = config.getInt(PARAM_MAX_NODES_TO_DISPLAY,
                                   DEFAULT_MAX_NODES_TO_DISPLAY);
    isContentIsLink = config.getBoolean(PARAM_CONTENT_IS_LINK,
                                        DEFAULT_CONTENT_IS_LINK);
  }

  /** One-row-per-AU summary table (size, disk usage, polls, status). */
  static class AuSummary implements StatusAccessor {
    static final String TABLE_TITLE = "Archival Units";

    static final String FOOT_STATUS = "Flags may follow status: C means the AU is complete, D means that the AU is no longer available from the publisher";

    private static final List columnDescriptors = ListUtil.list(
      new ColumnDescriptor("AuName", "Volume", ColumnDescriptor.TYPE_STRING),
//       new ColumnDescriptor("AuNodeCount", "Nodes", ColumnDescriptor.TYPE_INT),
      new ColumnDescriptor("AuSize", "Content Size",
                           ColumnDescriptor.TYPE_INT),
      new ColumnDescriptor("DiskUsage", "Disk Usage (MB)",
                           ColumnDescriptor.TYPE_FLOAT),
      new ColumnDescriptor("Peers", "Peers", ColumnDescriptor.TYPE_INT),
      new ColumnDescriptor("AuPolls", "Polls",
                           ColumnDescriptor.TYPE_STRING),
      new ColumnDescriptor("Damaged", "Status",
                           ColumnDescriptor.TYPE_STRING,
                           FOOT_STATUS),
      new ColumnDescriptor("AuLastPoll", "Last Poll",
                           ColumnDescriptor.TYPE_DATE),
      new ColumnDescriptor("AuLastCrawl", "Last Crawl",
                           ColumnDescriptor.TYPE_DATE),
      new ColumnDescriptor("AuLastTreeWalk", "Last TreeWalk",
                           ColumnDescriptor.TYPE_DATE)
      );

    private static final List sortRules =
      ListUtil.list(new StatusTable.SortRule("AuName",
                                             CatalogueOrderComparator.SINGLETON));

    private LockssDaemon theDaemon;

    AuSummary(LockssDaemon theDaemon) {
      this.theDaemon = theDaemon;
    }

    public String getDisplayName() {
      return TABLE_TITLE;
    }

    public void populateTable(StatusTable table)
        throws StatusService.NoSuchTableException {
      table.setColumnDescriptors(columnDescriptors);
      table.setDefaultSortRules(sortRules);
      Stats stats = new Stats();
      table.setRows(getRows(table, stats));
      table.setSummaryInfo(getSummaryInfo(stats));
    }

    public boolean requiresKey() {
      return false;
    }

    // Accumulates counts while building rows, for the summary line.
    class Stats {
      int aus = 0;
    }

    private List getRows(StatusTable table, Stats stats) {
      PluginManager pluginMgr = theDaemon.getPluginManager();
      boolean includeInternalAus =
        table.getOptions().get(StatusTable.OPTION_DEBUG_USER);
      List rowL = new ArrayList();
      for (Iterator iter = pluginMgr.getAllAus().iterator();
           iter.hasNext(); ) {
        ArchivalUnit au = (ArchivalUnit)iter.next();
        if (!includeInternalAus && pluginMgr.isInternalAu(au)) {
          continue;
        }
        try {
          NodeManager nodeMan = theDaemon.getNodeManager(au);
          CachedUrlSet auCus = au.getAuCachedUrlSet();
          NodeState topNodeState = nodeMan.getNodeState(auCus);
          rowL.add(makeRow(au, nodeMan.getAuState(), topNodeState));
          stats.aus++;
        } catch (Exception e) {
          // Don't let one broken AU prevent display of the others.
          logger.warning("Unexpected exception building row", e);
        }
      }
      return rowL;
    }

    private Map makeRow(ArchivalUnit au, AuState auState,
                        NodeState topNodeState) {
      HashMap rowMap = new HashMap();
      PollManager.V3PollStatusAccessor v3status =
        theDaemon.getPollManager().getV3Status();
      // If this is a v3 AU, we cannot access some of the poll
      // status through the nodestate.  Eventually, this will be totally
      // refactored.
      boolean isV3 = AuUtil.getProtocolVersion(au) == Poll.V3_PROTOCOL;
      //"AuID"
      rowMap.put("AuName", AuStatus.makeAuRef(au.getName(), au.getAuId()));
//       rowMap.put("AuNodeCount", new Integer(-1));
      rowMap.put("AuSize", new Long(AuUtil.getAuContentSize(au)));
      rowMap.put("DiskUsage",
                 new Double(((double)AuUtil.getAuDiskUsage(au)) /
                            (1024*1024)));
      rowMap.put("AuLastCrawl", new Long(auState.getLastCrawlTime()));
      rowMap.put("Peers", PeerRepair.makeAuRef("peers", au.getAuId()));
      rowMap.put("AuLastTreeWalk", new Long(auState.getLastTreeWalkTime()));
      Object stat;
      if (isV3) {
        String auId = au.getAuId();
        Integer numPolls = new Integer(v3status.getNumPolls(auId));
        rowMap.put("AuPolls",
                   new StatusTable.Reference(numPolls,
                                             V3PollStatus.POLLER_STATUS_TABLE_NAME,
                                             auId));
        rowMap.put("AuLastPoll", new Long(v3status.getLastPollTime(auId)));
        // Percent damaged
        float fv = v3status.getAgreement(auId);
        // It's scary to see "0% Agreement" if no polls have completed.
        if (numPolls.intValue() == 0) {
          stat = "Waiting";
        } else {
          stat = Integer.toString(Math.round(fv * 100)) + "% Agreement";
        }
      } else {
        rowMap.put("AuPolls",
                   theDaemon.getStatusService().
                   getReference(PollerStatus.MANAGER_STATUS_TABLE_NAME, au));
        rowMap.put("AuLastPoll", new Long(auState.getLastTopLevelPollTime()));
        stat = topNodeState.hasDamage() ? DAMAGE_STATE_DAMAGED : DAMAGE_STATE_OK;
      }
      // Append "(C)" / "(D)" flags to the status per FOOT_STATUS.
      boolean isPubDown = AuUtil.isPubDown(au);
      boolean isClosed = AuUtil.isClosed(au);
      if (isPubDown || isClosed) {
        List val = ListUtil.list(stat, " (");
        if (isClosed) {
          val.add("C");
        }
        if (isPubDown) {
          val.add("D");
        }
        val.add(")");
        stat = val;
      }
      rowMap.put("Damaged", stat);
      return rowMap;
    }

    private List getSummaryInfo(Stats stats) {
      String numaus = StringUtil.numberOfUnits(stats.aus, "Archival Unit",
                                               "Archival Units");
      return ListUtil.list(new StatusTable.SummaryInfo(null,
                                                       ColumnDescriptor.TYPE_STRING,
                                                       numaus));
    }
  }

  /** Simple table mapping AU name to AU id. */
  static class AuIds implements StatusAccessor {
    static final String TABLE_TITLE = "AU Ids";

    private static final List columnDescriptors = ListUtil.list(
      new ColumnDescriptor("AuName", "Volume", ColumnDescriptor.TYPE_STRING),
      new ColumnDescriptor("AuId", "AU Id", ColumnDescriptor.TYPE_STRING)
      );

    private static final List sortRules =
      ListUtil.list(new StatusTable.SortRule("AuName",
                                             CatalogueOrderComparator.SINGLETON));

    private LockssDaemon theDaemon;

    AuIds(LockssDaemon theDaemon) {
      this.theDaemon = theDaemon;
    }

    public String getDisplayName() {
      return TABLE_TITLE;
    }

    public void populateTable(StatusTable table)
        throws StatusService.NoSuchTableException {
      table.setColumnDescriptors(columnDescriptors);
      table.setDefaultSortRules(sortRules);
      Stats stats = new Stats();
      table.setRows(getRows(table, stats));
      table.setSummaryInfo(getSummaryInfo(stats));
    }

    public boolean requiresKey() {
      return false;
    }

    // Accumulates counts while building rows, for the summary line.
    class Stats {
      int aus = 0;
    }

    private List getRows(StatusTable table, Stats stats) {
      PluginManager pluginMgr = theDaemon.getPluginManager();
      boolean includeInternalAus =
        table.getOptions().get(StatusTable.OPTION_DEBUG_USER);
      List rowL = new ArrayList();
      for (Iterator iter = pluginMgr.getAllAus().iterator();
           iter.hasNext(); ) {
        ArchivalUnit au = (ArchivalUnit)iter.next();
        if (!includeInternalAus && pluginMgr.isInternalAu(au)) {
          continue;
        }
        try {
          rowL.add(makeRow(au));
          stats.aus++;
        } catch (Exception e) {
          // Don't let one broken AU prevent display of the others.
          logger.warning("Unexpected exception building row", e);
        }
      }
      return rowL;
    }

    private Map makeRow(ArchivalUnit au) {
      HashMap rowMap = new HashMap();
      rowMap.put("AuId", au.getAuId());
      rowMap.put("AuName", au.getName());
      return rowMap;
    }

    private List getSummaryInfo(Stats stats) {
      String numaus = StringUtil.numberOfUnits(stats.aus, "Archival Unit",
                                               "Archival Units");
      return ListUtil.list(new StatusTable.SummaryInfo(null,
                                                       ColumnDescriptor.TYPE_STRING,
                                                       numaus));
    }
  }

  static final StatusTable.DisplayedValue DAMAGE_STATE_OK =
      new StatusTable.DisplayedValue("Ok");
  static final StatusTable.DisplayedValue DAMAGE_STATE_DAMAGED =
      new StatusTable.DisplayedValue("Repairing");

//   static {
//     DAMAGE_STATE_OK.setColor("green");
//     DAMAGE_STATE_DAMAGED.setColor("yellow");
//   }

  /**
   * Base class for tables keyed by auid: resolves the key to an
   * ArchivalUnit and delegates to the subclass.
   */
  abstract static class PerAuTable implements StatusAccessor {

    protected LockssDaemon theDaemon;

    PerAuTable(LockssDaemon theDaemon) {
      this.theDaemon = theDaemon;
    }

    public boolean requiresKey() {
      return true;
    }

    public String getDisplayName() {
      throw new UnsupportedOperationException("Au table has no generic title");
    }

    public void populateTable(StatusTable table)
        throws StatusService.NoSuchTableException {
      String key = table.getKey();
      try {
        ArchivalUnit au = theDaemon.getPluginManager().getAuFromId(key);
        if (au == null) {
          throw new StatusService.NoSuchTableException("Unknown auid: " + key);
        }
        populateTable(table, au);
      } catch (StatusService.NoSuchTableException e) {
        throw e;
      } catch (Exception e) {
        logger.warning("Error building table", e);
        throw new StatusService.
          NoSuchTableException("Error building table for auid: " + key);
      }
    }

    /** Fill in the table for the already-resolved AU. */
    protected abstract void populateTable(StatusTable table, ArchivalUnit au)
        throws StatusService.NoSuchTableException;
  }

  /** Per-AU node table, paginated via "skiprows"/"numrows" properties. */
  static class AuStatus extends PerAuTable {

    private static final List columnDescriptors = ListUtil.list(
      new ColumnDescriptor("NodeName", "Node Url",
                           ColumnDescriptor.TYPE_STRING),
//       new ColumnDescriptor("NodeHasContent", "Content",
//                            ColumnDescriptor.TYPE_STRING),
      new ColumnDescriptor("NodeVersion", "Version",
                           ColumnDescriptor.TYPE_INT),
      new ColumnDescriptor("NodeContentSize", "Size",
                           ColumnDescriptor.TYPE_INT),
      new ColumnDescriptor("NodeTreeSize", "Tree Size",
                           ColumnDescriptor.TYPE_INT),
      new ColumnDescriptor("NodeChildCount", "Children",
                           ColumnDescriptor.TYPE_INT),
      new ColumnDescriptor("NodeStatus", "Status",
                           ColumnDescriptor.TYPE_STRING)
      );

    private static final List sortRules =
      ListUtil.list(new StatusTable.SortRule("sort", true));

    AuStatus(LockssDaemon theDaemon) {
      super(theDaemon);
    }

    protected void populateTable(StatusTable table, ArchivalUnit au)
        throws StatusService.NoSuchTableException {
      LockssRepository repo = theDaemon.getLockssRepository(au);
      NodeManager nodeMan = theDaemon.getNodeManager(au);
      table.setTitle(getTitle(au.getName()));
      CachedUrlSet auCus = au.getAuCachedUrlSet();
      NodeState topNode = nodeMan.getNodeState(auCus);
      table.setSummaryInfo(getSummaryInfo(au, nodeMan.getAuState(), topNode));
      if (!table.getOptions().get(StatusTable.OPTION_NO_ROWS)) {
        table.setColumnDescriptors(columnDescriptors);
        table.setDefaultSortRules(sortRules);
        table.setRows(getRows(table, au, repo, nodeMan));
      }
    }

    /** Parse an int table property; -1 if missing or malformed. */
    int getIntProp(StatusTable table, String name) {
      Properties props = table.getProperties();
      if (props == null) return -1;
      String s = props.getProperty(name);
      if (StringUtil.isNullString(s)) return -1;
      try {
        return Integer.parseInt(s);
      } catch (Exception e) {
        return -1;
      }
    }

    private List getRows(StatusTable table, ArchivalUnit au,
                         LockssRepository repo, NodeManager nodeMan) {
      int startRow = Math.max(0, getIntProp(table, "skiprows"));
      int numRows = getIntProp(table, "numrows");
      if (numRows <= 0) {
        numRows = defaultNumRows;
      }
      List rowL = new ArrayList();
      Iterator cusIter = au.getAuCachedUrlSet().contentHashIterator();
      int endRow1 = startRow + numRows; // end row + 1
      if (startRow > 0) {
        // add 'previous'
        int start = startRow - defaultNumRows;
        if (start < 0) {
          start = 0;
        }
        rowL.add(makeOtherRowsLink(false, start, au.getAuId()));
      }
      for (int curRow = 0; (curRow < endRow1) && cusIter.hasNext(); curRow++) {
        CachedUrlSetNode cusn = (CachedUrlSetNode)cusIter.next();
        if (curRow < startRow) {
          continue;
        }
        CachedUrlSet cus;
        if (cusn.getType() == CachedUrlSetNode.TYPE_CACHED_URL_SET) {
          cus = (CachedUrlSet)cusn;
        } else {
          CachedUrlSetSpec spec = new RangeCachedUrlSetSpec(cusn.getUrl());
          cus = au.makeCachedUrlSet(spec);
        }
        try {
          Map row = makeRow(au, repo.getNode(cus.getUrl()),
                            nodeMan.getNodeState(cus));
          row.put("sort", new Integer(curRow));
          rowL.add(row);
        } catch (MalformedURLException ignore) {
          // skip nodes whose URL can't be resolved in the repository
        }
      }
      if (cusIter.hasNext()) {
        // add 'next'
        rowL.add(makeOtherRowsLink(true, endRow1, au.getAuId()));
      }
      return rowL;
    }

    private Map makeRow(ArchivalUnit au, RepositoryNode node,
                        NodeState state) {
      String url = node.getNodeUrl();
      boolean hasContent = node.hasContent();
      Object val;
      HashMap rowMap = new HashMap();
      if (hasContent && isContentIsLink) {
        Properties args = new Properties();
        args.setProperty("auid", au.getAuId());
        args.setProperty("url", url);
        val = new StatusTable.SrvLink(url,
                                      LockssServlet.SERVLET_DISPLAY_CONTENT,
                                      args);
      } else {
        val = url;
      }
      rowMap.put("NodeName", val);

      String status = null;
      if (node.isDeleted()) {
        status = "Deleted";
      } else if (node.isContentInactive()) {
        status = "Inactive";
      } else if (state.hasDamage()) {
        status = "Damaged";
      } else {
//         status = "Active";
      }
      if (status != null) {
        rowMap.put("NodeStatus", status);
      }
      Object versionObj = DASH;
      Object sizeObj = DASH;
      if (hasContent) {
        versionObj = new OrderedObject(new Long(node.getCurrentVersion()));
        sizeObj = new OrderedObject(new Long(node.getContentSize()));
      }
      rowMap.put("NodeHasContent", (hasContent ? "yes" : "no"));
      rowMap.put("NodeVersion", versionObj);
      rowMap.put("NodeContentSize", sizeObj);
      if (!node.isLeaf()) {
        rowMap.put("NodeChildCount",
                   new OrderedObject(new Long(node.getChildCount())));
        rowMap.put("NodeTreeSize",
                   new OrderedObject(new Long(node.getTreeContentSize(null))));
      } else {
        rowMap.put("NodeChildCount", DASH);
        rowMap.put("NodeTreeSize", DASH);
      }
      return rowMap;
    }

    /** Build a "Previous (a-b)" / "Next (a-b)" pagination link row. */
    private Map makeOtherRowsLink(boolean isNext, int startRow, String auKey) {
      HashMap rowMap = new HashMap();
      String label = (isNext ? "Next" : "Previous") + " (" + (startRow + 1)
        + "-" + (startRow + defaultNumRows) + ")";
      StatusTable.Reference link =
        new StatusTable.Reference(label, AU_STATUS_TABLE_NAME, auKey);
      link.setProperty("skiprows", Integer.toString(startRow));
      link.setProperty("numrows", Integer.toString(defaultNumRows));
      rowMap.put("NodeName", link);
      // Pin links to the extremes of the sort order.
      rowMap.put("sort", new Integer(isNext ? Integer.MAX_VALUE : -1));
      return rowMap;
    }

    private String getTitle(String key) {
      return "Status of AU: " + key;
    }

    private List getSummaryInfo(ArchivalUnit au, AuState state,
                                NodeState topNode) {
      List summaryList = ListUtil.list(
            new StatusTable.SummaryInfo("Volume",
                                        ColumnDescriptor.TYPE_STRING,
                                        au.getName()),
//             new StatusTable.SummaryInfo("Nodes", ColumnDescriptor.TYPE_INT,
//                                         new Integer(-1)),
            new StatusTable.SummaryInfo("Content Size",
                                        ColumnDescriptor.TYPE_INT,
                                        new Long(AuUtil.getAuContentSize(au))),
            // Fix: was computed from getAuContentSize(), inconsistent with
            // the identically-labeled column in AuSummary.makeRow().
            new StatusTable.SummaryInfo("Disk Usage (MB)",
                                        ColumnDescriptor.TYPE_FLOAT,
                                        new Float(AuUtil.getAuDiskUsage(au) /
                                                  (float)(1024 * 1024))),
            new StatusTable.SummaryInfo("Status",
                                        ColumnDescriptor.TYPE_STRING,
                                        (topNode.hasDamage()
                                         ? DAMAGE_STATE_DAMAGED
                                         : DAMAGE_STATE_OK)),
            new StatusTable.SummaryInfo("Available From Publisher",
                                        ColumnDescriptor.TYPE_STRING,
                                        (AuUtil.isPubDown(au) ? "No" : "Yes")),
//             new StatusTable.SummaryInfo("Volume Complete",
//                                         ColumnDescriptor.TYPE_STRING,
//                                         (AuUtil.isClosed(au) ? "Yes" : "No")),
            new StatusTable.SummaryInfo("Polling Protocol Version",
                                        ColumnDescriptor.TYPE_INT,
                                        new Integer(AuUtil.getProtocolVersion(au))),
            new StatusTable.SummaryInfo("Last Crawl Time",
                                        ColumnDescriptor.TYPE_DATE,
                                        new Long(state.getLastCrawlTime())),
            new StatusTable.SummaryInfo("Last Top-level Poll",
                                        ColumnDescriptor.TYPE_DATE,
                                        new Long(state.getLastTopLevelPollTime())),
            new StatusTable.SummaryInfo("Last Treewalk",
                                        ColumnDescriptor.TYPE_DATE,
                                        new Long(state.getLastTreeWalkTime())),
            new StatusTable.SummaryInfo("Current Activity",
                                        ColumnDescriptor.TYPE_STRING,
                                        "-")
            );
      return summaryList;
    }

    // utility method for making a Reference
    public static StatusTable.Reference makeAuRef(Object value, String key) {
      StatusTable.Reference ref =
        new StatusTable.Reference(value, AU_STATUS_TABLE_NAME, key);
//       ref.setProperty("numrows", Integer.toString(defaultNumRows));
      return ref;
    }
  }

  /** Shared row-building and per-peer stats for the agreement tables. */
  abstract static class PeersAgreement extends PerAuTable {

    protected static final List sortRules =
      ListUtil.list(new StatusTable.SortRule("Cache", true));

    PeersAgreement(LockssDaemon theDaemon) {
      super(theDaemon);
    }

    protected Map makeRow(CacheStats stats) {
      Map rowMap = new HashMap();
      PeerIdentity peer = stats.peer;
      Object id = peer.getIdString();
      if (peer.isLocalIdentity()) {
        // Render our own identity in bold.
        StatusTable.DisplayedValue val = new StatusTable.DisplayedValue(id);
        val.setBold(true);
        id = val;
      }
      rowMap.put("Cache", id);
      return rowMap;
    }

    /** Per-peer poll/agreement counters. */
    static class CacheStats {
      PeerIdentity peer;
      int totalPolls = 0;
      int agreePolls = 0;
      Vote lastAgree;
      long lastAgreeTime = 0;
      Vote lastDisagree;
      long lastDisagreeTime = 0;

      CacheStats(PeerIdentity peer) {
        this.peer = peer;
      }

      boolean isLastAgree() {
        return (lastAgreeTime != 0 &&
                (lastDisagreeTime == 0 || lastAgreeTime >= lastDisagreeTime));
      }
    }
  }

  /** Per-AU table of all caches that have voted on the AU. */
  static class PeerVoteSummary extends PeersAgreement {

    private static final List columnDescriptors = ListUtil.list(
      new ColumnDescriptor("Cache", "Cache",
                           ColumnDescriptor.TYPE_STRING),
      new ColumnDescriptor("Last", "Last",
                           ColumnDescriptor.TYPE_STRING),
      new ColumnDescriptor("Polls", "Polls",
                           ColumnDescriptor.TYPE_INT),
      new ColumnDescriptor("Agree", "Agree",
                           ColumnDescriptor.TYPE_INT),
      new ColumnDescriptor("LastAgree", "Last Agree",
                           ColumnDescriptor.TYPE_DATE),
      new ColumnDescriptor("LastDisagree", "Last Disagree",
                           ColumnDescriptor.TYPE_DATE)
      );

    PeerVoteSummary(LockssDaemon theDaemon) {
      super(theDaemon);
    }

    protected String getTitle(ArchivalUnit au) {
      return "All caches voting on AU: " + au.getName();
    }

    protected void populateTable(StatusTable table, ArchivalUnit au)
        throws StatusService.NoSuchTableException {
      NodeManager nodeMan = theDaemon.getNodeManager(au);
      table.setTitle(getTitle(au));
      int totalPeers = 0;
      int totalAgreement = 0;
      if (!table.getOptions().get(StatusTable.OPTION_NO_ROWS)) {
        table.setColumnDescriptors(columnDescriptors);
        table.setDefaultSortRules(sortRules);
        Map statsMap = buildCacheStats(au, nodeMan);
        List rowL = new ArrayList();
        for (Iterator iter = statsMap.entrySet().iterator();
             iter.hasNext(); ) {
          Map.Entry entry = (Map.Entry)iter.next();
          PeerIdentity peer = (PeerIdentity)entry.getKey();
          CacheStats stats = (CacheStats)entry.getValue();
          if (!peer.isLocalIdentity()) {
            totalPeers++;
            if (stats.isLastAgree()) {
              totalAgreement++;
            }
          }
          Map row = makeRow(stats);
          rowL.add(row);
        }
        table.setRows(rowL);
      }
      table.setSummaryInfo(getSummaryInfo(au, totalPeers, totalAgreement));
    }

    /** Walk the top node's poll histories, tallying votes per peer. */
    public Map buildCacheStats(ArchivalUnit au, NodeManager nodeMan) {
      Map statsMap = new HashMap();
      NodeState node = nodeMan.getNodeState(au.getAuCachedUrlSet());
      for (Iterator history_it = node.getPollHistories();
           history_it.hasNext(); ) {
        PollHistory history = (PollHistory)history_it.next();
        long histTime = history.getStartTime();
        for (Iterator votes_it = history.getVotes();
             votes_it.hasNext(); ) {
          Vote vote = (Vote)votes_it.next();
          PeerIdentity peer = vote.getVoterIdentity();
          CacheStats stats = (CacheStats)statsMap.get(peer);
          if (stats == null) {
            stats = new CacheStats(peer);
            statsMap.put(peer, stats);
          }
          stats.totalPolls++;
          if (vote.isAgreeVote()) {
            stats.agreePolls++;
            if (stats.lastAgree == null || histTime > stats.lastAgreeTime) {
              stats.lastAgree = vote;
              stats.lastAgreeTime = histTime;
            }
          } else {
            if (stats.lastDisagree == null ||
                histTime > stats.lastDisagreeTime) {
              stats.lastDisagree = vote;
              stats.lastDisagreeTime = histTime;
            }
          }
        }
      }
      return statsMap;
    }

    protected Map makeRow(CacheStats stats) {
      Map rowMap = super.makeRow(stats);
      rowMap.put("Last", stats.isLastAgree() ? "Agree" : "Disagree");
      rowMap.put("Polls", new Long(stats.totalPolls));
      rowMap.put("Agree", new Long(stats.agreePolls));
      rowMap.put("LastAgree", new Long(stats.lastAgreeTime));
      rowMap.put("LastDisagree", new Long(stats.lastDisagreeTime));
      return rowMap;
    }

    protected List getSummaryInfo(ArchivalUnit au,
                                  int totalPeers, int totalAgreement) {
      List summaryList = ListUtil.list(
            new StatusTable.SummaryInfo("Peers voting on AU",
                                        ColumnDescriptor.TYPE_INT,
                                        new Integer(totalPeers)),
            new StatusTable.SummaryInfo("Agreeing peers",
                                        ColumnDescriptor.TYPE_INT,
                                        new Integer(totalAgreement))
            );
      return summaryList;
    }

    // utility method for making a Reference
    public static StatusTable.Reference makeAuRef(Object value, String key) {
      return new StatusTable.Reference(value, PEERS_VOTE_TABLE_NAME, key);
    }
  }

  /** Per-AU table of peers we would fetch repairs from. */
  static class PeerRepair extends PeersAgreement {

    private static final List columnDescriptors = ListUtil.list(
      new ColumnDescriptor("Cache", "Cache",
                           ColumnDescriptor.TYPE_STRING),
      new ColumnDescriptor("Last", "Complete Consensus",
                           ColumnDescriptor.TYPE_STRING),
      new ColumnDescriptor("LastAgree", "Last Complete Consensus",
                           ColumnDescriptor.TYPE_DATE),
      new ColumnDescriptor("LastDisagree", "Last Partial Disagreement",
                           ColumnDescriptor.TYPE_DATE)
      );

    PeerRepair(LockssDaemon theDaemon) {
      super(theDaemon);
    }

    protected String getTitle(ArchivalUnit au) {
      return "Repair candidates for AU: " + au.getName();
    }

    private static final String FOOT_TITLE =
      "These caches have proven to us that they have (or had) a correct \n" +
      "copy of this AU.  We will fetch repairs from them if necessary, \n" +
      "and they may fetch repairs from us.";

    protected void populateTable(StatusTable table, ArchivalUnit au)
        throws StatusService.NoSuchTableException {
      IdentityManager idMgr = theDaemon.getIdentityManager();
      table.setTitle(getTitle(au));
      table.setTitleFootnote(FOOT_TITLE);
      int totalPeers = 0;
      if (!table.getOptions().get(StatusTable.OPTION_NO_ROWS)) {
        table.setColumnDescriptors(columnDescriptors);
        table.setDefaultSortRules(sortRules);
        Map statsMap = buildCacheStats(au, idMgr);
        List rowL = new ArrayList();
        for (Iterator iter = statsMap.entrySet().iterator();
             iter.hasNext(); ) {
          Map.Entry entry = (Map.Entry)iter.next();
          PeerIdentity peer = (PeerIdentity)entry.getKey();
          CacheStats stats = (CacheStats)entry.getValue();
          if (!peer.isLocalIdentity()) {
            totalPeers++;
          }
          Map row = makeRow(stats);
          rowL.add(row);
        }
        table.setRows(rowL);
      }
      table.setSummaryInfo(getSummaryInfo(au, totalPeers));
    }

    /** Collect peers that have agreed with us at least once on this AU. */
    public Map buildCacheStats(ArchivalUnit au, IdentityManager idMgr) {
      Map statsMap = new HashMap();
      for (Iterator iter = idMgr.getIdentityAgreements(au).iterator();
           iter.hasNext(); ) {
        IdentityManager.IdentityAgreement ida =
          (IdentityManager.IdentityAgreement)iter.next();
        try {
          PeerIdentity pid = idMgr.stringToPeerIdentity(ida.getId());
          if (ida.getLastAgree() > 0) {
            // only add those that have agreed
            CacheStats stats = new CacheStats(pid);
            statsMap.put(pid, stats);
            stats.lastAgreeTime = ida.getLastAgree();
            stats.lastDisagreeTime = ida.getLastDisagree();
          }
        } catch (IdentityManager.MalformedIdentityKeyException e) {
          logger.warning("Malformed id key in IdentityAgreement", e);
          continue;
        }
      }
      return statsMap;
    }

    protected Map makeRow(CacheStats stats) {
      Map rowMap = super.makeRow(stats);
      rowMap.put("Last", stats.isLastAgree() ? "Yes" : "No");
      rowMap.put("LastAgree", new Long(stats.lastAgreeTime));
      rowMap.put("LastDisagree", new Long(stats.lastDisagreeTime));
      return rowMap;
    }

    protected List getSummaryInfo(ArchivalUnit au, int totalPeers) {
      List summaryList = ListUtil.list(
            new StatusTable.SummaryInfo("Peers holding AU",
                                        ColumnDescriptor.TYPE_INT,
                                        new Integer(totalPeers)),
            new StatusTable.SummaryInfo("Peers",
                                        ColumnDescriptor.TYPE_STRING,
                                        PeerVoteSummary.makeAuRef("Voting on AU",
                                                                  au.getAuId()))
            );
      return summaryList;
    }

    // utility method for making a Reference
    public static StatusTable.Reference makeAuRef(Object value, String key) {
      return new StatusTable.Reference(value, PEERS_REPAIR_TABLE_NAME, key);
    }
  }
}
package org.curriki.xwiki.servlet.restlet.resource.users;

import org.restlet.resource.Representation;
import org.restlet.resource.Variant;
import org.restlet.resource.ResourceException;
import org.restlet.Context;
import org.restlet.data.Request;
import org.restlet.data.Response;
import org.restlet.data.Status;
import org.curriki.xwiki.servlet.restlet.resource.BaseResource;
import org.curriki.xwiki.plugin.asset.Asset;
import org.curriki.xwiki.plugin.asset.composite.RootCollectionCompositeAsset;

import java.util.Map;
import java.util.List;

import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import net.sf.json.JSONException;
import com.xpn.xwiki.XWikiException;

/**
 * REST resource for a user's collections.
 *
 * GET returns the user's collections as a JSON array; PUT/POST reorders
 * the user's root collection given the original and wanted orderings.
 */
public class UserCollectionsResource extends BaseResource {

    public UserCollectionsResource(Context context, Request request,
                                   Response response) {
        super(context, request, response);
        setReadable(true);
        setModifiable(true);
        defaultVariants();
    }

    /**
     * Returns the collections of the user named by the "userName" request
     * attribute, flattened into a JSON array keyed by "collectionPage".
     *
     * @throws ResourceException 404 if the user's collections can't be fetched
     */
    @Override
    public Representation represent(Variant variant) throws ResourceException {
        setupXWiki();

        Request request = getRequest();
        String forUser = (String) request.getAttributes().get("userName");

        List<String> resultList;
        Map<String, Object> results;
        try {
            resultList = plugin.fetchUserCollectionsList(forUser);
            results = plugin.fetchUserCollectionsInfo(forUser);
        } catch (XWikiException e) {
            throw error(Status.CLIENT_ERROR_NOT_FOUND, e.getMessage());
        }

        JSONArray json =
            flattenMapToJSONArray(results, resultList, "collectionPage");
        return formatJSON(json, variant);
    }

    /**
     * Reorders the user's root collection.  The representation must be a
     * JSON object with non-empty "original" and "wanted" arrays giving the
     * current and desired orderings.
     *
     * @throws ResourceException 404 if the root collection doesn't exist,
     *         406 if either ordering is missing or empty,
     *         412 if the asset is not a root collection or saving fails
     */
    @Override
    public void storeRepresentation(Representation representation)
            throws ResourceException {
        setupXWiki();

        Request request = getRequest();
        String forUser = (String) request.getAttributes().get("userName");

        JSONObject json = representationToJSONObject(representation);

        Asset asset;
        try {
            asset = plugin.fetchRootCollection(forUser);
        } catch (XWikiException e) {
            throw error(Status.CLIENT_ERROR_NOT_FOUND, e.getMessage());
        }
        if (asset == null) {
            throw error(Status.CLIENT_ERROR_NOT_FOUND,
                        "Collection for " + forUser + " not found.");
        }

        JSONArray orig;
        try {
            orig = json.getJSONArray("original");
            if (orig.isEmpty()) {
                // Fix: message previously misspelled "orignal".
                throw error(Status.CLIENT_ERROR_NOT_ACCEPTABLE,
                            "You must provide the original order.");
            }
        } catch (JSONException e) {
            throw error(Status.CLIENT_ERROR_NOT_ACCEPTABLE,
                        "You must provide the original order.");
        }

        JSONArray want;
        try {
            want = json.getJSONArray("wanted");
            if (want.isEmpty()) {
                throw error(Status.CLIENT_ERROR_NOT_ACCEPTABLE,
                            "You must provide a new order.");
            }
        } catch (JSONException e) {
            throw error(Status.CLIENT_ERROR_NOT_ACCEPTABLE,
                        "You must provide a new order.");
        }

        if (asset instanceof RootCollectionCompositeAsset) {
            try {
                RootCollectionCompositeAsset fAsset =
                    asset.as(RootCollectionCompositeAsset.class);
                fAsset.reorder(orig, want);
                fAsset.save(xwikiContext.getMessageTool()
                            .get("curriki.comment.reordered"));
            } catch (XWikiException e) {
                throw error(Status.CLIENT_ERROR_PRECONDITION_FAILED,
                            e.getMessage());
            }
        } else {
            throw error(Status.CLIENT_ERROR_PRECONDITION_FAILED,
                        "Asset is not a root collection.");
        }

        // Echo the (now reordered) collection list back to the client.
        getResponse().setEntity(represent(getPreferredVariant()));
    }
}
package org.xbill.DNS; import java.io.*; import java.util.*; import org.xbill.DNS.utils.*; /** * A cache of DNS records. The cache obeys TTLs, so items are purged after * their validity period is complete. Negative answers are cached, to * avoid repeated failed DNS queries. The credibility of each RRset is * maintained, so that more credible records replace less credible records, * and lookups can specify the minimum credibility of data they are requesting. * @see RRset * @see Credibility * * @author Brian Wellington */ public class Cache extends NameSet { private class Element { RRset rrset; short type, dclass; byte credibility; long timeIn; int ttl; int srcid; Thread tid; public Element(int _ttl, byte cred, int src, short _type, short _dclass) { rrset = null; type = _type; dclass = _dclass; credibility = cred; ttl = _ttl; srcid = src; timeIn = System.currentTimeMillis(); tid = Thread.currentThread(); } public Element(Record r, byte cred, int src) { rrset = new RRset(); type = rrset.getType(); dclass = rrset.getDClass(); credibility = cred; timeIn = System.currentTimeMillis(); ttl = -1; srcid = src; update(r); tid = Thread.currentThread(); } public Element(RRset r, byte cred, int src) { rrset = r; type = r.getType(); dclass = r.getDClass(); credibility = cred; timeIn = System.currentTimeMillis(); ttl = r.getTTL(); srcid = src; tid = Thread.currentThread(); } public void update(Record r) { rrset.addRR(r); timeIn = System.currentTimeMillis(); if (ttl < 0) ttl = r.getTTL(); } public void deleteRecord(Record r) { rrset.deleteRR(r); } public boolean expiredTTL() { long now = System.currentTimeMillis(); long expire = timeIn + (1000 * (long)ttl); return (now > expire); } public boolean TTL0Ours() { return (ttl == 0 && tid == Thread.currentThread()); } public boolean TTL0NotOurs() { return (ttl == 0 && tid != Thread.currentThread()); } public String toString() { StringBuffer sb = new StringBuffer(); sb.append(rrset); sb.append(" cl = "); sb.append(credibility); return 
sb.toString(); } } private class CacheCleaner extends Thread { public CacheCleaner() { setDaemon(true); setName("CacheCleaner"); } public void run() { while (true) { boolean interrupted = false; try { Thread.sleep(cleanInterval * 60 * 1000); } catch (InterruptedException e) { interrupted = true; } if (interrupted) continue; Enumeration e = names(); while (e.hasMoreElements()) { Name name = (Name) e.nextElement(); TypeClassMap tcm = findName(name); if (tcm == null) continue; Object [] elements; elements = tcm.getMultiple(Type.ANY, DClass.ANY); if (elements == null) continue; for (int i = 0; i < elements.length; i++) { Element element = (Element) elements[i]; if (element.ttl == 0) continue; if (element.expiredTTL()) removeSet(name, element.type, element.dclass, element); } } } } } private Verifier verifier; private boolean secure; private int maxncache = -1; private long cleanInterval = 30; private Thread cleaner; /** Creates an empty Cache */ public Cache() { super(); cleaner = new CacheCleaner(); } /** Empties the Cache */ public void clearCache() { clear(); } /** * Creates a Cache which initially contains all records in the specified file */ public Cache(String file) throws IOException { cleaner = new CacheCleaner(); Master m = new Master(file); Record record; while ((record = m.nextRecord()) != null) { addRecord(record, Credibility.HINT, m); } } /** * Adds a record to the Cache * @param r The record to be added * @param cred The credibility of the record * @param o The source of the record (this could be a Message, for example) @ @see Record */ public void addRecord(Record r, byte cred, Object o) { Name name = r.getName(); short type = r.getRRsetType(); short dclass = r.getDClass(); if (!Type.isRR(type)) return; int src = (o != null) ? 
o.hashCode() : 0; Element element = (Element) findExactSet(name, type, dclass); if (element == null || cred > element.credibility) addSet(name, type, dclass, element = new Element(r, cred, src)); else if (cred == element.credibility) { if (element.srcid != src) { element.rrset.clear(); element.srcid = src; } element.update(r); } } /** * Adds an RRset to the Cache * @param r The RRset to be added * @param cred The credibility of these records * @param o The source of this RRset (this could be a Message, for example) * @see RRset */ public void addRRset(RRset rrset, byte cred, Object o) { Name name = rrset.getName(); short type = rrset.getType(); short dclass = rrset.getDClass(); int src = (o != null) ? o.hashCode() : 0; if (verifier != null) rrset.setSecurity(verifier.verify(rrset, this)); if (secure && rrset.getSecurity() < DNSSEC.Secure) return; Element element = (Element) findExactSet(name, type, dclass); if (element == null || cred > element.credibility) addSet(name, type, dclass, new Element(rrset, cred, src)); } /** * Adds a negative entry to the Cache * @param name The name of the negative entry * @param type The type of the negative entry * @param dclass The class of the negative entry * @param ttl The ttl of the negative entry * @param cred The credibility of the negative entry * @param o The source of this data */ public void addNegative(Name name, short type, short dclass, int ttl, byte cred, Object o) { int src = (o != null) ? o.hashCode() : 0; Element element = (Element) findExactSet(name, type, dclass); if (element == null || cred > element.credibility) addSet(name, type, dclass, new Element(ttl, cred, src, type, dclass)); } /** * Looks up Records in the Cache. This follows CNAMEs and handles negatively * cached data. 
* @param name The name to look up * @param type The type to look up * @param dclass The class to look up * @param minCred The minimum acceptable credibility * @return A SetResponse object * @see SetResponse * @see Credibility */ public SetResponse lookupRecords(Name name, short type, short dclass, byte minCred) { SetResponse cr = null; Object [] objects = findSets(name, type, dclass); if (objects == null) return new SetResponse(SetResponse.UNKNOWN); int nelements = 0; for (int i = 0; i < objects.length; i++) { Element element = (Element) objects[i]; if (element.TTL0Ours()) { removeSet(name, type, dclass, element); nelements++; } else if (element.TTL0NotOurs()) { objects[i] = null; } else if (element.expiredTTL()) { removeSet(name, type, dclass, element); objects[i] = null; } else if (element.credibility < minCred) objects[i] = null; else nelements++; } if (nelements == 0) return new SetResponse(SetResponse.UNKNOWN); Element [] elements = new Element[nelements]; for (int i = 0, j = 0; i < objects.length; i++) { if (objects[i] == null) continue; elements[j++] = (Element) objects[i]; } for (int i = 0; i < elements.length; i++) { RRset rrset = elements[i].rrset; /* Is this a negatively cached entry? */ if (rrset == null) { /* * If we're looking for ANY, don't return it in * case we find something better. */ if (type == Type.ANY) continue; /* * If not, and we're not looking for a wildcard, * try that instead. */ if (!name.isWild()) { cr = lookupRecords(name.wild(1), type, dclass, minCred); if (cr.isSuccessful()) return cr; } return new SetResponse(SetResponse.NEGATIVE); } /* * Found a CNAME when we weren't looking for one. Time * to recurse. 
*/ if (type != Type.CNAME && type != Type.ANY && rrset.getType() == Type.CNAME) { CNAMERecord cname = (CNAMERecord) rrset.first(); cr = lookupRecords(cname.getTarget(), type, dclass, minCred); if (cr.isUnknown()) cr.set(SetResponse.PARTIAL, cname); cr.addCNAME(cname); return cr; } /* If we found something, save it */ if (cr == null) cr = new SetResponse(SetResponse.SUCCESSFUL); cr.addRRset(rrset); } /* * As far as I can tell, the only time cr will be null is if we * queried for ANY and only saw negative responses. So, return * NEGATIVE. */ if (cr == null && type == Type.ANY) return new SetResponse(SetResponse.NEGATIVE); return cr; } private RRset [] findRecords(Name name, short type, short dclass, byte minCred) { SetResponse cr = lookupRecords(name, type, dclass, minCred); if (cr.isSuccessful()) return cr.answers(); else return null; } /** * Looks up credible Records in the Cache (a wrapper around lookupRecords). * Unlike lookupRecords, this given no indication of why failure occurred. * @param name The name to look up * @param type The type to look up * @param dclass The class to look up * @return An array of RRsets, or null * @see Credibility */ public RRset [] findRecords(Name name, short type, short dclass) { return findRecords(name, type, dclass, Credibility.NONAUTH_ANSWER); } /** * Looks up Records in the Cache (a wrapper around lookupRecords). Unlike * lookupRecords, this given no indication of why failure occurred. * @param name The name to look up * @param type The type to look up * @param dclass The class to look up * @return An array of RRsets, or null * @see Credibility */ public RRset [] findAnyRecords(Name name, short type, short dclass) { return findRecords(name, type, dclass, Credibility.NONAUTH_ADDITIONAL); } /** * Adds all data from a Message into the Cache. Each record is added with * the appropriate credibility, and negative answers are cached as such. 
* @param in The Message to be added * @see Message */ public void addMessage(Message in) { Enumeration e; boolean isAuth = in.getHeader().getFlag(Flags.AA); Name queryName = in.getQuestion().getName(); short queryType = in.getQuestion().getType(); short queryClass = in.getQuestion().getDClass(); byte cred; short rcode = in.getHeader().getRcode(); int ancount = in.getHeader().getCount(Section.ANSWER); Cache c; if (secure) c = new Cache(); else c = this; if (rcode != Rcode.NOERROR && rcode != Rcode.NXDOMAIN) return; e = in.getSection(Section.ANSWER); while (e.hasMoreElements()) { Record r = (Record) e.nextElement(); if (isAuth && r.getName().equals(queryName)) cred = Credibility.AUTH_ANSWER; else if (isAuth) cred = Credibility.AUTH_NONAUTH_ANSWER; else cred = Credibility.NONAUTH_ANSWER; c.addRecord(r, cred, in); } if (ancount == 0 || rcode == Rcode.NXDOMAIN) { /* This is a negative response */ SOARecord soa = null; e = in.getSection(Section.AUTHORITY); while (e.hasMoreElements()) { Record r = (Record) e.nextElement(); if (r.getType() == Type.SOA) { soa = (SOARecord) r; break; } } if (isAuth) cred = Credibility.AUTH_AUTHORITY; else cred = Credibility.NONAUTH_AUTHORITY; if (soa != null) { int ttl = Math.min(soa.getTTL(), soa.getMinimum()); if (maxncache >= 0) ttl = Math.min(ttl, maxncache); if (ancount == 0) c.addNegative(queryName, queryType, queryClass, ttl, cred, in); else { Record [] cnames; cnames = in.getSectionArray(Section.ANSWER); int last = cnames.length - 1; Name cname; cname = ((CNAMERecord)cnames[last]).getTarget(); c.addNegative(cname, queryType, queryClass, ttl, cred, in); } } } e = in.getSection(Section.AUTHORITY); while (e.hasMoreElements()) { Record r = (Record) e.nextElement(); if (isAuth) cred = Credibility.AUTH_AUTHORITY; else cred = Credibility.NONAUTH_AUTHORITY; c.addRecord(r, cred, in); } e = in.getSection(Section.ADDITIONAL); while (e.hasMoreElements()) { Record r = (Record) e.nextElement(); if (isAuth) cred = Credibility.AUTH_ADDITIONAL; else 
cred = Credibility.NONAUTH_ADDITIONAL; c.addRecord(r, cred, in); } if (secure) { e = c.names(); while (e.hasMoreElements()) { Name name = (Name) e.nextElement(); TypeClassMap tcm = c.findName(name); if (tcm == null) continue; Object [] elements; elements = tcm.getMultiple(Type.ANY, DClass.ANY); if (elements == null) continue; for (int i = 0; i < elements.length; i++) { Element element = (Element) elements[i]; RRset rrset = element.rrset; /* for now, ignore negative cache entries */ if (rrset == null) continue; if (verifier != null) rrset.setSecurity( verifier.verify(rrset, this)); if (rrset.getSecurity() < DNSSEC.Secure) continue; addSet(name, rrset.getType(), rrset.getDClass(), element); } } } } /** * Flushes an RRset from the cache * @param name The name of the records to be flushed * @param type The type of the records to be flushed * @param dclass The class of the records to be flushed * @see RRset */ void flushSet(Name name, short type, short dclass) { Element element = (Element) findExactSet(name, type, dclass); if (element == null || element.rrset == null) return; removeSet(name, type, dclass, element); } /** * Flushes all RRsets with a given name from the cache * @param name The name of the records to be flushed * @see RRset */ void flushName(Name name) { removeName(name); } /** * Defines a module to be used for data verification (DNSSEC). An * implementation is found in org.xbill.DNSSEC.security.DNSSECVerifier, * which requires Java 2 or above and the Java Cryptography Extensions. */ public void setVerifier(Verifier v) { verifier = v; } /** * Mandates that all data stored in this Cache must be verified and proven * to be secure, using a verifier (as defined in setVerifier). */ public void setSecurePolicy() { secure = true; } /** * Sets the maximum length of time that a negative response will be stored * in this Cache. A negative value disables this feature (that is, sets * no limit). 
*/ public void setMaxNCache(int seconds) { maxncache = seconds; } /** * Sets the interval (in minutes) that all expired records will be expunged * the cache. The default is 30 minutes. 0 or a negative value disables this * feature. */ public void setCleanInterval(int minutes) { cleanInterval = minutes; if (cleanInterval <= 0) cleaner = null; else if (cleaner == null) cleaner = new CacheCleaner(); } }
package org.loklak.harvester; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.io.UnsupportedEncodingException; import java.net.MalformedURLException; import java.net.URL; import java.net.URLEncoder; import java.util.ArrayList; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.LinkedHashSet; import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Semaphore; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.loklak.api.client.ClientConnection; import org.loklak.data.DAO; import org.loklak.data.ProviderType; import org.loklak.data.Timeline; import org.loklak.data.MessageEntry; import org.loklak.data.UserEntry; import org.loklak.tools.UTF8; public class TwitterScraper { public static ExecutorService executor = Executors.newFixedThreadPool(20); public static Timeline search(final String query, final Timeline.Order order) { // check // https://support.twitter.com/articles/71577-how-to-use-advanced-twitter-search String https_url = ""; try { StringBuilder t = new StringBuilder(query.length()); for (String s: query.replace('+', ' ').split(" ")) { t.append(' '); if (s.startsWith("since:") || s.startsWith("until:")) { int u = s.indexOf('_'); t.append(u < 0 ? s : s.substring(0, u)); } else { t.append(s); } } String q = t.length() == 0 ? 
"*" : URLEncoder.encode(t.substring(1), "UTF-8"); https_url = "https://twitter.com/search?q=" + q + "&src=typd&vertical=default&f=tweets"; } catch (UnsupportedEncodingException e) {} Timeline timeline = null; try { ClientConnection connection = new ClientConnection(https_url); if (connection.inputStream == null) return null; try { BufferedReader br = new BufferedReader(new InputStreamReader(connection.inputStream, UTF8.charset)); timeline = search(br, order); } catch (IOException e) { e.printStackTrace(); } finally { connection.close(); } } catch (IOException e) { // this could mean that twitter rejected the connection (DoS protection?) e.printStackTrace(); if (timeline == null) timeline = new Timeline(order); }; // wait until all messages in the timeline are ready if (timeline == null) { // timeout occurred timeline = new Timeline(order); } return timeline; } private static Timeline search(final BufferedReader br, final Timeline.Order order) throws IOException { Timeline timeline = new Timeline(order); String input; Map<String, prop> props = new HashMap<String, prop>(); Set<String> images = new LinkedHashSet<>(); Set<String> videos = new LinkedHashSet<>(); String place_id = "", place_name = ""; boolean parsing_favourite = false, parsing_retweet = false; while ((input = br.readLine()) != null){ input = input.trim(); //System.out.println(input); // uncomment temporary to debug or add new fields int p; if ((p = input.indexOf("class=\"account-group")) > 0) { props.put("userid", new prop(input, p, "data-user-id")); continue; } if ((p = input.indexOf("class=\"avatar")) > 0) { props.put("useravatarurl", new prop(input, p, "src")); continue; } if ((p = input.indexOf("class=\"fullname")) > 0) { props.put("userfullname", new prop(input, p, null)); continue; } if ((p = input.indexOf("class=\"username")) > 0) { props.put("usernickname", new prop(input, p, null)); continue; } if ((p = input.indexOf("class=\"tweet-timestamp")) > 0) { props.put("tweetstatusurl", new prop(input, 
0, "href")); props.put("tweettimename", new prop(input, p, "title")); // don't continue here because "class=\"_timestamp" is in the same line } if ((p = input.indexOf("class=\"_timestamp")) > 0) { props.put("tweettimems", new prop(input, p, "data-time-ms")); continue; } if ((p = input.indexOf("class=\"ProfileTweet-action--retweet")) > 0) { parsing_retweet = true; continue; } if ((p = input.indexOf("class=\"ProfileTweet-action--favorite")) > 0) { parsing_favourite = true; continue; } if ((p = input.indexOf("class=\"TweetTextSize")) > 0) { props.put("tweettext", new prop(input, p, null)); continue; } if ((p = input.indexOf("class=\"ProfileTweet-actionCount")) > 0) { if (parsing_retweet) { props.put("tweetretweetcount", new prop(input, p, "data-tweet-stat-count")); parsing_retweet = false; } if (parsing_favourite) { props.put("tweetfavouritecount", new prop(input, p, "data-tweet-stat-count")); parsing_favourite = false; } continue; } // get images if ((p = input.indexOf("class=\"media media-thumbnail twitter-timeline-link media-forward is-preview")) > 0 || (p = input.indexOf("class=\"multi-photo")) > 0) { images.add(new prop(input, p, "data-resolved-url-large").value); continue; } // we have two opportunities to get video thumbnails == more images; images in the presence of video content should be treated as thumbnail for the video if ((p = input.indexOf("class=\"animated-gif-thumbnail\"")) > 0) { images.add(new prop(input, 0, "src").value); continue; } if ((p = input.indexOf("class=\"animated-gif\"")) > 0) { images.add(new prop(input, p, "poster").value); continue; } if ((p = input.indexOf("<source video-src")) >= 0 && input.indexOf("type=\"video/") > p) { videos.add(new prop(input, p, "video-src").value); continue; } if ((p = input.indexOf("class=\"Tweet-geo")) > 0) { prop place_name_prop = new prop(input, p, "title"); place_name = place_name_prop.value; continue; } if ((p = input.indexOf("class=\"ProfileTweet-actionButton u-linkClean js-nav js-geo-pivot-link")) > 
0) { prop place_id_prop = new prop(input, p, "data-place-id"); place_id = place_id_prop.value; continue; } if (props.size() == 10) { // the tweet is complete, evaluate the result UserEntry user = new UserEntry( props.get("userid").value, props.get("usernickname").value, props.get("useravatarurl").value, MessageEntry.html2utf8(props.get("userfullname").value) ); ArrayList<String> imgs = new ArrayList<String>(images.size()); imgs.addAll(images); ArrayList<String> vids = new ArrayList<String>(videos.size()); vids.addAll(videos); TwitterTweet tweet = new TwitterTweet( user.getScreenName(), Long.parseLong(props.get("tweettimems").value), props.get("tweettimename").value, props.get("tweetstatusurl").value, props.get("tweettext").value, Long.parseLong(props.get("tweetretweetcount").value), Long.parseLong(props.get("tweetfavouritecount").value), imgs, vids, place_name, place_id ); //new Thread(tweet).start(); // todo: use thread pools //tweet.run(); // for debugging executor.execute(tweet); timeline.add(tweet, user); images.clear(); props.clear(); continue; } } //for (prop p: props.values()) System.out.println(p); br.close(); return timeline; } private static class prop { public String key, value = null; public prop(String line, int start, String key) { this.key = key; if (key == null) { int p = line.indexOf('>', start); if (p > 0) { int c = 1; int q = p + 1; while (c > 0 && q < line.length()) { char a = line.charAt(q); if (a == '<') { if (line.charAt(q+1) != 'i') { if (line.charAt(q+1) == '/') c--; else c++; } } q++; } value = line.substring(p + 1, q - 1); } } else { int p = line.indexOf(key + "=\"", start); if (p > 0) { int q = line.indexOf('"', p + key.length() + 2); if (q > 0) { value = line.substring(p + key.length() + 2, q); } } } } @SuppressWarnings("unused") public boolean success() { return value != null; } public String toString() { return this.key + "=" + (this.value == null ? 
"unknown" : this.value); } } final static Pattern timeline_link_pattern = Pattern.compile("<a .*?href=\"(.*?)\".*?data-expanded-url=\"(.*?)\".*?twitter-timeline-link.*title=\"(.*?)\".*?>.*?</a>"); final static Pattern timeline_embed_pattern = Pattern.compile("<a .*?href=\"(.*?)\".*?twitter-timeline-link.*?>pic.twitter.com/(.*?)</a>"); final static Pattern emoji_pattern = Pattern.compile("<img .*?class=\"twitter-emoji\".*?alt=\"(.*?)\".*?>"); public static class TwitterTweet extends MessageEntry implements Runnable { private Semaphore ready = null; private Boolean exists = null; public TwitterTweet( final String user_screen_name_raw, final long created_at_raw, final String created_at_name_raw, // not used here but should be compared to created_at_raw final String status_id_url_raw, final String text_raw, final long retweets, final long favourites, final Collection<String> images, final Collection<String> videos, final String place_name, final String place_id) throws MalformedURLException { super(); this.source_type = SourceType.TWITTER; this.provider_type = ProviderType.SCRAPED; this.screen_name = user_screen_name_raw; this.created_at = new Date(created_at_raw); this.status_id_url = new URL("https://twitter.com" + status_id_url_raw); int p = status_id_url_raw.lastIndexOf('/'); this.id_str = p >= 0 ? 
status_id_url_raw.substring(p + 1) : "-1"; this.retweet_count = retweets; this.favourites_count = favourites; this.place_name = place_name; this.place_id = place_id; this.images = new LinkedHashSet<>(); for (String image: images) this.images.add(image); this.videos = new LinkedHashSet<>(); for (String video: videos) this.videos.add(video); //Date d = new Date(timemsraw); //System.out.println(d); this.text = text_raw.replaceAll("</?(s|b|strong)>", "").replaceAll("<a href=\"/hashtag.*?>", "").replaceAll("<a.*?class=\"twitter-atreply.*?>", "").replaceAll("<span.*?span>", "").replaceAll(" ", " "); // this.text MUST be analysed with analyse(); this is not done here because it should be started concurrently; run run(); } private void analyse() { while (true) { try { Matcher m = timeline_link_pattern.matcher(this.text); if (m.find()) { //String href = m.group(1); String expanded = RedirectUnshortener.unShorten(m.group(2)); //String title = m.group(3); this.text = m.replaceFirst(expanded); continue; } } catch (Throwable e) { e.printStackTrace(); break; } try { Matcher m = timeline_embed_pattern.matcher(this.text); if (m.find()) { //String href = resolveShortURL(m.group(1)); String shorturl = RedirectUnshortener.unShorten(m.group(2)); this.text = m.replaceFirst("https://pic.twitter.com/" + shorturl + " "); continue; } } catch (Throwable e) { e.printStackTrace(); break; } try { Matcher m = emoji_pattern.matcher(this.text); if (m.find()) { String emoji = m.group(1); this.text = m.replaceFirst(emoji); continue; } } catch (Throwable e) { e.printStackTrace(); break; } break; } this.text = html2utf8(this.text).replaceAll(" ", " ").trim(); } @Override public void run() { this.ready = new Semaphore(0); try { this.exists = new Boolean(DAO.existMessage(this.getIdStr())); // only analyse and enrich the message if it does not actually exist in the search index because it will be abandoned otherwise anyway //if (!this.exists) { this.analyse(); this.enrich(); } catch (Throwable e) { 
e.printStackTrace(); } finally { this.ready.release(1000); } } public boolean isReady() { return this.ready == null || this.ready.availablePermits() > 0; } public void waitReady() { if (this.ready != null) try { this.ready.acquire(); } catch (InterruptedException e) {} } /** * the exist method has a 3-value boolean logic: false, true and NULL for: don't know * @return */ public Boolean exist() { return this.exists; } } /** * Usage: java twitter4j.examples.search.SearchTweets [query] * * @param args search query */ public static void main(String[] args) { Timeline result = TwitterScraper.search(args[0], Timeline.Order.CREATED_AT); for (MessageEntry tweet : result) { if (tweet instanceof TwitterTweet) { ((TwitterTweet) tweet).waitReady(); } System.out.println("@" + tweet.getScreenName() + " - " + tweet.getText()); } System.exit(0); } }
package org.ow2.chameleon.rose.pubsubhubbub.constants; public class PubsubhubbubConstants { public static final String RSS_EVENT_TOPIC = "org/ow2/chameleon/syndication"; public static final String FEED_TITLE_NEW = "Endpoint added"; public static final String FEED_TITLE_REMOVE = "Endpoint removed"; public static final String FEED_AUTHOR = "Rose RSS"; public static final String HTTP_POST_HEADER_TYPE = "application/x-www-form-urlencoded"; public static final String HTTP_POST_PARAMETER_HUB_MODE = "hub.mode"; public static final String HTTP_POST_PARAMETER_URL_CALLBACK = "hub.callback"; public static final String HTTP_POST_PARAMETER_RSS_TOPIC_URL = "hub.topic"; public static final String HTTP_POST_PARAMETER_MACHINEID = "machine.id"; public static final String HTTP_POST_PARAMETER_ENDPOINT_FILTER = "hub.endp.filter"; public static final String HTTP_POST_PARAMETER_RECONNECT = "reconnect"; public static final String HTTP_POST_UPDATE_SUBSTRIPCTION_OPTION = "hub.subscription"; public static final String HTTP_POST_UPDATE_CONTENT = "hub.content"; public static final String HUB_SUBSCRIPTION_UPDATE_ENDPOINT_ADDED = "endpoint.add"; public static final String HUB_SUBSCRIPTION_UPDATE_ENDPOINT_REMOVED = "endpoint.remove"; public static final String DEFAULT_HTTP_PORT = "8080"; public enum HubMode { publish, unpublish, update, subscribe, unsubscribe, getAllEndpoints; } }
package org.mit.jstreamit; import java.io.IOException; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Queue; import java.util.Set; import org.mit.jstreamit.PrimitiveWorker.StreamPosition; import org.objectweb.asm.ClassReader; import org.objectweb.asm.ClassVisitor; import org.objectweb.asm.MethodVisitor; import org.objectweb.asm.Opcodes; import org.objectweb.asm.Type; import org.objectweb.asm.tree.AbstractInsnNode; import org.objectweb.asm.tree.FieldInsnNode; import org.objectweb.asm.tree.IntInsnNode; import org.objectweb.asm.tree.LdcInsnNode; import org.objectweb.asm.tree.MethodInsnNode; import org.objectweb.asm.tree.MethodNode; import org.objectweb.asm.tree.VarInsnNode; /** * * @author Jeffrey Bosboom <jeffreybosboom@gmail.com> * @since 1/29/2013 */ final class MessageConstraint { private final PrimitiveWorker<?, ?> sender, recipient; private final int latency; private final PrimitiveWorker.StreamPosition direction; private final SDEPData sdepData; private MessageConstraint(PrimitiveWorker<?, ?> sender, PrimitiveWorker<?, ?> recipient, int latency, StreamPosition direction, SDEPData sdepData) { this.sender = sender; this.recipient = recipient; this.latency = latency; this.direction = direction; this.sdepData = sdepData; } public PrimitiveWorker<?, ?> getSender() { return sender; } public PrimitiveWorker<?, ?> getRecipient() { return recipient; } public int getLatency() { return latency; } public PrimitiveWorker.StreamPosition getDirection() { return direction; } public int sdep(int downstreamExecutionCount) { return sdepData.sdep(downstreamExecutionCount); } public int reverseSdep(int upstreamExecutionCount) { return sdepData.reverseSdep(upstreamExecutionCount); } @Override public String toString() { return 
String.format("%s from %s to %s after %d", direction, sender, recipient, latency); } /** * Grovels through the stream graph, discovering message constraints. * @param graph * @return */ public static List<MessageConstraint> findConstraints(PrimitiveWorker<?, ?> graph) { List<MessageConstraint> mc = new ArrayList<>(); List<PrimitiveWorker<?, ?>> workers = new ArrayList<>(); workers.add(graph); workers.addAll(graph.getAllSuccessors()); //Parsing bytecodes is (relatively) expensive; we only want to do it //once per class, no matter how many instances are in the stream graph. //If a class doesn't send messages, it maps to an empty list, and we do //nothing in the loop below. Map<Class<?>, List<WorkerData>> workerDataCache = new HashMap<>(); Map<Edge, SDEPData> sdepCache = new HashMap<>(); for (PrimitiveWorker<?, ?> sender : workers) { List<WorkerData> datas = workerDataCache.get(sender.getClass()); if (datas == null) { datas = buildWorkerData(sender); workerDataCache.put(sender.getClass(), datas); } for (WorkerData d : datas) { int latency = d.getLatency(sender); for (PrimitiveWorker<?, ?> recipient : d.getPortal(sender).getRecipients()) { StreamPosition direction = sender.compareStreamPosition(recipient); Edge edge = direction == StreamPosition.UPSTREAM ? new Edge(sender, recipient) : new Edge(recipient, sender); SDEPData sdepData = computeSDEP(edge, sdepCache); mc.add(new MessageConstraint(sender, recipient, latency, direction, sdepData)); } } } return Collections.unmodifiableList(mc); } //<editor-fold defaultstate="collapsed" desc="WorkerData building (bytecode parsing)"> /** * WorkerData encapsulates the Field(s) and/or constant for the Portal and * latency value of a particular class. (Note that one class might have * multiple WorkerDatas if it sends multiple messages.) WorkerData also * provides methods to easily get the field values from an object of the * class. 
*/ private static class WorkerData { private final Field portalField, latencyField; private final int constantLatency; WorkerData(Field portalField, Field latencyField) { this(portalField, latencyField, Integer.MIN_VALUE); } WorkerData(Field portalField, int constantLatency) { this(portalField, null, constantLatency); } WorkerData(Field portalField, Field latencyField, int constantLatency) { this.portalField = portalField; this.latencyField = latencyField; this.constantLatency = constantLatency; this.portalField.setAccessible(true); if (this.latencyField != null) this.latencyField.setAccessible(true); } public Portal<?> getPortal(PrimitiveWorker<?, ?> worker) { try { return (Portal<?>)portalField.get(worker); } catch (IllegalAccessException | IllegalArgumentException | NullPointerException | ExceptionInInitializerError ex) { throw new AssertionError("getting a portal object", ex); } } public int getLatency(PrimitiveWorker<?, ?> worker) { if (latencyField == null) return constantLatency; try { return latencyField.getInt(worker); } catch (IllegalAccessException | IllegalArgumentException | NullPointerException | ExceptionInInitializerError ex) { throw new AssertionError("getting latency from field", ex); } } @Override public String toString() { return portalField.toGenericString()+", "+(latencyField != null ? latencyField.toGenericString() : constantLatency); } } private static List<WorkerData> buildWorkerData(PrimitiveWorker<?, ?> worker) { Class<?> klass = worker.getClass(); //A worker can only send messages if it has a Portal field, and most //workers with Portal fields will send messages, so this is an efficient //and useful test to avoid the bytecode parse. 
if (!hasPortalField(worker.getClass())) return Collections.emptyList(); return parseBytecodes(klass); } private static boolean hasPortalField(Class<?> klass) { while (klass != null) { for (Field f : klass.getDeclaredFields()) if (f.getType().equals(Portal.class)) return true; for (Class<?> i : klass.getInterfaces()) for (Field f : i.getDeclaredFields()) if (f.getType().equals(Portal.class)) return true; klass = klass.getSuperclass(); } return false; } /** * Parse the given class' bytecodes, looking for calls to getHandle() and * returning WorkerDatas holding the calls' arguments. * @param klass * @return */ private static List<WorkerData> parseBytecodes(Class<?> klass) { ClassReader r = null; try { r = new ClassReader(klass.getName()); } catch (IOException ex) { throw new IllegalStreamGraphException("Couldn't get bytecode for "+klass.getName(), ex); } WorkClassVisitor wcv = new WorkClassVisitor(); r.accept(wcv, ClassReader.SKIP_DEBUG | ClassReader.SKIP_FRAMES); MethodNode mn = wcv.getWorkMethodNode(); List<WorkerData> workerDatas = new ArrayList<>(); for (AbstractInsnNode insn = mn.instructions.getFirst(); insn != null; insn = insn.getNext()) { if (insn instanceof MethodInsnNode) { MethodInsnNode call = (MethodInsnNode)insn; if (call.name.equals("getHandle") && call.owner.equals(Type.getType(Portal.class).getInternalName())) workerDatas.add(dataFromCall(klass, call)); } } return workerDatas.isEmpty() ? Collections.<WorkerData>emptyList() : Collections.unmodifiableList(workerDatas); } /** * Parse the given getHandle() call instruction and preceding instructions * into a WorkerData. This is a rather brittle pattern-matching job and * will fail on obfuscated bytecodes. * @param call * @return */ private static WorkerData dataFromCall(Class<?> klass, MethodInsnNode call) { //Latency is either an integer constant or a getfield on this. 
Field latencyField = null; int constantLatency = Integer.MIN_VALUE; AbstractInsnNode latencyInsn = call.getPrevious(); if (latencyInsn instanceof FieldInsnNode) { FieldInsnNode fieldInsn = (FieldInsnNode)latencyInsn; if (fieldInsn.getOpcode() != Opcodes.GETFIELD) throw new IllegalStreamGraphException("Unsupported getHandle() use in "+klass+": latency field insn opcode "+fieldInsn.getOpcode()); if (!fieldInsn.desc.equals(Type.INT_TYPE.getDescriptor())) throw new IllegalStreamGraphException("Unsupported getHandle() use in "+klass+": latency field desc "+fieldInsn.desc); if (!fieldInsn.owner.equals(Type.getType(klass).getInternalName())) throw new IllegalStreamGraphException("Unsupported getHandle() use in "+klass+": latency field owner "+fieldInsn.owner); //Move latencyInsn to sync up with the other else-if branches. latencyInsn = latencyInsn.getPrevious(); //We must be loading from this. if (latencyInsn.getOpcode() != Opcodes.ALOAD) throw new IllegalStreamGraphException("Unsupported getHandle() use in "+klass+": getfield subject opcode "+latencyInsn.getOpcode()); int varIdx = ((VarInsnNode)latencyInsn).var; if (varIdx != 0) throw new IllegalStreamGraphException("Unsupported getHandle() use in "+klass+": getfield not from this but from "+varIdx); //Check the field we're loading from is constant (final). //A static field is okay here since it isn't a reference parameter. 
try { latencyField = klass.getDeclaredField(fieldInsn.name); if (!Modifier.isFinal(latencyField.getModifiers())) throw new IllegalStreamGraphException("Unsupported getHandle() use in "+klass+": latency field not final: "+latencyField.toGenericString()); } catch (NoSuchFieldException ex) { throw new IllegalStreamGraphException("Unsupported getHandle() use in "+klass+": getfield not from this but from "+varIdx); } } else if (latencyInsn instanceof LdcInsnNode) { Object constant = ((LdcInsnNode)latencyInsn).cst; if (!(constant instanceof Integer)) throw new IllegalStreamGraphException("Unsupported getHandle() use in "+klass+": ldc "+constant); constantLatency = ((Integer)constant); } else switch (latencyInsn.getOpcode()) { case Opcodes.ICONST_M1: constantLatency = -1; break; case Opcodes.ICONST_0: constantLatency = 0; break; case Opcodes.ICONST_1: constantLatency = 1; break; case Opcodes.ICONST_2: constantLatency = 2; break; case Opcodes.ICONST_3: constantLatency = 3; break; case Opcodes.ICONST_4: constantLatency = 4; break; case Opcodes.ICONST_5: constantLatency = 5; break; case Opcodes.BIPUSH: case Opcodes.SIPUSH: constantLatency = ((IntInsnNode)latencyInsn).operand; break; default: throw new IllegalStreamGraphException("Unsupported getHandle() use in "+klass+": latencyInsn opcode "+latencyInsn.getOpcode()); } //Finally, we've parsed the latency parameter. //Next is an aload_0 for the sender parameter. AbstractInsnNode senderInsn = latencyInsn.getPrevious(); if (senderInsn.getOpcode() != Opcodes.ALOAD || ((VarInsnNode)senderInsn).var != 0) throw new IllegalStreamGraphException("Unsupported getHandle() use in "+klass+": bad sender"); //Finally, a getfield of this for a final Portal instance field. 
AbstractInsnNode portalInsn = senderInsn.getPrevious(); if (!(portalInsn instanceof FieldInsnNode)) throw new IllegalStreamGraphException("Unsupported getHandle() use in "+klass+": portal getfield opcode "+portalInsn.getOpcode()); FieldInsnNode fieldInsn = (FieldInsnNode)portalInsn; if (fieldInsn.getOpcode() != Opcodes.GETFIELD) throw new IllegalStreamGraphException("Unsupported getHandle() use in "+klass+": portal field insn opcode "+fieldInsn.getOpcode()); if (!fieldInsn.desc.equals(Type.getType(Portal.class).getDescriptor())) throw new IllegalStreamGraphException("Unsupported getHandle() use in "+klass+": portal field desc "+fieldInsn.desc); if (!fieldInsn.owner.equals(Type.getType(klass).getInternalName())) throw new IllegalStreamGraphException("Unsupported getHandle() use in "+klass+": portal field owner "+fieldInsn.owner); portalInsn = portalInsn.getPrevious(); //We must be loading from this. if (portalInsn.getOpcode() != Opcodes.ALOAD) throw new IllegalStreamGraphException("Unsupported getHandle() use in "+klass+": portal getfield subject opcode "+portalInsn.getOpcode()); int varIdx = ((VarInsnNode)portalInsn).var; if (varIdx != 0) throw new IllegalStreamGraphException("Unsupported getHandle() use in "+klass+": portal getfield not from this but from "+varIdx); //Check the field we're loading from is constant (final) and nonstatic. 
Field portalField; try { portalField = klass.getDeclaredField(fieldInsn.name); if (!Modifier.isFinal(portalField.getModifiers())) throw new IllegalStreamGraphException("Unsupported getHandle() use in "+klass+": portal field not final: "+portalField.toGenericString()); if (Modifier.isStatic(portalField.getModifiers())) throw new IllegalStreamGraphException("Unsupported getHandle() use in "+klass+": portal field is static: "+portalField.toGenericString()); } catch (NoSuchFieldException ex) { throw new IllegalStreamGraphException("Unsupported getHandle() use in "+klass+": portal getfield not from this but from "+varIdx); } return latencyField != null ? new WorkerData(portalField, latencyField) : new WorkerData(portalField, constantLatency); } /** * Builds a MethodNode for the work() method. */ private static class WorkClassVisitor extends ClassVisitor { private MethodNode mn; WorkClassVisitor() { super(Opcodes.ASM4); } @Override public MethodVisitor visitMethod(int access, String name, String desc, String signature, String[] exceptions) { if (name.equals("work") && desc.equals("()V")) { mn = new MethodNode(Opcodes.ASM4, access, name, desc, signature, exceptions); return mn; } return null; } public MethodNode getWorkMethodNode() { return mn; } } //</editor-fold> /** * Computes the SDEPData for the given edge between two workers, using the * given cache of previously-computed SDEP data. (The cache allows us to * reuse common parts of the path from a sender to many recipients.) 
* @param goalEdge the edge to compute for * @param cache previously-computed SDEP data * @return SDEPData for the given edge */ private static SDEPData computeSDEP(Edge goalEdge, Map<Edge, SDEPData> cache) { Set<PrimitiveWorker<?, ?>> allNodes = new NodesInPathsBetweenComputer(goalEdge.upstream, goalEdge.downstream).get(); //TODO: see if NodesInPathsComputer adds these itself or not allNodes.add(goalEdge.upstream); allNodes.add(goalEdge.downstream); List<PrimitiveWorker<?, ?>> sortedNodes = topologicalSort(allNodes); for (PrimitiveWorker<?, ?> w : sortedNodes) { Edge selfEdge = new Edge(w, w); if (!cache.containsKey(selfEdge)) cache.put(selfEdge, SDEPData.fromWorker(w)); } //For each pair of nodes that follow one another, extend the edge from //the upstream through the pair of nodes, merging if such an edge is //already in the cache. Because it's topologically ordered, we process //all upstream pairs before downstream pairs. for (int i = 0; i < sortedNodes.size(); ++i) { for (int j = i; j < sortedNodes.size(); ++j) { if (!sortedNodes.get(i).getSuccessors().contains(sortedNodes.get(j))) continue; Edge upstreamEdge = new Edge(sortedNodes.get(0), sortedNodes.get(i)); Edge downstreamEdge = new Edge(sortedNodes.get(j), sortedNodes.get(j)); assert cache.containsKey(upstreamEdge) : "Bad topological sort?"; SDEPData data = cache.get(upstreamEdge); assert cache.containsKey(downstreamEdge) : "Not caching self-edges?"; SDEPData selfData = cache.get(downstreamEdge); Edge producedEdge = new Edge(upstreamEdge.upstream, downstreamEdge.downstream); SDEPData seriesData = SDEPData.fromSeriesData(data, selfData); SDEPData currentData = cache.get(producedEdge); if (currentData != null) cache.put(producedEdge, SDEPData.fromParallelData(currentData, seriesData)); else cache.put(producedEdge, seriesData); } } assert cache.containsKey(goalEdge); return cache.get(goalEdge); } /** * Encapsulates the data built during hierarchical SDEP computation for a * particular pair of workers (not 
stored in the structure).
 *
 * Compare LatencyEdge in classic StreamIt.
 */
private static class SDEPData {
	// Execution counts of the initialization and steady-state phases for
	// the upstream and downstream endpoints of this edge.
	private final int upstreamInitExecutions, upstreamSteadyExecutions;
	private final int downstreamInitExecutions, downstreamSteadyExecutions;
	/**
	 * The actual dependency function values. In order for the downstream
	 * worker to execute i times, the upstream worker must have executed at
	 * least sdep[i] times. Note that sdep[0] == 0.
	 *
	 * This array is (downstreamInitExecutions + downstreamSteadyExecutions + 1)
	 * in size; queries beyond that can be reduced to this region only
	 * because SDEP is periodic in the steady state.
	 */
	private final int[] sdep;

	private SDEPData(int upstreamInitExecutions, int upstreamSteadyExecutions, int downstreamInitExecutions, int downstreamSteadyExecutions, int[] sdep) {
		this.upstreamInitExecutions = upstreamInitExecutions;
		this.upstreamSteadyExecutions = upstreamSteadyExecutions;
		this.downstreamInitExecutions = downstreamInitExecutions;
		this.downstreamSteadyExecutions = downstreamSteadyExecutions;
		this.sdep = sdep;
	}

	/**
	 * Constructs SDEPData relating a worker to itself.
	 */
	public static SDEPData fromWorker(PrimitiveWorker<?, ?> worker) {
		//A plain worker has 0 init executions and 1 steady execution, and
		//an SDEP(1) of 1. TODO: prework may mean 1 init execution?
		return new SDEPData(0, 1, 0, 1, new int[]{0, 1});
	}

	/**
	 * Merge two edges that connect the same nodes. (Used for taking the
	 * maximum over splitjoins.)
	 */
	public static SDEPData fromParallelData(SDEPData left, SDEPData right) {
		assert left != right;
		// The merged init phase lasts until both branches have initialized;
		// the upstream requirement is the larger of the two branches'.
		int downstreamInitExecutions = Math.max(left.downstreamInitExecutions, right.downstreamInitExecutions);
		int upstreamInitExecutions = Math.max(left.sdep(downstreamInitExecutions), right.sdep(downstreamInitExecutions));
		int use1 = left.upstreamSteadyExecutions, use2 = right.upstreamSteadyExecutions;
		int dse1 = left.downstreamSteadyExecutions, dse2 = right.downstreamSteadyExecutions;
		//TODO: why only using use2/dse2? because we do use1/dse1 * mult later?
		// Find a common steady-state period for both branches (LCM-style
		// multiplier applied to the left branch's counts below).
		int uMult = use2 / gcd(use1, use2);
		int dMult = dse2 / gcd(dse1, dse2);
		int mult = uMult / gcd(uMult, dMult) * dMult;
		int upstreamSteadyExecutions = use1 * mult;
		int downstreamSteadyExecutions = dse1 * mult;
		// Pointwise maximum: at every downstream count, the requirement is
		// the stricter of the two branches.
		int[] sdep = new int[downstreamInitExecutions + downstreamSteadyExecutions + 1];
		for (int i = 0; i < sdep.length; ++i)
			sdep[i] = Math.max(left.sdep(i), right.sdep(i));
		return new SDEPData(upstreamInitExecutions, upstreamSteadyExecutions, downstreamInitExecutions, downstreamSteadyExecutions, sdep);
	}

	/**
	 * Merge two edges that connect two different nodes. (Pipelines.)
	 */
	public static SDEPData fromSeriesData(SDEPData upstream, SDEPData downstream) {
		// Chain the init phases: the overall upstream init must cover both
		// its own init and whatever the downstream's init demands of it.
		int upstreamInitExecutions = Math.max(upstream.upstreamInitExecutions, upstream.sdep(downstream.upstreamInitExecutions));
		int downstreamInitExecutions = Math.max(downstream.downstreamInitExecutions, upstream.reverseSdep(downstream.downstreamInitExecutions));
		// Match rates at the shared middle worker so both edges cover the
		// same number of its executions per combined period.
		int gcd = gcd(upstream.downstreamSteadyExecutions, downstream.upstreamSteadyExecutions);
		int uMult = downstream.upstreamSteadyExecutions / gcd;
		int dMult = upstream.downstreamSteadyExecutions / gcd;
		int upstreamSteadyExecutions = upstream.upstreamSteadyExecutions * uMult;
		int downstreamSteadyExecutions = downstream.downstreamSteadyExecutions * dMult;
		// Function composition: the overall SDEP is upstream's SDEP applied
		// to downstream's SDEP.
		int[] sdep = new int[downstreamInitExecutions + downstreamSteadyExecutions + 1];
		for (int i = 0; i < sdep.length; ++i)
			sdep[i] = upstream.sdep(downstream.sdep(i));
		return new SDEPData(upstreamInitExecutions, upstreamSteadyExecutions, downstreamInitExecutions, downstreamSteadyExecutions, sdep);
	}

	/**
	 * SDEP lookup: minimum upstream executions required for the downstream
	 * worker to execute the given number of times.
	 */
	public int sdep(int downstreamExecutionCount) {
		// Within the stored prefix: direct array lookup.
		if (downstreamExecutionCount < downstreamInitExecutions + 1)
			return sdep[downstreamExecutionCount];
		// Beyond the prefix, exploit steady-state periodicity: each whole
		// steady state adds upstreamSteadyExecutions to the result.
		int steadyStates = (downstreamExecutionCount - (downstreamInitExecutions + 1)) / downstreamSteadyExecutions;
		//Where we are in the current steady state, adjusted to ignore the
		//initialization prefix in the sdep array.
		int curSteadyStateProgress = (downstreamExecutionCount - (downstreamInitExecutions + 1)) % downstreamSteadyExecutions + downstreamInitExecutions + 1;
		return sdep[curSteadyStateProgress] + steadyStates * upstreamSteadyExecutions;
	}

	/**
	 * Inverse SDEP: downstream execution count corresponding to the given
	 * upstream execution count.
	 */
	public int reverseSdep(int upstreamExecutionCount) {
		//Factor out steady state executions, leaving upstreamExecutionCount
		//with only a partial steady state.
		int downstreamSteadyStateExecutions = 0;
		if (upstreamExecutionCount >= upstreamInitExecutions + upstreamSteadyExecutions + 1) {
			int steadyStates = (upstreamExecutionCount - upstreamInitExecutions - 1) / upstreamSteadyExecutions;
			downstreamSteadyStateExecutions = steadyStates * downstreamSteadyExecutions;
			upstreamExecutionCount -= steadyStates * upstreamSteadyExecutions;
		}
		//Find how many times the downstream executed during the
		//upstreamExecutionCount portion of the steady state.
		int downstreamExecutionCount = Arrays.binarySearch(sdep, upstreamExecutionCount);
		//Arrays.binarySearch doesn't guarantee which index it'll find, but
		//we want the first one. If we didn't find one, this is a no-op.
		// NOTE(review): this loop tests sdep[downstreamExecutionCount] itself,
		// so it decrements PAST the first matching index (stops one below it)
		// — looks like an off-by-one vs. the stated intent; TODO confirm.
		while (downstreamExecutionCount > 0 && sdep[downstreamExecutionCount] == upstreamExecutionCount)
			--downstreamExecutionCount;
		// When binarySearch found nothing (negative), fall back to the full
		// init+steady count stored for this edge.
		return downstreamSteadyStateExecutions + (downstreamExecutionCount >= 0 ? downstreamExecutionCount : downstreamInitExecutions + downstreamSteadyExecutions);
	}
}

/**
 * Finds all nodes in any path between two nodes in the graph.
*/
private static class NodesInPathsBetweenComputer {
	// Endpoints of the paths being searched for.
	private final PrimitiveWorker<?, ?> head, tail;
	// Everything downstream of tail; used to prune branches that have
	// already gone past the tail.
	private final Set<PrimitiveWorker<?, ?>> tailSuccessors;
	// Memo: for each visited node, the subset of its successors that lead
	// to tail. Doubles as the visited set and the result accumulator.
	private final Map<PrimitiveWorker<?, ?>, Set<PrimitiveWorker<?, ?>>> nextNodesToTail = new HashMap<>();

	private NodesInPathsBetweenComputer(PrimitiveWorker<?, ?> head, PrimitiveWorker<?, ?> tail) {
		this.head = head;
		this.tail = tail;
		this.tailSuccessors = tail.getAllSuccessors();
	}

	/** @return every node recorded on some head-to-tail path */
	public Set<PrimitiveWorker<?, ?>> get() {
		compute(head);
		Set<PrimitiveWorker<?, ?>> result = new HashSet<>();
		for (Set<PrimitiveWorker<?, ?>> nexts : nextNodesToTail.values())
			result.addAll(nexts);
		return result;
	}

	/** Memoized depth-first search; returns true if h reaches tail. */
	private boolean compute(PrimitiveWorker<?, ?> h) {
		if (h == tail)
			return true;
		Set<PrimitiveWorker<?, ?>> nodes = nextNodesToTail.get(h);
		if (nodes == null) {
			nodes = new HashSet<>();
			for (PrimitiveWorker<?, ?> next : h.getSuccessors()) {
				//If next is one of tail's successors, we can stop checking
				//this branch because we've gone too far down.
				if (tailSuccessors.contains(next))
					continue;
				//See if this node leads to tail.
				if (compute(next))
					nodes.add(next);
			}
			nextNodesToTail.put(h, nodes);
		}
		return !nodes.isEmpty();
	}
}

/**
 * Topologically sort the given set of nodes, such that each node precedes
 * all of its successors in the returned list.
 * @param nodes the set of nodes to sort
 * @return a topologically-ordered list of the given nodes
 */
private static List<PrimitiveWorker<?, ?>> topologicalSort(Set<PrimitiveWorker<?, ?>> nodes) {
	//Build a "use count" for each node, counting the number of nodes that
	//have it as a successor. (Kahn's algorithm: in-degree counting.)
	Map<PrimitiveWorker<?, ?>, Integer> useCount = new HashMap<>();
	for (PrimitiveWorker<?, ?> n : nodes)
		useCount.put(n, 0);
	for (PrimitiveWorker<?, ?> n : nodes)
		for (PrimitiveWorker<?, ?> next : n.getSuccessors()) {
			// Successors outside the given set are ignored (count == null).
			Integer count = useCount.get(next);
			if (count != null)
				useCount.put(next, count+1);
		}
	List<PrimitiveWorker<?, ?>> result = new ArrayList<>();
	// Worklist of nodes with no remaining unprocessed predecessors.
	Queue<PrimitiveWorker<?, ?>> unused = new ArrayDeque<>();
	for (Map.Entry<PrimitiveWorker<?, ?>, Integer> e : useCount.entrySet())
		if (e.getValue() == 0)
			unused.add(e.getKey());
	while (!unused.isEmpty()) {
		PrimitiveWorker<?, ?> n = unused.remove();
		result.add(n);
		//Decrement the use counts of n's successors, adding them to unused
		//if the use count becomes zero.
		for (PrimitiveWorker<?, ?> next : n.getSuccessors()) {
			Integer count = useCount.get(next);
			if (count != null) {
				count -= 1;
				useCount.put(next, count);
				if (count == 0)
					unused.add(next);
			}
		}
	}
	// If this fails, the node set contained a cycle (or a node was lost).
	assert result.size() == nodes.size();
	return result;
}

/**
 * Shouldn't this be in the standard library already?
 * (Euclidean greatest common divisor; requires a > 0, b >= 0.)
 */
private static int gcd(int a, int b) {
	assert a > 0 && b >= 0;
	return b == 0 ? a : gcd(b, a % b);
}

/**
 * A pair of workers, with equality based on the workers' identity.
*/
private static class Edge {
	// Identity-compared endpoints; the constructor asserts that upstream is
	// at or above downstream in the stream graph.
	public final PrimitiveWorker<?, ?> upstream, downstream;

	Edge(PrimitiveWorker<?, ?> upstream, PrimitiveWorker<?, ?> downstream) {
		this.upstream = upstream;
		this.downstream = downstream;
		StreamPosition direction = upstream.compareStreamPosition(downstream);
		assert direction == StreamPosition.UPSTREAM || direction == StreamPosition.EQUAL;
	}

	/**
	 * Identity-based equality: two edges are equal iff they reference the
	 * exact same worker objects at both ends.
	 */
	@Override
	public boolean equals(Object obj) {
		// Fix: add the reflexive fast path required for cheap self-compares.
		if (obj == this)
			return true;
		if (obj == null || getClass() != obj.getClass())
			return false;
		final Edge other = (Edge)obj;
		// Identity (==), not equals(): distinct workers are never equal.
		return this.upstream == other.upstream && this.downstream == other.downstream;
	}

	/** Identity hashes, consistent with the identity-based equals above. */
	@Override
	public int hashCode() {
		int hash = 3;
		hash = 23 * hash + System.identityHashCode(this.upstream);
		hash = 23 * hash + System.identityHashCode(this.downstream);
		return hash;
	}
}
}
package org.mozilla.mozstumbler;

import android.app.Activity;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Point;
import android.net.wifi.ScanResult;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.widget.Toast;

import java.lang.Void;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.mozilla.mozstumbler.cellscanner.CellInfo;
import org.mozilla.mozstumbler.cellscanner.CellScanner;
import org.mozilla.mozstumbler.communicator.Searcher;
import org.osmdroid.tileprovider.MapTileProviderBasic;
import org.osmdroid.tileprovider.tilesource.ITileSource;
import org.osmdroid.tileprovider.tilesource.OnlineTileSourceBase;
import org.osmdroid.tileprovider.tilesource.TileSourceFactory;
import org.osmdroid.tileprovider.tilesource.XYTileSource;
import org.osmdroid.util.GeoPoint;
import org.osmdroid.views.MapView;
import org.osmdroid.views.overlay.ItemizedIconOverlay;
import org.osmdroid.views.overlay.ItemizedOverlay;
import org.osmdroid.views.overlay.ItemizedOverlayWithFocus;
import org.osmdroid.views.overlay.OverlayItem;
import org.osmdroid.views.overlay.SafeDrawOverlay;
import org.osmdroid.views.overlay.TilesOverlay;
import org.osmdroid.views.safecanvas.ISafeCanvas;
import org.osmdroid.views.safecanvas.SafePaint;

/**
 * Map screen: shows an osmdroid MapView with the MLS coverage tile overlay,
 * listens for scanner-service broadcasts, and centers the map on the
 * position derived from the scanned wifi/cell data.
 */
public final class MapActivity extends Activity {
    private static final String LOGTAG = MapActivity.class.getName();

    // Status strings reported by the location Searcher.
    private static final String STATUS_OK = "ok";
    private static final String STATUS_NOT_FOUND = "not_found";
    private static final String STATUS_FAILED = "failed";

    // Base URL for the Mozilla Location Service coverage tiles.
    private static final String COVERAGE_URL = "https://location.services.mozilla.com/tiles/";

    private MapView mMap;
    private
ReporterBroadcastReceiver mReceiver;
    // Most recent wifi / cell scan results delivered by the scanner service.
    private List<ScanResult> mWifiData;
    private List<CellInfo> mCellData;

    /**
     * Receives scanner-service broadcasts; on the first wifi or cell scan
     * result it kicks off a geolocation lookup, then ignores further
     * broadcasts (one-shot).
     */
    private class ReporterBroadcastReceiver extends BroadcastReceiver {
        // Set once the first usable scan has been handled.
        private boolean mDone;

        @Override
        public void onReceive(Context context, Intent intent) {
            if (mDone) {
                return;
            }

            String action = intent.getAction();
            if (!action.equals(ScannerService.MESSAGE_TOPIC)) {
                Log.e(LOGTAG, "Received an unknown intent: " + action);
                return;
            }

            // Dispatch on the broadcast subject: wifi scan vs. cell scan.
            String subject = intent.getStringExtra(Intent.EXTRA_SUBJECT);
            if (WifiScanner.WIFI_SCANNER_EXTRA_SUBJECT.equals(subject)) {
                mWifiData = intent.getParcelableArrayListExtra(WifiScanner.WIFI_SCANNER_ARG_SCAN_RESULTS);
            } else if (CellScanner.CELL_SCANNER_EXTRA_SUBJECT.equals(subject)) {
                mCellData = intent.getParcelableArrayListExtra(CellScanner.CELL_SCANNER_ARG_CELLS);
            } else {
                return;
            }

            new GetLocationAndMapItTask().execute("");
            mDone = true;
        }
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_map);

        mWifiData = Collections.emptyList();

        mMap = (MapView) this.findViewById(R.id.map);
        mMap.setTileSource(getTileSource());
        mMap.setBuiltInZoomControls(true);
        mMap.setMultiTouchControls(true);

        // Overlay showing where MLS has coverage data.
        TilesOverlay coverageTilesOverlay = CoverageTilesOverlay(this);
        mMap.getOverlays().add(coverageTilesOverlay);

        mReceiver = new ReporterBroadcastReceiver();
        registerReceiver(mReceiver, new IntentFilter(ScannerService.MESSAGE_TOPIC));

        // Start zoomed out until a position fix arrives.
        mMap.getController().setZoom(2);
        Log.d(LOGTAG, "onCreate");
    }

    /**
     * @return the configured tile source, falling back to the osmdroid
     * default when no tile-server URL was compiled in.
     */
    @SuppressWarnings("ConstantConditions")
    private static OnlineTileSourceBase getTileSource() {
        if (BuildConfig.TILE_SERVER_URL == null) {
            return TileSourceFactory.DEFAULT_TILE_SOURCE;
        }
        return new XYTileSource("MozStumbler Tile Store", null, 1, 20, 256, ".png", BuildConfig.TILE_SERVER_URL);
    }

    /**
     * Builds the MLS coverage overlay (transparent loading background so
     * the base map shows through where no coverage tile exists).
     */
    private static TilesOverlay CoverageTilesOverlay(Context context) {
        final MapTileProviderBasic coverageTileProvider = new MapTileProviderBasic(context);
        final ITileSource coverageTileSource = new
XYTileSource("Mozilla Location Service Coverage Map", null, 1, 13, 256, ".png", COVERAGE_URL);
        coverageTileProvider.setTileSource(coverageTileSource);
        final TilesOverlay coverageTileOverlay = new TilesOverlay(coverageTileProvider,context);
        coverageTileOverlay.setLoadingBackgroundColor(Color.TRANSPARENT);
        return coverageTileOverlay;
    }

    /**
     * Centers the map on the given fix, drops a marker there and draws an
     * accuracy circle around it.
     */
    private void positionMapAt(float lat, float lon, float accuracy) {
        GeoPoint point = new GeoPoint(lat, lon);
        mMap.getController().setZoom(16);
        mMap.getController().animateTo(point);
        mMap.getOverlays().add(getMapMarker(point)); // You are here!
        mMap.getOverlays().add(new AccuracyCircleOverlay(MapActivity.this, point, accuracy));
        mMap.invalidate();
    }

    /**
     * Overlay drawing a translucent filled circle (with a stronger border)
     * of the given radius around a point.
     */
    private static class AccuracyCircleOverlay extends SafeDrawOverlay {
        private GeoPoint mPoint;
        // NOTE(review): presumably meters (converted via metersToEquatorPixels
        // below) — confirm against the Searcher's accuracy units.
        private float mAccuracy;

        public AccuracyCircleOverlay(Context ctx, GeoPoint point, float accuracy) {
            super(ctx);
            //this.mPoint = (GeoPoint) point.clone();
            this.mPoint = point;
            this.mAccuracy = accuracy;
        }

        protected void drawSafe(ISafeCanvas c, MapView osmv, boolean shadow) {
            // No shadow pass; nothing to draw without a point.
            if (shadow || mPoint == null) {
                return;
            }
            MapView.Projection pj = osmv.getProjection();
            Point center = pj.toPixels(mPoint, null);
            float radius = pj.metersToEquatorPixels(mAccuracy);
            SafePaint circle = new SafePaint();
            circle.setARGB(0, 100, 100, 255);

            // Fill
            circle.setAlpha(40);
            circle.setStyle(Paint.Style.FILL);
            c.drawCircle(center.x, center.y, radius, circle);

            // Border
            circle.setAlpha(165);
            circle.setStyle(Paint.Style.STROKE);
            c.drawCircle(center.x, center.y, radius, circle);
        }
    }

    /**
     * Builds the "you are here" marker overlay for the given point
     * (taps and long-presses are ignored).
     */
    private ItemizedOverlay<OverlayItem> getMapMarker(GeoPoint point) {
        ArrayList<OverlayItem> items = new ArrayList<OverlayItem>();
        items.add(new OverlayItem(null, null, point));
        return new ItemizedOverlayWithFocus<OverlayItem>(
            MapActivity.this, items,
            new ItemizedIconOverlay.OnItemGestureListener<OverlayItem>() {
                @Override
                public boolean onItemSingleTapUp(int index, OverlayItem item) {
                    return false;
                }

                @Override
                public boolean onItemLongPress(int
index, OverlayItem item) { return false; } }); } @Override protected void onStart() { super.onStart(); Context context = getApplicationContext(); Intent i = new Intent(ScannerService.MESSAGE_TOPIC); i.putExtra(Intent.EXTRA_SUBJECT, "Scanner"); i.putExtra("enable", 1); context.sendBroadcast(i); Log.d(LOGTAG, "onStart"); } @Override protected void onStop() { super.onStop(); Log.d(LOGTAG, "onStop"); mMap.getTileProvider().clearTileCache(); if (mReceiver != null) { unregisterReceiver(mReceiver); mReceiver = null; } } @Override public boolean onCreateOptionsMenu(Menu menu) { return false; } private final class GetLocationAndMapItTask extends AsyncTask<String, Void, String> { private String mStatus=""; private float mLat = 0; private float mLon = 0; private float mAccuracy = 0; @Override public String doInBackground(String... params) { Log.d(LOGTAG, "requesting location..."); JSONObject wrapper; try { wrapper = new JSONObject("{}"); if (mCellData != null) { wrapper.put("radio", mCellData.get(0).getRadio()); JSONArray cellData = new JSONArray(); for (CellInfo info : mCellData) { JSONObject item = info.toJSONObject(); cellData.put(item); } wrapper.put("cell", cellData); } if (mWifiData != null) { JSONArray wifiData = new JSONArray(); for (ScanResult result : mWifiData) { JSONObject item = new JSONObject(); item.put("key", BSSIDBlockList.canonicalizeBSSID(result.BSSID)); item.put("frequency", result.frequency); item.put("signal", result.level); wifiData.put(item); } wrapper.put("wifi", wifiData); } } catch (JSONException jsonex) { Log.w(LOGTAG, "json exception", jsonex); return ""; } String data = wrapper.toString(); byte[] bytes = data.getBytes(); Searcher searcher = new Searcher(MapActivity.this); if (searcher.cleanSend(bytes)) { mStatus = searcher.getStatus(); mLat = searcher.getLat(); mLon = searcher.getLon(); mAccuracy = searcher.getAccuracy(); } else { mStatus = STATUS_FAILED; } searcher.close(); Log.d(LOGTAG, "Upload status: " + mStatus); return mStatus; } @Override 
protected void onPostExecute(String result) {
            // Note: branches on the mStatus field (set in doInBackground)
            // rather than the passed-in result; the two carry the same value.
            if (STATUS_OK.equals(mStatus)) {
                positionMapAt(mLat, mLon, mAccuracy);
            } else if (STATUS_NOT_FOUND.equals(mStatus)) {
                Toast.makeText(MapActivity.this,
                        getResources().getString(R.string.location_not_found),
                        Toast.LENGTH_LONG).show();
            } else {
                // Unexpected status: show a generic error and log a stack
                // trace for diagnosis.
                Toast.makeText(MapActivity.this,
                        getResources().getString(R.string.location_lookup_error),
                        Toast.LENGTH_LONG).show();
                Log.e(LOGTAG, "", new IllegalStateException("mStatus=" + mStatus));
            }
        }
    }
}
package org.sagebionetworks.repo.manager.table;

import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.sagebionetworks.repo.manager.NodeManager;
import org.sagebionetworks.repo.model.AnnotationNameSpace;
import org.sagebionetworks.repo.model.Annotations;
import org.sagebionetworks.repo.model.NamedAnnotations;
import org.sagebionetworks.repo.model.UserInfo;
import org.sagebionetworks.repo.model.dao.table.ColumnModelDAO;
import org.sagebionetworks.repo.model.dbo.dao.table.ViewScopeDao;
import org.sagebionetworks.repo.model.jdo.KeyFactory;
import org.sagebionetworks.repo.model.table.AnnotationType;
import org.sagebionetworks.repo.model.table.ColumnChange;
import org.sagebionetworks.repo.model.table.ColumnModel;
import org.sagebionetworks.repo.model.table.EntityField;
import org.sagebionetworks.repo.model.table.SparseRowDto;
import org.sagebionetworks.repo.model.table.ViewType;
import org.sagebionetworks.repo.transactions.RequiresNewReadCommitted;
import org.sagebionetworks.repo.transactions.WriteTransactionReadCommitted;
import org.sagebionetworks.table.cluster.SQLUtils;
import org.sagebionetworks.util.ValidateArgument;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * Manages the schema and scope of entity views and writes row-level view
 * updates back onto the underlying entities' annotations.
 */
public class TableViewManagerImpl implements TableViewManager {

	// ("ETG" typo preserved: the constant name is public API.)
	public static final String ETG_COLUMN_MISSING = "The view schema must include '"+EntityField.etag.name()+"' column.";
	public static final String ETAG_MISSING_MESSAGE = "The '"+EntityField.etag.name()+"' must be included to update an Entity's annotations.";

	/**
	 * Max columns per view is now the same as the max per table.
*/
public static final int MAX_COLUMNS_PER_VIEW = ColumnModelManagerImpl.MY_SQL_MAX_COLUMNS_PER_TABLE;

@Autowired
ViewScopeDao viewScopeDao;
@Autowired
ColumnModelManager columModelManager;
@Autowired
TableManagerSupport tableManagerSupport;
@Autowired
ColumnModelDAO columnModelDao;
@Autowired
NodeManager nodeManager;

/*
 * (non-Javadoc)
 * @see org.sagebionetworks.repo.manager.table.TableViewManager#setViewSchemaAndScope(org.sagebionetworks.repo.model.UserInfo, java.util.List, java.util.List, java.lang.String)
 */
@WriteTransactionReadCommitted
@Override
public void setViewSchemaAndScope(UserInfo userInfo, List<String> schema, List<String> scope, ViewType type, String viewIdString) {
	ValidateArgument.required(userInfo, "userInfo");
	ValidateArgument.required(type, "viewType");
	// Reject oversized schemas before doing any work.
	validateViewSchemaSize(schema);
	Long viewId = KeyFactory.stringToKey(viewIdString);
	Set<Long> scopeIds = null;
	if(scope != null){
		scopeIds = new HashSet<Long>(KeyFactory.stringToKey(scope));
	}
	// validate the scope size
	tableManagerSupport.validateScopeSize(scopeIds, type);
	// Define the scope of this view.
	viewScopeDao.setViewScopeAndType(viewId, scopeIds, type);
	// Define the schema of this view.
columModelManager.bindColumnToObject(userInfo, schema, viewIdString);
	// trigger an update
	tableManagerSupport.setTableToProcessingAndTriggerUpdate(viewIdString);
}

@Override
public Set<Long> findViewsContainingEntity(String entityId) {
	// A view contains an entity when the view's scope intersects any
	// container on the entity's path.
	Set<Long> entityPath = tableManagerSupport.getEntityPath(entityId);
	return viewScopeDao.findViewScopeIntersectionWithPath(entityPath);
}

@Override
public List<ColumnModel> getViewSchema(String tableId) {
	return tableManagerSupport.getColumnModelsForTable(tableId);
}

@WriteTransactionReadCommitted
@Override
public List<ColumnModel> applySchemaChange(UserInfo user, String viewId, List<ColumnChange> changes, List<String> orderedColumnIds) {
	// first determine what the new Schema will be
	List<String> newSchemaIds = columModelManager.calculateNewSchemaIdsAndValidate(viewId, changes, orderedColumnIds);
	validateViewSchemaSize(newSchemaIds);
	columModelManager.bindColumnToObject(user, newSchemaIds, viewId);
	boolean keepOrder = true;
	List<ColumnModel> newSchema = columModelManager.getColumnModel(user, newSchemaIds, keepOrder);
	// trigger an update.
	tableManagerSupport.setTableToProcessingAndTriggerUpdate(viewId);
	return newSchema;
}

/**
 * Validate that the new schema is within the allowed size for views.
 * @param newSchema the proposed schema's column ids (null allowed: no-op)
 * @throws IllegalArgumentException if the schema exceeds MAX_COLUMNS_PER_VIEW
 */
public static void validateViewSchemaSize(List<String> newSchema) {
	if(newSchema != null) {
		if(newSchema.size() > MAX_COLUMNS_PER_VIEW) {
			throw new IllegalArgumentException("A view cannot have "+newSchema.size()+" columns. It must have "+MAX_COLUMNS_PER_VIEW+" columns or less.");
		}
	}
}

@Override
public List<String> getTableSchema(String tableId){
	return columModelManager.getColumnIdForTable(tableId);
}

/**
 * Update an Entity using data from a view.
 *
 * NOTE: Each entity is updated in a separate transaction to prevent
 * locking the entity tables for long periods of time. This also prevents
 * deadlock.
 *
 * @param row the view row holding the target entity's id and new values
*
 */
@RequiresNewReadCommitted
@Override
public void updateEntityInView(UserInfo user, List<ColumnModel> tableSchema, SparseRowDto row) {
	ValidateArgument.required(row, "SparseRowDto");
	ValidateArgument.required(row.getRowId(), "row.rowId");
	if(row.getValues() == null || row.getValues().isEmpty()){
		// nothing to do for this row.
		return;
	}
	// The row id of a view row is the entity's numeric id.
	String entityId = KeyFactory.keyToString(row.getRowId());
	Map<String, String> values = row.getValues();
	String etag = row.getEtag();
	if(etag == null){
		/*
		 * Prior to PLFM-4249, users provided the etag as a column on the table.
		 * View query results will now include the etag if requested, even if the
		 * view does not have an etag column. However, if this etag is null, then
		 * for backwards compatibility we still need to look for an etag column
		 * in the view.
		 */
		ColumnModel etagColumn = getEtagColumn(tableSchema);
		etag = values.get(etagColumn.getId());
		if(etag == null){
			throw new IllegalArgumentException(ETAG_MISSING_MESSAGE);
		}
	}
	// Get the current annotations for this entity.
	NamedAnnotations annotations = nodeManager.getAnnotations(user, entityId);
	Annotations additional = annotations.getAdditionalAnnotations();
	// The etag enables optimistic concurrency control on the save below.
	additional.setEtag(etag);
	boolean updated = updateAnnotationsFromValues(additional, tableSchema, values);
	if(updated){
		// save the changes.
		nodeManager.updateAnnotations(user, entityId, additional, AnnotationNameSpace.ADDITIONAL);
	}
}

/**
 * Lookup the etag column from the given schema.
 * @param schema the view's schema
 * @return the schema's etag column (never null)
 * @throws IllegalArgumentException if the schema has no etag column
 */
public static ColumnModel getEtagColumn(List<ColumnModel> schema){
	for(ColumnModel cm: schema){
		if(EntityField.etag.name().equals(cm.getName())){
			return cm;
		}
	}
	throw new IllegalArgumentException(ETG_COLUMN_MISSING);
}

/**
 * Update the passed Annotations using the given schema and values map.
*
 * @param additional the annotations to modify in place
 * @param tableSchema the view's schema
 * @param values row values keyed by column id
 * @return true if any annotation was changed and a save is required
 */
public static boolean updateAnnotationsFromValues(Annotations additional, List<ColumnModel> tableSchema, Map<String, String> values){
	boolean updated = false;
	// process each column of the view
	for(ColumnModel column: tableSchema){
		EntityField matchedField = EntityField.findMatch(column);
		// Ignore all entity fields.
		if(matchedField == null){
			// is this column included in the row?
			if(values.containsKey(column.getId())){
				updated = true;
				// Match the column type to an annotation type.
				AnnotationType type = SQLUtils.translateColumnTypeToAnnotationType(column.getColumnType());
				String value = values.get(column.getId());
				// Unconditionally remove a current annotation.
				additional.deleteAnnotation(column.getName());
				// Add back the annotation if the value is not null
				if(value != null){
					Object objectValue = type.parseValue(value);
					additional.replaceAnnotation(column.getName(), objectValue);
				}
			}
		}
	}
	return updated;
}
}
package org.pentaho.di.version;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.InputStream;
import java.text.SimpleDateFormat;
import java.util.Date;

import org.pentaho.di.core.Const;

/**
 * Singleton class to allow us to see on which date & time the kettle3.jar was built.
 *
 * @author Matt
 * @since 2006-aug-12
 */
public class BuildVersion
{
    /** name of the Kettle version file, updated in the ant script, contains date and time of build */
    public static final String BUILD_VERSION_FILE = "build_version.txt";

    /** separator between the fields of the single versioning line */
    public static final String SEPARATOR = "@";

    /** format of the build date as stored in the version file */
    public static final String BUILD_DATE_FORMAT = "yyyy/MM/dd'T'HH:mm:ss";

    private static BuildVersion buildVersion;

    /**
     * Lazily creates the singleton. Fix: synchronized so concurrent first
     * callers cannot observe a half-built instance or build two instances.
     *
     * @return the instance of the BuildVersion singleton
     */
    public static final synchronized BuildVersion getInstance()
    {
        if (buildVersion == null)
        {
            buildVersion = new BuildVersion();
        }
        return buildVersion;
    }

    private int version;     // build revision number
    private Date buildDate;  // date & time of the build
    private String hostname; // host the build ran on (set via setter; written by save())

    /**
     * Reads the versioning line ("revision@date@hostname") from the
     * classpath, falling back to the local filesystem. On any failure the
     * version defaults to 1 and the build date to "now".
     */
    private BuildVersion()
    {
        String filename = BUILD_VERSION_FILE;
        StringBuffer buffer = new StringBuffer(30);
        InputStream inputStream = null;
        try
        {
            // The version file only contains a single line of text
            inputStream = getClass().getResourceAsStream("/" + filename); // try to find it in the jars...
            if (inputStream == null) // not found: look on the normal filesystem
            {
                try
                {
                    inputStream = new FileInputStream(filename); // Retry from normal file system
                }
                catch (FileNotFoundException e)
                {
                    inputStream = new FileInputStream("./" + filename);
                }
            }

            // read the first line into a String
            int c = inputStream.read();
            while (c > 0 && c != '\n' && c != '\r')
            {
                if (c != ' ' && c != '\t')
                    buffer.append((char) c); // no spaces or tabs please ;-)
                c = inputStream.read();
            }

            // The 3 parts we expect are in here: revision@date@hostname
            String parts[] = buffer.toString().split(SEPARATOR);
            if (parts.length != 3)
            {
                throw new RuntimeException("Could not find 3 parts in versioning line : [" + buffer + "]");
            }

            // Get the revision
            version = Integer.parseInt(parts[0]);

            // Get the build date
            SimpleDateFormat format = new SimpleDateFormat(BUILD_DATE_FORMAT);
            buildDate = format.parse(parts[1]);
        }
        catch (Exception e)
        {
            System.out.println("Unable to load revision number from file : [" + filename + "] : " + e.toString());
            System.out.println(Const.getStackTracker(e));
            version = 1;
            buildDate = new Date();
        }
        finally
        {
            // Fix: the stream was previously never closed (resource leak).
            if (inputStream != null)
            {
                try
                {
                    inputStream.close();
                }
                catch (Exception e)
                {
                    // best-effort close: nothing sensible left to do
                }
            }
        }
    }

    /**
     * @return the buildDate
     */
    public Date getBuildDate()
    {
        return buildDate;
    }

    /**
     * @param buildDate the buildDate to set
     */
    public void setBuildDate(Date buildDate)
    {
        this.buildDate = buildDate;
    }

    /**
     * @return the revision
     */
    public int getVersion()
    {
        return version;
    }

    /**
     * @param revision the revision to set
     */
    public void setVersion(int revision)
    {
        this.version = revision;
    }

    /**
     * Writes "revision @ date @ hostname" back to BUILD_VERSION_FILE.
     *
     * @throws RuntimeException when the file cannot be written or closed
     */
    public void save()
    {
        FileWriter fileWriter = null;
        String filename = BUILD_VERSION_FILE;
        File file = new File( filename );
        try
        {
            fileWriter = new FileWriter(file);

            // First write the revision
            fileWriter.write(Integer.toString(version)+" ");
            // Then the separator
            fileWriter.write(SEPARATOR);
            // Finally the build date
            SimpleDateFormat format = new SimpleDateFormat(BUILD_DATE_FORMAT);
            fileWriter.write(" "+format.format(buildDate)+" ");
            // Then the separator
            fileWriter.write(SEPARATOR);
            // Then the hostname
            fileWriter.write(" "+Const.getHostname());
            // Return
            fileWriter.write(Const.CR);

            System.out.println("Saved build version info to file ["+file.getAbsolutePath()+"]");
        }
        catch(Exception e)
        {
            throw new RuntimeException("Unable to save revision information to file ["+BUILD_VERSION_FILE+"]", e);
        }
        finally
        {
            try
            {
                if (fileWriter!=null)
                {
                    fileWriter.close();
                }
            }
            catch(Exception e)
            {
                throw new RuntimeException("Unable to close file ["+BUILD_VERSION_FILE+"] after writing", e);
            }
        }
    }

    /**
     * @return the hostname
     */
    public String getHostname()
    {
        return hostname;
    }

    /**
     * @param hostname the hostname to set
     */
    public void setHostname(String hostname)
    {
        this.hostname = hostname;
    }
}
package org.jboss.forge.addon.shell.aesh.completion; import org.jboss.aesh.complete.CompleteOperation; import org.jboss.forge.addon.convert.Converter; import org.jboss.forge.addon.convert.ConverterFactory; import org.jboss.forge.addon.shell.ui.ShellContext; import org.jboss.forge.addon.ui.input.InputComponent; import org.jboss.forge.addon.ui.input.SelectComponent; import org.jboss.forge.addon.ui.util.InputComponents; import org.jboss.forge.furnace.util.Strings; /** * * * @author <a href="ggastald@redhat.com">George Gastaldi</a> */ public class SelectComponentCompletionStrategy implements CompletionStrategy { @SuppressWarnings("unchecked") @Override public void complete(CompleteOperation completeOperation, InputComponent<?, Object> input, ShellContext context, String typedValue, ConverterFactory converterFactory) { SelectComponent<?, Object> selectComponent = (SelectComponent<?, Object>) input; Converter<Object, String> itemLabelConverter = (Converter<Object, String>) InputComponents .getItemLabelConverter(converterFactory, selectComponent); boolean noTypedValue = Strings.isNullOrEmpty(typedValue); Iterable<Object> valueChoices = selectComponent.getValueChoices(); for (Object choice : valueChoices) { String convert = itemLabelConverter.convert(choice); if (noTypedValue || convert.startsWith(typedValue)) { completeOperation.addCompletionCandidate(convert); } } } }
package com.ai.cloud.skywalking.reciever.persistance; import com.ai.cloud.skywalking.reciever.selfexamination.ServerHealthCollector; import com.ai.cloud.skywalking.reciever.selfexamination.ServerHeathReading; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.util.Collection; import static com.ai.cloud.skywalking.reciever.conf.Config.RegisterPersistence.*; public class RegisterPersistenceThread extends Thread { private Logger logger = LogManager .getLogger(RegisterPersistenceThread.class); private BufferedWriter writer; public RegisterPersistenceThread() { super("RegisterPersistenceThread"); File offsetParentDir = new File(REGISTER_FILE_PARENT_DIRECTORY); if (!offsetParentDir.exists()){ offsetParentDir.mkdirs(); } } @Override public void run() { while (true) { try { Thread.sleep(OFFSET_WRITTEN_FILE_WAIT_CYCLE); } catch (InterruptedException e) { logger.error("Sleep failure", e); } try { File file = new File(REGISTER_FILE_PARENT_DIRECTORY, REGISTER_FILE_NAME); File bakFile = new File(REGISTER_FILE_PARENT_DIRECTORY, REGISTER_BAK_FILE_NAME); if (bakFile.exists()) { bakFile.delete(); } file.renameTo(bakFile); if (!file.exists()) { file.createNewFile(); } Collection<FileRegisterEntry> fileRegisterEntries = MemoryRegister .instance().getEntries(); try { writer = new BufferedWriter(new FileWriter(file)); } catch (IOException e) { logger.error("Write The offset file anomalies."); } for (FileRegisterEntry fileRegisterEntry : fileRegisterEntries) { try { writer.write(fileRegisterEntry.toString() + "\n"); } catch (IOException e) { logger.error( "Write file register entry to offset file failure", e); } } try { writer.write("EOF\n"); writer.flush(); } catch (IOException e) { logger.error("Flush offset file failure", e); } finally { try { writer.close(); } catch (IOException e) { logger.error("close offset file failure", e); } } 
} catch (IOException e) { logger.error("Failed to back up offset file.", e); } ServerHealthCollector.getCurrentHeathReading(null).updateData( ServerHeathReading.INFO, "flush memory register to file."); } } }
package org.wikimedia.commons; import java.io.*; import java.util.Date; import org.mediawiki.api.*; import org.wikimedia.commons.media.Media; import de.mastacode.http.ProgressListener; import android.app.*; import android.content.*; import android.database.Cursor; import android.os.*; import android.provider.MediaStore; import android.support.v4.app.NotificationCompat; import android.text.method.DateTimeKeyListener; import android.util.Log; import android.view.View; import android.widget.RemoteViews; import android.widget.Toast; import android.net.*; public class UploadService extends IntentService { private static final String EXTRA_PREFIX = "org.wikimedia.commons.uploader"; public static final String EXTRA_MEDIA_URI = EXTRA_PREFIX + ".media_uri"; public static final String EXTRA_TARGET_FILENAME = EXTRA_PREFIX + ".filename"; public static final String EXTRA_DESCRIPTION = EXTRA_PREFIX + ".description"; public static final String EXTRA_EDIT_SUMMARY = EXTRA_PREFIX + ".summary"; private NotificationManager notificationManager; private CommonsApplication app; public UploadService(String name) { super(name); } public UploadService() { super("UploadService"); } // DO NOT HAVE NOTIFICATION ID OF 0 FOR ANYTHING // Seriously, Android? 
public static final int NOTIFICATION_DOWNLOAD_IN_PROGRESS = 1; public static final int NOTIFICATION_DOWNLOAD_COMPLETE = 2; private class NotificationUpdateProgressListener implements ProgressListener { Notification curNotification; String notificationTag; boolean notificationTitleChanged; String notificationProgressTitle; String notificationFinishingTitle; private int lastPercent = 0; public NotificationUpdateProgressListener(Notification curNotification, String notificationTag, String notificationProgressTitle, String notificationFinishingTitle) { this.curNotification = curNotification; this.notificationTag = notificationTag; this.notificationProgressTitle = notificationProgressTitle; this.notificationFinishingTitle = notificationFinishingTitle; } @Override public void onProgress(long transferred, long total) { RemoteViews curView = curNotification.contentView; if(!notificationTitleChanged) { curView.setTextViewText(R.id.uploadNotificationTitle, notificationProgressTitle); notificationTitleChanged = false; startForeground(NOTIFICATION_DOWNLOAD_IN_PROGRESS, curNotification); } int percent =(int) ((double)transferred / (double)total * 100); if(percent > lastPercent) { curNotification.contentView.setProgressBar(R.id.uploadNotificationProgress, 100, percent, false); startForeground(NOTIFICATION_DOWNLOAD_IN_PROGRESS, curNotification); lastPercent = percent; } if(percent == 100) { // Completed! 
curView.setTextViewText(R.id.uploadNotificationTitle, notificationFinishingTitle); startForeground(NOTIFICATION_DOWNLOAD_IN_PROGRESS, curNotification); } } } @Override public void onDestroy() { super.onDestroy(); Log.d("Commons", "ZOMG I AM BEING KILLED HALP!"); } @Override public void onCreate() { super.onCreate(); notificationManager = (NotificationManager)getSystemService(NOTIFICATION_SERVICE); app = (CommonsApplication)this.getApplicationContext(); } @Override protected void onHandleIntent(Intent intent) { MWApi api = app.getApi(); InputStream file; long length; ApiResult result; RemoteViews notificationView; Bundle extras = intent.getExtras(); Uri mediaUri = (Uri)extras.getParcelable(EXTRA_MEDIA_URI); String filename = intent.getStringExtra(EXTRA_TARGET_FILENAME); String description = intent.getStringExtra(EXTRA_DESCRIPTION); String editSummary = intent.getStringExtra(EXTRA_EDIT_SUMMARY); String notificationTag = mediaUri.toString(); Date dateCreated = null; try { file = this.getContentResolver().openInputStream(mediaUri); length = this.getContentResolver().openAssetFileDescriptor(mediaUri, "r").getLength(); Cursor cursor = this.getContentResolver().query(mediaUri, new String[] { MediaStore.Images.ImageColumns.DATE_TAKEN }, null, null, null); if(cursor.getCount() != 0) { cursor.moveToFirst(); dateCreated = new Date(cursor.getInt(0)); } } catch (FileNotFoundException e) { throw new RuntimeException(e); } notificationView = new RemoteViews(getPackageName(), R.layout.layout_upload_progress); notificationView.setTextViewText(R.id.uploadNotificationTitle, String.format(getString(R.string.upload_progress_notification_title_start), filename)); notificationView.setProgressBar(R.id.uploadNotificationProgress, 100, 0, false); Log.d("Commons", "Before execution!"); Notification progressNotification = new NotificationCompat.Builder(this).setAutoCancel(true) .setSmallIcon(R.drawable.ic_launcher) .setAutoCancel(true) .setContent(notificationView) .setOngoing(true) 
.setContentIntent(PendingIntent.getActivity(getApplicationContext(), 0, new Intent(), 0)) .getNotification(); this.startForeground(NOTIFICATION_DOWNLOAD_IN_PROGRESS, progressNotification); Log.d("Commons", "Just before"); NotificationUpdateProgressListener notificationUpdater = new NotificationUpdateProgressListener(progressNotification, notificationTag, String.format(getString(R.string.upload_progress_notification_title_in_progress), filename), String.format(getString(R.string.upload_progress_notification_title_finishing), filename) ); try { if(!api.validateLogin()) { // Need to revalidate! if(app.revalidateAuthToken()) { Log.d("Commons", "Successfully revalidated token!"); } else { Log.d("Commons", "Unable to revalidate :("); // TODO: Put up a new notification, ask them to re-login notificationManager.cancel(notificationTag, NOTIFICATION_DOWNLOAD_IN_PROGRESS); Toast failureToast = Toast.makeText(this, R.string.authentication_failed, Toast.LENGTH_LONG); failureToast.show(); return; } } Media media = new Media(mediaUri, filename, description, editSummary, app.getCurrentAccount().name, dateCreated); result = api.upload(filename, file, length, media.getPageContents(), editSummary, notificationUpdater); } catch (IOException e) { e.printStackTrace(); Log.d("Commons", "I have a network fuckup"); throw new RuntimeException(e); } Log.d("Commons", "Response is" + CommonsApplication.getStringFromDOM(result.getDocument())); stopForeground(true); String descUrl = result.getString("/api/upload/imageinfo/@descriptionurl"); Intent openUploadedPageIntent = new Intent(Intent.ACTION_VIEW, Uri.parse(descUrl)); Notification doneNotification = new NotificationCompat.Builder(this) .setAutoCancel(true) .setSmallIcon(R.drawable.ic_launcher) .setContentTitle(String.format(getString(R.string.upload_completed_notification_title), filename)) .setContentText(getString(R.string.upload_completed_notification_text)) .setContentIntent(PendingIntent.getActivity(this, 0, openUploadedPageIntent, 0)) 
.getNotification(); notificationManager.notify(notificationTag, NOTIFICATION_DOWNLOAD_COMPLETE, doneNotification); } }
package org.objectweb.proactive.core.component.adl.implementations;

import java.util.List;
import java.util.Map;

import org.apache.log4j.Logger;
import org.objectweb.fractal.adl.ADLException;
import org.objectweb.fractal.api.Component;
import org.objectweb.fractal.api.Type;
import org.objectweb.fractal.api.control.BindingController;
import org.objectweb.fractal.api.type.ComponentType;
import org.objectweb.fractal.util.Fractal;
import org.objectweb.proactive.core.component.Constants;
import org.objectweb.proactive.core.component.ContentDescription;
import org.objectweb.proactive.core.component.ControllerDescription;
import org.objectweb.proactive.core.component.adl.RegistryManager;
import org.objectweb.proactive.core.component.adl.nodes.VirtualNode;
import org.objectweb.proactive.core.component.adl.vnexportation.ExportedVirtualNodesList;
import org.objectweb.proactive.core.component.adl.vnexportation.LinkedVirtualNode;
import org.objectweb.proactive.core.component.factory.ProActiveGenericFactory;
import org.objectweb.proactive.core.descriptor.data.ProActiveDescriptor;
import org.objectweb.proactive.core.group.Group;
import org.objectweb.proactive.core.node.Node;
import org.objectweb.proactive.core.node.NodeException;
import org.objectweb.proactive.core.util.log.Loggers;
import org.objectweb.proactive.core.util.log.ProActiveLogger;
import org.objectweb.proactive.gcmdeployment.GCMApplication;
import org.objectweb.proactive.gcmdeployment.GCMVirtualNode;

//import org.objectweb.proactive.extensions.gcmdeployment.GCMApplication.GCMApplication;
//import org.objectweb.proactive.extensions.gcmdeployment.core.GCMVirtualNode;

/**
 * Builds ProActive/Fractal component instances described by an ADL definition.
 * Resolves the deployment virtual node for a component (either the new GCM
 * deployment or the legacy ProActive descriptor deployment) and then
 * instantiates the component — as a group when the virtual node is multiple.
 *
 * @author The ProActive Team
 */
public class ProActiveImplementationBuilderImpl implements ProActiveImplementationBuilder, BindingController {
    public final static String REGISTRY_BINDING = "registry";
    public RegistryManager registry;
    protected static Logger logger = ProActiveLogger.getLogger(Loggers.COMPONENTS_ADL);

    // Implementation of the BindingController interface

    /** The registry manager is the only client binding this builder exposes. */
    public String[] listFc() {
        return new String[] { REGISTRY_BINDING };
    }

    /** Returns the registry binding, or null for any other interface name. */
    public Object lookupFc(final String itf) {
        if (itf.equals(REGISTRY_BINDING)) {
            return registry;
        }
        return null;
    }

    /** Binds the registry manager; other interface names are silently ignored. */
    public void bindFc(final String itf, final Object value) {
        if (itf.equals(REGISTRY_BINDING)) {
            registry = (RegistryManager) value;
        }
    }

    /** Unbinds the registry manager; other interface names are silently ignored. */
    public void unbindFc(final String itf) {
        if (itf.equals(REGISTRY_BINDING)) {
            registry = null;
        }
    }

    // Implementation of the Implementation Builder and ProActiveImplementationBuilder interfaces

    // NOTE(review): this generic overload is a stub that always returns null —
    // confirm no caller relies on it.
    public Object createComponent(Object arg0, String arg1, String arg2, Object arg3, Object arg4,
            Object arg5) throws Exception {
        return null;
    }

    /**
     * Creates a component: first resolves the deployment objects (bootstrap
     * component and target virtual node) via {@link #commonCreation}, then
     * instantiates the component on them.
     */
    public Object createComponent(Object type, String name, String definition,
            ControllerDescription controllerDesc, ContentDescription contentDesc, VirtualNode adlVN,
            Map context) throws Exception {
        ObjectsContainer obj = commonCreation(type, name, definition, contentDesc, adlVN, context);
        return createFComponent(type, obj.getDvn(), controllerDesc, contentDesc, adlVN, obj
                .getBootstrapComponent());
    }

    /**
     * Resolves the bootstrap component and the deployment virtual node for the
     * given ADL virtual node, handling both GCM and legacy deployment
     * descriptors found in the context map.
     */
    protected ObjectsContainer commonCreation(Object type, String name, String definition,
            ContentDescription contentDesc, VirtualNode adlVN, Map context) throws Exception {
        Component bootstrap = null;
        if (context != null) {
            bootstrap = (Component) context.get("bootstrap");
        }
        if (bootstrap == null) {
            bootstrap = Fractal.getBootstrapComponent();
        }

        ObjectsContainer result = null;

        if (adlVN != null) {
            // consider exported virtual nodes
            LinkedVirtualNode exported = ExportedVirtualNodesList.instance().getNode(name,
                    adlVN.getName(), false);
            if (exported != null) {
                // The ADL VN is rewritten in place to its post-composition exported name.
                adlVN.setName(exported.getExportedVirtualNodeNameAfterComposition());
                adlVN.setCardinality(exported.isMultiple() ? VirtualNode.MULTIPLE : VirtualNode.SINGLE);
            } else {
                // TODO add self exported virtual node ?
                // for the moment, just add a leaf to the linked vns
                ExportedVirtualNodesList.instance().addLeafVirtualNode(name, adlVN.getName(),
                        adlVN.getCardinality()); // TODO_M check this
            }
            // NOTE(review): context is dereferenced here without the null guard used
            // above for "bootstrap" — confirm context is always non-null when adlVN != null.
            Object deploymentDescriptor = context.get("deployment-descriptor");
            if (deploymentDescriptor != null) {
                if (deploymentDescriptor instanceof GCMApplication) {
                    // New deployment
                    GCMApplication gcmApplication = (GCMApplication) deploymentDescriptor;
                    GCMVirtualNode virtualNode = gcmApplication.getVirtualNode(adlVN.getName());
                    result = new ObjectsContainer(virtualNode, bootstrap);
                } else if (deploymentDescriptor instanceof ProActiveDescriptor) {
                    // Old deployment
                    org.objectweb.proactive.core.descriptor.data.VirtualNodeInternal deploymentVN = null;
                    ProActiveDescriptor proactiveDecriptor = (ProActiveDescriptor) deploymentDescriptor;
                    org.objectweb.proactive.core.descriptor.data.VirtualNode vn = proactiveDecriptor
                            .getVirtualNode(adlVN.getName());
                    if (vn != null) {
                        deploymentVN = vn.getVirtualNodeInternal();
                    }
                    if (deploymentVN == null) {
                        if (adlVN.getName().equals("null")) {
                            // The literal VN name "null" means: deploy in the current JVM.
                            logger
                                    .info(name +
                                        " will be instantiated in the current virtual machine (\"null\" was specified as the virtual node name)");
                        } else {
                            throw new ADLException("Could not find virtual node " + adlVN.getName() +
                                " in the deployment descriptor", null);
                        }
                    } else {
                        if (deploymentVN.isMultiple() &&
                            (adlVN.getCardinality().equals(VirtualNode.SINGLE))) {
                            // there will be only one instance of the component, on one node of the virtual node
                            contentDesc.forceSingleInstance();
                        } else if (!(deploymentVN.isMultiple()) &&
                            (adlVN.getCardinality().equals(VirtualNode.MULTIPLE))) {
                            throw new ADLException(
                                "Cannot deploy on a single virtual node when the cardinality of this virtual node named " +
                                    adlVN.getName() + " in the ADL is set to multiple", null);
                        }
                    }
                    result = new ObjectsContainer(deploymentVN, bootstrap);
                }
            }
            // NOTE(review): if adlVN != null but the context carries no (or an
            // unrecognized) "deployment-descriptor", result remains null and the
            // caller will NPE on obj.getDvn() — confirm whether that configuration
            // can occur.
        } else {
            // adlVN == null
            result = new ObjectsContainer(bootstrap);
        }
        return result;
    }

    /**
     * Instantiates the component on the resolved deployment virtual node. A
     * multiple-cardinality primitive (without forced single instance) becomes a
     * group of components; everything else is a single instance.
     */
    private Component createFComponent(Object type,
            org.objectweb.proactive.core.descriptor.data.VirtualNode deploymentVN,
            ControllerDescription controllerDesc, ContentDescription contentDesc, VirtualNode adlVN,
            Component bootstrap) throws Exception {
        Component result;

        // FIXME : exhaustively specify the behaviour
        if ((deploymentVN != null) && VirtualNode.MULTIPLE.equals(adlVN.getCardinality()) &&
            controllerDesc.getHierarchicalType().equals(Constants.PRIMITIVE) &&
            !contentDesc.uniqueInstance()) {
            Group fcInstance = (Group) newFcInstanceAsList(bootstrap, (ComponentType) type,
                    controllerDesc, contentDesc, deploymentVN);
            result = (Component) fcInstance.getGroupByType();
        } else {
            result = newFcInstance(bootstrap, (ComponentType) type, controllerDesc, contentDesc,
                    deploymentVN);
        }

        // registry.addComponent(result); // the registry can handle groups
        return result;
    }

    /**
     * Value holder pairing the bootstrap component with exactly one of the two
     * possible deployment virtual-node flavors (GCM or legacy), or neither.
     */
    protected class ObjectsContainer {
        private GCMVirtualNode gcmDeploymentVN;
        private org.objectweb.proactive.core.descriptor.data.VirtualNode deploymentVN;
        private Component bootstrap;

        public ObjectsContainer(org.objectweb.proactive.core.descriptor.data.VirtualNode dVn,
                Component bstrp) {
            deploymentVN = dVn;
            gcmDeploymentVN = null;
            bootstrap = bstrp;
        }

        public ObjectsContainer(GCMVirtualNode gcmVn, Component bstrp) {
            deploymentVN = null;
            gcmDeploymentVN = gcmVn;
            bootstrap = bstrp;
        }

        public ObjectsContainer(Component bstrp) {
            deploymentVN = null;
            gcmDeploymentVN = null;
            bootstrap = bstrp;
        }

        public org.objectweb.proactive.core.descriptor.data.VirtualNode getDvn() {
            return deploymentVN;
        }

        public Component getBootstrapComponent() {
            return bootstrap;
        }

        public GCMVirtualNode getGCMDeploymentVN() {
            return gcmDeploymentVN;
        }
    }

    /**
     * Instantiates one component per node of the (activated) virtual node and
     * returns them as a list; a null virtual node yields a local instance list.
     */
    private List<Component> newFcInstanceAsList(Component bootstrap, Type type,
            ControllerDescription controllerDesc, ContentDescription contentDesc,
            org.objectweb.proactive.core.descriptor.data.VirtualNode virtualNode) throws Exception {
        ProActiveGenericFactory genericFactory = (ProActiveGenericFactory) Fractal
                .getGenericFactory(bootstrap);
        if (virtualNode == null) {
            return genericFactory.newFcInstanceAsList(type, controllerDesc, contentDesc, (Node[]) null);
        }
        try {
            virtualNode.activate();
            return genericFactory.newFcInstanceAsList(type, controllerDesc, contentDesc, virtualNode
                    .getNodes());
        } catch (NodeException e) {
            // NOTE(review): the original NodeException is not chained as a cause here.
            throw new InstantiationException(
                "could not instantiate components due to a deployment problem : " + e.getMessage());
        }
    }

    /**
     * Instantiates a single component on the first node of the (activated)
     * virtual node; a null virtual node yields a local instance.
     */
    private Component newFcInstance(Component bootstrap, Type type,
            ControllerDescription controllerDesc, ContentDescription contentDesc,
            org.objectweb.proactive.core.descriptor.data.VirtualNode virtualNode) throws Exception {
        ProActiveGenericFactory genericFactory = (ProActiveGenericFactory) Fractal
                .getGenericFactory(bootstrap);
        if (virtualNode == null) {
            return genericFactory.newFcInstance(type, controllerDesc, contentDesc, (Node) null);
        }
        try {
            virtualNode.activate();
            if (virtualNode.getNodes().length == 0) {
                throw new InstantiationException(
                    "Cannot create component on virtual node as no node is associated with this virtual node");
            }
            return genericFactory.newFcInstance(type, controllerDesc, contentDesc, virtualNode.getNode());
        } catch (NodeException e) {
            // NOTE(review): the original NodeException is not chained as a cause here.
            throw new InstantiationException(
                "could not instantiate components due to a deployment problem : " + e.getMessage());
        }
    }
}
package ca.corefacility.bioinformatics.irida.web.controller.api; import static org.springframework.hateoas.mvc.ControllerLinkBuilder.linkTo; import static org.springframework.hateoas.mvc.ControllerLinkBuilder.methodOn; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.StreamSupport; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.core.io.FileSystemResource; import org.springframework.hateoas.Link; import org.springframework.http.MediaType; import org.springframework.stereotype.Controller; import org.springframework.ui.ModelMap; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.ResponseBody; import ca.corefacility.bioinformatics.irida.exceptions.EntityNotFoundException; import ca.corefacility.bioinformatics.irida.model.enums.AnalysisState; import ca.corefacility.bioinformatics.irida.model.sample.SampleSequencingObjectJoin; import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFile; import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFilePair; import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequencingObject; import ca.corefacility.bioinformatics.irida.model.sequenceFile.SingleEndSequenceFile; import ca.corefacility.bioinformatics.irida.model.workflow.analysis.Analysis; import ca.corefacility.bioinformatics.irida.model.workflow.analysis.AnalysisAssemblyAnnotation; import ca.corefacility.bioinformatics.irida.model.workflow.analysis.AnalysisAssemblyAnnotationCollection; import ca.corefacility.bioinformatics.irida.model.workflow.analysis.AnalysisOutputFile; import ca.corefacility.bioinformatics.irida.model.workflow.analysis.AnalysisPhylogenomicsPipeline; import 
ca.corefacility.bioinformatics.irida.model.workflow.analysis.AnalysisSISTRTyping; import ca.corefacility.bioinformatics.irida.model.workflow.submission.AnalysisSubmission; import ca.corefacility.bioinformatics.irida.service.AnalysisSubmissionService; import ca.corefacility.bioinformatics.irida.service.sample.SampleService; import ca.corefacility.bioinformatics.irida.web.assembler.resource.ResourceCollection; import ca.corefacility.bioinformatics.irida.web.controller.api.samples.RESTSampleSequenceFilesController; import com.google.common.collect.ImmutableMap; /** * REST controller to manage sharing of {@link AnalysisSubmission}, * {@link Analysis}, and {@link AnalysisOutputFile} classes. */ @Controller @RequestMapping(value = "/api/analysisSubmissions") public class RESTAnalysisSubmissionController extends RESTGenericController<AnalysisSubmission> { private AnalysisSubmissionService analysisSubmissionService; private SampleService sampleService; // rel for reading the analysis for a submission public static final String ANALYSIS_REL = "analysis"; public static final String FILE_REL = "outputFile"; public static final String SUBMISSIONS_REL = "analysisSubmissions"; // rels for reading input files for a submission public static final String INPUT_FILES_UNPAIRED_REL = "input/unpaired"; public static final String INPUT_FILES_PAIRED_REL = "input/paired"; // available analysis types to filter for public static Map<String, Class<? extends Analysis>> ANALYSIS_TYPES = ImmutableMap.<String, Class<? extends Analysis>>builder(). put("phylogenomics", AnalysisPhylogenomicsPipeline.class). put("assembly", AnalysisAssemblyAnnotation.class). put("assembly-collection", AnalysisAssemblyAnnotationCollection.class). 
put("sistr", AnalysisSISTRTyping.class).build(); @Autowired public RESTAnalysisSubmissionController(AnalysisSubmissionService analysisSubmissionService, SampleService sampleService) { super(analysisSubmissionService, AnalysisSubmission.class); this.analysisSubmissionService = analysisSubmissionService; this.sampleService = sampleService; } /** * Get all analyses of a given type * * @param type * The type to request * @return ModelMap containing the requested type of resource */ @RequestMapping("/analysisType/{type}") public ModelMap listOfType(@PathVariable String type) { ModelMap model = new ModelMap(); if (!ANALYSIS_TYPES.containsKey(type)) { throw new EntityNotFoundException("Analysis type not found"); } Class<? extends Analysis> analysisClass = ANALYSIS_TYPES.get(type); Iterable<AnalysisSubmission> submissions = analysisSubmissionService.findAll(); List<AnalysisSubmission> analysesOfType = StreamSupport.stream(submissions.spliterator(), false) .filter((s) -> s.getAnalysis() != null && s.getAnalysis().getClass().equals(analysisClass)) .collect(Collectors.toList()); ResourceCollection<AnalysisSubmission> resourceCollection = new ResourceCollection<>(analysesOfType.size()); for (AnalysisSubmission s : analysesOfType) { s.add(constructCustomResourceLinks(s)); s.add(linkTo(methodOn(RESTAnalysisSubmissionController.class).getResource(s.getId())).withSelfRel()); resourceCollection.add(s); } resourceCollection.add(linkTo(methodOn(RESTAnalysisSubmissionController.class).listOfType(type)).withSelfRel()); resourceCollection.add(linkTo(RESTAnalysisSubmissionController.class).withRel(SUBMISSIONS_REL)); model.addAttribute(RESOURCE_NAME, resourceCollection); return model; } /** * {@inheritDoc} */ @Override protected Collection<Link> constructCollectionResourceLinks(ResourceCollection<AnalysisSubmission> list) { Collection<Link> links = super.constructCollectionResourceLinks(list); for (String type : ANALYSIS_TYPES.keySet()) { 
links.add(linkTo(methodOn(RESTAnalysisSubmissionController.class).listOfType(type)).withRel( SUBMISSIONS_REL + "/" + type)); } return links; } /** * Get the {@link SequenceFilePair}s used for the {@link AnalysisSubmission} * * @param identifier * {@link AnalysisSubmission} id * @return list of {@link SequenceFilePair}s */ @RequestMapping("/{identifier}/sequenceFiles/pairs") public ModelMap getAnalysisInputFilePairs(@PathVariable Long identifier) { ModelMap map = new ModelMap(); AnalysisSubmission analysisSubmission = analysisSubmissionService.read(identifier); Set<SequenceFilePair> pairs = analysisSubmission.getPairedInputFiles(); ResourceCollection<SequenceFilePair> resources = new ResourceCollection<>(pairs.size()); for (SequenceFilePair pair : pairs) { SampleSequencingObjectJoin join = sampleService.getSampleForSequencingObject(pair); Long sampleId = join.getSubject().getId(); pair = RESTSampleSequenceFilesController.addSequencingObjectLinks(pair, sampleId); resources.add(pair); } resources.add(linkTo(methodOn(RESTAnalysisSubmissionController.class).getAnalysisInputFilePairs(identifier)) .withSelfRel()); map.addAttribute(RESTGenericController.RESOURCE_NAME, resources); return map; } /** * get the {@link SequenceFile}s not in {@link SequenceFilePair}s used for * the {@link AnalysisSubmission} * * @param identifier * the {@link AnalysisSubmission} id * @return list of {@link SequenceFile}s */ @RequestMapping("/{identifier}/sequenceFiles/unpaired") public ModelMap getAnalysisInputUnpairedFiles(@PathVariable Long identifier) { ModelMap map = new ModelMap(); AnalysisSubmission analysisSubmission = analysisSubmissionService.read(identifier); Set<SingleEndSequenceFile> inputFilesSingleEnd = analysisSubmission.getInputFilesSingleEnd(); ResourceCollection<SequencingObject> resources = new ResourceCollection<>(inputFilesSingleEnd.size()); for (SingleEndSequenceFile file : inputFilesSingleEnd) { SampleSequencingObjectJoin join = 
sampleService.getSampleForSequencingObject(file); SequencingObject sequencingObject = join.getObject(); RESTSampleSequenceFilesController.addSequencingObjectLinks(sequencingObject, join.getSubject().getId()); resources.add(sequencingObject); } resources .add(linkTo(methodOn(RESTAnalysisSubmissionController.class).getAnalysisInputUnpairedFiles(identifier)) .withSelfRel()); map.addAttribute(RESTGenericController.RESOURCE_NAME, resources); return map; } /** * Get the {@link Analysis} for an {@link AnalysisSubmission}. * * @param identifier * {@link AnalysisSubmission} identifier to read * @return ModelMap containing the {@link Analysis} */ @RequestMapping("/{identifier}/analysis") public ModelMap getAnalysisForSubmission(@PathVariable Long identifier) { ModelMap model = new ModelMap(); AnalysisSubmission read = analysisSubmissionService.read(identifier); if (read.getAnalysisState() != AnalysisState.COMPLETED) { throw new EntityNotFoundException("Analysis is not completed"); } Analysis analysis = read.getAnalysis(); analysis.add(linkTo(methodOn(RESTAnalysisSubmissionController.class).getAnalysisForSubmission(identifier)) .withSelfRel()); /* * Add links to the available files */ for (String name : analysis.getAnalysisOutputFileNames()) { analysis.add(linkTo( methodOn(RESTAnalysisSubmissionController.class).getAnalysisOutputFile(identifier, name)).withRel( FILE_REL + "/" + name)); } model.addAttribute(RESOURCE_NAME, analysis); return model; } /** * Get an analysis output file for a given submission * * @param submissionId * The {@link AnalysisSubmission} id * @param fileType * The {@link AnalysisOutputFile} type as defined in the * {@link Analysis} subclass * @return {@link ModelMap} containing the {@link AnalysisOutputFile} */ @RequestMapping("/{submissionId}/analysis/file/{fileType}") public ModelMap getAnalysisOutputFile(@PathVariable Long submissionId, @PathVariable String fileType) { ModelMap model = new ModelMap(); AnalysisSubmission read = 
analysisSubmissionService.read(submissionId); if (read.getAnalysisState() != AnalysisState.COMPLETED) { throw new EntityNotFoundException("Analysis is not completed"); } AnalysisOutputFile analysisOutputFile = read.getAnalysis().getAnalysisOutputFile(fileType); analysisOutputFile.add(linkTo( methodOn(RESTAnalysisSubmissionController.class).getAnalysisOutputFile(submissionId, fileType)) .withSelfRel()); model.addAttribute(RESOURCE_NAME, analysisOutputFile); return model; } /** * Get the actual file contents for an analysis output file. * * @param submissionId * The {@link AnalysisSubmission} id * @param fileType * The {@link AnalysisOutputFile} type as defined in the * {@link Analysis} subclass * @return a {@link FileSystemResource} containing the contents of the * {@link AnalysisOutputFile}. */ @RequestMapping(value = "/{submissionId}/analysis/file/{fileType}", produces = MediaType.TEXT_PLAIN_VALUE) @ResponseBody public FileSystemResource getAnalysisOutputFileContents(@PathVariable Long submissionId, @PathVariable String fileType) { AnalysisSubmission read = analysisSubmissionService.read(submissionId); if (read.getAnalysisState() != AnalysisState.COMPLETED) { throw new EntityNotFoundException("Analysis is not completed"); } AnalysisOutputFile analysisOutputFile = read.getAnalysis().getAnalysisOutputFile(fileType); return new FileSystemResource(analysisOutputFile.getFile().toFile()); } /** * {@inheritDoc} add analysis rel if available */ @Override protected Collection<Link> constructCustomResourceLinks(AnalysisSubmission resource) { Collection<Link> links = new HashSet<>(); if (resource.getAnalysisState().equals(AnalysisState.COMPLETED)) { links.add(linkTo( methodOn(RESTAnalysisSubmissionController.class).getAnalysisForSubmission(resource.getId())) .withRel(ANALYSIS_REL)); } links.add(linkTo( methodOn(RESTAnalysisSubmissionController.class).getAnalysisInputUnpairedFiles(resource.getId())) .withRel(INPUT_FILES_UNPAIRED_REL)); 
links.add(linkTo(methodOn(RESTAnalysisSubmissionController.class).getAnalysisInputFilePairs(resource.getId())) .withRel(INPUT_FILES_PAIRED_REL)); return links; } }
package org.carlspring.strongbox.artifact.coordinates; import org.carlspring.maven.commons.util.ArtifactUtils; import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.DefaultArtifact; import org.apache.maven.artifact.handler.DefaultArtifactHandler; /** * @author carlspring */ public class MavenArtifactCoordinates extends AbstractArtifactCoordinates { private static final String GROUPID = "groupId"; private static final String ARTIFACTID = "artifactId"; private static final String VERSION = "version"; private static final String CLASSIFIER = "classifier"; private static final String EXTENSION = "extension"; private String groupId; private String artifactId; private String version; private String classifier; private String extension; public MavenArtifactCoordinates() { defineCoordinates(GROUPID, ARTIFACTID, VERSION, CLASSIFIER, EXTENSION); } public MavenArtifactCoordinates(String path) { this(ArtifactUtils.convertPathToArtifact(path)); } public MavenArtifactCoordinates(String... coordinateValues) { this(); int i = 0; for (String coordinateValue : coordinateValues) { // Please, forgive the following construct... 
// (In my defense, I felt equally stupid and bad for doing it this way): switch (i) { case 0: setGroupId(coordinateValue); break; case 1: setArtifactId(coordinateValue); break; case 2: setVersion(coordinateValue); break; case 3: setClassifier(coordinateValue); break; case 4: setExtension(coordinateValue); break; default: break; } i++; } } public MavenArtifactCoordinates(Artifact artifact) { this(); setGroupId(artifact.getGroupId()); setArtifactId(artifact.getArtifactId()); setVersion(artifact.getVersion()); setClassifier(artifact.getClassifier()); if (artifact.getFile() != null) { String extension = artifact.getFile().getAbsolutePath(); extension = extension.substring(extension.lastIndexOf("."), extension.length()); setExtension(extension); } else { setExtension("jar"); } } @Override public String toPath() { return ArtifactUtils.convertArtifactToPath(toArtifact()); } public Artifact toArtifact() { return new DefaultArtifact(getGroupId(), getArtifactId(), getVersion(), "compile", getExtension(), getClassifier(), new DefaultArtifactHandler(getExtension())); } public String getGroupId() { return groupId; } public void setGroupId(String groupId) { this.groupId = groupId; setCoordinate(GROUPID, this.groupId); } public String getArtifactId() { return artifactId; } public void setArtifactId(String artifactId) { this.artifactId = artifactId; setCoordinate(ARTIFACTID, this.artifactId); } @Override public String getId() { return artifactId; } @Override public void setId(String id) { setArtifactId(id); } @Override public String getVersion() { return version; } @Override public void setVersion(String version) { this.version = version; setCoordinate(VERSION, this.version); } public String getClassifier() { return classifier; } public void setClassifier(String classifier) { this.classifier = classifier; setCoordinate(CLASSIFIER, this.classifier); } public String getExtension() { return extension; } public void setExtension(String extension) { this.extension = extension; 
setCoordinate(EXTENSION, this.extension); } }
package io.enmasse.systemtest.isolated.api; import io.enmasse.address.model.Address; import io.enmasse.address.model.AddressBuilder; import io.enmasse.address.model.AddressSpace; import io.enmasse.address.model.AddressSpaceBuilder; import io.enmasse.address.model.DoneableAddressSpace; import io.enmasse.config.AnnotationKeys; import io.enmasse.systemtest.UserCredentials; import io.enmasse.systemtest.bases.TestBase; import io.enmasse.systemtest.bases.isolated.ITestIsolatedStandard; import io.enmasse.systemtest.executor.ExecutionResultData; import io.enmasse.systemtest.isolated.Credentials; import io.enmasse.systemtest.model.addressplan.DestinationPlan; import io.enmasse.systemtest.model.addressspace.AddressSpacePlans; import io.enmasse.systemtest.model.addressspace.AddressSpaceType; import io.enmasse.systemtest.platform.KubeCMDClient; import io.enmasse.systemtest.resources.CliOutputData; import io.enmasse.systemtest.time.TimeoutBudget; import io.enmasse.systemtest.utils.AddressSpaceUtils; import io.enmasse.systemtest.utils.AddressUtils; import io.enmasse.systemtest.utils.TestUtils; import io.enmasse.systemtest.utils.UserUtils; import io.enmasse.user.model.v1.Operation; import io.enmasse.user.model.v1.User; import io.enmasse.user.model.v1.UserAuthorizationBuilder; import io.enmasse.user.model.v1.UserBuilder; import io.vertx.core.json.JsonObject; import org.junit.jupiter.api.Test; import java.util.Collections; import java.util.Optional; import java.util.concurrent.TimeUnit; import static io.enmasse.systemtest.platform.KubeCMDClient.createCR; import static io.enmasse.systemtest.platform.KubeCMDClient.patchCR; import static io.enmasse.systemtest.platform.KubeCMDClient.updateCR; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; class 
CustomResourceDefinitionAddressSpacesTest extends TestBase implements ITestIsolatedStandard { @Test void testAddressSpaceCreateViaCmdRemoveViaApi() throws Exception { AddressSpace brokered = new AddressSpaceBuilder() .withNewMetadata() .withName("crd-space-foo") .withNamespace(kubernetes.getInfraNamespace()) .endMetadata() .withNewSpec() .withType(AddressSpaceType.BROKERED.toString()) .withPlan(AddressSpacePlans.BROKERED) .withNewAuthenticationService() .withName("standard-authservice") .endAuthenticationService() .endSpec() .build(); JsonObject addressSpacePayloadJson = AddressSpaceUtils.addressSpaceToJson(brokered); isolatedResourcesManager.addToAddressSpaces(brokered); createCR(addressSpacePayloadJson.toString()); resourcesManager.waitForAddressSpaceReady(brokered); resourcesManager.deleteAddressSpace(brokered); TestUtils.waitForNamespaceDeleted(kubernetes, brokered.getMetadata().getName()); TestUtils.waitUntilCondition(() -> { ExecutionResultData allAddresses = KubeCMDClient.getAddressSpace(environment.namespace(), "-a"); return allAddresses.getStdOut() + allAddresses.getStdErr(); }, "No resources found.", new TimeoutBudget(30, TimeUnit.SECONDS)); } @Test void testReplacePatchAddressSpace() throws Exception { // create new address space with plan "small" AddressSpace standard = new AddressSpaceBuilder() .withNewMetadata() .withName("crd-patch-space") .withNamespace(kubernetes.getInfraNamespace()) .endMetadata() .withNewSpec() .withType(AddressSpaceType.STANDARD.toString()) .withPlan(AddressSpacePlans.STANDARD_SMALL) .withNewAuthenticationService() .withName("standard-authservice") .endAuthenticationService() .endSpec() .build(); isolatedResourcesManager.addToAddressSpaces(standard); createCR(AddressSpaceUtils.addressSpaceToJson(standard).toString()); isolatedResourcesManager.addToAddressSpaces(standard); resourcesManager.waitForAddressSpaceReady(standard); String currentConfig = resourcesManager.getAddressSpace(kubernetes.getInfraNamespace(), 
standard.getMetadata().getName()).getAnnotation(AnnotationKeys.APPLIED_CONFIGURATION); log.info("Initial config: {}", currentConfig); // change plan to "unlimited" standard = new DoneableAddressSpace(standard) .editMetadata().withResourceVersion(null).endMetadata() .editSpec().withPlan(AddressSpacePlans.STANDARD_UNLIMITED).endSpec() .done(); assertTrue(updateCR(AddressSpaceUtils.addressSpaceToJson(standard).toString()).getRetCode()); AddressSpaceUtils.waitForAddressSpaceConfigurationApplied(standard, currentConfig); assertThat(resourcesManager.getAddressSpace(standard.getMetadata().getName()).getSpec().getPlan(), is(AddressSpacePlans.STANDARD_UNLIMITED)); currentConfig = resourcesManager.getAddressSpace(kubernetes.getInfraNamespace(), standard.getMetadata().getName()).getAnnotation(AnnotationKeys.APPLIED_CONFIGURATION); // Patch back to "small" plan assertTrue(patchCR(standard.getKind().toLowerCase(), standard.getMetadata().getName(), "{\"spec\":{\"plan\":\"" + AddressSpacePlans.STANDARD_SMALL + "\"}}").getRetCode()); standard = resourcesManager.getAddressSpace(standard.getMetadata().getName()); resourcesManager.waitForAddressSpaceReady(standard); AddressSpaceUtils.waitForAddressSpaceConfigurationApplied(standard, currentConfig); assertThat(resourcesManager.getAddressSpace(standard.getMetadata().getName()).getSpec().getPlan(), is(AddressSpacePlans.STANDARD_SMALL)); } @Test void testAddressSpaceCreateViaApiRemoveViaCmd() throws Exception { AddressSpace brokered = new AddressSpaceBuilder() .withNewMetadata() .withName("crd-space-bar") .withNamespace(kubernetes.getInfraNamespace()) .endMetadata() .withNewSpec() .withType(AddressSpaceType.BROKERED.toString()) .withPlan(AddressSpacePlans.BROKERED) .withNewAuthenticationService() .withName("standard-authservice") .endAuthenticationService() .endSpec() .build(); resourcesManager.createAddressSpace(brokered); ExecutionResultData addressSpaces = KubeCMDClient.getAddressSpace(environment.namespace(), 
brokered.getMetadata().getName()); String output = addressSpaces.getStdOut(); assertTrue(output.contains(brokered.getMetadata().getName()), String.format("Get all addressspaces should contains '%s'; but contains only: %s", brokered.getMetadata().getName(), output)); KubeCMDClient.deleteAddressSpace(environment.namespace(), brokered.getMetadata().getName()); AddressSpaceUtils.waitForAddressSpaceDeleted(brokered); TestUtils.waitUntilCondition(() -> { ExecutionResultData allAddresses = KubeCMDClient.getAddressSpace(environment.namespace(), "-a"); return allAddresses.getStdErr(); }, "No resources found.", new TimeoutBudget(30, TimeUnit.SECONDS)); } @Test void testCreateAddressSpaceViaCmdNonAdminUser() throws Exception { String namespace = Credentials.namespace(); UserCredentials user = Credentials.userCredentials(); try { AddressSpace brokered = new AddressSpaceBuilder() .withNewMetadata() .withName("crd-space-baz") .withNamespace(namespace) .endMetadata() .withNewSpec() .withType(AddressSpaceType.BROKERED.toString()) .withPlan(AddressSpacePlans.BROKERED) .withNewAuthenticationService() .withName("standard-authservice") .endAuthenticationService() .endSpec() .build(); isolatedResourcesManager.addToAddressSpaces(brokered); JsonObject addressSpacePayloadJson = AddressSpaceUtils.addressSpaceToJson(brokered); KubeCMDClient.loginUser(user.getUsername(), user.getPassword()); KubeCMDClient.createNamespace(namespace); createCR(namespace, addressSpacePayloadJson.toString()); resourcesManager.waitForAddressSpaceReady(brokered); resourcesManager.deleteAddressSpace(brokered); TestUtils.waitForNamespaceDeleted(kubernetes, brokered.getMetadata().getName()); TestUtils.waitUntilCondition(() -> { ExecutionResultData allAddresses = KubeCMDClient.getAddressSpace(namespace, Optional.empty()); return allAddresses.getStdOut() + allAddresses.getStdErr(); }, "No resources found.", new TimeoutBudget(30, TimeUnit.SECONDS)); } finally { KubeCMDClient.loginUser(environment.getApiToken()); 
KubeCMDClient.switchProject(environment.namespace()); kubernetes.deleteNamespace(namespace); } } @Test void testCliOutput() throws Exception { String namespace = "cli-output"; UserCredentials user = new UserCredentials("pepan", "pepan"); try { // AddressSpace part AddressSpace brokered = new AddressSpaceBuilder() .withNewMetadata() .withName("cdr-brokered") .withNamespace(namespace) .endMetadata() .withNewSpec() .withType(AddressSpaceType.BROKERED.toString()) .withPlan(AddressSpacePlans.BROKERED) .withNewAuthenticationService() .withName("standard-authservice") .endAuthenticationService() .endSpec() .build(); AddressSpace standard = new AddressSpaceBuilder() .withNewMetadata() .withName("crd-standard") .withNamespace(namespace) .endMetadata() .withNewSpec() .withType(AddressSpaceType.STANDARD.toString()) .withPlan(AddressSpacePlans.STANDARD_MEDIUM) .withNewAuthenticationService() .withName("standard-authservice") .endAuthenticationService() .endSpec() .build(); isolatedResourcesManager.addToAddressSpaces(brokered); isolatedResourcesManager.addToAddressSpaces(standard); KubeCMDClient.loginUser(user.getUsername(), user.getPassword()); KubeCMDClient.createNamespace(namespace); createCR(namespace, AddressSpaceUtils.addressSpaceToJson(brokered).toString()); createCR(namespace, AddressSpaceUtils.addressSpaceToJson(standard).toString()); ExecutionResultData result = KubeCMDClient.getAddressSpace(namespace, Optional.of("wide")); assertTrue(result.getStdOut().contains(brokered.getMetadata().getName())); assertTrue(result.getStdOut().contains(standard.getMetadata().getName())); resourcesManager.waitForAddressSpaceReady(brokered); CliOutputData data = new CliOutputData(KubeCMDClient.getAddressSpace(namespace, Optional.of("wide")).getStdOut(), CliOutputData.CliOutputDataType.ADDRESS_SPACE); assertTrue(((CliOutputData.AddressSpaceRow) data.getData(brokered.getMetadata().getName())).isReady()); if (((CliOutputData.AddressSpaceRow) 
data.getData(brokered.getMetadata().getName())).isReady()) { assertEquals("", ((CliOutputData.AddressSpaceRow) data.getData(brokered.getMetadata().getName())).getStatus()); } else { assertThat(((CliOutputData.AddressSpaceRow) data.getData(brokered.getMetadata().getName())).getStatus(), containsString("Following deployments and statefulsets are not ready")); } resourcesManager.waitForAddressSpaceReady(standard); data = new CliOutputData(KubeCMDClient.getAddressSpace(namespace, Optional.of("wide")).getStdOut(), CliOutputData.CliOutputDataType.ADDRESS_SPACE); assertTrue(((CliOutputData.AddressSpaceRow) data.getData(brokered.getMetadata().getName())).isReady()); assertTrue(((CliOutputData.AddressSpaceRow) data.getData(standard.getMetadata().getName())).isReady()); assertEquals("Active", ((CliOutputData.AddressSpaceRow) data.getData(standard.getMetadata().getName())).getPhase()); assertEquals("", ((CliOutputData.AddressSpaceRow) data.getData(standard.getMetadata().getName())).getStatus()); // User part UserCredentials cred = new UserCredentials("pepanatestovani", "pepaNaTestovani"); User testUser = UserUtils.createUserResource(cred) .editSpec() .withAuthorization(Collections.singletonList( new UserAuthorizationBuilder() .withAddresses("*") .withOperations(Operation.send, Operation.recv).build())) .endSpec() .done(); //create user assertThat(KubeCMDClient.createCR(namespace, UserUtils.userToJson(brokered.getMetadata().getName(), testUser).toString()).getRetCode(), is(true)); assertThat(KubeCMDClient.createCR(namespace, UserUtils.userToJson(standard.getMetadata().getName(), testUser).toString()).getRetCode(), is(true)); TimeoutBudget budget = new TimeoutBudget(1, TimeUnit.MINUTES); UserUtils.waitForUserActive(new UserBuilder(testUser).editOrNewMetadata().withName(String.format("%s.%s", brokered.getMetadata().getName(), cred.getUsername())).withNamespace(namespace).endMetadata().build(), budget); UserUtils.waitForUserActive(new 
UserBuilder(testUser).editOrNewMetadata().withName(String.format("%s.%s", standard.getMetadata().getName(), cred.getUsername())).withNamespace(namespace).endMetadata().build(), budget); data = new CliOutputData(KubeCMDClient.getUser(namespace).getStdOut(), CliOutputData.CliOutputDataType.USER); assertEquals(((CliOutputData.UserRow) data.getData(String.format("%s.%s", brokered.getMetadata().getName(), cred.getUsername()))).getUsername(), cred.getUsername()); assertEquals(data.getData(String.format("%s.%s", standard.getMetadata().getName(), cred.getUsername())).getType(), "password"); // Address part Address queue = new AddressBuilder() .withNewMetadata() .withNamespace(brokered.getMetadata().getNamespace()) .withName(AddressUtils.generateAddressMetadataName(brokered, "queue")) .endMetadata() .withNewSpec() .withType("queue") .withAddress("queue") .withPlan(DestinationPlan.BROKERED_QUEUE) .endSpec() .build(); Address topicBrokered = new AddressBuilder() .withNewMetadata() .withNamespace(brokered.getMetadata().getNamespace()) .withName(AddressUtils.generateAddressMetadataName(brokered, "topic")) .endMetadata() .withNewSpec() .withType("topic") .withAddress("topic") .withPlan(DestinationPlan.BROKERED_TOPIC) .endSpec() .build(); Address topicStandard = new AddressBuilder() .withNewMetadata() .withNamespace(standard.getMetadata().getNamespace()) .withName(AddressUtils.generateAddressMetadataName(standard, "topic")) .endMetadata() .withNewSpec() .withType("topic") .withAddress("topic") .withPlan(DestinationPlan.STANDARD_SMALL_TOPIC) .endSpec() .build(); Address anycast = new AddressBuilder() .withNewMetadata() .withNamespace(standard.getMetadata().getNamespace()) .withName(AddressUtils.generateAddressMetadataName(standard, "anycast")) .endMetadata() .withNewSpec() .withType("anycast") .withAddress("anycast") .withPlan(DestinationPlan.STANDARD_SMALL_ANYCAST) .endSpec() .build(); assertTrue(KubeCMDClient.createCR(namespace, AddressUtils.addressToYaml(queue)).getRetCode()); 
assertTrue(KubeCMDClient.createCR(namespace, AddressUtils.addressToYaml(topicBrokered)).getRetCode()); assertTrue(KubeCMDClient.createCR(namespace, AddressUtils.addressToYaml(topicStandard)).getRetCode()); assertTrue(KubeCMDClient.createCR(namespace, AddressUtils.addressToYaml(anycast)).getRetCode()); data = new CliOutputData(KubeCMDClient.getAddress(namespace).getStdOut(), CliOutputData.CliOutputDataType.ADDRESS); assertEquals(((CliOutputData.AddressRow) data.getData(topicStandard.getMetadata().getName())).getPlan(), DestinationPlan.STANDARD_SMALL_TOPIC); AddressUtils.waitForDestinationsReady(new TimeoutBudget(5, TimeUnit.MINUTES), queue, topicBrokered); data = new CliOutputData(KubeCMDClient.getAddress(namespace).getStdOut(), CliOutputData.CliOutputDataType.ADDRESS); assertTrue(((CliOutputData.AddressRow) data.getData(queue.getMetadata().getName())).isReady()); assertEquals(((CliOutputData.AddressRow) data.getData(topicStandard.getMetadata().getName())).getPlan(), DestinationPlan.STANDARD_SMALL_TOPIC); AddressUtils.waitForDestinationsReady(new TimeoutBudget(5, TimeUnit.MINUTES), anycast, topicStandard); data = new CliOutputData(KubeCMDClient.getAddress(namespace).getStdOut(), CliOutputData.CliOutputDataType.ADDRESS); assertTrue(((CliOutputData.AddressRow) data.getData(queue.getMetadata().getName())).isReady()); assertEquals(((CliOutputData.AddressRow) data.getData(topicStandard.getMetadata().getName())).getPlan(), DestinationPlan.STANDARD_SMALL_TOPIC); assertEquals(((CliOutputData.AddressRow) data.getData(anycast.getMetadata().getName())).getPhase(), "Active"); // Clean part KubeCMDClient.deleteAddressSpace(namespace, brokered.getMetadata().getName()); KubeCMDClient.deleteAddressSpace(namespace, standard.getMetadata().getName()); AddressSpaceUtils.waitForAddressSpaceDeleted(brokered); AddressSpaceUtils.waitForAddressSpaceDeleted(standard); TestUtils.waitUntilCondition(() -> { ExecutionResultData allAddresses = KubeCMDClient.getAddressSpace(namespace, 
Optional.empty()); return allAddresses.getStdOut() + allAddresses.getStdErr(); }, "No resources found", new TimeoutBudget(30, TimeUnit.SECONDS)); } finally { KubeCMDClient.loginUser(environment.getApiToken()); KubeCMDClient.switchProject(environment.namespace()); kubernetes.deleteNamespace(namespace); } } @Test void testCannotCreateAddressSpaceViaCmdNonAdminUser() throws Exception { UserCredentials user = Credentials.userCredentials(); try { AddressSpace brokered = new AddressSpaceBuilder() .withNewMetadata() .withName("crd-addr-space-barr") .withNamespace(kubernetes.getInfraNamespace()) .endMetadata() .withNewSpec() .withType(AddressSpaceType.BROKERED.toString()) .withPlan(AddressSpacePlans.BROKERED) .withNewAuthenticationService() .withName("standard-authservice") .endAuthenticationService() .endSpec() .build(); JsonObject addressSpacePayloadJson = AddressSpaceUtils.addressSpaceToJson(brokered); KubeCMDClient.loginUser(user.getUsername(), user.getPassword()); assertThat(KubeCMDClient.createCR(addressSpacePayloadJson.toString()).getRetCode(), is(false)); } finally { KubeCMDClient.loginUser(environment.getApiToken()); KubeCMDClient.switchProject(environment.namespace()); } } }
package org.eclipse.xtext.scoping.impl;

import static com.google.common.collect.Iterables.*;

import java.util.Collections;
import java.util.Iterator;
import java.util.List;

import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.resource.impl.ResourceSetImpl;
import org.eclipse.xtext.index.IndexTestLanguageStandaloneSetup;
import org.eclipse.xtext.index.indexTestLanguage.Datatype;
import org.eclipse.xtext.index.indexTestLanguage.Entity;
import org.eclipse.xtext.index.indexTestLanguage.IndexTestLanguagePackage;
import org.eclipse.xtext.junit.AbstractXtextTests;
import org.eclipse.xtext.naming.DefaultDeclarativeQualifiedNameProvider;
import org.eclipse.xtext.naming.IQualifiedNameConverter;
import org.eclipse.xtext.naming.IQualifiedNameProvider;
import org.eclipse.xtext.naming.QualifiedName;
import org.eclipse.xtext.resource.IEObjectDescription;
import org.eclipse.xtext.resource.IResourceDescription;
import org.eclipse.xtext.resource.IResourceServiceProvider;
import org.eclipse.xtext.resource.ResourceSetReferencingResourceSetImpl;
import org.eclipse.xtext.resource.XtextResource;
import org.eclipse.xtext.resource.impl.DefaultResourceDescription;
import org.eclipse.xtext.resource.impl.DefaultResourceDescriptionManager;
import org.eclipse.xtext.resource.impl.DefaultResourceServiceProvider;
import org.eclipse.xtext.resource.impl.ResourceServiceProviderRegistryImpl;
import org.eclipse.xtext.scoping.IScope;
import org.eclipse.xtext.scoping.ISelector;
import org.eclipse.xtext.util.StringInputStream;

import com.google.common.collect.Iterables;
import com.google.inject.internal.Lists;

/**
 * Tests for {@link ImportedNamespaceAwareLocalScopeProvider}: resolution of
 * simple and qualified names through namespace imports (with and without
 * wildcards), across resources in the same resource set, and across
 * referencing resource sets. Models are written in the "indextestlanguage"
 * test grammar.
 *
 * @author Sven Efftinge - Initial contribution and API
 *
 */
public class ImportNamespaceAwareScopeProviderTest extends AbstractXtextTests {
	// Scope provider under test.
	private ImportedNamespaceAwareLocalScopeProvider scopeProvider;
	// Global scope backing the local provider; wired with a custom
	// resource-description manager in setUp().
	private ResourceSetGlobalScopeProvider globalScopeProvider;
	private IQualifiedNameProvider nameProvider;
	private IQualifiedNameConverter nameConverter;

	@Override
	public void setUp() throws Exception {
		super.setUp();
		with(new IndexTestLanguageStandaloneSetup());
		globalScopeProvider = new ResourceSetGlobalScopeProvider();
		final DefaultResourceServiceProvider provider = new DefaultResourceServiceProvider();
		nameProvider = new DefaultDeclarativeQualifiedNameProvider();
		nameConverter = new IQualifiedNameConverter.DefaultImpl();
		// Force resource descriptions to be computed with our name provider.
		provider.setResourceDescriptionManager(new DefaultResourceDescriptionManager() {
			@Override
			public IResourceDescription getResourceDescription(Resource resource) {
				DefaultResourceDescription resourceDescription = new DefaultResourceDescription(resource, nameProvider);
				return resourceDescription;
			}
		});
		// Route every URI/content type to the provider configured above.
		globalScopeProvider.setResourceServiceProviderRegistry(new ResourceServiceProviderRegistryImpl() {
			@Override
			public IResourceServiceProvider getResourceServiceProvider(URI uri, String contentType) {
				return provider;
			}
		});
		scopeProvider = new ImportedNamespaceAwareLocalScopeProvider(globalScopeProvider, nameProvider, nameConverter);
	}

	/**
	 * A wildcard import ("import foo.bar.*") makes both the short and the
	 * fully-qualified names of the imported elements visible.
	 */
	public void testImports() throws Exception {
		XtextResource resource = getResource(new StringInputStream("import foo.bar.* "), URI
				.createURI("import.indextestlanguage"));
		resource.getResourceSet().createResource(URI.createURI("foo.indextestlanguage")).load(
				new StringInputStream("foo.bar { " + " entity Person { " + " String name " + " } "
						+ " datatype String " + "}"), null);
		IScope scope = scopeProvider.getScope(resource.getContents().get(0), IndexTestLanguagePackage.eINSTANCE
				.getFile_Elements());
		List<QualifiedName> names = toListOfNames(scope.getElements(ISelector.SELECT_ALL));
		assertEquals(names.toString(), 5, names.size());
		assertTrue(names.contains(nameConverter.toQualifiedName("Person")));
		assertTrue(names.contains(nameConverter.toQualifiedName("String")));
		assertTrue(names.contains(nameConverter.toQualifiedName("foo.bar")));
		assertTrue(names.contains(nameConverter.toQualifiedName("foo.bar.Person")));
		assertTrue(names.contains(nameConverter.toQualifiedName("foo.bar.String")));
	}

	/**
	 * Elements are resolvable relative to the enclosing namespace as well as
	 * by their fully-qualified name.
	 */
	public void testRelativeContext() throws Exception {
		final XtextResource resource = getResource(new StringInputStream(
				"stuff { " + " baz { " + " datatype String " + " } " + " entity Person {}" + "}"), URI
				.createURI("relative.indextestlanguage"));
		// Adapter exposing the resource's contents as a (re-)iterable.
		Iterable<EObject> allContents = new Iterable<EObject>() {
			public Iterator<EObject> iterator() {
				return resource.getAllContents();
			}
		};
		Entity entity = filter(allContents, Entity.class).iterator().next();
		IScope scope = scopeProvider.getScope(entity, IndexTestLanguagePackage.eINSTANCE.getProperty_Type());
		assertNotNull(scope.getSingleElement(new ISelector.SelectByName(nameConverter.toQualifiedName("baz.String"))));
		assertNotNull(scope.getSingleElement(new ISelector.SelectByName(nameConverter.toQualifiedName("stuff.baz.String"))));
	}

	// FIXME
	// public void testRelativePath() throws Exception {
	// final XtextResource resource = getResource(new StringInputStream(
	// "stuff { "
	// + " import baz.*"
	// + " baz { "
	// + " datatype String "
	// + " entity Person {"
	// + "}"), URI
	// .createURI("relative.indextestlanguage"));
	// Iterable<EObject> allContents = new Iterable<EObject>() {
	// public Iterator<EObject> iterator() {
	// return resource.getAllContents();
	// Entity entity = filter(allContents, Entity.class).iterator().next();
	// IScope scope = scopeProvider.getScope(entity, IndexTestLanguagePackage.eINSTANCE.getProperty_Type());
	// assertNotNull(scope.getSingleElement(new ISelector.SelectByName(nameConverter.toQualifiedName("String"))));
	// assertNotNull(scope.getSingleElement(new ISelector.SelectByName(nameConverter.toQualifiedName("baz.String"))));
	// assertNotNull(scope.getSingleElement(new ISelector.SelectByName(nameConverter.toQualifiedName("stuff.baz.String"))));

	/**
	 * An element imported via wildcard remains visible under its simple name,
	 * its local qualified name, and its original qualified name.
	 */
	public void testReexports2() throws Exception {
		final XtextResource resource = getResource(new StringInputStream("A { " + " B { " + " entity D {}" + " }"
				+ "}" + "E {" + " import A.B.*" + " entity D {}" + " datatype Context" + "}"), URI
				.createURI("testReexports2.indextestlanguage"));
		Iterable<EObject> allContents = new Iterable<EObject>() {
			public Iterator<EObject> iterator() {
				return resource.getAllContents();
			}
		};
		Datatype datatype = filter(allContents, Datatype.class).iterator().next();
		IScope scope = scopeProvider.getScope(datatype, IndexTestLanguagePackage.eINSTANCE.getProperty_Type());
		assertNotNull(scope.getSingleElement(new ISelector.SelectByName(nameConverter.toQualifiedName("D"))));
		assertNotNull(scope.getSingleElement(new ISelector.SelectByName(nameConverter.toQualifiedName("E.D"))));
		assertNotNull(scope.getSingleElement(new ISelector.SelectByName(nameConverter.toQualifiedName("A.B.D"))));
	}

	/** Elements in the same resource are visible without any import. */
	public void testLocalElementsNotFromIndex() throws Exception {
		final XtextResource resource = getResource(new StringInputStream("A { " + " B { " + " entity D {}" + " }"
				+ "}" + "E {" + " datatype Context" + "}"), URI.createURI("foo23.indextestlanguage"));
		Iterable<EObject> allContents = new Iterable<EObject>() {
			public Iterator<EObject> iterator() {
				return resource.getAllContents();
			}
		};
		Datatype datatype = filter(allContents, Datatype.class).iterator().next();
		IScope scope = scopeProvider.getScope(datatype, IndexTestLanguagePackage.eINSTANCE.getProperty_Type());
		assertNotNull(scope.getSingleElement(new ISelector.SelectByName(nameConverter.toQualifiedName("A.B.D"))));
	}

	/** A non-wildcard import ("import bar.Bar") makes the single short name visible. */
	public void testImportsWithoutWildcard() throws Exception {
		final XtextResource resource = getResource(new StringInputStream(
				"foo { " + " import bar.Bar" + " entity Foo {" + " }" + "}" + "bar {" + " entity Bar{}" + "}"), URI
				.createURI("withoutwildcard.indextestlanguage"));
		Iterable<EObject> allContents = new Iterable<EObject>() {
			public Iterator<EObject> iterator() {
				return resource.getAllContents();
			}
		};
		Iterator<Entity> iterator = Iterables.filter(allContents, Entity.class).iterator();
		Entity foo = iterator.next();
		assertEquals("Foo", foo.getName());
		IScope scope = scopeProvider.getScope(foo, IndexTestLanguagePackage.eINSTANCE.getProperty_Type());
		assertNotNull(scope.getSingleElement(new ISelector.SelectByName(nameConverter.toQualifiedName("Bar"))));
	}

	/** Imports resolve across two resources in the same resource set. */
	public void testMultipleFiles() throws Exception {
		ResourceSetImpl rs = new ResourceSetImpl();
		final Resource res1 = rs.createResource(URI.createURI("file1.indextestlanguage"));
		Resource res2 = rs.createResource(URI.createURI("file2.indextestlanguage"));
		res1.load(new StringInputStream("foo { " + " import bar.Bar" + " entity Foo {" + " }" + "}"), null);
		res2.load(new StringInputStream("bar {" + " entity Bar{}" + "}"), null);
		Iterable<EObject> allContents = new Iterable<EObject>() {
			public Iterator<EObject> iterator() {
				return res1.getAllContents();
			}
		};
		Iterator<Entity> iterator = Iterables.filter(allContents, Entity.class).iterator();
		Entity foo = iterator.next();
		assertEquals("Foo", foo.getName());
		IScope scope = scopeProvider.getScope(foo, IndexTestLanguagePackage.eINSTANCE.getProperty_Type());
		assertNotNull(scope.getSingleElement(new ISelector.SelectByName(nameConverter.toQualifiedName("Bar"))));
	}

	/** Imports resolve across a directly referenced resource set. */
	public void testResourceSetReferencingResourceSet() throws Exception {
		ResourceSetReferencingResourceSetImpl rs = new ResourceSetReferencingResourceSetImpl();
		Resource res = rs.createResource(URI.createURI("file2.indextestlanguage"));
		res.load(new StringInputStream("bar {" + " entity Bar{}" + "}"), null);
		ResourceSetReferencingResourceSetImpl rs1 = new ResourceSetReferencingResourceSetImpl();
		rs1.getReferencedResourceSets().add(rs);
		final Resource res1 = rs1.createResource(URI.createURI("file1.indextestlanguage"));
		res1.load(new StringInputStream("foo { " + " import bar.Bar" + " entity Foo {" + " }" + "}"), null);
		Iterable<EObject> allContents = new Iterable<EObject>() {
			public Iterator<EObject> iterator() {
				return res1.getAllContents();
			}
		};
		Iterator<Entity> iterator = Iterables.filter(allContents, Entity.class).iterator();
		Entity foo = iterator.next();
		assertEquals("Foo", foo.getName());
		IScope scope = scopeProvider.getScope(foo, IndexTestLanguagePackage.eINSTANCE.getProperty_Type());
		assertNotNull(scope.getSingleElement(new ISelector.SelectByName(nameConverter.toQualifiedName("Bar"))));
		assertNotNull(scope.getSingleElement(new ISelector.SelectByName(nameConverter.toQualifiedName("bar.Bar"))));
	}

	/**
	 * Resource-set references are NOT transitive: rs2 -> rs1 -> rs means rs2
	 * can see rs1's elements but not rs's.
	 */
	public void testResourceSetReferencingResourceSet2() throws Exception {
		ResourceSetReferencingResourceSetImpl rs = new ResourceSetReferencingResourceSetImpl();
		Resource res = rs.createResource(URI.createURI("file2.indextestlanguage"));
		res.load(new StringInputStream("bar {" + " entity Bar{}" + "}"), null);
		ResourceSetReferencingResourceSetImpl rs1 = new ResourceSetReferencingResourceSetImpl();
		rs1.getReferencedResourceSets().add(rs);
		final Resource res1 = rs1.createResource(URI.createURI("file1.indextestlanguage"));
		res1.load(new StringInputStream("foo { " + " import bar.Bar" + " entity Foo {" + " }" + "}"), null);
		ResourceSetReferencingResourceSetImpl rs2 = new ResourceSetReferencingResourceSetImpl();
		rs2.getReferencedResourceSets().add(rs1);
		final Resource res2 = rs2.createResource(URI.createURI("file2.indextestlanguage"));
		res2.load(new StringInputStream("baz {" + " entity Baz{}" + "}"), null);
		Entity baz = getEntityByName(res2,"Baz");
		IScope scope = scopeProvider.getScope(baz, IndexTestLanguagePackage.eINSTANCE.getProperty_Type());
		assertNotNull(scope.getSingleElement(new ISelector.SelectByName(nameConverter.toQualifiedName("foo.Foo"))));
		// Transitively referenced content must NOT be visible.
		assertNull(scope.getSingleElement(new ISelector.SelectByName(nameConverter.toQualifiedName("bar.Bar"))));
	}

	/**
	 * Finds the first {@link Entity} with the given name in the resource, or
	 * null if none exists.
	 */
	protected Entity getEntityByName(final Resource res2, String name) {
		Iterable<EObject> allContents = new Iterable<EObject>() {
			public Iterator<EObject> iterator() {
				return res2.getAllContents();
			}
		};
		Iterable<Entity> iterator = Iterables.filter(allContents, Entity.class);
		for (Entity entity : iterator) {
			if (entity.getName().equals(name))
				return entity;
		}
		return null;
	}

	/**
	 * Collects the distinct qualified names of the given descriptions, sorted
	 * in natural order.
	 */
	protected List<QualifiedName> toListOfNames(Iterable<IEObjectDescription> elements) {
		List<QualifiedName> result = Lists.newArrayList();
		for (IEObjectDescription e : elements) {
			if (!result.contains(e.getName()))
				result.add(e.getName());
		}
		Collections.sort(result);
		return result;
	}
}
package dk.statsbiblioteket.medieplatform.ticketsystem;

import dk.statsbiblioteket.doms.webservices.configuration.ConfigCollection;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.UriInfo;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * JAX-RS webservice for issuing and resolving tickets.
 *
 * <p>The backing {@link TicketSystem} is shared between all instances of this
 * resource class (JAX-RS may create one instance per request) and is
 * initialised lazily, under {@link #lock}, by the first constructor call.
 */
@Path("/tickets/")
public class TicketSystemService {

    /** Shared ticket store; initialised once by the first webservice instance. */
    private static TicketSystem tickets;

    /** Guards lazy initialisation of {@link #tickets}. */
    private static final Object lock = new Object();

    /** Config property holding the ticket time-to-live in milliseconds. */
    private static final String TICKET_TTL_PROP = "dk.statsbiblioteket.ticket-system.timeToLive";

    /** Config property holding the URL of the authorization checker service. */
    private static final String TICKET_AUTH_SERVICE = "dk.statsbiblioteket.ticket-system.auth-checker";

    private final Log log = LogFactory.getLog(TicketSystemService.class);

    /**
     * Creates the webservice object and, on first use, the shared
     * {@link TicketSystem} configured from {@link ConfigCollection}.
     *
     * @throws BackendException if the ticket system backend cannot be created
     */
    public TicketSystemService() throws BackendException {
        log.trace("Created a new TicketSystem webservice object");
        synchronized (lock) {
            if (tickets == null) {
                long ttl;
                try {
                    String ttlString = ConfigCollection.getProperties()
                            .getProperty(TICKET_TTL_PROP, "" + 30 * 1000);
                    log.trace("Read '" + TICKET_TTL_PROP + "' property as '" + ttlString + "'");
                    ttl = Long.parseLong(ttlString);
                } catch (NumberFormatException e) {
                    // Fall back to the 30 second default rather than failing startup.
                    log.warn("Could not parse the '" + TICKET_TTL_PROP
                            + "' as a long, using default 30 sec timetolive", e);
                    ttl = 30 * 1000;
                }
                String authService = ConfigCollection.getProperties().getProperty(TICKET_AUTH_SERVICE);
                Authorization authorization = new Authorization(authService);
                tickets = new TicketSystem(ttl, authorization);
            }
        }
    }

    /* Issuing of tickets */

    /**
     * GET variant of ticket issuing; delegates to
     * {@link #issueTicketQueryParams(List, String, String, UriInfo)}.
     *
     * @param id             the resource ids to issue a ticket for
     * @param type           the ticket type
     * @param userIdentifier identifier of the requesting user
     * @param uriInfo        injected request info; extra query parameters become user attributes
     * @return map from resource id to the issued ticket id
     * @throws MissingArgumentException if id, type or userIdentifier is absent
     */
    @GET
    @Path("issueTicket")
    @Produces({MediaType.APPLICATION_JSON})
    public Map<String, String> issueTicketGet(
            @QueryParam("id") List<String> id,
            @QueryParam("type") String type,
            @QueryParam("userIdentifier") String userIdentifier,
            @Context UriInfo uriInfo) throws MissingArgumentException {
        return issueTicketQueryParams(id, type, userIdentifier, uriInfo);
    }

    /**
     * Issues a ticket for the given resources. All query parameters other than
     * {@code id}, {@code type} and {@code userIdentifier} are forwarded to the
     * ticket system as user attributes.
     *
     * @param resources      the resource ids to issue a ticket for
     * @param type           the ticket type
     * @param userIdentifier identifier of the requesting user
     * @param uriInfo        injected request info; source of the extra attributes
     * @return map from resource id to the issued ticket id
     * @throws MissingArgumentException if resources, type or userIdentifier is absent
     */
    @POST
    @Path("issueTicket")
    @Produces({MediaType.APPLICATION_JSON})
    public Map<String, String> issueTicketQueryParams(
            @QueryParam("id") List<String> resources,
            @QueryParam("type") String type,
            @QueryParam("userIdentifier") String userIdentifier,
            @Context UriInfo uriInfo) throws MissingArgumentException {
        // Validate required arguments before doing any other work.
        if (resources == null) {
            throw new MissingArgumentException("id is missing");
        }
        if (type == null) {
            throw new MissingArgumentException("type is missing");
        }
        if (userIdentifier == null) {
            throw new MissingArgumentException("userIdentifier is missing");
        }
        // The JAX-RS spec defines the map returned by getQueryParameters() as
        // unmodifiable, so calling remove() on it may throw
        // UnsupportedOperationException. Copy the parameters, skipping the
        // reserved keys, instead of mutating the map in place.
        MultivaluedMap<String, String> queryParams = uriInfo.getQueryParameters();
        Map<String, List<String>> userAttributes = new HashMap<String, List<String>>();
        for (String key : queryParams.keySet()) {
            if ("id".equals(key) || "type".equals(key) || "userIdentifier".equals(key)) {
                continue; // already consumed as explicit arguments
            }
            List<String> values = queryParams.get(key);
            if (values != null && values.size() > 0) {
                userAttributes.put(key, values);
            }
        }
        HashMap<String, String> ticketMap = new HashMap<String, String>();
        Ticket ticket = tickets.issueTicket(resources, type, userIdentifier, userAttributes);
        // One shared ticket id is returned for every requested resource.
        for (String resource : ticket.getResources()) {
            ticketMap.put(resource, ticket.getID());
        }
        log.debug("Issued ticket: " + ticket);
        return ticketMap;
    }

    /* Resolving of tickets */

    /**
     * Resolves a ticket by id, supplied as a query parameter.
     *
     * @param ID the ticket id
     * @return the ticket
     * @throws TicketNotFoundException if no ticket with that id exists
     */
    @GET
    @Path("resolveTicket")
    @Produces({MediaType.APPLICATION_JSON})
    public Ticket resolveTicket(@QueryParam("ID") String ID) throws TicketNotFoundException {
        log.trace("Entered resolveTicket with param ID='" + ID + "'");
        Ticket ticket = tickets.getTicketFromID(ID);
        if (ticket == null) {
            throw new TicketNotFoundException("The ticket ID '" + ID + "' was not found in the system");
        }
        log.trace("Found ticket='" + ticket.getID() + "'");
        return ticket;
    }

    /**
     * Resolves a ticket by id, supplied as a path parameter; delegates to
     * {@link #resolveTicket(String)}.
     *
     * @param ID the ticket id
     * @return the ticket
     * @throws TicketNotFoundException if no ticket with that id exists
     */
    @GET
    @Path("resolveTicket/{ID}")
    @Produces({MediaType.APPLICATION_JSON})
    public Ticket resolveTicketAlt(@PathParam("ID") String ID) throws TicketNotFoundException {
        return resolveTicket(ID);
    }
}
package com.vaadin.components.grid.selection; import com.google.gwt.core.client.JavaScriptObject; import com.google.gwt.dom.client.Document; import com.google.gwt.dom.client.InputElement; import com.google.gwt.dom.client.LabelElement; import com.google.gwt.query.client.js.JsUtils; import com.google.gwt.user.client.DOM; import com.vaadin.client.data.DataSource.RowHandle; import com.vaadin.client.renderers.Renderer; import com.vaadin.client.widget.grid.RendererCellReference; import com.vaadin.client.widget.grid.events.SelectAllEvent; import com.vaadin.client.widget.grid.selection.MultiSelectionRenderer; import com.vaadin.client.widget.grid.selection.SelectionEvent; import com.vaadin.client.widget.grid.selection.SelectionModelMulti; import com.vaadin.client.widgets.Grid; import com.vaadin.components.common.js.JS; import com.vaadin.components.common.js.JSArray; import com.vaadin.components.common.js.JSValidate; /** * An {@link IndexBasedSelectionModel} for multiple selection. */ public class IndexBasedSelectionModelMulti extends SelectionModelMulti<Object> implements IndexBasedSelectionModel { private Renderer<Boolean> renderer; protected Grid<Object> grid; private boolean allowSelection = true; private final JSArray<Double> indexes = JS.createArray(); private boolean invertedSelection = false; private boolean dataSizeUpdated = false; @Override public void setGrid(Grid<Object> grid) { super.setGrid(grid); this.grid = grid; renderer = new MultiSelectionRenderer<Object>(grid) { @Override public void init(RendererCellReference cell) { InputElement checkbox = Document.get() .createCheckInputElement(); LabelElement label = Document.get().createLabelElement(); checkbox.setId(DOM.createUniqueId()); checkbox.setTabIndex(-1); label.setHtmlFor(checkbox.getId()); cell.getElement().removeAllChildren(); cell.getElement().appendChild(checkbox); cell.getElement().appendChild(label); checkbox.addClassName("v-grid style-scope"); label.addClassName("v-grid style-scope"); } @Override 
protected void setSelected(int logicalRow, boolean select) { if (allowSelection) { super.setSelected(logicalRow, select); allowSelection = false; } } }; } @Override public Renderer<Boolean> getSelectionColumnRenderer() { return renderer; } @Override public void startBatchSelect() { allowSelection = true; } @Override public void commitBatchSelect() { allowSelection = true; } public boolean isIndeterminate() { return (size() > 0) ? size() != grid.getDataSource().size() : false; } public boolean isChecked() { return (size() > 0) ? size() == grid.getDataSource().size() : false; } public IndexBasedSelectionModelMulti(boolean invertedSelection) { this.invertedSelection = invertedSelection; } @Override protected boolean selectByHandle(RowHandle<Object> handle) { return select(SelectionUtil.getRowIndex(grid, handle), true); } @Override protected boolean deselectByHandle(RowHandle<Object> handle) { return deselect(SelectionUtil.getRowIndex(grid, handle), true); } @Override public boolean isSelected(Object row) { return invertedSelection ? 
indexes.indexOf((double) SelectionUtil .getRowIndexByRow(grid, row)) == -1 : indexes.indexOf((double) SelectionUtil.getRowIndexByRow(grid, row)) != -1; } @Override public void reset() { invertedSelection = false; indexes.setLength(0); grid.fireEvent(new SelectionEvent<Object>(grid, null, null, true)); } @Override public JSArray<Object> selected(JavaScriptObject mapper, Integer from, Integer to) { JSArray result = JS.createArray(); mapper = SelectionUtil.verifyMapper(mapper); if (invertedSelection) { int size = size(); int fromIndex = JSValidate.Integer.val(from, 0, 0); fromIndex = Math.min(Math.max(fromIndex, 0), size - 1); int defaultTo = size() - 1; int toIndex = JSValidate.Integer.val(to, defaultTo, defaultTo); toIndex = Math.min(Math.max(toIndex, 0), size - 1); int count = toIndex - fromIndex + 1; int index = 0; int selectedIndexCount = 0; int addedSelectedIndexCount = 0; while (addedSelectedIndexCount < count) { if (indexes.indexOf((double) index) == -1) { if (selectedIndexCount++ >= fromIndex) { addedSelectedIndexCount++; Object mappedValue = JsUtils.jsni(mapper, "call", mapper, index); if (mappedValue != null) { result.add(mappedValue); } } } index++; } } else { int fromIndex = JSValidate.Integer.val(from, 0, 0); fromIndex = Math.min(fromIndex, indexes.length() - 1); int defaultTo = indexes.length() - 1; int toIndex = JSValidate.Integer.val(to, defaultTo, defaultTo); toIndex = Math.min(toIndex, indexes.length() - 1); for (int i = fromIndex; i <= toIndex; i++) { Object mappedValue = JsUtils.jsni(mapper, "call", mapper, indexes.get(i)); if (mappedValue != null) { result.add(mappedValue); } } } return result; } @Override public JSArray<Object> deselected(JavaScriptObject mapper, Integer from, Integer to) { if (invertedSelection) { JSArray result = JS.createArray(); mapper = SelectionUtil.verifyMapper(mapper); int fromIndex = JSValidate.Integer.val(from, 0, 0); fromIndex = Math.min(fromIndex, indexes.length() - 1); int defaultTo = indexes.length() - 1; int 
toIndex = JSValidate.Integer.val(to, defaultTo, defaultTo); toIndex = Math.min(toIndex, indexes.length() - 1); for (int i = fromIndex; i <= toIndex; i++) { Object mappedValue = JsUtils.jsni(mapper, "call", mapper, indexes.get(i)); if (mappedValue != null) { result.add(mappedValue); } } return result; } else { return JS.createArray(); } } @Override public int size() { return invertedSelection ? grid.getDataSource().size() - indexes.length() : indexes.length(); } @Override public boolean select(int index, boolean skipOwnEvents) { if (invertedSelection) { return removeIndex(index, skipOwnEvents); } else { return addIndex(index, skipOwnEvents); } } @Override public boolean deselect(int index, boolean skipOwnEvents) { if (invertedSelection) { return addIndex(index, skipOwnEvents); } else { return removeIndex(index, skipOwnEvents); } } private boolean addIndex(int index, boolean skipOwnEvents) { if (index >= 0 && (!dataSizeUpdated || index < grid.getDataSource().size()) && indexes.indexOf((double) index) == -1) { indexes.add((double) index); skipOwnEvents = JSValidate.Boolean.val(skipOwnEvents, false, false); if (!skipOwnEvents) { grid.fireEvent(new SelectionEvent<Object>(grid, null, null, false)); } if (invertedSelection && size() == 0) { clear(); return false; } else if (!invertedSelection && isChecked()) { selectAll(); return false; } return true; } return false; } private boolean removeIndex(int index, boolean skipOwnEvents) { if (indexes.indexOf((double) index) != -1) { indexes.remove((double) index); skipOwnEvents = JSValidate.Boolean.val(skipOwnEvents, false, false); if (!skipOwnEvents) { grid.fireEvent(new SelectionEvent<Object>(grid, null, null, false)); } return true; } return false; } @Override public void clear() { reset(); } @Override public void selectAll() { indexes.setLength(0); invertedSelection = true; grid.fireEvent(new SelectionEvent<Object>(grid, null, null, true)); } @Override public boolean deselectAll() { indexes.setLength(0); invertedSelection = 
false; grid.fireEvent(new SelectAllEvent<Object>(this)); return true; } @Override public IndexBasedSelectionMode getMode() { return invertedSelection ? IndexBasedSelectionMode.ALL : IndexBasedSelectionMode.MULTI; } @Override public void dataSizeUpdated(int newSize) { dataSizeUpdated = true; // If row indexes contain values that are out of bounds, remove // them. boolean changed = false; for (int i = 0; i < indexes.length(); i++) { if (indexes.get(i) >= newSize) { indexes.remove(indexes.get(i changed = true; } } if (changed) { grid.fireEvent(new SelectionEvent<Object>(grid, null, null, true)); } } }
package won.protocol.message.processor.impl;

import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.rdf.model.AnonId;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.Resource;
import de.uni_koblenz.aggrimm.icp.crypto.sign.algorithm.algorithm.SignatureAlgorithmFisteus2010;
import won.cryptography.rdfsign.*;
import won.protocol.message.SignatureReference;
import won.protocol.message.WonMessage;
import won.protocol.vocabulary.WONMSG;

import java.security.PrivateKey;
import java.security.PublicKey;
import java.util.List;
import java.util.Map;

/**
 * Static helpers for signing and verifying {@link WonMessage} datasets.
 *
 * <p>Signing first attaches any signatures that no envelope references yet,
 * then signs the unsigned content graphs, and finally signs the unsigned
 * envelope graphs from the innermost outward, chaining each envelope's
 * signature reference into the next envelope.
 */
public class WonMessageSignerVerifier {

    /**
     * Signs the message's unsigned content and envelope graphs with the given key
     * and returns a new {@link WonMessage} wrapping the signed dataset.
     *
     * @param privateKey    key used to produce the signatures
     * @param privateKeyUri URI identifying the key in the signature graphs
     * @param message       the message to sign
     * @return a new message built from the signed dataset
     * @throws Exception if signing fails
     */
    public static WonMessage sign(PrivateKey privateKey, String privateKeyUri, WonMessage message)
            throws Exception {
        Dataset dataset = message.getCompleteDataset();
        SigningStage stage = new SigningStage(message);
        addUnreferencedSigReferences(dataset, stage);
        WonSigner signer = new WonSigner(dataset, new SignatureAlgorithmFisteus2010());
        signContents(dataset, stage, signer, privateKey, privateKeyUri);
        signEnvelopes(dataset, stage, signer, privateKey, privateKeyUri);
        return new WonMessage(dataset);
    }

    /**
     * Signs each unsigned envelope graph (ordered innermost-first) and records
     * the signature reference of every signed envelope inside the envelope
     * that contains it.
     */
    private static void signEnvelopes(final Dataset dataset, final SigningStage stage,
            final WonSigner signer, final PrivateKey key, final String keyUri) throws Exception {
        SignatureReference previousRef = null;
        for (String envelopeUri : stage.getUnsignedEnvUrisOrderedByContainment()) {
            // The reference produced for the previous (inner) envelope is
            // attached to the current (outer) one before signing it.
            if (previousRef != null) {
                addSignatureReference(stage.getMessageUri(envelopeUri), previousRef, envelopeUri, dataset);
            }
            previousRef = signer.sign(key, keyUri, envelopeUri).get(0);
        }
    }

    /**
     * Adds a signature-reference blank node to the given envelope graph,
     * linking the message event resource to the signature graph, the signed
     * graph and the signature value described by {@code sigRef}.
     *
     * @param msgUri     URI of the message event resource within the envelope
     * @param sigRef     the signature to reference
     * @param envUri     URI of the envelope graph to modify
     * @param msgDataset dataset containing the envelope graph
     */
    public static void addSignatureReference(final String msgUri, final SignatureReference sigRef,
            final String envUri, final Dataset msgDataset) {
        Model envelope = msgDataset.getNamedModel(envUri);
        Resource refNode = envelope.createResource(AnonId.create());
        refNode.addProperty(WONMSG.HAS_SIGNATURE_GRAPH_PROPERTY,
                envelope.createResource(sigRef.getSignatureGraphUri()));
        refNode.addProperty(WONMSG.HAS_SIGNED_GRAPH_PROPERTY,
                envelope.createResource(sigRef.getSignedGraphUri()));
        refNode.addProperty(WONMSG.HAS_SIGNATURE_VALUE_PROPERTY,
                envelope.createLiteral(sigRef.getSignatureValue()));
        envelope.createResource(msgUri).addProperty(WONMSG.REFERENCES_SIGNATURE_PROPERTY, refNode);
    }

    /**
     * Signs all unsigned content graphs and records each resulting signature
     * reference inside the envelope graph that contains the signed content.
     */
    private static void signContents(final Dataset dataset, final SigningStage stage,
            final WonSigner signer, final PrivateKey key, final String keyUri) throws Exception {
        for (SignatureReference ref : signer.sign(key, keyUri, stage.getUnsignedContentUris())) {
            String envelopeUri = stage.getEnvelopeUriContainingContent(ref.getSignedGraphUri());
            addSignatureReference(stage.getMessageUri(envelopeUri), ref, envelopeUri, dataset);
        }
    }

    /**
     * Attaches signatures that no envelope references yet to the innermost
     * unsigned envelope graph of the dataset. Does nothing when every envelope
     * is already signed.
     */
    private static void addUnreferencedSigReferences(final Dataset dataset, final SigningStage stage) {
        List<String> unsignedEnvelopes = stage.getUnsignedEnvUrisOrderedByContainment();
        if (unsignedEnvelopes.isEmpty()) {
            return;
        }
        // Innermost-first ordering: element 0 is the innermost unsigned envelope.
        String innermostUnsignedEnvUri = unsignedEnvelopes.get(0);
        for (SignatureReference ref : stage.getNotReferencedSignaturesAsReferences()) {
            addSignatureReference(stage.getMessageUri(innermostUnsignedEnvUri), ref,
                    innermostUnsignedEnvUri, dataset);
        }
    }

    /**
     * Verifies all signatures of the message against the given public keys.
     *
     * @param keys    public keys indexed by key URI
     * @param message the message to verify
     * @return the verification result produced by the verifier
     * @throws Exception if verification fails to run
     */
    public static SignatureVerificationResult verify(Map<String, PublicKey> keys, WonMessage message)
            throws Exception {
        WonVerifier verifier = new WonVerifier(message.getCompleteDataset());
        verifier.verify(keys);
        return verifier.getVerificationResult();
    }
}
package org.elasticsearch.xpack.ilm; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xpack.core.ilm.AllocateAction; import org.elasticsearch.xpack.core.ilm.DeleteAction; import org.elasticsearch.xpack.core.ilm.ErrorStep; import org.elasticsearch.xpack.core.ilm.ForceMergeAction; import org.elasticsearch.xpack.core.ilm.FreezeAction; import org.elasticsearch.xpack.core.ilm.InitializePolicyContextStep; import org.elasticsearch.xpack.core.ilm.LifecycleAction; import org.elasticsearch.xpack.core.ilm.LifecyclePolicy; import org.elasticsearch.xpack.core.ilm.LifecycleSettings; import org.elasticsearch.xpack.core.ilm.Phase; import org.elasticsearch.xpack.core.ilm.ReadOnlyAction; import org.elasticsearch.xpack.core.ilm.RolloverAction; import org.elasticsearch.xpack.core.ilm.SetPriorityAction; import org.elasticsearch.xpack.core.ilm.ShrinkAction; import org.elasticsearch.xpack.core.ilm.ShrinkStep; import org.elasticsearch.xpack.core.ilm.Step; import org.elasticsearch.xpack.core.ilm.Step.StepKey; import org.elasticsearch.xpack.core.ilm.TerminalPolicyStep; import 
org.elasticsearch.xpack.core.ilm.UpdateRolloverLifecycleDateStep; import org.elasticsearch.xpack.core.ilm.WaitForActiveShardsStep; import org.elasticsearch.xpack.core.ilm.WaitForRolloverReadyStep; import org.elasticsearch.xpack.core.ilm.WaitForSnapshotAction; import org.hamcrest.Matchers; import org.junit.Before; import java.io.IOException; import java.io.InputStream; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.function.Supplier; import static java.util.Collections.singletonMap; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; public class TimeSeriesLifecycleActionsIT extends ESRestTestCase { private static final Logger logger = LogManager.getLogger(TimeSeriesLifecycleActionsIT.class); private static final String FAILED_STEP_RETRY_COUNT_FIELD = "failed_step_retry_count"; private static final String IS_AUTO_RETRYABLE_ERROR_FIELD = "is_auto_retryable_error"; private String index; private String policy; private String alias; @Before public void refreshIndex() { index = "index-" + randomAlphaOfLength(10).toLowerCase(Locale.ROOT); policy = "policy-" + randomAlphaOfLength(5); alias = "alias-" + randomAlphaOfLength(5); } public static void updatePolicy(String indexName, String policy) throws IOException { Request changePolicyRequest = new Request("PUT", "/" + indexName + "/_settings"); final StringEntity changePolicyEntity = new StringEntity("{ \"index.lifecycle.name\": \"" + policy + "\" }", 
ContentType.APPLICATION_JSON); changePolicyRequest.setEntity(changePolicyEntity); assertOK(client().performRequest(changePolicyRequest)); } public void testFullPolicy() throws Exception { String originalIndex = index + "-000001"; String shrunkenOriginalIndex = ShrinkAction.SHRUNKEN_INDEX_PREFIX + originalIndex; String secondIndex = index + "-000002"; createIndexWithSettings(originalIndex, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 4) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) .put("index.routing.allocation.include._name", "integTest-0") .put(RolloverAction.LIFECYCLE_ROLLOVER_ALIAS, alias)); // create policy createFullPolicy(TimeValue.ZERO); // update policy on index updatePolicy(originalIndex, policy); // index document {"foo": "bar"} to trigger rollover index(client(), originalIndex, "_id", "foo", "bar"); /* * These asserts are in the order that they should be satisfied in, in * order to maximize the time for all operations to complete. * An "out of order" assert here may result in this test occasionally * timing out and failing inappropriately. 
*/ // asserts that rollover was called assertBusy(() -> assertTrue(indexExists(secondIndex))); // asserts that shrink deleted the original index assertBusy(() -> assertFalse(indexExists(originalIndex)), 60, TimeUnit.SECONDS); // asserts that the delete phase completed for the managed shrunken index assertBusy(() -> assertFalse(indexExists(shrunkenOriginalIndex))); } public void testMoveToAllocateStep() throws Exception { String originalIndex = index + "-000001"; createIndexWithSettings(originalIndex, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 4) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) .put("index.routing.allocation.include._name", "integTest-0") .put(RolloverAction.LIFECYCLE_ROLLOVER_ALIAS, "alias")); // create policy createFullPolicy(TimeValue.timeValueHours(10)); // update policy on index updatePolicy(originalIndex, policy); // move to a step Request moveToStepRequest = new Request("POST", "_ilm/move/" + originalIndex); assertBusy(() -> assertTrue(getStepKeyForIndex(originalIndex).equals(new StepKey("new", "complete", "complete")))); moveToStepRequest.setJsonEntity("{\n" + " \"current_step\": {\n" + " \"phase\": \"new\",\n" + " \"action\": \"complete\",\n" + " \"name\": \"complete\"\n" + " },\n" + " \"next_step\": {\n" + " \"phase\": \"cold\",\n" + " \"action\": \"allocate\",\n" + " \"name\": \"allocate\"\n" + " }\n" + "}"); client().performRequest(moveToStepRequest); assertBusy(() -> assertFalse(indexExists(originalIndex))); } public void testMoveToRolloverStep() throws Exception { String originalIndex = index + "-000001"; String shrunkenOriginalIndex = ShrinkAction.SHRUNKEN_INDEX_PREFIX + originalIndex; String secondIndex = index + "-000002"; createIndexWithSettings(originalIndex, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 4) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) .put("index.routing.allocation.include._name", "integTest-0") .put(RolloverAction.LIFECYCLE_ROLLOVER_ALIAS, alias)); 
createFullPolicy(TimeValue.timeValueHours(10)); // update policy on index updatePolicy(originalIndex, policy); // move to a step Request moveToStepRequest = new Request("POST", "_ilm/move/" + originalIndex); // index document to trigger rollover index(client(), originalIndex, "_id", "foo", "bar"); logger.info(getStepKeyForIndex(originalIndex)); moveToStepRequest.setJsonEntity("{\n" + " \"current_step\": {\n" + " \"phase\": \"new\",\n" + " \"action\": \"complete\",\n" + " \"name\": \"complete\"\n" + " },\n" + " \"next_step\": {\n" + " \"phase\": \"hot\",\n" + " \"action\": \"rollover\",\n" + " \"name\": \"attempt-rollover\"\n" + " }\n" + "}"); client().performRequest(moveToStepRequest); /* * These asserts are in the order that they should be satisfied in, in * order to maximize the time for all operations to complete. * An "out of order" assert here may result in this test occasionally * timing out and failing inappropriately. */ // asserts that rollover was called assertBusy(() -> assertTrue(indexExists(secondIndex))); // asserts that shrink deleted the original index assertBusy(() -> assertFalse(indexExists(originalIndex)), 30, TimeUnit.SECONDS); // asserts that the delete phase completed for the managed shrunken index assertBusy(() -> assertFalse(indexExists(shrunkenOriginalIndex))); } public void testRetryFailedShrinkAction() throws Exception { int numShards = 4; int divisor = randomFrom(2, 4); int expectedFinalShards = numShards / divisor; String shrunkenIndex = ShrinkAction.SHRUNKEN_INDEX_PREFIX + index; createIndexWithSettings(index, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numShards) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)); createNewSingletonPolicy("warm", new ShrinkAction(numShards + randomIntBetween(1, numShards))); updatePolicy(index, policy); assertBusy(() -> { String failedStep = getFailedStepForIndex(index); assertThat(failedStep, equalTo(ShrinkStep.NAME)); }); // update policy to be correct 
createNewSingletonPolicy("warm", new ShrinkAction(expectedFinalShards)); updatePolicy(index, policy); // retry step Request retryRequest = new Request("POST", index + "/_ilm/retry"); assertOK(client().performRequest(retryRequest)); // assert corrected policy is picked up and index is shrunken assertBusy(() -> assertTrue(indexExists(shrunkenIndex)), 30, TimeUnit.SECONDS); assertBusy(() -> assertTrue(aliasExists(shrunkenIndex, index))); assertBusy(() -> assertThat(getStepKeyForIndex(shrunkenIndex), equalTo(TerminalPolicyStep.KEY))); assertBusy(() -> { Map<String, Object> settings = getOnlyIndexSettings(shrunkenIndex); assertThat(settings.get(IndexMetaData.SETTING_NUMBER_OF_SHARDS), equalTo(String.valueOf(expectedFinalShards))); assertThat(settings.get(IndexMetaData.INDEX_BLOCKS_WRITE_SETTING.getKey()), equalTo("true")); assertThat(settings.get(IndexMetaData.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + "_id"), nullValue()); }); expectThrows(ResponseException.class, this::indexDocument); } public void testRolloverAction() throws Exception { String originalIndex = index + "-000001"; String secondIndex = index + "-000002"; createIndexWithSettings(originalIndex, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) .put(RolloverAction.LIFECYCLE_ROLLOVER_ALIAS, alias)); // create policy createNewSingletonPolicy("hot", new RolloverAction(null, null, 1L)); // update policy on index updatePolicy(originalIndex, policy); // index document {"foo": "bar"} to trigger rollover index(client(), originalIndex, "_id", "foo", "bar"); assertBusy(() -> assertTrue(indexExists(secondIndex))); assertBusy(() -> assertTrue(indexExists(originalIndex))); assertBusy(() -> assertEquals("true", getOnlyIndexSettings(originalIndex).get(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE))); } public void testRolloverActionWithIndexingComplete() throws Exception { String originalIndex = index + "-000001"; String secondIndex = index + "-000002"; 
createIndexWithSettings(originalIndex, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) .put(RolloverAction.LIFECYCLE_ROLLOVER_ALIAS, alias)); Request updateSettingsRequest = new Request("PUT", "/" + originalIndex + "/_settings"); updateSettingsRequest.setJsonEntity("{\n" + " \"settings\": {\n" + " \"" + LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE + "\": true\n" + " }\n" + "}"); client().performRequest(updateSettingsRequest); Request updateAliasRequest = new Request("POST", "/_aliases"); updateAliasRequest.setJsonEntity("{\n" + " \"actions\": [\n" + " {\n" + " \"add\": {\n" + " \"index\": \"" + originalIndex + "\",\n" + " \"alias\": \"" + alias + "\",\n" + " \"is_write_index\": false\n" + " }\n" + " }\n" + " ]\n" + "}"); client().performRequest(updateAliasRequest); // create policy createNewSingletonPolicy("hot", new RolloverAction(null, null, 1L)); // update policy on index updatePolicy(originalIndex, policy); // index document {"foo": "bar"} to trigger rollover index(client(), originalIndex, "_id", "foo", "bar"); assertBusy(() -> assertEquals(TerminalPolicyStep.KEY, getStepKeyForIndex(originalIndex))); assertBusy(() -> assertTrue(indexExists(originalIndex))); assertBusy(() -> assertFalse(indexExists(secondIndex))); assertBusy(() -> assertEquals("true", getOnlyIndexSettings(originalIndex).get(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE))); } public void testAllocateOnlyAllocation() throws Exception { createIndexWithSettings(index, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 2) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)); String allocateNodeName = "integTest-" + randomFrom(0, 1); AllocateAction allocateAction = new AllocateAction(null, null, null, singletonMap("_name", allocateNodeName)); createNewSingletonPolicy(randomFrom("warm", "cold"), allocateAction); updatePolicy(index, policy); assertBusy(() -> { assertThat(getStepKeyForIndex(index), 
equalTo(TerminalPolicyStep.KEY)); }); ensureGreen(index); } public void testAllocateActionOnlyReplicas() throws Exception { int numShards = randomFrom(1, 5); int numReplicas = randomFrom(0, 1); int finalNumReplicas = (numReplicas + 1) % 2; createIndexWithSettings(index, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numShards) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, numReplicas)); AllocateAction allocateAction = new AllocateAction(finalNumReplicas, null, null, null); createNewSingletonPolicy(randomFrom("warm", "cold"), allocateAction); updatePolicy(index, policy); assertBusy(() -> { Map<String, Object> settings = getOnlyIndexSettings(index); assertThat(getStepKeyForIndex(index), equalTo(TerminalPolicyStep.KEY)); assertThat(settings.get(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey()), equalTo(String.valueOf(finalNumReplicas))); }); } @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/50781") public void testWaitForSnapshot() throws Exception { createIndexWithSettings(index, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)); String smlPolicy = randomAlphaOfLengthBetween(4, 10); createNewSingletonPolicy("delete", new WaitForSnapshotAction(smlPolicy)); updatePolicy(index, policy); assertBusy(() -> assertThat(getStepKeyForIndex(index).getAction(), equalTo("wait_for_snapshot"))); assertBusy(() -> assertThat(getFailedStepForIndex(index), equalTo("wait-for-snapshot"))); String repo = createSnapshotRepo(); createSlmPolicy(smlPolicy, repo); assertBusy(() -> assertThat(getStepKeyForIndex(index).getAction(), equalTo("wait_for_snapshot"))); Request request = new Request("PUT", "/_slm/policy/" + smlPolicy + "/_execute"); assertOK(client().performRequest(request)); assertBusy(() -> assertThat(getStepKeyForIndex(index).getAction(), equalTo("completed")), 2, TimeUnit.MINUTES); request = new Request("DELETE", "/_slm/policy/" + smlPolicy); 
assertOK(client().performRequest(request)); request = new Request("DELETE", "/_snapshot/" + repo); assertOK(client().performRequest(request)); } @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/50781") public void testWaitForSnapshotSlmExecutedBefore() throws Exception { createIndexWithSettings(index, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)); String smlPolicy = randomAlphaOfLengthBetween(4, 10); createNewSingletonPolicy("delete", new WaitForSnapshotAction(smlPolicy)); String repo = createSnapshotRepo(); createSlmPolicy(smlPolicy, repo); Request request = new Request("PUT", "/_slm/policy/" + smlPolicy + "/_execute"); assertOK(client().performRequest(request)); updatePolicy(index, policy); assertBusy(() -> assertThat(getStepKeyForIndex(index).getAction(), equalTo("wait_for_snapshot"))); assertBusy(() -> assertThat(getStepKeyForIndex(index).getName(), equalTo("wait-for-snapshot"))); request = new Request("PUT", "/_slm/policy/" + smlPolicy + "/_execute"); assertOK(client().performRequest(request)); request = new Request("PUT", "/_slm/policy/" + smlPolicy + "/_execute"); assertOK(client().performRequest(request)); assertBusy(() -> assertThat(getStepKeyForIndex(index).getAction(), equalTo("completed")), 2, TimeUnit.MINUTES); request = new Request("DELETE", "/_slm/policy/" + smlPolicy); assertOK(client().performRequest(request)); request = new Request("DELETE", "/_snapshot/" + repo); assertOK(client().performRequest(request)); } public void testDelete() throws Exception { createIndexWithSettings(index, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)); createNewSingletonPolicy("delete", new DeleteAction()); updatePolicy(index, policy); assertBusy(() -> assertFalse(indexExists(index))); } public void testDeleteOnlyShouldNotMakeIndexReadonly() throws Exception { createIndexWithSettings(index, 
            Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0));
        // delete only after an hour, so the index is still waiting in the phase during the test
        createNewSingletonPolicy("delete", new DeleteAction(), TimeValue.timeValueHours(1));
        updatePolicy(index, policy);
        assertBusy(() -> {
            assertThat(getStepKeyForIndex(index).getAction(), equalTo("complete"));
            Map<String, Object> settings = getOnlyIndexSettings(index);
            assertThat(settings.get(IndexMetaData.INDEX_BLOCKS_WRITE_SETTING.getKey()), not("true"));
        });
        // the index must still accept writes while the delete is pending
        indexDocument();
    }

    // Verifies a delete action succeeds while the index is part of an in-progress snapshot.
    public void testDeleteDuringSnapshot() throws Exception {
        // Create the repository before taking the snapshot.
        Request request = new Request("PUT", "/_snapshot/repo");
        request.setJsonEntity(Strings
            .toString(JsonXContent.contentBuilder()
                .startObject()
                .field("type", "fs")
                .startObject("settings")
                .field("compress", randomBoolean())
                .field("location", System.getProperty("tests.path.repo"))
                // throttle the snapshot so it is still running when ILM deletes the index
                .field("max_snapshot_bytes_per_sec", "256b")
                .endObject()
                .endObject()));
        assertOK(client().performRequest(request));
        // create delete policy
        createNewSingletonPolicy("delete", new DeleteAction(), TimeValue.timeValueMillis(0));
        // create index without policy
        createIndexWithSettings(index, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0));
        // index document so snapshot actually does something
        indexDocument();
        // start snapshot
        String snapName = "snapshot-" + randomAlphaOfLength(6).toLowerCase(Locale.ROOT);
        request = new Request("PUT", "/_snapshot/repo/" + snapName);
        request.addParameter("wait_for_completion", "false");
        request.setJsonEntity("{\"indices\": \"" + index + "\"}");
        assertOK(client().performRequest(request));
        // add policy and expect it to trigger delete immediately (while snapshot in progress)
        updatePolicy(index, policy);
        // assert that index was deleted
        assertBusy(() -> assertFalse(indexExists(index)), 2, TimeUnit.MINUTES);
        // assert that the snapshot eventually succeeds anyway, then clean it up
        assertThat(getSnapshotState(snapName), equalTo("SUCCESS"));
        assertOK(client().performRequest(new Request("DELETE", "/_snapshot/repo/" + snapName)));
    }

    // Verifies a read-only action applies the write block to the index.
    public void testReadOnly() throws Exception {
        createIndexWithSettings(index, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0));
        createNewSingletonPolicy("warm", new ReadOnlyAction());
        updatePolicy(index, policy);
        assertBusy(() -> {
            Map<String, Object> settings = getOnlyIndexSettings(index);
            assertThat(getStepKeyForIndex(index), equalTo(TerminalPolicyStep.KEY));
            assertThat(settings.get(IndexMetaData.INDEX_BLOCKS_WRITE_SETTING.getKey()), equalTo("true"));
        });
    }

    // Verifies a force-merge action merges the index down to one segment and applies a write block.
    @SuppressWarnings("unchecked")
    public void testForceMergeAction() throws Exception {
        createIndexWithSettings(index, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0));
        // index each doc with refresh=true so every document produces its own searchable segment
        // NOTE(review): the loop bound is re-evaluated (re-randomized) on every iteration; presumably
        // intended to be drawn once before the loop — confirm before relying on the document count
        for (int i = 0; i < randomIntBetween(2, 10); i++) {
            Request request = new Request("PUT", index + "/_doc/" + i);
            request.addParameter("refresh", "true");
            request.setEntity(new StringEntity("{\"a\": \"test\"}", ContentType.APPLICATION_JSON));
            client().performRequest(request);
        }
        // reads num_search_segments of shard "0" from the _segments API response
        Supplier<Integer> numSegments = () -> {
            try {
                Map<String, Object> segmentResponse = getAsMap(index + "/_segments");
                segmentResponse = (Map<String, Object>) segmentResponse.get("indices");
                segmentResponse = (Map<String, Object>) segmentResponse.get(index);
                segmentResponse = (Map<String, Object>) segmentResponse.get("shards");
                List<Map<String, Object>> shards = (List<Map<String, Object>>) segmentResponse.get("0");
                return (Integer) shards.get(0).get("num_search_segments");
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        };
        assertThat(numSegments.get(), greaterThan(1));
        createNewSingletonPolicy("warm", new ForceMergeAction(1));
        updatePolicy(index, policy);
        assertBusy(() -> {
            assertThat(getStepKeyForIndex(index), equalTo(TerminalPolicyStep.KEY));
            Map<String, Object> settings =
                getOnlyIndexSettings(index);
            assertThat(numSegments.get(), equalTo(1));
            assertThat(settings.get(IndexMetaData.INDEX_BLOCKS_WRITE_SETTING.getKey()), equalTo("true"));
        });
        // the force-merge action leaves a write block in place, so indexing must fail
        expectThrows(ResponseException.class, this::indexDocument);
    }

    // Verifies a shrink action reduces the shard count and swaps the alias to the shrunken index.
    public void testShrinkAction() throws Exception {
        int numShards = 4;
        int divisor = randomFrom(2, 4);
        int expectedFinalShards = numShards / divisor;
        String shrunkenIndex = ShrinkAction.SHRUNKEN_INDEX_PREFIX + index;
        createIndexWithSettings(index, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numShards)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0));
        createNewSingletonPolicy("warm", new ShrinkAction(expectedFinalShards));
        updatePolicy(index, policy);
        assertBusy(() -> assertTrue(indexExists(shrunkenIndex)), 30, TimeUnit.SECONDS);
        assertBusy(() -> assertTrue(aliasExists(shrunkenIndex, index)));
        assertBusy(() -> assertThat(getStepKeyForIndex(shrunkenIndex), equalTo(TerminalPolicyStep.KEY)));
        assertBusy(() -> {
            Map<String, Object> settings = getOnlyIndexSettings(shrunkenIndex);
            assertThat(settings.get(IndexMetaData.SETTING_NUMBER_OF_SHARDS), equalTo(String.valueOf(expectedFinalShards)));
            assertThat(settings.get(IndexMetaData.INDEX_BLOCKS_WRITE_SETTING.getKey()), equalTo("true"));
            // the single-node allocation filter used during the shrink must have been cleared
            assertThat(settings.get(IndexMetaData.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + "_id"), nullValue());
        });
        expectThrows(ResponseException.class, this::indexDocument);
    }

    // Verifies shrinking to the same shard count is a no-op: no shrunken index is created.
    public void testShrinkSameShards() throws Exception {
        int numberOfShards = randomFrom(1, 2);
        String shrunkenIndex = ShrinkAction.SHRUNKEN_INDEX_PREFIX + index;
        createIndexWithSettings(index, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numberOfShards)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0));
        createNewSingletonPolicy("warm", new ShrinkAction(numberOfShards));
        updatePolicy(index, policy);
        assertBusy(() -> {
            assertTrue(indexExists(index));
            assertFalse(indexExists(shrunkenIndex));
            assertFalse(aliasExists(shrunkenIndex, index));
            Map<String, Object>
                settings = getOnlyIndexSettings(index);
            assertThat(getStepKeyForIndex(index), equalTo(TerminalPolicyStep.KEY));
            assertThat(settings.get(IndexMetaData.SETTING_NUMBER_OF_SHARDS), equalTo(String.valueOf(numberOfShards)));
            // neither the write block nor the allocation filter should have been applied
            assertNull(settings.get(IndexMetaData.INDEX_BLOCKS_WRITE_SETTING.getKey()));
            assertThat(settings.get(IndexMetaData.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + "_id"), nullValue());
        });
    }

    // Verifies a shrink action succeeds while the source index is part of an in-progress snapshot.
    public void testShrinkDuringSnapshot() throws Exception {
        String shrunkenIndex = ShrinkAction.SHRUNKEN_INDEX_PREFIX + index;
        // Create the repository before taking the snapshot.
        Request request = new Request("PUT", "/_snapshot/repo");
        request.setJsonEntity(Strings
            .toString(JsonXContent.contentBuilder()
                .startObject()
                .field("type", "fs")
                .startObject("settings")
                .field("compress", randomBoolean())
                .field("location", System.getProperty("tests.path.repo"))
                // throttle the snapshot so it is still running when the shrink happens
                .field("max_snapshot_bytes_per_sec", "256b")
                .endObject()
                .endObject()));
        assertOK(client().performRequest(request));
        // create shrink policy
        createNewSingletonPolicy("warm", new ShrinkAction(1), TimeValue.timeValueMillis(0));
        // create index without policy
        createIndexWithSettings(index, Settings.builder()
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 2)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
            // required so the shrink doesn't wait on SetSingleNodeAllocateStep
            .put(IndexMetaData.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + "_name", "integTest-0"));
        // index document so snapshot actually does something
        indexDocument();
        // start snapshot
        request = new Request("PUT", "/_snapshot/repo/snapshot");
        request.addParameter("wait_for_completion", "false");
        request.setJsonEntity("{\"indices\": \"" + index + "\"}");
        assertOK(client().performRequest(request));
        // add policy and expect it to trigger shrink immediately (while snapshot in progress)
        updatePolicy(index, policy);
        // assert that index was shrunk and original index was deleted
        assertBusy(() -> {
            assertTrue(indexExists(shrunkenIndex));
            assertTrue(aliasExists(shrunkenIndex, index));
            Map<String, Object> settings = getOnlyIndexSettings(shrunkenIndex);
            assertThat(getStepKeyForIndex(shrunkenIndex), equalTo(TerminalPolicyStep.KEY));
            assertThat(settings.get(IndexMetaData.SETTING_NUMBER_OF_SHARDS), equalTo(String.valueOf(1)));
            assertThat(settings.get(IndexMetaData.INDEX_BLOCKS_WRITE_SETTING.getKey()), equalTo("true"));
            // the single-node allocation filter used during the shrink must have been cleared
            assertThat(settings.get(IndexMetaData.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + "_id"), nullValue());
        }, 2, TimeUnit.MINUTES);
        expectThrows(ResponseException.class, this::indexDocument);
        // assert that snapshot succeeded
        assertThat(getSnapshotState("snapshot"), equalTo("SUCCESS"));
        assertOK(client().performRequest(new Request("DELETE", "/_snapshot/repo/snapshot")));
    }

    // Verifies a freeze action freezes the index and applies the associated settings.
    public void testFreezeAction() throws Exception {
        createIndexWithSettings(index, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0));
        createNewSingletonPolicy("cold", new FreezeAction());
        updatePolicy(index, policy);
        assertBusy(() -> {
            Map<String, Object> settings = getOnlyIndexSettings(index);
            assertThat(getStepKeyForIndex(index), equalTo(TerminalPolicyStep.KEY));
            assertThat(settings.get(IndexMetaData.INDEX_BLOCKS_WRITE_SETTING.getKey()), equalTo("true"));
            assertThat(settings.get(IndexSettings.INDEX_SEARCH_THROTTLED.getKey()), equalTo("true"));
            assertThat(settings.get("index.frozen"), equalTo("true"));
        });
    }

    // Verifies a freeze action succeeds while the index is part of an in-progress snapshot.
    public void testFreezeDuringSnapshot() throws Exception {
        // Create the repository before taking the snapshot.
        Request request = new Request("PUT", "/_snapshot/repo");
        request.setJsonEntity(Strings
            .toString(JsonXContent.contentBuilder()
                .startObject()
                .field("type", "fs")
                .startObject("settings")
                .field("compress", randomBoolean())
                .field("location", System.getProperty("tests.path.repo"))
                // throttle the snapshot so it is still running when the freeze happens
                .field("max_snapshot_bytes_per_sec", "256b")
                .endObject()
                .endObject()));
        assertOK(client().performRequest(request));
        // create freeze policy
        createNewSingletonPolicy("cold", new FreezeAction(), TimeValue.timeValueMillis(0));
        // create index without policy
        createIndexWithSettings(index, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0));
        // index document so snapshot actually does something
        indexDocument();
        // start snapshot
        request = new Request("PUT", "/_snapshot/repo/snapshot");
        request.addParameter("wait_for_completion", "false");
        request.setJsonEntity("{\"indices\": \"" + index + "\"}");
        assertOK(client().performRequest(request));
        // add policy and expect it to trigger the freeze immediately (while snapshot in progress)
        updatePolicy(index, policy);
        // assert that the index froze
        assertBusy(() -> {
            Map<String, Object> settings = getOnlyIndexSettings(index);
            assertThat(getStepKeyForIndex(index), equalTo(TerminalPolicyStep.KEY));
            assertThat(settings.get(IndexMetaData.INDEX_BLOCKS_WRITE_SETTING.getKey()), equalTo("true"));
            assertThat(settings.get(IndexSettings.INDEX_SEARCH_THROTTLED.getKey()), equalTo("true"));
            assertThat(settings.get("index.frozen"), equalTo("true"));
        }, 2, TimeUnit.MINUTES);
        // assert that the snapshot eventually succeeded, then clean it up
        assertThat(getSnapshotState("snapshot"), equalTo("SUCCESS"));
        assertOK(client().performRequest(new Request("DELETE", "/_snapshot/repo/snapshot")));
    }

    // Verifies a set-priority action updates the index.priority setting.
    public void testSetPriority() throws Exception {
        createIndexWithSettings(index, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS,
                0).put(IndexMetaData.INDEX_PRIORITY_SETTING.getKey(), 100));
        int priority = randomIntBetween(0, 99);
        createNewSingletonPolicy("warm", new SetPriorityAction(priority));
        updatePolicy(index, policy);
        assertBusy(() -> {
            Map<String, Object> settings = getOnlyIndexSettings(index);
            assertThat(getStepKeyForIndex(index), equalTo(TerminalPolicyStep.KEY));
            assertThat(settings.get(IndexMetaData.INDEX_PRIORITY_SETTING.getKey()), equalTo(String.valueOf(priority)));
        });
    }

    // Verifies a set-priority action with a null priority removes the index.priority setting.
    public void testSetNullPriority() throws Exception {
        createIndexWithSettings(index, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0).put(IndexMetaData.INDEX_PRIORITY_SETTING.getKey(), 100));
        createNewSingletonPolicy("warm", new SetPriorityAction((Integer) null));
        updatePolicy(index, policy);
        assertBusy(() -> {
            Map<String, Object> settings = getOnlyIndexSettings(index);
            assertThat(getStepKeyForIndex(index), equalTo(TerminalPolicyStep.KEY));
            assertNull(settings.get(IndexMetaData.INDEX_PRIORITY_SETTING.getKey()));
        });
    }

    // Verifies an index referencing a nonexistent policy surfaces the error via the explain API.
    @SuppressWarnings("unchecked")
    public void testNonexistentPolicy() throws Exception {
        String indexPrefix = randomAlphaOfLengthBetween(5,15).toLowerCase(Locale.ROOT);
        // template that points matching indices at a policy that was never created
        final StringEntity template = new StringEntity("{\n" +
            " \"index_patterns\": \"" + indexPrefix + "*\",\n" +
            " \"settings\": {\n" +
            " \"index\": {\n" +
            " \"lifecycle\": {\n" +
            " \"name\": \"does_not_exist\",\n" +
            " \"rollover_alias\": \"test_alias\"\n" +
            " }\n" +
            " }\n" +
            " }\n" +
            "}", ContentType.APPLICATION_JSON);
        Request templateRequest = new Request("PUT", "_template/test");
        templateRequest.setEntity(template);
        client().performRequest(templateRequest);
        policy = randomAlphaOfLengthBetween(5,20);
        createNewSingletonPolicy("hot", new RolloverAction(null, null, 1L));
        index = indexPrefix + "-000001";
        final StringEntity putIndex = new StringEntity("{\n" +
            " \"aliases\": {\n" +
            " \"test_alias\": {\n" +
            " \"is_write_index\": true\n" +
            " }\n" +
            " }\n" +
            "}",
            ContentType.APPLICATION_JSON);
        Request putIndexRequest = new Request("PUT", index);
        putIndexRequest.setEntity(putIndex);
        client().performRequest(putIndexRequest);
        indexDocument();
        assertBusy(() -> {
            Request explainRequest = new Request("GET", index + "/_ilm/explain");
            Response response = client().performRequest(explainRequest);
            Map<String, Object> responseMap;
            try (InputStream is = response.getEntity().getContent()) {
                responseMap = XContentHelper.convertToMap(XContentType.JSON.xContent(), is, true);
            }
            logger.info(responseMap);
            Map<String, Object> indexStatus = (Map<String, Object>)((Map<String, Object>) responseMap.get("indices")).get(index);
            // no phase/action/step because the policy could not be resolved
            assertNull(indexStatus.get("phase"));
            assertNull(indexStatus.get("action"));
            assertNull(indexStatus.get("step"));
            Map<String, String> stepInfo = (Map<String, String>) indexStatus.get("step_info");
            assertNotNull(stepInfo);
            assertEquals("policy [does_not_exist] does not exist", stepInfo.get("reason"));
            assertEquals("illegal_argument_exception", stepInfo.get("type"));
        });
    }

    // Verifies policy creation rejects invalid names: commas, encoded spaces, leading underscore, over-long.
    public void testInvalidPolicyNames() {
        ResponseException ex;
        policy = randomAlphaOfLengthBetween(0,10) + "," + randomAlphaOfLengthBetween(0,10);
        ex = expectThrows(ResponseException.class, () -> createNewSingletonPolicy("delete", new DeleteAction()));
        assertThat(ex.getMessage(), containsString("invalid policy name"));
        policy = randomAlphaOfLengthBetween(0,10) + "%20" + randomAlphaOfLengthBetween(0,10);
        ex = expectThrows(ResponseException.class, () -> createNewSingletonPolicy("delete", new DeleteAction()));
        assertThat(ex.getMessage(), containsString("invalid policy name"));
        policy = "_" + randomAlphaOfLengthBetween(1, 20);
        ex = expectThrows(ResponseException.class, () -> createNewSingletonPolicy("delete", new DeleteAction()));
        assertThat(ex.getMessage(), containsString("invalid policy name"));
        policy = randomAlphaOfLengthBetween(256, 1000);
        ex = expectThrows(ResponseException.class, () -> createNewSingletonPolicy("delete", new DeleteAction()));
        assertThat(ex.getMessage(), containsString("invalid policy name"));
    }

    // Verifies a policy cannot be deleted while indices use it, and the error names only those indices.
    public void testDeletePolicyInUse() throws IOException {
        String managedIndex1 = randomAlphaOfLength(7).toLowerCase(Locale.ROOT);
        String managedIndex2 = randomAlphaOfLength(8).toLowerCase(Locale.ROOT);
        String unmanagedIndex = randomAlphaOfLength(9).toLowerCase(Locale.ROOT);
        String managedByOtherPolicyIndex = randomAlphaOfLength(10).toLowerCase(Locale.ROOT);
        createNewSingletonPolicy("delete", new DeleteAction(), TimeValue.timeValueHours(12));
        String originalPolicy = policy;
        // create a second, unrelated policy
        String otherPolicy = randomValueOtherThan(policy, () -> randomAlphaOfLength(5));
        policy = otherPolicy;
        createNewSingletonPolicy("delete", new DeleteAction(), TimeValue.timeValueHours(13));
        createIndexWithSettingsNoAlias(managedIndex1, Settings.builder()
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1,10))
            .put(LifecycleSettings.LIFECYCLE_NAME_SETTING.getKey(), originalPolicy));
        createIndexWithSettingsNoAlias(managedIndex2, Settings.builder()
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1,10))
            .put(LifecycleSettings.LIFECYCLE_NAME_SETTING.getKey(), originalPolicy));
        createIndexWithSettingsNoAlias(unmanagedIndex, Settings.builder()
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1,10)));
        createIndexWithSettingsNoAlias(managedByOtherPolicyIndex, Settings.builder()
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1,10))
            .put(LifecycleSettings.LIFECYCLE_NAME_SETTING.getKey(), otherPolicy));
        // deleting the original policy must fail and report exactly the two indices using it
        Request deleteRequest = new Request("DELETE", "_ilm/policy/" + originalPolicy);
        ResponseException ex = expectThrows(ResponseException.class, () -> client().performRequest(deleteRequest));
        assertThat(ex.getMessage(),
            Matchers.allOf(
                containsString("Cannot delete policy [" + originalPolicy + "]. It is in use by one or more indices: ["),
                containsString(managedIndex1),
                containsString(managedIndex2),
                not(containsString(unmanagedIndex)),
                not(containsString(managedByOtherPolicyIndex))));
    }

    // Verifies removing and re-adding a policy leaves the index managed again and able to finish the policy.
    public void testRemoveAndReaddPolicy() throws Exception {
        String originalIndex = index + "-000001";
        String secondIndex = index + "-000002";
        // Set up a policy with rollover
        createNewSingletonPolicy("hot", new RolloverAction(null, null, 1L));
        createIndexWithSettings(
            originalIndex,
            Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
                .put(LifecycleSettings.LIFECYCLE_NAME, policy)
                .put(RolloverAction.LIFECYCLE_ROLLOVER_ALIAS, alias));
        // Index a document
        index(client(), originalIndex, "_id", "foo", "bar");
        // Wait for rollover to happen
        assertBusy(() -> assertTrue(indexExists(secondIndex)));
        // Remove the policy from the original index
        Request removeRequest = new Request("POST", "/" + originalIndex + "/_ilm/remove");
        removeRequest.setJsonEntity("");
        client().performRequest(removeRequest);
        // Add the policy again
        Request addPolicyRequest = new Request("PUT", "/" + originalIndex + "/_settings");
        addPolicyRequest.setJsonEntity("{\n" +
            " \"settings\": {\n" +
            " \"index.lifecycle.name\": \"" + policy + "\",\n" +
            " \"index.lifecycle.rollover_alias\": \"" + alias + "\"\n" +
            " }\n" +
            "}");
        client().performRequest(addPolicyRequest);
        assertBusy(() -> assertTrue((boolean) explainIndex(originalIndex).getOrDefault("managed", false)));
        // Wait for everything to be copacetic
        assertBusy(() -> assertThat(getStepKeyForIndex(originalIndex), equalTo(TerminalPolicyStep.KEY)));
    }

    // Verifies ILM can be moved into a step of the injected unfollow action and continue from there.
    public void testMoveToInjectedStep() throws Exception {
        String shrunkenIndex = ShrinkAction.SHRUNKEN_INDEX_PREFIX + index;
        createNewSingletonPolicy("warm", new ShrinkAction(1), TimeValue.timeValueHours(12));
        createIndexWithSettings(index, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 3)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
.put(LifecycleSettings.LIFECYCLE_NAME, policy) .put(RolloverAction.LIFECYCLE_ROLLOVER_ALIAS, alias)); assertBusy(() -> assertThat(getStepKeyForIndex(index), equalTo(new StepKey("new", "complete", "complete")))); // Move to a step from the injected unfollow action Request moveToStepRequest = new Request("POST", "_ilm/move/" + index); moveToStepRequest.setJsonEntity("{\n" + " \"current_step\": { \n" + " \"phase\": \"new\",\n" + " \"action\": \"complete\",\n" + " \"name\": \"complete\"\n" + " },\n" + " \"next_step\": { \n" + " \"phase\": \"warm\",\n" + " \"action\": \"unfollow\",\n" + " \"name\": \"wait-for-indexing-complete\"\n" + " }\n" + "}"); // If we get an OK on this request we have successfully moved to the injected step assertOK(client().performRequest(moveToStepRequest)); // Make sure we actually move on to and execute the shrink action assertBusy(() -> { assertTrue(indexExists(shrunkenIndex)); assertTrue(aliasExists(shrunkenIndex, index)); assertThat(getStepKeyForIndex(shrunkenIndex), equalTo(TerminalPolicyStep.KEY)); }); } public void testMoveToStepRereadsPolicy() throws Exception { createNewSingletonPolicy("hot", new RolloverAction(null, TimeValue.timeValueHours(1), null), TimeValue.ZERO); createIndexWithSettings("test-1", Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) .put(LifecycleSettings.LIFECYCLE_NAME, policy) .put(RolloverAction.LIFECYCLE_ROLLOVER_ALIAS, alias), true); assertBusy(() -> assertThat(getStepKeyForIndex("test-1"), equalTo(new StepKey("hot", "rollover", "check-rollover-ready")))); createNewSingletonPolicy("hot", new RolloverAction(null, TimeValue.timeValueSeconds(1), null), TimeValue.ZERO); // Move to the same step, which should re-read the policy Request moveToStepRequest = new Request("POST", "_ilm/move/test-1"); moveToStepRequest.setJsonEntity("{\n" + " \"current_step\": { \n" + " \"phase\": \"hot\",\n" + " \"action\": \"rollover\",\n" + " \"name\": 
\"check-rollover-ready\"\n" + " },\n" + " \"next_step\": { \n" + " \"phase\": \"hot\",\n" + " \"action\": \"rollover\",\n" + " \"name\": \"check-rollover-ready\"\n" + " }\n" + "}"); assertOK(client().performRequest(moveToStepRequest)); // Make sure we actually rolled over assertBusy(() -> { indexExists("test-000002"); }); } public void testCanStopILMWithPolicyUsingNonexistentPolicy() throws Exception { createIndexWithSettings(index, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) .put(LifecycleSettings.LIFECYCLE_NAME_SETTING.getKey(), randomAlphaOfLengthBetween(5,15))); Request stopILMRequest = new Request("POST", "_ilm/stop"); assertOK(client().performRequest(stopILMRequest)); Request statusRequest = new Request("GET", "_ilm/status"); assertBusy(() -> { Response statusResponse = client().performRequest(statusRequest); assertOK(statusResponse); Map<String, Object> statusResponseMap = entityAsMap(statusResponse); String status = (String) statusResponseMap.get("operation_mode"); assertEquals("STOPPED", status); }); // Re-start ILM so that subsequent tests don't fail Request startILMReqest = new Request("POST", "_ilm/start"); assertOK(client().performRequest(startILMReqest)); } public void testExplainFilters() throws Exception { String goodIndex = index + "-good-000001"; String errorIndex = index + "-error"; String nonexistantPolicyIndex = index + "-nonexistant-policy"; String unmanagedIndex = index + "-unmanaged"; createFullPolicy(TimeValue.ZERO); { // Create a "shrink-only-policy" Map<String, LifecycleAction> warmActions = new HashMap<>(); warmActions.put(ShrinkAction.NAME, new ShrinkAction(17)); Map<String, Phase> phases = new HashMap<>(); phases.put("warm", new Phase("warm", TimeValue.ZERO, warmActions)); LifecyclePolicy lifecyclePolicy = new LifecyclePolicy("shrink-only-policy", phases); // PUT policy XContentBuilder builder = jsonBuilder(); lifecyclePolicy.toXContent(builder, null); final 
            StringEntity entity = new StringEntity(
                "{ \"policy\":" + Strings.toString(builder) + "}", ContentType.APPLICATION_JSON);
            Request request = new Request("PUT", "_ilm/policy/shrink-only-policy");
            request.setEntity(entity);
            assertOK(client().performRequest(request));
        }
        // managed, healthy index
        createIndexWithSettings(goodIndex, Settings.builder()
            .put(RolloverAction.LIFECYCLE_ROLLOVER_ALIAS, alias)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
            .put(LifecycleSettings.LIFECYCLE_NAME, policy)
        );
        // managed by the shrink-only policy; presumably ends up in the error step — confirm against policy setup
        createIndexWithSettingsNoAlias(errorIndex, Settings.builder()
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
            .put(LifecycleSettings.LIFECYCLE_NAME, "shrink-only-policy")
        );
        // references a policy name that does not exist
        createIndexWithSettingsNoAlias(nonexistantPolicyIndex, Settings.builder()
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
            .put(LifecycleSettings.LIFECYCLE_NAME, randomValueOtherThan(policy, () -> randomAlphaOfLengthBetween(3,10))));
        // no lifecycle settings at all
        createIndexWithSettingsNoAlias(unmanagedIndex, Settings.builder()
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0));
        assertBusy(() -> {
            // unfiltered explain returns all four indices
            Map<String, Map<String, Object>> explainResponse = explain(index + "*", false, false);
            assertNotNull(explainResponse);
            assertThat(explainResponse,
                allOf(hasKey(goodIndex), hasKey(errorIndex), hasKey(nonexistantPolicyIndex), hasKey(unmanagedIndex)));
            // only-managed filter drops the unmanaged index
            Map<String, Map<String, Object>> onlyManagedResponse = explain(index + "*", false, true);
            assertNotNull(onlyManagedResponse);
            assertThat(onlyManagedResponse, allOf(hasKey(goodIndex), hasKey(errorIndex), hasKey(nonexistantPolicyIndex)));
            assertThat(onlyManagedResponse, not(hasKey(unmanagedIndex)));
            // only-errors filter keeps just the failing indices
            Map<String, Map<String, Object>> onlyErrorsResponse = explain(index + "*", true, true);
            assertNotNull(onlyErrorsResponse);
            assertThat(onlyErrorsResponse, allOf(hasKey(errorIndex), hasKey(nonexistantPolicyIndex)));
            assertThat(onlyErrorsResponse, allOf(not(hasKey(goodIndex)), not(hasKey(unmanagedIndex))));
        });
    }

    // Verifies explain output exposes the automatic-retry counters for a failing, retryable step.
    public void testExplainIndexContainsAutomaticRetriesInformation() throws Exception {
        createFullPolicy(TimeValue.ZERO);
        // create index without alias so the rollover action fails and is retried
        createIndexWithSettingsNoAlias(index, Settings.builder()
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
            .put(LifecycleSettings.LIFECYCLE_NAME, policy)
        );
        assertBusy(() -> {
            Map<String, Object> explainIndex = explainIndex(index);
            assertThat((Integer) explainIndex.get(FAILED_STEP_RETRY_COUNT_FIELD), greaterThanOrEqualTo(1));
            assertThat(explainIndex.get(IS_AUTO_RETRYABLE_ERROR_FIELD), is(true));
        });
    }

    // Verifies ILM retries rollover while the index is blocked read-only, and completes once unblocked.
    public void testILMRolloverRetriesOnReadOnlyBlock() throws Exception {
        String firstIndex = index + "-000001";
        createNewSingletonPolicy("hot", new RolloverAction(null, TimeValue.timeValueSeconds(1), null));
        // create the index as readonly and associate the ILM policy to it
        createIndexWithSettings(
            firstIndex,
            Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
                .put(LifecycleSettings.LIFECYCLE_NAME, policy)
                .put(RolloverAction.LIFECYCLE_ROLLOVER_ALIAS, alias)
                .put("index.blocks.read_only", true),
            true
        );
        // wait for ILM to start retrying the step
        assertBusy(() -> assertThat((Integer) explainIndex(firstIndex).get(FAILED_STEP_RETRY_COUNT_FIELD), greaterThanOrEqualTo(1)));
        // remove the read only block
        Request allowWritesOnIndexSettingUpdate = new Request("PUT", firstIndex + "/_settings");
        allowWritesOnIndexSettingUpdate.setJsonEntity("{" +
            " \"index\": {\n" +
            " \"blocks.read_only\" : \"false\" \n" +
            " }\n" +
            "}");
        client().performRequest(allowWritesOnIndexSettingUpdate);
        // index is not readonly so the ILM should complete successfully
        assertBusy(() -> assertThat(getStepKeyForIndex(firstIndex), equalTo(TerminalPolicyStep.KEY)));
    }

    // Verifies ILM skips rollover on an index that was already rolled over manually, then keeps
    // managing the subsequent write index normally.
    public void testILMRolloverOnManuallyRolledIndex() throws Exception {
        String originalIndex = index + "-000001";
        String secondIndex = index + "-000002";
        String thirdIndex = index + "-000003";
        // Set up a policy with rollover
        createNewSingletonPolicy("hot", new
RolloverAction(null, null, 2L)); Request createIndexTemplate = new Request("PUT", "_template/rolling_indexes"); createIndexTemplate.setJsonEntity("{" + "\"index_patterns\": [\"" + index + "-*\"], \n" + " \"settings\": {\n" + " \"number_of_shards\": 1,\n" + " \"number_of_replicas\": 0,\n" + " \"index.lifecycle.name\": \"" + policy + "\", \n" + " \"index.lifecycle.rollover_alias\": \"" + alias + "\"\n" + " }\n" + "}"); client().performRequest(createIndexTemplate); createIndexWithSettings( originalIndex, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0), true ); // Index a document index(client(), originalIndex, "1", "foo", "bar"); Request refreshOriginalIndex = new Request("POST", "/" + originalIndex + "/_refresh"); client().performRequest(refreshOriginalIndex); // Manual rollover Request rolloverRequest = new Request("POST", "/" + alias + "/_rollover"); rolloverRequest.setJsonEntity("{\n" + " \"conditions\": {\n" + " \"max_docs\": \"1\"\n" + " }\n" + "}" ); client().performRequest(rolloverRequest); assertBusy(() -> assertTrue(indexExists(secondIndex))); // Index another document into the original index so the ILM rollover policy condition is met index(client(), originalIndex, "2", "foo", "bar"); client().performRequest(refreshOriginalIndex); // Wait for the rollover policy to execute assertBusy(() -> assertThat(getStepKeyForIndex(originalIndex), equalTo(TerminalPolicyStep.KEY))); // ILM should manage the second index after attempting (and skipping) rolling the original index assertBusy(() -> assertTrue((boolean) explainIndex(secondIndex).getOrDefault("managed", true))); // index some documents to trigger an ILM rollover index(client(), alias, "1", "foo", "bar"); index(client(), alias, "2", "foo", "bar"); index(client(), alias, "3", "foo", "bar"); Request refreshSecondIndex = new Request("POST", "/" + secondIndex + "/_refresh"); client().performRequest(refreshSecondIndex).getStatusLine(); // ILM should 
rollover the second index even though it skipped the first one assertBusy(() -> assertThat(getStepKeyForIndex(secondIndex), equalTo(TerminalPolicyStep.KEY))); assertBusy(() -> assertTrue(indexExists(thirdIndex))); } public void testRolloverStepRetriesUntilRolledOverIndexIsDeleted() throws Exception { String index = this.index + "-000001"; String rolledIndex = this.index + "-000002"; createNewSingletonPolicy("hot", new RolloverAction(null, TimeValue.timeValueSeconds(1), null)); // create the rolled index so the rollover of the first index fails createIndexWithSettings( rolledIndex, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) .put(RolloverAction.LIFECYCLE_ROLLOVER_ALIAS, alias), false ); createIndexWithSettings( index, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) .put(LifecycleSettings.LIFECYCLE_NAME, policy) .put(RolloverAction.LIFECYCLE_ROLLOVER_ALIAS, alias), true ); assertBusy(() -> assertThat((Integer) explainIndex(index).get(FAILED_STEP_RETRY_COUNT_FIELD), greaterThanOrEqualTo(1)), 30, TimeUnit.SECONDS); Request moveToStepRequest = new Request("POST", "_ilm/move/" + index); moveToStepRequest.setJsonEntity("{\n" + " \"current_step\": {\n" + " \"phase\": \"hot\",\n" + " \"action\": \"rollover\",\n" + " \"name\": \"check-rollover-ready\"\n" + " },\n" + " \"next_step\": {\n" + " \"phase\": \"hot\",\n" + " \"action\": \"rollover\",\n" + " \"name\": \"attempt-rollover\"\n" + " }\n" + "}"); // Using {@link #waitUntil} here as ILM moves back and forth between the {@link WaitForRolloverReadyStep} step and // {@link org.elasticsearch.xpack.core.ilm.ErrorStep} in order to retry the failing step. As {@link #assertBusy} // increases the wait time between calls exponentially, we might miss the window where the policy is on // {@link WaitForRolloverReadyStep} and the move to `attempt-rollover` request will not be successful. 
        waitUntil(() -> {
            try {
                return client().performRequest(moveToStepRequest).getStatusLine().getStatusCode() == 200;
            } catch (IOException e) {
                return false;
            }
        }, 30, TimeUnit.SECONDS);
        // Similar to above, using {@link #waitUntil} as we want to make sure the `attempt-rollover` step started failing and is being
        // retried (which means ILM moves back and forth between the `attempt-rollover` step and the `error` step)
        waitUntil(() -> {
            try {
                Map<String, Object> explainIndexResponse = explainIndex(index);
                String step = (String) explainIndexResponse.get("step");
                Integer retryCount = (Integer) explainIndexResponse.get(FAILED_STEP_RETRY_COUNT_FIELD);
                return step != null && step.equals("attempt-rollover") && retryCount != null && retryCount >= 1;
            } catch (IOException e) {
                return false;
            }
        }, 30, TimeUnit.SECONDS);
        // remove the conflicting target index; the retried rollover can now succeed
        deleteIndex(rolledIndex);
        // the rollover step should eventually succeed
        assertBusy(() -> assertThat(indexExists(rolledIndex), is(true)));
        assertBusy(() -> assertThat(getStepKeyForIndex(index), equalTo(TerminalPolicyStep.KEY)));
    }

    // Verifies the update-rollover-lifecycle-date step retries while rollover info is missing and
    // finishes once a manual rollover records that info on the index.
    public void testUpdateRolloverLifecycleDateStepRetriesWhenRolloverInfoIsMissing() throws Exception {
        String index = this.index + "-000001";
        createNewSingletonPolicy("hot", new RolloverAction(null, null, 1L));
        createIndexWithSettings(
            index,
            Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
                .put(LifecycleSettings.LIFECYCLE_NAME, policy)
                .put(RolloverAction.LIFECYCLE_ROLLOVER_ALIAS, alias),
            true
        );
        assertBusy(() -> assertThat(getStepKeyForIndex(index).getName(), is(WaitForRolloverReadyStep.NAME)));
        // moving ILM to the "update-rollover-lifecycle-date" without having gone through the actual rollover step
        // the "update-rollover-lifecycle-date" step will fail as the index has no rollover information
        Request moveToStepRequest = new Request("POST", "_ilm/move/" + index);
        moveToStepRequest.setJsonEntity("{\n" +
            " \"current_step\": {\n" +
            " \"phase\": \"hot\",\n" +
            " \"action\": \"rollover\",\n" +
            " \"name\": \"check-rollover-ready\"\n" +
            " },\n" +
            " \"next_step\": {\n" +
            " \"phase\": \"hot\",\n" +
            " \"action\": \"rollover\",\n" +
            " \"name\": \"update-rollover-lifecycle-date\"\n" +
            " }\n" +
            "}");
        client().performRequest(moveToStepRequest);
        // wait until the step is observed failing and being retried
        waitUntil(() -> {
            try {
                Map<String, Object> explainIndexResponse = explainIndex(index);
                String step = (String) explainIndexResponse.get("step");
                Integer retryCount = (Integer) explainIndexResponse.get(FAILED_STEP_RETRY_COUNT_FIELD);
                return step != null && step.equals(UpdateRolloverLifecycleDateStep.NAME) && retryCount != null && retryCount >= 1;
            } catch (IOException e) {
                return false;
            }
        });
        index(client(), index, "1", "foo", "bar");
        Request refreshIndex = new Request("POST", "/" + index + "/_refresh");
        client().performRequest(refreshIndex);
        // manual rollover the index so the "update-rollover-lifecycle-date" ILM step can continue and finish successfully as the index
        // will have rollover information now
        Request rolloverRequest = new Request("POST", "/" + alias + "/_rollover");
        rolloverRequest.setJsonEntity("{\n" +
            " \"conditions\": {\n" +
            " \"max_docs\": \"1\"\n" +
            " }\n" +
            "}"
        );
        client().performRequest(rolloverRequest);
        assertBusy(() -> assertThat(getStepKeyForIndex(index), equalTo(TerminalPolicyStep.KEY)));
    }

    // Verifies rollover waits in the wait-for-active-shards step while the new index cannot allocate
    // all its copies, and proceeds once the replica count is reduced.
    public void testWaitForActiveShardsStep() throws Exception {
        String originalIndex = index + "-000001";
        String secondIndex = index + "-000002";
        createIndexWithSettings(originalIndex, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
                .put(RolloverAction.LIFECYCLE_ROLLOVER_ALIAS, alias),
            true);
        // create policy
        createNewSingletonPolicy("hot", new RolloverAction(null, null, 1L));
        // update policy on index
        updatePolicy(originalIndex, policy);
        // the rolled-over index will demand 142 replicas and wait_for_active_shards=all,
        // which cannot be satisfied, so ILM will wait
        Request createIndexTemplate = new Request("PUT", "_template/rolling_indexes");
        createIndexTemplate.setJsonEntity("{" +
            "\"index_patterns\": [\""+ index + "-*\"], \n" +
            " \"settings\": {\n" +
            " \"number_of_shards\": 1,\n" +
            " \"number_of_replicas\": 142,\n" +
            " \"index.write.wait_for_active_shards\": \"all\"\n" +
            " }\n" +
            "}");
        client().performRequest(createIndexTemplate);
        // index document to trigger rollover
        index(client(), originalIndex, "_id", "foo", "bar");
        assertBusy(() -> assertTrue(indexExists(secondIndex)));
        assertBusy(() -> assertThat(getStepKeyForIndex(originalIndex).getName(), equalTo(WaitForActiveShardsStep.NAME)));
        // reset the number of replicas to 0 so that the second index wait for active shard condition can be met
        updateIndexSettings(secondIndex, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0));
        assertBusy(() -> assertThat(getStepKeyForIndex(originalIndex), equalTo(TerminalPolicyStep.KEY)));
    }

    // Verifies a successful ILM run writes history documents for the executed steps.
    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/50353")
    public void testHistoryIsWrittenWithSuccess() throws Exception {
        String index = "success-index";
        createNewSingletonPolicy("hot", new RolloverAction(null, null, 1L));
        Request createIndexTemplate = new Request("PUT", "_template/rolling_indexes");
        createIndexTemplate.setJsonEntity("{" +
            "\"index_patterns\": [\""+ index + "-*\"], \n" +
            " \"settings\": {\n" +
            " \"number_of_shards\": 1,\n" +
            " \"number_of_replicas\": 0,\n" +
            " \"index.lifecycle.name\": \"" + policy+ "\",\n" +
            " \"index.lifecycle.rollover_alias\": \"" + alias + "\"\n" +
            " }\n" +
            "}");
        client().performRequest(createIndexTemplate);
        createIndexWithSettings(index + "-1",
            Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0),
            true);
        // Index a document
        index(client(), index + "-1", "1", "foo", "bar");
        Request refreshIndex = new Request("POST", "/" + index + "-1/_refresh");
        client().performRequest(refreshIndex);
        assertBusy(() -> assertThat(getStepKeyForIndex(index + "-1"), equalTo(TerminalPolicyStep.KEY)));
        assertBusy(() -> assertHistoryIsPresent(policy, index + "-1", true, "wait-for-indexing-complete"), 30, TimeUnit.SECONDS);
assertBusy(() -> assertHistoryIsPresent(policy, index + "-1", true, "wait-for-follow-shard-tasks"), 30, TimeUnit.SECONDS); assertBusy(() -> assertHistoryIsPresent(policy, index + "-1", true, "pause-follower-index"), 30, TimeUnit.SECONDS); assertBusy(() -> assertHistoryIsPresent(policy, index + "-1", true, "close-follower-index"), 30, TimeUnit.SECONDS); assertBusy(() -> assertHistoryIsPresent(policy, index + "-1", true, "unfollow-follower-index"), 30, TimeUnit.SECONDS); assertBusy(() -> assertHistoryIsPresent(policy, index + "-1", true, "open-follower-index"), 30, TimeUnit.SECONDS); assertBusy(() -> assertHistoryIsPresent(policy, index + "-1", true, "wait-for-yellow-step"), 30, TimeUnit.SECONDS); assertBusy(() -> assertHistoryIsPresent(policy, index + "-1", true, "check-rollover-ready"), 30, TimeUnit.SECONDS); assertBusy(() -> assertHistoryIsPresent(policy, index + "-1", true, "attempt-rollover"), 30, TimeUnit.SECONDS); assertBusy(() -> assertHistoryIsPresent(policy, index + "-1", true, "update-rollover-lifecycle-date"), 30, TimeUnit.SECONDS); assertBusy(() -> assertHistoryIsPresent(policy, index + "-1", true, "set-indexing-complete"), 30, TimeUnit.SECONDS); assertBusy(() -> assertHistoryIsPresent(policy, index + "-1", true, "completed"), 30, TimeUnit.SECONDS); assertBusy(() -> assertHistoryIsPresent(policy, index + "-000002", true, "check-rollover-ready"), 30, TimeUnit.SECONDS); } @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/50353") public void testHistoryIsWrittenWithFailure() throws Exception { String index = "failure-index"; createNewSingletonPolicy("hot", new RolloverAction(null, null, 1L)); Request createIndexTemplate = new Request("PUT", "_template/rolling_indexes"); createIndexTemplate.setJsonEntity("{" + "\"index_patterns\": [\""+ index + "-*\"], \n" + " \"settings\": {\n" + " \"number_of_shards\": 1,\n" + " \"number_of_replicas\": 0,\n" + " \"index.lifecycle.name\": \"" + policy+ "\"\n" + " }\n" + "}"); 
client().performRequest(createIndexTemplate); createIndexWithSettings(index + "-1", Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0), false); // Index a document index(client(), index + "-1", "1", "foo", "bar"); Request refreshIndex = new Request("POST", "/" + index + "-1/_refresh"); client().performRequest(refreshIndex); assertBusy(() -> assertThat(getStepKeyForIndex(index + "-1").getName(), equalTo(ErrorStep.NAME))); assertBusy(() -> assertHistoryIsPresent(policy, index + "-1", false, "ERROR"), 30, TimeUnit.SECONDS); } @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/50353") public void testHistoryIsWrittenWithDeletion() throws Exception { String index = "delete-index"; createNewSingletonPolicy("delete", new DeleteAction()); Request createIndexTemplate = new Request("PUT", "_template/delete_indexes"); createIndexTemplate.setJsonEntity("{" + "\"index_patterns\": [\""+ index + "\"], \n" + " \"settings\": {\n" + " \"number_of_shards\": 1,\n" + " \"number_of_replicas\": 0,\n" + " \"index.lifecycle.name\": \"" + policy+ "\"\n" + " }\n" + "}"); client().performRequest(createIndexTemplate); // Index should be created and then deleted by ILM createIndexWithSettings(index, Settings.builder(), false); assertBusy(() -> { logger.info("--> checking for index deletion..."); Request existCheck = new Request("HEAD", "/" + index); Response resp = client().performRequest(existCheck); assertThat(resp.getStatusLine().getStatusCode(), equalTo(404)); }); assertBusy(() -> { assertHistoryIsPresent(policy, index, true, "delete", "delete", "wait-for-shard-history-leases"); assertHistoryIsPresent(policy, index, true, "delete", "delete", "complete"); }, 30, TimeUnit.SECONDS); } public void testRetryableInitializationStep() throws Exception { String index = "retryinit-20xx-01-10"; Request stopReq = new Request("POST", "/_ilm/stop"); Request startReq = new Request("POST", "/_ilm/start"); 
createNewSingletonPolicy("hot", new SetPriorityAction(1)); // Stop ILM so that the initialize step doesn't run assertOK(client().performRequest(stopReq)); // Create the index with the origination parsing turn *off* so it doesn't prevent creation createIndexWithSettings( index, Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) .put(LifecycleSettings.LIFECYCLE_NAME, policy) .put(LifecycleSettings.LIFECYCLE_PARSE_ORIGINATION_DATE, false)); updateIndexSettings(index, Settings.builder() .put(LifecycleSettings.LIFECYCLE_PARSE_ORIGINATION_DATE, true)); assertOK(client().performRequest(startReq)); // Wait until an error has occurred. waitUntil(() -> { try { Map<String, Object> explainIndexResponse = explainIndex(index); String step = (String) explainIndexResponse.get("step"); Integer retryCount = (Integer) explainIndexResponse.get(FAILED_STEP_RETRY_COUNT_FIELD); return step != null && step.equals(InitializePolicyContextStep.KEY.getAction()) && retryCount != null && retryCount >= 1; } catch (IOException e) { return false; } }, 30, TimeUnit.SECONDS); // Turn origination date parsing back off updateIndexSettings(index, Settings.builder() .put(LifecycleSettings.LIFECYCLE_PARSE_ORIGINATION_DATE, false)); assertBusy(() -> { Map<String, Object> explainResp = explainIndex(index); String phase = (String) explainResp.get("phase"); assertThat(phase, equalTo(TerminalPolicyStep.COMPLETED_PHASE)); }); } public void testRefreshablePhaseJson() throws Exception { String index = "refresh-index"; createNewSingletonPolicy("hot", new RolloverAction(null, null, 100L)); Request createIndexTemplate = new Request("PUT", "_template/rolling_indexes"); createIndexTemplate.setJsonEntity("{" + "\"index_patterns\": [\""+ index + "-*\"], \n" + " \"settings\": {\n" + " \"number_of_shards\": 1,\n" + " \"number_of_replicas\": 0,\n" + " \"index.lifecycle.name\": \"" + policy+ "\",\n" + " \"index.lifecycle.rollover_alias\": \"" + alias + "\"\n" 
+ " }\n" + "}"); client().performRequest(createIndexTemplate); createIndexWithSettings(index + "-1", Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0), true); // Index a document index(client(), index + "-1", "1", "foo", "bar"); // Wait for the index to enter the check-rollover-ready step assertBusy(() -> assertThat(getStepKeyForIndex(index + "-1").getName(), equalTo(WaitForRolloverReadyStep.NAME))); // Update the policy to allow rollover at 1 document instead of 100 createNewSingletonPolicy("hot", new RolloverAction(null, null, 1L)); // Index should now have been able to roll over, creating the new index and proceeding to the "complete" step assertBusy(() -> assertThat(indexExists(index + "-000002"), is(true))); assertBusy(() -> assertThat(getStepKeyForIndex(index + "-1").getName(), equalTo(TerminalPolicyStep.KEY.getName()))); } // This method should be called inside an assertBusy, it has no retry logic of its own private void assertHistoryIsPresent(String policyName, String indexName, boolean success, String stepName) throws IOException { assertHistoryIsPresent(policyName, indexName, success, null, null, stepName); } // This method should be called inside an assertBusy, it has no retry logic of its own private void assertHistoryIsPresent(String policyName, String indexName, boolean success, @Nullable String phase, @Nullable String action, String stepName) throws IOException { logger.info("--> checking for history item [{}], [{}], success: [{}], phase: [{}], action: [{}], step: [{}]", policyName, indexName, success, phase, action, stepName); final Request historySearchRequest = new Request("GET", "ilm-history*/_search"); historySearchRequest.setJsonEntity("{\n" + " \"query\": {\n" + " \"bool\": {\n" + " \"must\": [\n" + " {\n" + " \"term\": {\n" + " \"policy\": \"" + policyName + "\"\n" + " }\n" + " },\n" + " {\n" + " \"term\": {\n" + " \"success\": " + success + "\n" + " }\n" + " },\n" + " {\n" + " 
\"term\": {\n" + " \"index\": \"" + indexName + "\"\n" + " }\n" + " },\n" + " {\n" + " \"term\": {\n" + " \"state.step\": \"" + stepName + "\"\n" + " }\n" + " }\n" + (phase == null ? "" : ",{\"term\": {\"state.phase\": \"" + phase + "\"}}") + (action == null ? "" : ",{\"term\": {\"state.action\": \"" + action + "\"}}") + " ]\n" + " }\n" + " }\n" + "}"); Response historyResponse; try { historyResponse = client().performRequest(historySearchRequest); Map<String, Object> historyResponseMap; try (InputStream is = historyResponse.getEntity().getContent()) { historyResponseMap = XContentHelper.convertToMap(XContentType.JSON.xContent(), is, true); } logger.info("--> history response: {}", historyResponseMap); int hits = (int)((Map<String, Object>) ((Map<String, Object>) historyResponseMap.get("hits")).get("total")).get("value"); // For a failure, print out whatever history we *do* have for the index if (hits == 0) { final Request allResults = new Request("GET", "ilm-history*/_search"); allResults.setJsonEntity("{\n" + " \"query\": {\n" + " \"bool\": {\n" + " \"must\": [\n" + " {\n" + " \"term\": {\n" + " \"policy\": \"" + policyName + "\"\n" + " }\n" + " },\n" + " {\n" + " \"term\": {\n" + " \"index\": \"" + indexName + "\"\n" + " }\n" + " }\n" + " ]\n" + " }\n" + " }\n" + "}"); final Response allResultsResp = client().performRequest(historySearchRequest); Map<String, Object> allResultsMap; try (InputStream is = allResultsResp.getEntity().getContent()) { allResultsMap = XContentHelper.convertToMap(XContentType.JSON.xContent(), is, true); } logger.info("--> expected at least 1 hit, got 0. 
All history for index [{}]: {}", index, allResultsMap); } assertThat(hits, greaterThanOrEqualTo(1)); } catch (ResponseException e) { // Throw AssertionError instead of an exception if the search fails so that assertBusy works as expected logger.error(e); fail("failed to perform search:" + e.getMessage()); } // Finally, check that the history index is in a good state Step.StepKey stepKey = getStepKeyForIndex("ilm-history-1-000001"); assertEquals("hot", stepKey.getPhase()); assertEquals(RolloverAction.NAME, stepKey.getAction()); assertEquals(WaitForRolloverReadyStep.NAME, stepKey.getName()); } private void createFullPolicy(TimeValue hotTime) throws IOException { Map<String, LifecycleAction> hotActions = new HashMap<>(); hotActions.put(SetPriorityAction.NAME, new SetPriorityAction(100)); hotActions.put(RolloverAction.NAME, new RolloverAction(null, null, 1L)); Map<String, LifecycleAction> warmActions = new HashMap<>(); warmActions.put(SetPriorityAction.NAME, new SetPriorityAction(50)); warmActions.put(ForceMergeAction.NAME, new ForceMergeAction(1)); warmActions.put(AllocateAction.NAME, new AllocateAction(1, singletonMap("_name", "integTest-1,integTest-2"), null, null)); warmActions.put(ShrinkAction.NAME, new ShrinkAction(1)); Map<String, LifecycleAction> coldActions = new HashMap<>(); coldActions.put(SetPriorityAction.NAME, new SetPriorityAction(0)); coldActions.put(AllocateAction.NAME, new AllocateAction(0, singletonMap("_name", "integTest-3"), null, null)); Map<String, Phase> phases = new HashMap<>(); phases.put("hot", new Phase("hot", hotTime, hotActions)); phases.put("warm", new Phase("warm", TimeValue.ZERO, warmActions)); phases.put("cold", new Phase("cold", TimeValue.ZERO, coldActions)); phases.put("delete", new Phase("delete", TimeValue.ZERO, singletonMap(DeleteAction.NAME, new DeleteAction()))); LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policy, phases); // PUT policy XContentBuilder builder = jsonBuilder(); lifecyclePolicy.toXContent(builder, null); 
    final StringEntity entity = new StringEntity(
        "{ \"policy\":" + Strings.toString(builder) + "}", ContentType.APPLICATION_JSON);
    Request request = new Request("PUT", "_ilm/policy/" + policy);
    request.setEntity(entity);
    assertOK(client().performRequest(request));
}

/** Creates (or replaces) a one-phase/one-action policy named {@code policy} with no phase delay. */
private void createNewSingletonPolicy(String phaseName, LifecycleAction action) throws IOException {
    createNewSingletonPolicy(phaseName, action, TimeValue.ZERO);
}

/** Creates (or replaces) a one-phase/one-action policy named {@code policy}, entering the phase after {@code after}. */
private void createNewSingletonPolicy(String phaseName, LifecycleAction action, TimeValue after) throws IOException {
    Phase phase = new Phase(phaseName, after, singletonMap(action.getWriteableName(), action));
    LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policy, singletonMap(phase.getName(), phase));
    XContentBuilder builder = jsonBuilder();
    lifecyclePolicy.toXContent(builder, null);
    final StringEntity entity = new StringEntity(
        "{ \"policy\":" + Strings.toString(builder) + "}", ContentType.APPLICATION_JSON);
    Request request = new Request("PUT", "_ilm/policy/" + policy);
    request.setEntity(entity);
    // NOTE(review): unlike createFullPolicy, the response status is not asserted here.
    client().performRequest(request);
}

/** Creates {@code index} with the given settings and no alias, then waits for green health. */
private void createIndexWithSettingsNoAlias(String index, Settings.Builder settings) throws IOException {
    Request request = new Request("PUT", "/" + index);
    request.setJsonEntity("{\n \"settings\": " + Strings.toString(settings.build())
        + "}");
    client().performRequest(request);
    // wait for the shards to initialize
    ensureGreen(index);
}

/** Creates {@code index} with the given settings; whether it is the alias write index is randomized. */
private void createIndexWithSettings(String index, Settings.Builder settings) throws IOException {
    createIndexWithSettings(index, settings, randomBoolean());
}

/** Creates {@code index} with the given settings, aliased to {@code alias} (optionally as write index). */
private void createIndexWithSettings(String index, Settings.Builder settings, boolean useWriteIndex) throws IOException {
    Request request = new Request("PUT", "/" + index);

    String writeIndexSnippet = "";
    if (useWriteIndex) {
        writeIndexSnippet = "\"is_write_index\": true";
    }
    request.setJsonEntity("{\n \"settings\": " + Strings.toString(settings.build())
        + ", \"aliases\" : { \"" + alias + "\": { " + writeIndexSnippet + " } } }");
    client().performRequest(request);
    // wait for the shards to initialize
    ensureGreen(index);
}

/** Indexes a single document with the given id and alternating field-name/value pairs. */
private static void index(RestClient client, String index, String id, Object... fields) throws IOException {
    XContentBuilder document = jsonBuilder().startObject();
    for (int i = 0; i < fields.length; i += 2) {
        document.field((String) fields[i], fields[i + 1]);
    }
    document.endObject();
    final Request request = new Request("POST", "/" + index + "/_doc/" + id);
    request.setJsonEntity(Strings.toString(document));
    assertOK(client.performRequest(request));
}

/** Returns just the "settings" section of the GET-settings response for {@code index}, or an empty map. */
@SuppressWarnings("unchecked")
private Map<String, Object> getOnlyIndexSettings(String index) throws IOException {
    Map<String, Object> response = (Map<String, Object>) getIndexSettings(index).get(index);
    if (response == null) {
        return Collections.emptyMap();
    }
    return (Map<String, Object>) response.get("settings");
}

/** Returns the current ILM (phase, action, step) key for {@code indexName}; all-null key if unmanaged. */
public static StepKey getStepKeyForIndex(String indexName) throws IOException {
    Map<String, Object> indexResponse = explainIndex(indexName);
    if (indexResponse == null) {
        return new StepKey(null, null, null);
    }
    return getStepKey(indexResponse);
}

private static StepKey getStepKey(Map<String, Object> explainIndexResponse) {
    String phase = (String) explainIndexResponse.get("phase");
    String action = (String) explainIndexResponse.get("action");
    String step = (String) explainIndexResponse.get("step");
    return new StepKey(phase, action, step);
}

/** Returns the failed step name for {@code indexName} from the explain API, or null. */
private String getFailedStepForIndex(String indexName) throws IOException {
    Map<String, Object> indexResponse = explainIndex(indexName);
    if (indexResponse == null) {
        return null;
    }
    return (String) indexResponse.get("failed_step");
}

private static Map<String, Object> explainIndex(String indexName) throws IOException {
    return explain(indexName, false, false).get(indexName);
}

/** Calls the ILM explain API for {@code indexPattern} and returns the per-index response maps. */
private static Map<String, Map<String, Object>> explain(String indexPattern, boolean onlyErrors,
                                                        boolean onlyManaged) throws IOException {
    Request explainRequest = new Request("GET", indexPattern + "/_ilm/explain");
    explainRequest.addParameter("only_errors", Boolean.toString(onlyErrors));
    explainRequest.addParameter("only_managed", Boolean.toString(onlyManaged));
    Response response = client().performRequest(explainRequest);
    Map<String, Object> responseMap;
    try (InputStream is = response.getEntity().getContent()) {
        responseMap = XContentHelper.convertToMap(XContentType.JSON.xContent(), is, true);
    }

    @SuppressWarnings("unchecked") Map<String, Map<String, Object>> indexResponse =
        ((Map<String, Map<String, Object>>) responseMap.get("indices"));

    return indexResponse;
}

/** Indexes a trivial document into the test index (used to trigger rollover conditions). */
private void indexDocument() throws IOException {
    Request indexRequest = new Request("POST", index + "/_doc");
    indexRequest.setEntity(new StringEntity("{\"a\": \"test\"}", ContentType.APPLICATION_JSON));
    Response response = client().performRequest(indexRequest);
    logger.info(response.getStatusLine());
}

/** Returns the state (e.g. SUCCESS) of the named snapshot in the "repo" repository. */
@SuppressWarnings("unchecked")
private String getSnapshotState(String snapshot) throws IOException {
    Response response = client().performRequest(new Request("GET", "/_snapshot/repo/" + snapshot));
    Map<String, Object> responseMap;
    try (InputStream is = response.getEntity().getContent()) {
        responseMap = XContentHelper.convertToMap(XContentType.JSON.xContent(), is, true);
    }

    Map<String, Object> repoResponse = ((List<Map<String, Object>>) responseMap.get("responses")).get(0);
    Map<String, Object> snapResponse = ((List<Map<String, Object>>) repoResponse.get("snapshots")).get(0);
    assertThat(snapResponse.get("snapshot"), equalTo(snapshot));
    return (String) snapResponse.get("state");
}

/** Creates an SLM policy with a far-future schedule (so it only runs when triggered explicitly). */
private void createSlmPolicy(String smlPolicy, String repo) throws IOException {
    Request request;
    request = new Request("PUT", "/_slm/policy/" + smlPolicy);
    request.setJsonEntity(Strings
        .toString(JsonXContent.contentBuilder()
            .startObject()
            .field("schedule", "59 59 23 31 12 ? 2099")
            .field("repository", repo)
            .field("name", "snap" + randomAlphaOfLengthBetween(5, 10).toLowerCase(Locale.ROOT))
            .startObject("config")
            .field("include_global_state", false)
            .endObject()
            .endObject()));

    assertOK(client().performRequest(request));
}

/** Registers a throttled fs snapshot repository with a random name and returns that name. */
private String createSnapshotRepo() throws IOException {
    String repo = randomAlphaOfLengthBetween(4, 10);
    Request request = new Request("PUT", "/_snapshot/" + repo);
    request.setJsonEntity(Strings
        .toString(JsonXContent.contentBuilder()
            .startObject()
            .field("type", "fs")
            .startObject("settings")
            .field("compress", randomBoolean())
            .field("location", System.getProperty("tests.path.repo"))
            .field("max_snapshot_bytes_per_sec", "256b")
            .endObject()
            .endObject()));
    assertOK(client().performRequest(request));
    return repo;
}
}
package org.elasticsearch.xpack.ml.integration;

import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.core.ml.action.GetJobsStatsAction;
import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig;
import org.elasticsearch.xpack.core.ml.job.config.AnalysisLimits;
import org.elasticsearch.xpack.core.ml.job.config.DataDescription;
import org.elasticsearch.xpack.core.ml.job.config.Detector;
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats;
import org.junit.After;

import java.time.Instant;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.lessThan;

/**
 * A set of tests that ensure we comply to the model memory limit
 */
public class AutodetectMemoryLimitIT extends MlNativeAutodetectIntegTestCase {

    @After
    public void cleanUpTest() {
        cleanUp();
    }

    /**
     * Floods a 30MB-limited count-by-partition job with high-cardinality partition values and
     * asserts the model hits HARD_LIMIT while staying within a tolerated band of the limit.
     */
    @AwaitsFix(bugUrl = "https://github.com/elastic/ml-cpp/pulls/122")
    public void testTooManyPartitions() throws Exception {
        Detector.Builder detector = new Detector.Builder("count", null);
        detector.setPartitionFieldName("user");

        TimeValue bucketSpan = TimeValue.timeValueHours(1);
        AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(detector.build()));
        analysisConfig.setBucketSpan(bucketSpan);
        DataDescription.Builder dataDescription = new DataDescription.Builder();
        dataDescription.setTimeFormat("epoch");
        Job.Builder job = new Job.Builder("autodetect-memory-limit-test-too-many-partitions");
        job.setAnalysisConfig(analysisConfig);
        job.setDataDescription(dataDescription);

        // Set the memory limit to 30MB
        AnalysisLimits limits = new AnalysisLimits(30L, null);
        job.setAnalysisLimits(limits);

        registerJob(job);
        putJob(job);
        openJob(job.getId());

        long now = Instant.now().getEpochSecond();
        long timestamp = now - 8 * bucketSpan.seconds();
        List<String> data = new ArrayList<>();
        while (timestamp < now) {
            for (int i = 0; i < 10000; i++) {
                // It's important that the values used here are either always represented in less than 16 UTF-8 bytes or
                // always represented in more than 22 UTF-8 bytes. Otherwise platform differences in when the small string
                // optimisation is used will make the results of this test very different for the different platforms.
                data.add(createJsonRecord(createRecord(timestamp, String.valueOf(i), "")));
            }
            timestamp += bucketSpan.seconds();
        }

        postData(job.getId(), data.stream().collect(Collectors.joining()));
        closeJob(job.getId());

        // Assert we haven't violated the limit too much
        GetJobsStatsAction.Response.JobStats jobStats = getJobStats(job.getId()).get(0);
        ModelSizeStats modelSizeStats = jobStats.getModelSizeStats();
        assertThat(modelSizeStats.getModelBytes(), lessThan(31500000L));
        assertThat(modelSizeStats.getModelBytes(), greaterThan(24000000L));
        assertThat(modelSizeStats.getMemoryStatus(), equalTo(ModelSizeStats.MemoryStatus.HARD_LIMIT));
    }

    /**
     * Same shape as testTooManyPartitions but with a by-field, checking the 30MB limit is enforced.
     */
    @AwaitsFix(bugUrl = "https://github.com/elastic/ml-cpp/pulls/122")
    public void testTooManyByFields() throws Exception {
        Detector.Builder detector = new Detector.Builder("count", null);
        detector.setByFieldName("user");

        TimeValue bucketSpan = TimeValue.timeValueHours(1);
        AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(detector.build()));
        analysisConfig.setBucketSpan(bucketSpan);
        DataDescription.Builder dataDescription = new DataDescription.Builder();
        dataDescription.setTimeFormat("epoch");
        Job.Builder job = new Job.Builder("autodetect-memory-limit-test-too-many-by-fields");
        job.setAnalysisConfig(analysisConfig);
        job.setDataDescription(dataDescription);

        // Set the memory limit to 30MB
        AnalysisLimits limits = new AnalysisLimits(30L, null);
        job.setAnalysisLimits(limits);

        registerJob(job);
        putJob(job);
        openJob(job.getId());

        long now = Instant.now().getEpochSecond();
        long timestamp = now - 8 * bucketSpan.seconds();
        List<String> data = new ArrayList<>();
        while (timestamp < now) {
            for (int i = 0; i < 10000; i++) {
                // It's important that the values used here are either always represented in less than 16 UTF-8 bytes or
                // always represented in more than 22 UTF-8 bytes. Otherwise platform differences in when the small string
                // optimisation is used will make the results of this test very different for the different platforms.
                data.add(createJsonRecord(createRecord(timestamp, String.valueOf(i), "")));
            }
            timestamp += bucketSpan.seconds();
        }

        postData(job.getId(), data.stream().collect(Collectors.joining()));
        closeJob(job.getId());

        // Assert we haven't violated the limit too much
        GetJobsStatsAction.Response.JobStats jobStats = getJobStats(job.getId()).get(0);
        ModelSizeStats modelSizeStats = jobStats.getModelSizeStats();
        assertThat(modelSizeStats.getModelBytes(), lessThan(31500000L));
        assertThat(modelSizeStats.getModelBytes(), greaterThan(25000000L));
        assertThat(modelSizeStats.getMemoryStatus(), equalTo(ModelSizeStats.MemoryStatus.HARD_LIMIT));
    }

    /**
     * Combines a by-field (department) with an over-field (user); data is posted per department
     * to keep each request bounded. Asserts the 30MB limit produces HARD_LIMIT.
     */
    @AwaitsFix(bugUrl = "https://github.com/elastic/ml-cpp/pulls/122")
    public void testTooManyByAndOverFields() throws Exception {
        Detector.Builder detector = new Detector.Builder("count", null);
        detector.setByFieldName("department");
        detector.setOverFieldName("user");

        TimeValue bucketSpan = TimeValue.timeValueHours(1);
        AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(detector.build()));
        analysisConfig.setBucketSpan(bucketSpan);
        DataDescription.Builder dataDescription = new DataDescription.Builder();
        dataDescription.setTimeFormat("epoch");
        Job.Builder job = new Job.Builder("autodetect-memory-limit-test-too-many-by-and-over-fields");
        job.setAnalysisConfig(analysisConfig);
        job.setDataDescription(dataDescription);

        // Set the memory limit to 30MB
        AnalysisLimits limits = new AnalysisLimits(30L, null);
        job.setAnalysisLimits(limits);

        registerJob(job);
        putJob(job);
        openJob(job.getId());

        long now = Instant.now().getEpochSecond();
        long timestamp = now - 8 * bucketSpan.seconds();
        while (timestamp < now) {
            for (int department = 0; department < 10; department++) {
                List<String> data = new ArrayList<>();
                for (int user = 0; user < 10000; user++) {
                    // It's important that the values used here are either always represented in less than 16 UTF-8 bytes or
                    // always represented in more than 22 UTF-8 bytes. Otherwise platform differences in when the small string
                    // optimisation is used will make the results of this test very different for the different platforms.
                    data.add(createJsonRecord(createRecord(
                        timestamp, String.valueOf(department) + "_" + String.valueOf(user), String.valueOf(department))));
                }
                postData(job.getId(), data.stream().collect(Collectors.joining()));
            }
            timestamp += bucketSpan.seconds();
        }

        closeJob(job.getId());

        // Assert we haven't violated the limit too much
        GetJobsStatsAction.Response.JobStats jobStats = getJobStats(job.getId()).get(0);
        ModelSizeStats modelSizeStats = jobStats.getModelSizeStats();
        assertThat(modelSizeStats.getModelBytes(), lessThan(31500000L));
        assertThat(modelSizeStats.getModelBytes(), greaterThan(24000000L));
        assertThat(modelSizeStats.getMemoryStatus(), equalTo(ModelSizeStats.MemoryStatus.HARD_LIMIT));
    }

    /**
     * Streams an ever-growing set of distinct over-field values into a sum detector with a 110MB
     * limit and asserts the model is capped (HARD_LIMIT) within the expected byte range.
     */
    @AwaitsFix(bugUrl = "https://github.com/elastic/ml-cpp/pulls/122")
    public void testManyDistinctOverFields() throws Exception {
        Detector.Builder detector = new Detector.Builder("sum", "value");
        detector.setOverFieldName("user");

        TimeValue bucketSpan = TimeValue.timeValueHours(1);
        AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(detector.build()));
        analysisConfig.setBucketSpan(bucketSpan);
        DataDescription.Builder dataDescription = new DataDescription.Builder();
        dataDescription.setTimeFormat("epoch");
        Job.Builder job = new Job.Builder("autodetect-memory-limit-test-too-many-distinct-over-fields");
        job.setAnalysisConfig(analysisConfig);
        job.setDataDescription(dataDescription);

        // Set the memory limit to 110MB
        AnalysisLimits limits = new AnalysisLimits(110L, null);
        job.setAnalysisLimits(limits);

        registerJob(job);
        putJob(job);
        openJob(job.getId());

        long now = Instant.now().getEpochSecond();
        long timestamp = now - 15 * bucketSpan.seconds();
        int user = 0;
        while (timestamp < now) {
            List<String> data = new ArrayList<>();
            for (int i = 0; i < 10000; i++) {
                // It's important that the values used here are either always represented in less than 16 UTF-8 bytes or
                // always represented in more than 22 UTF-8 bytes. Otherwise platform differences in when the small string
                // optimisation is used will make the results of this test very different for the different platforms.
                Map<String, Object> record = new HashMap<>();
                record.put("time", timestamp);
                record.put("user", user++);
                record.put("value", 42.0);
                data.add(createJsonRecord(record));
            }
            postData(job.getId(), data.stream().collect(Collectors.joining()));
            timestamp += bucketSpan.seconds();
        }

        closeJob(job.getId());

        // Assert we haven't violated the limit too much
        GetJobsStatsAction.Response.JobStats jobStats = getJobStats(job.getId()).get(0);
        ModelSizeStats modelSizeStats = jobStats.getModelSizeStats();
        assertThat(modelSizeStats.getModelBytes(), lessThan(116000000L));
        assertThat(modelSizeStats.getModelBytes(), greaterThan(90000000L));
        assertThat(modelSizeStats.getMemoryStatus(), equalTo(ModelSizeStats.MemoryStatus.HARD_LIMIT));
    }

    /** Builds the standard three-field test record (time/user/department). */
    private static Map<String, Object> createRecord(long timestamp, String user, String department) {
        Map<String, Object> record = new HashMap<>();
        record.put("time", timestamp);
        record.put("user", user);
        record.put("department", department);
        return record;
    }
}
package core.access.iterator; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.util.Map; import com.google.common.io.ByteStreams; import core.access.Partition; import core.access.Query.FilterQuery; import core.index.MDIndex.BucketCounts; import core.index.robusttree.RNode; public class RepartitionIterator extends PartitionIterator{ private FilterQuery query; private RNode newIndexTree; protected String zookeeperHosts; protected Map<Integer,Partition> newPartitions; public RepartitionIterator(){ } public RepartitionIterator(String iteratorString){ try { readFields(ByteStreams.newDataInput(iteratorString.getBytes())); } catch (IOException e) { e.printStackTrace(); throw new RuntimeException("Failed to read the fields"); } } public RepartitionIterator(FilterQuery query, RNode newIndexTree){ this.query = query; this.newIndexTree = newIndexTree; } public void setZookeeper(String zookeeperHosts){ this.zookeeperHosts = zookeeperHosts; } public DistributedRepartitionIterator createDistributedIterator(){ DistributedRepartitionIterator itr = new DistributedRepartitionIterator(query, newIndexTree); itr.setZookeeper(zookeeperHosts); return itr; } public FilterQuery getQuery(){ return this.query; } public RNode getIndexTree(){ return this.newIndexTree; } @Override public void setPartition(Partition partition){ super.setPartition(partition); } @Override protected boolean isRelevant(IteratorRecord record){ int id = newIndexTree.getBucketId(record); Partition p; if(newPartitions.containsKey(id)){ p = newPartitions.get(id); } else{ p = partition.clone(); p.setPartitionId(id); newPartitions.put(id, p); } p.write(record.getBytes(), record.getOffset(), record.getLength()); return query.qualifies(record); } @Override protected void finalize(){ BucketCounts c = new BucketCounts(zookeeperHosts); for(Partition p: newPartitions.values()){ p.store(true); c.setToBucketCount(p.getPartitionId(), p.getRecordCount()); } partition.drop(); 
c.removeBucketCount(partition.getPartitionId()); c.close(); } @Override public void write(DataOutput out) throws IOException{ query.write(out); byte[] indexBytes = newIndexTree.marshall().getBytes(); out.writeInt(indexBytes.length); out.write(indexBytes); out.writeBytes(zookeeperHosts+"\n"); } @Override public void readFields(DataInput in) throws IOException{ query = new FilterQuery(); query.readFields(in); newIndexTree = new RNode(); byte[] indexBytes = new byte[in.readInt()]; in.readFully(indexBytes); newIndexTree.unmarshall(indexBytes); zookeeperHosts = in.readLine(); } public static RepartitionIterator read(DataInput in) throws IOException { RepartitionIterator it = new RepartitionIterator(); it.readFields(in); return it; } }
package auction;

/* 1990-09-10 lifsong initial version
 * 1996-09-13 fred add check if file exists
 * 1996-09-15 fred add sorting buying info
 * 1996-09-19 simon add sorting selling info
 */

import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Set;

/**
 * Opening-auction worker: polls every 5 seconds and, inside the 09:25–09:29
 * pre-open window, loads close prices plus sorted buy/sell order books from
 * files in preparation for the aggregation auction (aggregation itself is
 * still a TODO).
 */
public class Auction implements Runnable {

    private static FileOperation fOp = new FileOperation();
    // Tolerance for treating two float prices as equal in the comparators.
    private static final float EPS = 1e-2f;

    /**
     * Main polling loop: sleeps 5s between checks; when inside the auction
     * window and all three input files exist, loads and sorts the order books.
     */
    @Override
    public void run() {
        while (true) {
            System.out.println("Auction app is running!");
            try {
                Thread.sleep(5000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            if (isTime2Run()) {
                /* pre-condition: ask if the files are existing */
                if (!fOp.isFileExisting(FileOperation.getClosePriceFilename())) {
                    System.out.println("ClosePriceFile does not exist!");
                    continue;
                } else if (!fOp.isFileExisting(FileOperation.getBuyingInfoFilename())) {
                    System.out.println("BuyingInfoFile does not exist!");
                    continue;
                } else if (!fOp.isFileExisting(FileOperation.getSellingInfoFilename())) {
                    System.out.println("SellingInfoFile does not exist!");
                    continue;
                }

                /* get buying info and sort */
                ArrayList<Buying> buyingInfo = fOp.getAllBuyingInfo();
                sortBuyingInfo(buyingInfo);

                /* get selling info and sort */
                ArrayList<Selling> sellingInfo = fOp.getAllSellingInfo();
                sortSellingInfo(sellingInfo);

                /* get close prices */
                ArrayList<ClosePrice> closePrices = fOp.getAllClosePrices();

                /*
                 * ToDo: filter out stocks which exist only in one of buying or
                 * selling list.
                 */
                /* ToDo: do the aggregation auction */
                /* ToDo: update the matrix screen */
            }
        }
    }

    /** @return true only between 09:25:00 and 09:29:59 local time. */
    private boolean isTime2Run() {
        Calendar c = Calendar.getInstance();
        if (c.get(Calendar.HOUR_OF_DAY) != 9) {
            return false;
        }
        if ((c.get(Calendar.MINUTE) < 25) || (c.get(Calendar.MINUTE) >= 30)) {
            return false;
        }
        return true;
    }

    /**
     * Returns the stocks present on BOTH sides of the book (set intersection).
     *
     * <p>Rewritten from an O(n*m) nested loop to an O(n+m) {@code retainAll};
     * the result is identical (intersection of the two id sets).
     */
    private Set<Integer> findTradingStocks(Set<Integer> sellingStocks,
            Set<Integer> buyingStocks) {
        Set<Integer> tradingStocks = new HashSet<Integer>(sellingStocks);
        tradingStocks.retainAll(buyingStocks);
        return tradingStocks;
    }

    /**
     * Orders buys by descending price (ties within EPS broken by time,
     * earliest first).
     *
     * <p>NOTE(review): the EPS-based equality is not transitive, so this
     * comparator can violate the Comparator contract for pathological inputs.
     */
    private class SortBuyingInfo implements Comparator<Buying> {
        @Override
        public int compare(Buying buy1, Buying buy2) {
            if (Math.abs(buy1.price - buy2.price) < EPS) {
                return buy1.time.compareTo(buy2.time);
            }
            if (buy1.price > buy2.price) {
                return -1;
            } else if (buy1.price < buy2.price) {
                return 1;
            }
            return 0;
        }
    }

    /** Sorts the buy book in place (best price first) and prints each entry. */
    private void sortBuyingInfo(ArrayList<Buying> buyingInfo) {
        Collections.sort(buyingInfo, new SortBuyingInfo());
        for (Buying buy : buyingInfo) {
            System.out.println(buy);
        }
    }

    /**
     * Orders sells by ascending price (ties within EPS broken by time,
     * earliest first). Same EPS caveat as {@link SortBuyingInfo}.
     */
    private class SortSellingInfo implements Comparator<Selling> {
        @Override
        public int compare(Selling sell1, Selling sell2) {
            if (Math.abs(sell1.price - sell2.price) < EPS) {
                return sell1.time.compareTo(sell2.time);
            }
            if (sell1.price < sell2.price) {
                return -1;
            } else if (sell1.price > sell2.price) {
                return 1;
            }
            return 0;
        }
    }

    /** Sorts the sell book in place (best price first) and prints each entry. */
    private void sortSellingInfo(ArrayList<Selling> sellingInfo) {
        Collections.sort(sellingInfo, new SortSellingInfo());
        for (Selling sell : sellingInfo) {
            System.out.println(sell);
        }
    }
}
package dr.app.beagle.tools;

import java.util.ArrayList;
import java.util.List;
import java.util.Locale;

import beagle.Beagle;
import beagle.BeagleFactory;
import dr.app.beagle.evomodel.sitemodel.BranchSubstitutionModel;
import dr.app.beagle.evomodel.sitemodel.EpochBranchSubstitutionModel;
import dr.app.beagle.evomodel.sitemodel.GammaSiteRateModel;
import dr.app.beagle.evomodel.sitemodel.HomogenousBranchSubstitutionModel;
import dr.app.beagle.evomodel.substmodel.FrequencyModel;
import dr.app.beagle.evomodel.substmodel.HKY;
import dr.app.beagle.evomodel.substmodel.SubstitutionModel;
import dr.app.beagle.evomodel.treelikelihood.BufferIndexHelper;
import dr.evolution.alignment.Alignment;
import dr.evolution.alignment.SimpleAlignment;
import dr.evolution.datatype.Codons;
import dr.evolution.datatype.DataType;
import dr.evolution.datatype.Nucleotides;
import dr.evolution.io.NewickImporter;
import dr.evolution.sequence.Sequence;
import dr.evolution.tree.NodeRef;
import dr.evolution.tree.Tree;
import dr.evomodel.branchratemodel.BranchRateModel;
import dr.evomodel.branchratemodel.DefaultBranchRateModel;
import dr.evomodel.substmodel.YangCodonModel;
import dr.evomodel.tree.TreeModel;
import dr.inference.model.Parameter;
import dr.math.MathUtils;

/**
 * Simulates sequence alignments down a tree using BEAGLE-computed transition
 * probabilities: per-site rate categories are drawn from the site model, a root
 * sequence is drawn from the frequency model (or supplied via
 * {@link #setAncestralSequence(Sequence)}), and states are propagated from root
 * to tips by sampling each child state from the parent state's transition row.
 */
public class BeagleSequenceSimulator {

	private TreeModel treeModel;
	private GammaSiteRateModel siteModel;
	private BranchSubstitutionModel branchSubstitutionModel;
	// Number of sites to simulate (codon sites when dataType is Codons).
	private int replications;
	private FrequencyModel freqModel;
	private DataType dataType;
	private int categoryCount;
	private int eigenCount;
	private Beagle beagle;
	private BufferIndexHelper eigenBufferHelper;
	private BufferIndexHelper matrixBufferHelper;
	private int stateCount;

	// Optional fixed root sequence; when unset the root is drawn from freqModel.
	private boolean has_ancestralSequence = false;
	private Sequence ancestralSequence = null;
	// Scratch: [category][stateCount * stateCount] transition matrices for the
	// branch most recently processed by getTransitionProbabilities().
	private double[][] probabilities;

	/**
	 * Allocates a BEAGLE instance sized for the given tree and models.
	 *
	 * <p>NOTE(review): the {@code branchRateModel} parameter is accepted but
	 * never stored or used in this class — confirm whether branch rates are
	 * meant to scale branch lengths here.
	 */
	public BeagleSequenceSimulator(TreeModel treeModel,
			BranchSubstitutionModel branchSubstitutionModel,
			GammaSiteRateModel siteModel,
			BranchRateModel branchRateModel,
			FrequencyModel freqModel,
			int replications
			) {

		this.treeModel = treeModel;
		this.siteModel = siteModel;
		this.replications = replications;
		this.freqModel = freqModel;
		this.dataType = freqModel.getDataType();
		this.branchSubstitutionModel = branchSubstitutionModel;
		this.eigenCount = branchSubstitutionModel.getEigenCount();

//		this.freqModel = siteModel.getSubstitutionModel().getFrequencyModel();
//		this.branchSubstitutionModel = (BranchSubstitutionModel) siteModel.getModel(0);

		int tipCount = treeModel.getExternalNodeCount();
		int nodeCount = treeModel.getNodeCount();
		int internalNodeCount = treeModel.getInternalNodeCount();
		int scaleBufferCount = internalNodeCount + 1;
		int compactPartialsCount = tipCount;
		// One BEAGLE "pattern" per simulated site.
		int patternCount = replications;
		int stateCount = dataType.getStateCount();

		this.categoryCount = siteModel.getCategoryCount();
		this.probabilities = new double[categoryCount][stateCount * stateCount];
		this.stateCount = stateCount;

		// one partials buffer for each tip and two for each internal node (for store restore)
		BufferIndexHelper partialBufferHelper = new BufferIndexHelper(nodeCount, tipCount);

		// two eigen buffers for each decomposition for store and restore.
		eigenBufferHelper = new BufferIndexHelper(eigenCount, 0);

		// two matrices for each node less the root
		matrixBufferHelper = new BufferIndexHelper(nodeCount, 0);

		// null implies no restrictions
		int[] resourceList = null;
		long preferenceFlags = 0;
		long requirementFlags = 0;

		beagle = BeagleFactory.loadBeagleInstance(
				tipCount,
				partialBufferHelper.getBufferCount(),
				compactPartialsCount,
				stateCount,
				patternCount,
				eigenBufferHelper.getBufferCount(),
				matrixBufferHelper.getBufferCount()
						+ this.branchSubstitutionModel.getExtraBufferCount(treeModel),
				categoryCount,
				scaleBufferCount,
				resourceList,
				preferenceFlags,
				requirementFlags
				);

	}// END: Constructor

	/**
	 * Fixes the root sequence instead of drawing it from the frequency model.
	 * The sequence must cover {@code replications} sites (3 chars per site for
	 * codon data — see {@link #sequence2intArray(Sequence)}).
	 */
	public void setAncestralSequence(Sequence seq) {
		ancestralSequence = seq;
		has_ancestralSequence = true;
	}// END: setAncestralSequence

	/**
	 * Converts a sequence to per-site state indices; codon data consumes three
	 * characters per site.
	 */
	private int[] sequence2intArray(Sequence seq) {

		int array[] = new int[replications];

		if (dataType instanceof Codons) {

			int k = 0;
			for (int i = 0; i < replications; i++) {
				// System.err.println(i + ": " + seq.getChar(k) + "" + seq.getChar(k + 1) + "" + seq.getChar(k + 2));
				array[i] = ((Codons) dataType).getState(seq.getChar(k),
						seq.getChar(k + 1), seq.getChar(k + 2));
				k += 3;
			}// END: replications loop

		} else {

			for (int i = 0; i < replications; i++) {
				array[i] = dataType.getState(seq.getChar(i));
			}// END: replications loop

		}// END: dataType check

		return array;
	}// END: sequence2intArray

	// TODO:
	/**
	 * Inverse of {@link #sequence2intArray(Sequence)}: renders state indices as
	 * a character sequence attributed to {@code node}'s taxon.
	 */
	private Sequence intArray2Sequence(int[] seq, NodeRef node) {

		StringBuilder sSeq = new StringBuilder();

		if (dataType instanceof Codons) {

			for (int i = 0; i < replications; i++) {
				sSeq.append(dataType.getTriplet(seq[i]));
				// System.err.println(seq[i] + " " + dataType.getTriplet(seq[i]));
			}// END: replications loop

			// System.exit(-1);

		} else {

			for (int i = 0; i < replications; i++) {
				sSeq.append(dataType.getCode(seq[i]));
			}// END: replications loop

		}// END: dataType check

		return new Sequence(treeModel.getNodeTaxon(node), sSeq.toString());
	}// END: intArray2Sequence

	/**
	 * Runs one simulation: draws per-site categories, a root sequence, sets up
	 * the eigen decompositions and category rates in BEAGLE, then traverses the
	 * tree sampling child states. Returns an alignment of the tip sequences.
	 *
	 * <p>Stochastic: results depend on the state of {@code MathUtils}' RNG.
	 */
	public Alignment simulate() {

		SimpleAlignment alignment = new SimpleAlignment();
		alignment.setDataType(dataType);
		alignment.setReportCountStatistics(false);

		NodeRef root = treeModel.getRoot();

		// Draw a fixed rate category per site; reused for every branch.
		double[] categoryProbs = siteModel.getCategoryProportions();
		int[] category = new int[replications];
		for (int i = 0; i < replications; i++) {
			category[i] = MathUtils.randomChoicePDF(categoryProbs);
		}

		int[] seq = new int[replications];
		if (has_ancestralSequence) {

			seq = sequence2intArray(ancestralSequence);

		} else {

			for (int i = 0; i < replications; i++) {
				seq[i] = MathUtils.randomChoicePDF(freqModel.getFrequencies());
			}

		}// END: ancestral sequence check

		// Flip then load each eigen decomposition into BEAGLE.
		for (int i = 0; i < eigenCount; i++) {

			eigenBufferHelper.flipOffset(i);

			branchSubstitutionModel.setEigenDecomposition(
					beagle,
					i,
					eigenBufferHelper,
					0
					);

		}

		double[] categoryRates = siteModel.getCategoryRates();
		beagle.setCategoryRates(categoryRates);

//		double[] categoryWeights = gammaSiteRateModel.getCategoryProportions();
//		beagle.setCategoryWeights(0, categoryWeights);

//		double[] frequencies = branchSubstitutionModel.getStateFrequencies(0);
//		beagle.setStateFrequencies(0, frequencies);

		traverse(root, seq, category, alignment);

		return alignment;
	}// END: simulate

	/**
	 * Recursively samples each child's sequence from the parent's, row by row of
	 * the per-category transition matrix, and adds tip sequences to
	 * {@code alignment}.
	 */
	private void traverse(NodeRef node, int[] parentSequence, int[] category,
			SimpleAlignment alignment) {

		for (int iChild = 0; iChild < treeModel.getChildCount(node); iChild++) {

			NodeRef child = treeModel.getChild(node, iChild);
			int[] sequence = new int[replications];
			double[] cProb = new double[stateCount];

			// Fills this.probabilities for the branch above 'child'.
			getTransitionProbabilities(treeModel, child, probabilities);

			for (int i = 0; i < replications; i++) {
				// Row of the transition matrix for the parent's state.
				System.arraycopy(probabilities[category[i]],
						parentSequence[i] * stateCount, cProb, 0, stateCount);
				sequence[i] = MathUtils.randomChoicePDF(cProb);
			}

			if (treeModel.getChildCount(child) == 0) {
				alignment.addSequence(intArray2Sequence(sequence, child));
			}

			traverse(treeModel.getChild(node, iChild), sequence, category, alignment);

		}// END: child nodes loop

	}// END: traverse

	/**
	 * Asks BEAGLE for the transition matrices of the branch above {@code node}
	 * and unpacks them into {@code probabilities[category][...]}.
	 *
	 * <p>NOTE(review): the arraycopy source offset {@code i * stateCount} looks
	 * suspicious — for a flat [category][stateCount*stateCount] layout one would
	 * expect {@code i * stateCount * stateCount}. Confirm against BEAGLE's
	 * getTransitionMatrix buffer layout before changing.
	 */
	private void getTransitionProbabilities(Tree tree, NodeRef node,
			double[][] probabilities) {

		int nodeNum = node.getNumber();
		matrixBufferHelper.flipOffset(nodeNum);
		int branchIndex = matrixBufferHelper.getOffsetIndex(nodeNum);
		int eigenIndex = branchSubstitutionModel.getBranchIndex(tree, node, branchIndex);
		int count = 1;

		branchSubstitutionModel.updateTransitionMatrices(
				beagle,
				eigenIndex,
				eigenBufferHelper,
				new int[] { branchIndex },
				null,
				null,
				new double[] { tree.getBranchLength(node) },
				count
				);

		double transitionMatrix[] = new double[categoryCount * stateCount * stateCount];

		beagle.getTransitionMatrix(branchIndex,
				transitionMatrix
				);

		for (int i = 0; i < categoryCount; i++) {
			System.arraycopy(transitionMatrix, i * stateCount, probabilities[i], 0,
					stateCount * stateCount);
		}

//		System.out.println("eigenIndex:" + eigenIndex);
//		System.out.println("bufferIndex: " + branchIndex);
//		System.out.println("weight: " + tree.getBranchLength(node));
//		print2DArray(probabilities);

	}// END: getTransitionProbabilities

	/** Demo entry point; only the codon demo is currently enabled. */
	public static void main(String[] args) {

		// simulateEpochModel();
		// simulateHKY();
		simulateCodon();

	} // END: main

	/**
	 * Codon-model demo. NOTE(review): the simulator invocation is commented out,
	 * so this currently only constructs the models and produces no output.
	 */
	static void simulateCodon() {

		try {

			int sequenceLength = 10;

			// create tree
			NewickImporter importer = new NewickImporter(
					"(SimSeq1:73.7468,(SimSeq2:25.256989999999995,SimSeq3:45.256989999999995):18.48981);");
			Tree tree = importer.importTree(null);
			TreeModel treeModel = new TreeModel(tree);

			// create Frequency Model (61 universal codons, ~uniform)
			Parameter freqs = new Parameter.Default(new double[] {
					0.0163936, 0.01639344, 0.01639344, 0.01639344, 0.01639344,
					0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344,
					0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344,
					0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344,
					0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344,
					0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344,
					0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344,
					0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344,
					0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344,
					0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344,
					0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344,
					0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344,
					0.01639344 });
			Codons codonDataType = Codons.UNIVERSAL;
			dr.evomodel.substmodel.FrequencyModel freqModel =
					new dr.evomodel.substmodel.FrequencyModel(codonDataType, freqs);

			// create codon substitution model
			Parameter kappa = new Parameter.Default(1, 10);
			Parameter omega = new Parameter.Default(1, 10);
			YangCodonModel yangCodonModel = new YangCodonModel(codonDataType, omega,
					kappa, freqModel);

			// HKY hky = new HKY(kappa, freqModel);
			// HomogenousBranchSubstitutionModel substitutionModel = new HomogenousBranchSubstitutionModel(hky, freqModel);

			// create site model
			// GammaSiteRateModel siteRateModel = new GammaSiteRateModel("siteModel");
			// siteRateModel.addModel(substitutionModel);

			// create branch rate model
			// BranchRateModel branchRateModel = new DefaultBranchRateModel();

			// feed to sequence simulator and generate leaves
			// BeagleSequenceSimulator beagleSequenceSimulator = new BeagleSequenceSimulator(
			// treeModel, //
			// substitutionModel,//
			// siteRateModel, //
			// branchRateModel, //
			// freqModel, //
			// sequenceLength //

			// Sequence ancestralSequence = new Sequence();
			// ancestralSequence.appendSequenceString("AAAAAAAAAA");
			// beagleSequenceSimulator.setAncestralSequence(ancestralSequence);

			// System.out.println(beagleSequenceSimulator.simulate().toString());

		} catch (Exception e) {
			e.printStackTrace();
		}// END: try-catch block

	}// END: simulateCodon

	/** HKY nucleotide-model demo: simulates 10 sites from a fixed root "AAAAAAAAAA". */
	static void simulateHKY() {

		try {

			int sequenceLength = 10;

			// create tree
			NewickImporter importer = new NewickImporter(
					"(SimSeq1:73.7468,(SimSeq2:25.256989999999995,SimSeq3:45.256989999999995):18.48981);");
			Tree tree = importer.importTree(null);
			TreeModel treeModel = new TreeModel(tree);

			// create Frequency Model
			Parameter freqs = new Parameter.Default(new double[] { 0.25, 0.25,
					0.25, 0.25 });
			FrequencyModel freqModel = new FrequencyModel(Nucleotides.INSTANCE,
					freqs);

			// create substitution model
			Parameter kappa = new Parameter.Default(1, 10);
			HKY hky = new HKY(kappa, freqModel);
			HomogenousBranchSubstitutionModel substitutionModel =
					new HomogenousBranchSubstitutionModel(hky, freqModel);

			// create site model
			GammaSiteRateModel siteRateModel = new GammaSiteRateModel("siteModel");
			// siteRateModel.addModel(substitutionModel);

			// create branch rate model
			BranchRateModel branchRateModel = new DefaultBranchRateModel();

			// feed to sequence simulator and generate leaves
			BeagleSequenceSimulator beagleSequenceSimulator = new BeagleSequenceSimulator(
					treeModel,
					substitutionModel,
					siteRateModel,
					branchRateModel,
					freqModel,
					sequenceLength
					);

			Sequence ancestralSequence = new Sequence();
			ancestralSequence.appendSequenceString("AAAAAAAAAA");
			beagleSequenceSimulator.setAncestralSequence(ancestralSequence);

			System.out.println(beagleSequenceSimulator.simulate().toString());

		} catch (Exception e) {
			e.printStackTrace();
		}// END: try-catch block

	}// END: simulateHKY

	/** Two-epoch HKY demo: simulates 10 sites from a fixed root "TCAGGTCAAG". */
	static void simulateEpochModel() {

		try {

			int sequenceLength = 10;

			// create tree
			NewickImporter importer = new NewickImporter(
					"(SimSeq1:73.7468,(SimSeq2:25.256989999999995,SimSeq3:45.256989999999995):18.48981);");
			Tree tree = importer.importTree(null);
			TreeModel treeModel = new TreeModel(tree);

			// create Frequency Model
			Parameter freqs = new Parameter.Default(new double[] { 0.25, 0.25,
					0.25, 0.25 });
			FrequencyModel freqModel = new FrequencyModel(Nucleotides.INSTANCE,
					freqs);
			List<FrequencyModel> frequencyModelList = new ArrayList<FrequencyModel>();
			frequencyModelList.add(freqModel);

			// create Epoch Model
			Parameter kappa1 = new Parameter.Default(1, 10);
			Parameter kappa2 = new Parameter.Default(1, 10);
			HKY hky1 = new HKY(kappa1, freqModel);
			HKY hky2 = new HKY(kappa2, freqModel);
			List<SubstitutionModel> substModelList = new ArrayList<SubstitutionModel>();
			substModelList.add(hky1);
			substModelList.add(hky2);

			Parameter epochTimes = new Parameter.Default(1, 20);
			EpochBranchSubstitutionModel substitutionModel = new EpochBranchSubstitutionModel(
					substModelList,
					frequencyModelList,
					epochTimes
					);

			// create site model
			GammaSiteRateModel siteRateModel = new GammaSiteRateModel("siteModel");
			siteRateModel.addModel(substitutionModel);

			// create branch rate model
			BranchRateModel branchRateModel = new DefaultBranchRateModel();

			// feed to sequence simulator and generate leaves
			BeagleSequenceSimulator beagleSequenceSimulator = new BeagleSequenceSimulator(
					treeModel,
					substitutionModel,
					siteRateModel,
					branchRateModel,
					freqModel,
					sequenceLength
					);

			Sequence ancestralSequence = new Sequence();
			ancestralSequence.appendSequenceString("TCAGGTCAAG");
			beagleSequenceSimulator.setAncestralSequence(ancestralSequence);

			System.out.println(beagleSequenceSimulator.simulate().toString());

		} catch (Exception e) {
			e.printStackTrace();
		}// END: try-catch block

	}// END : simulateEpochModel

	/** Debug helper: prints each element on its own line. */
	public static void printArray(int[] category) {
		for (int i = 0; i < category.length; i++) {
			System.out.println(category[i]);
		}
	}// END: printArray

	/** Debug helper: prints each element with 20 decimal places (US locale). */
	public static void printArray(double[] matrix) {
		for (int i = 0; i < matrix.length; i++) {
			// System.out.println(matrix[i]);
			System.out.println(String.format(Locale.US, "%.20f", matrix[i]));
		}
		System.out.print("\n");
	}// END: printArray

	/** Debug helper: prints a 2D array row by row. (Instance method in the original.) */
	public void print2DArray(double[][] array) {
		for (int row = 0; row < array.length; row++) {
			for (int col = 0; col < array[row].length; col++) {
				System.out.print(array[row][col] + " ");
			}
			System.out.print("\n");
		}
	}// END: print2DArray

	/** Debug helper: prints a 2D int array row by row. */
	public static void print2DArray(int[][] array) {
		for (int row = 0; row < array.length; row++) {
			for (int col = 0; col < array[row].length; col++) {
				System.out.print(array[row][col] + " ");
			}
			System.out.print("\n");
		}
	}// END: print2DArray

} // END: class
package dr.app.beauti.tipdatepanel; import dr.app.beauti.options.DateGuesser; import dr.app.beauti.options.STARBEASTOptions; import dr.app.beauti.util.TextUtil; import dr.app.gui.components.RealNumberField; import figtree.treeviewer.TreeViewer; import jam.mac.Utils; import jam.panels.OptionsPanel; import javax.swing.*; import javax.swing.border.EmptyBorder; import java.awt.*; import java.awt.event.*; import java.util.Calendar; import java.util.GregorianCalendar; import java.util.prefs.Preferences; /** * @author Andrew Rambaut * @author Alexei Drummond * @version $Id: PriorDialog.java,v 1.4 2006/09/05 13:29:34 rambaut Exp $ */ public class GuessDatesDialog { public static Preferences PREFS = Preferences.userNodeForPackage(TreeViewer.class); public static final String DELIMIT_RADIO_KEY = "delimitRadio"; public static final String ORDER_COMBO_KEY = "orderCombo"; public static final String PREFIX_TEXT_KEY = "prefixText"; public static final String REGEX_TEXT_KEY = "regexText"; public static final String PARSE_RADIO_KEY = "parseRadio"; public static final String OFFSET_CHECK_KEY = "offsetCheck"; public static final String OFFSET_TEXT_KEY = "offsetText"; public static final String UNLESS_CHECK_KEY = "unlessCheck"; public static final String UNLESS_TEXT_KEY = "unlessText"; public static final String OFFSET2_TEXT_KEY = "offset2Text"; public static final String DATE_FORMAT_TEXT_KEY = "dateFormatText"; private JFrame frame; private final OptionsPanel optionPanel; private final JRadioButton orderRadio = new JRadioButton("Defined just by its order", true); private final JComboBox orderCombo = new JComboBox(new String[]{"first", "second", "third", "fourth", "fourth from last", "third from last", "second from last", "last"}); private final JRadioButton prefixRadio = new JRadioButton("Defined by a prefix and its order", false); private final JTextField prefixText = new JTextField(16); private final JRadioButton regexRadio = new JRadioButton("Defined by regular expression 
(REGEX)", false); private final JTextField regexText = new JTextField(16); private final JRadioButton numericalRadio = new JRadioButton("Parse as a number", true); private final JRadioButton calendarRadio = new JRadioButton("Parse as a calendar date", true); private final JRadioButton calendar2Radio = new JRadioButton("Parse calendar dates with variable precision", true); private final JCheckBox offsetCheck = new JCheckBox("Add the following value to each: ", false); private final RealNumberField offsetText = new RealNumberField(); private final JCheckBox unlessCheck = new JCheckBox("...unless less than:", false); private final RealNumberField unlessText = new RealNumberField(); private final RealNumberField offset2Text = new RealNumberField(); private final JTextField dateFormatText = new JTextField(16); private String description = "Guess Dates for Taxa"; public GuessDatesDialog(final JFrame frame) { this.frame = frame; final int defaultDelimitRadioOption = PREFS.getInt(DELIMIT_RADIO_KEY, 0); final int defaultOrderCombo = PREFS.getInt(ORDER_COMBO_KEY, 0); final String defaultPrefixText = PREFS.get(PREFIX_TEXT_KEY, ""); final String defaultRegexText = PREFS.get(REGEX_TEXT_KEY, ""); final int defaultParseRadioOption = PREFS.getInt(PARSE_RADIO_KEY, 0); final boolean defaultOffsetCheckOption = PREFS.getBoolean(OFFSET_CHECK_KEY, false); final String defaultOffsetText = PREFS.get(OFFSET_TEXT_KEY, "1900"); final boolean defaultUnlessCheckOption = PREFS.getBoolean(UNLESS_CHECK_KEY, false); final String defaultUnlessText = PREFS.get(UNLESS_TEXT_KEY, "16"); final String defaultOffset2Text = PREFS.get(OFFSET2_TEXT_KEY, "2000"); final String defaultDateFormatText = PREFS.get(DATE_FORMAT_TEXT_KEY, "yyyy-MM-dd"); optionPanel = new OptionsPanel(12, 12); optionPanel.addLabel("The date is given by a numerical field in the taxon label that is:"); optionPanel.addSpanningComponent(orderRadio); // optionPanel.addSeparator(); optionPanel.addSpanningComponent(prefixRadio); final JLabel 
orderLabel = new JLabel("Order:"); optionPanel.addComponents(orderLabel, orderCombo); final JLabel prefixLabel = new JLabel("Prefix:"); optionPanel.addComponents(prefixLabel, prefixText); prefixLabel.setEnabled(false); prefixText.setEnabled(false); regexText.setEnabled(false); optionPanel.addComponents(regexRadio, regexText); optionPanel.addSeparator(); optionPanel.addSpanningComponent(numericalRadio); offsetText.setValue(1900); offsetText.setColumns(16); offsetText.setEnabled(false); optionPanel.addComponents(offsetCheck, offsetText); Calendar calendar = GregorianCalendar.getInstance(); int year = calendar.get(Calendar.YEAR) - 1999; unlessText.setValue(year); unlessText.setColumns(16); unlessText.setEnabled(false); optionPanel.addComponents(unlessCheck, unlessText); offset2Text.setValue(2000); offset2Text.setColumns(16); offset2Text.setEnabled(false); final JLabel offset2Label = new JLabel("...in which case add:"); optionPanel.addComponents(offset2Label, offset2Text); optionPanel.addSpanningComponent(calendarRadio); final JLabel dateFormatLabel = new JLabel("Date format:"); final JButton helpButton = new JButton(Utils.isMacOSX() ? 
"" : "?"); helpButton.putClientProperty("JButton.buttonType", "help"); JPanel panel = new JPanel(); panel.add(dateFormatText); panel.add(helpButton); panel.setOpaque(false); optionPanel.addComponents(dateFormatLabel, panel); dateFormatText.setText("yyyy-MM-dd"); optionPanel.addSpanningComponent(calendar2Radio); dateFormatLabel.setEnabled(false); dateFormatText.setEnabled(false); numericalRadio.setToolTipText("Parse the date field as a decimal number"); calendarRadio.setToolTipText("Parse the date field using a standard date format specification"); calendar2Radio.setToolTipText("Parse the date field yyyy[-mm[-dd]] with possibly missing month or day"); helpButton.addActionListener(new ActionListener() { public void actionPerformed(final ActionEvent actionEvent) { JScrollPane scrollPane = TextUtil.createHTMLScrollPane( DATE_FORMAT_HELP, new Dimension(560,480)); JOptionPane.showMessageDialog(frame, scrollPane, "Date format help", JOptionPane.PLAIN_MESSAGE); } }); offsetCheck.addItemListener(new ItemListener() { public void itemStateChanged(ItemEvent e) { offsetText.setEnabled(offsetCheck.isSelected()); unlessCheck.setEnabled(offsetCheck.isSelected()); unlessText.setEnabled(offsetCheck.isSelected() && unlessCheck.isSelected()); offset2Label.setEnabled(offsetCheck.isSelected() && unlessCheck.isSelected()); offset2Text.setEnabled(offsetCheck.isSelected() && unlessCheck.isSelected()); } }); unlessCheck.addItemListener(new ItemListener() { public void itemStateChanged(ItemEvent e) { unlessText.setEnabled(unlessCheck.isSelected()); offset2Label.setEnabled(unlessCheck.isSelected()); offset2Text.setEnabled(unlessCheck.isSelected()); } }); ButtonGroup group = new ButtonGroup(); group.add(orderRadio); group.add(prefixRadio); group.add(regexRadio); ItemListener listener = new ItemListener() { public void itemStateChanged(ItemEvent e) { orderLabel.setEnabled(!regexRadio.isSelected()); orderCombo.setEnabled(!regexRadio.isSelected()); 
prefixLabel.setEnabled(prefixRadio.isSelected()); prefixText.setEnabled(prefixRadio.isSelected()); regexText.setEnabled(regexRadio.isSelected()); } }; orderRadio.addItemListener(listener); prefixRadio.addItemListener(listener); regexRadio.addItemListener(listener); group = new ButtonGroup(); group.add(numericalRadio); group.add(calendarRadio); group.add(calendar2Radio); listener = new ItemListener() { public void itemStateChanged(ItemEvent e) { offsetCheck.setEnabled(numericalRadio.isSelected()); offsetText.setEnabled(numericalRadio.isSelected() && offsetCheck.isSelected()); unlessCheck.setEnabled(numericalRadio.isSelected() && offsetCheck.isSelected()); unlessText.setEnabled(numericalRadio.isSelected() && offsetCheck.isSelected() && unlessCheck.isSelected()); offset2Label.setEnabled(numericalRadio.isSelected() && offsetCheck.isSelected() && unlessCheck.isSelected()); offset2Text.setEnabled(numericalRadio.isSelected() && offsetCheck.isSelected() && unlessCheck.isSelected()); dateFormatLabel.setEnabled(calendarRadio.isSelected()); dateFormatText.setEnabled(calendarRadio.isSelected()); } }; numericalRadio.addItemListener(listener); calendarRadio.addItemListener(listener); calendar2Radio.addItemListener(listener); // set from preferences defaults... 
switch (defaultDelimitRadioOption) { case 0: orderRadio.setSelected(true); break; case 1: prefixRadio.setSelected(true); break; case 2: regexRadio.setSelected(true); break; default: throw new IllegalArgumentException("unknown radio option"); } orderCombo.setSelectedIndex(defaultOrderCombo); prefixText.setText(defaultPrefixText); regexText.setText(defaultRegexText); switch (defaultParseRadioOption) { case 0: numericalRadio.setSelected(true); break; case 1: calendarRadio.setSelected(true); break; case 2: calendar2Radio.setSelected(true); break; default: throw new IllegalArgumentException("unknown radio option"); } offsetCheck.setSelected(defaultOffsetCheckOption); offsetText.setText(defaultOffsetText); unlessCheck.setSelected(defaultUnlessCheckOption); unlessText.setText(defaultUnlessText); offset2Text.setText(defaultOffset2Text); dateFormatText.setText(defaultDateFormatText); } public int showDialog() { JOptionPane optionPane = new JOptionPane(optionPanel, JOptionPane.QUESTION_MESSAGE, JOptionPane.OK_CANCEL_OPTION, null, null, null); optionPane.setBorder(new EmptyBorder(12, 12, 12, 12)); final JDialog dialog = optionPane.createDialog(frame, description); dialog.pack(); dialog.setVisible(true); int result = JOptionPane.CANCEL_OPTION; Integer value = (Integer) optionPane.getValue(); if (value != null && value != -1) { result = value; setPreferencesFromDialog(); } return result; } private void setPreferencesFromDialog() { PREFS.putInt(DELIMIT_RADIO_KEY, (orderRadio.isSelected() ? 0 : (prefixRadio.isSelected() ? 1 : (regexRadio.isSelected() ? 2 : -1)))); PREFS.putInt(ORDER_COMBO_KEY, orderCombo.getSelectedIndex()); PREFS.put(PREFIX_TEXT_KEY, prefixText.getText()); PREFS.put(REGEX_TEXT_KEY, regexText.getText()); PREFS.putInt(PARSE_RADIO_KEY, (numericalRadio.isSelected() ? 0 : (calendarRadio.isSelected() ? 1 : (calendar2Radio.isSelected() ? 
2 : -1))));
        // Persist the remaining control state (this method opens before this chunk;
        // PREFS is presumably a java.util.prefs.Preferences node - declared above).
        PREFS.putBoolean(OFFSET_CHECK_KEY, offsetCheck.isSelected());
        PREFS.put(OFFSET_TEXT_KEY, offsetText.getText());
        PREFS.putBoolean(UNLESS_CHECK_KEY, unlessCheck.isSelected());
        PREFS.put(UNLESS_TEXT_KEY, unlessText.getText());
        PREFS.put(OFFSET2_TEXT_KEY, offset2Text.getText());
        PREFS.put(DATE_FORMAT_TEXT_KEY, dateFormatText.getText());
    }

    /**
     * Copies the dialog's current control state into the given {@link DateGuesser}.
     * <p>
     * The order combo maps its first four entries to field order counted from the
     * front; entries 4-7 apparently mirror them counted from the last field
     * (index 4..7 becomes order 3..0 with {@code fromLast} set) -- the
     * {@code 8 - order - 1} arithmetic implies an 8-entry combo; TODO confirm.
     * Exactly one of the order/prefix/regex radio buttons is expected to be
     * selected; otherwise an IllegalArgumentException is thrown.
     *
     * @param guesser the guesser to configure from the dialog controls
     */
    public void setupGuesser(DateGuesser guesser) {
        guesser.order = orderCombo.getSelectedIndex();
        guesser.fromLast = false;
        if (guesser.order > 3) {
            // Mirror indices 4..7 onto 3..0, counting fields from the end.
            guesser.fromLast = true;
            guesser.order = 8 - guesser.order - 1;
        }
        if (orderRadio.isSelected()) {
            guesser.guessType = DateGuesser.GuessType.ORDER;
        } else if (prefixRadio.isSelected()) {
            guesser.guessType = DateGuesser.GuessType.PREFIX;
            guesser.prefix = prefixText.getText();
        } else if (regexRadio.isSelected()) {
            guesser.guessType = DateGuesser.GuessType.REGEX;
            guesser.regex = regexText.getText();
        } else {
            throw new IllegalArgumentException("unknown radio button selected");
        }
        guesser.parseCalendarDatesAndPrecision = calendar2Radio.isSelected();
        guesser.parseCalendarDates = calendarRadio.isSelected();
        guesser.calendarDateFormat = dateFormatText.getText();
        // Reset numeric adjustments, then apply only the ones that are enabled.
        // NOTE(review): offset2 is only assigned when BOTH checkboxes are selected
        // and is never reset here, unlike offset/unlessLessThan -- confirm whether
        // a stale offset2 can leak through when unlessCheck is off.
        guesser.offset = 0.0;
        guesser.unlessLessThan = 0.0;
        if (offsetCheck.isSelected()) {
            guesser.offset = offsetText.getValue();
            if (unlessCheck.isSelected()) {
                guesser.unlessLessThan = unlessText.getValue();
                guesser.offset2 = offset2Text.getValue();
            }
        }
    }

    /** Sets the description shown for this dialog (field declared above this chunk). */
    public void setDescription(String description) {
        this.description = description;
    }

    // HTML help fragment describing java.text.SimpleDateFormat pattern letters
    // (taken from the SimpleDateFormat javadoc). Presumably rendered in the
    // dialog's help pane -- the consumer is outside this chunk. This is runtime
    // data: its content must not be altered.
    private static final String DATE_FORMAT_HELP =
            "<h4>Date and Time Patterns</h4>\n" +
            " <p>\n" +
            " Date and time formats are specified by <em>date and time pattern</em>\n" +
            " strings.\n" +
            " Within date and time pattern strings, unquoted letters from\n" +
            " <code>'A'</code> to <code>'Z'</code> and from <code>'a'</code> to\n" +
            " <code>'z'</code> are interpreted as pattern letters representing the\n" +
            " components of a date or time string.\n" +
            " Text can be quoted using single quotes (<code>'</code>) to avoid\n" +
            " interpretation.\n" +
            " <code>\"''\"</code> represents a single quote.\n" +
            " All other characters are not interpreted; they're simply copied into the\n" +
            " output string during formatting or matched against the input string\n" +
            " during parsing.\n" +
            " <p>\n" +
            " The following pattern letters are defined (all other characters from\n" +
            " <code>'A'</code> to <code>'Z'</code> and from <code>'a'</code> to\n" +
            " <code>'z'</code> are reserved):\n" +
            " <blockquote>\n" +
            " <table border=0 cellspacing=3 cellpadding=0 summary=\"Chart shows pattern letters, date/time component, presentation, and examples.\">\n" +
            " <tr bgcolor=\"#ccccff\">\n" +
            " <th align=left>Letter\n" +
            " <th align=left>Date or Time Component\n" +
            " <th align=left>Presentation\n" +
            " <th align=left>Examples\n" +
            " <tr>\n" +
            " <td><code>G</code>\n" +
            " <td>Era designator\n" +
            " <td><a href=\"#text\">Text</a>\n" +
            " <td><code>AD</code>\n" +
            " <tr bgcolor=\"#eeeeff\">\n" +
            " <td><code>y</code>\n" +
            " <td>Year\n" +
            " <td><a href=\"#year\">Year</a>\n" +
            " <td><code>1996</code>; <code>96</code>\n" +
            " <tr>\n" +
            " <td><code>M</code>\n" +
            " <td>Month in year\n" +
            " <td><a href=\"#month\">Month</a>\n" +
            " <td><code>July</code>; <code>Jul</code>; <code>07</code>\n" +
            " <tr bgcolor=\"#eeeeff\">\n" +
            " <td><code>w</code>\n" +
            " <td>Week in year\n" +
            " <td><a href=\"#number\">Number</a>\n" +
            " <td><code>27</code>\n" +
            " <tr>\n" +
            " <td><code>W</code>\n" +
            " <td>Week in month\n" +
            " <td><a href=\"#number\">Number</a>\n" +
            " <td><code>2</code>\n" +
            " <tr bgcolor=\"#eeeeff\">\n" +
            " <td><code>D</code>\n" +
            " <td>Day in year\n" +
            " <td><a href=\"#number\">Number</a>\n" +
            " <td><code>189</code>\n" +
            " <tr>\n" +
            " <td><code>d</code>\n" +
            " <td>Day in month\n" +
            " <td><a href=\"#number\">Number</a>\n" +
            " <td><code>10</code>\n" +
            " <tr bgcolor=\"#eeeeff\">\n" +
            " <td><code>F</code>\n" +
            " <td>Day of week in month\n" +
            " <td><a href=\"#number\">Number</a>\n" +
            " <td><code>2</code>\n" +
            " <tr>\n" +
            " <td><code>E</code>\n" +
            " <td>Day in week\n" +
            " <td><a href=\"#text\">Text</a>\n" +
            " <td><code>Tuesday</code>; <code>Tue</code>\n" +
            " <tr bgcolor=\"#eeeeff\">\n" +
            " <td><code>a</code>\n" +
            " <td>Am/pm marker\n" +
            " <td><a href=\"#text\">Text</a>\n" +
            " <td><code>PM</code>\n" +
            " <tr>\n" +
            " <td><code>H</code>\n" +
            " <td>Hour in day (0-23)\n" +
            " <td><a href=\"#number\">Number</a>\n" +
            " <td><code>0</code>\n" +
            " <tr bgcolor=\"#eeeeff\">\n" +
            " <td><code>k</code>\n" +
            " <td>Hour in day (1-24)\n" +
            " <td><a href=\"#number\">Number</a>\n" +
            " <td><code>24</code>\n" +
            " <tr>\n" +
            " <td><code>K</code>\n" +
            " <td>Hour in am/pm (0-11)\n" +
            " <td><a href=\"#number\">Number</a>\n" +
            " <td><code>0</code>\n" +
            " <tr bgcolor=\"#eeeeff\">\n" +
            " <td><code>h</code>\n" +
            " <td>Hour in am/pm (1-12)\n" +
            " <td><a href=\"#number\">Number</a>\n" +
            " <td><code>12</code>\n" +
            " <tr>\n" +
            " <td><code>m</code>\n" +
            " <td>Minute in hour\n" +
            " <td><a href=\"#number\">Number</a>\n" +
            " <td><code>30</code>\n" +
            " <tr bgcolor=\"#eeeeff\">\n" +
            " <td><code>s</code>\n" +
            " <td>Second in minute\n" +
            " <td><a href=\"#number\">Number</a>\n" +
            " <td><code>55</code>\n" +
            " <tr>\n" +
            " <td><code>S</code>\n" +
            " <td>Millisecond\n" +
            " <td><a href=\"#number\">Number</a>\n" +
            " <td><code>978</code>\n" +
            " <tr bgcolor=\"#eeeeff\">\n" +
            " <td><code>z</code>\n" +
            " <td>Time zone\n" +
            " <td><a href=\"#timezone\">General time zone</a>\n" +
            " <td><code>Pacific Standard Time</code>; <code>PST</code>; <code>GMT-08:00</code>\n" +
            " <tr>\n" +
            " <td><code>Z</code>\n" +
            " <td>Time zone\n" +
            " <td><a href=\"#rfc822timezone\">RFC 822 time zone</a>\n" +
            " <td><code>-0800</code>\n" +
            " </table>\n" +
            " </blockquote>\n" +
            " Pattern letters are usually repeated, as their number determines the\n" +
            " exact presentation:\n" +
            " <ul>\n" +
            " <li><strong><a name=\"text\">Text:</a></strong>\n" +
            " For formatting, if the number of pattern letters is 4 or more,\n" +
            " the full form is used; otherwise a short or abbreviated form\n" +
            " is used if available.\n" +
            " For parsing, both forms are accepted, independent of the number\n" +
            " of pattern letters.\n" +
            " <li><strong><a name=\"number\">Number:</a></strong>\n" +
            " For formatting, the number of pattern letters is the minimum\n" +
            " number of digits, and shorter numbers are zero-padded to this amount.\n" +
            " For parsing, the number of pattern letters is ignored unless\n" +
            " it's needed to separate two adjacent fields.\n" +
            " <li><strong><a name=\"year\">Year:</a></strong>\n" +
            " If the formatter's <A HREF=\"../../java/text/DateFormat.html#getCalendar()\"><CODE>Calendar</CODE></A> is the Gregorian\n" +
            " calendar, the following rules are applied.<br>\n" +
            " <ul>\n" +
            " <li>For formatting, if the number of pattern letters is 2, the year\n" +
            " is truncated to 2 digits; otherwise it is interpreted as a\n" +
            " <a href=\"#number\">number</a>.\n" +
            " <li>For parsing, if the number of pattern letters is more than 2,\n" +
            " the year is interpreted literally, regardless of the number of\n" +
            " digits. So using the pattern \"MM/dd/yyyy\", \"01/11/12\" parses to\n" +
            " Jan 11, 12 A.D.\n" +
            " <li>For parsing with the abbreviated year pattern (\"y\" or \"yy\"),\n" +
            " <code>SimpleDateFormat</code> must interpret the abbreviated year\n" +
            " relative to some century. It does this by adjusting dates to be\n" +
            " within 80 years before and 20 years after the time the <code>SimpleDateFormat</code>\n" +
            " instance is created. For example, using a pattern of \"MM/dd/yy\" and a\n" +
            " <code>SimpleDateFormat</code> instance created on Jan 1, 1997, the string\n" +
            " \"01/11/12\" would be interpreted as Jan 11, 2012 while the string \"05/04/64\"\n" +
            " would be interpreted as May 4, 1964.\n" +
            " During parsing, only strings consisting of exactly two digits, as defined by\n" +
            " <A HREF=\"../../java/lang/Character.html#isDigit(char)\"><CODE>Character.isDigit(char)</CODE></A>, will be parsed into the default century.\n" +
            " Any other numeric string, such as a one digit string, a three or more digit\n" +
            " string, or a two digit string that isn't all digits (for example, \"-1\"), is\n" +
            " interpreted literally. So \"01/02/3\" or \"01/02/003\" are parsed, using the\n" +
            " same pattern, as Jan 2, 3 AD. Likewise, \"01/02/-3\" is parsed as Jan 2, 4 BC.\n" +
            " </ul>\n" +
            " Otherwise, calendar system specific forms are applied.\n" +
            " For both formatting and parsing, if the number of pattern\n" +
            " letters is 4 or more, a calendar specific <A HREF=\"../../java/util/Calendar.html#LONG\">long form</A> is used. Otherwise, a calendar\n" +
            " specific <A HREF=\"../../java/util/Calendar.html#SHORT\">short or abbreviated form</A>\n" +
            " is used.\n" +
            " <li><strong><a name=\"month\">Month:</a></strong>\n" +
            " If the number of pattern letters is 3 or more, the month is\n" +
            " interpreted as <a href=\"#text\">text</a>; otherwise,\n" +
            " it is interpreted as a <a href=\"#number\">number</a>.\n" +
            " </ul>";
} // end of enclosing dialog class
package dr.evomodel.branchratemodel;

import dr.evolution.tree.NodeRef;
import dr.evolution.tree.Tree;
import dr.evolution.tree.TreeTrait;
import dr.evolution.tree.TreeTraitProvider;
import dr.evolution.util.TaxonList;
import dr.evomodel.tree.TreeModel;
import dr.evomodelxml.branchratemodel.LocalClockModelParser;
import dr.inference.model.Model;
import dr.inference.model.Parameter;
import dr.inference.model.Variable;

import java.util.*;

/**
 * A branch rate model that overlays "local clocks" on a global rate: individual
 * external branches, whole clades (matched by their exact tip set), or a trunk
 * (backbone) path from designated tips to the root. Each local clock carries its
 * own rate parameter, applied either relative to (multiplied into) or instead of
 * the global rate. The node-to-clock assignment is cached and lazily rebuilt
 * whenever the tree or the trunk index parameter changes.
 *
 * @author Andrew Rambaut
 * @version $Id: LocalClockModel.java,v 1.1 2005/04/05 09:27:48 rambaut Exp $
 */
public class LocalClockModel extends AbstractBranchRateModel {

    private TreeModel treeModel;
    // Local clocks keyed by tip (node number) for external-branch clocks.
    protected Map<Integer, LocalClock> localTipClocks = new HashMap<Integer, LocalClock>();
    // Local clocks keyed by the clade's exact tip BitSet.
    protected Map<BitSet, LocalClock> localCladeClocks = new HashMap<BitSet, LocalClock>();
    // At most one trunk (backbone) clock -- see addTrunkClock.
    protected LocalClock trunkClock = null;

    // When true, nodeClockMap is stale and must be rebuilt on next access.
    private boolean updateNodeClocks = true;
    // Cached assignment of tree nodes to their governing local clock (if any).
    private Map<NodeRef, LocalClock> nodeClockMap = new HashMap<NodeRef, LocalClock>();

    private final Parameter globalRateParameter;

    /**
     * @param treeModel           the tree this model listens to and rates
     * @param globalRateParameter the base rate applied to branches without a local clock
     */
    public LocalClockModel(TreeModel treeModel, Parameter globalRateParameter) {
        super(LocalClockModelParser.LOCAL_CLOCK_MODEL);
        this.treeModel = treeModel;
        addModel(treeModel);

        this.globalRateParameter = globalRateParameter;
        addVariable(globalRateParameter);

        // add the super class' tree traits (just the rate)
        helper.addTrait(this);
    }

    /**
     * Registers a local clock on the external branches of the given taxa.
     *
     * @throws Tree.MissingTaxonException if a taxon is not present in the tree
     */
    public void addExternalBranchClock(TaxonList taxonList, Parameter rateParameter, boolean isRelativeRate) throws Tree.MissingTaxonException {
        Set<Integer> tips = Tree.Utils.getTipsForTaxa(treeModel, taxonList);
        LocalClock clock = new LocalClock(rateParameter, isRelativeRate, tips, ClockType.EXTERNAL);
        for (int i : tips) {
            localTipClocks.put(i, clock);
        }
        addVariable(rateParameter);
    }

    /**
     * Registers a local clock on the clade spanned by the given taxa; the clade is
     * recognised during traversal by an exact match of its tip BitSet.
     *
     * @param includeStem  whether the clade's stem branch gets the local rate
     * @param excludeClade whether the branches inside the clade keep the outer rate
     * @throws Tree.MissingTaxonException if a taxon is not present in the tree
     */
    public void addCladeClock(TaxonList taxonList, Parameter rateParameter, boolean isRelativeRate, boolean includeStem, boolean excludeClade) throws Tree.MissingTaxonException {
        Set<Integer> tips = Tree.Utils.getTipsForTaxa(treeModel, taxonList);
        BitSet tipBitSet = Tree.Utils.getTipsBitSetForTaxa(treeModel, taxonList);
        LocalClock clock = new LocalClock(rateParameter, isRelativeRate, tips, includeStem, excludeClade);
        localCladeClocks.put(tipBitSet, clock);
        addVariable(rateParameter);
    }

    /**
     * Registers the (single) trunk clock: the backbone from the designated tip(s)
     * to the root. If indexParameter is non-null it selects one tip from the taxon
     * list at evaluation time; otherwise all listed tips anchor the trunk. Also
     * exposes a per-branch "trunk" trait ("T" on the trunk, "B" elsewhere).
     *
     * @throws Tree.MissingTaxonException if a taxon is not present in the tree
     */
    public void addTrunkClock(TaxonList taxonList, Parameter rateParameter, Parameter indexParameter, boolean isRelativeRate) throws Tree.MissingTaxonException {
        if (trunkClock != null) {
            throw new RuntimeException("Trunk already defined for this LocalClockModel");
        }
        List<Integer> tipList = new ArrayList<Integer>(Tree.Utils.getTipsForTaxa(treeModel, taxonList));
        trunkClock = new LocalClock(rateParameter, indexParameter, isRelativeRate, tipList, ClockType.TRUNK);
        addVariable(rateParameter);
        if (indexParameter != null) {
            addVariable(indexParameter);
        }

        helper.addTrait("trunk", new TreeTrait.S() {
            @Override
            public String getTraitName() {
                return "trunk";
            }

            @Override
            public Intent getIntent() {
                return Intent.BRANCH;
            }

            @Override
            public String getTrait(Tree tree, NodeRef node) {
                setupNodeClocks(tree);
                // Reference equality is deliberate: is this node governed by the trunk clock?
                if (nodeClockMap.get(node) == trunkClock) {
                    return "T";
                }
                return "B";
            }
        });
    }

    // Tree changed: invalidate the cached node-to-clock assignment.
    public void handleModelChangedEvent(Model model, Object object, int index) {
        updateNodeClocks = true;
        fireModelChanged();
    }

    // Only the trunk index parameter moves clocks between nodes; plain rate
    // changes leave the assignment intact.
    protected final void handleVariableChangedEvent(Variable variable, int index, Parameter.ChangeType type) {
        if (trunkClock != null && variable == trunkClock.indexParameter) {
            updateNodeClocks = true;
        }
        fireModelChanged();
    }

    protected void storeState() {
    }

    protected void restoreState() {
        // The restored state may have a different tree/index; rebuild lazily.
        updateNodeClocks = true;
    }

    protected void acceptState() {
    }

    // TreeTraitProvider overrides

    public TreeTrait[] getTreeTraits() {
        return helper.getTreeTraits();
    }

    public TreeTrait getTreeTrait(String key) {
        return helper.getTreeTrait(key);
    }

    // BranchRateModel implementation

    /**
     * Returns the rate for the branch above the given node: the global rate,
     * multiplied by a relative local rate or replaced by an absolute one.
     *
     * @throws IllegalArgumentException if node is the root (no branch above it)
     */
    public double getBranchRate(final Tree tree, final NodeRef node) {

        if (tree.isRoot(node)) {
            throw new IllegalArgumentException("root node doesn't have a rate!");
        }

        setupNodeClocks(tree);

        double rate = globalRateParameter.getParameterValue(0);

        LocalClock localClock = nodeClockMap.get(node);
        if (localClock != null) {
            if (localClock.isRelativeRate()) {
                rate *= localClock.getRateParameter().getParameterValue(0);
            } else {
                rate = localClock.getRateParameter().getParameterValue(0);
            }
        }

        return rate;
    }

    // Rebuilds nodeClockMap if it has been invalidated; otherwise a no-op.
    private void setupNodeClocks(final Tree tree) {
        if (updateNodeClocks) {
            nodeClockMap.clear();
            setupRateParameters(tree, tree.getRoot(), new BitSet());

            if (trunkClock != null) {
                // backbone will overwrite other local clocks
                // NOTE(review): setNodeClock only assigns when the node has no clock
                // yet (put-if-absent), so the trunk does NOT actually replace an
                // already-assigned clock -- confirm which precedence is intended.
                setupTrunkRates(tree, tree.getRoot());
            }

            updateNodeClocks = false;
        }
    }

    // Post-order traversal: accumulates each subtree's tip BitSet and applies
    // tip clocks at leaves and clade clocks where the tip set matches exactly.
    private void setupRateParameters(Tree tree, NodeRef node, BitSet tips) {
        LocalClock clock;

        if (tree.isExternal(node)) {
            tips.set(node.getNumber());
            clock = localTipClocks.get(node.getNumber());
        } else {
            for (int i = 0; i < tree.getChildCount(node); i++) {
                NodeRef child = tree.getChild(node, i);
                BitSet childTips = new BitSet();
                setupRateParameters(tree, child, childTips);
                tips.or(childTips);
            }
            clock = localCladeClocks.get(tips);
        }

        if (clock != null) {
            setNodeClock(tree, node, clock, clock.includeStem(), clock.excludeClade());
        }
    }

    // Marks the trunk: a node is on the backbone iff it is (or is an ancestor of)
    // a designated trunk tip. Returns true when this subtree touches the trunk.
    private boolean setupTrunkRates(Tree tree, NodeRef node) {
        LocalClock clock = null;

        if (tree.isExternal(node)) {
            if (trunkClock.indexParameter != null) {
                // Single trunk tip selected dynamically by the index parameter.
                if (trunkClock.tipList.get((int)trunkClock.indexParameter.getParameterValue(0)) == node.getNumber()) {
                    clock = trunkClock;
                }
            } else if (trunkClock.tips.contains(node.getNumber())) {
                clock = trunkClock;
            }
        } else {
            for (int i = 0; i < tree.getChildCount(node); i++) {
                NodeRef child = tree.getChild(node, i);
                if (setupTrunkRates(tree, child)) {
                    // if any of the descendants are backbone then this node is too
                    clock = trunkClock;
                }
            }
        }

        if (clock != null) {
            setNodeClock(tree, node, clock, clock.includeStem(), clock.excludeClade());
            return true;
        }

        return false;
    }

    // Recursively assigns the clock to this node's subtree (unless excludeClade)
    // and to the node itself (when includeStem). First assignment wins: a node
    // already in the map keeps its existing clock.
    private void setNodeClock(Tree tree, NodeRef node, LocalClock localClock, boolean includeStem, boolean excludeClade) {

        if (!tree.isExternal(node) && !excludeClade) {
            for (int i = 0; i < tree.getChildCount(node); i++) {
                NodeRef child = tree.getChild(node, i);
                setNodeClock(tree, child, localClock, true, false);
            }
        }

        if (includeStem && !nodeClockMap.containsKey(node)) {
            nodeClockMap.put(node, localClock);
        }
    }

    enum ClockType {
        CLADE,
        TRUNK,
        EXTERNAL
    }

    /**
     * Immutable description of one local clock: its rate parameter, how the rate
     * is applied, which tips it anchors to, and stem/clade inclusion flags.
     */
    private class LocalClock {
        // External-branch clock (EXTERNAL type): stem included, clade excluded.
        LocalClock(Parameter rateParameter, boolean isRelativeRate, Set<Integer> tipSet, ClockType type) {
            this.rateParameter = rateParameter;
            this.indexParameter = null;
            this.isRelativeRate = isRelativeRate;
            this.tips = tipSet;
            this.tipList = null;
            this.type = type;
            this.includeStem = true;
            this.excludeClade = true;
        }

        // Trunk clock: tipList is indexed by the optional indexParameter.
        LocalClock(Parameter rateParameter, Parameter indexParameter, boolean isRelativeRate, List<Integer> tipList, ClockType type) {
            this.rateParameter = rateParameter;
            this.indexParameter = indexParameter;
            this.isRelativeRate = isRelativeRate;
            this.tips = null;
            this.tipList = tipList;
            this.type = type;
            this.includeStem = true;
            this.excludeClade = true;
        }

        // Clade clock with explicit stem/clade flags.
        LocalClock(Parameter rateParameter, boolean isRelativeRate, Set<Integer> tips, boolean includeStem, boolean excludeClade) {
            this.rateParameter = rateParameter;
            this.indexParameter = null;
            this.isRelativeRate = isRelativeRate;
            this.tips = tips;
            this.tipList = null;
            this.type = ClockType.CLADE;
            this.includeStem = includeStem;
            this.excludeClade = excludeClade;
        }

        boolean includeStem() {
            return this.includeStem;
        }

        boolean excludeClade() {
            return excludeClade;
        }

        ClockType getType() {
            return this.type;
        }

        boolean isRelativeRate() {
            return isRelativeRate;
        }

        Parameter getRateParameter() {
            return this.rateParameter;
        }

        private final Parameter rateParameter;
        private final Parameter indexParameter;
        private final boolean isRelativeRate;
        private final Set<Integer> tips;
        private final List<Integer> tipList;
        private final ClockType type;
        private final boolean includeStem;
        private final boolean excludeClade;
    }

    private final Helper helper = new Helper();
}
package aQute.bnd.osgi; import static aQute.bnd.classfile.ConstantPool.CONSTANT_Class; import static aQute.bnd.classfile.ConstantPool.CONSTANT_Fieldref; import static aQute.bnd.classfile.ConstantPool.CONSTANT_InterfaceMethodref; import static aQute.bnd.classfile.ConstantPool.CONSTANT_MethodType; import static aQute.bnd.classfile.ConstantPool.CONSTANT_Methodref; import static aQute.bnd.classfile.ConstantPool.CONSTANT_NameAndType; import static aQute.bnd.classfile.ConstantPool.CONSTANT_String; import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static java.util.Objects.requireNonNull; import static java.util.stream.Collectors.toMap; import java.io.DataInput; import java.io.DataInputStream; import java.io.IOException; import java.io.InputStream; import java.lang.annotation.RetentionPolicy; import java.lang.reflect.Modifier; import java.nio.ByteBuffer; import java.util.ArrayDeque; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.Deque; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Properties; import java.util.Set; import java.util.Spliterator; import java.util.Spliterators.AbstractSpliterator; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Predicate; import java.util.stream.Stream; import java.util.stream.StreamSupport; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import aQute.bnd.classfile.AnnotationDefaultAttribute; import aQute.bnd.classfile.AnnotationInfo; import aQute.bnd.classfile.AnnotationsAttribute; import aQute.bnd.classfile.Attribute; import aQute.bnd.classfile.BootstrapMethodsAttribute; import aQute.bnd.classfile.BootstrapMethodsAttribute.BootstrapMethod; import 
aQute.bnd.classfile.ClassFile; import aQute.bnd.classfile.CodeAttribute; import aQute.bnd.classfile.CodeAttribute.ExceptionHandler; import aQute.bnd.classfile.ConstantPool; import aQute.bnd.classfile.ConstantPool.AbstractRefInfo; import aQute.bnd.classfile.ConstantPool.MethodTypeInfo; import aQute.bnd.classfile.ConstantPool.NameAndTypeInfo; import aQute.bnd.classfile.ConstantValueAttribute; import aQute.bnd.classfile.DeprecatedAttribute; import aQute.bnd.classfile.ElementValueInfo; import aQute.bnd.classfile.ElementValueInfo.EnumConst; import aQute.bnd.classfile.ElementValueInfo.ResultConst; import aQute.bnd.classfile.EnclosingMethodAttribute; import aQute.bnd.classfile.ExceptionsAttribute; import aQute.bnd.classfile.FieldInfo; import aQute.bnd.classfile.InnerClassesAttribute; import aQute.bnd.classfile.InnerClassesAttribute.InnerClass; import aQute.bnd.classfile.MemberInfo; import aQute.bnd.classfile.MethodInfo; import aQute.bnd.classfile.MethodParametersAttribute; import aQute.bnd.classfile.ParameterAnnotationInfo; import aQute.bnd.classfile.ParameterAnnotationsAttribute; import aQute.bnd.classfile.RuntimeInvisibleAnnotationsAttribute; import aQute.bnd.classfile.RuntimeInvisibleParameterAnnotationsAttribute; import aQute.bnd.classfile.RuntimeInvisibleTypeAnnotationsAttribute; import aQute.bnd.classfile.RuntimeVisibleAnnotationsAttribute; import aQute.bnd.classfile.RuntimeVisibleParameterAnnotationsAttribute; import aQute.bnd.classfile.RuntimeVisibleTypeAnnotationsAttribute; import aQute.bnd.classfile.SignatureAttribute; import aQute.bnd.classfile.SourceFileAttribute; import aQute.bnd.classfile.StackMapTableAttribute; import aQute.bnd.classfile.StackMapTableAttribute.AppendFrame; import aQute.bnd.classfile.StackMapTableAttribute.FullFrame; import aQute.bnd.classfile.StackMapTableAttribute.ObjectVariableInfo; import aQute.bnd.classfile.StackMapTableAttribute.SameLocals1StackItemFrame; import 
aQute.bnd.classfile.StackMapTableAttribute.SameLocals1StackItemFrameExtended; import aQute.bnd.classfile.StackMapTableAttribute.StackMapFrame; import aQute.bnd.classfile.StackMapTableAttribute.VerificationTypeInfo; import aQute.bnd.classfile.TypeAnnotationInfo; import aQute.bnd.classfile.TypeAnnotationsAttribute; import aQute.bnd.osgi.Annotation.ElementType; import aQute.bnd.osgi.Descriptors.Descriptor; import aQute.bnd.osgi.Descriptors.PackageRef; import aQute.bnd.osgi.Descriptors.TypeRef; import aQute.bnd.signatures.FieldSignature; import aQute.bnd.signatures.MethodSignature; import aQute.bnd.signatures.Signature; import aQute.lib.exceptions.Exceptions; import aQute.lib.io.ByteBufferDataInput; import aQute.lib.utf8properties.UTF8Properties; import aQute.libg.generics.Create; import aQute.libg.glob.Glob; public class Clazz { private final static Logger logger = LoggerFactory.getLogger(Clazz.class); @Deprecated public class ClassConstant { final int cname; public boolean referred; public ClassConstant(int class_index) { this.cname = class_index; } public String getName() { return constantPool.utf8(cname); } @Override public String toString() { return "ClassConstant[" + getName() + "]"; } } public static enum JAVA { JDK1_1(45, "JRE-1.1", "(&(osgi.ee=JavaSE)(version=1.1))"), JDK1_2(46, "J2SE-1.2", "(&(osgi.ee=JavaSE)(version=1.2))"), JDK1_3(47, "J2SE-1.3", "(&(osgi.ee=JavaSE)(version=1.3))"), JDK1_4(48, "J2SE-1.4", "(&(osgi.ee=JavaSE)(version=1.4))"), J2SE5(49, "J2SE-1.5", "(&(osgi.ee=JavaSE)(version=1.5))"), J2SE6(50, "JavaSE-1.6", "(&(osgi.ee=JavaSE)(version=1.6))"), OpenJDK7(51, "JavaSE-1.7", "(&(osgi.ee=JavaSE)(version=1.7))"), OpenJDK8(52, "JavaSE-1.8", "(&(osgi.ee=JavaSE)(version=1.8))") { Map<String, Set<String>> profiles; @Override public Map<String, Set<String>> getProfiles() throws IOException { if (profiles == null) { Properties p = new UTF8Properties(); try (InputStream in = Clazz.class.getResourceAsStream("profiles-" + this + ".properties")) { 
p.load(in); } profiles = new HashMap<>(); for (Map.Entry<Object, Object> prop : p.entrySet()) { String list = (String) prop.getValue(); Set<String> set = new HashSet<>(); Collections.addAll(set, list.split("\\s*,\\s*")); profiles.put((String) prop.getKey(), set); } } return profiles; } }, OpenJDK9(53, "JavaSE-9", "(&(osgi.ee=JavaSE)(version=9))"), OpenJDK10(54, "JavaSE-10", "(&(osgi.ee=JavaSE)(version=10))"), OpenJDK11(55, "JavaSE-11", "(&(osgi.ee=JavaSE)(version=11))"), OpenJDK12(56, "JavaSE-12", "(&(osgi.ee=JavaSE)(version=12))"), UNKNOWN(Integer.MAX_VALUE, "<UNKNOWN>", "(osgi.ee=UNKNOWN)"); final int major; final String ee; final String filter; JAVA(int major, String ee, String filter) { this.major = major; this.ee = ee; this.filter = filter; } static JAVA format(int n) { for (JAVA e : JAVA.values()) if (e.major == n) return e; return UNKNOWN; } public int getMajor() { return major; } public boolean hasAnnotations() { return major >= J2SE5.major; } public boolean hasGenerics() { return major >= J2SE5.major; } public boolean hasEnums() { return major >= J2SE5.major; } public static JAVA getJava(int major, @SuppressWarnings("unused") int minor) { for (JAVA j : JAVA.values()) { if (j.major == major) return j; } return UNKNOWN; } public String getEE() { return ee; } public String getFilter() { return filter; } public Map<String, Set<String>> getProfiles() throws IOException { return null; } } public static enum QUERY { IMPLEMENTS, EXTENDS, IMPORTS, NAMED, ANY, VERSION, CONCRETE, ABSTRACT, PUBLIC, ANNOTATED, INDIRECTLY_ANNOTATED, HIERARCHY_ANNOTATED, HIERARCHY_INDIRECTLY_ANNOTATED, RUNTIMEANNOTATIONS, CLASSANNOTATIONS, DEFAULT_CONSTRUCTOR; } public final static EnumSet<QUERY> HAS_ARGUMENT = EnumSet.of(QUERY.IMPLEMENTS, QUERY.EXTENDS, QUERY.IMPORTS, QUERY.NAMED, QUERY.VERSION, QUERY.ANNOTATED, QUERY.INDIRECTLY_ANNOTATED, QUERY.HIERARCHY_ANNOTATED, QUERY.HIERARCHY_INDIRECTLY_ANNOTATED); final static int ACC_SYNTHETIC = 0x1000; final static int ACC_BRIDGE = 0x0040; 
final static int ACC_ANNOTATION = 0x2000; final static int ACC_ENUM = 0x4000; final static int ACC_MODULE = 0x8000; @Deprecated static protected class Assoc { private Assoc() {} } public abstract class Def { final int access; public Def(int access) { this.access = access; } public int getAccess() { return access; } public boolean isEnum() { return (access & ACC_ENUM) != 0; } public boolean isPublic() { return Modifier.isPublic(access); } public boolean isAbstract() { return Modifier.isAbstract(access); } public boolean isProtected() { return Modifier.isProtected(access); } public boolean isFinal() { return Modifier.isFinal(access); } public boolean isStatic() { return Modifier.isStatic(access); } public boolean isPrivate() { return Modifier.isPrivate(access); } public boolean isNative() { return Modifier.isNative(access); } public boolean isTransient() { return Modifier.isTransient(access); } public boolean isVolatile() { return Modifier.isVolatile(access); } public boolean isInterface() { return Modifier.isInterface(access); } public boolean isSynthetic() { return (access & ACC_SYNTHETIC) != 0; } public boolean isModule() { return Clazz.isModule(access); } public boolean isAnnotation() { return Clazz.isAnnotation(access); } @Deprecated public Collection<TypeRef> getAnnotations() { return null; } public TypeRef getOwnerType() { return classDef.getType(); } public abstract String getName(); public abstract TypeRef getType(); public abstract TypeRef[] getPrototype(); public Object getClazz() { return Clazz.this; } } abstract class ElementDef extends Def { final Attribute[] attributes; ElementDef(int access, Attribute[] attributes) { super(access); this.attributes = attributes; } public boolean isDeprecated() { return attribute(DeprecatedAttribute.class).isPresent() || annotationInfos(RuntimeVisibleAnnotationsAttribute.class) .anyMatch(a -> a.type.equals("Ljava/lang/Deprecated;")); } public String getSignature() { return attribute(SignatureAttribute.class).map(a -> 
a.signature) .orElse(null); } <A extends Attribute> Stream<A> attributes(Class<A> attributeType) { @SuppressWarnings("unchecked") Stream<A> stream = (Stream<A>) Arrays.stream(attributes) .filter(attributeType::isInstance); return stream; } <A extends Attribute> Optional<A> attribute(Class<A> attributeType) { return attributes(attributeType).findFirst(); } <A extends AnnotationsAttribute> Stream<AnnotationInfo> annotationInfos(Class<A> attributeType) { return attributes(attributeType).flatMap(a -> Arrays.stream(a.annotations)); } public Stream<Annotation> annotations(String binaryNameFilter) { Predicate<AnnotationInfo> matches = matches(binaryNameFilter); ElementType elementType = elementType(); Stream<Annotation> runtimeAnnotations = annotationInfos(RuntimeVisibleAnnotationsAttribute.class) .filter(matches) .map(a -> newAnnotation(a, elementType, RetentionPolicy.RUNTIME, access)); Stream<Annotation> classAnnotations = annotationInfos(RuntimeInvisibleAnnotationsAttribute.class) .filter(matches) .map(a -> newAnnotation(a, elementType, RetentionPolicy.CLASS, access)); return Stream.concat(runtimeAnnotations, classAnnotations); } Predicate<AnnotationInfo> matches(String binaryNameFilter) { if ((binaryNameFilter == null) || binaryNameFilter.equals("*")) { return annotationInfo -> true; } Glob glob = new Glob("L{" + binaryNameFilter + "};"); return annotationInfo -> glob.matches(annotationInfo.type); } <A extends TypeAnnotationsAttribute> Stream<TypeAnnotationInfo> typeAnnotationInfos(Class<A> attributeType) { return attributes(attributeType).flatMap(a -> Arrays.stream(a.type_annotations)); } public Stream<TypeAnnotation> typeAnnotations(String binaryNameFilter) { Predicate<AnnotationInfo> matches = matches(binaryNameFilter); ElementType elementType = elementType(); Stream<TypeAnnotation> runtimeTypeAnnotations = typeAnnotationInfos( RuntimeVisibleTypeAnnotationsAttribute.class).filter(matches) .map(a -> newTypeAnnotation(a, elementType, RetentionPolicy.RUNTIME, 
access)); Stream<TypeAnnotation> classTypeAnnotations = typeAnnotationInfos( RuntimeInvisibleTypeAnnotationsAttribute.class).filter(matches) .map(a -> newTypeAnnotation(a, elementType, RetentionPolicy.CLASS, access)); return Stream.concat(runtimeTypeAnnotations, classTypeAnnotations); } @Override public String getName() { return super.toString(); } @Override public TypeRef getType() { return null; } @Override public TypeRef[] getPrototype() { return null; } @Override public String toString() { return getName(); } abstract ElementType elementType(); } class CodeDef extends ElementDef { private final ElementType elementType; CodeDef(CodeAttribute code, ElementType elementType) { super(0, code.attributes); this.elementType = elementType; } @Override ElementType elementType() { return elementType; } @Override public boolean isDeprecated() { return false; } } class ClassDef extends ElementDef { private final TypeRef type; ClassDef(ClassFile classFile) { super(classFile.access, classFile.attributes); type = analyzer.getTypeRef(classFile.this_class); } String getSourceFile() { return attribute(SourceFileAttribute.class).map(a -> a.sourcefile) .orElse(null); } boolean isInnerClass() { String binary = type.getBinary(); return attributes(InnerClassesAttribute.class).flatMap(a -> Arrays.stream(a.classes)) .anyMatch(inner -> !Modifier.isStatic(inner.inner_access) && inner.inner_class.equals(binary)); } @Override public String getName() { return type.getFQN(); } @Override public TypeRef getType() { return type; } @Override ElementType elementType() { if (isAnnotation()) { return ElementType.ANNOTATION_TYPE; } if (isModule()) { return ElementType.MODULE; } return type.getBinary() .endsWith("/package-info") ? 
ElementType.PACKAGE : ElementType.TYPE; } } public class FieldDef extends ElementDef { final String name; final Descriptor descriptor; @Deprecated public FieldDef(int access, String name, String descriptor) { super(access, new Attribute[0]); this.name = name; this.descriptor = analyzer.getDescriptor(descriptor); } FieldDef(MemberInfo memberInfo) { super(memberInfo.access, memberInfo.attributes); this.name = memberInfo.name; this.descriptor = analyzer.getDescriptor(memberInfo.descriptor); } @Override public String getName() { return name; } @Override public TypeRef getType() { return descriptor.getType(); } @Deprecated public void setDeprecated(boolean deprecated) {} public TypeRef getContainingClass() { return getClassName(); } public Descriptor getDescriptor() { return descriptor; } @Deprecated public void setConstant(Object o) {} public Object getConstant() { return attribute(ConstantValueAttribute.class).map(a -> a.value) .orElse(null); } public String getGenericReturnType() { String signature = getSignature(); FieldSignature sig = analyzer.getFieldSignature((signature != null) ? 
signature : descriptor.toString()); return sig.type.toString(); } @Override public TypeRef[] getPrototype() { return null; } @Override ElementType elementType() { return ElementType.FIELD; } } public static class MethodParameter { private final MethodParametersAttribute.MethodParameter methodParameter; MethodParameter(MethodParametersAttribute.MethodParameter methodParameter) { this.methodParameter = methodParameter; } public String getName() { return methodParameter.name; } public int getAccess() { return methodParameter.access_flags; } @Override public String toString() { return getName(); } static MethodParameter[] parameters(MethodParametersAttribute attribute) { int parameters_count = attribute.parameters.length; MethodParameter[] parameters = new MethodParameter[parameters_count]; for (int i = 0; i < parameters_count; i++) { parameters[i] = new MethodParameter(attribute.parameters[i]); } return parameters; } } public class MethodDef extends FieldDef { @Deprecated public MethodDef(int access, String method, String descriptor) { super(access, method, descriptor); } public MethodDef(MethodInfo methodInfo) { super(methodInfo); } public boolean isConstructor() { return name.equals("<init>") || name.equals("<clinit>"); } @Override public boolean isFinal() { return super.isFinal() || Modifier.isFinal(classDef.getAccess()); } @Override public TypeRef[] getPrototype() { return descriptor.getPrototype(); } public boolean isBridge() { return (access & ACC_BRIDGE) != 0; } @Override public String getGenericReturnType() { String signature = getSignature(); MethodSignature sig = analyzer.getMethodSignature((signature != null) ? 
signature : descriptor.toString()); return sig.resultType.toString(); } public MethodParameter[] getParameters() { return attribute(MethodParametersAttribute.class).map(MethodParameter::parameters) .orElseGet(() -> new MethodParameter[0]); } @Override public Object getConstant() { return attribute(AnnotationDefaultAttribute.class).map(a -> annotationDefault(a, access)) .orElse(null); } <A extends ParameterAnnotationsAttribute> Stream<ParameterAnnotationInfo> parameterAnnotationInfos( Class<A> attributeType) { return attributes(attributeType).flatMap(a -> Arrays.stream(a.parameter_annotations)); } public Stream<ParameterAnnotation> parameterAnnotations(String binaryNameFilter) { Predicate<AnnotationInfo> matches = matches(binaryNameFilter); ElementType elementType = elementType(); Stream<ParameterAnnotation> runtimeParameterAnnotations = parameterAnnotationInfos( RuntimeVisibleParameterAnnotationsAttribute.class) .flatMap(a -> parameterAnnotations(a, matches, elementType, RetentionPolicy.RUNTIME)); Stream<ParameterAnnotation> classParameterAnnotations = parameterAnnotationInfos( RuntimeInvisibleParameterAnnotationsAttribute.class) .flatMap(a -> parameterAnnotations(a, matches, elementType, RetentionPolicy.CLASS)); return Stream.concat(runtimeParameterAnnotations, classParameterAnnotations); } private Stream<ParameterAnnotation> parameterAnnotations(ParameterAnnotationInfo parameterAnnotationInfo, Predicate<AnnotationInfo> matches, ElementType elementType, RetentionPolicy policy) { int parameter = parameterAnnotationInfo.parameter; return Arrays.stream(parameterAnnotationInfo.annotations) .filter(matches) .map(a -> newParameterAnnotation(parameter, a, elementType, policy, access)); } /** * We must also look in the method's Code attribute for type * annotations. 
*/ @Override <A extends TypeAnnotationsAttribute> Stream<TypeAnnotationInfo> typeAnnotationInfos(Class<A> attributeType) { ElementType elementType = elementType(); Stream<A> methodAttributes = attributes(attributeType); Stream<A> codeAttributes = attribute(CodeAttribute.class) .map(code -> new CodeDef(code, elementType).attributes(attributeType)) .orElseGet(Stream::empty); return Stream.concat(methodAttributes, codeAttributes) .flatMap(a -> Arrays.stream(a.type_annotations)); } @Override ElementType elementType() { return name.equals("<init>") ? ElementType.CONSTRUCTOR : ElementType.METHOD; } } public class TypeDef extends Def { final TypeRef type; final boolean interf; public TypeDef(TypeRef type, boolean interf) { super(Modifier.PUBLIC); this.type = type; this.interf = interf; } public TypeRef getReference() { return type; } public boolean getImplements() { return interf; } @Override public String getName() { if (interf) return "<implements>"; return "<extends>"; } @Override public TypeRef getType() { return type; } @Override public TypeRef[] getPrototype() { return null; } } public static final Comparator<Clazz> NAME_COMPARATOR = (Clazz a, Clazz b) -> a.classDef.getType() .compareTo(b.classDef.getType()); private boolean hasRuntimeAnnotations; private boolean hasClassAnnotations; private boolean hasDefaultConstructor; private Set<PackageRef> imports = Create.set(); private Set<TypeRef> xref = new HashSet<>(); private Set<TypeRef> annotations; private int forName = 0; private int class$ = 0; private Set<PackageRef> api; private ClassFile classFile = null; private ConstantPool constantPool = null; TypeRef superClass; private TypeRef[] interfaces; ClassDef classDef; private Map<TypeRef, Integer> referred = null; final Analyzer analyzer; final String path; final Resource resource; public static final int TYPEUSE_INDEX_NONE = TypeAnnotationInfo.TYPEUSE_INDEX_NONE; public static final int TYPEUSE_TARGET_INDEX_EXTENDS = TypeAnnotationInfo.TYPEUSE_TARGET_INDEX_EXTENDS; 
/**
 * Creates a holder for the class file contained in {@code resource}.
 * Parsing is lazy: nothing is read until one of the parseClassFile*
 * methods is called.
 */
public Clazz(Analyzer analyzer, String path, Resource resource) {
	this.path = path;
	this.resource = resource;
	this.analyzer = analyzer;
}

/**
 * Parse the class file without reporting to a collector.
 *
 * @return the set of types referenced by this class
 */
public Set<TypeRef> parseClassFile() throws Exception {
	return parseClassFileWithCollector(null);
}

public Set<TypeRef> parseClassFile(InputStream in) throws Exception {
	return parseClassFile(in, null);
}

/**
 * Parse the class file and report it to the given collector. Reads from
 * the resource's backing buffer when available (avoids an InputStream
 * copy), otherwise falls back to streaming.
 */
public Set<TypeRef> parseClassFileWithCollector(ClassDataCollector cd) throws Exception {
	ByteBuffer bb = resource.buffer();
	if (bb != null) {
		return parseClassFileData(ByteBufferDataInput.wrap(bb), cd);
	}
	return parseClassFile(resource.openInputStream(), cd);
}

public Set<TypeRef> parseClassFile(InputStream in, ClassDataCollector cd) throws Exception {
	try (DataInputStream din = new DataInputStream(in)) {
		return parseClassFileData(din, cd);
	}
}

private Set<TypeRef> parseClassFileData(DataInput in, ClassDataCollector cd) throws Exception {
	Set<TypeRef> xref = parseClassFileData(in);
	visitClassFile(cd);
	return xref;
}

/**
 * Parse the class file data and record every cross reference (super
 * class, interfaces, constant pool descriptors, field/method descriptors
 * and attributes). Idempotent: once {@code classFile} is set, subsequent
 * calls return the previously computed xref set. synchronized so a
 * concurrent caller never observes a partially initialized state.
 */
private synchronized Set<TypeRef> parseClassFileData(DataInput in) throws Exception {
	if (classFile != null) {
		return xref; // already parsed
	}
	logger.debug("parseClassFile(): path={} resource={}", path, resource);
	classFile = ClassFile.parseClassFile(in);
	classDef = new ClassDef(classFile);
	constantPool = classFile.constant_pool;
	referred = new HashMap<>(constantPool.size());
	if (classDef.isPublic()) {
		api = new HashSet<>(); // only a public class contributes API uses
	}
	if (!classDef.isModule()) {
		referTo(classDef.getType(), Modifier.PUBLIC);
	}
	String superName = classFile.super_class;
	if (superName == null) {
		// only java.lang.Object and module-info may lack a super class
		if (!(classDef.getType()
			.isObject() || classDef.isModule())) {
			throw new IOException("Class does not have a super class and is not java.lang.Object or module-info");
		}
	} else {
		superClass = analyzer.getTypeRef(superName);
		referTo(superClass, classFile.access);
	}
	int interfaces_count = classFile.interfaces.length;
	if (interfaces_count > 0) {
		interfaces = new TypeRef[interfaces_count];
		for (int i = 0; i < interfaces_count; i++) {
			interfaces[i] = analyzer.getTypeRef(classFile.interfaces[i]);
			referTo(interfaces[i], classFile.access);
		}
	}
	// All name&type and class constant records contain descriptors we
	// must treat as references, though not API
	int constant_pool_count = constantPool.size();
	for (int i = 1; i < constant_pool_count; i++) {
		switch (constantPool.tag(i)) {
			case CONSTANT_Fieldref :
			case CONSTANT_Methodref :
			case CONSTANT_InterfaceMethodref : {
				AbstractRefInfo info = constantPool.entry(i);
				classConstRef(constantPool.className(info.class_index));
				break;
			}
			case CONSTANT_NameAndType : {
				NameAndTypeInfo info = constantPool.entry(i);
				referTo(constantPool.utf8(info.descriptor_index), 0);
				break;
			}
			case CONSTANT_MethodType : {
				MethodTypeInfo info = constantPool.entry(i);
				referTo(constantPool.utf8(info.descriptor_index), 0);
				break;
			}
			default :
				break;
		}
	}
	for (FieldInfo fieldInfo : classFile.fields) {
		referTo(fieldInfo.descriptor, fieldInfo.access);
		processAttributes(fieldInfo.attributes, elementType(fieldInfo), fieldInfo.access);
	}
	/*
	 * We crawl the code to find the ldc(_w) <string constant> invokestatic
	 * Class.forName if so, calculate the method ref index so we can do this
	 * efficiently
	 */
	forName = findMethodReference("java/lang/Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;");
	class$ = findMethodReference(classDef.getType()
		.getBinary(), "class$", "(Ljava/lang/String;)Ljava/lang/Class;");
	for (MethodInfo methodInfo : classFile.methods) {
		referTo(methodInfo.descriptor, methodInfo.access);
		ElementType elementType = elementType(methodInfo);
		// a public no-args constructor marks the class instantiable by DI/SPI
		if ((elementType == ElementType.CONSTRUCTOR) && Modifier.isPublic(methodInfo.access)
			&& methodInfo.descriptor.equals("()V")) {
			hasDefaultConstructor = true;
		}
		processAttributes(methodInfo.attributes, elementType, methodInfo.access);
	}
	processAttributes(classFile.attributes, elementType(classFile), classFile.access);
	return xref;
}

/**
 * Report the parsed class file to the collector, if any. Must run after
 * parseClassFileData(DataInput) has populated classFile/classDef/referred.
 * classEnd() is guaranteed via finally once classStart() returned true.
 */
private void visitClassFile(ClassDataCollector cd) throws Exception {
	if (cd == null) {
		return;
	}
	logger.debug("visitClassFile(): path={} resource={}", path, resource);
	if (!cd.classStart(this)) {
		return; // collector is not interested in this class
	}
	try {
		cd.version(classFile.minor_version, classFile.major_version);
		if (superClass != null) {
			cd.extendsClass(superClass);
		}
		if (interfaces != null) {
			cd.implementsInterfaces(interfaces);
		}
		referred.forEach((typeRef, access) -> {
			cd.addReference(typeRef);
			cd.referTo(typeRef, access.intValue());
		});
		for (FieldInfo fieldInfo : classFile.fields) {
			FieldDef fieldDef = new FieldDef(fieldInfo);
			cd.field(fieldDef);
			visitAttributes(cd, fieldDef);
		}
		for (MethodInfo methodInfo : classFile.methods) {
			MethodDef methodDef = new MethodDef(methodInfo);
			cd.method(methodDef);
			visitAttributes(cd, methodDef);
		}
		cd.memberEnd();
		visitAttributes(cd, classDef);
	} finally {
		cd.classEnd();
	}
}

// Fresh wrapper objects per call; callers must not rely on identity.
public Stream<FieldDef> fields() {
	return Arrays.stream(classFile.fields)
		.map(FieldDef::new);
}

public Stream<MethodDef> methods() {
	return Arrays.stream(classFile.methods)
		.map(MethodDef::new);
}

/**
 * Find a method reference in the pool that points to the given class,
 * methodname and descriptor.
 *
 * @param clazz binary class name owning the method
 * @param methodname method name to match
 * @param descriptor method descriptor to match
 * @return index in constant pool, or -1 when not present
 */
private int findMethodReference(String clazz, String methodname, String descriptor) {
	int constant_pool_count = constantPool.size();
	for (int i = 1; i < constant_pool_count; i++) {
		switch (constantPool.tag(i)) {
			case CONSTANT_Methodref :
			case CONSTANT_InterfaceMethodref :
				AbstractRefInfo refInfo = constantPool.entry(i);
				if (clazz.equals(constantPool.className(refInfo.class_index))) {
					NameAndTypeInfo nameAndTypeInfo = constantPool.entry(refInfo.name_and_type_index);
					if (methodname.equals(constantPool.utf8(nameAndTypeInfo.name_index))
						&& descriptor.equals(constantPool.utf8(nameAndTypeInfo.descriptor_index))) {
						return i;
					}
				}
		}
	}
	return -1;
}

/**
 * Called for the attributes in the class, field, method or Code attribute.
*/ private void processAttributes(Attribute[] attributes, ElementType elementType, int access_flags) { for (Attribute attribute : attributes) { switch (attribute.name()) { case RuntimeVisibleAnnotationsAttribute.NAME : processAnnotations((AnnotationsAttribute) attribute, elementType, RetentionPolicy.RUNTIME, access_flags); break; case RuntimeInvisibleAnnotationsAttribute.NAME : processAnnotations((AnnotationsAttribute) attribute, elementType, RetentionPolicy.CLASS, access_flags); break; case RuntimeVisibleParameterAnnotationsAttribute.NAME : processParameterAnnotations((ParameterAnnotationsAttribute) attribute, ElementType.PARAMETER, RetentionPolicy.RUNTIME, access_flags); break; case RuntimeInvisibleParameterAnnotationsAttribute.NAME : processParameterAnnotations((ParameterAnnotationsAttribute) attribute, ElementType.PARAMETER, RetentionPolicy.CLASS, access_flags); break; case RuntimeVisibleTypeAnnotationsAttribute.NAME : processTypeAnnotations((TypeAnnotationsAttribute) attribute, ElementType.TYPE_USE, RetentionPolicy.RUNTIME, access_flags); break; case RuntimeInvisibleTypeAnnotationsAttribute.NAME : processTypeAnnotations((TypeAnnotationsAttribute) attribute, ElementType.TYPE_USE, RetentionPolicy.CLASS, access_flags); break; case EnclosingMethodAttribute.NAME : processEnclosingMethod((EnclosingMethodAttribute) attribute); break; case CodeAttribute.NAME : processCode((CodeAttribute) attribute, elementType); break; case SignatureAttribute.NAME : processSignature((SignatureAttribute) attribute, elementType, access_flags); break; case AnnotationDefaultAttribute.NAME : processAnnotationDefault((AnnotationDefaultAttribute) attribute, elementType, access_flags); break; case ExceptionsAttribute.NAME : processExceptions((ExceptionsAttribute) attribute, access_flags); break; case BootstrapMethodsAttribute.NAME : processBootstrapMethods((BootstrapMethodsAttribute) attribute); break; case StackMapTableAttribute.NAME : processStackMapTable((StackMapTableAttribute) 
attribute); break; default : break; } } } /** * Called for the attributes in the class, field, or method. */ private void visitAttributes(ClassDataCollector cd, ElementDef elementDef) throws Exception { int access_flags = elementDef.getAccess(); ElementType elementType = elementDef.elementType(); if (elementDef.isDeprecated()) { cd.deprecated(); } for (Attribute attribute : elementDef.attributes) { switch (attribute.name()) { case RuntimeVisibleAnnotationsAttribute.NAME : visitAnnotations(cd, (AnnotationsAttribute) attribute, elementType, RetentionPolicy.RUNTIME, access_flags); break; case RuntimeInvisibleAnnotationsAttribute.NAME : visitAnnotations(cd, (AnnotationsAttribute) attribute, elementType, RetentionPolicy.CLASS, access_flags); break; case RuntimeVisibleParameterAnnotationsAttribute.NAME : visitParameterAnnotations(cd, (ParameterAnnotationsAttribute) attribute, ElementType.PARAMETER, RetentionPolicy.RUNTIME, access_flags); break; case RuntimeInvisibleParameterAnnotationsAttribute.NAME : visitParameterAnnotations(cd, (ParameterAnnotationsAttribute) attribute, ElementType.PARAMETER, RetentionPolicy.CLASS, access_flags); break; case RuntimeVisibleTypeAnnotationsAttribute.NAME : visitTypeAnnotations(cd, (TypeAnnotationsAttribute) attribute, ElementType.TYPE_USE, RetentionPolicy.RUNTIME, access_flags); break; case RuntimeInvisibleTypeAnnotationsAttribute.NAME : visitTypeAnnotations(cd, (TypeAnnotationsAttribute) attribute, ElementType.TYPE_USE, RetentionPolicy.CLASS, access_flags); break; case InnerClassesAttribute.NAME : visitInnerClasses(cd, (InnerClassesAttribute) attribute); break; case EnclosingMethodAttribute.NAME : visitEnclosingMethod(cd, (EnclosingMethodAttribute) attribute); break; case CodeAttribute.NAME : visitCode(cd, (CodeAttribute) attribute, elementType); break; case SignatureAttribute.NAME : visitSignature(cd, (SignatureAttribute) attribute); break; case ConstantValueAttribute.NAME : visitConstantValue(cd, (ConstantValueAttribute) attribute); 
break; case AnnotationDefaultAttribute.NAME : visitAnnotationDefault(cd, (AnnotationDefaultAttribute) attribute, elementDef); break; case MethodParametersAttribute.NAME : visitMethodParameters(cd, (MethodParametersAttribute) attribute, elementDef); break; default : break; } } } private void processEnclosingMethod(EnclosingMethodAttribute attribute) { classConstRef(attribute.class_name); } private void visitEnclosingMethod(ClassDataCollector cd, EnclosingMethodAttribute attribute) { TypeRef cName = analyzer.getTypeRef(attribute.class_name); cd.enclosingMethod(cName, attribute.method_name, attribute.method_descriptor); } private void visitInnerClasses(ClassDataCollector cd, InnerClassesAttribute attribute) throws Exception { for (InnerClass innerClassInfo : attribute.classes) { TypeRef innerClass = analyzer.getTypeRef(innerClassInfo.inner_class); TypeRef outerClass; String outerClassName = innerClassInfo.outer_class; if (outerClassName != null) { outerClass = analyzer.getTypeRef(outerClassName); } else { outerClass = null; } cd.innerClass(innerClass, outerClass, innerClassInfo.inner_name, innerClassInfo.inner_access); } } private void processSignature(SignatureAttribute attribute, ElementType elementType, int access_flags) { String signature = attribute.signature; Signature sig; switch (elementType) { case ANNOTATION_TYPE : case TYPE : case PACKAGE : sig = analyzer.getClassSignature(signature); break; case FIELD : sig = analyzer.getFieldSignature(signature); break; case CONSTRUCTOR : case METHOD : sig = analyzer.getMethodSignature(signature); break; default : throw new IllegalArgumentException( "Signature \"" + signature + "\" found for unknown element type: " + elementType); } Set<String> binaryRefs = sig.erasedBinaryReferences(); for (String binary : binaryRefs) { TypeRef ref = analyzer.getTypeRef(binary); referTo(ref, access_flags); } } private void visitSignature(ClassDataCollector cd, SignatureAttribute attribute) { String signature = attribute.signature; 
cd.signature(signature); } private void processAnnotationDefault(AnnotationDefaultAttribute attribute, ElementType elementType, int access_flags) { Object value = attribute.value; processElementValue(value, elementType, RetentionPolicy.RUNTIME, access_flags); } private void visitAnnotationDefault(ClassDataCollector cd, AnnotationDefaultAttribute attribute, ElementDef elementDef) { MethodDef methodDef = (MethodDef) elementDef; Object value = annotationDefault(attribute, methodDef.getAccess()); cd.annotationDefault(methodDef, value); } static ElementType elementType(FieldInfo fieldInfo) { return ElementType.FIELD; } static ElementType elementType(MethodInfo methodInfo) { return methodInfo.name.equals("<init>") ? ElementType.CONSTRUCTOR : ElementType.METHOD; } static ElementType elementType(ClassFile classFile) { if (isAnnotation(classFile.access)) { return ElementType.ANNOTATION_TYPE; } if (isModule(classFile.access)) { return ElementType.MODULE; } return classFile.this_class.endsWith("/package-info") ? 
ElementType.PACKAGE : ElementType.TYPE; } Object annotationDefault(AnnotationDefaultAttribute attribute, int access_flags) { try { return newElementValue(attribute.value, ElementType.METHOD, RetentionPolicy.RUNTIME, access_flags); } catch (Exception e) { throw Exceptions.duck(e); } } private void visitConstantValue(ClassDataCollector cd, ConstantValueAttribute attribute) { Object value = attribute.value; cd.constant(value); } private void processExceptions(ExceptionsAttribute attribute, int access_flags) { for (String exception : attribute.exceptions) { TypeRef clazz = analyzer.getTypeRef(exception); referTo(clazz, access_flags); } } private void visitMethodParameters(ClassDataCollector cd, MethodParametersAttribute attribute, ElementDef elementDef) { MethodDef method = (MethodDef) elementDef; cd.methodParameters(method, MethodParameter.parameters(attribute)); } private void processCode(CodeAttribute attribute, ElementType elementType) { ByteBuffer code = attribute.code.duplicate(); code.rewind(); int lastReference = -1; while (code.hasRemaining()) { int instruction = Byte.toUnsignedInt(code.get()); switch (instruction) { case OpCodes.ldc : { lastReference = Byte.toUnsignedInt(code.get()); classConstRef(lastReference); break; } case OpCodes.ldc_w : { lastReference = Short.toUnsignedInt(code.getShort()); classConstRef(lastReference); break; } case OpCodes.anewarray : case OpCodes.checkcast : case OpCodes.instanceof_ : case OpCodes.new_ : { int class_index = Short.toUnsignedInt(code.getShort()); classConstRef(class_index); lastReference = -1; break; } case OpCodes.multianewarray : { int class_index = Short.toUnsignedInt(code.getShort()); classConstRef(class_index); code.get(); lastReference = -1; break; } case OpCodes.invokestatic : { int method_ref_index = Short.toUnsignedInt(code.getShort()); if ((method_ref_index == forName || method_ref_index == class$) && lastReference != -1) { if (constantPool.tag(lastReference) == CONSTANT_String) { String fqn = 
constantPool.string(lastReference); if (!fqn.equals("class") && fqn.indexOf('.') > 0) { TypeRef typeRef = analyzer.getTypeRefFromFQN(fqn); referTo(typeRef, 0); } } } lastReference = -1; break; } case OpCodes.wide : { int opcode = Byte.toUnsignedInt(code.get()); code.position(code.position() + (opcode == OpCodes.iinc ? 4 : 2)); lastReference = -1; break; } case OpCodes.tableswitch : { // Skip to place divisible by 4 int rem = code.position() % 4; if (rem != 0) { code.position(code.position() + 4 - rem); } int deflt = code.getInt(); int low = code.getInt(); int high = code.getInt(); code.position(code.position() + (high - low + 1) * 4); lastReference = -1; break; } case OpCodes.lookupswitch : { // Skip to place divisible by 4 int rem = code.position() % 4; if (rem != 0) { code.position(code.position() + 4 - rem); } int deflt = code.getInt(); int npairs = code.getInt(); code.position(code.position() + npairs * 8); lastReference = -1; break; } default : { code.position(code.position() + OpCodes.OFFSETS[instruction]); lastReference = -1; break; } } } for (ExceptionHandler exceptionHandler : attribute.exception_table) { classConstRef(exceptionHandler.catch_type); } processAttributes(attribute.attributes, elementType, 0); } private void visitCode(ClassDataCollector cd, CodeAttribute attribute, ElementType elementType) throws Exception { ByteBuffer code = attribute.code.duplicate(); code.rewind(); while (code.hasRemaining()) { int instruction = Byte.toUnsignedInt(code.get()); switch (instruction) { case OpCodes.invokespecial : { int method_ref_index = Short.toUnsignedInt(code.getShort()); visitReferenceMethod(cd, method_ref_index); break; } case OpCodes.invokevirtual : { int method_ref_index = Short.toUnsignedInt(code.getShort()); visitReferenceMethod(cd, method_ref_index); break; } case OpCodes.invokeinterface : { int method_ref_index = Short.toUnsignedInt(code.getShort()); visitReferenceMethod(cd, method_ref_index); code.position(code.position() + 2); break; } case 
OpCodes.invokestatic : { int method_ref_index = Short.toUnsignedInt(code.getShort()); visitReferenceMethod(cd, method_ref_index); break; } case OpCodes.wide : { int opcode = Byte.toUnsignedInt(code.get()); code.position(code.position() + (opcode == OpCodes.iinc ? 4 : 2)); break; } case OpCodes.tableswitch : { // Skip to place divisible by 4 int rem = code.position() % 4; if (rem != 0) { code.position(code.position() + 4 - rem); } int deflt = code.getInt(); int low = code.getInt(); int high = code.getInt(); code.position(code.position() + (high - low + 1) * 4); break; } case OpCodes.lookupswitch : { // Skip to place divisible by 4 int rem = code.position() % 4; if (rem != 0) { code.position(code.position() + 4 - rem); } int deflt = code.getInt(); int npairs = code.getInt(); code.position(code.position() + npairs * 8); break; } default : { code.position(code.position() + OpCodes.OFFSETS[instruction]); break; } } } CodeDef codeDef = new CodeDef(attribute, elementType); visitAttributes(cd, codeDef); } /** * Called when crawling the byte code and a method reference is found */ private void visitReferenceMethod(ClassDataCollector cd, int method_ref_index) { AbstractRefInfo refInfo = constantPool.entry(method_ref_index); String className = constantPool.className(refInfo.class_index); NameAndTypeInfo nameAndTypeInfo = constantPool.entry(refInfo.name_and_type_index); String method = constantPool.utf8(nameAndTypeInfo.name_index); String descriptor = constantPool.utf8(nameAndTypeInfo.descriptor_index); TypeRef type = analyzer.getTypeRef(className); cd.referenceMethod(0, type, method, descriptor); } private void processParameterAnnotations(ParameterAnnotationsAttribute attribute, ElementType elementType, RetentionPolicy policy, int access_flags) { for (ParameterAnnotationInfo parameterAnnotationInfo : attribute.parameter_annotations) { for (AnnotationInfo annotationInfo : parameterAnnotationInfo.annotations) { processAnnotation(annotationInfo, elementType, policy, 
access_flags); } } } private void visitParameterAnnotations(ClassDataCollector cd, ParameterAnnotationsAttribute attribute, ElementType elementType, RetentionPolicy policy, int access_flags) throws Exception { for (ParameterAnnotationInfo parameterAnnotationInfo : attribute.parameter_annotations) { if (parameterAnnotationInfo.annotations.length > 0) { cd.parameter(parameterAnnotationInfo.parameter); for (AnnotationInfo annotationInfo : parameterAnnotationInfo.annotations) { Annotation annotation = newAnnotation(annotationInfo, elementType, policy, access_flags); cd.annotation(annotation); } } } } private void processTypeAnnotations(TypeAnnotationsAttribute attribute, ElementType elementType, RetentionPolicy policy, int access_flags) { for (TypeAnnotationInfo typeAnnotationInfo : attribute.type_annotations) { processAnnotation(typeAnnotationInfo, elementType, policy, access_flags); } } private void visitTypeAnnotations(ClassDataCollector cd, TypeAnnotationsAttribute attribute, ElementType elementType, RetentionPolicy policy, int access_flags) throws Exception { for (TypeAnnotationInfo typeAnnotationInfo : attribute.type_annotations) { cd.typeuse(typeAnnotationInfo.target_type, typeAnnotationInfo.target_index, typeAnnotationInfo.target_info, typeAnnotationInfo.type_path); Annotation annotation = newAnnotation(typeAnnotationInfo, elementType, policy, access_flags); cd.annotation(annotation); } } private void processAnnotations(AnnotationsAttribute attribute, ElementType elementType, RetentionPolicy policy, int access_flags) { for (AnnotationInfo annotationInfo : attribute.annotations) { processAnnotation(annotationInfo, elementType, policy, access_flags); } } private void visitAnnotations(ClassDataCollector cd, AnnotationsAttribute attribute, ElementType elementType, RetentionPolicy policy, int access_flags) throws Exception { for (AnnotationInfo annotationInfo : attribute.annotations) { Annotation annotation = newAnnotation(annotationInfo, elementType, policy, 
access_flags); cd.annotation(annotation); } } private void processAnnotation(AnnotationInfo annotationInfo, ElementType elementType, RetentionPolicy policy, int access_flags) { if (annotations == null) { annotations = new HashSet<>(); } String typeName = annotationInfo.type; TypeRef typeRef = analyzer.getTypeRef(typeName); annotations.add(typeRef); if (policy == RetentionPolicy.RUNTIME) { referTo(typeRef, 0); hasRuntimeAnnotations = true; if (api != null && (Modifier.isPublic(access_flags) || Modifier.isProtected(access_flags))) { api.add(typeRef.getPackageRef()); } } else { hasClassAnnotations = true; } for (ElementValueInfo elementValueInfo : annotationInfo.values) { processElementValue(elementValueInfo.value, elementType, policy, access_flags); } } Annotation newAnnotation(AnnotationInfo annotationInfo, ElementType elementType, RetentionPolicy policy, int access_flags) { String typeName = annotationInfo.type; TypeRef typeRef = analyzer.getTypeRef(typeName); Map<String, Object> elements = annotationValues(annotationInfo.values, elementType, policy, access_flags); return new Annotation(typeRef, elements, elementType, policy); } ParameterAnnotation newParameterAnnotation(int parameter, AnnotationInfo annotationInfo, ElementType elementType, RetentionPolicy policy, int access_flags) { String typeName = annotationInfo.type; TypeRef typeRef = analyzer.getTypeRef(typeName); Map<String, Object> elements = annotationValues(annotationInfo.values, elementType, policy, access_flags); return new ParameterAnnotation(parameter, typeRef, elements, elementType, policy); } TypeAnnotation newTypeAnnotation(TypeAnnotationInfo annotationInfo, ElementType elementType, RetentionPolicy policy, int access_flags) { String typeName = annotationInfo.type; TypeRef typeRef = analyzer.getTypeRef(typeName); Map<String, Object> elements = annotationValues(annotationInfo.values, elementType, policy, access_flags); return new TypeAnnotation(annotationInfo.target_type, annotationInfo.target_info, 
annotationInfo.target_index, annotationInfo.type_path, typeRef, elements, elementType, policy); } private Map<String, Object> annotationValues(ElementValueInfo[] values, ElementType elementType, RetentionPolicy policy, int access_flags) { Map<String, Object> elements = new LinkedHashMap<>(); for (ElementValueInfo elementValueInfo : values) { String element = elementValueInfo.name; Object value = newElementValue(elementValueInfo.value, elementType, policy, access_flags); elements.put(element, value); } return elements; } private void processElementValue(Object value, ElementType elementType, RetentionPolicy policy, int access_flags) { if (value instanceof EnumConst) { if (policy == RetentionPolicy.RUNTIME) { EnumConst enumConst = (EnumConst) value; TypeRef name = analyzer.getTypeRef(enumConst.type); referTo(name, 0); if (api != null && (Modifier.isPublic(access_flags) || Modifier.isProtected(access_flags))) { api.add(name.getPackageRef()); } } } else if (value instanceof ResultConst) { if (policy == RetentionPolicy.RUNTIME) { ResultConst resultConst = (ResultConst) value; TypeRef name = analyzer.getTypeRef(resultConst.descriptor); if (!name.isPrimitive()) { PackageRef packageRef = name.getPackageRef(); if (!packageRef.isPrimitivePackage()) { referTo(name, 0); if (api != null && (Modifier.isPublic(access_flags) || Modifier.isProtected(access_flags))) { api.add(packageRef); } } } } } else if (value instanceof AnnotationInfo) { processAnnotation((AnnotationInfo) value, elementType, policy, access_flags); } else if (value instanceof Object[]) { Object[] array = (Object[]) value; int num_values = array.length; for (int i = 0; i < num_values; i++) { processElementValue(array[i], elementType, policy, access_flags); } } } private Object newElementValue(Object value, ElementType elementType, RetentionPolicy policy, int access_flags) { if (value instanceof EnumConst) { EnumConst enumConst = (EnumConst) value; return enumConst.name; } else if (value instanceof ResultConst) { 
ResultConst resultConst = (ResultConst) value; TypeRef name = analyzer.getTypeRef(resultConst.descriptor); return name; } else if (value instanceof AnnotationInfo) { return newAnnotation((AnnotationInfo) value, elementType, policy, access_flags); } else if (value instanceof Object[]) { Object[] array = (Object[]) value; int num_values = array.length; Object[] result = new Object[num_values]; for (int i = 0; i < num_values; i++) { result[i] = newElementValue(array[i], elementType, policy, access_flags); } return result; } else { return value; } } private void processBootstrapMethods(BootstrapMethodsAttribute attribute) { for (BootstrapMethod bootstrapMethod : attribute.bootstrap_methods) { for (int bootstrap_argument : bootstrapMethod.bootstrap_arguments) { classConstRef(bootstrap_argument); } } } private void processStackMapTable(StackMapTableAttribute attribute) { for (StackMapFrame stackMapFrame : attribute.entries) { switch (stackMapFrame.type()) { case StackMapFrame.SAME_LOCALS_1_STACK_ITEM : SameLocals1StackItemFrame sameLocals1StackItemFrame = (SameLocals1StackItemFrame) stackMapFrame; verification_type_info(sameLocals1StackItemFrame.stack); break; case StackMapFrame.SAME_LOCALS_1_STACK_ITEM_EXTENDED : SameLocals1StackItemFrameExtended sameLocals1StackItemFrameExtended = (SameLocals1StackItemFrameExtended) stackMapFrame; verification_type_info(sameLocals1StackItemFrameExtended.stack); break; case StackMapFrame.APPEND : AppendFrame appendFrame = (AppendFrame) stackMapFrame; for (VerificationTypeInfo verificationTypeInfo : appendFrame.locals) { verification_type_info(verificationTypeInfo); } break; case StackMapFrame.FULL_FRAME : FullFrame fullFrame = (FullFrame) stackMapFrame; for (VerificationTypeInfo verificationTypeInfo : fullFrame.locals) { verification_type_info(verificationTypeInfo); } for (VerificationTypeInfo verificationTypeInfo : fullFrame.stack) { verification_type_info(verificationTypeInfo); } break; } } } private void 
verification_type_info(VerificationTypeInfo verificationTypeInfo) { switch (verificationTypeInfo.tag) { case VerificationTypeInfo.ITEM_Object :// Object_variable_info ObjectVariableInfo objectVariableInfo = (ObjectVariableInfo) verificationTypeInfo; classConstRef(objectVariableInfo.type); break; } } /** * Add a new package reference. * * @param packageRef A '.' delimited package name */ private void referTo(TypeRef typeRef, int modifiers) { xref.add(typeRef); if (typeRef.isPrimitive()) { return; } PackageRef packageRef = typeRef.getPackageRef(); if (packageRef.isPrimitivePackage()) { return; } imports.add(packageRef); if (api != null && (Modifier.isPublic(modifiers) || Modifier.isProtected(modifiers))) { api.add(packageRef); } referred.merge(typeRef, Integer.valueOf(modifiers), (o, n) -> { int old_modifiers = o.intValue(); int new_modifiers = n.intValue(); if ((old_modifiers == new_modifiers) || (new_modifiers == 0)) { return o; } else if (old_modifiers == 0) { return n; } else { return Integer.valueOf(old_modifiers | new_modifiers); } }); } private void referTo(String descriptor, int modifiers) { char c = descriptor.charAt(0); if (c != '(' && c != 'L' && c != '[' && c != '<' && c != 'T') { return; } Signature sig = (c == '(' || c == '<') ? 
analyzer.getMethodSignature(descriptor) : analyzer.getFieldSignature(descriptor); Set<String> binaryRefs = sig.erasedBinaryReferences(); for (String binary : binaryRefs) { TypeRef ref = analyzer.getTypeRef(binary); referTo(ref, modifiers); } } @Deprecated public void parseDescriptor(String descriptor, int modifiers) { if (referred == null) { referred = new HashMap<>(); } referTo(descriptor, modifiers); } public Set<PackageRef> getReferred() { return imports; } public String getAbsolutePath() { return path; } @Deprecated public void reset() {} private Stream<Clazz> hierarchyStream(Analyzer analyzer) { requireNonNull(analyzer); Spliterator<Clazz> spliterator = new AbstractSpliterator<Clazz>(Long.MAX_VALUE, Spliterator.DISTINCT | Spliterator.ORDERED | Spliterator.NONNULL) { private Clazz clazz = Clazz.this; @Override public boolean tryAdvance(Consumer<? super Clazz> action) { requireNonNull(action); if (clazz == null) { return false; } action.accept(clazz); TypeRef type = clazz.superClass; if (type == null) { clazz = null; } else { try { clazz = analyzer.findClass(type); } catch (Exception e) { throw Exceptions.duck(e); } if (clazz == null) { analyzer.warning("While traversing the type tree for %s cannot find class %s", Clazz.this, type); } } return true; } }; return StreamSupport.stream(spliterator, false); } private Stream<TypeRef> typeStream(Analyzer analyzer, Function<? super Clazz, Collection<? extends TypeRef>> func, Set<TypeRef> visited) { requireNonNull(analyzer); requireNonNull(func); Spliterator<TypeRef> spliterator = new AbstractSpliterator<TypeRef>(Long.MAX_VALUE, Spliterator.DISTINCT | Spliterator.ORDERED | Spliterator.NONNULL) { private final Deque<TypeRef> queue = new ArrayDeque<>(func.apply(Clazz.this)); private final Set<TypeRef> seen = (visited != null) ? visited : new HashSet<>(); @Override public boolean tryAdvance(Consumer<? 
super TypeRef> action) { requireNonNull(action); TypeRef type; do { type = queue.poll(); if (type == null) { return false; } } while (seen.contains(type)); seen.add(type); action.accept(type); if (visited != null) { Clazz clazz; try { clazz = analyzer.findClass(type); } catch (Exception e) { throw Exceptions.duck(e); } if (clazz == null) { analyzer.warning("While traversing the type tree for %s cannot find class %s", Clazz.this, type); } else { queue.addAll(func.apply(clazz)); } } return true; } }; return StreamSupport.stream(spliterator, false); } public boolean is(QUERY query, Instruction instr, Analyzer analyzer) throws Exception { switch (query) { case ANY : return true; case NAMED : return instr.matches(getClassName().getDottedOnly()) ^ instr.isNegated(); case VERSION : { String v = classFile.major_version + "." + classFile.minor_version; return instr.matches(v) ^ instr.isNegated(); } case IMPLEMENTS : { Set<TypeRef> visited = new HashSet<>(); return hierarchyStream(analyzer).flatMap(c -> c.typeStream(analyzer, Clazz::interfaces, visited)) .map(TypeRef::getDottedOnly) .anyMatch(instr::matches) ^ instr.isNegated(); } case EXTENDS : return hierarchyStream(analyzer).skip(1) // skip this class .map(Clazz::getClassName) .map(TypeRef::getDottedOnly) .anyMatch(instr::matches) ^ instr.isNegated(); case PUBLIC : return isPublic(); case CONCRETE : return !isAbstract(); case ANNOTATED : return typeStream(analyzer, Clazz::annotations, null) .map(TypeRef::getFQN) .anyMatch(instr::matches) ^ instr.isNegated(); case INDIRECTLY_ANNOTATED : return typeStream(analyzer, Clazz::annotations, new HashSet<>()) .map(TypeRef::getFQN) .anyMatch(instr::matches) ^ instr.isNegated(); case HIERARCHY_ANNOTATED : return hierarchyStream(analyzer) .flatMap(c -> c.typeStream(analyzer, Clazz::annotations, null)) .map(TypeRef::getFQN) .anyMatch(instr::matches) ^ instr.isNegated(); case HIERARCHY_INDIRECTLY_ANNOTATED : { Set<TypeRef> visited = new HashSet<>(); return hierarchyStream(analyzer) 
.flatMap(c -> c.typeStream(analyzer, Clazz::annotations, visited)) .map(TypeRef::getFQN) .anyMatch(instr::matches) ^ instr.isNegated(); } case RUNTIMEANNOTATIONS : return hasRuntimeAnnotations; case CLASSANNOTATIONS : return hasClassAnnotations; case ABSTRACT : return isAbstract(); case IMPORTS : return hierarchyStream(analyzer) .map(Clazz::getReferred) .flatMap(Set::stream) .distinct() .map(PackageRef::getFQN) .anyMatch(instr::matches) ^ instr.isNegated(); case DEFAULT_CONSTRUCTOR : return hasPublicNoArgsConstructor(); } return instr == null ? false : instr.isNegated(); } @Override public String toString() { return (classDef != null) ? classDef.getName() : resource.toString(); } public boolean isPublic() { return classDef.isPublic(); } public boolean isProtected() { return classDef.isProtected(); } public boolean isEnum() { /** * The additional check for superClass name avoids stating that an * anonymous inner class of an enum is an enum class. */ return classDef.isEnum() && superClass.getBinary() .equals("java/lang/Enum"); } public boolean isSynthetic() { return classDef.isSynthetic(); } public boolean isModule() { return classDef.isModule(); } static boolean isModule(int access) { return (access & ACC_MODULE) != 0; } public JAVA getFormat() { return JAVA.format(classFile.major_version); } public static String objectDescriptorToFQN(String string) { if ((string.startsWith("L") || string.startsWith("T")) && string.endsWith(";")) return string.substring(1, string.length() - 1) .replace('/', '.'); switch (string.charAt(0)) { case 'V' : return "void"; case 'B' : return "byte"; case 'C' : return "char"; case 'I' : return "int"; case 'S' : return "short"; case 'D' : return "double"; case 'F' : return "float"; case 'J' : return "long"; case 'Z' : return "boolean"; case '[' : // Array return objectDescriptorToFQN(string.substring(1)) + "[]"; } throw new IllegalArgumentException("Invalid type character in descriptor " + string); } public static String unCamel(String id) { 
StringBuilder out = new StringBuilder(); for (int i = 0; i < id.length(); i++) { char c = id.charAt(i); if (c == '_' || c == '$' || c == '-' || c == '.') { if (out.length() > 0 && !Character.isWhitespace(out.charAt(out.length() - 1))) out.append(' '); continue; } int n = i; while (n < id.length() && Character.isUpperCase(id.charAt(n))) { n++; } if (n == i) out.append(id.charAt(i)); else { boolean tolower = (n - i) == 1; if (i > 0 && !Character.isWhitespace(out.charAt(out.length() - 1))) out.append(' '); for (; i < n;) { if (tolower) out.append(Character.toLowerCase(id.charAt(i))); else out.append(id.charAt(i)); i++; } i } } if (id.startsWith(".")) out.append(" *"); out.replace(0, 1, Character.toUpperCase(out.charAt(0)) + ""); return out.toString(); } public boolean isInterface() { return classDef.isInterface(); } public boolean isAbstract() { return classDef.isAbstract(); } public boolean hasPublicNoArgsConstructor() { return hasDefaultConstructor; } public int getAccess() { return classDef.getAccess(); } @Deprecated public void setInnerAccess(int access) {} public Stream<Annotation> annotations(String binaryNameFilter) { return classDef.annotations(binaryNameFilter); } public Stream<TypeAnnotation> typeAnnotations(String binaryNameFilter) { return classDef.typeAnnotations(binaryNameFilter); } public TypeRef getClassName() { return classDef.getType(); } public boolean isInnerClass() { return classDef.isInnerClass(); } @Deprecated public MethodDef getMethodDef(int access, String name, String descriptor) { return new MethodDef(access, name, descriptor); } public TypeRef getSuper() { return superClass; } public String getFQN() { return classDef.getName(); } public TypeRef[] getInterfaces() { return interfaces; } public List<TypeRef> interfaces() { return (interfaces != null) ? Arrays.asList(interfaces) : emptyList(); } public Set<TypeRef> annotations() { return (annotations != null) ? 
annotations : emptySet();
	}

	public boolean isFinal() {
		return classDef.isFinal();
	}

	@Deprecated
	public void setDeprecated(boolean b) {}

	public boolean isDeprecated() {
		return classDef.isDeprecated();
	}

	public boolean isAnnotation() {
		return classDef.isAnnotation();
	}

	static boolean isAnnotation(int access) {
		return (access & ACC_ANNOTATION) != 0;
	}

	public Set<PackageRef> getAPIUses() {
		return (api != null) ? api : emptySet();
	}

	public Clazz.TypeDef getExtends(TypeRef type) {
		return new TypeDef(type, false);
	}

	public Clazz.TypeDef getImplements(TypeRef type) {
		return new TypeDef(type, true);
	}

	// Record a reference to the class named by the given constant-pool entry,
	// if that entry is a CONSTANT_Class.
	private void classConstRef(int index) {
		if (constantPool.tag(index) == CONSTANT_Class) {
			String name = constantPool.className(index);
			classConstRef(name);
		}
	}

	// Record a reference to the class with the given internal name (null-safe).
	private void classConstRef(String name) {
		if (name != null) {
			TypeRef typeRef = analyzer.getTypeRef(name);
			referTo(typeRef, 0);
		}
	}

	public String getClassSignature() {
		return classDef.getSignature();
	}

	public String getSourceFile() {
		return classDef.getSourceFile();
	}

	/**
	 * Return the default values of this annotation type's elements, keyed by
	 * element name. Returns an empty map for non-annotation classes. Forces the
	 * class file to be parsed first.
	 */
	public Map<String, Object> getDefaults() throws Exception {
		parseClassFile();
		if (!classDef.isAnnotation()) {
			return emptyMap();
		}
		Map<String, Object> map = methods().filter(m -> m.attribute(AnnotationDefaultAttribute.class)
			.isPresent())
			.collect(toMap(MethodDef::getName, MethodDef::getConstant));
		return map;
	}

	public Resource getResource() {
		return resource;
	}
}
package dr.inference.model;

import dr.xml.*;

/**
 * CrossValidationProvider that compares a "true" matrix parameter against an
 * inferred one. Every matrix entry becomes one named log column, plus a single
 * total-sum column.
 */
public class MatrixValidationProvider implements CrossValidationProvider {

    private final MatrixParameter trueParameter;
    private final MatrixParameter inferredParameter;
    private final int[] relevantDimensions; // identity mapping 0..dim-1: all entries are compared
    private final String[] colNames;        // one column name per flat matrix entry
    private final String sumName;           // name of the aggregate-sum column

    MatrixValidationProvider(MatrixParameter trueParameter, MatrixParameter inferredParameter, String id) {
        this.trueParameter = trueParameter;
        this.inferredParameter = inferredParameter;

        int dimParameter = trueParameter.getDimension();

        this.relevantDimensions = new int[dimParameter];
        for (int i = 0; i < dimParameter; i++) {
            relevantDimensions[i] = i;
        }

        this.colNames = new String[dimParameter];
        for (int i = 0; i < dimParameter; i++) {
            // NOTE(review): both the quotient and the remainder are taken with
            // getRowDimension(), implying the flat parameter values come in blocks
            // of rowDimension entries. Confirm this matches MatrixParameter's
            // storage order -- otherwise the "row"/"col" names are swapped.
            int row = i / trueParameter.getRowDimension();
            int col = i - row * trueParameter.getRowDimension();
            // Column names are 1-based, e.g. id11, id12, ...
            colNames[i] = id + (row + 1) + (col + 1);
        }

        sumName = id + ".TotalSum";
    }

    @Override
    public double[] getTrueValues() {
        return trueParameter.getParameterValues();
    }

    @Override
    public double[] getInferredValues() {
        return inferredParameter.getParameterValues();
    }

    @Override
    public int[] getRelevantDimensions() {
        return relevantDimensions;
    }

    @Override
    public String getName(int dim) {
        return colNames[dim];
    }

    @Override
    public String getNameSum(int dim) {
        // The sum column has one name regardless of the requested dimension.
        return sumName;
    }

    //TODO: Merge with TraitValidationProvider parser ?
    /**
     * XML parser building a CrossValidator (or, with logSum="true", a
     * CrossValidatorSum) from two equally sized matrix parameters.
     */
    public static dr.xml.XMLObjectParser PARSER = new dr.xml.AbstractXMLObjectParser() {

        final static String PARSER_NAME = "matrixValidation";
        final static String TRUE_PARAMETER = "trueParameter";
        final static String INFERRED_PARAMETER = "inferredParameter";
        final static String LOG_SUM = "logSum";

        @Override
        public Object parseXMLObject(XMLObject xo) throws XMLParseException {

            MatrixParameter trueParameter = (MatrixParameter) xo.getElementFirstChild(TRUE_PARAMETER);
            MatrixParameter inferredParameter = (MatrixParameter) xo.getElementFirstChild(INFERRED_PARAMETER);

            // The element's id (when present) prefixes every log column name.
            String id = PARSER_NAME;
            if (xo.hasId()) {
                id = xo.getId();
            }

            // Entry-wise comparison requires identical shapes.
            if (trueParameter.getRowDimension() != inferredParameter.getRowDimension()
                    || trueParameter.getColumnDimension() != inferredParameter.getColumnDimension()) {
                throw new XMLParseException("The matrix parameters contained in " + TRUE_PARAMETER + " and " +
                        INFERRED_PARAMETER + " must have the same dimensions.");
            }

            MatrixValidationProvider provider = new MatrixValidationProvider(trueParameter, inferredParameter, id);

            // logSum=true logs only the aggregate squared error rather than
            // one column per matrix entry.
            boolean logSum = xo.getAttribute(LOG_SUM, false);

            if (logSum) return new CrossValidatorSum(provider, ValidationType.SQUARED_ERROR);
            return new CrossValidator(provider, ValidationType.SQUARED_ERROR);
        }

        @Override
        public XMLSyntaxRule[] getSyntaxRules() {
            return new XMLSyntaxRule[]{
                    AttributeRule.newBooleanRule(LOG_SUM, true),
                    new ElementRule(TRUE_PARAMETER, new XMLSyntaxRule[]{
                            new ElementRule(Parameter.class)
                    }),
                    new ElementRule(INFERRED_PARAMETER, new XMLSyntaxRule[]{
                            new ElementRule(Parameter.class)
                    })
            };
        }

        @Override
        public String getParserDescription() {
            return null;
        }

        @Override
        public Class getReturnType() {
            return CrossValidator.class;
        }

        @Override
        public String getParserName() {
            return PARSER_NAME;
        }
    };
}
package aQute.junit; import java.io.*; import java.lang.reflect.*; import java.util.*; import junit.framework.*; import org.junit.runner.*; import org.junit.runner.manipulation.*; import org.osgi.framework.*; import aQute.junit.constants.*; public class Activator implements BundleActivator, TesterConstants, Runnable { BundleContext context; volatile boolean active; int port = -1; boolean continuous = false; boolean trace = false; PrintStream out = System.err; JUnitEclipseReport jUnitEclipseReport; volatile Thread thread; public Activator() { } public void start(BundleContext context) throws Exception { this.context = context; active = true; if (context.getProperty(TESTER_SEPARATETHREAD) == null) { Hashtable<String,String> ht = new Hashtable<String,String>(); ht.put("main.thread", "true"); ht.put(Constants.SERVICE_DESCRIPTION, "JUnit tester"); context.registerService(Runnable.class.getName(), this, ht); } else { thread = new Thread("bnd Runtime Test Bundle"); thread.start(); } } public void stop(BundleContext context) throws Exception { active = false; if (jUnitEclipseReport != null) jUnitEclipseReport.close(); if (thread != null) { thread.interrupt(); thread.join(10000); } } public void run() { continuous = Boolean.valueOf(context.getProperty(TESTER_CONTINUOUS)); trace = context.getProperty(TESTER_TRACE) != null; if (thread == null) trace("running in main thread"); // We can be started on our own thread or from the main code thread = Thread.currentThread(); String testcases = context.getProperty(TESTER_NAMES); trace("test cases %s", testcases); if (context.getProperty(TESTER_PORT) != null) { port = Integer.parseInt(context.getProperty(TESTER_PORT)); try { trace("using port %s", port); jUnitEclipseReport = new JUnitEclipseReport(port); } catch (Exception e) { System.err.println("Cannot create link Eclipse JUnit on port " + port); System.exit(-2); } } if (testcases == null) { trace("automatic testing of all bundles with Test-Cases header"); try { automatic(); } catch 
(IOException e) { // ignore } } else { trace("receivednames of classes to test %s", testcases); try { int errors = test(null, testcases, null); System.exit(errors); } catch (Exception e) { e.printStackTrace(); System.exit(-2); } } } void automatic() throws IOException { String testerDir = context.getProperty(TESTER_DIR); if (testerDir == null) testerDir = "testdir"; final File reportDir = new File(testerDir); final List<Bundle> queue = new Vector<Bundle>(); if (!reportDir.exists() && !reportDir.mkdirs()) { throw new IOException("Could not create directory " + reportDir); } trace("using %s, needed creation %s", reportDir, reportDir.mkdirs()); trace("adding Bundle Listener for getting test bundle events"); context.addBundleListener(new SynchronousBundleListener() { public void bundleChanged(BundleEvent event) { if (event.getType() == BundleEvent.STARTED) { checkBundle(queue, event.getBundle()); } } }); for (Bundle b : context.getBundles()) { checkBundle(queue, b); } trace("starting queue"); int result = 0; outer: while (active) { Bundle bundle; synchronized (queue) { while (queue.isEmpty() && active) { try { queue.wait(); } catch (InterruptedException e) { trace("tests bundle queue interrupted"); thread.interrupt(); break outer; } } } try { bundle = queue.remove(0); trace("received bundle to test: %s", bundle.getLocation()); Writer report = getReportWriter(reportDir, bundle); try { trace("test will run"); result += test(bundle, (String) bundle.getHeaders().get("Test-Cases"), report); trace("test ran"); if (queue.isEmpty() && !continuous) { trace("queue " + queue); System.exit(result); } } finally { if (report != null) report.close(); } } catch (Exception e) { error("Not sure what happened anymore %s", e); System.exit(-2); } } } void checkBundle(List<Bundle> queue, Bundle bundle) { if (bundle.getState() == Bundle.ACTIVE) { String testcases = (String) bundle.getHeaders().get("Test-Cases"); if (testcases != null) { trace("found active bundle with test cases %s : %s", 
bundle, testcases); synchronized (queue) { queue.add(bundle); queue.notifyAll(); } } } } private Writer getReportWriter(File reportDir, Bundle bundle) throws IOException { if (reportDir.isDirectory()) { Version v = bundle.getVersion(); File f = new File(reportDir, "TEST-" + bundle.getSymbolicName() + "-" + v.getMajor() + "." + v.getMinor() + "." + v.getMicro() + ".xml"); return new OutputStreamWriter(new FileOutputStream(f), "UTF-8"); } return null; } /** * The main test routine. * * @param bundle * The bundle under test or null * @param testnames * The names to test * @param report * The report writer or null * @return # of errors */ int test(Bundle bundle, String testnames, Writer report) { trace("testing bundle %s with %s", bundle, testnames); Bundle fw = context.getBundle(0); try { List<String> names = new ArrayList<String>(); StringTokenizer st = new StringTokenizer(testnames, " ,"); while (st.hasMoreTokens()) names.add(st.nextToken()); List<TestReporter> reporters = new ArrayList<TestReporter>(); final TestResult result = new TestResult(); Tee systemErr; Tee systemOut; systemOut = new Tee(System.err); systemErr = new Tee(System.err); systemOut.capture(trace).echo(true); systemErr.capture(trace).echo(true); System.setOut(systemOut.getStream()); System.setErr(systemErr.getStream()); trace("changed streams"); try { BasicTestReport basic = new BasicTestReport(this, systemOut, systemErr) { @Override public void check() { if (!active) result.stop(); } }; add(reporters, result, basic); if (port > 0) { add(reporters, result, jUnitEclipseReport); } if (report != null) { add(reporters, result, new JunitXmlReport(report, bundle, basic)); } for (TestReporter tr : reporters) { tr.setup(fw, bundle); } try { TestSuite suite = createSuite(bundle, names, result); trace("created suite " + suite); List<Test> flattened = new ArrayList<Test>(); int realcount = flatten(flattened, suite); for (TestReporter tr : reporters) { tr.begin(flattened, realcount); } trace("running suite " + 
suite); suite.run(result); } catch (Throwable t) { trace(t.getMessage()); result.addError(null, t); } finally { for (TestReporter tr : reporters) { tr.end(); } } } catch (Throwable t) { System.err.println("exiting " + t); t.printStackTrace(); } finally { System.setOut(systemOut.oldStream); System.setErr(systemErr.oldStream); trace("unset streams"); } System.err.println("Errors: " + result.errorCount()); System.err.println("Failures: " + result.failureCount()); return result.errorCount() + result.failureCount(); } catch (Exception e) { e.printStackTrace(); } return -1; } private TestSuite createSuite(Bundle tfw, List<String> testNames, TestResult result) throws Exception { TestSuite suite = new TestSuite(); for (String fqn : testNames) { addTest(tfw, suite, fqn, result); } return suite; } private void addTest(Bundle tfw, TestSuite suite, String fqn, TestResult testResult) { try { int n = fqn.indexOf(':'); if (n > -1) { String method = fqn.substring(n + 1); fqn = fqn.substring(0, n); Class< ? > clazz = loadClass(tfw, fqn); if (clazz != null) addTest(tfw, suite, clazz, testResult, method); else { System.err.println("Can not create test case for: " + fqn + ", class might not be included in your test bundle?"); testResult.addError(suite, new Exception("Cannot load class " + fqn + ", was it included in the test bundle?")); } } else { Class< ? > clazz = loadClass(tfw, fqn); if (clazz != null) addTest(tfw, suite, clazz, testResult, null); else { System.err.println("Can not create test case for: " + fqn + ", class might not be included in your test bundle?"); testResult.addError(suite, new Exception("Cannot load class " + fqn + ", was it included in the test bundle?")); } } } catch (Throwable e) { System.err.println("Can not create test case for: " + fqn + " : " + e); testResult.addError(suite, e); } } @SuppressWarnings("unchecked") private void addTest(@SuppressWarnings("unused") Bundle tfw, TestSuite suite, Class< ? 
> clazz, @SuppressWarnings("unused") TestResult testResult, final String method) { if (TestCase.class.isAssignableFrom(clazz)) { if (method != null) { suite.addTest(TestSuite.createTest(clazz, method)); return; } suite.addTestSuite((Class< ? extends TestCase>) clazz); return; } JUnit4TestAdapter adapter = new JUnit4TestAdapter(clazz); if (method != null) { try { adapter.filter(new org.junit.runner.manipulation.Filter() { @Override public String describe() { return "Method filter"; } @Override public boolean shouldRun(Description description) { if (method.equals(description.getMethodName())) { return true; } return false; } }); } catch (NoTestsRemainException e) { return; } } suite.addTest(new JUnit4TestAdapter(clazz)); } private Class< ? > loadClass(Bundle tfw, String fqn) { try { if (tfw != null) { checkResolved(tfw); try { return tfw.loadClass(fqn); } catch (ClassNotFoundException e1) { return null; } } Bundle bundles[] = context.getBundles(); for (int i = bundles.length - 1; i >= 0; i try { checkResolved(bundles[i]); return bundles[i].loadClass(fqn); } catch (ClassNotFoundException e1) { // try next } } } catch (Exception e) { error("Exception during loading of class: %s. Exception %s and cause %s. This sometimes " + "happens when there is an error in the static initialization, the class has " + "no public constructor, it is an inner class, or it has no public access", fqn, e, e.getCause()); } return null; } private void checkResolved(Bundle bundle) { int state = bundle.getState(); if (state == Bundle.INSTALLED || state == Bundle.UNINSTALLED) { trace("unresolved bundle %s", bundle.getLocation()); } } public int flatten(List<Test> list, TestSuite suite) { int realCount = 0; for (Enumeration< ? 
> e = suite.tests(); e.hasMoreElements();) { Test test = (Test) e.nextElement(); list.add(test); if (test instanceof TestSuite) realCount += flatten(list, (TestSuite) test); else realCount++; } return realCount; } private void add(List<TestReporter> reporters, TestResult result, TestReporter rp) { reporters.add(rp); result.addListener(rp); } static public String replace(String source, String symbol, String replace) { StringBuffer sb = new StringBuffer(source); int n = sb.indexOf(symbol, 0); while (n > 0) { sb.replace(n, n + symbol.length(), replace); n = n - symbol.length() + replace.length(); n = sb.indexOf(replace, n); } return sb.toString(); } public void trace(String msg, Object... objects) { if (trace) { message("# ", msg, objects); } } private void message(String prefix, String string, Object[] objects) { Throwable e = null; StringBuffer sb = new StringBuffer(); int n = 0; sb.append(prefix); for (int i = 0; i < string.length(); i++) { char c = string.charAt(i); if (c == '%') { c = string.charAt(++i); switch (c) { case 's' : if (n < objects.length) { Object o = objects[n++]; if (o instanceof Throwable) { e = (Throwable) o; if (o instanceof InvocationTargetException) { Throwable t = (InvocationTargetException) o; sb.append(t.getMessage()); e = t; } else sb.append(e.getMessage()); } else { sb.append(o); } } else sb.append("<no more arguments>"); break; default : sb.append(c); } } else { sb.append(c); } } out.println(sb); if (e != null && trace) e.printStackTrace(out); } public void error(String msg, Object... objects) { message("! ", msg, objects); } }
package ecologylab.bigsemantics.metadata.builtins.declarations;

import ecologylab.bigsemantics.metadata.builtins.Audio;
import ecologylab.bigsemantics.metadata.builtins.Clipping;
import ecologylab.bigsemantics.metadata.builtins.Document;
import ecologylab.bigsemantics.metadata.builtins.Image;
import ecologylab.bigsemantics.metadata.builtins.MetadataBuiltinsTypesScope;
import ecologylab.bigsemantics.metadata.builtins.Video;
import ecologylab.bigsemantics.metadata.mm_name;
import ecologylab.bigsemantics.metadata.scalar.MetadataString;
import ecologylab.bigsemantics.metametadata.MetaMetadataCompositeField;
import ecologylab.bigsemantics.namesandnums.SemanticsNames;
import ecologylab.serialization.annotations.Hint;
import ecologylab.serialization.annotations.simpl_collection;
import ecologylab.serialization.annotations.simpl_composite;
import ecologylab.serialization.annotations.simpl_composite_as_scalar;
import ecologylab.serialization.annotations.simpl_hints;
import ecologylab.serialization.annotations.simpl_inherit;
import ecologylab.serialization.annotations.simpl_scalar;
import ecologylab.serialization.annotations.simpl_scope;
import java.lang.String;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * Declaration class holding the serializable fields of a compound document
 * (title, query, description, clippings and media collections), with
 * lazy-initializing accessors for each field.
 */
@simpl_inherit
public class CompoundDocumentDeclaration extends Document
{
  /**
   * The Title of the Document
   */
  @simpl_scalar
  @simpl_hints({Hint.XML_LEAF})
  @simpl_composite_as_scalar
  private MetadataString title;

  /**
   * For debugging. Type of the structure recognized by information extraction.
   */
  @simpl_scalar
  private MetadataString pageStructure;

  /**
   * The search query
   */
  @simpl_scalar
  @simpl_hints({Hint.XML_LEAF})
  private MetadataString query;

  @simpl_scalar
  @simpl_hints({Hint.XML_LEAF})
  private MetadataString description;

  /**
   * Human readable name of the site.
*/ @simpl_scalar private MetadataString siteName; @simpl_scalar private MetadataString textKeywords; @simpl_composite @mm_name("see_also") private Document seeAlso; /** *Clippings that this document contains. */ @simpl_collection @simpl_scope("repository_clippings") @mm_name("clippings") private List<Clipping> clippings; @simpl_composite @mm_name("thumbnail") private Image thumbnail; @simpl_collection("image") @mm_name("main_images") private List<Image> mainImages; @simpl_collection("video") @mm_name("main_videos") private List<Video> mainVideos; @simpl_collection("audio") @mm_name("main_audio") private List<Audio> mainAudio; @simpl_collection("document") @mm_name("article_bodies") private List<Document> articleBodies; public CompoundDocumentDeclaration() { super(); } public CompoundDocumentDeclaration(MetaMetadataCompositeField mmd) { super(mmd); } public MetadataString title() { MetadataString result = this.title; if (result == null) { result = new MetadataString(); this.title = result; } return result; } public String getTitle() { return this.title == null ? null : title().getValue(); } public MetadataString getTitleMetadata() { return title; } public void setTitle(String title) { if (title != null) this.title().setValue(title); } public void setTitleMetadata(MetadataString title) { this.title = title; } public MetadataString pageStructure() { MetadataString result = this.pageStructure; if (result == null) { result = new MetadataString(); this.pageStructure = result; } return result; } public String getPageStructure() { return this.pageStructure == null ? 
null : pageStructure().getValue(); } public MetadataString getPageStructureMetadata() { return pageStructure; } public void setPageStructure(String pageStructure) { if (pageStructure != null) this.pageStructure().setValue(pageStructure); } public void setPageStructureMetadata(MetadataString pageStructure) { this.pageStructure = pageStructure; } public MetadataString query() { MetadataString result = this.query; if (result == null) { result = new MetadataString(); this.query = result; } return result; } public String getQuery() { return this.query == null ? null : query().getValue(); } public MetadataString getQueryMetadata() { return query; } public void setQuery(String query) { if (query != null) this.query().setValue(query); } public void setQueryMetadata(MetadataString query) { this.query = query; } public MetadataString description() { MetadataString result = this.description; if (result == null) { result = new MetadataString(); this.description = result; } return result; } public String getDescription() { return this.description == null ? null : description().getValue(); } public MetadataString getDescriptionMetadata() { return description; } public void setDescription(String description) { if (description != null) this.description().setValue(description); } public void setDescriptionMetadata(MetadataString description) { this.description = description; } public MetadataString siteName() { MetadataString result = this.siteName; if (result == null) { result = new MetadataString(); this.siteName = result; } return result; } public String getSiteName() { return this.siteName == null ? 
null : siteName().getValue(); } public MetadataString getSiteNameMetadata() { return siteName; } public void setSiteName(String siteName) { if (siteName != null) this.siteName().setValue(siteName); } public void setSiteNameMetadata(MetadataString siteName) { this.siteName = siteName; } public MetadataString textKeywords() { MetadataString result = this.textKeywords; if (result == null) { result = new MetadataString(); this.textKeywords = result; } return result; } public String getTextKeywords() { return this.textKeywords == null ? null : textKeywords().getValue(); } public MetadataString getTextKeywordsMetadata() { return textKeywords; } public void setTextKeywords(String textKeywords) { if (textKeywords != null) this.textKeywords().setValue(textKeywords); } public void setTextKeywordsMetadata(MetadataString textKeywords) { this.textKeywords = textKeywords; } public Document getSeeAlso() { return seeAlso; } public void setSeeAlso(Document seeAlso) { this.seeAlso = seeAlso; } public List<Clipping> getClippings() { return clippings; } // lazy evaluation: public List<Clipping> clippings() { if (clippings == null) clippings = new ArrayList<Clipping>(); return clippings; } // addTo: public void addToClippings(Clipping element) { clippings().add(element); } // size: public int clippingsSize() { return clippings == null ? 0 : clippings.size(); } public void setClippings(List<Clipping> clippings) { this.clippings = clippings; } public Image getThumbnail() { return thumbnail; } public void setThumbnail(Image thumbnail) { this.thumbnail = thumbnail; } public List<Image> getMainImages() { return mainImages; } // lazy evaluation: public List<Image> mainImages() { if (mainImages == null) mainImages = new ArrayList<Image>(); return mainImages; } // addTo: public void addToMainImages(Image element) { mainImages().add(element); } // size: public int mainImagesSize() { return mainImages == null ? 
0 : mainImages.size(); } public void setMainImages(List<Image> mainImages) { this.mainImages = mainImages; } public List<Video> getMainVideos() { return mainVideos; } // lazy evaluation: public List<Video> mainVideos() { if (mainVideos == null) mainVideos = new ArrayList<Video>(); return mainVideos; } // addTo: public void addToMainVideos(Video element) { mainVideos().add(element); } // size: public int mainVideosSize() { return mainVideos == null ? 0 : mainVideos.size(); } public void setMainVideos(List<Video> mainVideos) { this.mainVideos = mainVideos; } public List<Audio> getMainAudio() { return mainAudio; } // lazy evaluation: public List<Audio> mainAudio() { if (mainAudio == null) mainAudio = new ArrayList<Audio>(); return mainAudio; } // addTo: public void addToMainAudio(Audio element) { mainAudio().add(element); } // size: public int mainAudioSize() { return mainAudio == null ? 0 : mainAudio.size(); } public void setMainAudio(List<Audio> mainAudio) { this.mainAudio = mainAudio; } public List<Document> getArticleBodies() { return articleBodies; } // lazy evaluation: public List<Document> articleBodies() { if (articleBodies == null) articleBodies = new ArrayList<Document>(); return articleBodies; } // addTo: public void addToArticleBodies(Document element) { articleBodies().add(element); } // size: public int articleBodiesSize() { return articleBodies == null ? 0 : articleBodies.size(); } public void setArticleBodies(List<Document> articleBodies) { this.articleBodies = articleBodies; } }
package com.google.moviestvsentiments.service.web; import static com.google.common.truth.Truth.assertThat; import androidx.arch.core.executor.testing.InstantTaskExecutorRule; import androidx.lifecycle.LiveData; import androidx.lifecycle.MutableLiveData; import com.google.moviestvsentiments.util.LiveDataTestUtil; import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; import junitparams.JUnitParamsRunner; import junitparams.Parameters; import retrofit2.Response; @RunWith(JUnitParamsRunner.class) public class NetworkBoundResourceTest { private static final String LOCAL_VALUE = "Local Value"; private static final String SERVER_VALUE = "Server Value"; @Rule public InstantTaskExecutorRule instantExecutorRule = new InstantTaskExecutorRule(); private Object[] invokesCorrectMethodsParameters() { return new Object[] { new Object[] {false, new boolean[] {false, true, true, false} }, new Object[] {true, new boolean[] {true, true, true, true} } }; } @Test @Parameters(method = "invokesCorrectMethodsParameters") public void networkBoundResource_invokesCorrectMethods(boolean shouldFetchReturnValue, boolean[] expectedInvocations) { final boolean[] invocations = {false, false, false, false}; NetworkBoundResource resource = new NetworkBoundResource<String, String>() { @Override protected void saveCallResult(String item) { invocations[0] = true; } @Override protected boolean shouldFetch(String data) { invocations[1] = true; return shouldFetchReturnValue; } @Override protected LiveData<String> loadFromRoom() { invocations[2] = true; return new MutableLiveData<>("testValue"); } @Override protected LiveData<ApiResponse<String>> performNetworkCall() { invocations[3] = true; return new MutableLiveData<>(new ApiResponse(Response.success("Server value"))); } }; resource.getResult().observeForever(data -> {}); assertThat(invocations).isEqualTo(expectedInvocations); } @Test public void networkBoundResource_passesCorrectParameters() { final String[] shouldFetchParameter 
= new String[1]; final String[] saveCallResultParameter = new String[1]; NetworkBoundResource resource = new NetworkBoundResource<String, String>() { @Override protected void saveCallResult(String item) { saveCallResultParameter[0] = item; } @Override protected boolean shouldFetch(String data) { shouldFetchParameter[0] = data; return true; } @Override protected LiveData<String> loadFromRoom() { return new MutableLiveData<>(LOCAL_VALUE); } @Override protected LiveData<ApiResponse<String>> performNetworkCall() { return new MutableLiveData<>(new ApiResponse(Response.success(SERVER_VALUE))); } }; resource.getResult().observeForever(data -> {}); assertThat(saveCallResultParameter[0]).isEqualTo(SERVER_VALUE); assertThat(shouldFetchParameter[0]).isEqualTo(LOCAL_VALUE); } private Object[] returnsCorrectValueParameters() { return new Object[] { new Object[] { false, null, Resource.success(LOCAL_VALUE) }, new Object[] { true, new ApiResponse(new RuntimeException("Error message")), Resource.error(LOCAL_VALUE, "Error message") }, new Object[] { true, new ApiResponse(Response.success(SERVER_VALUE)), Resource.success(SERVER_VALUE) } }; } @Test @Parameters(method = "returnsCorrectValueParameters") public void networkBoundResource_returnsCorrectValue(boolean shouldFetch, ApiResponse<String> networkResponse, Resource expectedResult) { MutableLiveData<String> roomValue = new MutableLiveData<>(LOCAL_VALUE); NetworkBoundResource resource = new NetworkBoundResource<String, String>() { @Override protected void saveCallResult(String item) { roomValue.setValue(item); } @Override protected boolean shouldFetch(String data) { return shouldFetch; } @Override protected LiveData<String> loadFromRoom() { return roomValue; } @Override protected LiveData<ApiResponse<String>> performNetworkCall() { return new MutableLiveData<>(networkResponse); } }; Resource<String> result = (Resource<String>)LiveDataTestUtil.getValue(resource.getResult()); assertThat(result).isEqualTo(expectedResult); } @Test 
public void networkBoundResource_noRoomResult_returnsLoadingResource() {
        // With no value yet emitted by Room, the resource should report a
        // loading state with a null payload.
        NetworkBoundResource resource = new NetworkBoundResource<String, String>() {
            @Override
            protected void saveCallResult(String item) {}

            @Override
            protected boolean shouldFetch(String data) {
                return false;
            }

            @Override
            protected LiveData<String> loadFromRoom() {
                // Never emits a value.
                return new MutableLiveData<>();
            }

            @Override
            protected LiveData<ApiResponse<String>> performNetworkCall() {
                return null;
            }
        };

        Resource<String> result = (Resource<String>) LiveDataTestUtil.getValue(resource.getResult());

        assertThat(result).isEqualTo(Resource.loading(null));
    }

    @Test
    public void networkBoundResource_noServerResult_returnsLoadingResource() {
        // A fetch is triggered but the network LiveData never emits, so the
        // local value is surfaced in a loading state.
        NetworkBoundResource resource = new NetworkBoundResource<String, String>() {
            @Override
            protected void saveCallResult(String item) {}

            @Override
            protected boolean shouldFetch(String data) {
                return true;
            }

            @Override
            protected LiveData<String> loadFromRoom() {
                return new MutableLiveData<>(LOCAL_VALUE);
            }

            @Override
            protected LiveData<ApiResponse<String>> performNetworkCall() {
                // Never emits a response.
                return new MutableLiveData<>();
            }
        };

        Resource<String> result = (Resource<String>) LiveDataTestUtil.getValue(resource.getResult());

        assertThat(result).isEqualTo(Resource.loading(LOCAL_VALUE));
    }
}
// /* Open Source Software - may be modified and shared by FRC teams. The code */ /* the project. */ package edu.wpi.first.wpilibj.templates; import edu.wpi.first.wpilibj.*; import edu.wpi.first.wpilibj.networktables.NetworkTable; import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; /** * The VM is configured to automatically run this class, and to call the * functions corresponding to each mode, as described in the IterativeRobot * documentation. If you change the name of this class or the package after * creating this project, you must also update the manifest file in the resource * directory.- */ public class CentralCode extends IterativeRobot { Jaguar jag1, jag2, jag3, jag4; Joystick xBox; Victor victor; Solenoid sol1, sol2, sol4, sol5, sol7, sol8; Relay relay; DigitalInput digi2, digi3; AnalogChannel ultrasonic, encoder; double conf; boolean ready, goShoot; int i, noWait; NetworkTable server = NetworkTable.getTable("smartDashboard"); Drive drive; loadAndShoot loadAndShoot; /** * This function is run when the robot is first started up and should be * used for any initialization code. 
*/ public void robotInit() { jag1 = new Jaguar(1); jag2 = new Jaguar(3); jag3 = new Jaguar(3); jag4 = new Jaguar(4); victor = new Victor(5); sol1 = new Solenoid(1); sol2 = new Solenoid(2); sol4 = new Solenoid(4); sol5 = new Solenoid(5); sol7 = new Solenoid(7); sol8 = new Solenoid(8); relay = new Relay(1); ultrasonic = new AnalogChannel(8); digi2 = new DigitalInput(2); digi3 = new DigitalInput(3); encoder = new AnalogChannel(2); xBox = new Joystick(1); conf = 0; noWait = 0; i = 0; ready = false; goShoot = false; drive = new Drive(jag1, jag2, jag3, jag4, sol1, sol2, xBox); loadAndShoot = new loadAndShoot(encoder, victor, sol4, sol5, sol7, sol8, xBox, digi2, digi3); } /** * This function is called periodically during autonomous */ public void autonomousInit() { conf = 0; relay.set(Relay.Value.kOn); noWait = 0; sol1.set(true); //change it to fast setting sol2.set(false); sol4.set(true); sol5.set(false); sol7.set(true); sol8.set(false); } public void autonomousPeriodic() { conf = conf + SmartDashboard.getNumber("Confidence") - 70; if (ultrasonic.getVoltage() <= .96) { jag1.set(0); jag3.set(0); ready = true; } else { jag1.set(1); jag3.set(-1); } if (i >= 100) { goShoot = false; sol7.set(true); sol8.set(false); i = 0; } if (ready && conf >= 40) { goShoot = true; } if (ready && conf < 40) { noWait++; } if (ready && conf < 40 && noWait == 150) { goShoot = true; } if (goShoot && i < 3) { i++; sol4.set(false); sol5.set(true); } if (goShoot && i >= 3) { sol7.set(false); sol8.set(true); i++; } } /** * This function is called periodically during operator control */ public void teleopInit() { relay.set(Relay.Value.kOff); if (!drive.running) { drive.start(); } drive.setRun(true); if (!loadAndShoot.running) { loadAndShoot.start(); } loadAndShoot.setRun(true); } public void teleopPeriodic() { if (digi3.get()) { SmartDashboard.putBoolean("ArmBack", true); } if (!digi3.get()) { SmartDashboard.putBoolean("ArmBack", false); } SmartDashboard.putNumber("Distance in.", 102.4 * 
ultrasonic.getVoltage()); //^need to do this as a boolean eventually } public void disabledInit() { drive.setRun(false); loadAndShoot.setRun(false); } /** * This function is called periodically during test mode */ public void testPeriodic() { } }
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Point;
import java.awt.Polygon;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseMotionAdapter;
import javax.swing.BorderFactory;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import java.util.Map;
import java.util.HashMap;
import java.util.Optional;
import java.util.concurrent.ArrayBlockingQueue;
import sodium.*;

/** An immutable drawable polygon. */
class Element {
    Element(Polygon polygon) {
        this.polygon = polygon;
    }
    private final Polygon polygon;

    /** True if the point lies inside this element's polygon. */
    public boolean contains(Point pt) {
        return polygon.contains(pt);
    }

    /** Returns a NEW element shifted by (pt - orig); this one is unchanged. */
    public Element translate(Point orig, Point pt) {
        int tx = pt.x - orig.x;
        int ty = pt.y - orig.y;
        // Copy the polygon first — Polygon.translate mutates in place.
        Polygon neu = new Polygon(polygon.xpoints,polygon.ypoints, polygon.npoints);
        neu.translate(tx, ty);
        return new Element(neu);
    }

    private static final Color darkGreen = new Color(64, 128, 0);

    public void draw(Graphics g) {
        g.setColor(darkGreen);
        g.fillPolygon(polygon);
        g.setColor(Color.black);
        g.drawPolygon(polygon);
    }
}

/** A (document id, element) pair returned by hit testing. */
class Entry {
    Entry(String id, Element element) {
        this.id = id;
        this.element = element;
    }
    public final String id;
    public final Element element;
}

/** An immutable document: a map from id to element; updates return copies. */
class Document {
    public Document(Map<String,Element> elements) {
        this.elements = elements;
    }
    private final Map<String,Element> elements;

    public Optional<Element> getByID(String id) {
        Element p = elements.get(id);
        return p == null ? Optional.empty() : Optional.of(p);
    }

    /**
     * Hit test: returns the last matching entry in map iteration order.
     * NOTE(review): HashMap iteration order is unspecified, so which of two
     * overlapping elements "wins" is not deterministic — confirm acceptable.
     */
    public Optional<Entry> getByPoint(Point pt) {
        Optional<Entry> oe = Optional.empty();
        for (Map.Entry<String,Element> e : elements.entrySet()) {
            if (e.getValue().contains(pt))
                oe = Optional.of(new Entry(e.getKey(), e.getValue()));
        }
        return oe;
    }

    /** Returns a new document with id bound to the given element. */
    public Document insert(String id, Element polygon) {
        HashMap<String, Element> neu = new HashMap<>(elements);
        neu.put(id, polygon);
        return new Document(neu);
    }

    public void draw(Graphics g) {
        for (Element p : elements.values())
            p.draw(g);
    }
}

/** Mouse event kinds fed to each paradigm. */
enum Type { DOWN, MOVE, UP };

/** A mouse event: kind plus panel-relative position. */
class MouseEvt {
    MouseEvt(Type type, Point pt) {
        this.type = type;
        this.pt = pt;
    }
    public final Type type;
    public final Point pt;
}

/**
 * One drag-and-drop implementation style. Each paradigm receives raw mouse
 * events and pushes updated documents back through the Callback.
 */
interface Paradigm {
    interface Callback {
        void updateDocument(Document doc);
    }
    interface Factory {
        Paradigm create(Document initDoc, Callback cb);
    }
    void mouseEvent(MouseEvt me);
}

/** Drag-and-drop as a classic hand-written state machine. */
class Classic implements Paradigm {
    public Classic(Document initDoc, Callback cb) {
        this.doc = initDoc;
        this.cb = cb;
    }
    private Document doc;
    private final Callback cb;

    /** Drag state: the DOWN event and the entry grabbed by it. */
    private static class Dragging {
        Dragging(MouseEvt me1, Entry ent) {
            this.me1 = me1;
            this.ent = ent;
        }
        final MouseEvt me1;
        final Entry ent;
    }

    private Optional<Dragging> oDragging = Optional.empty();

    public void mouseEvent(MouseEvt me) {
        switch (me.type) {
        case DOWN:
            Optional<Entry> oe = doc.getByPoint(me.pt);
            if (oe.isPresent()) {
                System.out.println("classic dragging "+oe.get().id);
                oDragging = Optional.of(new Dragging(me, oe.get()));
            }
            break;
        case MOVE:
            if (oDragging.isPresent()) {
                Dragging dr = oDragging.get();
                // Always translate the ORIGINAL grabbed element relative to
                // the DOWN point, so rounding does not accumulate.
                doc = doc.insert(dr.ent.id,
                        dr.ent.element.translate(dr.me1.pt, me.pt));
                cb.updateDocument(doc);
            }
            break;
        case UP:
            oDragging = Optional.empty();
            break;
        }
    }
}

/** Drag-and-drop with Sodium functional reactive programming. */
class FRP implements Paradigm {
    public FRP(Document initDoc, Callback cb) {
        // All wiring must happen inside one Sodium transaction so the
        // CellLoop can be closed before anything fires.
        l = Transaction.run(() -> {
            CellLoop<Document> doc = new CellLoop<>();
            // On DOWN over an element, produce a stream of dragged documents;
            // otherwise produce nothing.
            Stream<Stream<Document>> sStartDrag = Stream.filterOptional(
                sMouse.snapshot(doc, (me1, d) -> {
                    if (me1.type == Type.DOWN) {
                        Optional<Entry> oe = d.getByPoint(me1.pt);
                        if (oe.isPresent()) {
                            String id = oe.get().id;
                            Element elt = oe.get().element;
                            System.out.println("FRP dragging "+id);
                            Stream<Document> sMoves = sMouse
                                .filter(me -> me.type == Type.MOVE)
                                .map(me -> d.insert(id,
                                        elt.translate(me1.pt, me.pt)));
                            return Optional.of(sMoves);
                        }
                    }
                    return Optional.empty();
                }));
            // A never-firing stream represents the idle (not dragging) state.
            Stream<Document> sIdle = new Stream<>();
            Stream<Stream<Document>> sEndDrag =
                sMouse.filter(me -> me.type == Type.UP)
                      .map(me -> sIdle);
            // switchS flips between the idle stream and the current drag's
            // document stream.
            Stream<Document> sDocUpdate = Cell.switchS(
                sStartDrag.merge(sEndDrag).hold(sIdle)
            );
            doc.loop(sDocUpdate.hold(initDoc));
            return sDocUpdate.listen(d -> cb.updateDocument(d));
        });
    }
    // Keep a strong reference so the listener is not garbage collected.
    private final Listener l;
    private final StreamSink<MouseEvt> sMouse = new StreamSink<>();

    public void mouseEvent(MouseEvt me) {
        sMouse.send(me);
    }
}

/** Drag-and-drop as an actor: a worker thread consuming a queue of events. */
class Actor implements Paradigm {
    public Actor(Document initDoc, Callback cb) {
        in = new ArrayBlockingQueue<>(1);
        ArrayBlockingQueue<Document> out = new ArrayBlockingQueue<>(1);
        // Worker thread: blocks on events, alternating between "waiting for
        // a DOWN over an element" and "dragging until UP".
        new Thread(() -> {
            try {
                Document doc = initDoc;
                while (true) {
                    MouseEvt me1 = null;
                    Entry ent = null;
                    // Wait for a DOWN that hits an element.
                    while (true) {
                        MouseEvt me = in.take();
                        if (me.type == Type.DOWN) {
                            Optional<Entry> oe = doc.getByPoint(me.pt);
                            if (oe.isPresent()) {
                                me1 = me;
                                ent = oe.get();
                                break;
                            }
                        }
                    }
                    System.out.println("actor dragging "+ent.id);
                    // Drag until UP, publishing each updated document.
                    while (true) {
                        MouseEvt me = in.take();
                        if (me.type == Type.MOVE) {
                            doc = doc.insert(ent.id,
                                    ent.element.translate(me1.pt, me.pt));
                            out.put(doc);
                        }
                        else
                        if (me.type == Type.UP)
                            break;
                    }
                }
            } catch (InterruptedException e) {}
        }).start();
        // Separate thread relays published documents to the callback so the
        // worker never blocks on the UI.
        new Thread(() -> {
            try {
                while (true)
                    cb.updateDocument(out.take());
            } catch (InterruptedException e) {}
        }).start();
    }

    private ArrayBlockingQueue<MouseEvt> in;

    public void mouseEvent(MouseEvt me) {
        try {
            in.put(me);
        } catch (InterruptedException e) {}
    }
}

/** A panel hosting one paradigm: forwards mouse events, repaints on update. */
class ParadigmView extends JPanel implements Paradigm.Callback {
    public ParadigmView(Document initDoc, Paradigm.Factory factory) {
        this.doc = initDoc;
        this.paradigm = factory.create(initDoc, this);
        setBorder(BorderFactory.createLineBorder(Color.black));
        addMouseListener(new MouseAdapter() {
            public void mousePressed(java.awt.event.MouseEvent ev) {
                paradigm.mouseEvent(new MouseEvt(Type.DOWN,
                        new Point(ev.getX(), ev.getY())));
            }
            public void mouseReleased(java.awt.event.MouseEvent ev) {
                paradigm.mouseEvent(new MouseEvt(Type.UP,
                        new Point(ev.getX(), ev.getY())));
            }
        });
        addMouseMotionListener(new MouseMotionAdapter() {
            public void mouseDragged(java.awt.event.MouseEvent ev) {
                paradigm.mouseEvent(new MouseEvt(Type.MOVE,
                        new Point(ev.getX(), ev.getY())));
            }
            public void mouseMoved(java.awt.event.MouseEvent ev) {
                paradigm.mouseEvent(new MouseEvt(Type.MOVE,
                        new Point(ev.getX(), ev.getY())));
            }
        });
    }

    private Document doc;
    private Paradigm paradigm;

    public Dimension getPreferredSize() {
        return new Dimension(250, 300);
    }

    public void updateDocument(Document doc) {
        this.doc = doc;
        repaint();
    }

    public void paintComponent(Graphics g) {
        super.paintComponent(g);
        doc.draw(g);
    }
}

/** Demo comparing three drag-and-drop implementations side by side. */
public class BattleOfTheParadigms {
    /** Builds a regular polygon of the given side count, centered at (ox, oy)
     *  with radius 25, rotated by `angle` degrees. */
    private static Element shape(int ox, int oy, int sides, double angle) {
        int[] xs = new int[sides];
        int[] ys = new int[sides];
        angle *= Math.PI / 180.0;
        for (int i = 0; i < sides; i++) {
            double theta = angle + Math.PI * 2 * (double) i / (double) sides;
            xs[i] = (int)((double)ox + Math.sin(theta) * 25);
            ys[i] = (int)((double)oy - Math.cos(theta) * 25);
        }
        return new Element(new Polygon(xs, ys, sides));
    }

    public static void main(String[] args) {
        HashMap<String, Element> elements = new HashMap<String, Element>();
        elements.put("triangle", shape(50, 50, 3, 0.0));
        elements.put("square", shape(125, 50, 4, 45.0));
        elements.put("pentagon", shape(200, 50, 5, 0.0));
        elements.put("hexagon", shape(50, 125, 6, 30.0));
        elements.put("heptagon", shape(125, 125, 7, 0.0));
        elements.put("octagon", shape(200, 125, 8, 22.5));
        Document doc = new Document(elements);
        JFrame frame = new JFrame("BattleOfTheParadigms");
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        JPanel view = new JPanel();
        GridBagLayout gridbag = new GridBagLayout();
        view.setLayout(gridbag);
        GridBagConstraints c = new GridBagConstraints();
        c.fill = GridBagConstraints.BOTH;
        c.weightx = 1.0;
        c.gridwidth = 1;
        c.gridheight = 1;
        // Row 0: the banner spanning all three columns.
        c.gridx = 0;
        c.gridy = 0;
        c.gridwidth = 3;
        view.add(new JLabel("Drag the polygons with your mouse"), c);
        c.gridwidth = 1;
        // Column 0: classic state machine.
        c.gridx = 0;
        c.gridy = 1;
        view.add(new ParadigmView(doc, (initDoc, cb) -> new Classic(initDoc, cb)), c);
        c.gridx = 0;
        c.gridy = 2;
        view.add(new JLabel("classic state machine"), c);
        // Column 1: FRP.
        c.gridx = 1;
        c.gridy = 1;
        view.add(new ParadigmView(doc, (initDoc, cb) -> new FRP(initDoc, cb)), c);
        c.gridx = 1;
        c.gridy = 2;
        view.add(new JLabel("FRP"), c);
        // Column 2: actor model.
        c.gridx = 2;
        c.gridy = 1;
        view.add(new ParadigmView(doc, (initDoc, cb) -> new Actor(initDoc, cb)), c);
        c.gridx = 2;
        c.gridy = 2;
        view.add(new JLabel("actor model"), c);
        frame.setContentPane(view);
        frame.pack();
        frame.setVisible(true);
    }
}
package kbasesearchengine;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.annotation.Generated;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;

/**
 * <p>Original spec-file type: SearchObjectsInput</p>
 * <pre>
 * Input parameters for 'search_objects' method.
 * </pre>
 *
 * NOTE: this class is generated by jsonschema2pojo from the KBase spec file.
 * Do not hand-edit; regenerate from the spec instead.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
@Generated("com.googlecode.jsonschema2pojo")
@JsonPropertyOrder({
    "object_types",
    "match_filter",
    "sorting_rules",
    "access_filter",
    "pagination",
    "post_processing"
})
public class SearchObjectsInput {

    @JsonProperty("object_types")
    private List<String> objectTypes;
    /**
     * <p>Original spec-file type: MatchFilter</p>
     * <pre>
     * Optional rules of defining constrains for object properties
     * including values of keywords or metadata/system properties (like
     * object name, creation time range) or full-text search in all
     * properties.
     * </pre>
     *
     */
    @JsonProperty("match_filter")
    private MatchFilter matchFilter;
    @JsonProperty("sorting_rules")
    private List<SortingRule> sortingRules;
    /**
     * <p>Original spec-file type: AccessFilter</p>
     * <pre>
     * Optional rules of access constrains.
     *   - with_private - include data found in workspaces not marked
     *       as public, default value is true,
     *   - with_public - include data found in public workspaces,
     *       default value is false,
     *   - with_all_history - include all versions (last one and all
     *       old versions) of objects matching constrains, default
     *       value is false.
     * </pre>
     *
     */
    @JsonProperty("access_filter")
    private AccessFilter accessFilter;
    /**
     * <p>Original spec-file type: Pagination</p>
     * <pre>
     * Pagination rules. Default values are: start = 0, count = 50.
     * </pre>
     *
     */
    @JsonProperty("pagination")
    private Pagination pagination;
    /**
     * <p>Original spec-file type: PostProcessing</p>
     * <pre>
     * Rules for what to return about found objects.
     * skip_info - do not include brief info for object ('guid,
     *     'parent_guid', 'object_name' and 'timestamp' fields in
     *     ObjectData structure),
     * skip_keys - do not include keyword values for object
     *     ('key_props' field in ObjectData structure),
     * skip_data - do not include raw data for object ('data' and
     *     'parent_data' fields in ObjectData structure),
     * ids_only - shortcut to mark all three skips as true.
     * </pre>
     *
     */
    @JsonProperty("post_processing")
    private PostProcessing postProcessing;
    // Catch-all for JSON properties not declared above (see @JsonAnySetter).
    private Map<String, Object> additionalProperties = new HashMap<String, Object>();

    @JsonProperty("object_types")
    public List<String> getObjectTypes() {
        return objectTypes;
    }

    @JsonProperty("object_types")
    public void setObjectTypes(List<String> objectTypes) {
        this.objectTypes = objectTypes;
    }

    // Fluent setter for builder-style call chains.
    public SearchObjectsInput withObjectTypes(List<String> objectTypes) {
        this.objectTypes = objectTypes;
        return this;
    }

    /**
     * <p>Original spec-file type: MatchFilter</p>
     * <pre>
     * Optional rules of defining constrains for object properties
     * including values of keywords or metadata/system properties (like
     * object name, creation time range) or full-text search in all
     * properties.
     * </pre>
     *
     */
    @JsonProperty("match_filter")
    public MatchFilter getMatchFilter() {
        return matchFilter;
    }

    /**
     * <p>Original spec-file type: MatchFilter</p>
     * <pre>
     * Optional rules of defining constrains for object properties
     * including values of keywords or metadata/system properties (like
     * object name, creation time range) or full-text search in all
     * properties.
     * </pre>
     *
     */
    @JsonProperty("match_filter")
    public void setMatchFilter(MatchFilter matchFilter) {
        this.matchFilter = matchFilter;
    }

    public SearchObjectsInput withMatchFilter(MatchFilter matchFilter) {
        this.matchFilter = matchFilter;
        return this;
    }

    @JsonProperty("sorting_rules")
    public List<SortingRule> getSortingRules() {
        return sortingRules;
    }

    @JsonProperty("sorting_rules")
    public void setSortingRules(List<SortingRule> sortingRules) {
        this.sortingRules = sortingRules;
    }

    public SearchObjectsInput withSortingRules(List<SortingRule> sortingRules) {
        this.sortingRules = sortingRules;
        return this;
    }

    /**
     * <p>Original spec-file type: AccessFilter</p>
     * <pre>
     * Optional rules of access constrains.
     *   - with_private - include data found in workspaces not marked
     *       as public, default value is true,
     *   - with_public - include data found in public workspaces,
     *       default value is false,
     *   - with_all_history - include all versions (last one and all
     *       old versions) of objects matching constrains, default
     *       value is false.
     * </pre>
     *
     */
    @JsonProperty("access_filter")
    public AccessFilter getAccessFilter() {
        return accessFilter;
    }

    /**
     * <p>Original spec-file type: AccessFilter</p>
     * <pre>
     * Optional rules of access constrains.
     *   - with_private - include data found in workspaces not marked
     *       as public, default value is true,
     *   - with_public - include data found in public workspaces,
     *       default value is false,
     *   - with_all_history - include all versions (last one and all
     *       old versions) of objects matching constrains, default
     *       value is false.
     * </pre>
     *
     */
    @JsonProperty("access_filter")
    public void setAccessFilter(AccessFilter accessFilter) {
        this.accessFilter = accessFilter;
    }

    public SearchObjectsInput withAccessFilter(AccessFilter accessFilter) {
        this.accessFilter = accessFilter;
        return this;
    }

    /**
     * <p>Original spec-file type: Pagination</p>
     * <pre>
     * Pagination rules. Default values are: start = 0, count = 50.
     * </pre>
     *
     */
    @JsonProperty("pagination")
    public Pagination getPagination() {
        return pagination;
    }

    /**
     * <p>Original spec-file type: Pagination</p>
     * <pre>
     * Pagination rules. Default values are: start = 0, count = 50.
     * </pre>
     *
     */
    @JsonProperty("pagination")
    public void setPagination(Pagination pagination) {
        this.pagination = pagination;
    }

    public SearchObjectsInput withPagination(Pagination pagination) {
        this.pagination = pagination;
        return this;
    }

    /**
     * <p>Original spec-file type: PostProcessing</p>
     * <pre>
     * Rules for what to return about found objects.
     * skip_info - do not include brief info for object ('guid,
     *     'parent_guid', 'object_name' and 'timestamp' fields in
     *     ObjectData structure),
     * skip_keys - do not include keyword values for object
     *     ('key_props' field in ObjectData structure),
     * skip_data - do not include raw data for object ('data' and
     *     'parent_data' fields in ObjectData structure),
     * ids_only - shortcut to mark all three skips as true.
     * </pre>
     *
     */
    @JsonProperty("post_processing")
    public PostProcessing getPostProcessing() {
        return postProcessing;
    }

    /**
     * <p>Original spec-file type: PostProcessing</p>
     * <pre>
     * Rules for what to return about found objects.
     * skip_info - do not include brief info for object ('guid,
     *     'parent_guid', 'object_name' and 'timestamp' fields in
     *     ObjectData structure),
     * skip_keys - do not include keyword values for object
     *     ('key_props' field in ObjectData structure),
     * skip_data - do not include raw data for object ('data' and
     *     'parent_data' fields in ObjectData structure),
     * ids_only - shortcut to mark all three skips as true.
     * </pre>
     *
     */
    @JsonProperty("post_processing")
    public void setPostProcessing(PostProcessing postProcessing) {
        this.postProcessing = postProcessing;
    }

    public SearchObjectsInput withPostProcessing(PostProcessing postProcessing) {
        this.postProcessing = postProcessing;
        return this;
    }

    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @JsonAnySetter
    public void setAdditionalProperties(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

    @Override
    public String toString() {
        return ((((((((((((((("SearchObjectsInput"+" [objectTypes=")+ objectTypes)+", matchFilter=")+ matchFilter)+", sortingRules=")+ sortingRules)+", accessFilter=")+ accessFilter)+", pagination=")+ pagination)+", postProcessing=")+ postProcessing)+", additionalProperties=")+ additionalProperties)+"]");
    }

}
/* Open Source Software - may be modified and shared by FRC teams. The code   */
/* the project.                                                               */

package edu.wpi.first.wpilibj.templates;

import edu.wpi.first.wpilibj.DriverStation;
import edu.wpi.first.wpilibj.Jaguar;
import edu.wpi.first.wpilibj.SimpleRobot;
import edu.wpi.first.wpilibj.Joystick;
import edu.wpi.first.wpilibj.Timer;

/**
 * 2014 competition robot: arcade drive, a throwing arm, an ultrasonic
 * sonar, and a tail extension motor tuned from driver-station analog dials.
 */
public class Natasha2014 extends SimpleRobot {

    Joystick leftstick = new Joystick(1);
    Joystick rightstick = new Joystick(2);
    DriverStation ds;
    // Can we get this long line to be split onto 2 lines, for readability?
    DriveTrain dt = new DriveTrain(Constants.frontLeft, Constants.rearLeft, Constants.frontRight, Constants.rearRight);
    Throweraterenator thrower = new Throweraterenator();
    SinisterSonar sonar = new SinisterSonar();
    private Jaguar motorTail = new Jaguar(Constants.PWM_TAIL);

    /** One-time setup: invert drive motors and home the thrower. */
    protected void robotInit() {
        System.out.println("RobotInit...");
        ds = DriverStation.getInstance();
        dt.setMotorsInverted();
        thrower.initThrower();
    }

    public void autonomous() {
    }

    /**
     * This function is called once each time the robot enters operator control.
     */
    public void operatorControl() {
        System.out.println("Teleop...");
        dt.setSafetyEnabled(true);
        while(isOperatorControl() && isEnabled()){
            // Slow-mode button scales down both drive axes.
            if (leftstick.getRawButton(Constants.JB_DRIVE_SLOW)) {
                dt.arcadeDrive(leftstick.getY() * .7, leftstick.getX() * .5);
            } else {
                dt.arcadeDrive(leftstick.getY(), leftstick.getX() * .7);
            }
            // Analog-in dials are 0-5 V; dividing by 5 normalizes to 0..1.
            thrower.setThrowSpeed(ds.getAnalogIn(1)/5);
            thrower.setThrowArc((int)(ds.getAnalogIn(2)/5 * 300));
            thrower.setStowSpeed(-0.2);
            // Telemetry printed every loop iteration.
            System.out.print(Timer.getFPGATimestamp() );
            System.out.print(" pos:" + thrower.position() );
            System.out.print(" arc: " + thrower.getThrowArc() );
            System.out.print(" sonar: " + sonar.getDistance() );
            System.out.println(" status: " + thrower.getStatus() );
            if (leftstick.getRawButton(Constants.JB_THROWER_ENCODER_RESET)) {
                thrower.resetEncoder();
            }
            // Two-button interlock so the throw cannot fire accidentally.
            if (leftstick.getRawButton(Constants.JB_INIT_THROW_1)
                    && leftstick.getRawButton(Constants.JB_INIT_THROW_2) ) {
                thrower.startThrow();
            }
            thrower.update();
            // Tail motor speed comes from analog dial 3; sign picks direction.
            if (leftstick.getRawButton(Constants.JB_TAIL_EXTEND)){
                motorTail.set(-(ds.getAnalogIn(3)/5));
            } else if (leftstick.getRawButton(Constants.JB_TAIL_RETRACT)) {
                motorTail.set((ds.getAnalogIn(3)/5));
            } else {
                motorTail.set(0);
            }
            Timer.delay(Constants.TELEOP_LOOP_DELAY_SECS);
        }
    }

    /**
     * This function is called once each time the robot enters test mode.
     */
    public void test() {
    }
}
package eu.modelwriter.marker.ui.internal.wizards.markerwizard; import org.eclipse.core.resources.IFile; import org.eclipse.jface.dialogs.MessageDialog; import org.eclipse.jface.viewers.ISelection; import org.eclipse.jface.wizard.Wizard; import eu.modelwriter.marker.MarkerActivator; import eu.modelwriter.marker.typing.internal.CreateMarkerWithType; public class MarkerWizard extends Wizard { private MarkerPage page; private ISelection selection; private IFile file; public MarkerWizard(ISelection selection, IFile file) { super(); this.selection = selection; this.file = file; } @Override public String getWindowTitle() { return "Marking with Tag"; } @Override public void addPages() { page = new MarkerPage(); super.addPages(); this.addPage(page); } @Override public boolean performFinish() { if (MarkerPage.markTreeViewer.getTree().getSelection().length != 1) { MessageDialog dialog = new MessageDialog(MarkerActivator.getShell(), "Marker Type Information", null, "Please select one marker type", MessageDialog.INFORMATION, new String[] {"OK"}, 0); dialog.open(); } else { CreateMarkerWithType.createMarker(file, selection, MarkerPage.markTreeViewer.getTree().getSelection()[0].getText()); MessageDialog dialog = new MessageDialog(MarkerActivator.getShell(), "Marker Type Information", null, "Marker has been created with selected type", MessageDialog.INFORMATION, new String[] {"OK"}, 0); dialog.open(); } return true; } }
package cs.si.stavor.fragments;

import cs.si.stavor.R;
import cs.si.stavor.MainActivity;
import cs.si.stavor.StavorApplication;
import cs.si.stavor.app.Parameters;
import cs.si.stavor.database.MissionReaderContract;
import cs.si.stavor.database.ReaderDbHelper;
import cs.si.stavor.database.SerializationUtil;
import cs.si.stavor.database.MissionReaderContract.MissionEntry;
import cs.si.stavor.dialogs.CopyMissionDialogFragment;
import cs.si.stavor.dialogs.DeleteMissionDialogFragment;
import cs.si.stavor.mission.Mission;
import cs.si.stavor.mission.MissionAndId;
import cs.si.stavor.simulator.Simulator;
import cs.si.stavor.simulator.SimulatorStatus;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.DialogFragment;
import android.app.LoaderManager.LoaderCallbacks;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.app.Fragment;
import android.content.Loader;
import android.content.SharedPreferences;
import android.database.Cursor;
import android.graphics.Paint;
import android.view.ContextMenu;
import android.view.ContextMenu.ContextMenuInfo;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.AbsListView;
import android.widget.AdapterView;
import android.widget.AutoCompleteTextView;
import android.widget.Button;
import android.widget.CompoundButton;
import android.widget.EditText;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.SimpleCursorAdapter;
import android.widget.Switch;
import android.widget.Toast;
import android.widget.AbsListView.OnScrollListener;
import android.widget.CompoundButton.OnCheckedChangeListener;
import android.widget.TextView;
import android.widget.ViewSwitcher;

import com.commonsware.cwac.loaderex.SQLiteCursorLoader;

/**
 * Fragment to show all the simulator configurations
 *
 * @author Xavier Gibert
 *
 */
public final class SimulatorFragment extends Fragment implements LoaderCallbacks<Cursor> {
    /**
     * The fragment argument representing the section number for this
     * fragment.
     */
    private static final String ARG_SECTION_NUMBER = "section_number";

    /**
     * Returns a new instance of this fragment for the given section number.
     * @param simulation
     * @param sim_config
     */
    public static SimulatorFragment newInstance(int sectionNumber) {
        SimulatorFragment fragment = new SimulatorFragment();
        Bundle args = new Bundle();
        args.putInt(ARG_SECTION_NUMBER, sectionNumber);
        fragment.setArguments(args);
        return fragment;
    }

    public SimulatorFragment() {
    }

    public Simulator simulator;
    Switch switch_remote;                 // toggles local vs. remote simulator
    ViewSwitcher sim_container;           // flips between the two simulator screens
    SharedPreferences sharedPref;
    Button button_connect;
    AutoCompleteTextView host_view;       // remote host input
    EditText port_view;                   // remote port input
    ListView missionsList;

    @SuppressLint({ "JavascriptInterface", "SetJavaScriptEnabled", "NewApi" })
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        View rootView = inflater.inflate(R.layout.sim, container, false);

        ((MainActivity)getActivity()).showTutorialSimulator();

        sharedPref = PreferenceManager.getDefaultSharedPreferences(getActivity().getApplicationContext());
        simulator = ((MainActivity)getActivity()).getSimulator();

        //Load missions in list
        missionsList = (ListView) rootView.findViewById(R.id.listView1);
        missionsList.setOnItemClickListener(new AdapterView.OnItemClickListener(){
            @Override
            public void onItemClick(AdapterView<?> arg0, View arg1, int arg2,
                    long arg3) {
                if(arg1!=null){
                    // The mission id/name are read back out of the row's views.
                    // NOTE(review): activeMissionId/activeMissionName/markActiveMission
                    // are declared elsewhere in this class (outside this chunk).
                    activeMissionId = Integer.parseInt(((TextView)arg1.findViewById(R.id.textViewMissionId)).getText().toString());
                    activeMissionName=((TextView)arg1.findViewById(R.id.textViewMission)).getText().toString();
                    markActiveMission();
                }else{
                    activeMissionId=-1;
                    activeMissionName="";
                }
            }
        });
        missionsList.setOnScrollListener(new OnScrollListener(){
            public void onScroll(AbsListView view, int firstVisibleItem,
                    int visibleItemCount, int totalItemCount) {
                updateListSelection();
            }
            public void onScrollStateChanged(AbsListView view, int scrollState) {
                //if(scrollState == 0){//Stop scroll
                updateListSelection();
            }
        });
        // Cursor adapter mapping DB columns to the mission row layout.
        adapter = new SimpleCursorAdapter(
                this.getActivity().getApplicationContext(),
                R.layout.mission_list_item,
                null,
                new String[] {"_id", "name", "description"},
                new int[] {R.id.textViewMissionId, R.id.textViewMission, R.id.textViewMissionDescription},
                0
        );
        missionsList.setAdapter(adapter);
        registerForContextMenu(missionsList);
        getLoaderManager().initLoader(R.id.listView1, null, this);

        //Switch local/remote
        switch_remote = (Switch) rootView.findViewById(R.id.switch1);
        sim_container = (ViewSwitcher) rootView.findViewById(R.id.sim_content);
        loadCorrectSimulatorScreen(rootView);
        switch_remote.setOnCheckedChangeListener(new OnCheckedChangeListener() {
            public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                // Persist the choice, then swap the visible screen.
                sharedPref.edit().putBoolean(buttonView.getContext().getString(R.string.pref_key_sim_global_remote), isChecked).commit();
                loadCorrectSimulatorScreen(buttonView);
            }
        });

        host_view = (AutoCompleteTextView) rootView.findViewById(R.id.autoCompleteTextViewHost);
        port_view = (EditText) rootView.findViewById(R.id.editTextPort);
        String host = sharedPref.getString(getString(R.string.pref_key_sim_remote_host), Parameters.Simulator.Remote.default_host);
        String port = sharedPref.getString(getString(R.string.pref_key_sim_remote_port), Parameters.Simulator.Remote.default_port);
        host_view.setText(host);
        port_view.setText(port);

        button_connect = (Button) rootView.findViewById(R.id.buttonConnect);
        simulator.setButtonConnect(button_connect);
        simulator.setSwitchView(switch_remote);
        // Connect button toggles: disconnect when connected, otherwise connect
        // either remotely (persisting host/port first) or locally with the
        // currently selected mission.
        button_connect.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                if(simulator.getSimulatorStatus().equals(SimulatorStatus.Connected)){
                    //int tmp_sel = last_mission_selection;
                    simulator.disconnect();
                    //selectMissionInList(tmp_sel);
                    updateListSelection();
                }else{
                    boolean remote = sharedPref.getBoolean(v.getContext().getString(R.string.pref_key_sim_global_remote), false);
                    if(remote){
                        sharedPref.edit().putString(v.getContext().getString(
                                R.string.pref_key_sim_remote_host),
                                host_view.getText().toString()
                                ).commit();
                        sharedPref.edit().putString(v.getContext().getString(
                                R.string.pref_key_sim_remote_port),
                                port_view.getText().toString()
                                ).commit();
                        simulator.connect();
                    }else{
                        //Set mission
                        MissionAndId mis = getMission(activeMissionId);
                        if(mis!=null){
                            simulator.setSelectedMission(mis.mission, mis.id);
                            simulator.connect();
                        }else{
                            Toast.makeText(getActivity().getApplicationContext(), getString(R.string.sim_local_cannot_deserialize_selected_mission), Toast.LENGTH_LONG).show();
                        }
                    }
                }
            }
        });

        //Delete Button
        // NOTE(review): button_delete/button_new appear to be fields declared
        // outside this chunk — confirm against the full file.
        button_delete = (Button)rootView.findViewById(R.id.buttonMissionDelete);
        button_delete.setOnClickListener(new OnClickListener(){
            @Override
            public void onClick(View arg0) {
                //if(simulator.getSimulatorStatus().equals(SimulatorStatus.Connected)){
                //Toast.makeText(getActivity().getApplicationContext(), getString(R.string.sim_stop_simulator_first), Toast.LENGTH_LONG).show();
                //}else{
                if(activeMissionId==-1){
                    Toast.makeText(getActivity().getApplicationContext(), getString(R.string.sim_local_select_first_a_mission), Toast.LENGTH_LONG).show();
                }else if (activeMissionId<=Parameters.Simulator.amount_mission_examples ){
                    // Bundled example missions cannot be removed.
                    Toast.makeText(getActivity().getApplicationContext(), getString(R.string.sim_local_mission_not_removable), Toast.LENGTH_LONG).show();
                }else{
                    showDeleteMissionDialog(activeMissionId, activeMissionName);
                }
            }
        });

        //New Button
        button_new = (Button)rootView.findViewById(R.id.buttonMissionNew);
        button_new.setOnClickListener(new OnClickListener(){
            @Override
            public void onClick(View arg0) {
                //if(simulator.getSimulatorStatus().equals(SimulatorStatus.Connected)){
                //Toast.makeText(getActivity().getApplicationContext(), getString(R.string.sim_stop_simulator_first), Toast.LENGTH_LONG).show();
                //}else{
((MainActivity)getActivity()).showMissionCreator(); } }); //Edit Button button_edit = (Button)rootView.findViewById(R.id.buttonMissionEdit); button_edit.setOnClickListener(new OnClickListener(){ @Override public void onClick(View arg0) { //if(simulator.getSimulatorStatus().equals(SimulatorStatus.Connected)){ //Toast.makeText(getActivity().getApplicationContext(), getString(R.string.sim_stop_simulator_first), Toast.LENGTH_LONG).show(); //}else{ if(activeMissionId==-1){ Toast.makeText(getActivity().getApplicationContext(), getString(R.string.sim_local_select_first_a_mission), Toast.LENGTH_LONG).show(); }else if (activeMissionId<=Parameters.Simulator.amount_mission_examples){ Toast.makeText(getActivity().getApplicationContext(), getString(R.string.sim_local_mission_not_editable), Toast.LENGTH_LONG).show(); }else{ MissionAndId mis = getMission(activeMissionId); if(mis!=null){ ((MainActivity)getActivity()).showMissionEditor(mis); }else{ Toast.makeText(getActivity().getApplicationContext(), getString(R.string.sim_local_cannot_deserialize_selected_mission), Toast.LENGTH_LONG).show(); } } } }); return rootView; } /** * Returns the selected Mission from the database * @return */ private MissionAndId getMission(int id){ String[] projection = { MissionEntry._ID, MissionEntry.COLUMN_NAME_CLASS }; Cursor c = ((StavorApplication)((MainActivity)getActivity()).getApplication()).db .query( MissionEntry.TABLE_NAME, // The table to query projection, // The columns to return MissionEntry._ID+" = ?", // The columns for the WHERE clause new String[]{Integer.toString(id)}, // The values for the WHERE clause "", // don't group the rows "", // don't filter by row groups null // The sort order ); if (c != null && c.getCount() > 0) { c.moveToFirst(); int idIndex = c.getColumnIndex(MissionEntry._ID); int nameIndex = c.getColumnIndex(MissionEntry.COLUMN_NAME_CLASS); //this.itemId = cursor.getLong(idIndex); byte[] mission_serie = c.getBlob(nameIndex); int mission_id = c.getInt(idIndex); Mission 
mis = SerializationUtil.deserialize(mission_serie); return new MissionAndId(mis, mission_id); }else{ Toast.makeText(getActivity().getApplicationContext(), getString(R.string.sim_local_cannot_find_selected_mission_in_db), Toast.LENGTH_LONG).show(); return null; } } /** * Shows the delete mission confirmation dialog * @param id Mission id * @param name Mission name */ public void showDeleteMissionDialog(int id, String name) { DialogFragment newFragment = DeleteMissionDialogFragment.newInstance(id, name); newFragment.setCancelable(true); newFragment.show(getFragmentManager(), "delete"); } /** * Shows the copy mission confirmation dialog * @param id Mission id * @param name Mission name */ public void showCopyMissionDialog(int id, String name, Mission mis) { DialogFragment newFragment = CopyMissionDialogFragment.newInstance(id, name, mis); newFragment.setCancelable(true); newFragment.show(getFragmentManager(), "copy"); } /** * load local or remote simulator view * @param view */ private void loadCorrectSimulatorScreen(View view) { simulator.updateConnectButtonText(); boolean remote = sharedPref.getBoolean(view.getContext().getString(R.string.pref_key_sim_global_remote), false); switch_remote.setChecked(remote); if(remote){ // Remote sim_container.setDisplayedChild(1); }else{ // Local sim_container.setDisplayedChild(0); missionsList.post(new Runnable() { @Override public void run() { if(isAdded()){ boolean remote = sharedPref.getBoolean(getString(R.string.pref_key_sim_global_remote), false); if(!remote){ selectFirstMissionInList(); } } } }); } } /** * Change the text color of the missions in list that is selected */ private void markActiveMission() { if(missionsList!=null){ for(int i = 0; i < missionsList.getChildCount(); i++){ LinearLayout lay = (LinearLayout)missionsList.getChildAt(i); TextView text_id = (TextView)lay.findViewById(R.id.textViewMissionId); TextView text_name = (TextView)lay.findViewById(R.id.textViewMission); 
if(simulator.getSelectedMissionid()==Integer.parseInt(text_id.getText().toString())){ text_name.setTextColor(getResources().getColor(R.color.selected_mission)); text_name.setPaintFlags(text_name.getPaintFlags() | Paint.UNDERLINE_TEXT_FLAG); }else{ text_name.setTextColor(getResources().getColor(R.color.white)); text_name.setPaintFlags(text_name.getPaintFlags() & (~Paint.UNDERLINE_TEXT_FLAG)); } } } } private void selectFirstMissionInList(){ try{ missionsList.setItemChecked(0, true); missionsList.setSelection(0); Cursor curs = (Cursor)missionsList.getItemAtPosition(0); activeMissionId = curs.getInt(curs.getColumnIndex(MissionReaderContract.MissionEntry._ID)); activeMissionName = curs.getString(curs.getColumnIndex(MissionReaderContract.MissionEntry.COLUMN_NAME_NAME)); markActiveMission(); }catch(Exception e){ e.printStackTrace(); } } private void selectMissionByKey(int key){ if(key!=-1){ boolean found = false; for(int j = 0; j<adapter.getCount(); j++){ adapter.getCursor().moveToPosition(j); int mis_key = adapter.getCursor().getInt( adapter.getCursor().getColumnIndex( MissionReaderContract.MissionEntry._ID)); if(mis_key == key){ found=true; missionsList.setItemChecked(j, true); missionsList.setSelection(j); Cursor curs = (Cursor)missionsList.getItemAtPosition(j); activeMissionId = curs.getInt(curs.getColumnIndex(MissionReaderContract.MissionEntry._ID)); activeMissionName = curs.getString(curs.getColumnIndex(MissionReaderContract.MissionEntry.COLUMN_NAME_NAME)); markActiveMission(); } } if(!found){ selectFirstMissionInList(); } }else{ selectFirstMissionInList(); } } private void updateListSelection(){ markActiveMission(); } @Override public void onAttach(Activity activity) { super.onAttach(activity); ((MainActivity) activity).onSectionAttached(getArguments().getInt( ARG_SECTION_NUMBER)); } @Override public Loader<Cursor> onCreateLoader(int arg0, Bundle arg1) { ReaderDbHelper db_help = ((StavorApplication)((MainActivity)getActivity()).getApplication()).db_help; String 
sql="SELECT _ID, name, description FROM "+MissionEntry.TABLE_NAME+" ORDER BY name COLLATE NOCASE ASC;"; String[] params = null; SQLiteCursorLoader loader = new SQLiteCursorLoader( getActivity().getApplicationContext(), db_help, sql, params); return loader; } SimpleCursorAdapter adapter; int activeMissionId = -1; String activeMissionName = ""; @Override public void onLoadFinished(Loader<Cursor> loader, Cursor cursor) { ((StavorApplication)((MainActivity)getActivity()).getApplication()).loader=(SQLiteCursorLoader)loader; adapter.changeCursor(cursor); if (cursor != null && cursor.getCount() > 0) { cursor.moveToFirst(); missionsList.post(new Runnable() { @Override public void run() { boolean remote = sharedPref.getBoolean(getString(R.string.pref_key_sim_global_remote), false); if(!remote){ selectMissionByKey(activeMissionId); } } }); } } @Override public void onLoaderReset(Loader<Cursor> arg0) { adapter.changeCursor(null); } @Override public void onCreateContextMenu(ContextMenu menu, View v, ContextMenuInfo menuInfo) { if (v.getId()==R.id.listView1) { AdapterView.AdapterContextMenuInfo info = (AdapterView.AdapterContextMenuInfo)menuInfo; missionsList.setItemChecked(info.position, true); missionsList.setSelection(info.position); Cursor curs = (Cursor)missionsList.getItemAtPosition(info.position); activeMissionId = curs.getInt(curs.getColumnIndex(MissionReaderContract.MissionEntry._ID)); activeMissionName = curs.getString(curs.getColumnIndex(MissionReaderContract.MissionEntry.COLUMN_NAME_NAME)); markActiveMission(); if(adapter!=null && adapter.getCursor()!=null){ adapter.getCursor().moveToPosition(info.position); String header = adapter.getCursor().getString( adapter.getCursor().getColumnIndex(MissionReaderContract.MissionEntry.COLUMN_NAME_NAME)); menu.setHeaderTitle(header); } String[] menuItems = getResources().getStringArray(R.array.missions_menu); for (int i = 0; i<menuItems.length; i++) { menu.add(Menu.NONE, i, i, menuItems[i]); } } } @Override public boolean 
onContextItemSelected(MenuItem item) { AdapterView.AdapterContextMenuInfo info = (AdapterView.AdapterContextMenuInfo)item.getMenuInfo(); int menuItemIndex = item.getItemId(); //String[] menuItems = getResources().getStringArray(R.array.missions_menu); //String menuItemName = menuItems[menuItemIndex]; int listItemKey = -1; try{ if(adapter!=null && adapter.getCursor()!=null){ adapter.getCursor().moveToPosition(info.position); listItemKey = adapter.getCursor().getInt( adapter.getCursor().getColumnIndex(MissionReaderContract.MissionEntry._ID)); String listItemName = adapter.getCursor().getString( adapter.getCursor().getColumnIndex(MissionReaderContract.MissionEntry.COLUMN_NAME_NAME)); if(menuItemIndex==0){ if(listItemKey==-1){ Toast.makeText(getActivity().getApplicationContext(), getString(R.string.sim_local_select_first_a_mission), Toast.LENGTH_LONG).show(); }else if (listItemKey==0 ||listItemKey==1 ||listItemKey==2 || listItemKey==3 ){ Toast.makeText(getActivity().getApplicationContext(), getString(R.string.sim_local_mission_not_removable), Toast.LENGTH_LONG).show(); }else{ showDeleteMissionDialog(listItemKey, listItemName); } }else if(menuItemIndex==1){ showCopyMissionDialog(listItemKey, listItemName, getMission(listItemKey).mission); }else if(menuItemIndex==2){ if(listItemKey==-1){ Toast.makeText(getActivity().getApplicationContext(), getString(R.string.sim_local_select_first_a_mission), Toast.LENGTH_LONG).show(); }else if (listItemKey==0 ||listItemKey==1 ||listItemKey==2 ||listItemKey==3){ Toast.makeText(getActivity().getApplicationContext(), getString(R.string.sim_local_mission_not_editable), Toast.LENGTH_LONG).show(); }else{ MissionAndId mis = getMission(listItemKey); if(mis!=null){ ((MainActivity)getActivity()).showMissionEditor(mis); }else{ Toast.makeText(getActivity().getApplicationContext(), getString(R.string.sim_local_cannot_deserialize_selected_mission), Toast.LENGTH_LONG).show(); } } } } }catch(Exception e){ } return true; } }
package it.valeriovaudi.documentlibrary.endpoint;

import it.valeriovaudi.documentlibrary.web.model.BookMasterDTO;
import it.valeriovaudi.documentlibrary.web.model.BookModel;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cloud.client.loadbalancer.LoadBalanced;
import org.springframework.http.RequestEntity;
import org.springframework.http.ResponseEntity;
import org.springframework.integration.core.MessagingTemplate;
import org.springframework.messaging.MessageChannel;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.client.RestTemplate;

import javax.json.*;
import java.io.StringReader;
import java.net.URI;

/**
 * Edge controller that aggregates the book repository service (book master
 * data: author, description) and the search book service (index entries:
 * name, tags, published flag) behind a single {@code /bookService} facade.
 */
@RestController
@RequestMapping("/bookService")
public class BookServiceEndpoint {

    @Autowired
    @LoadBalanced
    private RestTemplate bookRepositoryServiceRestTemplate;

    @Autowired
    @LoadBalanced
    private RestTemplate searchBookServiceRestTemplate;

    @Autowired
    private MessagingTemplate messagingTemplate;

    @Value("${bookRepositoryService.bookServiceEndPoint.baseUrl}")
    private String bookRepositoryServicBaseUrl;

    @Value("${searchBookService.searchBookService.baseUrl}")
    private String searchBookServiceBaseUrl;

    /** Channel used to hand new books off to the asynchronous upload flow. */
    @Autowired
    @Qualifier("uploadBookInChannel")
    private MessageChannel uploadBookInChannel;

    public void setMessagingTemplate(MessagingTemplate messagingTemplate) {
        this.messagingTemplate = messagingTemplate;
    }

    public void setBookRepositoryServiceRestTemplate(RestTemplate bookRepositoryServiceRestTemplate) {
        this.bookRepositoryServiceRestTemplate = bookRepositoryServiceRestTemplate;
    }

    public void setSearchBookServiceRestTemplate(RestTemplate searchBookServiceRestTemplate) {
        this.searchBookServiceRestTemplate = searchBookServiceRestTemplate;
    }

    public void setUploadBookInChannel(MessageChannel uploadBookInChannel) {
        this.uploadBookInChannel = uploadBookInChannel;
    }

    public void setBookRepositoryServicBaseUrl(String bookRepositoryServicBaseUrl) {
        this.bookRepositoryServicBaseUrl = bookRepositoryServicBaseUrl;
    }

    public void setSearchBookServiceBaseUrl(String searchBookServiceBaseUrl) {
        this.searchBookServiceBaseUrl = searchBookServiceBaseUrl;
    }

    /**
     * Reads the details of a single book.
     * TODO(review): not implemented yet — returns {@code null}, which Spring
     * maps to an empty 200 response.
     */
    @RequestMapping(value = "/{resourcesId}", method = RequestMethod.GET)
    public ResponseEntity<Void> readBookDetails(@PathVariable("resourcesId") String resourcesId){
        return null;
    }

    /**
     * Lists all books by joining every search-index entry with the matching
     * book master record, returned as a JSON array string.
     * NOTE(review): issues one repository call per index entry (N+1); fine for
     * small catalogs, worth batching if the catalog grows.
     */
    @RequestMapping(method = RequestMethod.GET)
    public ResponseEntity<String> readAddBookDetails(){
        // page=-1&pageSize=-1 — presumably the search service's "no paging" convention; verify against that service.
        String forObject = searchBookServiceRestTemplate.getForObject(String.format("%s?page=-1&pageSize=-1",searchBookServiceBaseUrl), String.class);
        JsonArray searchBookIndexes = Json.createReader(new StringReader(forObject)).readArray();
        JsonArrayBuilder arrayBuilder = Json.createArrayBuilder();
        JsonObject searchBookIndex;
        JsonObject bookDetails;
        for(int i = 0 ; i < searchBookIndexes.size() ; i++){
            searchBookIndex = searchBookIndexes.getJsonObject(i);
            // Fetch the master data for this index entry from the repository service.
            bookDetails = Json.createReader(new StringReader(bookRepositoryServiceRestTemplate.getForObject(String.format(bookRepositoryServicBaseUrl + "/book/%s", searchBookIndex.getString("bookId")), String.class))).readObject();
            arrayBuilder.add(Json.createObjectBuilder()
                    .add("bookId", searchBookIndex.getString("bookId"))
                    .add("name", searchBookIndex.getString("bookName"))
                    .add("author", bookDetails.getString("author"))
                    .add("description",bookDetails.getString("description"))
                    .add("metadata",searchBookIndex.getJsonArray("searchTags"))
                    .add("published", searchBookIndex.getBoolean("published"))
                    .add("delete", false));
        }
        return ResponseEntity.ok(arrayBuilder.build().toString());
    }

    /**
     * Accepts a new book and hands it to the asynchronous upload pipeline.
     *
     * @return 202 Accepted — processing happens off-request.
     */
    @RequestMapping(method = RequestMethod.POST)
    public ResponseEntity<Void> saveBookDetails(@RequestBody BookMasterDTO bookMasterDTO){
        messagingTemplate.convertAndSend(uploadBookInChannel, bookMasterDTO);
        return ResponseEntity.accepted().build();
    }

    /**
     * Updates a book: author/description go to the repository service (only
     * when present), published flag and search tags go to the search service.
     *
     * @return 204 No Content
     */
    @RequestMapping(value = "/{resourcesId}", method = RequestMethod.PUT)
    public ResponseEntity<Void> updateBook(@PathVariable("resourcesId") String resourcesId, @RequestBody BookModel bookModel){
        String bookRepositoryBaseURl = String.format("%s/book/%s",bookRepositoryServicBaseUrl,resourcesId);
        JsonObjectBuilder bookDetailsBuilder = Json.createObjectBuilder();
        boolean isCallable = false;
        if(bookModel.getAuthor()!=null){
            isCallable = true;
            bookDetailsBuilder.add("author",bookModel.getAuthor());
        }
        if(bookModel.getDescription()!=null){
            isCallable = true;
            bookDetailsBuilder.add("description",bookModel.getDescription());
        }
        if(isCallable) {
            // FIX: the URL was already fully formatted; the extra
            // String.format(url, id) call was redundant and would throw on a
            // URL containing a literal '%'.
            bookRepositoryServiceRestTemplate.put(bookRepositoryBaseURl,bookDetailsBuilder.build().toString());
        }

        String searchBookIndexBaseURl = String.format("%s/%s",searchBookServiceBaseUrl,resourcesId);
        // FIX: the original reused the same builder after build(); the JSON-P RI
        // clears builder state on build(), so the search payload silently
        // depended on the provider. A dedicated builder makes the payload
        // deterministic: published + searchTags, matching readAddBookDetails.
        JsonObjectBuilder searchIndexBuilder = Json.createObjectBuilder();
        searchIndexBuilder.add("published",bookModel.isPublished());
        if(bookModel.getMetadata()!=null){
            JsonArrayBuilder arrayBuilder = Json.createArrayBuilder();
            for (String tag : bookModel.getMetadata()) {
                arrayBuilder.add(tag);
            }
            searchIndexBuilder.add("searchTags",arrayBuilder);
        }
        searchBookServiceRestTemplate.put(searchBookIndexBaseURl,searchIndexBuilder.build().toString());
        return ResponseEntity.noContent().build();
    }

    /**
     * Deletes a book's search-index entry and propagates the remote status.
     * NOTE(review): only the search service is called here — the repository
    * record is not deleted; confirm whether that is intentional.
     */
    @RequestMapping(value = "/{resourcesId}", method = RequestMethod.DELETE)
    public ResponseEntity<Void> deleteBook(@PathVariable("resourcesId") String resourcesId){
        String searchBookIndexBaseURl = String.format("%s/%s", searchBookServiceBaseUrl, resourcesId);
        // FIX: dropped the redundant re-format of an already complete URL.
        return searchBookServiceRestTemplate.exchange(RequestEntity.delete(URI.create(searchBookIndexBaseURl)).build(), Void.class);
    }
}
package com.example.bootweb.accessory.httpclient;

import org.apache.http.client.HttpRequestRetryHandler;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.conn.ConnectionKeepAliveStrategy;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.DefaultProxyRoutePlanner;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.ConfigurableBeanFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Scope;

/**
 * Assembles the application-wide Apache {@link CloseableHttpClient} from
 * collaborating beans: connection pool, retry handler, keep-alive strategy,
 * proxy route planner and default request configuration.
 *
 * NOTE(review): {@code @Configuration} is commented out below, so this class
 * is currently inactive unless registered some other way — confirm intent.
 */
//@Configuration
public class HttpClientConfig {

  /**
   * The singleton client built by {@link #client()}; kept in a field so that
   * {@link #destroy()} can close it later.
   */
  private CloseableHttpClient client;

  @Autowired
  private ConnectionKeepAliveStrategy connectionKeepAliveStrategy;

  @Autowired
  private HttpRequestRetryHandler httpRequestRetryHandler;

  @Autowired
  private DefaultProxyRoutePlanner proxyRoutePlanner;

  @Autowired
  private PoolingHttpClientConnectionManager poolHttpcConnManager;

  @Autowired
  private RequestConfig config;

  /**
   * Closes the built client, which also shuts down its connection manager and
   * releases all pooled connections/sockets.
   *
   * NOTE(review): nothing registers this as a destroy callback — the
   * {@code destroyMethod} attribute on the {@code @Bean} below is commented
   * out — so this is not invoked automatically; confirm the intended lifecycle.
   */
  public void destroy() throws Exception {
    /*
     * httpClient.close() shuts down the connection manager used by the client
     * and releases its pooled sockets/connections. A client built afterwards
     * must be given a fresh connection manager, since the old one is unusable
     * once closed.
     */
    if (null != this.client) {
      this.client.close();
    }
  }

  /**
   * Builds the singleton {@code httpClient} bean from the injected
   * collaborators.
   *
   * @return the fully configured, immutable Apache HTTP client
   * @throws Exception declared for the bean method contract; the builder
   *         chain itself does not throw checked exceptions
   */
  @Bean(name = "httpClient")//, destroyMethod = "close()"
  @Scope(value = ConfigurableBeanFactory.SCOPE_SINGLETON)
  public CloseableHttpClient client() throws Exception {
    /*
     * HttpClients.custom() returns an HttpClientBuilder. The client produced
     * by build() is immutable, so every collaborator (pool, retry handler,
     * keep-alive strategy, route planner, request config) must be supplied
     * before build() is called.
     */
    this.client = HttpClients.custom().setConnectionManager(poolHttpcConnManager)
        .setRetryHandler(httpRequestRetryHandler).setKeepAliveStrategy(connectionKeepAliveStrategy)
        .setRoutePlanner(proxyRoutePlanner).setDefaultRequestConfig(config).build();
    return this.client;
  }
}
package de.ptb.epics.eve.viewer.views; import java.util.Iterator; import java.util.List; import org.eclipse.jface.dialogs.MessageDialog; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.ScrolledComposite; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.events.SelectionListener; import org.eclipse.swt.graphics.Image; import org.eclipse.swt.layout.FillLayout; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.Table; import org.eclipse.swt.widgets.TableColumn; import org.eclipse.swt.widgets.TableItem; import org.eclipse.swt.widgets.Text; import org.eclipse.ui.PlatformUI; import org.eclipse.ui.handlers.IHandlerService; import org.eclipse.ui.part.ViewPart; import de.ptb.epics.eve.data.scandescription.Axis; import de.ptb.epics.eve.data.scandescription.Chain; import de.ptb.epics.eve.data.scandescription.Channel; import de.ptb.epics.eve.data.scandescription.ScanModule; import de.ptb.epics.eve.ecp1.client.interfaces.IConnectionStateListener; import de.ptb.epics.eve.ecp1.client.interfaces.IErrorListener; import de.ptb.epics.eve.ecp1.client.model.Error; import de.ptb.epics.eve.ecp1.intern.EngineStatus; import de.ptb.epics.eve.ecp1.intern.ErrorType; import de.ptb.epics.eve.preferences.PreferenceConstants; import de.ptb.epics.eve.viewer.Activator; import de.ptb.epics.eve.viewer.IUpdateListener; /** * A simple view implementation, which only displays a label. 
* * @author Hartmut Scherr */ public final class EngineView extends ViewPart implements IUpdateListener, IConnectionStateListener, IErrorListener { private Composite top = null; private ScrolledComposite sc = null; private Label engineLabel; private Composite engineComposite; private Button startButton; private Button killButton; private Button connectButton; private Button disconnectButton; private Label statusLabel; private Label scanLabel; private Composite scanComposite; private Button playButton; private Button pauseButton; private Button stopButton; private Button skipButton; private Button haltButton; private Button autoPlayOnButton; private Button autoPlayOffButton; private Label repeatCountLabel; private Text repeatCountText; private Label loadedScmlLabel; private Text loadedScmlText; private Label chainFilenameLabel; private Text filenameText; private Label commentLabel; private Text commentText; private Button commentSendButton; private Table statusTable; private Shell shellTable[] = new Shell[10]; private int repeatCount; /** * {@inheritDoc} */ @Override public void createPartControl( final Composite parent ) { final Image playIcon = Activator.getDefault().getImageRegistry().get("PLAY16"); final Image pauseIcon = Activator.getDefault().getImageRegistry().get("PAUSE16"); final Image stopIcon = Activator.getDefault().getImageRegistry().get("STOP16"); final Image skipIcon = Activator.getDefault().getImageRegistry().get("SKIP16"); final Image haltIcon = Activator.getDefault().getImageRegistry().get("HALT16"); final Image autoPlayIcon = Activator.getDefault().getImageRegistry().get("PLAYALL16"); parent.setLayout( new FillLayout() ); GridLayout gridLayout; GridData gridData; this.sc = new ScrolledComposite(parent, SWT.H_SCROLL | SWT.V_SCROLL | SWT.BORDER); this.top = new Composite( sc, SWT.NONE ); gridLayout = new GridLayout(); gridLayout.numColumns = 4; this.top.setLayout(gridLayout); sc.setContent(this.top); sc.setExpandHorizontal(true); 
sc.setExpandVertical(true); this.engineLabel = new Label( this.top, SWT.NONE ); this.engineLabel.setText("ENGINE:"); this.engineComposite = new Composite( this.top, SWT.NONE ); gridLayout = new GridLayout(); gridLayout.numColumns = 4; this.engineComposite.setLayout(gridLayout); gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; gridData.horizontalSpan = 3; this.engineComposite.setLayoutData( gridData ); this.startButton = new Button( this.engineComposite, SWT.PUSH ); this.startButton.setText("start"); this.startButton.setToolTipText( "Start engine" ); this.startButton.addSelectionListener( new StartButtonSelectionListener()); this.killButton = new Button( this.engineComposite, SWT.PUSH ); this.killButton.setText("kill"); this.killButton.setToolTipText( "Kill engine" ); this.killButton.addSelectionListener( new KillButtonSelectionListener()); this.connectButton = new Button( this.engineComposite, SWT.PUSH ); this.connectButton.setText("connect"); this.connectButton.setToolTipText( "Connect to Engine" ); this.connectButton.addSelectionListener( new ConnectButtonSelectionListener()); this.disconnectButton = new Button( this.engineComposite, SWT.PUSH ); this.disconnectButton.setText("disconnect"); this.disconnectButton.setToolTipText( "Disconnect Engine" ); this.disconnectButton.addSelectionListener( new DisconnectButtonSelectionListener()); this.statusLabel = new Label( this.engineComposite, SWT.NONE ); this.statusLabel.setText("not connected"); gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; gridData.horizontalSpan = 4; this.statusLabel.setLayoutData( gridData ); this.scanLabel = new Label( this.top, SWT.NONE ); this.scanLabel.setText("SCAN:"); gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; gridData.horizontalSpan = 1; this.scanLabel.setLayoutData( gridData ); this.scanComposite = new Composite( this.top, SWT.NONE ); gridLayout = new GridLayout(); gridLayout.numColumns = 9; 
this.scanComposite.setLayout(gridLayout); gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; gridData.horizontalSpan = 3; this.scanComposite.setLayoutData( gridData ); this.playButton = new Button( this.scanComposite, SWT.PUSH ); this.playButton.setImage(playIcon); this.playButton.setToolTipText( "Play" ); this.playButton.addSelectionListener( new PlayButtonSelectionListener()); this.pauseButton = new Button( this.scanComposite, SWT.PUSH ); this.pauseButton.setImage(pauseIcon); this.pauseButton.setToolTipText( "Pause" ); this.pauseButton.addSelectionListener( new PauseButtonSelectionListener()); this.stopButton = new Button( this.scanComposite, SWT.PUSH ); this.stopButton.setImage(stopIcon); this.stopButton.setToolTipText( "Stop" ); this.stopButton.addSelectionListener( new StopButtonSelectionListener()); this.skipButton = new Button( this.scanComposite, SWT.PUSH ); this.skipButton.setImage(skipIcon); this.skipButton.setToolTipText( "Skip" ); this.skipButton.addSelectionListener( new SkipButtonSelectionListener()); this.haltButton = new Button( this.scanComposite, SWT.PUSH ); this.haltButton.setImage(haltIcon); this.haltButton.setToolTipText( "Halt" ); this.haltButton.addSelectionListener( new HaltButtonSelectionListener()); this.autoPlayOnButton = new Button( this.scanComposite, SWT.TOGGLE ); this.autoPlayOnButton.setImage(autoPlayIcon); this.autoPlayOnButton.setToolTipText( "AutoPlayOn" ); this.autoPlayOnButton.addSelectionListener( new AutoPlayOnButtonSelectionListener()); this.autoPlayOffButton = new Button( this.scanComposite, SWT.TOGGLE ); this.autoPlayOffButton.setImage(autoPlayIcon); this.autoPlayOffButton.setToolTipText( "AutoPlayOff" ); this.autoPlayOffButton.addSelectionListener( new AutoPlayOffButtonSelectionListener()); this.repeatCountLabel = new Label( this.scanComposite, SWT.NONE ); this.repeatCountLabel.setText("repeat count:"); this.repeatCountText = new Text( this.scanComposite, SWT.BORDER ); repeatCount = 0; 
repeatCountText.setText(" "+String.valueOf(repeatCount)); this.repeatCountText.setEditable(false); this.loadedScmlLabel = new Label( this.top, SWT.NONE ); this.loadedScmlLabel.setText("loaded File:"); gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; gridData.horizontalSpan = 2; this.loadedScmlLabel.setLayoutData( gridData ); this.loadedScmlText = new Text( this.top, SWT.BORDER ); this.loadedScmlText.setEditable( false ); gridData = new GridData(); gridData.grabExcessHorizontalSpace = true; gridData.horizontalAlignment = GridData.FILL; gridData.horizontalSpan = 2; this.loadedScmlText.setLayoutData( gridData ); this.chainFilenameLabel = new Label( this.top, SWT.NONE ); this.chainFilenameLabel.setText("Filename:"); gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; gridData.horizontalSpan = 2; this.chainFilenameLabel.setLayoutData( gridData ); this.filenameText = new Text( this.top, SWT.BORDER | SWT.TRAIL); this.filenameText.setEditable( false ); gridData = new GridData(); gridData.grabExcessHorizontalSpace = true; gridData.horizontalAlignment = GridData.FILL; gridData.horizontalSpan = 2; this.filenameText.setLayoutData( gridData ); this.commentLabel = new Label( this.top, SWT.NONE ); this.commentLabel.setText("live Comment:"); gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; gridData.horizontalSpan = 2; this.commentLabel.setLayoutData( gridData ); this.commentText = new Text( this.top, SWT.BORDER); gridData = new GridData(); gridData.grabExcessHorizontalSpace = true; gridData.horizontalAlignment = GridData.FILL; gridData.horizontalSpan = 1; this.commentText.setLayoutData( gridData ); this.commentSendButton = new Button( this.top, SWT.NONE); this.commentSendButton.setText( "Send to File" ); gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; this.commentSendButton.setEnabled(false); this.commentSendButton.setLayoutData( gridData ); this.commentSendButton.addSelectionListener(new 
CommentSendButtonSelectionListener()); gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; gridData.verticalAlignment = GridData.FILL; gridData.horizontalSpan = 10; gridData.grabExcessHorizontalSpace = true; gridData.grabExcessVerticalSpace = true; this.statusTable = new Table(top, SWT.NONE); this.statusTable.setHeaderVisible(true); this.statusTable.setLinesVisible(true); this.statusTable.setLayoutData(gridData); TableColumn tableColumn = new TableColumn(this.statusTable, SWT.NONE); tableColumn.setWidth(50); tableColumn.setText("Chain"); TableColumn tableColumn1 = new TableColumn(this.statusTable, SWT.NONE); tableColumn1.setWidth(100); tableColumn1.setText("Scan Module"); TableColumn tableColumn2 = new TableColumn(this.statusTable, SWT.NONE); tableColumn2.setWidth(80); tableColumn2.setText("Status"); TableColumn tableColumn3 = new TableColumn(this.statusTable, SWT.NONE); tableColumn3.setWidth(120); tableColumn3.setText("remaining Time"); // SelectionListener um zu erkennen, wann eine Zeile selektiert wird this.statusTable.addSelectionListener(new StatusTableSelectionListener()); Activator.getDefault().getChainStatusAnalyzer().addUpdateListener( this ); Activator.getDefault().getEcp1Client().addErrorListener(this); this.rebuildText(0); Activator.getDefault().getEcp1Client().addConnectionStateListener( this ); // If Ecp1Client running (connected), enable disconnect and kill // else enable connect and start Button if (Activator.getDefault().getEcp1Client().isRunning()) { this.connectButton.setEnabled(false); this.startButton.setEnabled(false); this.disconnectButton.setEnabled(true); this.killButton.setEnabled(true); } else { this.disconnectButton.setEnabled(false); this.connectButton.setEnabled(true); this.killButton.setEnabled(false); this.startButton.setEnabled(true); // disable scan buttons if engine disconnected playButton.setEnabled(false); pauseButton.setEnabled(false); stopButton.setEnabled(false); skipButton.setEnabled(false); 
haltButton.setEnabled(false); } } /** * {@inheritDoc} */ @Override public void setFocus() { } public void updateOccured(int remainTime) { this.rebuildText(remainTime); } public void clearStatusTable() { // die Tabelle mit den Statusanzeigen wird geleert this.statusTable.getDisplay().syncExec( new Runnable() { public void run() { statusTable.removeAll(); } }); } public void disableSendToFile() { // send to File wird verboten this.commentSendButton.getDisplay().syncExec( new Runnable() { public void run() { commentSendButton.setEnabled(false); } }); } public void fillStatusTable(final int chainId, final int scanModuleId, final String statString, final int remainTime) { this.statusTable.getDisplay().syncExec( new Runnable() { public void run() { // Frage: An welcher Stelle kann das gesetzt werden? // angezeigt werden soll, das kann im Prinzip auch schon vorher sein // Kommt hier auch noch eine Abfrage auf 0 hin? int height = statusTable.getBounds().y + statusTable.getHeaderHeight() * 4 + 5; int width = scanComposite.getBounds().x + repeatCountText.getBounds().x + repeatCountText.getBounds().width + 5; sc.setMinSize(width, height); final TableItem[] rows = statusTable.getItems(); boolean neu = true; for ( int i=0; i<rows.length; i++) { String text0 = rows[i].getText(0).toString().trim(); String text1 = rows[i].getText(1).toString().trim(); int cell0 = Integer.parseInt(text0); int cell1; if (text1.equals("")) { // smid-Feld ist leer, cell1 wird auf -1 gesetzt cell1 = -1; } else { cell1 = Integer.parseInt(text1.trim()); } if ( (chainId == cell0) && (scanModuleId == cell1)) { neu = false; rows[i].setText(2, statString); if (cell1 == -1) { rows[i].setText(3, ""+remainTime); } } }; if (neu) { TableItem tableItem = new TableItem( statusTable, 0 ); tableItem.setText( 0, " "+chainId); if (scanModuleId == -1) { tableItem.setText( 1, " "); tableItem.setText( 3, ""+remainTime); } else { tableItem.setText( 1, " "+scanModuleId); } tableItem.setText( 2, statString); } } }); } 
// rebuildText(remainTime): walks every Chain of the current scan description and pushes one
// chain-level row ("running"/"exited"/"idle", taken from the ChainStatusAnalyzer) plus one
// row per executing / exited / initialized / paused scan module that belongs to that chain.
// All table writes go through fillStatusTable(), which marshals onto the SWT display thread.
private void rebuildText(int remainTime) { if( Activator.getDefault().getCurrentScanDescription() != null ) { final Iterator< Chain > it = Activator.getDefault().getCurrentScanDescription().getChains().iterator(); while( it.hasNext() ) { final Chain currentChain = it.next(); if( Activator.getDefault().getChainStatusAnalyzer().getRunningChains().contains( currentChain ) ) { fillStatusTable(currentChain.getId(), -1, "running", remainTime); } else if( Activator.getDefault().getChainStatusAnalyzer().getExitedChains().contains( currentChain ) ) { fillStatusTable(currentChain.getId(), -1, "exited", remainTime); } else { fillStatusTable(currentChain.getId(), -1, "idle", remainTime); } final List< ScanModule > scanModules = currentChain.getScanModuls(); final List< ScanModule > running = Activator.getDefault().getChainStatusAnalyzer().getExecutingScanModules(); for( final ScanModule scanModule : running ) { if( scanModules.contains( scanModule ) ) { fillStatusTable(currentChain.getId(), scanModule.getId(), "running", remainTime); } } final List< ScanModule > exited = Activator.getDefault().getChainStatusAnalyzer().getExitingScanModules(); for( final ScanModule scanModule : exited ) { if( scanModules.contains( scanModule ) ) { fillStatusTable(currentChain.getId(), scanModule.getId(), "exited", remainTime); } } final List< ScanModule > initialized = Activator.getDefault().getChainStatusAnalyzer().getInitializingScanModules(); for( final ScanModule scanModule : initialized ) { if( scanModules.contains( scanModule ) ) { fillStatusTable(currentChain.getId(), scanModule.getId(), "initialized", remainTime); } } final List< ScanModule > paused = Activator.getDefault().getChainStatusAnalyzer().getPausedScanModules(); for( final ScanModule scanModule : paused ) { if( scanModules.contains( scanModule ) ) { fillStatusTable(currentChain.getId(), scanModule.getId(), "paused", remainTime); } } final List< ScanModule > waiting = 
// NOTE(review): the line below is the continuation of rebuildText() ("waiting for trigger"
// modules), followed by the ConnectionStateListener callbacks stackConnected() /
// stackDisconnected() — which flip the connect/disconnect/kill/start buttons and the status
// label on the display thread — and the syncExec'ed setters setLoadedScmlFile(String) and
// setActualFilename(String). stackDisconnected() checks isDisposed() twice because it can
// fire while the workbench part is being torn down.
Activator.getDefault().getChainStatusAnalyzer().getWaitingScanModules(); for( final ScanModule scanModule : waiting ) { if( scanModules.contains( scanModule ) ) { fillStatusTable(currentChain.getId(), scanModule.getId(), "waiting for trigger", remainTime); } } } } } /** * {@inheritDoc} */ @Override public void stackConnected() { this.connectButton.setEnabled(false); this.startButton.setEnabled(false); this.disconnectButton.setEnabled(true); this.killButton.setEnabled(true); // Output connected to host. final String engineString = de.ptb.epics.eve.preferences.Activator.getDefault().getPreferenceStore().getString( PreferenceConstants.P_DEFAULT_ENGINE_ADDRESS ); this.statusLabel.getDisplay().syncExec( new Runnable() { public void run() { statusLabel.setText("connected to " + engineString); } }); // Wie ist der EngineStatus? // Activator.getDefault().getEcp1Client(). } /** * {@inheritDoc} */ @Override public void stackDisconnected() { if (!this.loadedScmlText.isDisposed()) this.loadedScmlText.getDisplay().syncExec( new Runnable() { public void run() { if (!loadedScmlText.isDisposed()) { disconnectButton.setEnabled(false); connectButton.setEnabled(true); killButton.setEnabled(false); startButton.setEnabled(true); statusLabel.setText("not connected"); // disable scan buttons if engine disconnected playButton.setEnabled(false); pauseButton.setEnabled(false); stopButton.setEnabled(false); skipButton.setEnabled(false); haltButton.setEnabled(false); } } }); } /** * {@inheritDoc} */ public void setLoadedScmlFile(final String filename) { // der Name des geladenen scml-Files wird angezeigt this.loadedScmlText.getDisplay().syncExec( new Runnable() { public void run() { loadedScmlText.setText(filename); } }); } public void setActualFilename(final String filename) { // der Name des geladenen scml-Files wird angezeigt this.filenameText.getDisplay().syncExec(new Runnable() { public void run() { filenameText.setText(filename); } }); } @Override public void 
// fillEngineStatus(engineStatus, repeatCount): EngineStatus callback. Stores the repeat
// count (updating the text field only on change) and enables/disables the
// play/pause/stop/skip/halt buttons according to the engine state. In IDLE_XML_LOADED it
// additionally disposes any open chain/scan-module info shells cached in shellTable
// (the loop bound of 10 matches the fixed shellTable size — TODO confirm against the
// field declaration, which is outside this excerpt).
fillEngineStatus(EngineStatus engineStatus, int repeatCount) { setCurrentRepeatCount(repeatCount); switch(engineStatus) { case IDLE_NO_XML_LOADED: this.playButton.getDisplay().syncExec( new Runnable() { public void run() { playButton.setEnabled(false); pauseButton.setEnabled(false); stopButton.setEnabled(false); skipButton.setEnabled(false); haltButton.setEnabled(false); } }); break; case IDLE_XML_LOADED: this.playButton.getDisplay().syncExec( new Runnable() { public void run() { playButton.setEnabled(true); pauseButton.setEnabled(false); stopButton.setEnabled(false); skipButton.setEnabled(false); haltButton.setEnabled(true); for ( int j=0; j<10; j++) { if (shellTable[j] != null) { if (!shellTable[j].isDisposed()) shellTable[j].dispose(); } } } }); break; case EXECUTING: this.playButton.getDisplay().syncExec( new Runnable() { public void run() { playButton.setEnabled(false); pauseButton.setEnabled(true); stopButton.setEnabled(true); skipButton.setEnabled(true); haltButton.setEnabled(true); } }); break; case PAUSED: this.playButton.getDisplay().syncExec( new Runnable() { public void run() { playButton.setEnabled(true); pauseButton.setEnabled(false); stopButton.setEnabled(true); skipButton.setEnabled(true); haltButton.setEnabled(true); } }); break; case STOPPED: this.playButton.getDisplay().syncExec( new Runnable() { public void run() { playButton.setEnabled(false); pauseButton.setEnabled(false); stopButton.setEnabled(false); skipButton.setEnabled(false); haltButton.setEnabled(false); } }); break; case HALTED: this.playButton.getDisplay().syncExec( new Runnable() { public void run() { playButton.setEnabled(false); pauseButton.setEnabled(false); stopButton.setEnabled(false); skipButton.setEnabled(false); haltButton.setEnabled(false); } }); break; } } /** * {@inheritDoc} */ @Override public void setAutoPlayStatus(final boolean autoPlayStatus) { this.autoPlayOnButton.getDisplay().syncExec( new Runnable() { public void run() { if (autoPlayStatus == true) { 
// NOTE(review): the line below finishes setAutoPlayStatus(boolean) (toggles the auto-play
// on/off buttons so that only the inactive direction is clickable), then defines
// setCurrentRepeatCount(int) (updates the repeat-count text field only when the value
// actually changed), and opens the StatusTableSelectionListener inner class, whose
// widgetSelected() handles clicks on status-table rows (continued on the following lines).
autoPlayOnButton.setEnabled(false); autoPlayOffButton.setEnabled(true); } else { autoPlayOnButton.setEnabled(true); autoPlayOffButton.setEnabled(false); } } }); } private void setCurrentRepeatCount(final int repeatCount) { if (this.repeatCount != repeatCount){ this.repeatCount = repeatCount; this.repeatCountText.getDisplay().syncExec(new Runnable() { public void run() { repeatCountText.setText(String.valueOf(repeatCount)); } }); } } // Wenn eine Zeile in der Tabelle der Chains und ScanModule angeklickt wird, // Beim nochmaligen anklicken wird das Fenster wieder entfernt. /** * * @author scherr * */ class StatusTableSelectionListener implements SelectionListener { /** * {@inheritDoc} */ @Override public void widgetDefaultSelected(SelectionEvent e) { // TODO Auto-generated method stub } /** * {@inheritDoc} */ @Override public void widgetSelected(SelectionEvent e) { // TODO Auto-generated method stub // mit den Details der Chain oder des ScanModuls int selection = statusTable.getSelectionIndex(); // Wenn ja, Info wieder wegnehmen. 
// StatusTableSelectionListener.widgetSelected() continuation: toggles a detail pop-up for
// the clicked status row. A second click on the same row disposes the cached Shell again
// (shellTable holds one Shell per row index). Rows with a non-empty scan-module cell open a
// "Scan Module Info" shell (trigger delay, settle time, confirm-trigger flag, saved motor
// positions, plus axis and detector-channel tables); chain-level rows open a "Chain Info"
// shell instead.
// NOTE(review): line breaks were lost in this file. At least one string literal
// ("Stop Knopf im Engine Window gedrückt!") is split across two physical lines further
// down, and several "//" comments have swallowed the code that followed them — this
// excerpt does not compile as-is. Recover the original formatting from version control
// before editing.
if (shellTable[selection] != null) { // Info ist vorhanden, da shellTable gesetzt if (!shellTable[selection].isDisposed()) { shellTable[selection].dispose(); shellTable[selection] = null; return; } } final TableItem[] rows = statusTable.getItems(); int aktChain = Integer.parseInt(rows[selection].getText(0).trim()); int aktSM; if (rows[selection].getText(1).trim().equals("")) { aktSM = 0; } else { aktSM = Integer.parseInt(rows[selection].getText(1).trim()); } Chain displayChain = Activator.getDefault().getCurrentScanDescription().getChain(aktChain); if (aktSM > 0) { Display display = Activator.getDefault().getWorkbench().getDisplay(); Shell chainShell = new Shell(display); chainShell.setSize(600,400); chainShell.setText("Scan Module Info"); GridLayout gridLayout = new GridLayout(); gridLayout.numColumns = 2; GridData gridData; chainShell.setLayout(gridLayout); Label chainLabel = new Label(chainShell,SWT.NONE); chainLabel.setText("Chain ID:"); gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; chainLabel.setLayoutData(gridData); Label chainText = new Label(chainShell,SWT.NONE); chainText.setText(rows[selection].getText(0)); gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; chainText.setLayoutData(gridData); Label smLabel = new Label(chainShell,SWT.NONE); smLabel.setText("Scan Module ID:"); gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; smLabel.setLayoutData(gridData); Label smText = new Label(chainShell,SWT.NONE); smText.setText(rows[selection].getText(1)); gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; smText.setLayoutData(gridData); Label trigDelLabel = new Label(chainShell,SWT.NONE); trigDelLabel.setText("Trigger delay:"); gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; trigDelLabel.setLayoutData(gridData); Label trigDelText = new Label(chainShell,SWT.NONE); trigDelText.setText(""+displayChain.getScanModulById(aktSM).getTriggerdelay()); 
gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; trigDelText.setLayoutData(gridData); Label settleLabel = new Label(chainShell,SWT.NONE); settleLabel.setText("Settletime:"); gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; settleLabel.setLayoutData(gridData); Label settleText = new Label(chainShell,SWT.NONE); settleText.setText(""+displayChain.getScanModulById(aktSM).getSettletime()); gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; settleText.setLayoutData(gridData); Label confLabel = new Label(chainShell,SWT.NONE); confLabel.setText("Confirm Trigger:"); gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; confLabel.setLayoutData(gridData); Label confText = new Label(chainShell,SWT.NONE); if (displayChain.getScanModulById(aktSM).isTriggerconfirm()) { confText.setText(" YES "); } else { confText.setText(" NO "); } gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; confText.setLayoutData(gridData); Label saveMotLabel = new Label(chainShell,SWT.NONE); saveMotLabel.setText("Save all motorpositions:"); gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; saveMotLabel.setLayoutData(gridData); Label saveMotText = new Label(chainShell,SWT.NONE); saveMotText.setText(displayChain.getScanModulById(aktSM).getSaveAxisPositions().toString()); gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; saveMotText.setLayoutData(gridData); gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; gridData.verticalAlignment = GridData.FILL; gridData.horizontalSpan = 2; gridData.grabExcessHorizontalSpace = true; gridData.grabExcessVerticalSpace = true; Table motTable = new Table(chainShell, SWT.NONE); motTable.setHeaderVisible(true); motTable.setLinesVisible(true); motTable.setLayoutData(gridData); TableColumn motColumn = new TableColumn(motTable, SWT.NONE); motColumn.setWidth(250); motColumn.setText("Motor Axis"); 
TableColumn motColumn1 = new TableColumn(motTable, SWT.NONE); motColumn1.setWidth(100); motColumn1.setText("Start"); TableColumn motColumn2 = new TableColumn(motTable, SWT.NONE); motColumn2.setWidth(100); motColumn2.setText("Stop"); TableColumn motColumn3 = new TableColumn(motTable, SWT.NONE); motColumn3.setWidth(100); motColumn3.setText("Stepwidth"); Axis[] axis = displayChain.getScanModulById(aktSM).getAxis(); for (int i=0; i<axis.length; i++) { TableItem tableItem = new TableItem( motTable, 0 ); tableItem.setText( 0, axis[i].getAbstractDevice().getFullIdentifyer()); tableItem.setText( 1, axis[i].getStart()); tableItem.setText( 2, axis[i].getStop()); tableItem.setText( 3, axis[i].getStepwidth()); } gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; gridData.verticalAlignment = GridData.FILL; gridData.horizontalSpan = 2; gridData.grabExcessHorizontalSpace = true; gridData.grabExcessVerticalSpace = true; Table detTable = new Table(chainShell, SWT.NONE); detTable.setHeaderVisible(true); detTable.setLinesVisible(true); detTable.setLayoutData(gridData); TableColumn detColumn = new TableColumn(detTable, SWT.NONE); detColumn.setWidth(250); detColumn.setText("Detector Channel"); TableColumn detColumn1 = new TableColumn(detTable, SWT.NONE); detColumn1.setWidth(100); detColumn1.setText("Average"); Channel[] channels = displayChain.getScanModulById(aktSM).getChannels(); for (int i=0; i<channels.length; i++) { TableItem tableItem = new TableItem( detTable, 0 ); tableItem.setText( 0, channels[i].getAbstractDevice().getFullIdentifyer()); tableItem.setText( 1, "" + channels[i].getAverageCount()); } chainShell.open(); shellTable[selection] = chainShell; } else { // Chain Infos anzeigen Display display = Activator.getDefault().getWorkbench().getDisplay(); Shell chainShell = new Shell(display); chainShell.setSize(500,200); chainShell.setText("Chain Info"); GridLayout gridLayout = new GridLayout(); gridLayout.numColumns = 2; GridData gridData; 
// Continuation of the chain-level branch: the "Chain Info" shell shows the chain id, the
// save-scan-description / confirm-save / auto-number flags (rendered as " YES "/" NO ")
// and the chain comment.
chainShell.setLayout(gridLayout); Label chainLabel = new Label(chainShell,SWT.NONE); chainLabel.setText("Chain ID:"); gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; chainLabel.setLayoutData(gridData); Label chainText = new Label(chainShell,SWT.NONE); chainText.setText(rows[selection].getText(0)); gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; chainText.setLayoutData(gridData); Label descLabel = new Label(chainShell,SWT.NONE); descLabel.setText("Save Scan-Description:"); gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; descLabel.setLayoutData(gridData); Label descText = new Label(chainShell,SWT.NONE); if (displayChain.isSaveScanDescription()) { descText.setText(" YES "); } else { descText.setText(" NO "); } gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; descText.setLayoutData(gridData); Label confLabel = new Label(chainShell,SWT.NONE); confLabel.setText("Confirm Save:"); gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; confLabel.setLayoutData(gridData); Label confText = new Label(chainShell,SWT.NONE); if (displayChain.isConfirmSave()) { confText.setText(" YES "); } else { confText.setText(" NO "); } gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; confText.setLayoutData(gridData); Label autoincrLabel = new Label(chainShell,SWT.NONE); autoincrLabel.setText("Add Autoincrementing Number to Filename:"); gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; autoincrLabel.setLayoutData(gridData); Label autoincrText = new Label(chainShell,SWT.NONE); if (displayChain.isAutoNumber()) { autoincrText.setText(" YES "); } else { autoincrText.setText(" NO "); } gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; autoincrText.setLayoutData(gridData); Label commentLabel = new Label(chainShell,SWT.NONE); commentLabel.setText("Comment:"); gridData = new GridData(); gridData.horizontalAlignment = 
GridData.FILL; commentLabel.setLayoutData(gridData); Label commentText = new Label(chainShell,SWT.NONE); commentText.setText(displayChain.getComment()); gridData = new GridData(); gridData.horizontalAlignment = GridData.FILL; commentText.setLayoutData(gridData); chainShell.open(); shellTable[selection] = chainShell; } } } @Override public void errorOccured(Error error) { if (error.getErrorType() == ErrorType.FILENAME) { // Aktueller Filename wird gesetzt setActualFilename(error.getText()); // send to File wird erlaubt this.commentSendButton.getDisplay().syncExec( new Runnable() { public void run() { commentSendButton.setEnabled(true); } }); } } // Hier kommen jetzt die verschiedenen Listener Klassen /** * <code>SelectionListener</code> of Play Button from * <code>EngineView</code> */ class PlayButtonSelectionListener implements SelectionListener { /** * {@inheritDoc} */ @Override public void widgetDefaultSelected( final SelectionEvent e ) { } /** * {@inheritDoc} */ @Override public void widgetSelected( final SelectionEvent e ) { System.out.println("Play Knopf im Engine Window gedrückt!"); Activator.getDefault().getEcp1Client().getPlayController().start(); } } /** * <code>SelectionListener</code> of Pause Button from * <code>EngineView</code> */ class PauseButtonSelectionListener implements SelectionListener { /** * {@inheritDoc} */ @Override public void widgetDefaultSelected(SelectionEvent e) { } /** * {@inheritDoc} */ @Override public void widgetSelected(SelectionEvent e) { System.out.println("Pause Knopf im Engine Window gedrückt!"); Activator.getDefault().getEcp1Client().getPlayController().pause(); } } /** * <code>SelectionListener</code> of Stop Button from * <code>EngineView</code> */ class StopButtonSelectionListener implements SelectionListener { /** * {@inheritDoc} */ @Override public void widgetDefaultSelected(SelectionEvent e) { } /** * {@inheritDoc} */ @Override public void widgetSelected(SelectionEvent e) { System.out.println("Stop Knopf im Engine 
Window gedrückt!"); Activator.getDefault().getEcp1Client().getPlayController().stop(); } } /** * <code>SelectionListener</code> of Skip Button from * <code>EngineView</code> */ class SkipButtonSelectionListener implements SelectionListener { /** * {@inheritDoc} */ @Override public void widgetDefaultSelected(SelectionEvent e) { } /** * {@inheritDoc} */ @Override public void widgetSelected(SelectionEvent e) { System.out.println("Skip Knopf im Engine Window gedrückt!"); Activator.getDefault().getEcp1Client().getPlayController().breakExecution(); } } /** * <code>SelectionListener</code> of Halt Button from * <code>EngineView</code> */ class HaltButtonSelectionListener implements SelectionListener { /** * {@inheritDoc} */ @Override public void widgetDefaultSelected(SelectionEvent e) { } /** * {@inheritDoc} */ @Override public void widgetSelected(SelectionEvent e) { System.out.println("Halt Knopf im Engine Window gedrückt!"); Activator.getDefault().getEcp1Client().getPlayController().halt(); } } /** * <code>SelectionListener</code> of Start Button from * <code>EngineView</code> */ class StartButtonSelectionListener implements SelectionListener { /** * {@inheritDoc} */ @Override public void widgetDefaultSelected(SelectionEvent e) { } /** * {@inheritDoc} */ @Override public void widgetSelected(SelectionEvent e) { // die Engine dann startet. 
// NOTE(review): StartButtonSelectionListener.widgetSelected() below only shows a warning
// dialog — the start action is not implemented yet. The Kill/Connect/Disconnect listeners
// that follow delegate to the play controller's shutdownEngine() and to the workbench
// handler service (connectCommand / disconnectCommand) respectively.
MessageDialog.openWarning(null, "Warning", "Start löst noch keine Aktion aus!"); } } /** * <code>SelectionListener</code> of Kill Button from * <code>EngineView</code> */ class KillButtonSelectionListener implements SelectionListener { /** * {@inheritDoc} */ @Override public void widgetDefaultSelected(SelectionEvent e) { } /** * {@inheritDoc} */ @Override public void widgetSelected(SelectionEvent e) { Activator.getDefault().getEcp1Client().getPlayController().shutdownEngine(); } } /** * <code>SelectionListener</code> of Connect Button from * <code>EngineView</code> */ class ConnectButtonSelectionListener implements SelectionListener { /** * {@inheritDoc} */ @Override public void widgetDefaultSelected(SelectionEvent e) { } /** * {@inheritDoc} */ @Override public void widgetSelected(SelectionEvent e) { if( !Activator.getDefault().getEcp1Client().isRunning()) { // start ecp1Client IHandlerService handlerService = (IHandlerService) PlatformUI.getWorkbench().getService(IHandlerService.class); try { handlerService.executeCommand("de.ptb.epics.eve.viewer.connectCommand", null); } catch (Exception e2) { // TODO Auto-generated catch block e2.printStackTrace(); } } } } /** * <code>SelectionListener</code> of Disconnect Button from * <code>EngineView</code> */ class DisconnectButtonSelectionListener implements SelectionListener { /** * {@inheritDoc} */ @Override public void widgetDefaultSelected(SelectionEvent e) { } /** * {@inheritDoc} */ @Override public void widgetSelected(SelectionEvent e) { if( Activator.getDefault().getEcp1Client().isRunning()) { // start ecp1Client IHandlerService handlerService = (IHandlerService) PlatformUI.getWorkbench().getService(IHandlerService.class); try { handlerService.executeCommand("de.ptb.epics.eve.viewer.disconnectCommand", null); } catch (Exception e2) { // TODO Auto-generated catch block e2.printStackTrace(); } } } } /** * <code>SelectionListener</code> of AutoPlayOn Button from * <code>EngineView</code> */ class 
// Remaining inner classes: AutoPlayOn/AutoPlayOff forward to
// PlayListController.setAutoplay(true/false), and CommentSendButtonSelectionListener sends
// the comment text field's contents as a live comment via addLiveComment(). The final "}"
// closes the enclosing view class.
AutoPlayOnButtonSelectionListener implements SelectionListener { /** * {@inheritDoc} */ @Override public void widgetDefaultSelected( final SelectionEvent e ) { } /** * {@inheritDoc} */ @Override public void widgetSelected( final SelectionEvent e ) { System.out.println("AutoPlayOn Knopf im Engine Window gedrückt!"); Activator.getDefault().getEcp1Client().getPlayListController().setAutoplay(true); } } /** * <code>SelectionListener</code> of AutoPlayOff Button from * <code>EngineView</code> */ class AutoPlayOffButtonSelectionListener implements SelectionListener { /** * {@inheritDoc} */ @Override public void widgetDefaultSelected( final SelectionEvent e ) { } /** * {@inheritDoc} */ @Override public void widgetSelected( final SelectionEvent e ) { System.out.println("AutoPlayOff Knopf im Engine Window gedrückt!"); Activator.getDefault().getEcp1Client().getPlayListController().setAutoplay(false); } } /** * <code>SelectionListener</code> of SendtoFile Button from * <code>EngineView</code> */ class CommentSendButtonSelectionListener implements SelectionListener { /** * {@inheritDoc} */ @Override public void widgetDefaultSelected(SelectionEvent e) { // TODO Auto-generated method stub } /** * {@inheritDoc} */ @Override public void widgetSelected(SelectionEvent e) { // TODO Auto-generated method stub Activator.getDefault().getEcp1Client().getPlayController().addLiveComment(commentText.getText()); } } }
package de.uniba.ppn.tananzeiger.gui;

import java.io.File;

import javax.swing.JFileChooser;
import javax.swing.JMenuItem;

/**
 * Small wrapper around {@link JFileChooser} that shows an "open" dialog and
 * remembers the last file the user approved.
 *
 * <p>NOTE(review): the class both extends {@link JFileChooser} and delegates
 * to an internal {@code JFileChooser} instance; only the internal instance is
 * ever shown. Kept as-is so the public type hierarchy does not change.
 */
public class FileChooser extends JFileChooser {

    private static final long serialVersionUID = 1L;

    /** Result code of the most recent dialog run (e.g. {@link JFileChooser#APPROVE_OPTION}). */
    int choice = 0;

    /** The chooser that is actually displayed. */
    JFileChooser fileChooser = new JFileChooser();

    /** Last approved file; stays {@code null} until the user approves a selection. */
    File toLoad = null;

    public FileChooser() {
    }

    /**
     * Shows the open dialog and returns the selected file.
     *
     * @param load component used as the dialog's parent
     * @return the newly selected file on approval; otherwise the previously
     *         approved file, or {@code null} if none was ever approved
     *         (unchanged caller-visible behavior)
     */
    public File chooseFile(JMenuItem load) {
        // BUG FIX: the title was previously set AFTER showOpenDialog() had
        // already returned, so the dialog was always displayed with the
        // default title. It must be set before the dialog is shown.
        fileChooser.setDialogTitle("Bitte wählen Sie eine Datei aus");
        choice = fileChooser.showOpenDialog(load);
        if (choice == JFileChooser.APPROVE_OPTION) {
            toLoad = fileChooser.getSelectedFile();
        }
        return toLoad;
    }
}
package mille.bean; import java.util.ArrayList; import java.util.Collections; import mille.bean.CardTypes.*; public class Deck { private ArrayList<Card> cards; public Deck() { cards = new ArrayList<>(); // TODO: change this CardImage defaultImage = new CardImage(); // constructs the deck by filling in all the card types for (int i = 0; i < 3; i++) { cards.add(new AccidentCard(defaultImage)); } for (int i = 0; i < 3; i++) { cards.add(new OutOfGasCard(defaultImage)); } for (int i = 0; i < 3; i++) { cards.add(new FlatTireCard(defaultImage)); } for (int i = 0; i < 4; i++) { cards.add(new SpeedLimitCard(defaultImage)); } for (int i = 0; i < 5; i++) { cards.add(new StopCard(defaultImage)); } for (int i = 0; i < 6; i++) { cards.add(new RepairsCard(defaultImage)); } for (int i = 0; i < 6; i++) { cards.add(new GasolineCard(defaultImage)); } for (int i = 0; i < 6; i++) { cards.add(new SpareTireCard(defaultImage)); } for (int i = 0; i < 6; i++) { cards.add(new EndOfLimitCard(defaultImage)); } for (int i = 0; i < 14; i++) { cards.add(new RollCard(defaultImage)); } cards.add(new DrivingAceCard(defaultImage)); cards.add(new ExtraTankCard(defaultImage)); cards.add(new PunctureProofCard(defaultImage)); cards.add(new RightOfWayCard(defaultImage)); for (int i = 0; i < 10; i++) { cards.add(new Distance25Card(defaultImage)); } for (int i = 0; i < 10; i++) { cards.add(new Distance50Card(defaultImage)); } for (int i = 0; i < 10; i++) { cards.add(new Distance75Card(defaultImage)); } for (int i = 0; i < 12; i++) { cards.add(new Distance100Card(defaultImage)); } for (int i = 0; i < 4; i++) { cards.add(new Distance200Card(defaultImage)); } } public void shuffle() { // rearranges all the cards in the Deck Collections.shuffle(cards); } public Card getTopCard() { // returns the card at the top of the Deck return cards.get(cards.size() - 1); } public Card takeTopCard() { // returns the top card and removes it from the Deck Card topCard = getTopCard(); cards.remove(cards.size() - 1); return 
topCard; } }
// NOTE(review): this JUnit test class lost its original line breaks during extraction.
// The commented-out DoubleValueListener / EnumValueListener blocks have fused with live
// code, the for-loop header further down is truncated ("for (int i = infos.length-1;
// i >= 0; i" — the decrement and opening brace are missing), and one
// System.out.println string literal is cut off mid-token. This excerpt does NOT compile
// as-is; recover the formatting from version control before making any code change.
// The test itself reads EPICS process-variable characteristics synchronously through the
// simple-DAL IProcessVariableConnectionService and prints the values.
package org.csstudio.platform.simpledal; import static org.junit.Assert.*; import java.util.concurrent.Semaphore; import org.csstudio.platform.internal.simpledal.dal.DalConnector; import org.csstudio.platform.model.pvs.ControlSystemEnum; import org.csstudio.platform.model.pvs.IProcessVariableAddress; import org.csstudio.platform.model.pvs.ProcessVariableAdressFactory; import org.eclipse.core.runtime.IProgressMonitor; import org.eclipse.core.runtime.IStatus; import org.eclipse.core.runtime.Status; import org.eclipse.core.runtime.jobs.Job; import org.epics.css.dal.CharacteristicInfo; import org.epics.css.dal.DoubleProperty; import org.epics.css.dal.Timestamp; import org.junit.After; import org.junit.Before; import org.junit.Test; public class RecordCombinationTest { private final class ObjectValueListener implements IProcessVariableValueListener<Object> { public void connectionStateChanged(ConnectionState connectionState) { // TODO Auto-generated method stub } public void errorOccured(String error) { System.out.println("Error: " + error); } public void valueChanged(Object value, Timestamp timestamp) { System.out.println("Object Value change: " + value + " on " + timestamp); assertNotNull(value); } } // private final class DoubleValueListener implements IProcessVariableValueListener<Double> { // @Override // public void connectionStateChanged(ConnectionState connectionState) { // // TODO Auto-generated method stub // @Override // public void errorOccured(String error) { // System.out.println("Error: " + error); // @Override // public void valueChanged(Double value, Timestamp timestamp) { // System.out.println("Double Value change: " + value + " on " + timestamp); // assertNotNull(value); // private final class EnumValueListener implements IProcessVariableValueListener<Enum> { // @Override // public void connectionStateChanged(ConnectionState connectionState) { // // TODO Auto-generated method stub // @Override // public void errorOccured(String error) { // 
// NOTE(review): setUp() below builds the address factory and connection service;
// testname() then iterates the default DoubleProperty characteristics and reads each
// one synchronously from the hard-coded EPICS record "krykWetter:fdUsed_ai".
System.out.println("Error: " + error); // @Override // public void valueChanged(Enum value, Timestamp timestamp) { // System.out.println("Enum Value change: " + value + " on " + timestamp); // assertNotNull(value); private ProcessVariableAdressFactory _addressFactory; private IProcessVariableConnectionService _connectionService; @Before public void setUp() throws Exception { // the factory for pv addresses _addressFactory = ProcessVariableAdressFactory.getInstance(); assertNotNull(_addressFactory); // the connection service _connectionService = ProcessVariableConnectionServiceFactory.getDefault() .createProcessVariableConnectionService(); assertNotNull(_connectionService); } @Test public void testname() throws Exception { CharacteristicInfo[] infos= CharacteristicInfo.getDefaultCharacteristics(DoubleProperty.class, null); // String rawName= ControlSystemEnum.DAL_SIMULATOR.getPrefix()+"://D1:P1"; String rawName = ControlSystemEnum.EPICS.getPrefix() + "://krykWetter:fdUsed_ai"; for (int i = infos.length-1; i >= 0; i String rawNameHHSV = ControlSystemEnum.EPICS.getPrefix() + "://krykWetter:fdUsed_ai["+infos[i].getName()+"]"; // String rawNameHHSV = ControlSystemEnum.EPICS.getPrefix() + "://krykWetter:fdUsed_ai"; // String rawName= ControlSystemEnum.EPICS.getPrefix()+"://krykWetter:fdUsed_ai"; // String rawName= ControlSystemEnum.EPICS.getPrefix()+"://krykWetter:fdUsed_ai"; System.out.println(rawName); // IProcessVariableAddress record = _addressFactory.createProcessVariableAdress(rawName); IProcessVariableAddress hhsv = _addressFactory.createProcessVariableAdress(rawNameHHSV); System.out.println("Characteristic: "+hhsv.getFullName()); System.out.println("Characteristic: "+hhsv.getCharacteristic()); // System.out.println(record.toString()); // System.out.println(record.toDalRemoteInfo().toString()); // _connectionService.readValueAsynchronously(hhsv, ValueType.OBJECT, new ObjectValueListener()); Object readValueSynchronously = 
_connectionService.readValueSynchronously(hhsv, ValueType.OBJECT); System.out.println("Value: " + readValueSynchronously); System.out.println(" } // _connectionService.readValueAsynchronously(record, ValueType.DOUBLE, // new DoubleValueListener()); // _connectionService.readValueAsynchronously(hhsv, ValueType.DOUBLE, // new DoubleValueListener()); Thread.sleep(5000); } @After public void tearDown() throws Exception { } }
package editor.gui.inventory;

import java.awt.Dialog;
import java.awt.Dimension;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

import javax.swing.BorderFactory;
import javax.swing.JButton;
import javax.swing.JDialog;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JProgressBar;
import javax.swing.JScrollPane;
import javax.swing.JTextArea;
import javax.swing.SwingWorker;
import javax.swing.WindowConstants;

import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;

import editor.collection.Inventory;
import editor.database.card.Card;
import editor.database.card.CardLayout;
import editor.database.card.TransformCard;
import editor.database.card.FlipCard;
import editor.database.card.MeldCard;
import editor.database.card.SingleCard;
import editor.database.card.SplitCard;
import editor.database.characteristics.Expansion;
import editor.database.characteristics.Legality;
import editor.database.characteristics.ManaType;
import editor.database.characteristics.Rarity;
import editor.filter.leaf.options.multi.CardTypeFilter;
import editor.filter.leaf.options.multi.LegalityFilter;
import editor.filter.leaf.options.multi.SubtypeFilter;
import editor.filter.leaf.options.multi.SupertypeFilter;
import editor.gui.SettingsDialog;

/**
 * This class represents a dialog that shows the progress for loading the
 * inventory and blocking the main frame until it is finished.
 *
 * @author Alec Roelke
 */
@SuppressWarnings("serial")
public class InventoryLoadDialog extends JDialog
{
    /**
     * Background worker that parses the inventory JSON file off the Event
     * Dispatch Thread and publishes progress strings back to the dialog.
     */
    private class InventoryLoadWorker extends SwingWorker<Inventory, String>
    {
        /**
         * File to load from.
         */
        private File file;

        /**
         * Create a new InventoryWorker.
         *
         * @param f #File to load
         */
        public InventoryLoadWorker(File f)
        {
            super();
            file = f;

            // Start indeterminate; switch to determinate once setProgress(0)
            // is called from doInBackground (progress property updates arrive
            // on the EDT via this listener).
            progressBar.setIndeterminate(true);
            addPropertyChangeListener((e) -> {
                if ("progress".equals(e.getPropertyName()))
                {
                    int p = (Integer)e.getNewValue();
                    progressBar.setIndeterminate(p < 0);
                    progressBar.setValue(p);
                }
            });
        }

        /**
         * Convert a card that has a single face but incorrectly is loaded as a
         * multi-faced card into a card with a {@link CardLayout#NORMAL} layout.
         *
         * @param card card to convert
         * @return a {@link Card} with the same information as the input but a
         * {@link CardLayout#NORMAL} layout.
         */
        private Card convertToNormal(Card card)
        {
            return new SingleCard(CardLayout.NORMAL,
                    card.name().get(0),
                    card.manaCost().get(0).toString(),
                    new ArrayList<>(card.colors()),
                    new ArrayList<>(card.colorIdentity()),
                    card.supertypes(),
                    card.types(),
                    card.subtypes(),
                    card.printedTypes().get(0),
                    card.rarity(),
                    card.expansion(),
                    card.oracleText().get(0),
                    card.flavorText().get(0),
                    card.printedText().get(0),
                    card.artist().get(0),
                    card.multiverseid().get(0),
                    card.number().get(0),
                    card.power().get(0).toString(),
                    card.toughness().get(0).toString(),
                    card.loyalty().get(0).toString(),
                    new TreeMap<>(card.rulings()),
                    card.legality());
        }

        /**
         * Parse the inventory file: read every set, build a {@link Card} per
         * printing, stitch multi-faced cards together, de-duplicate by
         * multiverseid, populate the global expansion/type/format lists, and
         * re-attach saved card tags from the settings.
         *
         * @return the loaded {@link Inventory} (empty if loading was cancelled)
         * @throws Exception if the file cannot be read or parsed
         */
        @Override
        protected Inventory doInBackground() throws Exception
        {
            publish("Opening " + file.getName() + "...");

            List<Card> cards = new ArrayList<>();
            // Maps each multi-faced card onto the names of all of its faces
            Map<Card, List<String>> faces = new HashMap<>();
            Set<Expansion> expansions = new HashSet<>();
            Set<String> blockNames = new HashSet<>();
            Set<String> supertypeSet = new HashSet<>();
            Set<String> typeSet = new HashSet<>();
            Set<String> subtypeSet = new HashSet<>();
            Set<String> formatSet = new HashSet<>();

            // Read the inventory file
            // NOTE(review): "UTF8" is a legal (legacy) charset alias; consider
            // StandardCharsets.UTF_8 for clarity.
            try (BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(file), "UTF8")))
            {
                publish("Parsing " + file.getName() + "...");
                JsonObject root = new JsonParser().parse(reader).getAsJsonObject();
                // Total card count across all sets, used to scale the progress bar
                int numCards = 0;
                for (Map.Entry<String, JsonElement> setNode : root.entrySet())
                    numCards += setNode.getValue().getAsJsonObject().get("cards").getAsJsonArray().size();

                publish("Reading cards from " + file.getName() + "...");
                setProgress(0);
                for (Map.Entry<String, JsonElement> setNode : root.entrySet())
                {
                    // Bail out quickly on cancellation, clearing partial state
                    if (isCancelled())
                    {
                        expansions.clear();
                        blockNames.clear();
                        supertypeSet.clear();
                        typeSet.clear();
                        subtypeSet.clear();
                        formatSet.clear();
                        cards.clear();
                        return new Inventory();
                    }

                    // Create the new Expansion (missing optional codes fall back to "code")
                    JsonObject setProperties = setNode.getValue().getAsJsonObject();
                    JsonArray setCards = setProperties.get("cards").getAsJsonArray();
                    Expansion set = new Expansion(setProperties.get("name").getAsString(),
                            setProperties.has("block") ? setProperties.get("block").getAsString() : "<No Block>",
                            setProperties.get("code").getAsString(),
                            setProperties.get(setProperties.has("oldCode") ? "oldCode" : "code").getAsString(),
                            setProperties.get(setProperties.has("magicCardsInfoCode") ? "magicCardsInfoCode" : "code").getAsString().toUpperCase(),
                            setProperties.get(setProperties.has("gathererCode") ? "gathererCode" : "code").getAsString(),
                            setCards.size(),
                            LocalDate.parse(setProperties.get("releaseDate").getAsString(), Expansion.DATE_FORMATTER));
                    expansions.add(set);
                    blockNames.add(set.block);
                    publish("Loading cards from " + set + "...");

                    for (JsonElement cardElement : setCards)
                    {
                        // Create the new card for the expansion
                        JsonObject card = cardElement.getAsJsonObject();

                        // Card's multiverseid. Skip cards that aren't in gatherer
                        long multiverseid = card.has("multiverseId") ? card.get("multiverseId").getAsLong() : -1;
                        if (multiverseid < 0)
                            continue;

                        // Card's name
                        String name = card.get("name").getAsString();

                        // If the card is a token, skip it
                        // (unrecognized layout names throw IllegalArgumentException)
                        CardLayout layout;
                        try
                        {
                            layout = CardLayout.valueOf(card.get("layout").getAsString().toUpperCase().replaceAll("[^A-Z]", "_"));
                        }
                        catch (IllegalArgumentException e)
                        {
                            errors.add(name + " (" + set + "): " + e.getMessage());
                            continue;
                        }

                        // Card's mana cost
                        String mana = card.has("manaCost") ? card.get("manaCost").getAsString() : "";

                        // Card's set of colors (which is stored as a list, since order matters)
                        List<ManaType> colors = new ArrayList<>();
                        if (card.has("colors"))
                        {
                            JsonArray colorsArray = card.get("colors").getAsJsonArray();
                            for (JsonElement colorElement : colorsArray)
                                colors.add(ManaType.parseManaType(colorElement.getAsString()));
                        }

                        // Card's color identity
                        List<ManaType> colorIdentity = new ArrayList<>();
                        {
                            if (card.has("colorIdentity"))
                            {
                                JsonArray identityArray = card.get("colorIdentity").getAsJsonArray();
                                for (JsonElement identityElement : identityArray)
                                    colorIdentity.add(ManaType.parseManaType(identityElement.getAsString()));
                            }
                        }

                        // Card's set of supertypes
                        Set<String> supertypes = new LinkedHashSet<>();
                        if (card.has("supertypes"))
                        {
                            JsonArray superArray = card.get("supertypes").getAsJsonArray();
                            for (JsonElement superElement : superArray)
                            {
                                supertypes.add(superElement.getAsString());
                                supertypeSet.add(superElement.getAsString());
                            }
                        }

                        // Card's set of types
                        Set<String> types = new LinkedHashSet<>();
                        for (JsonElement typeElement : card.get("types").getAsJsonArray())
                        {
                            types.add(typeElement.getAsString());
                            typeSet.add(typeElement.getAsString());
                        }

                        // Card's set of subtypes
                        Set<String> subtypes = new LinkedHashSet<>();
                        if (card.has("subtypes"))
                        {
                            for (JsonElement subElement : card.get("subtypes").getAsJsonArray())
                            {
                                subtypes.add(subElement.getAsString());
                                subtypeSet.add(subElement.getAsString());
                            }
                        }

                        // Card's printed types
                        String printedTypes = card.has("originalType") ? card.get("originalType").getAsString() : "";

                        // Card's rarity
                        Rarity rarity = Rarity.parseRarity(card.get("rarity").getAsString());

                        // Card's rules text
                        String text = card.has("text") ? card.get("text").getAsString() : "";

                        // Card's flavor text
                        String flavor = card.has("flavor") ? card.get("flavor").getAsString() : "";

                        // Card's printed text
                        String printed = card.has("originalText") ? card.get("originalText").getAsString() : "";

                        // Card's artist
                        String artist = card.get("artist").getAsString();

                        // Card's number (this is a string since some don't have numbers or are things like "1a")
                        String number = card.has("number") ? card.get("number").getAsString() : "";

                        // Card's power and toughness (empty if it doesn't have power or toughness)
                        String power = card.has("power") ? card.get("power").getAsString() : "";
                        String toughness = card.has("toughness") ? card.get("toughness").getAsString() : "";

                        // Card's loyalty (empty if it isn't a planeswalker or is Garruk, the Veil-Cursed)
                        String loyalty = "";
                        if (card.has("loyalty"))
                        {
                            JsonElement element = card.get("loyalty");
                            loyalty = element.isJsonNull() ? "X" : element.getAsString();
                        }

                        // Card's rulings, grouped by date
                        TreeMap<Date, List<String>> rulings = new TreeMap<>();
                        DateFormat format = new SimpleDateFormat("yyyy-MM-dd");
                        if (card.has("rulings"))
                        {
                            for (JsonElement l : card.get("rulings").getAsJsonArray())
                            {
                                JsonObject o = l.getAsJsonObject();
                                Date date = format.parse(o.get("date").getAsString());
                                String ruling = o.get("text").getAsString();
                                if (!rulings.containsKey(date))
                                    rulings.put(date, new ArrayList<>());
                                rulings.get(date).add(ruling);
                            }
                        }

                        // Card's legality in each format
                        Map<String, Legality> legality = new HashMap<>();
                        if (card.has("legalities"))
                        {
                            for (var entry : card.get("legalities").getAsJsonObject().entrySet())
                            {
                                formatSet.add(entry.getKey());
                                legality.put(entry.getKey(), Legality.parseLegality(entry.getValue().getAsString()));
                            }
                        }

                        // Create the new card with all the values acquired above
                        Card c = new SingleCard(layout, name, mana, colors, colorIdentity,
                                supertypes, types, subtypes, printedTypes, rarity, set,
                                text, flavor, printed, artist, multiverseid, number,
                                power, toughness, loyalty, rulings, legality);

                        // Add to map of faces if the card has multiple faces
                        if (layout.isMultiFaced)
                        {
                            List<String> names = new ArrayList<>();
                            for (JsonElement e : card.get("names").getAsJsonArray())
                                names.add(e.getAsString());
                            faces.put(c, names);
                        }

                        cards.add(c);
                        setProgress(cards.size()*100/numCards);
                    }
                }

                // Each multi-faced card was loaded once per face above; group the
                // faces of the same card (same expansion) back into one Card.
                publish("Processing multi-faced cards...");
                List<Card> facesList = new ArrayList<>(faces.keySet());
                while (!facesList.isEmpty())
                {
                    boolean error = false;

                    Card face = facesList.remove(0);
                    List<String> faceNames = faces.get(face);
                    List<Card> otherFaces = new ArrayList<>();
                    for (Card c : facesList)
                        if (faceNames.contains(c.unifiedName()) && c.expansion().equals(face.expansion()))
                            otherFaces.add(c);
                    facesList.removeAll(otherFaces);
                    otherFaces.add(face);
                    cards.removeAll(otherFaces);
                    // Order the faces as listed in the card's "names" array
                    otherFaces.sort(Comparator.comparingInt((a) -> faceNames.indexOf(a.unifiedName())));
                    // On any error below, the faces are re-added as normal
                    // single-faced cards via convertToNormal instead of dropped.
                    switch (face.layout())
                    {
                    case SPLIT:
                        if (otherFaces.size() < 2)
                        {
                            errors.add(face.toString() + " (" + face.expansion() + "): Can't find other face(s) for split card.");
                            error = true;
                        }
                        else
                        {
                            for (Card f : otherFaces)
                            {
                                if (f.layout() != CardLayout.SPLIT)
                                {
                                    errors.add(face.toString() + " (" + face.expansion() + "): Can't join non-split faces into a split card.");
                                    error = true;
                                }
                            }
                        }
                        if (!error)
                            cards.add(new SplitCard(otherFaces));
                        else
                            for (Card f : otherFaces)
                                cards.add(convertToNormal(f));
                        break;
                    case FLIP:
                        if (otherFaces.size() < 2)
                        {
                            errors.add(face.toString() + " (" + face.expansion() + "): Can't find other side of flip card.");
                            error = true;
                        }
                        else if (otherFaces.size() > 2)
                        {
                            errors.add(face.toString() + " (" + face.expansion() + "): Too many sides for flip card.");
                            error = true;
                        }
                        else if (otherFaces.get(0).layout() != CardLayout.FLIP || otherFaces.get(1).layout() != CardLayout.FLIP)
                        {
                            errors.add(face.toString() + " (" + face.expansion() + "): Can't join non-flip faces into a flip card.");
                            error = true;
                        }
                        if (!error)
                            cards.add(new FlipCard(otherFaces.get(0), otherFaces.get(1)));
                        else
                            for (Card f : otherFaces)
                                cards.add(convertToNormal(f));
                        break;
                    case TRANSFORM:
                        if (otherFaces.size() < 2)
                        {
                            errors.add(face.toString() + " (" + face.expansion() + "): Can't find other face of double-faced card.");
                            error = true;
                        }
                        else if (otherFaces.size() > 2)
                        {
                            errors.add(face.toString() + " (" + face.expansion() + "): Too many faces for double-faced card.");
                            error = true;
                        }
                        else if (otherFaces.get(0).layout() != CardLayout.TRANSFORM || otherFaces.get(1).layout() != CardLayout.TRANSFORM)
                        {
                            errors.add(face.toString() + " (" + face.expansion() + "): Can't join single-faced cards into double-faced cards.");
                            error = true;
                        }
                        if (!error)
                            cards.add(new TransformCard(otherFaces.get(0), otherFaces.get(1)));
                        else
                            for (Card f : otherFaces)
                                cards.add(convertToNormal(f));
                        break;
                    case MELD:
                        if (otherFaces.size() < 3)
                        {
                            errors.add(face.toString() + " (" + face.expansion() + "): Can't find some faces of meld card.");
                            error = true;
                        }
                        else if (otherFaces.size() > 3)
                        {
                            errors.add(face.toString() + " (" + face.expansion() + "): Too many faces for meld card.");
                            error = true;
                        }
                        else if (otherFaces.get(0).layout() != CardLayout.MELD || otherFaces.get(1).layout() != CardLayout.MELD || otherFaces.get(2).layout() != CardLayout.MELD)
                        {
                            errors.add(face.toString() + " (" + face.expansion() + "): Can't join single-faced cards into meld cards.");
                            error = true;
                        }
                        if (!error)
                        {
                            // Two MeldCards are created, one per front face.
                            // NOTE(review): argument order assumes index 1 is the
                            // melded back face and 0/2 are the fronts — confirm
                            // against the MeldCard constructor contract.
                            cards.add(new MeldCard(otherFaces.get(0), otherFaces.get(2), otherFaces.get(1)));
                            cards.add(new MeldCard(otherFaces.get(2), otherFaces.get(0), otherFaces.get(1)));
                        }
                        else
                            for (Card f : otherFaces)
                                cards.add(convertToNormal(f));
                        break;
                    default:
                        break;
                    }
                }

                // Keep only the first card seen for each multiverseid
                publish("Removing duplicate entries...");
                Map<Long, Card> unique = new HashMap<>();
                for (Card c : cards)
                    if (!unique.containsKey(c.multiverseid().get(0)))
                        unique.put(c.multiverseid().get(0), c);
                cards = new ArrayList<>(unique.values());

                // Store the lists of expansion and block names and types and sort them alphabetically
                Expansion.expansions = expansions.stream().sorted().toArray(Expansion[]::new);
                Expansion.blocks = blockNames.stream().sorted().toArray(String[]::new);
                SupertypeFilter.supertypeList = supertypeSet.stream().sorted().toArray(String[]::new);
                CardTypeFilter.typeList = typeSet.stream().sorted().toArray(String[]::new);
                SubtypeFilter.subtypeList = subtypeSet.stream().sorted().toArray(String[]::new);
                LegalityFilter.formatList = formatSet.stream().sorted().toArray(String[]::new);
            }

            // Re-attach saved tags (settings format: "(multiverseid::[tag,tag,...])")
            Inventory inventory = new Inventory(cards);
            if (SettingsDialog.getAsString(SettingsDialog.CARD_TAGS) != null)
            {
                Matcher m = Pattern.compile("\\((.*?)::\\[(.*?)\\]\\)").matcher(SettingsDialog.getAsString(SettingsDialog.CARD_TAGS));
                while (m.find())
                    Card.tags.put(inventory.get(Long.parseLong(m.group(1))), Arrays.stream(m.group(2).split(",")).collect(Collectors.toSet()));
            }
            return inventory;
        }

        /**
         * {@inheritDoc}
         * Close the dialog and allow it to return the Inventory
         * that was created.
         */
        @Override
        protected void done()
        {
            setVisible(false);
            dispose();

            // Errors are reported to stderr unless warnings are suppressed
            if (!SettingsDialog.getAsBoolean(SettingsDialog.SUPPRESS_LOAD_WARNINGS) && !errors.isEmpty())
            {
                System.err.println(errors.size() + " errors found while loading inventory:");
                for (String error : errors)
                    System.err.println("\t- " + error);
            }
            /*
            SwingUtilities.invokeLater(() -> {
                StringJoiner join = new StringJoiner("\n" + UnicodeSymbols.BULLET + " ");
                join.add("Errors ocurred while loading the following card(s):");
                for (String failure : errors)
                    join.add(failure);
                JOptionPane.showMessageDialog(null, join.toString(), "Warning", JOptionPane.WARNING_MESSAGE);
            });
            */
        }

        /**
         * {@inheritDoc}
         * Change the label in the dialog to match the stage this worker is in.
         */
        @Override
        protected void process(List<String> chunks)
        {
            for (String chunk : chunks)
            {
                progressLabel.setText(chunk);
                progressArea.append(chunk + "\n");
            }
        }
    }

    /**
     * List of errors that occurred while loading cards.
     */
    private List<String> errors;
    /**
     * Area showing past and current progress of loading.
     */
    private JTextArea progressArea;
    /**
     * Progress bar showing overall progress of loading.
     */
    private JProgressBar progressBar;
    /**
     * Label showing the current stage of loading.
     */
    private JLabel progressLabel;
    /**
     * Worker that loads the inventory.
     */
    private InventoryLoadWorker worker;

    /**
     * Create a new InventoryLoadDialog over the given {@link JFrame}.
     *
     * @param owner owner of the new InventoryLoadDialog
     */
    public InventoryLoadDialog(JFrame owner)
    {
        super(owner, "Loading Inventory", Dialog.ModalityType.APPLICATION_MODAL);
        setPreferredSize(new Dimension(350, 220));
        setResizable(false);
        // Closing is only possible via the Cancel button
        setDefaultCloseOperation(WindowConstants.DO_NOTHING_ON_CLOSE);

        worker = null;
        errors = new ArrayList<>();

        // Content panel
        GridBagLayout layout = new GridBagLayout();
        layout.columnWidths = new int[]{0};
        layout.columnWeights = new double[]{1.0};
        layout.rowHeights = new int[]{0, 0, 0, 0};
        layout.rowWeights = new double[]{0.0, 0.0, 1.0, 0.0};
        JPanel contentPanel = new JPanel(layout);
        contentPanel.setBorder(BorderFactory.createEmptyBorder(10, 10, 10, 10));
        setContentPane(contentPanel);

        // Stage progress label
        progressLabel = new JLabel("Loading inventory...");
        GridBagConstraints labelConstraints = new GridBagConstraints();
        labelConstraints.anchor = GridBagConstraints.WEST;
        labelConstraints.fill = GridBagConstraints.BOTH;
        labelConstraints.gridx = 0;
        labelConstraints.gridy = 0;
        labelConstraints.insets = new Insets(0, 0, 2, 0);
        contentPanel.add(progressLabel, labelConstraints);

        // Overall progress bar
        progressBar = new JProgressBar();
        GridBagConstraints barConstraints = new GridBagConstraints();
        barConstraints.fill = GridBagConstraints.BOTH;
        barConstraints.gridx = 0;
        barConstraints.gridy = 1;
        barConstraints.insets = new Insets(0, 0, 2, 0);
        contentPanel.add(progressBar, barConstraints);

        // History text area
        progressArea = new JTextArea();
        progressArea.setEditable(false);
        GridBagConstraints areaConstraints = new GridBagConstraints();
        areaConstraints.fill = GridBagConstraints.BOTH;
        areaConstraints.gridx = 0;
        areaConstraints.gridy = 2;
        areaConstraints.insets = new Insets(0, 0, 10, 0);
        contentPanel.add(new JScrollPane(progressArea), areaConstraints);

        // Cancel button
        JButton cancelButton = new JButton("Cancel");
        cancelButton.addActionListener((e) -> {
            if (worker != null)
                worker.cancel(false);
        });
        GridBagConstraints cancelConstraints = new GridBagConstraints();
        cancelConstraints.gridx = 0;
        cancelConstraints.gridy = 3;
        contentPanel.add(cancelButton, cancelConstraints);

        pack();
    }

    /**
     * Make this dialog visible and then begin loading the inventory. Block until it is
     * complete, and then return the newly-created Inventory.
     *
     * @return the #Inventory that was created.
     */
    public Inventory createInventory(File file)
    {
        worker = new InventoryLoadWorker(file);
        worker.execute();
        // Dialog is application-modal: setVisible(true) blocks here until the
        // worker's done() hides and disposes it.
        setVisible(true);
        progressArea.setText("");
        try
        {
            return worker.get();
        }
        catch (InterruptedException | ExecutionException e)
        {
            JOptionPane.showMessageDialog(null, "Error loading inventory: " + e.getCause().getMessage() + ".", "Error", JOptionPane.ERROR_MESSAGE);
            e.printStackTrace();
            return new Inventory();
        }
        catch (CancellationException e)
        {
            // User pressed Cancel; return an empty inventory
            return new Inventory();
        }
    }
}
package hex;

import water.exceptions.H2OIllegalArgumentException;
import water.fvec.Frame;
import water.util.ArrayUtils;
import water.util.MathUtils;

/**
 * Model metrics for binary-classification models: AUC, log loss and
 * (optionally) gains/lift, on top of the supervised metrics (MSE, sigma).
 */
public class ModelMetricsBinomial extends ModelMetricsSupervised {
  public final AUC2 _auc;          // AUC (may be null)
  public final double _logloss;    // mean log loss over weighted rows
  public final GainsLift _gainsLift; // gains/lift table (may be null)

  public ModelMetricsBinomial(Model model, Frame frame, double mse, String[] domain, double sigma, AUC2 auc, double logloss, GainsLift gainsLift) {
    super(model, frame, mse, domain, sigma);
    _auc = auc;
    _logloss = logloss;
    _gainsLift = gainsLift;
  }

  /**
   * Look up the metrics for the given model/frame pair in the DKV.
   *
   * @throws H2OIllegalArgumentException if no binomial metrics are stored
   *         for that pair (including the case where nothing is stored at all)
   */
  public static ModelMetricsBinomial getFromDKV(Model model, Frame frame) {
    ModelMetrics mm = ModelMetrics.getFromDKV(model, frame);

    if( !(mm instanceof ModelMetricsBinomial) )
      throw new H2OIllegalArgumentException("Expected to find a Binomial ModelMetrics for model: " + model._key.toString() + " and frame: " + frame._key.toString(),
              "Expected to find a ModelMetricsBinomial for model: " + model._key.toString() + " and frame: " + frame._key.toString() + " but found a: " + (mm == null ? null : mm.getClass()));

    return (ModelMetricsBinomial) mm;
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append(super.toString());
    // AUC, CM and gains/lift are only appended when available
    if (_auc != null) sb.append(" AUC: " + (float)_auc._auc + "\n");
    sb.append(" logloss: " + (float)_logloss + "\n");
    if (cm() != null) sb.append(" CM: " + cm().toASCII());
    if (_gainsLift != null) sb.append(_gainsLift.createTwoDimTable());
    return sb.toString();
  }

  public double logloss() { return _logloss; }
  @Override public AUC2 auc() { return _auc; }

  /** Confusion matrix at the default threshold, or null if there is no AUC. */
  @Override public ConfusionMatrix cm() {
    if( _auc == null ) return null;
    double[][] cm = _auc.defaultCM();
    return cm == null ? null : new ConfusionMatrix(cm, _domain);
  }
  public GainsLift gainsLift() { return _gainsLift; }

  /**
   * Streaming builder that accumulates per-row squared error, log loss and
   * AUC statistics, supports map/reduce merging, and finally produces a
   * {@link ModelMetricsBinomial}.
   */
  public static class MetricBuilderBinomial<T extends MetricBuilderBinomial<T>> extends MetricBuilderSupervised<T> {
    protected double _logloss;       // weighted sum of -log(p(actual class))
    protected AUC2.AUCBuilder _auc;  // histogram-based AUC accumulator

    public MetricBuilderBinomial( String[] domain ) { super(2,domain); _auc = new AUC2.AUCBuilder(AUC2.NBINS); }

    /** AUC computed from the rows accumulated so far. */
    public double auc() {return new AUC2(_auc)._auc;}

    // Passed a float[] sized nclasses+1; ds[0] must be a prediction.  ds[1...nclasses-1] must be a class
    // distribution;
    @Override public double[] perRow(double ds[], float[] yact, Model m) {return perRow(ds, yact, 1, 0, m);}

    /**
     * Accumulate one (weighted) row.  Rows with a missing actual, missing
     * predictions, or zero/NaN weight are skipped; the unchanged ds is
     * returned either way for flow coding.
     */
    @Override public double[] perRow(double ds[], float[] yact, double w, double o, Model m) {
      if( Float .isNaN(yact[0]) ) return ds; // No errors if   actual   is missing
      if(ArrayUtils.hasNaNs(ds)) return ds;  // No errors if prediction has missing values (can happen for GLM)
      if(w == 0 || Double.isNaN(w)) return ds;
      final int iact = (int)yact[0];
      if( iact != 0 && iact != 1 ) return ds; // The actual is effectively a NaN
      _count++;
      _wcount += w;
      _wY += w*iact;
      _wYY += w*iact*iact;

      // Compute error: err is the distance from predicting the actual class with probability 1
      double err = iact+1 < ds.length ? 1-ds[iact+1] : 1;  // Error: distance from predicting ycls as 1.0
      _sumsqe += w*err*err;           // Squared error
      assert !Double.isNaN(_sumsqe);

      // Compute log loss (clamped by eps to avoid log(0) = -Inf)
      final double eps = 1e-15;
      _logloss -= w*Math.log(Math.max(eps, 1-err));
      _auc.perRow(ds[2],iact,w);      // ds[2] is the probability of class 1
      return ds;                // Flow coding
    }

    /** Merge another builder's accumulators into this one (map/reduce). */
    @Override public void reduce( T mb ) {
      super.reduce(mb); // sumseq, count
      _logloss += mb._logloss;
      _auc.reduce(mb._auc);
    }

    /**
     * Finalize the accumulated statistics into a ModelMetricsBinomial and
     * register it on the model.  If no rows were seen, all metrics are NaN
     * and AUC/gains-lift are null.  Gains/lift is only computed when a
     * predictions frame is supplied.
     */
    @Override public ModelMetrics makeModelMetrics(Model m, Frame f, Frame preds) {
      double mse = Double.NaN;
      double logloss = Double.NaN;
      double sigma = Double.NaN;
      if (_wcount > 0) {
        sigma = weightedSigma();
        mse = _sumsqe / _wcount;
        logloss = _logloss / _wcount;
        AUC2 auc = new AUC2(_auc);
        GainsLift gl = null;
        if (preds!=null) {
          gl = new GainsLift();
          gl.preds = preds.lastVec();
          gl.labels = f.vec(m._parms._response_column);
          gl.exec();
        }
        return m._output.addModelMetrics(new ModelMetricsBinomial(m, f, mse, _domain, sigma, auc, logloss, gl));
      } else {
        return m._output.addModelMetrics(new ModelMetricsBinomial(m, f, mse, null, sigma, null, logloss, null));
      }
    }

    public String toString(){
      if(_wcount == 0) return "empty, no rows";
      return "auc = " + MathUtils.roundToNDigits(auc(),3) + ", logloss = " + _logloss / _wcount;
    }
  }
}
package edu.cmu.cs.diamond.opendiamond;

import java.io.*;

import java.util.ArrayList;
import java.util.List;

/**
 * An ordered collection of {@link Filter}s plus an optional list of
 * application dependencies, serializable to the on-disk filter-spec format
 * via {@link #toString()}.
 */
public class Searchlet {
    // Filters in insertion order; the order is preserved in the generated spec.
    final private List<Filter> filters = new ArrayList<Filter>();

    private String[] dependencies;

    /**
     * Add a filter to the end of this searchlet.
     *
     * @param f filter to add
     */
    public void addFilter(Filter f) {
        filters.add(f);
    }

    /**
     * Set the application dependencies, replacing any previously set ones.
     * The array is copied defensively, so later modification by the caller
     * does not affect this searchlet.
     *
     * @param dependencies dependency names to record in the filter spec
     */
    public void setApplicationDependencies(String dependencies[]) {
        this.dependencies = dependencies.clone();
    }

    /**
     * Write this searchlet's textual filter spec to a temporary file.
     *
     * @return the temporary spec file (deleted on JVM exit)
     * @throws IOException if the file cannot be created or written
     */
    File createFilterSpecFile() throws IOException {
        File out = File.createTempFile("filterspec", ".txt");
        out.deleteOnExit();

        // Fix: the original leaked the writer if write() threw; close it in
        // a finally block.
        // NOTE(review): FileWriter uses the platform default charset; if
        // filter specs may contain non-ASCII text an explicit charset
        // should be specified instead.
        Writer w = new FileWriter(out);
        try {
            w.write(toString());
        } finally {
            w.close();
        }

        return out;
    }

    /**
     * Write each filter's code to its own temporary binary file.
     *
     * @return one file per filter, in filter order (deleted on JVM exit)
     * @throws IOException if a file cannot be created or written
     */
    File[] createFilterFiles() throws IOException {
        File result[] = new File[filters.size()];

        int i = 0;
        for (Filter f : filters) {
            File file = File.createTempFile("filter", ".bin");
            file.deleteOnExit();

            // Fix: the original leaked the stream if write() threw; the
            // DataOutputStream wrapper added nothing over write(byte[]),
            // so a plain OutputStream is used.
            OutputStream out = new FileOutputStream(file);
            try {
                out.write(f.getFilterCode().getBytes());
            } finally {
                out.close();
            }
            result[i++] = file;
        }

        return result;
    }

    /**
     * Render the filter spec: each filter's own spec text in order, followed
     * by a "FILTER APPLICATION" section listing REQUIRES lines when
     * dependencies have been set.
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        for (Filter f : filters) {
            sb.append(f.toString());
        }

        if (dependencies != null) {
            sb.append("FILTER APPLICATION\n");
            for (String d : dependencies) {
                sb.append("REQUIRES ").append(d).append('\n');
            }
        }

        return sb.toString();
    }
}
package edu.mit.streamjit.apps.test; import com.google.common.collect.ImmutableList; import edu.mit.streamjit.api.CompiledStream; import edu.mit.streamjit.api.Filter; import edu.mit.streamjit.api.Identity; import edu.mit.streamjit.api.IllegalStreamGraphException; import edu.mit.streamjit.api.Joiner; import edu.mit.streamjit.api.OneToOneElement; import edu.mit.streamjit.api.Pipeline; import edu.mit.streamjit.api.Rate; import edu.mit.streamjit.api.RoundrobinJoiner; import edu.mit.streamjit.api.RoundrobinSplitter; import edu.mit.streamjit.api.Splitjoin; import edu.mit.streamjit.api.Splitter; import edu.mit.streamjit.api.StreamCompiler; import edu.mit.streamjit.api.StreamElement; import edu.mit.streamjit.impl.common.BlobHostStreamCompiler; import edu.mit.streamjit.impl.common.CheckVisitor; import edu.mit.streamjit.impl.common.PrintStreamVisitor; import edu.mit.streamjit.impl.compiler.CompilerBlobFactory; import edu.mit.streamjit.impl.interp.DebugStreamCompiler; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Random; import java.util.Set; /** * Generates random streams. * * TODO: This (and all of test/) really doesn't belong under apps/; we should * have a separate package for sanity/regression tests (basically anything that * isn't real-world). 
* @author Jeffrey Bosboom <jeffreybosboom@gmail.com> * @since 7/26/2013 */ public final class StreamFuzzer { public interface FuzzElement { public OneToOneElement<Integer, Integer> instantiate(); public String toJava(); @Override public boolean equals(Object other); @Override public int hashCode(); } private static final int MAX_DEPTH = 5; public static FuzzElement generate() { return makeStream(MAX_DEPTH); } private static final Random rng = new Random(); private static final int FILTER_PROB = 50, PIPELINE_PROB = 25, SPLITJOIN_PROB = 25; private static FuzzElement makeStream(int depthLimit) { int r = rng.nextInt(FILTER_PROB + PIPELINE_PROB + SPLITJOIN_PROB); if (depthLimit == 0 || r < FILTER_PROB) { return makeFilter(); } else if (r < FILTER_PROB + PIPELINE_PROB) { return makePipeline(depthLimit); } else if (r < FILTER_PROB + PIPELINE_PROB + SPLITJOIN_PROB) { return makeSplitjoin(depthLimit); } else throw new AssertionError(r); } private static final ImmutableList<FuzzFilter> FILTERS = ImmutableList.<FuzzFilter>builder() .add(new FuzzFilter(Identity.class, ImmutableList.of())) .add(new FuzzFilter(Adder.class, ImmutableList.of(1))) .add(new FuzzFilter(Adder.class, ImmutableList.of(20))) .add(new FuzzFilter(Multiplier.class, ImmutableList.of(2))) .add(new FuzzFilter(Multiplier.class, ImmutableList.of(3))) .add(new FuzzFilter(Multiplier.class, ImmutableList.of(100))) .add(new FuzzFilter(Batcher.class, ImmutableList.of(2))) .add(new FuzzFilter(Batcher.class, ImmutableList.of(10))) .build(); private static FuzzFilter makeFilter() { return FILTERS.get(rng.nextInt(FILTERS.size())); } private static final class Adder extends Filter<Integer, Integer> { private final int addend; public Adder(int addend) { super(1, 1); this.addend = addend; } @Override public void work() { push(pop() + addend); } } private static final class Multiplier extends Filter<Integer, Integer> { private final int multiplier; public Multiplier(int multiplier) { super(1, 1); this.multiplier = 
multiplier; } @Override public void work() { push(pop() * multiplier); } } private static class Permuter extends Filter<Integer, Integer> { private final int[] permutation; public Permuter(int inputSize, int outputSize, int[] permutation) { super(Rate.create(inputSize), Rate.create(outputSize), Rate.create(0, outputSize)); this.permutation = permutation.clone(); for (int i : permutation) assert i >= 0 && i < inputSize; assert permutation.length == outputSize; } @Override public void work() { for (int i : permutation) push(peek(i)); for (int i = 0; i < permutation.length; ++i) pop(); } } private static final class Batcher extends Permuter { public Batcher(int batchSize) { super(batchSize, batchSize, makeIdentityPermutation(batchSize)); } private static int[] makeIdentityPermutation(int batchSize) { int[] retval = new int[batchSize]; for (int i = 0; i < retval.length; ++i) retval[i] = i; return retval; } } private static final int MAX_PIPELINE_LENGTH = 5; private static FuzzPipeline makePipeline(int depthLimit) { int length = rng.nextInt(MAX_PIPELINE_LENGTH) + 1; ImmutableList.Builder<FuzzElement> elements = ImmutableList.builder(); for (int i = 0; i < length; ++i) elements.add(makeStream(depthLimit - 1)); return new FuzzPipeline(elements.build()); } private static final int MAX_SPLITJOIN_BRANCHES = 5; private static FuzzSplitjoin makeSplitjoin(int depthLimit) { int numBranches = rng.nextInt(MAX_SPLITJOIN_BRANCHES) + 1; ImmutableList.Builder<FuzzElement> branches = ImmutableList.builder(); for (int i = 0; i < numBranches; ++i) branches.add(makeStream(depthLimit - 1)); return new FuzzSplitjoin(makeSplitter(), makeJoiner(), branches.build()); } private static FuzzSplitter makeSplitter() { return new FuzzSplitter(RoundrobinSplitter.class, ImmutableList.of()); } private static FuzzJoiner makeJoiner() { return new FuzzJoiner(RoundrobinJoiner.class, ImmutableList.of()); } private static final com.google.common.base.Joiner ARG_JOINER = com.google.common.base.Joiner.on(", 
");

	/**
	 * Base class for fuzzer wrappers around reflectively-instantiated stream
	 * elements: records the element class and the constructor arguments, and
	 * can re-create the element on demand via {@link #instantiate()}.
	 */
	private static class FuzzStreamElement<T extends StreamElement<Integer, Integer>> {
		private final Class<? extends T> filterClass;
		private final ImmutableList<? extends Object> arguments;
		//Lazily resolved by findConstructor(); deliberately not part of
		//equals()/hashCode() (marked transient as documentation of that).
		private transient Constructor<? extends T> constructor;
		protected FuzzStreamElement(Class<? extends T> filterClass, ImmutableList<? extends Object> arguments) {
			this.filterClass = filterClass;
			this.arguments = arguments;
		}
		/**
		 * Creates a fresh element instance from the recorded class and
		 * arguments.  Throws AssertionError if instantiation fails.
		 */
		public T instantiate() {
			if (constructor == null)
				constructor = findConstructor();
			try {
				return constructor.newInstance(arguments.toArray());
			} catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException ex) {
				throw new AssertionError("Failed to instantiate "+constructor+" with "+arguments, ex);
			}
		}
		/**
		 * Finds the unique public constructor compatible with the recorded
		 * arguments by actually invoking each candidate (note: this creates
		 * and discards one element per candidate constructor).  Throws
		 * AssertionError if none succeeds or if more than one does.
		 */
		private Constructor<? extends T> findConstructor() {
			@SuppressWarnings("unchecked")
			Constructor<? extends T>[] constructors = (Constructor<T>[])filterClass.getConstructors();
			List<Constructor<? extends T>> retvals = new ArrayList<>();
			Map<Constructor<? extends T>, Throwable> exceptions = new HashMap<>();
			for (Constructor<? extends T> ctor : constructors)
				try {
					ctor.newInstance(arguments.toArray());
					retvals.add(ctor);
				} catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException ex) {
					//Remember why this candidate failed, for the error message.
					exceptions.put(ctor, ex);
				}
			if (retvals.isEmpty())
				throw new AssertionError("Couldn't create a "+filterClass+" from "+arguments+": exceptions "+exceptions);
			if (retvals.size() > 1)
				throw new AssertionError("Creating a "+filterClass+" from "+arguments+" was ambiguous: "+retvals);
			return retvals.get(0);
		}
		/**
		 * Returns Java source text that would construct this element.
		 */
		public String toJava() {
			//This will generate unchecked code if the filter is generic.
			return "new " + filterClass.getCanonicalName() + "(" + ARG_JOINER.join(arguments)+")";
		}
		@Override
		public boolean equals(Object obj) {
			if (obj == null)
				return false;
			if (getClass() != obj.getClass())
				return false;
			final FuzzStreamElement<T> other = (FuzzStreamElement<T>)obj;
			if (!Objects.equals(this.filterClass, other.filterClass))
				return false;
			if (!Objects.equals(this.arguments, other.arguments))
				return false;
			return true;
		}
		@Override
		public int hashCode() {
			int hash = 7;
			hash = 41 * hash + Objects.hashCode(this.filterClass);
			hash = 41 * hash + Objects.hashCode(this.arguments);
			return hash;
		}
	}

	/**
	 * A fuzzer-generated Filter, instantiable on demand.
	 */
	private static final class FuzzFilter extends FuzzStreamElement<Filter<Integer, Integer>> implements FuzzElement {
		@SuppressWarnings({"unchecked","rawtypes"})
		private FuzzFilter(Class<? extends Filter> filterClass, ImmutableList<? extends Object> arguments) {
			super((Class<Filter<Integer, Integer>>)filterClass, arguments);
		}
		@Override
		public Filter<Integer, Integer> instantiate() {
			return super.instantiate();
		}
		//use inherited equals()/hashCode()
	}

	/**
	 * A fuzzer-generated pipeline of FuzzElements.
	 */
	private static final class FuzzPipeline implements FuzzElement {
		private final ImmutableList<FuzzElement> elements;
		private FuzzPipeline(ImmutableList<FuzzElement> elements) {
			this.elements = elements;
		}
		@Override
		public Pipeline<Integer, Integer> instantiate() {
			Pipeline<Integer, Integer> pipeline = new Pipeline<>();
			for (FuzzElement e : elements)
				pipeline.add(e.instantiate());
			return pipeline;
		}
		@Override
		public String toJava() {
			List<String> args = new ArrayList<>(elements.size());
			for (FuzzElement e : elements)
				args.add(e.toJava());
			return "new Pipeline(" + ARG_JOINER.join(args) + ")";
		}
		@Override
		public boolean equals(Object obj) {
			if (obj == null)
				return false;
			if (getClass() != obj.getClass())
				return false;
			final FuzzPipeline other = (FuzzPipeline)obj;
			if (!Objects.equals(this.elements, other.elements))
				return false;
			return true;
		}
		@Override
		public int hashCode() {
			int hash = 5;
			hash = 59 * hash + Objects.hashCode(this.elements);
			return hash;
		}
	}

	/**
	 * Can't implement FuzzElement because Splitter isn't a OneToOneElement, but
	 * can still share the instantiation code.
	 */
	private static final class FuzzSplitter extends FuzzStreamElement<Splitter<Integer, Integer>> {
		@SuppressWarnings({"unchecked","rawtypes"})
		private FuzzSplitter(Class<? extends Splitter> filterClass, ImmutableList<? extends Object> arguments) {
			super((Class<Splitter<Integer, Integer>>)filterClass, arguments);
		}
		@Override
		public Splitter<Integer, Integer> instantiate() {
			return super.instantiate();
		}
		//use inherited equals()/hashCode()
	}

	/**
	 * See comments on FuzzSplitter.
	 */
	private static final class FuzzJoiner extends FuzzStreamElement<Joiner<Integer, Integer>> {
		@SuppressWarnings({"unchecked","rawtypes"})
		private FuzzJoiner(Class<? extends Joiner> filterClass, ImmutableList<? extends Object> arguments) {
			super((Class<Joiner<Integer, Integer>>)filterClass, arguments);
		}
		@Override
		public Joiner<Integer, Integer> instantiate() {
			return super.instantiate();
		}
		//use inherited equals()/hashCode()
	}

	/**
	 * A fuzzer-generated splitjoin: a splitter, a joiner and a list of
	 * branches (each itself a FuzzElement).
	 */
	private static final class FuzzSplitjoin implements FuzzElement {
		private final FuzzSplitter splitter;
		private final FuzzJoiner joiner;
		private final ImmutableList<FuzzElement> branches;
		private FuzzSplitjoin(FuzzSplitter splitter, FuzzJoiner joiner, ImmutableList<FuzzElement> branches) {
			this.splitter = splitter;
			this.joiner = joiner;
			this.branches = branches;
		}
		@Override
		public OneToOneElement<Integer, Integer> instantiate() {
			Splitjoin<Integer, Integer> splitjoin = new Splitjoin<>(splitter.instantiate(), joiner.instantiate());
			for (FuzzElement e : branches)
				splitjoin.add(e.instantiate());
			return splitjoin;
		}
		@Override
		public String toJava() {
			List<String> args = new ArrayList<>(branches.size()+2);
			args.add(splitter.toJava());
			args.add(joiner.toJava());
			for (FuzzElement e : branches)
				args.add(e.toJava());
			return "new Splitjoin(" + ARG_JOINER.join(args) + ")";
		}
		@Override
		public boolean equals(Object obj) {
			if (obj == null)
				return false;
			if (getClass() != obj.getClass())
				return false;
			final FuzzSplitjoin other = (FuzzSplitjoin)obj;
			if (!Objects.equals(this.splitter, other.splitter))
				return false;
			if (!Objects.equals(this.joiner, other.joiner))
				return false;
			if (!Objects.equals(this.branches, other.branches))
				return false;
			return true;
		}
		@Override
		public int hashCode() {
			int hash = 7;
			hash = 71 * hash + Objects.hashCode(this.splitter);
			hash = 71 * hash + Objects.hashCode(this.joiner);
			hash = 71 * hash + Objects.hashCode(this.branches);
			return hash;
		}
	}

	//Number of consecutive integers fed to each compiled stream when
	//comparing outputs.
	private static final int INPUT_LENGTH = 1000;

	/**
	 * Instantiates the given fuzz element, compiles it with the given
	 * compiler, feeds it INPUT_LENGTH consecutive integers and returns
	 * everything it produced (including items produced while draining).
	 */
	private static List<Integer> run(FuzzElement element, StreamCompiler compiler) {
		OneToOneElement<Integer, Integer> graph = element.instantiate();
		CompiledStream<Integer, Integer> stream = compiler.compile(graph);
		ImmutableList.Builder<Integer> retval = ImmutableList.builder();
		Integer o;
		for (int i = 0; i < INPUT_LENGTH;) {
			//Busy-wait until the stream accepts the item, polling output in
			//between so the stream can't fill up and stall forever.
			if (stream.offer(i))
				++i;
			while ((o = stream.poll()) != null)
				retval.add(o);
		}
		stream.drain();
		while (!stream.isDrained())
			while ((o = stream.poll()) != null)
				retval.add(o);
		//One last pass in case items arrived between the final poll and the
		//stream reporting itself drained.
		while ((o = stream.poll()) != null)
			retval.add(o);
		return retval.build();
	}

	/**
	 * Fuzzing driver: endlessly generates random stream graphs, runs each
	 * under both the debug (reference) compiler and the real compiler, and
	 * stops at the first graph whose outputs differ, printing the offending
	 * graph and both outputs.
	 */
	public static void main(String[] args) {
		StreamCompiler debugSC = new DebugStreamCompiler();
		StreamCompiler compilerSC = new BlobHostStreamCompiler(new CompilerBlobFactory(), 1);
		Set<FuzzElement> completedCases = new HashSet<>();
		int tries, skips = 0;
		for (tries = 0; true; ++tries) {
			FuzzElement fuzz = StreamFuzzer.generate();
			//Skip graphs we've already checked (generation may repeat).
			if (!completedCases.add(fuzz)) {
				++skips;
				continue;
			}
			try {
				fuzz.instantiate().visit(new CheckVisitor());
			} catch (IllegalStreamGraphException ex) {
				//NOTE(review): a bad case is reported here but still executed
				//below -- confirm whether a 'continue' was intended.
				System.out.println("Fuzzer generated bad test case");
				ex.printStackTrace(System.out);
				fuzz.instantiate().visit(new PrintStreamVisitor(System.out));
			}
			List<Integer> debugOutput = run(fuzz, debugSC);
			List<Integer> compilerOutput = null;
			try {
				compilerOutput = run(fuzz, compilerSC);
			} catch (Throwable ex) {
				System.out.println("Compiler failed");
				ex.printStackTrace(System.out);
				//fall into the if below
			}
			if (!debugOutput.equals(compilerOutput)) {
				fuzz.instantiate().visit(new PrintStreamVisitor(System.out));
				System.out.println(fuzz.toJava());
				//TODO: show only elements where they differ
				System.out.println("Debug output: "+debugOutput);
				System.out.println("Compiler output: "+compilerOutput);
				break;
			}
			System.out.println(fuzz.hashCode()+" matched");
		}
		System.out.format("Generated %d cases, skipped %d (%f run rate)%n", tries, skips, ((double)tries-skips)/tries);
	}
}
package edu.mit.streamjit.impl.compiler;

import static com.google.common.base.Preconditions.*;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.ImmutableBiMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.ImmutableTable;
import com.google.common.collect.Iterables;
import com.google.common.collect.Table;
import edu.mit.streamjit.api.Filter;
import edu.mit.streamjit.api.Identity;
import edu.mit.streamjit.api.Joiner;
import edu.mit.streamjit.api.OneToOneElement;
import edu.mit.streamjit.api.Rate;
import edu.mit.streamjit.api.RoundrobinJoiner;
import edu.mit.streamjit.api.RoundrobinSplitter;
import edu.mit.streamjit.api.Splitjoin;
import edu.mit.streamjit.api.Splitter;
import edu.mit.streamjit.api.StatefulFilter;
import edu.mit.streamjit.api.Worker;
import edu.mit.streamjit.impl.blob.Blob;
import edu.mit.streamjit.impl.blob.Blob.Token;
import edu.mit.streamjit.impl.common.Configuration;
import edu.mit.streamjit.impl.common.ConnectWorkersVisitor;
import edu.mit.streamjit.impl.common.IOInfo;
import edu.mit.streamjit.impl.common.MessageConstraint;
import edu.mit.streamjit.impl.common.Workers;
import edu.mit.streamjit.impl.compiler.insts.ArrayLoadInst;
import edu.mit.streamjit.impl.compiler.insts.ArrayStoreInst;
import edu.mit.streamjit.impl.compiler.insts.BinaryInst;
import edu.mit.streamjit.impl.compiler.insts.CallInst;
import edu.mit.streamjit.impl.compiler.insts.CastInst;
import edu.mit.streamjit.impl.compiler.insts.Instruction;
import edu.mit.streamjit.impl.compiler.insts.JumpInst;
import edu.mit.streamjit.impl.compiler.insts.LoadInst;
import edu.mit.streamjit.impl.compiler.insts.NewArrayInst;
import edu.mit.streamjit.impl.compiler.insts.ReturnInst;
import edu.mit.streamjit.impl.compiler.insts.StoreInst;
import edu.mit.streamjit.impl.compiler.types.FieldType;
import edu.mit.streamjit.impl.compiler.types.MethodType;
import edu.mit.streamjit.impl.compiler.types.RegularType;
import edu.mit.streamjit.impl.interp.Channel;
import edu.mit.streamjit.impl.interp.ChannelFactory;
import edu.mit.streamjit.impl.interp.EmptyChannel;
import edu.mit.streamjit.util.Pair;
import edu.mit.streamjit.util.TopologicalSort;
import java.io.PrintWriter;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Compiles a set of workers into a single Blob: fuses workers into
 * StreamNodes, computes init and steady-state schedules, generates bytecode
 * work methods via the in-memory IR (Module/Klass/Method), and loads the
 * resulting class.
 *
 * @author Jeffrey Bosboom <jeffreybosboom@gmail.com>
 * @since 4/24/2013
 */
public final class Compiler {
	/**
	 * A counter used to generate package names unique to a given machine.
	 */
	private static final AtomicInteger PACKAGE_NUMBER = new AtomicInteger();
	//The workers being compiled into this blob.
	private final Set<Worker<?, ?>> workers;
	private final Configuration config;
	private final int maxNumCores;
	//Input/output edge descriptions for the worker set.
	private final ImmutableSet<IOInfo> ioinfo;
	//The unique entry and exit workers of the blob (asserted in the ctor).
	private final Worker<?, ?> firstWorker, lastWorker;
	/**
	 * Maps a worker to the StreamNode that contains it.  Updated by
	 * StreamNode's constructors.  (It would be static in
	 * StreamNode if statics of inner classes were supported and worked as
	 * though there was one instance per parent instance.)
	 */
	private final Map<Worker<?, ?>, StreamNode> streamNodes = new IdentityHashMap<>();
	//Per-worker generated work methods (filled in by makeWorkMethod()).
	private final Map<Worker<?, ?>, Method> workerWorkMethods = new IdentityHashMap<>();
	//Steady-state executions per StreamNode (computed by externalSchedule()).
	private ImmutableMap<StreamNode, Integer> schedule;
	//Initialization executions per worker (computed by computeInitSchedule()).
	private ImmutableMap<Worker<?, ?>, Integer> initSchedule;
	private final String packagePrefix;
	private final Module module = new Module();
	//The in-progress IR class implementing Blob.
	private final Klass blobKlass;
	/**
	 * The steady-state execution multiplier (the number of executions to run
	 * per synchronization).
	 */
	private final int multiplier;
	/**
	 * The work method type, which is void(Object[][], int[], int[], Object[][],
	 * int[], int[]). (There is no receiver argument.)
	 */
	private final MethodType workMethodType;
	//Buffer metadata per inter-node edge (computed by declareBuffers()).
	private ImmutableMap<Token, BufferData> buffers;
	public Compiler(Set<Worker<?, ?>> workers, Configuration config, int maxNumCores) {
		this.workers = workers;
		this.config = config;
		this.maxNumCores = maxNumCores;
		this.ioinfo = IOInfo.create(workers);
		//We can only have one first and last worker, though they can have
		//multiple inputs/outputs.
		Worker<?, ?> firstWorker = null, lastWorker = null;
		for (IOInfo io : ioinfo)
			if (io.isInput())
				if (firstWorker == null)
					firstWorker = io.downstream();
				else
					checkArgument(firstWorker == io.downstream(), "two input workers");
			else
				if (lastWorker == null)
					lastWorker = io.upstream();
				else
					checkArgument(lastWorker == io.upstream(), "two output workers");
		assert firstWorker != null : "Can't happen! No first worker?";
		assert lastWorker != null : "Can't happen! No last worker?";
		this.firstWorker = firstWorker;
		this.lastWorker = lastWorker;
		//We require that all rates of workers in our set are fixed, except for
		//the output rates of the last worker.
		for (Worker<?, ?> w : workers) {
			for (Rate r : w.getPeekRates())
				checkArgument(r.isFixed());
			for (Rate r : w.getPopRates())
				checkArgument(r.isFixed());
			if (w != lastWorker)
				for (Rate r : w.getPushRates())
					checkArgument(r.isFixed());
		}
		//We don't support messaging.
		List<MessageConstraint> constraints = MessageConstraint.findConstraints(firstWorker);
		for (MessageConstraint c : constraints) {
			checkArgument(!workers.contains(c.getSender()));
			checkArgument(!workers.contains(c.getRecipient()));
		}
		this.packagePrefix = "compiler"+PACKAGE_NUMBER.getAndIncrement()+".";
		this.blobKlass = new Klass(packagePrefix + "Blob", module.getKlass(Object.class), Collections.singletonList(module.getKlass(Blob.class)), module);
		this.multiplier = config.getParameter("multiplier", Configuration.IntParameter.class).getValue();
		this.workMethodType = module.types().getMethodType(void.class, Object[][].class, int[].class, int[].class, Object[][].class, int[].class, int[].class);
	}

	/**
	 * Runs the full compilation pipeline (fusion, scheduling, buffer
	 * declaration, work-method generation, plumbing) and returns the
	 * instantiated Blob.
	 */
	public Blob compile() {
		for (Worker<?, ?> w : workers)
			new StreamNode(w); //adds itself to streamNodes map
		fuse();
		//Compute per-node steady state execution counts.
		for (StreamNode n : ImmutableSet.copyOf(streamNodes.values()))
			n.internalSchedule();
		externalSchedule();
		allocateCores();
		declareBuffers();
		computeInitSchedule();
		//We generate a work method for each worker (which may result in
		//duplicates, but is required in general to handle worker fields), then
		//generate core code that stitches them together and does any
		//required data movement.
		for (Worker<?, ?> w : streamNodes.keySet())
			makeWorkMethod(w);
		for (StreamNode n : ImmutableSet.copyOf(streamNodes.values()))
			n.makeWorkMethod();
		generateCoreCode();
		addBlobPlumbing();
		//Debug dump of the generated class before instantiation.
		blobKlass.dump(new PrintWriter(System.out, true));
		return instantiateBlob();
	}

	/**
	 * Fuses StreamNodes as directed by the configuration.
	 */
	private void fuse() {
		//TODO: some kind of worklist algorithm that fuses until no more fusion
		//possible, to handle state, peeking, or attempts to fuse with more than
		//one predecessor.
	}

	/**
	 * Allocates StreamNodes to cores as directed by the configuration, possibly
	 * fissing them (if assigning one node to multiple cores).
	 * TODO: do we want to permit unequal fiss allocations (e.g., 6 SSEs here,
	 * 3 SSEs there)?
	 */
	private void allocateCores() {
		//TODO
		//Note that any node containing a splitter or joiner can only go on one
		//core (as it has to synchronize for its inputs and outputs).
		//For now, just put everything on core 0.
		for (StreamNode n : ImmutableSet.copyOf(streamNodes.values()))
			n.cores.add(0);
	}

	/**
	 * Computes buffer capacity and initial sizes, declaring (but not
	 * arranging for initialization of) the blob class fields pointing to the
	 * buffers.
	 */
	private void declareBuffers() {
		ImmutableMap.Builder<Token, BufferData> builder = ImmutableMap.<Token, BufferData>builder();
		for (Pair<Worker<?, ?>, Worker<?, ?>> p : allWorkerPairsInBlob())
			//Only declare buffers for worker pairs not in the same node.  If
			//a node needs internal buffering, it handles that itself.  (This
			//implies that peeking filters cannot be fused upwards, but that's
			//a bad idea anyway.)
			if (!streamNodes.get(p.first).equals(streamNodes.get(p.second))) {
				Token t = new Token(p.first, p.second);
				builder.put(t, makeBuffers(t, p.first, p.second));
			}
		//Make buffers for the inputs and outputs of this blob (which may or
		//may not be overall inputs of the stream graph).
		for (IOInfo info : ioinfo)
			if (firstWorker.equals(info.downstream()) || lastWorker.equals(info.upstream()))
				builder.put(info.token(), makeBuffers(info.token(), info.upstream(), info.downstream()));
		buffers = builder.build();
	}

	/**
	 * Creates buffers in the blobKlass for the given workers, returning a
	 * BufferData describing the buffers created.
	 *
	 * One of upstream xor downstream may be null for the overall input and
	 * output.
	 */
	private BufferData makeBuffers(Token token, Worker<?, ?> upstream, Worker<?, ?> downstream) {
		assert upstream != null || downstream != null;
		assert upstream == null || token.getUpstreamIdentifier() == Workers.getIdentifier(upstream);
		assert downstream == null || token.getDownstreamIdentifier() == Workers.getIdentifier(downstream);
		final String upstreamId = upstream != null ? Integer.toString(Workers.getIdentifier(upstream)) : "input";
		final String downstreamId = downstream != null ? Integer.toString(Workers.getIdentifier(downstream)) : "output";
		final StreamNode upstreamNode = streamNodes.get(upstream);
		final StreamNode downstreamNode = streamNodes.get(downstream);
		RegularType objArrayTy = module.types().getRegularType(Object[].class);
		String fieldName = "buf_"+upstreamId+"_"+downstreamId;
		assert downstreamNode != upstreamNode;
		//The overall output has no reader field; the overall input no writer.
		String readerBufferFieldName = token.isOverallOutput() ? null : fieldName + "r";
		String writerBufferFieldName = token.isOverallInput() ? null : fieldName + "w";
		for (String field : new String[]{readerBufferFieldName, writerBufferFieldName})
			if (field != null)
				new Field(objArrayTy, field, EnumSet.of(Modifier.PRIVATE, Modifier.STATIC), blobKlass);
		int capacity, initialSize, excessPeeks;
		if (downstream != null) {
			//If upstream is null, it's the global input, channel 0.
			int chanIdx = upstream != null ? Workers.getPredecessors(downstream).indexOf(upstream) : 0;
			assert chanIdx != -1;
			int pop = downstream.getPopRates().get(chanIdx).max(), peek = downstream.getPeekRates().get(chanIdx).max();
			excessPeeks = Math.max(peek - pop, 0);
			//Capacity covers a full steady-state interval plus extra peeks.
			capacity = downstreamNode.execsPerNodeExec.get(downstream) * schedule.get(downstreamNode) * multiplier * pop + excessPeeks;
			initialSize = capacity;
		} else { //downstream == null
			//If downstream is null, it's the global output, channel 0.
			int push = upstream.getPushRates().get(0).max();
			capacity = upstreamNode.execsPerNodeExec.get(upstream) * schedule.get(upstreamNode) * multiplier * push;
			initialSize = 0;
			excessPeeks = 0;
		}
		return new BufferData(token, readerBufferFieldName, writerBufferFieldName, capacity, initialSize, excessPeeks);
	}

	/**
	 * Computes the initialization schedule using the scheduler.
	 */
	private void computeInitSchedule() {
		ImmutableList.Builder<Scheduler.Channel<Worker<?, ?>>> builder = ImmutableList.<Scheduler.Channel<Worker<?, ?>>>builder();
		for (Pair<Worker<?, ?>, Worker<?, ?>> p : allWorkerPairsInBlob()) {
			//Locate the channel indices on each side of the edge.
			int i = Workers.getSuccessors(p.first).indexOf(p.second);
			int j = Workers.getPredecessors(p.second).indexOf(p.first);
			int pushRate = p.first.getPushRates().get(i).max();
			int popRate = p.second.getPopRates().get(j).max();
			builder.add(new Scheduler.Channel<>(p.first, p.second, pushRate, popRate, buffers.get(new Token(p.first, p.second)).initialSize));
		}
		initSchedule = Scheduler.schedule(builder.build());
	}

	/**
	 * Returns all (upstream, downstream) worker pairs where both workers are
	 * in this blob.
	 */
	@SuppressWarnings({"unchecked", "rawtypes"})
	private ImmutableList<Pair<Worker<?, ?>, Worker<?, ?>>> allWorkerPairsInBlob() {
		ImmutableList.Builder<Pair<Worker<?, ?>, Worker<?, ?>>> builder = ImmutableList.<Pair<Worker<?, ?>, Worker<?, ?>>>builder();
		for (Worker<?, ?> u : workers)
			for (Worker<?, ?> d : Workers.getSuccessors(u))
				if (workers.contains(d))
					builder.add(new Pair(u, d));
		return builder.build();
	}

	/**
	 * Make the work method for the given worker.  We actually make two methods
	 * here: first we make a copy with a dummy receiver argument, just to have a
	 * copy to work with.  After remapping every use of that receiver (remapping
	 * field accesses to the worker's static fields, remapping JIT-hooks to
	 * their implementations, and remapping utility methods in the worker class
	 * recursively), we then create the actual work method without the receiver
	 * argument.
	 * @param worker
	 */
	private void makeWorkMethod(Worker<?, ?> worker) {
		StreamNode node = streamNodes.get(worker);
		int id = Workers.getIdentifier(worker);
		//NOTE(review): numInputs/numOutputs appear unused below -- confirm.
		int numInputs = getNumInputs(worker);
		int numOutputs = getNumOutputs(worker);
		Klass workerKlass = module.getKlass(worker.getClass());
		Method oldWork = workerKlass.getMethod("work", module.types().getMethodType(void.class, worker.getClass()));
		oldWork.resolve();
		//Add a dummy receiver argument so we can clone the user's work method.
		MethodType rworkMethodType = workMethodType.prependArgument(module.types().getRegularType(workerKlass));
		Method newWork = new Method("rwork"+id, rworkMethodType, EnumSet.of(Modifier.PRIVATE, Modifier.STATIC), blobKlass);
		newWork.arguments().get(0).setName("dummyReceiver");
		newWork.arguments().get(1).setName("ichannels");
		newWork.arguments().get(2).setName("ioffsets");
		newWork.arguments().get(3).setName("iincrements");
		newWork.arguments().get(4).setName("ochannels");
		newWork.arguments().get(5).setName("ooffsets");
		newWork.arguments().get(6).setName("oincrements");
		Map<Value, Value> vmap = new IdentityHashMap<>();
		vmap.put(oldWork.arguments().get(0), newWork.arguments().get(0));
		Cloning.cloneMethod(oldWork, newWork, vmap);
		BasicBlock entryBlock = new BasicBlock(module, "entry");
		newWork.basicBlocks().add(0, entryBlock);
		//We make copies of the offset arrays.  (int[].clone() returns Object,
		//so we have to cast.)
		Method clone = Iterables.getOnlyElement(module.getKlass(Object.class).getMethods("clone"));
		CallInst ioffsetCloneCall = new CallInst(clone, newWork.arguments().get(2));
		entryBlock.instructions().add(ioffsetCloneCall);
		CastInst ioffsetCast = new CastInst(module.types().getArrayType(int[].class), ioffsetCloneCall);
		entryBlock.instructions().add(ioffsetCast);
		LocalVariable ioffsetCopy = new LocalVariable((RegularType)ioffsetCast.getType(), "ioffsetCopy", newWork);
		StoreInst popCountInit = new StoreInst(ioffsetCopy, ioffsetCast);
		popCountInit.setName("ioffsetInit");
		entryBlock.instructions().add(popCountInit);
		CallInst ooffsetCloneCall = new CallInst(clone, newWork.arguments().get(5));
		entryBlock.instructions().add(ooffsetCloneCall);
		CastInst ooffsetCast = new CastInst(module.types().getArrayType(int[].class), ooffsetCloneCall);
		entryBlock.instructions().add(ooffsetCast);
		LocalVariable ooffsetCopy = new LocalVariable((RegularType)ooffsetCast.getType(), "ooffsetCopy", newWork);
		StoreInst pushCountInit = new StoreInst(ooffsetCopy, ooffsetCast);
		pushCountInit.setName("ooffsetInit");
		entryBlock.instructions().add(pushCountInit);
		//Jump from our new entry block into the cloned method's old entry.
		entryBlock.instructions().add(new JumpInst(newWork.basicBlocks().get(1)));
		//Remap stuff in rwork.
		for (BasicBlock b : newWork.basicBlocks())
			for (Instruction i : ImmutableList.copyOf(b.instructions()))
				if (Iterables.contains(i.operands(), newWork.arguments().get(0)))
					remapEliminiatingReceiver(i, worker);
		//At this point, we've replaced all uses of the dummy receiver argument.
		assert newWork.arguments().get(0).uses().isEmpty();
		Method trueWork = new Method("work"+id, workMethodType, EnumSet.of(Modifier.PRIVATE, Modifier.STATIC), blobKlass);
		vmap.clear();
		vmap.put(newWork.arguments().get(0), null);
		for (int i = 1; i < newWork.arguments().size(); ++i)
			vmap.put(newWork.arguments().get(i), trueWork.arguments().get(i-1));
		Cloning.cloneMethod(newWork, trueWork, vmap);
		workerWorkMethods.put(worker, trueWork);
		newWork.eraseFromParent();
	}

	/**
	 * Rewrites a single instruction that uses the dummy receiver: pop/push
	 * calls become explicit array loads/stores against the channel arrays,
	 * inputs()/outputs() become constants, and worker field loads are
	 * redirected to the blob class's static fields.
	 */
	private void remapEliminiatingReceiver(Instruction inst, Worker<?, ?> worker) {
		BasicBlock block = inst.getParent();
		Method rwork = inst.getParent().getParent();
		if (inst instanceof CallInst) {
			CallInst ci = (CallInst)inst;
			Method method = ci.getMethod();
			Klass filterKlass = module.getKlass(Filter.class);
			Klass splitterKlass = module.getKlass(Splitter.class);
			Klass joinerKlass = module.getKlass(Joiner.class);
			Method pop1Filter = filterKlass.getMethod("pop", module.types().getMethodType(Object.class, Filter.class));
			assert pop1Filter != null;
			Method pop1Splitter = splitterKlass.getMethod("pop", module.types().getMethodType(Object.class, Splitter.class));
			assert pop1Splitter != null;
			Method push1Filter = filterKlass.getMethod("push", module.types().getMethodType(void.class, Filter.class, Object.class));
			assert push1Filter != null;
			Method push1Joiner = joinerKlass.getMethod("push", module.types().getMethodType(void.class, Joiner.class, Object.class));
			assert push1Joiner != null;
			Method pop2 = joinerKlass.getMethod("pop", module.types().getMethodType(Object.class, Joiner.class, int.class));
			assert pop2 != null;
			Method push2 = splitterKlass.getMethod("push", module.types().getMethodType(void.class, Splitter.class, int.class, Object.class));
			assert push2 != null;
			Method inputs = joinerKlass.getMethod("inputs", module.types().getMethodType(int.class, Joiner.class));
			assert inputs != null;
			Method outputs = splitterKlass.getMethod("outputs", module.types().getMethodType(int.class, Splitter.class));
			assert outputs != null;
			Method channelPush = module.getKlass(Channel.class).getMethod("push", module.types().getMethodType(void.class, Channel.class, Object.class));
			assert channelPush != null;
			if (method.equals(pop1Filter) || method.equals(pop1Splitter) || method.equals(pop2)) {
				//Single-input pops read channel 0; Joiner.pop(int) names one.
				Value channelNumber = method.equals(pop2) ? ci.getArgument(1) : module.constants().getSmallestIntConstant(0);
				Argument ichannels = rwork.getArgument("ichannels");
				ArrayLoadInst channel = new ArrayLoadInst(ichannels, channelNumber);
				LoadInst ioffsets = new LoadInst(rwork.getLocalVariable("ioffsetCopy"));
				ArrayLoadInst offset = new ArrayLoadInst(ioffsets, channelNumber);
				ArrayLoadInst item = new ArrayLoadInst(channel, offset);
				item.setName("poppedItem");
				//Advance the channel's read offset by its increment.
				Argument iincrements = rwork.getArgument("iincrements");
				ArrayLoadInst increment = new ArrayLoadInst(iincrements, channelNumber);
				BinaryInst newOffset = new BinaryInst(offset, BinaryInst.Operation.ADD, increment);
				ArrayStoreInst storeNewOffset = new ArrayStoreInst(ioffsets, channelNumber, newOffset);
				inst.replaceInstWithInsts(item, channel, ioffsets, offset, item, increment, newOffset, storeNewOffset);
			} else if ((method.equals(push1Filter) || method.equals(push1Joiner)) || method.equals(push2)) {
				Value channelNumber = method.equals(push2) ? ci.getArgument(1) : module.constants().getSmallestIntConstant(0);
				Value item = method.equals(push2) ? ci.getArgument(2) : ci.getArgument(1);
				Argument ochannels = rwork.getArgument("ochannels");
				ArrayLoadInst channel = new ArrayLoadInst(ochannels, channelNumber);
				LoadInst ooffsets = new LoadInst(rwork.getLocalVariable("ooffsetCopy"));
				ArrayLoadInst offset = new ArrayLoadInst(ooffsets, channelNumber);
				ArrayStoreInst store = new ArrayStoreInst(channel, offset, item);
				//Advance the channel's write offset by its increment.
				Argument oincrements = rwork.getArgument("oincrements");
				ArrayLoadInst increment = new ArrayLoadInst(oincrements, channelNumber);
				BinaryInst newOffset = new BinaryInst(offset, BinaryInst.Operation.ADD, increment);
				ArrayStoreInst storeNewOffset = new ArrayStoreInst(ooffsets, channelNumber, newOffset);
				inst.replaceInstWithInsts(store, channel, ooffsets, offset, store, increment, newOffset, storeNewOffset);
			} else if (method.equals(outputs)) {
				inst.replaceInstWithValue(module.constants().getSmallestIntConstant(getNumOutputs(worker)));
			} else if (method.equals(inputs)) {
				inst.replaceInstWithValue(module.constants().getSmallestIntConstant(getNumInputs(worker)));
			} else
				throw new AssertionError(inst);
		} else if (inst instanceof LoadInst) {
			LoadInst li = (LoadInst)inst;
			assert li.getLocation() instanceof Field;
			//Redirect the worker-instance field load to the blob static field.
			LoadInst replacement = new LoadInst(streamNodes.get(worker).fields.get(worker, (Field)li.getLocation()));
			li.replaceInstWithInst(replacement);
		} else
			throw new AssertionError("Couldn't eliminate reciever: "+inst);
	}

	private int getNumInputs(Worker<?, ?> w) {
		return Workers.getInputChannels(w).size();
	}

	private int getNumOutputs(Worker<?, ?> w) {
		return Workers.getOutputChannels(w).size();
	}

	private void generateCoreCode() {
	}

	/**
	 * Adds required plumbing code to the blob class, such as the ctor and the
	 * implementations of the Blob methods.
	 */
	private void addBlobPlumbing() {
		//ctor
		Method init = new Method("<init>", module.types().getMethodType(module.types().getType(blobKlass)), EnumSet.noneOf(Modifier.class), blobKlass);
		BasicBlock b = new BasicBlock(module);
		init.basicBlocks().add(b);
		//Just call Object's constructor and return.
		Method objCtor = module.getKlass(Object.class).getMethods("<init>").iterator().next();
		b.instructions().add(new CallInst(objCtor));
		b.instructions().add(new ReturnInst(module.types().getVoidType()));
		//TODO: other Blob interface methods
	}

	/**
	 * Loads the generated class through a ModuleClassLoader and constructs an
	 * instance via its (non-public) no-arg constructor.
	 */
	private Blob instantiateBlob() {
		ModuleClassLoader mcl = new ModuleClassLoader(module);
		try {
			Class<?> blobClass = mcl.loadClass(blobKlass.getName());
			Constructor<?> ctor = blobClass.getDeclaredConstructor();
			ctor.setAccessible(true);
			return (Blob)ctor.newInstance();
		} catch (ClassNotFoundException | NoSuchMethodException | InvocationTargetException | IllegalAccessException | InstantiationException ex) {
			throw new AssertionError(ex);
		}
	}

	/**
	 * Computes the steady-state schedule across StreamNodes from the channels
	 * between every pair of nodes.
	 */
	private void externalSchedule() {
		ImmutableSet<StreamNode> nodes = ImmutableSet.copyOf(streamNodes.values());
		ImmutableList.Builder<Scheduler.Channel<StreamNode>> channels = ImmutableList.<Scheduler.Channel<StreamNode>>builder();
		for (StreamNode a : nodes)
			for (StreamNode b : nodes)
				channels.addAll(a.findChannels(b));
		schedule = Scheduler.schedule(channels.build());
		System.out.println(schedule);
	}

	/**
	 * A fusion unit: one or more workers compiled into a single node work
	 * method, scheduled and assigned to cores as a unit.
	 */
	private final class StreamNode {
		private final int id;
		private final ImmutableSet<Worker<?, ?>> workers;
		private final ImmutableSortedSet<IOInfo> ioinfo;
		private ImmutableMap<Worker<?, ?>, ImmutableSortedSet<IOInfo>> inputIOs, outputIOs;
		/**
		 * The number of individual worker executions per steady-state execution
		 * of the StreamNode.
		 */
		private ImmutableMap<Worker<?, ?>, Integer> execsPerNodeExec;
		/**
		 * This node's work method.  May be null if the method hasn't been
		 * created yet.  TODO: if we put multiplicities inside work methods,
		 * we'll need one per core.  Alternately we could put them outside and
		 * inline/specialize as a postprocessing step.
		 */
		private Method workMethod;
		/**
		 * Maps each worker's fields to the corresponding fields in the blob
		 * class.
		 */
		private final Table<Worker<?, ?>, Field, Field> fields = HashBasedTable.create();
		/**
		 * Maps each worker's fields to the actual values of those fields.
		 */
		private final Table<Worker<?, ?>, Field, Object> fieldValues = HashBasedTable.create();
		//Core numbers this node runs on (filled by allocateCores()).
		private final List<Integer> cores = new ArrayList<>();
		private StreamNode(Worker<?, ?> worker) {
			this.id = Workers.getIdentifier(worker);
			this.workers = (ImmutableSet<Worker<?, ?>>)ImmutableSet.of(worker);
			this.ioinfo = ImmutableSortedSet.copyOf(IOInfo.TOKEN_SORT, IOInfo.create(workers));
			buildWorkerData(worker);
			assert !streamNodes.containsKey(worker);
			//Register ourselves in the enclosing Compiler's worker->node map.
			streamNodes.put(worker, this);
		}
		/**
		 * Fuses two StreamNodes.  They should not yet have been scheduled or
		 * had work functions constructed.
		 */
		private StreamNode(StreamNode a, StreamNode b) {
			this.id = Math.min(a.id, b.id);
			this.workers = ImmutableSet.<Worker<?, ?>>builder().addAll(a.workers).addAll(b.workers).build();
			this.ioinfo = ImmutableSortedSet.copyOf(IOInfo.TOKEN_SORT, IOInfo.create(workers));
			this.fields.putAll(a.fields);
			this.fields.putAll(b.fields);
			this.fieldValues.putAll(a.fieldValues);
			this.fieldValues.putAll(b.fieldValues);
			//Re-point every fused worker at the merged node.
			for (Worker<?, ?> w : a.workers)
				streamNodes.put(w, this);
			for (Worker<?, ?> w : b.workers)
				streamNodes.put(w, this);
		}
		/**
		 * Compute the steady-state multiplicities of each worker in this node
		 * for each execution of the node.
		 */
		public void internalSchedule() {
			if (workers.size() == 1) {
				this.execsPerNodeExec = ImmutableMap.<Worker<?, ?>, Integer>builder().put(workers.iterator().next(), 1).build();
				return;
			}
			//Find all the channels within this StreamNode.
			List<Scheduler.Channel<Worker<?, ?>>> channels = new ArrayList<>();
			for (Worker<?, ?> w : workers) {
				@SuppressWarnings("unchecked")
				List<Worker<?, ?>> succs = (List<Worker<?, ?>>)Workers.getSuccessors(w);
				for (int i = 0; i < succs.size(); ++i) {
					Worker<?, ?> s = succs.get(i);
					if (workers.contains(s)) {
						int j = Workers.getPredecessors(s).indexOf(w);
						assert j != -1;
						channels.add(new Scheduler.Channel<>(w, s, w.getPushRates().get(i).max(), s.getPopRates().get(j).max()));
					}
				}
			}
			this.execsPerNodeExec = Scheduler.schedule(channels);
		}
		/**
		 * Returns a list of scheduler channels from this node to the given
		 * node, with rates corrected for the internal schedule for each node.
		 */
		public List<Scheduler.Channel<StreamNode>> findChannels(StreamNode other) {
			ImmutableList.Builder<Scheduler.Channel<StreamNode>> retval = ImmutableList.<Scheduler.Channel<StreamNode>>builder();
			for (IOInfo info : ioinfo) {
				if (info.isOutput() && other.workers.contains(info.downstream())) {
					int i = Workers.getSuccessors(info.upstream()).indexOf(info.downstream());
					assert i != -1;
					int j = Workers.getPredecessors(info.downstream()).indexOf(info.upstream());
					assert j != -1;
					retval.add(new Scheduler.Channel<>(this, other, info.upstream().getPushRates().get(i).max() * execsPerNodeExec.get(info.upstream()), info.downstream().getPopRates().get(j).max() * other.execsPerNodeExec.get(info.downstream())));
				}
			}
			return retval.build();
		}
		/**
		 * Mirrors the worker's instance fields as static fields on the blob
		 * class and captures their current values for later initialization.
		 */
		private void buildWorkerData(Worker<?, ?> worker) {
			Klass workerKlass = module.getKlass(worker.getClass());
			//Build the new fields.
			for (Field f : workerKlass.fields()) {
				java.lang.reflect.Field rf = f.getBackingField();
				Set<Modifier> modifiers = EnumSet.of(Modifier.PRIVATE, Modifier.STATIC);
				//We can make the new field final if the original field is final or
				//if the worker isn't stateful.
if (f.modifiers().contains(Modifier.FINAL) || !(worker instanceof StatefulFilter)) modifiers.add(Modifier.FINAL); Field nf = new Field(f.getType().getFieldType(), "w" + id + "$" + f.getName(), modifiers, blobKlass); fields.put(worker, f, nf); try { rf.setAccessible(true); Object value = rf.get(worker); fieldValues.put(worker, f, value); } catch (IllegalAccessException ex) { //Either setAccessible will succeed or we'll throw a //SecurityException, so we'll never get here. throw new AssertionError("Can't happen!", ex); } } } private void makeWorkMethod() { assert workMethod == null : "remaking node work method"; mapIOInfo(); MethodType nodeWorkMethodType = module.types().getMethodType(module.types().getVoidType(), module.types().getRegularType(int.class)); workMethod = new Method("nodework"+this.id, nodeWorkMethodType, EnumSet.of(Modifier.PRIVATE, Modifier.STATIC), blobKlass); Argument multiple = Iterables.getOnlyElement(workMethod.arguments()); multiple.setName("multiple"); BasicBlock entryBlock = new BasicBlock(module, "entry"); workMethod.basicBlocks().add(entryBlock); Map<Token, Value> localBuffers = new HashMap<>(); ImmutableList<Worker<?, ?>> orderedWorkers = TopologicalSort.sort(new ArrayList<>(workers), new TopologicalSort.PartialOrder<Worker<?, ?>>() { @Override public boolean lessThan(Worker<?, ?> a, Worker<?, ?> b) { return Workers.getAllSuccessors(a).contains(b); } }); for (Worker<?, ?> w : orderedWorkers) { int wid = Workers.getIdentifier(w); //Input buffers List<Worker<?, ?>> preds = (List<Worker<?, ?>>)Workers.getPredecessors(w); List<Value> ichannels; List<Value> ioffsets = new ArrayList<>(); if (preds.isEmpty()) { ichannels = ImmutableList.<Value>of(getReaderBuffer(Token.createOverallInputToken(w))); int r = w.getPopRates().get(0).max() * execsPerNodeExec.get(w); BinaryInst offset = new BinaryInst(multiple, BinaryInst.Operation.MUL, module.constants().getConstant(r)); offset.setName("ioffset0"); entryBlock.instructions().add(offset); 
ioffsets.add(offset); } else { ichannels = new ArrayList<>(preds.size()); for (int chanIdx = 0; chanIdx < preds.size(); ++chanIdx) { Worker<?, ?> p = preds.get(chanIdx); Token t = new Token(p, w); if (workers.contains(p)) { assert !buffers.containsKey(t) : "BufferData created for internal buffer"; Value localBuffer = localBuffers.get(new Token(p, w)); assert localBuffer != null : "Local buffer needed before created"; ichannels.add(localBuffer); ioffsets.add(module.constants().getConstant(0)); } else { ichannels.add(getReaderBuffer(t)); int r = w.getPopRates().get(chanIdx).max() * execsPerNodeExec.get(w); BinaryInst offset = new BinaryInst(multiple, BinaryInst.Operation.MUL, module.constants().getConstant(r)); offset.setName("ioffset"+chanIdx); entryBlock.instructions().add(offset); ioffsets.add(offset); } } } Pair<Value, List<Instruction>> ichannelArray = createChannelArray(ichannels); ichannelArray.first.setName("ichannels_"+wid); entryBlock.instructions().addAll(ichannelArray.second); Pair<Value, List<Instruction>> ioffsetArray = createIntArray(ioffsets); ioffsetArray.first.setName("ioffsets_"+wid); entryBlock.instructions().addAll(ioffsetArray.second); Pair<Value, List<Instruction>> iincrementArray = createIntArray(Collections.<Value>nCopies(ioffsets.size(), module.constants().getConstant(1))); iincrementArray.first.setName("iincrements_"+wid); entryBlock.instructions().addAll(iincrementArray.second); //Output buffers List<Worker<?, ?>> succs = (List<Worker<?, ?>>)Workers.getSuccessors(w); List<Value> ochannels; List<Value> ooffsets = new ArrayList<>(); if (succs.isEmpty()) { ochannels = ImmutableList.<Value>of(getWriterBuffer(Token.createOverallOutputToken(w))); int r = w.getPushRates().get(0).max() * execsPerNodeExec.get(w); BinaryInst offset = new BinaryInst(multiple, BinaryInst.Operation.MUL, module.constants().getConstant(r)); offset.setName("ooffset0"); entryBlock.instructions().add(offset); ooffsets.add(offset); } else { ochannels = new 
ArrayList<>(preds.size()); for (int chanIdx = 0; chanIdx < succs.size(); ++chanIdx) { Worker<?, ?> s = succs.get(chanIdx); Token t = new Token(w, s); if (workers.contains(s)) { assert buffers.containsKey(t) : "BufferData created for internal buffer"; Value localBuffer = localBuffers.get(new Token(w, s)); assert localBuffer != null : "Local buffer needed before created"; ochannels.add(localBuffer); ooffsets.add(module.constants().getConstant(0)); } else { ochannels.add(getWriterBuffer(t)); int r = w.getPushRates().get(chanIdx).max() * execsPerNodeExec.get(w); BinaryInst offset0 = new BinaryInst(multiple, BinaryInst.Operation.MUL, module.constants().getConstant(r)); //Leave room to copy the excess peeks in front when //it's time to flip. BinaryInst offset = new BinaryInst(offset0, BinaryInst.Operation.ADD, module.constants().getConstant(buffers.get(t).excessPeeks)); offset.setName("ooffset"+chanIdx); entryBlock.instructions().add(offset0); entryBlock.instructions().add(offset); ooffsets.add(offset); } } } Pair<Value, List<Instruction>> ochannelArray = createChannelArray(ochannels); ochannelArray.first.setName("ochannels_"+wid); entryBlock.instructions().addAll(ochannelArray.second); Pair<Value, List<Instruction>> ooffsetArray = createIntArray(ooffsets); ooffsetArray.first.setName("ooffsets_"+wid); entryBlock.instructions().addAll(ooffsetArray.second); Pair<Value, List<Instruction>> oincrementArray = createIntArray(Collections.<Value>nCopies(ooffsets.size(), module.constants().getConstant(1))); oincrementArray.first.setName("oincrements_"+wid); entryBlock.instructions().addAll(oincrementArray.second); for (int i = 0; i < execsPerNodeExec.get(w); ++i) { CallInst ci = new CallInst(workerWorkMethods.get(w), ichannelArray.first, ioffsetArray.first, iincrementArray.first, ochannelArray.first, ooffsetArray.first, oincrementArray.first); entryBlock.instructions().add(ci); } } entryBlock.instructions().add(new ReturnInst(module.types().getVoidType())); } private Field 
	getReaderBuffer(Token t) {
		// The blob field holding the buffer read for edge t.
		return blobKlass.getField(buffers.get(t).readerBufferFieldName);
	}

	private Field getWriterBuffer(Token t) {
		// The blob field holding the buffer written for edge t.
		return blobKlass.getField(buffers.get(t).writerBufferFieldName);
	}

	/**
	 * Emits instructions building an array of the given channel values,
	 * loading field-typed values first. Returns the array value together with
	 * the instructions that construct it (not yet added to any block).
	 */
	private Pair<Value, List<Instruction>> createChannelArray(List<Value> channels) {
		ImmutableList.Builder<Instruction> insts = ImmutableList.builder();
		// NOTE(review): Object[][].class here vs int[].class in createIntArray --
		// presumably getArrayType expects the array class one dimension above the
		// element type; confirm against the Types API.
		NewArrayInst nai = new NewArrayInst(module.types().getArrayType(Object[][].class), module.constants().getConstant(channels.size()));
		insts.add(nai);
		for (int i = 0; i < channels.size(); ++i) {
			Value toStore = channels.get(i);
			//If the value is a field, load it first.
			if (toStore.getType() instanceof FieldType) {
				LoadInst li = new LoadInst((Field)toStore);
				insts.add(li);
				toStore = li;
			}
			ArrayStoreInst asi = new ArrayStoreInst(nai, module.constants().getConstant(i), toStore);
			insts.add(asi);
		}
		return new Pair<Value, List<Instruction>>(nai, insts.build());
	}

	/**
	 * Emits instructions building an int array of the given values. Returns
	 * the array value together with the instructions that construct it.
	 */
	private Pair<Value, List<Instruction>> createIntArray(List<Value> ints) {
		ImmutableList.Builder<Instruction> insts = ImmutableList.builder();
		NewArrayInst nai = new NewArrayInst(module.types().getArrayType(int[].class), module.constants().getConstant(ints.size()));
		insts.add(nai);
		for (int i = 0; i < ints.size(); ++i) {
			Value toStore = ints.get(i);
			ArrayStoreInst asi = new ArrayStoreInst(nai, module.constants().getConstant(i), toStore);
			insts.add(asi);
		}
		return new Pair<Value, List<Instruction>>(nai, insts.build());
	}

	/**
	 * Partitions this node's IOInfo edges into per-worker input and output
	 * sets (sorted by token), populating inputIOs and outputIOs.
	 */
	private void mapIOInfo() {
		ImmutableMap.Builder<Worker<?, ?>, ImmutableSortedSet<IOInfo>> inputIOs = ImmutableMap.builder(),
				outputIOs = ImmutableMap.builder();
		for (Worker<?, ?> w : workers) {
			ImmutableSortedSet.Builder<IOInfo> inputs = ImmutableSortedSet.orderedBy(IOInfo.TOKEN_SORT),
					outputs = ImmutableSortedSet.orderedBy(IOInfo.TOKEN_SORT);
			for (IOInfo info : ioinfo)
				if (w.equals(info.downstream()))
					inputs.add(info);
				else if (w.equals(info.upstream()))
					outputs.add(info);
			inputIOs.put(w, inputs.build());
			outputIOs.put(w, outputs.build());
		}
		this.inputIOs = inputIOs.build();
		this.outputIOs = outputIOs.build();
	}
}

/**
 * Holds information about buffers. This class is used both during
 * compilation and at runtime, so it doesn't directly refer to the Compiler
 * or IR-level constructs, to ensure they can be garbage collected when
 * compilation finishes.
 */
private static final class BufferData {
	/**
	 * The Token for the edge this buffer is on.
	 */
	public final Token token;
	/**
	 * The names of the reader and writer buffers. The reader buffer is the
	 * one initially filled with data items for peeking purposes.
	 *
	 * The overall input buffer has no writer buffer; the overall output
	 * buffer has no reader buffer.
	 */
	public final String readerBufferFieldName, writerBufferFieldName;
	/**
	 * The buffer capacity.
	 */
	public final int capacity;
	/**
	 * The buffer initial size. This is generally less than the capacity
	 * for intracore buffers introduced by peeking. Intercore buffers
	 * always get filled to capacity.
	 */
	public final int initialSize;
	/**
	 * The number of items peeked at but not popped; that is, the number of
	 * unconsumed items in the reader buffer that must be copied to the
	 * front of the writer buffer when flipping buffers.
	 */
	public final int excessPeeks;

	private BufferData(Token token, String readerBufferFieldName, String writerBufferFieldName, int capacity, int initialSize, int excessPeeks) {
		this.token = token;
		this.readerBufferFieldName = readerBufferFieldName;
		this.writerBufferFieldName = writerBufferFieldName;
		this.capacity = capacity;
		this.initialSize = initialSize;
		this.excessPeeks = excessPeeks;
		// Invariants: only the overall output lacks a reader buffer, only the
		// overall input lacks a writer buffer, and sizes fit in the capacity.
		assert readerBufferFieldName != null || token.isOverallOutput() : this;
		assert writerBufferFieldName != null || token.isOverallInput() : this;
		assert capacity >= 0 : this;
		assert initialSize >= 0 && initialSize <= capacity : this;
		assert excessPeeks >= 0 && excessPeeks <= capacity : this;
	}

	@Override
	public String toString() {
		return String.format("[%s: r: %s, w: %s, init: %d, max: %d, peeks: %d]", token, readerBufferFieldName, writerBufferFieldName, initialSize, capacity, excessPeeks);
	}
}

/**
 * Smoke test: builds a tiny roundrobin splitjoin of identities, connects the
 * workers with empty channels, and runs the compiler over the graph.
 */
public static void main(String[] args) {
	OneToOneElement<Integer, Integer> graph = new Splitjoin<>(new RoundrobinSplitter<Integer>(), new RoundrobinJoiner<Integer>(), new Identity<Integer>(), new Identity<Integer>());
	ConnectWorkersVisitor cwv = new ConnectWorkersVisitor(new ChannelFactory() {
		@Override
		public <E> Channel<E> makeChannel(Worker<?, E> upstream, Worker<E, ?> downstream) {
			return new EmptyChannel<>();
		}
	});
	graph.visit(cwv);
	Set<Worker<?, ?>> workers = Workers.getAllWorkersInGraph(cwv.getSource());
	Configuration config = new CompilerBlobFactory().getDefaultConfiguration(workers);
	int maxNumCores = 1;
	Compiler compiler = new Compiler(workers, config, maxNumCores);
	Blob blob = compiler.compile();
	// Result is discarded; this only exercises the compilation pipeline.
	blob.getCoreCount();
}
}
package ru.ifmo.nds.jfb;

import java.util.Arrays;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.ForkJoinTask;
import java.util.concurrent.RecursiveAction;
import java.util.concurrent.RecursiveTask;

import ru.ifmo.nds.NonDominatedSorting;
import ru.ifmo.nds.util.*;

/**
 * Base class for Jensen-Fortin-Buzdalov divide-and-conquer non-dominated
 * sorting, with optional fork-join parallelism and a pluggable "hybrid"
 * algorithm that may take over subproblems via hook calls.
 */
public abstract class JFBBase extends NonDominatedSorting {
    // Subproblems at or below this size are never forked to the pool.
    private static final int FORK_JOIN_THRESHOLD = 400;

    // Shared resources (int[] indices from super also belongs here)
    int[] ranks;

    // Data which is immutable throughout the actual sorting.
    private double[][] points;
    double[][] transposedPoints;
    int maximalMeaningfulRank;

    // Data which is interval-shared between threads.
    private double[] temporary; // also used in 2D-only sweep
    private SplitMergeHelper splitMerge;
    private HybridAlgorithmWrapper.Instance hybrid;

    private ForkJoinPool pool;
    private final int allowedThreads;
    private final String nameAddend;

    /**
     * @param maximumPoints     capacity of the internal buffers.
     * @param maximumDimension  maximum supported point dimension.
     * @param allowedThreads    requested thread count; 1 forces sequential,
     *                          non-positive means "unlimited" (default pool).
     * @param hybridWrapper     hybrid algorithm plugged into helperA/helperB.
     * @param nameAddend        human-readable suffix for getName().
     */
    JFBBase(int maximumPoints, int maximumDimension, int allowedThreads, HybridAlgorithmWrapper hybridWrapper, String nameAddend) {
        super(maximumPoints, maximumDimension);
        if (!hybridWrapper.supportsMultipleThreads()) {
            // The hybrid cannot run concurrently: force sequential execution.
            allowedThreads = 1;
        }
        this.nameAddend = nameAddend + ", hybrid: " + hybridWrapper.getName();
        if (allowedThreads != 1 && makesSenseRunInParallel(maximumPoints, maximumDimension)) {
            // Non-positive allowedThreads means default pool parallelism.
            pool = allowedThreads > 1 ? new ForkJoinPool(allowedThreads) : new ForkJoinPool();
        } else {
            pool = null; // current thread only execution
        }
        this.allowedThreads = allowedThreads > 0 ? allowedThreads : -1;
        temporary = new double[maximumPoints];
        ranks = new int[maximumPoints];
        if (maximumDimension > 2) {
            // Structures needed only for the general (dim > 2) case.
            points = new double[maximumPoints][];
            transposedPoints = new double[maximumDimension][maximumPoints];
            splitMerge = new SplitMergeHelper(maximumPoints);
            hybrid = hybridWrapper.create(ranks, indices, points, transposedPoints);
        }
    }

    @Override
    protected void closeImpl() {
        // Release all buffers and shut down the pool.
        temporary = null;
        ranks = null;
        points = null;
        transposedPoints = null;
        splitMerge = null;
        if (pool != null) {
            pool.shutdown();
            pool = null;
        }
        hybrid = null;
    }

    @Override
    public String getName() {
        return "Jensen-Fortin-Buzdalov, " + getThreadDescription() + ", " + nameAddend;
    }

    @Override
    protected final void sortChecked(double[][] points, int[] ranks, int maximalMeaningfulRank) {
        final int n = points.length;
        final int dim = points[0].length;
        Arrays.fill(ranks, 0);
        ArrayHelper.fillIdentity(indices, n);
        sorter.lexicographicalSort(points, indices, 0, n, dim);

        this.maximalMeaningfulRank = maximalMeaningfulRank;

        if (dim == 2) {
            // 2: Special case: binary search.
            twoDimensionalCase(points, ranks);
        } else {
            // 3: General case.
            // 3.1: Moving points in a sorted order to internal structures
            final int newN = ArraySorter.retainUniquePoints(points, indices, this.points, ranks);
            Arrays.fill(this.ranks, 0, newN, 0);

            // 3.2: Transposing points. This should fit in cache for reasonable dimensions.
            for (int i = 0; i < newN; ++i) {
                for (int j = 0; j < dim; ++j) {
                    transposedPoints[j][i] = this.points[i][j];
                }
            }

            postTransposePointHook(newN);
            ArrayHelper.fillIdentity(indices, newN);

            // 3.3: Calling the actual sorting
            if (pool != null && makesSenseRunInParallel(n, dim)) {
                RecursiveAction action = new RecursiveAction() {
                    @Override
                    protected void compute() {
                        helperA(0, newN, dim - 1);
                    }
                };
                pool.invoke(action);
            } else {
                helperA(0, newN, dim - 1);
            }

            // 3.4: Applying the results back. After that, the argument "ranks" array stops being abused.
            // Map each original point's rank through the dedup table and drop refs.
            for (int i = 0; i < n; ++i) {
                ranks[i] = this.ranks[ranks[i]];
                this.points[i] = null;
            }
        }
    }

    /**
     * Compacts indices[from, until), keeping only points whose rank does not
     * exceed maximalMeaningfulRank, and returns the new "until".
     */
    public static int kickOutOverflowedRanks(int[] indices, int[] ranks, int maximalMeaningfulRank, int from, int until) {
        int newUntil = from;
        for (int i = from; i < until; ++i) {
            int ii = indices[i];
            if (ranks[ii] <= maximalMeaningfulRank) {
                indices[newUntil] = ii;
                ++newUntil;
            }
        }
        return newUntil;
    }

    // Subclass hook invoked after transposedPoints has been filled.
    protected void postTransposePointHook(int newN) {}

    // Base-case sweep procedures implemented by subclasses: sweepA ranks one
    // range against itself, sweepB ranks the weak range against the good one.
    protected abstract int sweepA(int from, int until);

    protected abstract int sweepB(int goodFrom, int goodUntil, int weakFrom, int weakUntil, int tempFrom);

    /**
     * Ranks points of indices[from, until) against each other on objectives
     * [0, obj] by divide-and-conquer on the median of objective obj.
     * Returns the new "until" after rank-overflowed points are kicked out.
     */
    private int helperA(int from, int until, int obj) {
        int n = until - from;
        if (n <= 2) {
            if (n == 2) {
                // Two points: compare them directly (lexicographic order makes
                // the first a candidate dominator of the second).
                int goodIndex = indices[from];
                int weakIndex = indices[from + 1];
                int goodRank = ranks[goodIndex];
                if (ranks[weakIndex] <= goodRank && DominanceHelper.strictlyDominatesAssumingLexicographicallySmaller(points[goodIndex], points[weakIndex], obj)) {
                    ranks[weakIndex] = 1 + goodRank;
                    if (goodRank >= maximalMeaningfulRank) {
                        // The dominated point overflowed: keep only the first.
                        return from + 1;
                    }
                }
            }
            return until;
        } else {
            while (obj > 1) {
                // Give the hybrid algorithm a chance to solve this subproblem.
                int hookResponse = hybrid.helperAHook(from, until, obj, maximalMeaningfulRank);
                if (hookResponse >= 0) {
                    return hookResponse;
                }
                if (ArrayHelper.transplantAndCheckIfSame(transposedPoints[obj], indices, from, until, temporary, from)) {
                    // Objective obj is constant on this range: it cannot discriminate.
                    --obj;
                } else {
                    double median = ArrayHelper.destructiveMedian(temporary, from, until);
                    // Split into < median, == median, > median parts.
                    long split = splitMerge.splitInThree(transposedPoints[obj], indices, from, from, until, median);
                    int startMid = SplitMergeHelper.extractMid(split);
                    int startRight = SplitMergeHelper.extractRight(split);
                    // Solve the left part with all objectives; the mid part has a
                    // constant objective obj, so it and cross-comparisons use obj-1.
                    int newStartMid = helperA(from, startMid, obj);
                    --obj;
                    int newStartRight = helperB(from, newStartMid, startMid, startRight, obj, from);
                    newStartRight = helperA(startMid, newStartRight, obj);
                    int newUntil = helperB(from, newStartMid, startRight, until, obj, from);
                    newUntil = helperB(startMid, newStartRight, startRight, newUntil, obj, from);
                    ++obj;
                    newUntil = helperA(startRight, newUntil, obj);
                    // Restore sorted order of the surviving points.
                    return splitMerge.mergeThree(indices, from, from, newStartMid, startMid, newStartRight, startRight, newUntil);
                }
            }
            return sweepA(from, until);
        }
    }

    /**
     * Applies domination by the given point to indices[from, until).
     * Returns the new "until"; points are kicked out when the dominating
     * point already has the maximal meaningful rank.
     */
    public static int updateByPoint(int[] ranks, int[] indices, double[][] points, int maximalMeaningfulRank, int pointIndex, int from, int until, int obj) {
        int ri = ranks[pointIndex];
        if (ri == maximalMeaningfulRank) {
            // Any dominated point overflows: filter them out.
            return updateByPointCritical(ranks, indices, points, maximalMeaningfulRank, pointIndex, from, until, obj);
        } else {
            updateByPointNormal(ranks, indices, points, pointIndex, ri, from, until, obj);
            return until;
        }
    }

    // Raises the rank of every point in [from, until) dominated by pointIndex.
    private static void updateByPointNormal(int[] ranks, int[] indices, double[][] points, int pointIndex, int pointRank, int from, int until, int obj) {
        double[] pt = points[pointIndex];
        for (int i = from; i < until; ++i) {
            int ii = indices[i];
            if (ranks[ii] <= pointRank && DominanceHelper.strictlyDominatesAssumingLexicographicallySmaller(pt, points[ii], obj)) {
                ranks[ii] = pointRank + 1;
            }
        }
    }

    // Same, but the dominator has the maximal meaningful rank: every dominated
    // point overflows and is removed from the index range.
    private static int updateByPointCritical(int[] ranks, int[] indices, double[][] points, int maximalMeaningfulRank, int pointIndex, int from, int until, int obj) {
        int minOverflow = until;
        double[] pt = points[pointIndex];
        for (int i = from; i < until; ++i) {
            int ii = indices[i];
            if (DominanceHelper.strictlyDominatesAssumingLexicographicallySmaller(pt, points[ii], obj)) {
                ranks[ii] = maximalMeaningfulRank + 1;
                if (minOverflow > i) {
                    minOverflow = i;
                }
            }
        }
        return kickOutOverflowedRanks(indices, ranks, maximalMeaningfulRank, minOverflow, until);
    }

    // Ranks one weak point (index wi, rank rw, entry rank rw0) against good
    // points indices[goodMin..goodMax], scanning right-to-left.
    // Returns 0 if the weak point overflowed, 1 otherwise.
    private int helperBWeak1Generic(int wi, int obj, int rw, int rw0, double[] wp, int goodMin, int goodMax) {
        for (int i = goodMax; i >= goodMin; --i) {
            int gi = indices[i];
            int gr = ranks[gi];
            if (rw <= gr && DominanceHelper.strictlyDominatesAssumingLexicographicallySmaller(points[gi], wp, obj)) {
                rw = gr + 1;
                if (rw > maximalMeaningfulRank) {
                    ranks[wi] = rw;
                    return 0;
                }
            }
        }
        if (rw != rw0) {
            ranks[wi] = rw;
        }
        return 1;
    }

    // Fast path of the above for a weak point whose rank is still 0.
    private int helperBWeak1Rank0(int wi, int obj,
                                  double[] wp, int goodMin, int goodMax) {
        for (int i = goodMax; i >= goodMin; --i) {
            int gi = indices[i];
            if (DominanceHelper.strictlyDominatesAssumingLexicographicallySmaller(points[gi], wp, obj)) {
                int newRank = ranks[gi] + 1;
                if (newRank > maximalMeaningfulRank) {
                    ranks[wi] = newRank;
                    return 0;
                }
                // First dominating point found: continue with the generic scan
                // over the remaining good points.
                return helperBWeak1Generic(wi, obj, newRank, 0, wp, goodMin, i - 1);
            }
        }
        return 1;
    }

    // Ranks the single weak point at position 'weak' against the good range.
    // Returns the new "until" for the weak range (weak plus 0 or 1).
    private int helperBWeak1(int goodFrom, int goodUntil, int weak, int obj) {
        int wi = indices[weak];
        int rw = ranks[wi];
        double[] wp = points[wi];
        int change = rw == 0
                ? helperBWeak1Rank0(wi, obj, wp, goodFrom, goodUntil - 1)
                : helperBWeak1Generic(wi, obj, rw, rw, wp, goodFrom, goodUntil - 1);
        return weak + change;
    }

    // Forks a helperB invocation as a pool task.
    private ForkJoinTask<Integer> helperBAsync(final int goodFrom, final int goodUntil,
                                               final int weakFrom, final int weakUntil,
                                               final int obj, final int tempFrom) {
        return new RecursiveTask<Integer>() {
            @Override
            protected Integer compute() {
                return helperB(goodFrom, goodUntil, weakFrom, weakUntil, obj, tempFrom);
            }
        }.fork();
    }

    /**
     * Ranks points of the "weak" range against points of the "good" range on
     * objectives [0, obj]. Returns the new weakUntil after kick-outs.
     */
    private int helperB(int goodFrom, int goodUntil, int weakFrom, int weakUntil, int obj, int tempFrom) {
        if (goodUntil - goodFrom > 0 && weakUntil - weakFrom > 0) {
            // Trim: good points after the last weak point, and weak points before
            // the first good point, cannot interact (indices are order-correlated).
            goodUntil = ArrayHelper.findLastWhereNotGreater(indices, goodFrom, goodUntil, indices[weakUntil - 1]);
            weakFrom = ArrayHelper.findWhereNotSmaller(indices, weakFrom, weakUntil, indices[goodFrom]);
        }
        int goodN = goodUntil - goodFrom;
        int weakN = weakUntil - weakFrom;
        if (goodN > 0 && weakN > 0) {
            if (goodN == 1) {
                // Single good point: direct update of the whole weak range.
                return updateByPoint(ranks, indices, points, maximalMeaningfulRank, indices[goodFrom], weakFrom, weakUntil, obj);
            } else if (weakN == 1) {
                // Single weak point: direct scan of the good range.
                return helperBWeak1(goodFrom, goodUntil, weakFrom, obj);
            } else {
                while (obj > 1) {
                    // Give the hybrid algorithm a chance to solve this subproblem.
                    int hookResponse = hybrid.helperBHook(goodFrom, goodUntil, weakFrom, weakUntil, obj, tempFrom, maximalMeaningfulRank);
                    if (hookResponse >= 0) {
                        return hookResponse;
                    }
                    double[] currentPoints = transposedPoints[obj];
                    switch (ArrayHelper.transplantAndDecide(currentPoints, indices, goodFrom, goodUntil, weakFrom, weakUntil, temporary, tempFrom)) {
                        case ArrayHelper.TRANSPLANT_LEFT_NOT_GREATER:
                            // Good points never exceed weak points in this objective:
                            // it cannot discriminate, drop it.
                            --obj;
                            break;
                        case ArrayHelper.TRANSPLANT_RIGHT_SMALLER:
                            // Every weak point is smaller in this objective: no good
                            // point can dominate any weak point.
                            return weakUntil;
                        case ArrayHelper.TRANSPLANT_GENERAL_CASE:
                            double median = ArrayHelper.destructiveMedian(temporary, tempFrom, tempFrom + goodUntil - goodFrom + weakUntil - weakFrom);
                            // Split both ranges around the shared median.
                            long goodSplit = splitMerge.splitInThree(currentPoints, indices, tempFrom, goodFrom, goodUntil, median);
                            int goodMidL = SplitMergeHelper.extractMid(goodSplit);
                            int goodMidR = SplitMergeHelper.extractRight(goodSplit);
                            long weakSplit = splitMerge.splitInThree(currentPoints, indices, tempFrom, weakFrom, weakUntil, median);
                            int weakMidL = SplitMergeHelper.extractMid(weakSplit);
                            int weakMidR = SplitMergeHelper.extractRight(weakSplit);
                            int leftCallSize = goodMidL - goodFrom + weakMidL - weakFrom;
                            --obj;
                            // Cross-part comparisons need one less objective (the
                            // split objective already decides their order).
                            int newWeakUntil = helperB(goodFrom, goodMidL, weakMidR, weakUntil, obj, tempFrom);
                            newWeakUntil = helperB(goodMidL, goodMidR, weakMidR, newWeakUntil, obj, tempFrom);
                            int newWeakMidR = helperB(goodFrom, goodMidL, weakMidL, weakMidR, obj, tempFrom);
                            newWeakMidR = helperB(goodMidL, goodMidR, weakMidL, newWeakMidR, obj, tempFrom);
                            ++obj;
                            // Left-vs-left may run concurrently with right-vs-right:
                            // they touch disjoint index and temporary ranges.
                            ForkJoinTask<Integer> newWeakMidLTask = null;
                            if (pool != null && leftCallSize > FORK_JOIN_THRESHOLD) {
                                newWeakMidLTask = helperBAsync(goodFrom, goodMidL, weakFrom, weakMidL, obj, tempFrom);
                            }
                            newWeakUntil = helperB(goodMidR, goodUntil, weakMidR, newWeakUntil, obj, tempFrom + leftCallSize);
                            int newWeakMidL = newWeakMidLTask != null ?
                                    newWeakMidLTask.join() : helperB(goodFrom, goodMidL, weakFrom, weakMidL, obj, tempFrom);
                            // Restore sorted order of both ranges before returning.
                            splitMerge.mergeThree(indices, tempFrom, goodFrom, goodMidL, goodMidL, goodMidR, goodMidR, goodUntil);
                            return splitMerge.mergeThree(indices, tempFrom, weakFrom, newWeakMidL, weakMidL, newWeakMidR, weakMidR, newWeakUntil);
                    }
                }
                return sweepB(goodFrom, goodUntil, weakFrom, weakUntil, tempFrom);
            }
        }
        return weakUntil;
    }

    /**
     * 2D special case: one pass over lexicographically sorted points, using
     * temporary[r] as the smallest Y seen among points of rank r, and a
     * binary search over that (monotone) array to find each point's rank.
     */
    private void twoDimensionalCase(double[][] points, int[] ranks) {
        int maxRank = 1;
        int n = ranks.length;

        double[] firstPoint = points[indices[0]];
        double lastX = firstPoint[0];
        double lastY = firstPoint[1];
        int lastRank = 0;
        // This is used here instead of temporary[0] to make it slightly faster.
        double minY = lastY;

        for (int i = 1; i < n; ++i) {
            int ii = indices[i];
            double[] pp = points[ii];
            double currX = pp[0];
            double currY = pp[1];

            if (currX == lastX && currY == lastY) {
                // Same point as the previous one.
                // The rank is the same as well.
                ranks[ii] = lastRank;
            } else if (currY < minY) {
                // Y smaller than the smallest Y previously seen.
                // The rank is thus zero.
                // ranks[ii] is already 0.
                minY = currY;
                lastRank = 0;
            } else {
                // At least the Y-smallest point dominates our point.
                int left, right;
                if (currY < lastY) {
                    // We are better than the previous point in Y.
                    // This means that we are at least that good.
                    left = 0;
                    right = lastRank;
                } else {
                    // We are worse (or equal) than the previous point in Y.
                    // This means that we are worse than this point.
                    left = lastRank;
                    right = maxRank;
                }
                // Running the binary search.
                while (right - left > 1) {
                    int mid = (left + right) >>> 1;
                    double midY = temporary[mid];
                    if (currY < midY) {
                        right = mid;
                    } else {
                        left = mid;
                    }
                }
                // "right" is now our rank.
                ranks[ii] = lastRank = right;
                temporary[right] = currY;
                if (right == maxRank && maxRank <= maximalMeaningfulRank) {
                    ++maxRank;
                }
            }

            lastX = currX;
            lastY = currY;
        }
    }

    // Parallelism pays off only for large, high-dimensional inputs.
    private boolean makesSenseRunInParallel(int nPoints, int dimension) {
        return nPoints > FORK_JOIN_THRESHOLD && dimension > 3;
    }

    private String getThreadDescription() {
        return allowedThreads == -1 ? "unlimited threads" : allowedThreads + " thread(s)";
    }
}
package edu.mit.streamjit.impl.compiler2;

import static com.google.common.base.Preconditions.*;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Range;
import edu.mit.streamjit.util.Pair;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;

/**
 * Describes one piece of intermediate storage in the stream graph (a buffer,
 * though the name Buffer is already taken elsewhere): which Actors write it,
 * which read it, its element type, initial contents, and steady-state sizing.
 *
 * Rate information is only valid on an untransformed graph; Actor removal can
 * introduce ambiguity.
 * @author Jeffrey Bosboom <jeffreybosboom@gmail.com>
 * @since 9/27/2013
 */
public final class Storage {
	/**
	 * Writers (upstream) and readers (downstream) of this storage.
	 */
	private final List<Actor> upstream, downstream;
	/**
	 * The element type. Starts as Object; unboxing analysis of the connected
	 * Actors may later narrow it to a primitive type.
	 */
	private Class<?> type = Object.class;
	/**
	 * Initial data items, each list paired with a write-index function giving
	 * the destination index of the corresponding item. Because of these
	 * functions, items from a later pair may precede items from an earlier one.
	 */
	private final List<Pair<ImmutableList<Object>, MethodHandle>> initialData = new ArrayList<>();
	/**
	 * Items added to and removed from this storage per steady-state iteration;
	 * -1 until computed.
	 */
	private int throughput = -1;
	/**
	 * Span of the indices live during a steady-state iteration; -1 until
	 * computed.
	 */
	private int steadyStateCapacity = -1;
	/**
	 * Indices live at the end of initialization / start of each steady-state
	 * execution; these items migrate from init to steady-state storage.
	 */
	private ImmutableSortedSet<Integer> liveDuringSteadyState;

	public Storage(Actor upstream, Actor downstream) {
		this.upstream = Lists.newArrayList(upstream);
		this.downstream = Lists.newArrayList(downstream);
	}

	/**
	 * Returns the (mutable) list of Actors writing into this storage.
	 */
	public List<Actor> upstream() {
		return upstream;
	}

	/**
	 * Returns the set of groups containing this storage's writers.
	 */
	public ImmutableSet<ActorGroup> upstreamGroups() {
		ImmutableSet.Builder<ActorGroup> groups = ImmutableSet.builder();
		for (Actor writer : upstream())
			groups.add(writer.group());
		return groups.build();
	}

	/**
	 * Returns the (mutable) list of Actors reading from this storage.
	 */
	public List<Actor> downstream() {
		return downstream;
	}

	/**
	 * Returns the set of groups containing this storage's readers.
	 */
	public ImmutableSet<ActorGroup> downstreamGroups() {
		ImmutableSet.Builder<ActorGroup> groups = ImmutableSet.builder();
		for (Actor reader : downstream())
			groups.add(reader.group());
		return groups.build();
	}

	/**
	 * Returns the push rate of the single writer's output into this storage.
	 * Only valid while there is exactly one writer.
	 */
	public int push() {
		checkState(upstream().size() == 1, this);
		Actor writer = upstream().get(0);
		return writer.push(writer.outputs().indexOf(this));
	}

	/**
	 * Returns the peek rate of the single reader's input from this storage.
	 * Only valid while there is exactly one reader.
	 */
	public int peek() {
		checkState(downstream().size() == 1, this);
		Actor reader = downstream().get(0);
		return reader.peek(reader.inputs().indexOf(this));
	}

	/**
	 * Returns the pop rate of the single reader's input from this storage.
	 * Only valid while there is exactly one reader.
	 */
	public int pop() {
		checkState(downstream().size() == 1, this);
		Actor reader = downstream().get(0);
		return reader.pop(reader.inputs().indexOf(this));
	}

	/**
	 * Returns true if this Storage is internal to an ActorGroup; that is, all
	 * Actors reading or writing it are in the same ActorGroup.
	 * @return true iff this Storage is internal to an ActorGroup
	 */
	public boolean isInternal() {
		ActorGroup owner = upstream().get(0).group();
		for (Actor writer : upstream())
			if (writer.group() != owner)
				return false;
		for (Actor reader : downstream())
			if (reader.group() != owner)
				return false;
		return true;
	}

	/**
	 * Returns the (mutable) list of initial-data chunks and their write-index
	 * functions.
	 */
	public List<Pair<ImmutableList<Object>, MethodHandle>> initialData() {
		return initialData;
	}

	/**
	 * Returns a set containing the indices live before the initialization
	 * schedule; that is, the indices holding initial data. The set is
	 * recomputed on each call, so should be kept in a local variable.
	 * @return the indices holding initial data
	 */
	public ImmutableSortedSet<Integer> indicesLiveBeforeInit() {
		ImmutableSortedSet.Builder<Integer> live = ImmutableSortedSet.naturalOrder();
		for (Pair<ImmutableList<Object>, MethodHandle> chunk : initialData())
			for (int idx = 0; idx < chunk.first.size(); ++idx)
				try {
					live.add((int)chunk.second.invokeExact(idx));
				} catch (Throwable ex) {
					throw new AssertionError("index functions should not throw", ex);
				}
		return live.build();
	}

	/**
	 * Returns the current element type of this storage.
	 */
	public Class<?> type() {
		return type;
	}

	/**
	 * Sets the element type of this storage.
	 */
	public void setType(Class<?> type) {
		//We could check the new type is compatible with the common type if we
		//consider primitives compatible with their wrapper type.
		this.type = type;
	}

	/**
	 * Computes the common type of the Actors connected to this Storage.
	 * @return the common type of the Actors connected to this Storage
	 */
	public Class<?> commonType() {
		Set<Class<?>> seen = new HashSet<>();
		for (Actor writer : upstream())
			seen.add(writer.outputType());
		for (Actor reader : downstream())
			seen.add(reader.inputType());
		//TODO: we only really care about the case where the common types are
		//all one (wrapper) type, so check that and return Object otherwise.
		return seen.size() == 1 ? seen.iterator().next() : Object.class;
	}

	/**
	 * Returns the indices read from this storage during an execution of the
	 * given schedule. The returned set is not cached so as to be responsive
	 * to changes in input index functions.
	 * @param externalSchedule the schedule
	 * @return the indices read during the given schedule under the current
	 * index functions
	 */
	public ImmutableSortedSet<Integer> readIndices(Map<ActorGroup, Integer> externalSchedule) {
		ImmutableSortedSet.Builder<Integer> indices = ImmutableSortedSet.naturalOrder();
		for (Actor reader : downstream()) {
			int iterations = reader.group().schedule().get(reader) * externalSchedule.get(reader.group());
			indices.addAll(reader.reads(this, Range.closedOpen(0, iterations)));
		}
		return indices.build();
	}

	/**
	 * Returns the indices written in this storage during an execution of the
	 * given schedule. The returned set is not cached so as to be responsive
	 * to changes in output index functions.
	 * @param externalSchedule the schedule
	 * @return the indices written during the given schedule under the current
	 * index functions
	 */
	public ImmutableSortedSet<Integer> writeIndices(Map<ActorGroup, Integer> externalSchedule) {
		ImmutableSortedSet.Builder<Integer> indices = ImmutableSortedSet.naturalOrder();
		for (Actor writer : upstream()) {
			int iterations = writer.group().schedule().get(writer) * externalSchedule.get(writer.group());
			indices.addAll(writer.writes(this, Range.closedOpen(0, iterations)));
		}
		return indices.build();
	}

	/**
	 * Returns the number of items written to and consumed from this storage
	 * during a steady-state execution. Only valid after
	 * {@link #computeSteadyStateRequirements(Map)}.
	 * @return the steady-state throughput
	 */
	public int throughput() {
		checkState(throughput != -1);
		return throughput;
	}

	/**
	 * Returns this Storage's steady-state capacity: the span of live elements
	 * during a steady state iteration. This includes items to be read this
	 * iteration, items buffered for a future iteration, space for items to be
	 * written this iteration, and possible holes in any of the above. Only
	 * valid after {@link #computeSteadyStateRequirements(Map)}.
	 * @return this Storage's steady-state capacity
	 */
	public int steadyStateCapacity() {
		checkState(steadyStateCapacity != -1);
		return steadyStateCapacity;
	}

	/**
	 * Computes this storage's steady-state throughput and capacity from the
	 * read and write index sets of one steady-state execution.
	 * @param externalSchedule the external schedule
	 */
	public void computeSteadyStateRequirements(Map<ActorGroup, Integer> externalSchedule) {
		ImmutableSortedSet<Integer> reads = readIndices(externalSchedule);
		ImmutableSortedSet<Integer> writes = writeIndices(externalSchedule);
		this.throughput = writes.size();
		int lowest = Math.min(reads.first(), writes.first());
		int highest = Math.max(reads.last(), writes.last());
		this.steadyStateCapacity = highest - lowest + 1;
	}

	/**
	 * Records the indices live at the steady-state boundary.
	 */
	public void setIndicesLiveDuringSteadyState(ImmutableSortedSet<Integer> liveDuringSteadyState) {
		this.liveDuringSteadyState = liveDuringSteadyState;
	}

	/**
	 * Returns the indices live at the steady-state boundary; only valid after
	 * they have been set.
	 */
	public ImmutableSortedSet<Integer> indicesLiveDuringSteadyState() {
		checkState(liveDuringSteadyState != null);
		return liveDuringSteadyState;
	}

	@Override
	public String toString() {
		return String.format("(%s, %s)", upstream, downstream);
	}
}
package org.eclipse.birt.report.data.adapter.impl;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.eclipse.birt.core.data.ExpressionUtil;
import org.eclipse.birt.core.exception.BirtException;
import org.eclipse.birt.data.engine.core.DataException;
import org.eclipse.birt.data.engine.expression.ExpressionCompilerUtil;
import org.eclipse.birt.data.engine.olap.api.query.IDerivedMeasureDefinition;
import org.eclipse.birt.data.engine.olap.api.query.IMeasureDefinition;
import org.eclipse.birt.data.engine.olap.impl.query.DerivedMeasureDefinition;
import org.eclipse.birt.data.engine.olap.impl.query.MeasureDefinition;
import org.eclipse.birt.report.data.adapter.api.DataAdapterUtil;
import org.eclipse.birt.report.data.adapter.api.DataSessionContext;
import org.eclipse.birt.report.data.adapter.api.IModelAdapter;
import org.eclipse.birt.report.data.adapter.api.IModelAdapter.ExpressionLocation;
import org.eclipse.birt.report.data.adapter.i18n.AdapterResourceHandle;
import org.eclipse.birt.report.data.adapter.i18n.ResourceConstants;
import org.eclipse.birt.report.model.api.Expression;
import org.eclipse.birt.report.model.api.olap.CubeHandle;
import org.eclipse.birt.report.model.api.olap.MeasureGroupHandle;
import org.eclipse.birt.report.model.api.olap.MeasureHandle;
import org.eclipse.birt.report.model.elements.interfaces.IMeasureModel;

/**
 * Utility for validating derived (calculated) cube measures and for
 * discovering which measures a given derived measure may safely reference.
 * Validation detects two error conditions: references to measures that do
 * not exist, and recursive (cyclic) references between derived measures.
 */
public class CubeMeasureUtil
{

	// Creates a fresh model adapter used to compile measure expressions.
	// NOTE(review): a new DataModelAdapter/DataSessionContext is created per
	// call (and this is invoked once per calculated measure in
	// populateMeasures) — presumably cheap, but worth confirming.
	private static IModelAdapter getModelAdapter( ) throws BirtException
	{
		return new DataModelAdapter( new DataSessionContext( DataSessionContext.MODE_DIRECT_PRESENTATION ) );
	}

	/**
	 * Check whether the derived measure references within a cube are valid.
	 *
	 * @param cubeHandle
	 *            CubeHandle
	 * @throws BirtException
	 *             If an invalid measure reference or recursive measure
	 *             reference is detected.
	 */
	public static void validateDerivedMeasures( CubeHandle cubeHandle ) throws BirtException
	{
		Map<String, IMeasureDefinition> measures = new HashMap<String, IMeasureDefinition>( );
		Map<String, IDerivedMeasureDefinition> calculatedMeasures = new HashMap<String, IDerivedMeasureDefinition>( );
		Map<String, MeasureHandle> mHandles = new HashMap<String, MeasureHandle>( );
		populateMeasures( measures, calculatedMeasures, mHandles, cubeHandle );

		// Each derived measure gets its own resolution stack; the first
		// invalid/recursive reference aborts with a DataException.
		for ( Map.Entry<String, IDerivedMeasureDefinition> e : calculatedMeasures.entrySet( ) )
		{
			List<String> resolving = new ArrayList<String>( );
			checkDerivedMeasure( e.getValue( ), resolving, measures, calculatedMeasures, mHandles );
		}
	}

	/**
	 * Walks all measure groups of the cube and splits the measures into
	 * plain measures, calculated (derived) measures, and a name-to-handle
	 * map. All three output maps are keyed by measure name.
	 *
	 * @param measures out: plain (non-calculated) measure definitions
	 * @param calculatedMeasures out: derived measure definitions
	 * @param measureHandles out: model handles for all measures; may be null
	 * @param cubeHandle the cube to scan
	 * @throws BirtException if a measure expression cannot be adapted
	 */
	private static void populateMeasures( Map<String, IMeasureDefinition> measures,
			Map<String, IDerivedMeasureDefinition> calculatedMeasures,
			Map<String, MeasureHandle> measureHandles, CubeHandle cubeHandle )
			throws BirtException
	{
		List measureGroups = cubeHandle.getContents( CubeHandle.MEASURE_GROUPS_PROP );
		for ( int i = 0; i < measureGroups.size( ); i++ )
		{
			MeasureGroupHandle mgh = (MeasureGroupHandle) measureGroups.get( i );
			List measureGroup = mgh.getContents( MeasureGroupHandle.MEASURES_PROP );
			for ( int j = 0; j < measureGroup.size( ); j++ )
			{
				MeasureHandle mHandle = (MeasureHandle) measureGroup.get( j );
				if ( measureHandles != null )
					measureHandles.put( mHandle.getName( ), mHandle );
				if ( mHandle.isCalculated( ) )
				{
					// Derived measure: compile its model expression so that
					// referenced measures can later be extracted from it.
					DerivedMeasureDefinition m = new DerivedMeasureDefinition( mHandle.getName( ),
							DataAdapterUtil.adaptModelDataType( mHandle.getDataType( ) ),
							getModelAdapter( ).adaptExpression( (Expression) mHandle.getExpressionProperty( IMeasureModel.MEASURE_EXPRESSION_PROP )
									.getValue( ),
									ExpressionLocation.CUBE ) );
					calculatedMeasures.put( mHandle.getName( ), m );
				}
				else
				{
					MeasureDefinition m = new MeasureDefinition( mHandle.getName( ) );
					m.setAggrFunction( DataAdapterUtil.getRollUpAggregationName( mHandle.getFunction( ) ) );
					measures.put( m.getName( ), m );
				}
			}
		}
	}

	/**
	 * Recursively verifies one derived measure. {@code resolving} is the
	 * stack of derived-measure names currently being resolved (the chain
	 * from the original measure down to this one) and is used to detect
	 * cycles; it is unwound on normal return.
	 *
	 * @throws DataException
	 *             with INVALID_REF if a referenced measure does not exist at
	 *             all, RESOLVE_ERROR if the handle exists but is not
	 *             resolvable as plain or calculated, or RECURSIVE_REF if the
	 *             reference closes a cycle.
	 */
	private static void checkDerivedMeasure( IDerivedMeasureDefinition dmeasure,
			List<String> resolving, Map<String, IMeasureDefinition> measures,
			Map<String, IDerivedMeasureDefinition> calculatedMeasure,
			Map<String, MeasureHandle> mHandles ) throws DataException
	{
		// Names of all measures referenced by this measure's expression.
		List referencedMeasures = ExpressionCompilerUtil.extractColumnExpression( dmeasure.getExpression( ),
				ExpressionUtil.MEASURE_INDICATOR );
		resolving.add( dmeasure.getName( ) );
		for ( int i = 0; i < referencedMeasures.size( ); i++ )
		{
			String measureName = referencedMeasures.get( i ).toString( );
			if ( measures.containsKey( measureName ) )
			{
				// Plain measure: always a valid terminal reference.
				continue;
			}
			else
			{
				if ( !calculatedMeasure.containsKey( measureName ) )
				{
					MeasureHandle measureHandle = mHandles.get( measureName );
					if ( measureHandle == null )
						// Referenced name is not a measure at all.
						throw new DataException( AdapterResourceHandle.getInstance( )
								.getMessage( ResourceConstants.CUBE_DERIVED_MEASURE_INVALID_REF,
										new Object[]{ dmeasure.getName( ), measureName } ) );
					// Handle exists but is neither plain nor calculated here.
					throw new DataException( AdapterResourceHandle.getInstance( )
							.getMessage( ResourceConstants.CUBE_DERIVED_MEASURE_RESOLVE_ERROR,
									new Object[]{ resolving.get( 0 ) } ) );
				}
				// Cycle check: the referenced derived measure is already on
				// the resolution stack.
				for ( int j = 0; j < resolving.size( ); j++ )
				{
					if ( measureName.equals( resolving.get( j ) ) )
					{
						// Append the closing name so the reported chain shows
						// the full cycle.
						resolving.add( measureName );
						throw new DataException( AdapterResourceHandle.getInstance( )
								.getMessage( ResourceConstants.CUBE_DERIVED_MEASURE_RECURSIVE_REF,
										new Object[]{ resolving.get( 0 ), resolving.toString( ) } ) );
					}
				}
				checkDerivedMeasure( calculatedMeasure.get( measureName ), resolving, measures,
						calculatedMeasure, mHandles );
			}
		}
		// Normal return: pop this measure off the resolution stack.
		resolving.remove( resolving.size( ) - 1 );
	}

	/**
	 * Get measures that can be referenced by the specific derived measure.
	 * <p>
	 * This method ensures there is no recursive reference between the
	 * returned measures.
	 *
	 * @param cubeHandle
	 * @param measureName
	 * @return A list of MeasureHandles which can be referenced by the
	 *         specified measure.
	 * @throws BirtException
	 */
	public static List<MeasureHandle> getIndependentReferences( CubeHandle cubeHandle, String measureName ) throws BirtException
	{
		List<MeasureHandle> iMeasures = new ArrayList<MeasureHandle>( );
		List<String> mNames = new ArrayList<String>( );
		Map<String, IMeasureDefinition> measures = new HashMap<String, IMeasureDefinition>( );
		Map<String, IDerivedMeasureDefinition> calculatedMeasures = new HashMap<String, IDerivedMeasureDefinition>( );
		Map<String, MeasureHandle> mHandles = new HashMap<String, MeasureHandle>( );
		populateMeasures( measures, calculatedMeasures, mHandles, cubeHandle );

		if ( mHandles.get( measureName ) != null && !mHandles.get( measureName ).isCalculated( ) )
		{
			// Since the properties in MeasureHandle of the newly added measures
			// is not set correctly, Here always return all measures.
			// TODO Remove this temporary fix which does a favor for GUI while
			// GUI side set the properties correctly.
			for ( Map.Entry<String, MeasureHandle> e : mHandles.entrySet( ) )
			{
				MeasureHandle handle = e.getValue( );
				if ( !measureName.equals( handle.getName( ) ) )
				{
					iMeasures.add( handle );
				}
			}
			return iMeasures;
		}

		// Keep only those derived measures whose reference chains resolve
		// cleanly when measureName is assumed to be on the stack (i.e.
		// referencing them from measureName would not create a cycle).
		for ( Map.Entry<String, IDerivedMeasureDefinition> e : calculatedMeasures.entrySet( ) )
		{
			List<String> resolving = new ArrayList<String>( );
			resolving.add( measureName );
			try
			{
				checkDerivedMeasure( e.getValue( ), resolving, measures, calculatedMeasures, mHandles );
				mNames.add( e.getValue( ).getName( ) );
			}
			catch ( BirtException ignore )
			{
				// Intentionally ignored: a measure that fails validation is
				// simply excluded from the independent-reference list.
			}
		}
		for ( String i : mNames )
		{
			if ( i.equals( measureName ) )
				continue;
			iMeasures.add( mHandles.get( i ) );
		}
		// All plain measures are always safe to reference.
		for ( Map.Entry<String, IMeasureDefinition> e : measures.entrySet( ) )
		{
			iMeasures.add( mHandles.get( e.getKey( ) ) );
		}
		return iMeasures;
	}
}
package edu.wpi.first.wpilibj.templates;

/**
 * @author henrypitcairn
 */
/**
 * The RobotMap is a mapping from the ports sensors and actuators are wired into
 * to a variable name. This provides flexibility changing wiring, makes checking
 * the wiring easier and significantly reduces the number of magic numbers
 * floating around.
 */
public class RobotMap {

    // For example to map the left and right motors, you could define the
    // following variables to use with your drivetrain subsystem.
    // public static final int leftMotor = 1;
    // public static final int rightMotor = 2;

    // If you are using multiple modules, make sure to define both the port
    // number and the module. For example you with a rangefinder:
    // public static final int rangefinderPort = 1;
    // public static final int rangefinderModule = 1;

    // Drive Subsystem
    // Digital Sidecar PWM outputs
    public static final int DRIVE_LEFT_MOTOR_CHANNEL = 4;
    public static final int DRIVE_RIGHT_MOTOR_CHANNEL = 3;

    // Digital IO
    public static final int KICKER_LIMIT_SWITCH = 1;
    // NOTE(review): these three break the UPPER_SNAKE_CASE convention used by
    // every other constant here; renaming would break existing callers.
    public static final int ultrasonicPingChannel = 13;
    public static final int ultrasonicReadChannelLeft = 12;
    public static final int ultrasonicReadChannelRight = 11;

    // Analog Breakout
    public static final int GYRO_CHANNEL = 1;

    // Driver Station
    public static final int DRIVE_JOYSTICK_PORT = 1;
    // The catapult safety button
    public static final int CATAPULT_SAFETY_BUTTON = 7;

    // Buttons for Joystick; a value of 99 marks a placeholder that has not
    // been assigned a real button yet.
    public static final int BUMP_LEFT_BUTTON = 99;
    public static final int LAUNCH_BUTTON = 8;
    public static final int BUMP_RIGHT_BUTTON = 99;
    public static final int BUMP_FORWARD_BUTTON = 99;
    public static final int BUMP_REVERSE_BUTTON = 99;
    public static final int ONE_BUTTON_KICK = 3;
    public static final int JOG_FORKLIFT_UP = 99;
    public static final int JOG_FORKLIFT_DOWN = 99;
    public static final int JOG_KICKER_UP = 6;
    public static final int JOG_KICKER_DOWN = 5;
    public static final int USE_AUTORANGER_TO_CORRECT = 4;
    public static final int JOYSTICK_AS_FORKLIFT = 99;
    public static final int SCORE_A_LOW_GOAL= 2;
    public static final int KICKER_TIMEOUT = 500; // Milliseconds
    public static final int SCORE_HIGH_GOAL = 10;

    // Auto-Ranger Subsystem
    // Analog Breakout
    // NOTE(review): "CHANNE" is a typo for "CHANNEL", but renaming a public
    // constant would break callers. Also note channel 1 is shared with
    // GYRO_CHANNEL above — verify the wiring actually allows this.
    public static final int ULTRASONIC_RANGER_CHANNE_RIGHT = 1;
    public static final int ULTRASONIC_RANGER_CHANNEL_LEFT = 2;

    // Potentiometers
    public static final int POTENTIOMETER_CATAPULT = 3;
    public static final int POTENTIOMETER_FORKLIFT = 4;
    public static final int POTENTIOMETER_KICKER = 5;

    // Digital Sidecar GPIO outputs
    public static final int RANGE_BEACON_RED_CHANNEL = 1;
    public static final int RANGE_BEACON_YELLOW_CHANNEL = 2;
    public static final int RANGE_BEACON_GREEN_CHANNEL = 3;
    public static final int RED_LED_SOLENOID_CHANNEL = 2;
    public static final int BLUE_LED_SOLENOID_CHANNEL = 3;
    public static final int GREEN_LED_SOLENOID_CHANNEL = 1;

    // Launcher Subsystem
    // Digital Sidecar PWM outputs
    public static final int LAUNCHER_LEFT_MOTOR_CHANNEL = 6;
    public static final int LAUNCHER_RIGHT_MOTOR_CHANNEL = 5;

    // Digital Sidecar GPIO Inputs
    public static final int LAUNCHER_ENCODER_A_CHANNEL = 4;
    public static final int LAUNCHER_ENCODER_B_CHANNEL = 5;
    public static final int LAUNCHER_ENCODER_MODULE = 1;

    // Forklift Subsystem
    // Digital Sidecar PWM outputs
    public static final int KICKER_MOTOR_CHANNEL = 7;
    public static final int FORKLIFT_MOTOR_CHANNEL = 8;

    // Keys Used for the Smart Dashboard.  These strings are the dashboard
    // widget labels; changing one changes which widget the value binds to.
    public static final String SMARTDASHBOARD_INVERTED_DRIVE = "Inverted Controls?";
    public static final String SMARTDASHBOARD_AUTORANGER_VALUE = "AutoRanger Value";
    public static final String SMARTDASHBOARD_AUTORANGER_VALUE_INCHES = "AutoRanger Value in Inches";
    public static final String SMARTDASHBOARD_AUTORANGER_AT_DESIRED_VALUE = "In Position?";
    public static final String SMARTDASHBOARD_FORKLIFT_UP_SPEED_CONSTANT = "Forklift Up Speed: ";
    public static final String SMARTDASHBOARD_FORKLIFT_DOWN_SPEED_CONSTANT = "Forklift Down Speed: ";
    public static final String SMARTDASHBOARD_KICKER_UP_SPEED_CONSTANT = "Kicker Up Speed: ";
    public static final String SMARTDASHBOARD_KICKER_DOWN_SPEED_CONSTANT = "Kicker Down Speed: ";
    public static final String SMARTDASHBOARD_SKEW_OUTPUT = "Skew: ";
    public static final String SMARTDASHBOARD_LED_TOGGLE = "Alliance LEDs?";
    public static final String SMARTDASHBOARD_FORKLIFT_DOWN_DURATION_BEFORE_LAUNCH = "Forklift pre-launch lower duration";
    public static final String SMARTDASHBOARD_KICKER_TIMEOUT = "Time it takes to put kicker down and up in milliseconds";
    public static final String SMARTDASHBOARD_PULTPOT_MAX_ANGLE = "Pult Max Angle?";
}
/* * $Id: RelayFactory.java 272015 2011-05-21 03:03:57Z cbotev $ */ package com.linkedin.databus2.relay; import java.lang.management.ManagementFactory; import java.sql.SQLException; import java.util.ArrayList; import java.util.List; import javax.management.MBeanServer; import javax.sql.DataSource; import org.apache.log4j.Logger; import com.linkedin.databus.core.DbusEventBufferAppendable; import com.linkedin.databus.core.UnsupportedKeyException; import com.linkedin.databus.core.monitoring.mbean.DbusEventsStatisticsCollector; import com.linkedin.databus.core.util.InvalidConfigException; import com.linkedin.databus.monitoring.mbean.EventSourceStatistics; import com.linkedin.databus2.core.DatabusException; import com.linkedin.databus2.core.seq.MaxSCNReaderWriter; import com.linkedin.databus2.producers.ConstantPartitionFunction; import com.linkedin.databus2.producers.EventCreationException; import com.linkedin.databus2.producers.EventProducer; import com.linkedin.databus2.producers.PartitionFunction; import com.linkedin.databus2.producers.db.EventFactory; import com.linkedin.databus2.producers.db.MonitoredSourceInfo; import com.linkedin.databus2.producers.db.OracleAvroGenericEventFactory; import com.linkedin.databus2.producers.db.OracleEventProducer; import com.linkedin.databus2.relay.config.LogicalSourceStaticConfig; import com.linkedin.databus2.relay.config.PhysicalSourceStaticConfig; import com.linkedin.databus2.schemas.NoSuchSchemaException; import com.linkedin.databus2.schemas.SchemaRegistryService; /** * @author Jemiah Westerman<jwesterman@linkedin.com> * @version $Revision: 272015 $ */ public class OracleEventProducerFactory { private final Logger _log = Logger.getLogger(getClass()); public EventProducer buildEventProducer(PhysicalSourceStaticConfig physicalSourceConfig, SchemaRegistryService schemaRegistryService, DbusEventBufferAppendable dbusEventBuffer, MBeanServer mbeanServer, DbusEventsStatisticsCollector dbusEventsStatisticsCollector, MaxSCNReaderWriter 
_maxScnReaderWriter ) throws DatabusException, EventCreationException, UnsupportedKeyException, SQLException, InvalidConfigException { // Make sure the URI from the configuration file identifies an Oracle JDBC source. String uri = physicalSourceConfig.getUri(); if(!uri.startsWith("jdbc:oracle")) { throw new InvalidConfigException("Invalid source URI (" + physicalSourceConfig.getUri() + "). Only jdbc:oracle: URIs are supported."); } // Parse each one of the logical sources List<MonitoredSourceInfo> sources = new ArrayList<MonitoredSourceInfo>(); for(LogicalSourceStaticConfig sourceConfig : physicalSourceConfig.getSources()) { MonitoredSourceInfo source = buildOracleMonitoredSourceInfo(sourceConfig, physicalSourceConfig, schemaRegistryService); sources.add(source); } DataSource ds = null; try { ds = OracleJarUtils.createOracleDataSource(uri); } catch (Exception e) { String errMsg = "Oracle URI likely not supported. Trouble creating OracleDataSource"; _log.error(errMsg); throw new InvalidConfigException(errMsg + e.getMessage()); } // Create the event producer EventProducer eventProducer = new OracleEventProducer(sources, ds, dbusEventBuffer, true, dbusEventsStatisticsCollector, _maxScnReaderWriter, physicalSourceConfig, ManagementFactory.getPlatformMBeanServer()); _log.info("Created OracleEventProducer for config: " + physicalSourceConfig + " with slowSourceQueryThreshold = " + physicalSourceConfig.getSlowSourceQueryThreshold()); return eventProducer; } protected OracleAvroGenericEventFactory createEventFactory( String eventViewSchema, String eventView, LogicalSourceStaticConfig sourceConfig, PhysicalSourceStaticConfig pConfig, String eventSchema, PartitionFunction partitionFunction) throws EventCreationException, UnsupportedKeyException { return new OracleAvroGenericEventFactory(sourceConfig.getId(), (short)pConfig.getId(), eventSchema, partitionFunction); } public MonitoredSourceInfo buildOracleMonitoredSourceInfo( LogicalSourceStaticConfig sourceConfig, 
PhysicalSourceStaticConfig pConfig, SchemaRegistryService schemaRegistryService) throws DatabusException, EventCreationException, UnsupportedKeyException, InvalidConfigException { String schema = null; try { schema = schemaRegistryService.fetchLatestSchemaByType(sourceConfig.getName()); } catch (NoSuchSchemaException e) { throw new InvalidConfigException("Unable to load the schema for source (" + sourceConfig.getName() + ")."); } if(schema == null) { throw new InvalidConfigException("Unable to load the schema for source (" + sourceConfig.getName() + ")."); } _log.info("Loading schema for source id " + sourceConfig.getId() + ": " + schema); String eventViewSchema; String eventView; if(sourceConfig.getUri().indexOf('.') != -1) { String[] parts = sourceConfig.getUri().split("\\."); eventViewSchema = parts[0]; eventView = parts[1]; } else { eventViewSchema = null; eventView = sourceConfig.getUri(); } if(eventView.toLowerCase().startsWith("sy$")) { eventView = eventView.substring(3); } PartitionFunction partitionFunction = buildPartitionFunction(sourceConfig); EventFactory factory = createEventFactory(eventViewSchema, eventView, sourceConfig, pConfig, schema, partitionFunction); EventSourceStatistics statisticsBean = new EventSourceStatistics(sourceConfig.getName()); MonitoredSourceInfo sourceInfo = new MonitoredSourceInfo(sourceConfig.getId(), sourceConfig.getName(), eventViewSchema, eventView, factory, statisticsBean, sourceConfig.getRegularQueryHints(), sourceConfig.getChunkedTxnQueryHints(), sourceConfig.getChunkedScnQueryHints(), sourceConfig.isSkipInfinityScn()); return sourceInfo; } public PartitionFunction buildPartitionFunction(LogicalSourceStaticConfig sourceConfig) throws InvalidConfigException { String partitionFunction = sourceConfig.getPartitionFunction(); if(partitionFunction.startsWith("constant:")) { try { String numberPart = partitionFunction.substring("constant:".length()).trim(); short constantPartitionNumber = Short.valueOf(numberPart); return new 
ConstantPartitionFunction(constantPartitionNumber); } catch(Exception ex) { // Could be a NumberFormatException, IndexOutOfBoundsException or other exception when trying to parse the partition number. throw new InvalidConfigException("Invalid partition configuration (" + partitionFunction + "). " + "Could not parse the constant partition number."); } } else { throw new InvalidConfigException("Invalid partition configuration (" + partitionFunction + ")."); } } }
package org.geomajas.gwt.client.widget.attribute;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.geomajas.configuration.AssociationAttributeInfo;
import org.geomajas.configuration.AttributeInfo;
import org.geomajas.configuration.FeatureInfo;
import org.geomajas.configuration.PrimitiveAttributeInfo;
import org.geomajas.global.FutureApi;
import org.geomajas.gwt.client.map.layer.VectorLayer;
import org.geomajas.layer.feature.Attribute;
import org.geomajas.layer.feature.attribute.AssociationAttribute;
import org.geomajas.layer.feature.attribute.AssociationValue;
import org.geomajas.layer.feature.attribute.BooleanAttribute;
import org.geomajas.layer.feature.attribute.CurrencyAttribute;
import org.geomajas.layer.feature.attribute.DateAttribute;
import org.geomajas.layer.feature.attribute.DoubleAttribute;
import org.geomajas.layer.feature.attribute.FloatAttribute;
import org.geomajas.layer.feature.attribute.ImageUrlAttribute;
import org.geomajas.layer.feature.attribute.IntegerAttribute;
import org.geomajas.layer.feature.attribute.LongAttribute;
import org.geomajas.layer.feature.attribute.ManyToOneAttribute;
import org.geomajas.layer.feature.attribute.OneToManyAttribute;
import org.geomajas.layer.feature.attribute.PrimitiveAttribute;
import org.geomajas.layer.feature.attribute.ShortAttribute;
import org.geomajas.layer.feature.attribute.StringAttribute;
import org.geomajas.layer.feature.attribute.UrlAttribute;

import com.google.gwt.event.shared.GwtEvent;
import com.google.gwt.event.shared.HandlerManager;
import com.google.gwt.event.shared.HandlerRegistration;
import com.smartgwt.client.data.DataSource;
import com.smartgwt.client.data.DataSourceField;
import com.smartgwt.client.types.DSDataFormat;
import com.smartgwt.client.types.DSProtocol;
import com.smartgwt.client.widgets.Canvas;
import com.smartgwt.client.widgets.form.DynamicForm;
import com.smartgwt.client.widgets.form.events.ItemChangedEvent;
import com.smartgwt.client.widgets.form.events.ItemChangedHandler;
import com.smartgwt.client.widgets.form.fields.FormItem;
import com.smartgwt.client.widgets.form.fields.SelectItem;
import com.smartgwt.client.widgets.form.fields.events.ChangedEvent;
import com.smartgwt.client.widgets.form.fields.events.ChangedHandler;

/**
 * <p>
 * Default implementation of a {@link FeatureForm} based on a {@link DynamicForm}. The form is initialized by providing
 * a vector layer or the feature information of an association attribute. The implementation uses the
 * {@link AttributeFormFieldRegistry} to create the individual form items and fields. The
 * {@link #createField(AttributeInfo)} and {@link #createItem(AttributeInfo)} methods can be overridden to create custom
 * item and field implementations if necessary. The {@link #prepareForm(FormItemList, DataSource)} method can be
 * overridden to perform any additional actions on the form or form item list before the form is created. Attributes can
 * be excluded from the form by overriding the {@link #isIncluded(AttributeInfo)} method.
 * </p>
 * <p>
 * This attribute form definition is used internally in the <code>FeatureAtributeEditor</code> widget. A code example on
 * how to override the {@link #prepareForm(FormItemList, DataSource)} method is shown below:
 * </p>
 * <code>
 * <pre>
 * protected void prepareForm(List&lt;FormItem&gt; formItems, DataSource source) {
 *
 * 	getWidget().setGroupTitle("My Custom Attribute Form");
 * 	getWidget().setIsGroup(true);
 * 	getWidget().setNumCols(4);
 * 	getWidget().setWidth(450);
 * 	getWidget().setColWidths(100, 180, 20, 150);
 *
 * }
 * </pre>
 * </code>
 *
 * @author Pieter De Graef
 * @author Jan De Moerloose
 */
@FutureApi
public class DefaultFeatureForm implements FeatureForm<DynamicForm> {

	// Attribute name -> configuration, built once from featureInfo.
	private Map<String, AttributeInfo> attributeInfoMap = new HashMap<String, AttributeInfo>();

	// The underlying SmartGWT form widget.
	private DynamicForm formWidget;

	// Tracks the value passed to setDisabled (read-only attributes stay disabled).
	private boolean disabled;

	private FeatureInfo featureInfo;

	// Supplies possible values for association attributes.
	private AttributeProvider attributeProvider;

	// Dispatches ItemChangedEvents fired via fireEvent/addItemChangedHandler.
	private HandlerManager manager = new HandlerManager(this);

	// Constructors:

	/**
	 * Initialize the attribute form with the given layer. Note that this constructor will NOT define all the form
	 * items. This should still be done by some {@link FeatureFormFactory}. The reason for this is that different
	 * implementations of these factories may want different orders or layouts. They may even want to introduce extra
	 * form items.. who knows.
	 *
	 * @param vectorLayer The vector layer that should be presented in this form.
	 */
	public DefaultFeatureForm(VectorLayer vectorLayer) {
		this(vectorLayer.getLayerInfo().getFeatureInfo(), new DefaultAttributeProvider(vectorLayer.getLayerInfo()
				.getServerLayerId()));
	}

	/**
	 * Initialize the attribute form with the given feature info. Note that this constructor will NOT define all the
	 * form items. This should still be done by some {@link FeatureFormFactory}. The reason for this is that different
	 * implementations of these factories may want different orders or layouts. They may even want to introduce extra
	 * form items.. who knows.
	 *
	 * @param featureInfo The feature information that should be presented in this form.
	 * @param attributeProvider provider for association attribute values
	 */
	public DefaultFeatureForm(FeatureInfo featureInfo, AttributeProvider attributeProvider) {
		this.featureInfo = featureInfo;
		this.attributeProvider = attributeProvider;
		for (AttributeInfo info : featureInfo.getAttributes()) {
			attributeInfoMap.put(info.getName(), info);
		}
		// Anonymous subclass: force the custom client-only data protocol on
		// whatever data source gets attached.
		formWidget = new DynamicForm() {

			public void setDataSource(com.smartgwt.client.data.DataSource dataSource) {
				dataSource.setDataFormat(DSDataFormat.CUSTOM);
				dataSource.setDataProtocol(DSProtocol.CLIENTCUSTOM);
				dataSource.setClientOnly(false);
				super.setDataSource(dataSource);
			};
		};
		formWidget.setStyleName("featureForm");
		DataSource source = new DataSource();
		FormItemList formItems = new FormItemList();
		// One form item + one data source field per included attribute.
		for (AttributeInfo info : featureInfo.getAttributes()) {
			if (isIncluded(info)) {
				formItems.add(createItem(info));
				source.addField(createField(info));
			}
		}
		// Give subclasses a chance to tweak layout before the form is built.
		prepareForm(formItems, source);
		getWidget().setDataSource(source);
		getWidget().setFields(formItems.toArray());
	}

	/**
	 * Creates a form item for a specific attribute.
	 *
	 * @param info the attribute information.
	 * @return the form item
	 */
	protected FormItem createItem(AttributeInfo info) {
		return AttributeFormFieldRegistry.createFormItem(info, attributeProvider.createProvider(info.getName()));
	}

	/**
	 * Create a datasource field for a specific attribute.
	 *
	 * @param info the attribute information.
	 * @return the datasource field
	 */
	protected DataSourceField createField(AttributeInfo info) {
		return AttributeFormFieldRegistry.createDataSourceField(info);
	}

	/**
	 * Returns whether an attribute should be included in the form.
	 *
	 * @param info the attribute information
	 * @return true if included, false otherwise
	 */
	protected boolean isIncluded(AttributeInfo info) {
		return info.isIncludedInForm();
	}

	/**
	 * Override this method to make some additional modifications to the list of items and data source before the form
	 * is created. The default implementation does nothing.
	 *
	 * @param formItems list of items, with special insert operations
	 * @param source datasource
	 */
	protected void prepareForm(FormItemList formItems, DataSource source) {
	}

	// Public methods:

	/** @return the underlying SmartGWT form widget. */
	public DynamicForm getWidget() {
		return formWidget;
	}

	/**
	 * Enables/disables the editable items of this form. Non-editable
	 * attributes are always kept disabled regardless of the argument.
	 *
	 * @param disabled true to disable editing
	 */
	public void setDisabled(boolean disabled) {
		this.disabled = disabled;
		// Don't set disabled on the form, but on the individual items. This way it's easier to overwrite when creating
		// custom form items.
		for (AttributeInfo info : featureInfo.getAttributes()) {
			FormItem formItem = formWidget.getItem(info.getName());
			if (formItem != null) {
				if (info.isEditable()) {
					formItem.setDisabled(disabled);
				} else {
					formItem.setDisabled(true);
				}
			}
		}
	}

	/** @return the last value passed to {@link #setDisabled(boolean)}. */
	public boolean isDisabled() {
		return disabled;
	}

	/**
	 * Validates every field of the form.
	 * NOTE(review): SelectItems are deliberately skipped here — presumably a
	 * workaround for their validation behavior; confirm before changing.
	 *
	 * @return true if all validated fields pass
	 */
	public boolean validate() {
		boolean validate = true;
		for (FormItem item : formWidget.getFields()) {
			if (!(item instanceof SelectItem)) {
				validate = validate & item.validate();
			}
		}
		return validate;
	}

	/** Validates the form values without showing error markers. */
	public boolean silentValidate() {
		return formWidget.valuesAreValid(false);
	}

	/** Fires the given event on this form's internal handler manager. */
	public void fireEvent(GwtEvent<?> event) {
		manager.fireEvent(event);
	}

	/**
	 * Registers a handler that is notified when any form item changes. The
	 * handler is attached both to the internal handler manager and, as a
	 * ChangedHandler, to every individual form item.
	 *
	 * @param handler the handler to register
	 * @return a registration covering all individual registrations
	 */
	public HandlerRegistration addItemChangedHandler(ItemChangedHandler handler) {
		MultiHandlerRegistration registration = new MultiHandlerRegistration();
		// Due to custom made FormItems, we can't set the handler on the form anymore...
		final ItemChangedHandler itemChangedHandler = handler;
		registration.addRegistration(manager.addHandler(ItemChangedEvent.getType(), handler));
		for (final FormItem formItem : formWidget.getFields()) {
			ChangedHandler h = new ChangedHandler() {

				public void onChanged(ChangedEvent event) {
					itemChangedHandler.onItemChanged(new ItemChangedEvent(formItem.getJsObj()));
				}
			};
			registration.addRegistration(formItem.addChangedHandler(h));
		}
		return registration;
	}

	/** Copies every attribute of the association value into the form. */
	public void toForm(AssociationValue value) {
		for (Map.Entry<String, Attribute<?>> entry : value.getAllAttributes().entrySet()) {
			toForm(entry.getKey(), entry.getValue());
		}
	}

	/** Reads every attribute of the association value back from the form. */
	public void fromForm(AssociationValue value) {
		for (Map.Entry<String, Attribute<?>> entry : value.getAllAttributes().entrySet()) {
			fromForm(entry.getKey(), entry.getValue());
		}
	}

	/**
	 * Copies a single attribute value into the form item with the given name.
	 * Unknown or excluded attributes are ignored; a null primitive value
	 * disables the item instead of setting it. Fires a ChangedEvent on the
	 * item afterwards.
	 *
	 * @param name attribute name
	 * @param attribute the value to apply (may be null for primitives)
	 */
	public void toForm(String name, Attribute<?> attribute) {
		AttributeInfo info = attributeInfoMap.get(name);
		if (info == null || !isIncluded(info)) {
			return;
		}
		FormItem item = formWidget.getField(info.getName());
		if (info instanceof PrimitiveAttributeInfo) {
			PrimitiveAttribute<?> primitive = (PrimitiveAttribute<?>) attribute;
			if (attribute == null && item != null) {
				item.setDisabled(true);
			} else {
				// Dispatch on the primitive type to the matching setValue overload.
				switch (primitive.getType()) {
					case BOOLEAN:
						setValue(info.getName(), (BooleanAttribute) primitive);
						break;
					case SHORT:
						setValue(info.getName(), (ShortAttribute) primitive);
						break;
					case INTEGER:
						setValue(info.getName(), (IntegerAttribute) primitive);
						break;
					case LONG:
						setValue(info.getName(), (LongAttribute) primitive);
						break;
					case FLOAT:
						setValue(info.getName(), (FloatAttribute) primitive);
						break;
					case DOUBLE:
						setValue(info.getName(), (DoubleAttribute) primitive);
						break;
					case CURRENCY:
						setValue(info.getName(), (CurrencyAttribute) primitive);
						break;
					case STRING:
						setValue(info.getName(), (StringAttribute) primitive);
						break;
					case URL:
						setValue(info.getName(), (UrlAttribute) primitive);
						break;
					case IMGURL:
						setValue(info.getName(), (ImageUrlAttribute) primitive);
						break;
					case DATE:
						setValue(info.getName(), (DateAttribute) primitive);
						break;
				}
			}
		} else if (info instanceof AssociationAttributeInfo) {
			// NOTE(review): 'item' is dereferenced here without a null check,
			// unlike the primitive branch above — NPE if the field is missing.
			Object associationItem = item.getAttributeAsObject(AssociationItem.ASSOCIATION_ITEM_ATTRIBUTE_KEY);
			AssociationAttributeInfo associationInfo = (AssociationAttributeInfo) info;
			if (associationItem != null) {
				switch (associationInfo.getType()) {
					case MANY_TO_ONE:
						((ManyToOneItem<?>) associationItem).toItem((ManyToOneAttribute) attribute);
						break;
					case ONE_TO_MANY:
						((OneToManyItem<?>) associationItem).toItem((OneToManyAttribute) attribute);
						break;
				}
			}
		}
		if (item != null) {
			item.fireEvent(new ChangedEvent(item.getJsObj()));
		}
	}

	/**
	 * Reads a single form value back into the given attribute object.
	 * NOTE(review): unlike toForm, 'info' is not null-checked before
	 * isIncluded(info) — an unknown attribute name causes an NPE here.
	 *
	 * @param name attribute name
	 * @param attribute the attribute to fill; ignored when null
	 */
	public void fromForm(String name, Attribute<?> attribute) {
		AttributeInfo info = attributeInfoMap.get(name);
		if (attribute == null || !isIncluded(info)) {
			return;
		}
		if (info instanceof PrimitiveAttributeInfo) {
			PrimitiveAttribute<?> primitive = (PrimitiveAttribute<?>) attribute;
			// Dispatch on the primitive type to the matching getValue overload.
			switch (primitive.getType()) {
				case BOOLEAN:
					getValue(name, (BooleanAttribute) primitive);
					break;
				case SHORT:
					getValue(name, (ShortAttribute) primitive);
					break;
				case INTEGER:
					getValue(name, (IntegerAttribute) primitive);
					break;
				case LONG:
					getValue(name, (LongAttribute) primitive);
					break;
				case FLOAT:
					getValue(name, (FloatAttribute) primitive);
					break;
				case DOUBLE:
					getValue(name, (DoubleAttribute) primitive);
					break;
				case CURRENCY:
					getValue(name, (CurrencyAttribute) primitive);
					break;
				case STRING:
					getValue(name, (StringAttribute) primitive);
					break;
				case URL:
					getValue(name, (UrlAttribute) primitive);
					break;
				case IMGURL:
					getValue(name, (ImageUrlAttribute) primitive);
					break;
				case DATE:
					getValue(name, (DateAttribute) primitive);
					break;
			}
		} else {
			AssociationAttribute<?> association = (AssociationAttribute<?>) attribute;
			FormItem item = formWidget.getItem(name);
			Object associationItem = item.getAttributeAsObject(AssociationItem.ASSOCIATION_ITEM_ATTRIBUTE_KEY);
			switch (association.getType()) {
				case MANY_TO_ONE:
					((ManyToOneItem<?>) associationItem).fromItem((ManyToOneAttribute) attribute);
					break;
				case ONE_TO_MANY:
					((OneToManyItem<?>) associationItem).fromItem((OneToManyAttribute) attribute);
					break;
			}
		}
	}

	/** Clears all values on the form widget. */
	public void clear() {
		formWidget.clearValues();
	}

	// Protected methods setting values on the form:

	/** Apply a boolean attribute value on the form, with the given name. */
	protected void setValue(String name, BooleanAttribute attribute) {
		FormItem item = formWidget.getField(name);
		if (item != null) {
			item.setValue(attribute.getValue());
		}
	}

	/**
	 * Apply a short attribute value on the form, with the given name.
	 * NOTE(review): unlike its siblings this sets the value via the form,
	 * not via the (null-checked) item — confirm whether that is deliberate.
	 */
	protected void setValue(String name, ShortAttribute attribute) {
		formWidget.setValue(name, attribute.getValue());
	}

	/** Apply a integer attribute value on the form, with the given name. */
	protected void setValue(String name, IntegerAttribute attribute) {
		FormItem item = formWidget.getField(name);
		if (item != null) {
			item.setValue(attribute.getValue());
		}
	}

	/** Apply a long attribute value on the form, with the given name. */
	protected void setValue(String name, LongAttribute attribute) {
		FormItem item = formWidget.getField(name);
		if (item != null) {
			item.setValue(attribute.getValue());
		}
	}

	/** Apply a float attribute value on the form, with the given name. */
	protected void setValue(String name, FloatAttribute attribute) {
		FormItem item = formWidget.getField(name);
		if (item != null) {
			item.setValue(attribute.getValue());
		}
	}

	/** Apply a double attribute value on the form, with the given name.
*/ protected void setValue(String name, DoubleAttribute attribute) { FormItem item = formWidget.getField(name); if (item != null) { item.setValue(attribute.getValue()); } } /** Apply a currency attribute value on the form, with the given name. */ protected void setValue(String name, CurrencyAttribute attribute) { FormItem item = formWidget.getField(name); if (item != null) { item.setValue(attribute.getValue()); } } /** Apply a string attribute value on the form, with the given name. */ protected void setValue(String name, StringAttribute attribute) { FormItem item = formWidget.getField(name); if (item != null) { item.setValue(attribute.getValue()); } } /** Apply an URL attribute value on the form, with the given name. */ protected void setValue(String name, UrlAttribute attribute) { FormItem item = formWidget.getField(name); if (item != null) { item.setValue(attribute.getValue()); } } /** Apply an image attribute value on the form, with the given name. */ protected void setValue(String name, ImageUrlAttribute attribute) { FormItem item = formWidget.getField(name); if (item != null) { item.setValue(attribute.getValue()); } } /** Apply a date attribute value on the form, with the given name. */ protected void setValue(String name, DateAttribute attribute) { FormItem item = formWidget.getField(name); if (item != null) { item.setValue(attribute.getValue()); } } // Protected methods getting values from the form: /** Get a boolean value from the form, and place it in <code>attribute</code>. */ protected void getValue(String name, BooleanAttribute attribute) { attribute.setValue(toBoolean(formWidget.getValue(name))); } /** Get a short value from the form, and place it in <code>attribute</code>. */ protected void getValue(String name, ShortAttribute attribute) { attribute.setValue(toShort(formWidget.getValue(name))); } /** Get a integer value from the form, and place it in <code>attribute</code>. 
*/ protected void getValue(String name, IntegerAttribute attribute) { attribute.setValue(toInteger(formWidget.getValue(name))); } /** Get a long value from the form, and place it in <code>attribute</code>. */ protected void getValue(String name, LongAttribute attribute) { attribute.setValue(toLong(formWidget.getValue(name))); } /** Get a float value from the form, and place it in <code>attribute</code>. */ protected void getValue(String name, FloatAttribute attribute) { attribute.setValue(toFloat(formWidget.getValue(name))); } /** Get a double value from the form, and place it in <code>attribute</code>. */ protected void getValue(String name, DoubleAttribute attribute) { attribute.setValue(toDouble(formWidget.getValue(name))); } /** Get a currency value from the form, and place it in <code>attribute</code>. */ protected void getValue(String name, CurrencyAttribute attribute) { attribute.setValue((String) formWidget.getValue(name)); } /** Get a string value from the form, and place it in <code>attribute</code>. */ protected void getValue(String name, StringAttribute attribute) { attribute.setValue((String) formWidget.getValue(name)); } /** Get an URL value from the form, and place it in <code>attribute</code>. */ protected void getValue(String name, UrlAttribute attribute) { attribute.setValue((String) formWidget.getItem(name).getValue()); } /** Get an image value from the form, and place it in <code>attribute</code>. */ protected void getValue(String name, ImageUrlAttribute attribute) { attribute.setValue((String) formWidget.getValue(name)); } /** Get a date value from the form, and place it in <code>attribute</code>. 
*/ protected void getValue(String name, DateAttribute attribute) { attribute.setValue((Date) formWidget.getValue(name)); } protected FeatureInfo getFeatureInfo() { return featureInfo; } // Private methods for type casting: private Boolean toBoolean(Object value) { if (value == null) { return null; } else if (value instanceof Boolean) { return (Boolean) value; } else if (value instanceof String) { return Boolean.parseBoolean((String) value); } else if (value instanceof Short) { return (Short) value != 0; } else if (value instanceof Integer) { return (Integer) value != 0; } else if (value instanceof Long) { return (Long) value != 0; } else if (value instanceof Float) { return (Float) value != 0; } else if (value instanceof Double) { return (Double) value != 0; } return null; } private Float toFloat(Object value) { if (value == null) { return null; } else if (value instanceof Float) { return (Float) value; } else if (value instanceof String) { return Float.parseFloat((String) value); } else if (value instanceof Integer) { return ((Integer) value).floatValue(); } else if (value instanceof Short) { return ((Short) value).floatValue(); } else if (value instanceof Long) { return ((Long) value).floatValue(); } else if (value instanceof Double) { return ((Double) value).floatValue(); } return null; } private Double toDouble(Object value) { if (value == null) { return null; } else if (value instanceof Double) { return (Double) value; } else if (value instanceof String) { return Double.parseDouble((String) value); } else if (value instanceof Integer) { return ((Integer) value).doubleValue(); } else if (value instanceof Short) { return ((Short) value).doubleValue(); } else if (value instanceof Long) { return ((Long) value).doubleValue(); } else if (value instanceof Float) { return ((Float) value).doubleValue(); } return null; } private Short toShort(Object value) { if (value == null) { return null; } else if (value instanceof Short) { return (Short) value; } else if (value 
instanceof String) { return Short.parseShort((String) value); } else if (value instanceof Integer) { return ((Integer) value).shortValue(); } else if (value instanceof Long) { return ((Long) value).shortValue(); } else if (value instanceof Float) { return ((Float) value).shortValue(); } else if (value instanceof Double) { return ((Double) value).shortValue(); } return null; } private Integer toInteger(Object value) { if (value == null) { return null; } else if (value instanceof Integer) { return (Integer) value; } else if (value instanceof String) { return Integer.parseInt((String) value); } else if (value instanceof Short) { return ((Short) value).intValue(); } else if (value instanceof Long) { return ((Long) value).intValue(); } else if (value instanceof Float) { return ((Float) value).intValue(); } else if (value instanceof Double) { return ((Double) value).intValue(); } return null; } private Long toLong(Object value) { if (value == null) { return null; } else if (value instanceof Long) { return (Long) value; } else if (value instanceof String) { return Long.parseLong((String) value); } else if (value instanceof Short) { return ((Short) value).longValue(); } else if (value instanceof Integer) { return ((Integer) value).longValue(); } else if (value instanceof Float) { return ((Float) value).longValue(); } else if (value instanceof Double) { return ((Double) value).longValue(); } return null; } public Canvas getCanvas() { return getWidget(); } /** * A list wrapper that allows easy insertion of form items by name. * * @author Jan De Moerloose * */ public class FormItemList implements Iterable<FormItem> { private List<FormItem> list = new ArrayList<FormItem>(); public int size() { return list.size(); } public FormItem[] toArray() { return list.toArray(new FormItem[size()]); } public Iterator<FormItem> iterator() { return list.iterator(); } public boolean add(FormItem e) { return list.add(e); } public boolean addAll(Collection<? 
extends FormItem> c) { return list.addAll(c); } public boolean addAll(int index, Collection<? extends FormItem> c) { return list.addAll(index, c); } public boolean remove(Object o) { return list.remove(o); } public FormItem set(int index, FormItem element) { return list.set(index, element); } public void add(int index, FormItem element) { list.add(index, element); } public FormItem remove(int index) { return list.remove(index); } public int indexOf(String name) { Iterator<FormItem> it = list.iterator(); int i = 0; while (it.hasNext()) { if (it.next().getName().equals(name)) { return i; } i++; } return -1; } /** * Insert a form item before the item with the specified name. * * @param name name of the item before which to insert * @param newItem the item to insert */ public void insertBefore(String name, FormItem... newItem) { int index = indexOf(name); if (index >= 0) { addAll(index, Arrays.asList(newItem)); } } /** * Insert a form item after the item with the specified name. * * @param name name of the item after which to insert * @param newItem the item to insert */ public void insertAfter(String name, FormItem... newItem) { int index = indexOf(name); if (index >= 0) { addAll(index + 1, Arrays.asList(newItem)); } } } /** * Class that represents multiple registrations as one. * * @author Jan De Moerloose * */ class MultiHandlerRegistration implements HandlerRegistration { private List<HandlerRegistration> registrations = new ArrayList<HandlerRegistration>(); public void addRegistration(HandlerRegistration registration) { registrations.add(registration); } public void removeHandler() { for (HandlerRegistration registration : registrations) { registration.removeHandler(); } } } }
package mockit;

import java.util.*;
import javax.swing.*;

import static org.junit.Assert.*;
import org.junit.*;

/**
 * JUnit tests exercising JMockit's {@code @Mocked} annotation: method filters,
 * inverse filters, real-class names for interfaces, abstract-class mocking, and
 * static-initializer stubbing.
 *
 * NOTE(review): the anonymous Expectations blocks below are order-sensitive
 * (strict expectations) — code left byte-identical; comments only.
 */
public final class ExpectationsUsingMockedTest {

    interface Dependency {
        String doSomething(boolean b);
    }

    static class Collaborator {

        private int value;

        Collaborator() {}

        Collaborator(int value) {
            this.value = value;
        }

        void provideSomeService() {}

        int getValue() {
            return value;
        }

        @SuppressWarnings({"UnusedDeclaration"})
        final void simpleOperation(int a, String b, Date c) {}
    }

    public abstract static class AbstractBase {
        protected abstract boolean add(Integer i);
    }

    @NonStrict AbstractBase base;

    static final class DependencyImpl implements Dependency {
        public String doSomething(boolean b) {
            return "";
        }
    }

    // Only methods matching the regex "do.*" are mocked on this field.
    @Mocked("do.*") DependencyImpl mockDependency;

    @Test
    public void annotatedField() {
        new Expectations() {
            @Mocked private Collaborator mock;

            {
                new Collaborator().getValue();
            }
        };

        new Collaborator().getValue();
    }

    @Test
    public void annotatedMockFieldWithFilters() {
        new Expectations() {
            // Filters: int constructor, doInternal(), getValue/setValue, complexOperation.
            @Mocked({"(int)", "doInternal()", "[gs]etValue", "complexOperation(Object)"})
            Collaborator mock;

            {
                mock.getValue();
            }
        };

        // Calls the real method, not a mock.
        Collaborator collaborator = new Collaborator();
        collaborator.provideSomeService();

        // Calls the mock method.
        collaborator.getValue();
    }

    @Test
    public void annotatedMockFieldWithInverseFilters() {
        new Expectations() {
            // inverse = true: everything is mocked EXCEPT the listed members.
            @Mocked(
                inverse = true,
                methods = {"(int)", "simpleOperation(int, String, java.util.Date)", "setValue(long)"})
            Collaborator mock;

            {
                mock.provideSomeService();
            }
        };

        Collaborator collaborator = new Collaborator(2);
        collaborator.simpleOperation(1, "", null); // calls real method
        collaborator.provideSomeService(); // calls the mock
    }

    @Test(expected = IllegalArgumentException.class)
    public void annotatedFieldWithInvalidFilter() {
        new Expectations() {
            // Malformed filter (missing closing parenthesis) must be rejected.
            @Mocked("setValue(int") Collaborator mock;
        };
    }

    @Test
    public void annotatedParameter(@Mocked final List<Integer> mock) {
        new Expectations() {
            {
                mock.get(1);
            }
        };

        assertNull(mock.get(1));
    }

    @Test
    public void annotatedFieldAndParameter(@NonStrict final Dependency dependency1) {
        new Expectations() {
            @NonStrict private Dependency dependency2;

            {
                dependency1.doSomething(true);
                result = "1";
                dependency2.doSomething(false);
                result = "2";
            }
        };

        // Both mock fields share the same mocked interface type, so recordings
        // on either apply to dependency1.
        assertEquals("1", dependency1.doSomething(true));
        assertEquals("2", dependency1.doSomething(false));
    }

    @Test
    public void mockFinalFieldOfInterfaceTypeWithSpecifiedRealClassName() {
        new NonStrictExpectations() {
            @Mocked(realClassName = "mockit.ExpectationsUsingMockedTest$DependencyImpl")
            final Dependency mock = new DependencyImpl();

            {
                mock.doSomething(false);
            }
        };
    }

    @Test(expected = IllegalArgumentException.class)
    public void mockFinalFieldOfInterfaceTypeWithoutRealClassName() {
        new NonStrictExpectations() {
            // An interface-typed final mock field without realClassName is invalid.
            final Dependency mock = null;
        };
    }

    @Test
    public void mockFieldForAbstractClass() {
        new Expectations() {
            {
                base.add(1);
                result = true;
            }
        };

        assertFalse(base.add(0));
        assertTrue(base.add(1));
        assertFalse(base.add(2));
    }

    @Test
    public void partialMockingOfConcreteClassThatExcludesConstructors() {
        new Expectations() {
            {
                mockDependency.doSomething(anyBoolean);
                minTimes = 2;
            }
        };

        mockDependency.doSomething(true);
        mockDependency.doSomething(false);
        mockDependency.doSomething(true);
    }

    @Test
    public void mockNothingAndStubNoStaticInitializers(@Mocked("") JComponent container) {
        // With an empty filter nothing is mocked, so the real JLabel behavior holds.
        assertEquals("Test", new JLabel("Test").getText());
    }

    static class ClassWithStaticInitializer {
        static boolean initialized = true;

        static int initialized() {
            return initialized ? 1 : -1;
        }
    }

    @Test
    public void onlyStubOutStaticInitializers() {
        new Expectations() {
            @Mocked("<clinit>") final ClassWithStaticInitializer unused = null;
        };

        // Static initializer was stubbed, so "initialized" stays false.
        assertEquals(-1, ClassWithStaticInitializer.initialized());
    }

    static class AnotherClassWithStaticInitializer {
        static boolean initialized = true;

        static int initialized() {
            return initialized ? 1 : -1;
        }
    }

    @Test
    public void mockEverythingWithoutStubbingStaticInitializers() {
        new Expectations() {
            // inverse of <clinit>: all methods mocked, static initializer kept.
            @Mocked(methods = "<clinit>", inverse = true)
            final AnotherClassWithStaticInitializer unused = null;
        };

        // initialized() is mocked (returns default 0); the flag itself is real.
        assertEquals(0, AnotherClassWithStaticInitializer.initialized());
        assertTrue(AnotherClassWithStaticInitializer.initialized);
    }
}
package gov.nih.nci.calab.ui.core;

/**
 * This class initializes session and application scope data to prepopulate the drop-down lists required
 * in different view pages.
 *
 * @author pansu
 */

/* CVS $Id: InitSessionAction.java,v 1.31 2006-04-27 20:26:25 pansu Exp $ */

import gov.nih.nci.calab.dto.administration.AliquotBean;
import gov.nih.nci.calab.dto.administration.ContainerInfoBean;
import gov.nih.nci.calab.dto.security.SecurityBean;
import gov.nih.nci.calab.dto.workflow.ExecuteWorkflowBean;
import gov.nih.nci.calab.service.administration.ManageAliquotService;
import gov.nih.nci.calab.service.administration.ManageSampleService;
import gov.nih.nci.calab.service.common.LookupService;
import gov.nih.nci.calab.service.search.SearchSampleService;
import gov.nih.nci.calab.service.util.CalabConstants;
import gov.nih.nci.calab.service.util.StringUtils;
import gov.nih.nci.calab.service.util.file.HttpFileUploadSessionData;
import gov.nih.nci.calab.service.workflow.ExecuteWorkflowService;

import java.util.Date;
import java.util.List;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;

import org.apache.log4j.Logger;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.action.ActionMessage;
import org.apache.struts.action.ActionMessages;
import org.apache.struts.action.DynaActionForm;

public class InitSessionAction extends AbstractBaseAction {

    private static Logger logger = Logger.getLogger(InitSessionAction.class);

    /**
     * Dispatches on the requested forward page, clears stale session data, and
     * populates the session/application attributes that page's drop-downs need.
     *
     * @return the ActionForward for the requested page, the input forward on
     *         error, or null when a redirect was already sent on the response.
     */
    public ActionForward executeTask(ActionMapping mapping, ActionForm form,
            HttpServletRequest request, HttpServletResponse response)
            throws Exception {
        HttpSession session = request.getSession();
        ActionForward forward = null;
        String forwardPage = null;
        String urlPrefix = request.getContextPath();
        try {
            DynaActionForm theForm = (DynaActionForm) form;
            forwardPage = (String) theForm.get("forwardPage");

            // clean up session data
            clearSessionData(session, forwardPage);

            // get user and date information
            String creator = "";
            if (session.getAttribute("user") != null) {
                SecurityBean user = (SecurityBean) session.getAttribute("user");
                creator = user.getLoginId();
            }
            String creationDate = StringUtils.convertDateToString(new Date(),
                    CalabConstants.DATE_FORMAT);
            session.setAttribute("creator", creator);
            session.setAttribute("creationDate", creationDate);

            // retrieve from session first if available assuming these values
            // are not likely to change within the same session. If changed, the
            // session should be updated.
            LookupService lookupService = new LookupService();
            if (forwardPage.equals("useAliquot")) {
                setUseAliquotSession(session, lookupService);
            } else if (forwardPage.equals("createSample")) {
                setCreateSampleSession(session, lookupService);
            } else if (forwardPage.equals("createAliquot")) {
                setCreateAliquotSession(session, lookupService, urlPrefix);
            } else if (forwardPage.equals("searchWorkflow")) {
                setSearchWorkflowSession(session, lookupService);
            } else if (forwardPage.equals("searchSample")) {
                setSearchSampleSession(session, lookupService);
            } else if (forwardPage.equals("createRun")
                    || forwardPage.equals("createAssayRun")) {
                setCreateRunSession(session, lookupService);
            } else if (forwardPage.equals("workflowMessage")
                    || forwardPage.equals("fileUploadOption")
                    || forwardPage.equals("fileDownload")
                    || forwardPage.equals("fileMask")
                    || forwardPage.equals("fileMaskSetup")) {
                setWorkflowMessageSession(session);
            } else if (forwardPage.equals("uploadForward")) {
                // refresh tree view
                setWorkflowMessageSession(session);

                // read HttpFileUploadSessionData from session
                HttpFileUploadSessionData hFileUploadData = (HttpFileUploadSessionData) request
                        .getSession().getAttribute("httpFileUploadSessionData");

                // based on the type=in/out/upload and runId to modify the
                // forwardPage
                String type = hFileUploadData.getFromType();
                String runId = hFileUploadData.getRunId();
                String inout = hFileUploadData.getInout();
                session.removeAttribute("httpFileUploadSessionData");
                if (type.equalsIgnoreCase("in") || type.equalsIgnoreCase("out")) {
                    // cannot forward to a page with the request parameter, so
                    // use response
                    response.sendRedirect(urlPrefix + "/workflowForward.do?type="
                            + type + "&runId=" + runId + "&inout=" + inout);
                    forwardPage = null;
                } else if (type.equalsIgnoreCase("upload")) {
                    session.setAttribute("runId", runId);
                    forwardPage = "fileUploadOption";
                }
            }
            if (forwardPage == null) {
                // for response.sendRedirect(): the redirect was already issued
                forward = null;
            } else {
                forward = mapping.findForward(forwardPage);
            }
        } catch (Exception e) {
            ActionMessages errors = new ActionMessages();
            ActionMessage error = new ActionMessage("error.initSession",
                    forwardPage);
            errors.add("error", error);
            saveMessages(request, errors);
            logger.error(
                    "Caught exception loading initial drop-down lists data", e);
            forward = mapping.getInputForward();
        }
        return forward;
    }

    /** All pages served by this action require an authenticated user. */
    public boolean loginRequired() {
        return true;
    }

    /**
     * Set up session attributes for use aliquot page
     *
     * @param session
     * @param lookupService
     */
    private void setUseAliquotSession(HttpSession session,
            LookupService lookupService) throws Exception {
        if (session.getAttribute("allUnmaskedAliquots") == null
                || session.getAttribute("newAliquotCreated") != null) {
            List<AliquotBean> allAliquots = lookupService.getUnmaskedAliquots();
            session.setAttribute("allUnmaskedAliquots", allAliquots);
        }
        ExecuteWorkflowService executeWorkflowService = new ExecuteWorkflowService();
        if (session.getAttribute("workflow") == null
                || session.getAttribute("newWorkflowCreated") != null) {
            ExecuteWorkflowBean workflowBean = executeWorkflowService
                    .getExecuteWorkflowBean();
            session.setAttribute("workflow", workflowBean);
        }
        // clear the new aliquot created flag
        session.removeAttribute("newAliquotCreated");
        // clear the new workflow created flag.
        // BUGFIX: was "newWorkflowcreated" (lowercase 'c'), which never matched
        // the "newWorkflowCreated" attribute checked above and removed by the
        // sibling methods, so the flag was never cleared on this path.
        session.removeAttribute("newWorkflowCreated");
    }

    /**
     * Set up session and application attributes for create sample page
     *
     * @param session
     * @param lookupService
     */
    private void setCreateSampleSession(HttpSession session,
            LookupService lookupService) throws Exception {
        ManageSampleService mangeSampleService = new ManageSampleService();
        // if values don't exist in the database or if no new samples created.
        // call the service
        if (session.getAttribute("allSampleContainerTypes") == null
                || session.getAttribute("newSampleCreated") != null) {
            List containerTypes = lookupService.getAllSampleContainerTypes();
            session.setAttribute("allSampleContainerTypes", containerTypes);
        }
        if (session.getServletContext().getAttribute("allSampleTypes") == null) {
            List sampleTypes = lookupService.getAllSampleTypes();
            session.getServletContext().setAttribute("allSampleTypes",
                    sampleTypes);
        }
        if (session.getServletContext().getAttribute("allSampleSOPs") == null) {
            List sampleSOPs = mangeSampleService.getAllSampleSOPs();
            session.getServletContext().setAttribute("allSampleSOPs",
                    sampleSOPs);
        }
        if (session.getServletContext().getAttribute("sampleContainerInfo") == null) {
            ContainerInfoBean containerInfo = lookupService
                    .getSampleContainerInfo();
            session.getServletContext().setAttribute("sampleContainerInfo",
                    containerInfo);
        }
        // clear the new sample created flag
        session.removeAttribute("newSampleCreated");
    }

    /**
     * Set up session and application attributes for create aliquot page
     *
     * @param session
     * @param lookupService
     */
    private void setCreateAliquotSession(HttpSession session,
            LookupService lookupService, String urlPrefix) throws Exception {
        ManageAliquotService manageAliquotService = new ManageAliquotService();
        if (session.getAttribute("allSamples") == null
                || session.getAttribute("newSampleCreated") != null) {
            List samples = lookupService.getAllSamples();
            session.setAttribute("allSamples", samples);
        }
        if (session.getAttribute("allAliquotContainerTypes") == null
                || session.getAttribute("newAliquotCreated") != null) {
            List containerTypes = lookupService.getAllAliquotContainerTypes();
            session.setAttribute("allAliquotContainerTypes", containerTypes);
        }
        if (session.getAttribute("allUnmaskedAliquots") == null
                || session.getAttribute("newAliquotCreated") != null) {
            List aliquots = lookupService.getUnmaskedAliquots();
            session.setAttribute("allUnmaskedAliquots", aliquots);
        }
        if (session.getServletContext().getAttribute("aliquotContainerInfo") == null) {
            ContainerInfoBean containerInfo = lookupService
                    .getAliquotContainerInfo();
            session.getServletContext().setAttribute("aliquotContainerInfo",
                    containerInfo);
        }
        if (session.getServletContext().getAttribute("aliquotCreateMethods") == null) {
            List methods = manageAliquotService.getAliquotCreateMethods();
            session.getServletContext().setAttribute("aliquotCreateMethods",
                    methods);
        }
        // clear new aliquot created flag and new sample created flag
        session.removeAttribute("newAliquotCreated");
        session.removeAttribute("newSampleCreated");
    }

    /**
     * Set up session attributes for search workflow page
     *
     * @param session
     * @param lookupService
     */
    private void setSearchWorkflowSession(HttpSession session,
            LookupService lookupService) throws Exception {
        if (session.getServletContext().getAttribute("allAssayTypes") == null) {
            List assayTypes = lookupService.getAllAssayTypes();
            session.getServletContext().setAttribute("allAssayTypes",
                    assayTypes);
        }
        if (session.getServletContext().getAttribute("allUsernames") == null) {
            List allUsernames = lookupService.getAllUsernames();
            session.getServletContext().setAttribute("allUsernames",
                    allUsernames);
        }
    }

    /**
     * Set up session attributes for search sample page
     *
     * @param session
     * @param lookupService
     */
    private void setSearchSampleSession(HttpSession session,
            LookupService lookupService) throws Exception {
        SearchSampleService searchSampleService = new SearchSampleService();
        if (session.getServletContext().getAttribute("allSampleTypes") == null) {
            List sampleTypes = lookupService.getAllSampleTypes();
            session.getServletContext().setAttribute("allSampleTypes",
                    sampleTypes);
        }
        if (session.getAttribute("allSampleSources") == null
                || session.getAttribute("newSampleCreated") != null) {
            List sampleSources = searchSampleService.getAllSampleSources();
            session.setAttribute("allSampleSources", sampleSources);
        }
        if (session.getAttribute("allSourceSampleIds") == null
                || session.getAttribute("newSampleCreated") != null) {
            List sourceSampleIds = searchSampleService.getAllSourceSampleIds();
            session.setAttribute("allSourceSampleIds", sourceSampleIds);
        }
        if (session.getServletContext().getAttribute("allUsernames") == null) {
            List allUsernames = lookupService.getAllUsernames();
            session.getServletContext().setAttribute("allUsernames",
                    allUsernames);
        }
        if (session.getServletContext().getAttribute("sampleContainerInfo") == null) {
            ContainerInfoBean containerInfo = lookupService
                    .getSampleContainerInfo();
            session.getServletContext().setAttribute("sampleContainerInfo",
                    containerInfo);
        }
        if (session.getServletContext().getAttribute("aliquotContainerInfo") == null) {
            ContainerInfoBean containerInfo = lookupService
                    .getAliquotContainerInfo();
            session.getServletContext().setAttribute("aliquotContainerInfo",
                    containerInfo);
        }
        // clear the new sample created flag
        session.removeAttribute("newSampleCreated");
    }

    /**
     * Set up session attributes for create Run
     *
     * @param session
     * @param lookupService
     */
    private void setCreateRunSession(HttpSession session,
            LookupService lookupService) throws Exception {
        ExecuteWorkflowService executeWorkflowService = new ExecuteWorkflowService();
        if (session.getServletContext().getAttribute("allAssayTypes") == null) {
            List assayTypes = lookupService.getAllAssayTypes();
            session.getServletContext().setAttribute("allAssayTypes",
                    assayTypes);
        }
        if (session.getAttribute("workflow") == null
                || session.getAttribute("newWorkflowCreated") != null) {
            ExecuteWorkflowBean workflowBean = executeWorkflowService
                    .getExecuteWorkflowBean();
            session.setAttribute("workflow", workflowBean);
        }
        if (session.getAttribute("allUnmaskedAliquots") == null
                || session.getAttribute("newAliquotCreated") != null) {
            List aliquots = lookupService.getUnmaskedAliquots();
            session.setAttribute("allUnmaskedAliquots", aliquots);
        }
        if (session.getAttribute("allAssignedAliquots") == null) {
            List allAssignedAliquots = lookupService.getAllAssignedAliquots();
            session.setAttribute("allAssignedAliquots", allAssignedAliquots);
        }
        if (session.getServletContext().getAttribute("allAssayBeans") == null) {
            List allAssayBeans = lookupService.getAllAssayBeans();
            session.getServletContext().setAttribute("allAssayBeans",
                    allAssayBeans);
        }
        if (session.getServletContext().getAttribute("allUsernames") == null) {
            List allUsernames = lookupService.getAllUsernames();
            session.getServletContext().setAttribute("allUsernames",
                    allUsernames);
        }
        session.removeAttribute("newWorkflowCreated");
        session.removeAttribute("newAliquotCreated");
    }

    /** Refreshes the workflow bean when absent or when a new workflow was created. */
    private void setWorkflowMessageSession(HttpSession session)
            throws Exception {
        ExecuteWorkflowService executeWorkflowService = new ExecuteWorkflowService();
        if (session.getAttribute("workflow") == null
                || session.getAttribute("newWorkflowCreated") != null) {
            ExecuteWorkflowBean workflowBean = executeWorkflowService
                    .getExecuteWorkflowBean();
            session.setAttribute("workflow", workflowBean);
        }
        session.removeAttribute("newWorkflowCreated");
    }

    /** Removes session attributes that belong to pages other than the target page. */
    private void clearSessionData(HttpSession session, String forwardPage) {
        if (!forwardPage.equals("createAliquot")) {
            // clear session attributes created during create aliquot
            session.removeAttribute("aliquotMatrix");
            session.removeAttribute("allSamples");
            session.removeAttribute("allAliquotContainerTypes");
        }
        session.removeAttribute("createAliquotForm");
        session.removeAttribute("createSampleForm");
        if (!forwardPage.equals("createSample")) {
            // clear session attributes created during create sample
            session.removeAttribute("allSampleContainerTypes");
        }
        if (!forwardPage.equals("searchSample")) {
            // clear session attributes created during search sample
            session.removeAttribute("samples");
            session.removeAttribute("aliquots");
        }
        if (!forwardPage.equals("uploadForward")) {
            // clear session attributes created during fileUpload
            session.removeAttribute("httpFileUploadSessionData");
        }
        if (forwardPage.equals("createSample")
                || forwardPage.equals("createAliquot")
                || forwardPage.equals("searchSample")
                || forwardPage.equals("searchWorkflow")) {
            // clear session attributes created during execute workflow pages
            session.removeAttribute("workflow");
        }
    }
}
package gov.nih.nci.ncicb.cadsr.loader.ui;

import gov.nih.nci.ncicb.cadsr.loader.*;
import gov.nih.nci.ncicb.cadsr.loader.event.ReviewEvent;
import gov.nih.nci.ncicb.cadsr.loader.event.ReviewListener;
import gov.nih.nci.ncicb.cadsr.loader.parser.ElementWriter;
import gov.nih.nci.ncicb.cadsr.loader.ui.tree.*;
import gov.nih.nci.ncicb.cadsr.loader.ui.event.*;
import gov.nih.nci.ncicb.cadsr.loader.util.*;
import gov.nih.nci.ncicb.cadsr.loader.ui.util.*;
import gov.nih.nci.ncicb.cadsr.loader.validator.*;
import java.awt.Component;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.Toolkit;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.InputEvent;
import java.awt.event.KeyEvent;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeEvent;
import java.io.File;
import javax.swing.*;
import java.util.*;
import gov.nih.nci.ncicb.cadsr.domain.*;
import javax.swing.tree.DefaultMutableTreeNode;

/**
 * Main application window of the Semantic Integration Workbench.
 *
 * <p>Hosts the menu bar, a navigation tree (left), a tabbed area of element
 * view panels (top right) and an errors/log tab pane (bottom right). Reacts
 * to navigation events ({@link ViewChangeListener}), tab closes
 * ({@link CloseableTabbedPaneListener}) and "APPLY" property changes fired by
 * view panels ({@link PropertyChangeListener}).
 */
public class MainFrame extends JFrame
  implements ViewChangeListener, CloseableTabbedPaneListener, PropertyChangeListener {

  // Menu bar and its menus / items.
  private JMenuBar mainMenuBar = new JMenuBar();
  private JMenu fileMenu = new JMenu("File");
  private JMenuItem saveMenuItem = new JMenuItem("Save");
  private JMenuItem saveAsMenuItem = new JMenuItem("Save As");
  private JMenuItem exportErrorsMenuItem = new JMenuItem("Export");
  private JMenuItem exitMenuItem = new JMenuItem("Exit");
  private JMenu editMenu = new JMenu("Edit");
  private JMenuItem findMenuItem = new JMenuItem("Find");
  private JMenuItem prefMenuItem = new JMenuItem("Preferences");
  private JMenu elementMenu = new JMenu("Element");
  private JMenuItem applyMenuItem = new JMenuItem("Apply");
  private JMenuItem applyToAllMenuItem = new JMenuItem("Apply to All");
  private JMenu runMenu = new JMenu("Run");
  private JMenuItem validateMenuItem = new JMenuItem("Validate");
  private JMenuItem uploadMenuItem = new JMenuItem("Upload");
  private JMenuItem defaultsMenuItem = new JMenuItem("Defaults");
  private JMenu helpMenu = new JMenu("Help");
  private JMenuItem aboutMenuItem = new JMenuItem("About");
  private JMenuItem indexMenuItem = new JMenuItem("Index");
  // NOTE(review): semanticConnectorMenuItem is created but never added to any
  // menu in this class — confirm whether it is wired elsewhere or dead.
  private JMenuItem semanticConnectorMenuItem = new JMenuItem("Semantic Connector");

  // Layout: jSplitPane1 splits navigation (left) from jSplitPane2 (right),
  // which in turn splits the view tabs (top) from the errors/log tabs (bottom).
  private JSplitPane jSplitPane1 = new JSplitPane();
  private JSplitPane jSplitPane2 = new JSplitPane();
  private JTabbedPane jTabbedPane1 = new JTabbedPane();
  private CloseableTabbedPane viewTabbedPane = new CloseableTabbedPane();
  private JPanel jPanel1 = new JPanel();
  private NavigationPanel navigationPanel = new NavigationPanel();
  private ErrorPanel errorPanel = null;
  // Alias of this frame, used inside anonymous listeners as a parent/owner.
  private MainFrame _this = this;
  // Status line shown at the bottom of the frame.
  private JLabel infoLabel = new JLabel(" ");
  // Open element views, keyed by the node's full path (the panel's name).
  // NOTE(review): raw HashMap — consider new HashMap<String, UMLElementViewPanel>().
  private Map<String, UMLElementViewPanel> viewPanels = new HashMap();
  // Lazily created; only one association view tab exists at a time.
  private AssociationViewPanel associationViewPanel = null;
  private ReviewTracker reviewTracker = ReviewTracker.getInstance();
  // Curator vs. Reviewer mode, read from UserSelections at construction.
  private RunMode runMode = null;
  // Target path used by the Save action; updated by Save As.
  private String saveFilename = "";

  /**
   * Builds the UI and reads run mode / file name from {@link UserSelections}.
   * Any initialization failure is only printed to stderr.
   */
  public MainFrame()
  {
    try
    {
      jbInit();
      UserSelections selections = UserSelections.getInstance();
      runMode = (RunMode)(selections.getProperty("MODE"));
      saveFilename = (String)selections.getProperty("FILENAME");
    }
    catch(Exception e)
    {
      e.printStackTrace();
    }
  }

  /** Terminates the whole JVM; invoked by the File &gt; Exit menu item. */
  public void exit() {
    System.exit(0);
  }

  /**
   * Enables/disables the Apply menu items when a view panel fires an "APPLY"
   * property change (new value is a Boolean), and updates the status line.
   */
  public void propertyChange(PropertyChangeEvent evt) {
    if(evt.getPropertyName().equals("APPLY")) {
      applyMenuItem.setEnabled((Boolean)evt.getNewValue());
      applyToAllMenuItem.setEnabled((Boolean)evt.getNewValue());
      infoLabel.setText("Changes Applied");
    }
  }

  /**
   * One-shot UI wiring: builds menus, lays out the split panes and tab panes,
   * and attaches all menu action listeners. Called once from the constructor.
   *
   * @throws Exception propagated from component construction
   */
  private void jbInit() throws Exception {
    this.getContentPane().setLayout(new BorderLayout());
    this.setSize(new Dimension(830, 650));
    this.setJMenuBar(mainMenuBar);
    this.setTitle("Semantic Integration Workbench");

    jSplitPane2.setOrientation(JSplitPane.VERTICAL_SPLIT);
    jSplitPane1.setDividerLocation(160);
    jSplitPane2.setDividerLocation(400);

    fileMenu.add(saveMenuItem);
    fileMenu.add(saveAsMenuItem);
    fileMenu.addSeparator();
    // NOTE(review): findMenuItem is added both here and to editMenu below; a
    // Swing component has a single parent, so the later add reparents it and
    // it will not appear in the File menu — confirm which menu is intended.
    fileMenu.add(findMenuItem);
    fileMenu.add(exportErrorsMenuItem);
    fileMenu.addSeparator();
    fileMenu.add(exitMenuItem);
    mainMenuBar.add(fileMenu);

    editMenu.add(findMenuItem);
    editMenu.add(prefMenuItem);
    mainMenuBar.add(editMenu);

    // Apply items start disabled; enabled via the "APPLY" property change.
    applyMenuItem.setEnabled(false);
    applyToAllMenuItem.setEnabled(false);
    elementMenu.add(applyMenuItem);
    elementMenu.add(applyToAllMenuItem);
    mainMenuBar.add(elementMenu);

    runMenu.add(validateMenuItem);
    runMenu.add(uploadMenuItem);
    runMenu.addSeparator();
    runMenu.add(defaultsMenuItem);
    mainMenuBar.add(runMenu);

    helpMenu.add(indexMenuItem);
    helpMenu.addSeparator();
    helpMenu.add(aboutMenuItem);
    mainMenuBar.add(helpMenu);

    // Bottom tab pane: validation errors and (placeholder) log.
    errorPanel = new ErrorPanel(TreeBuilder.getInstance().getRootNode());
    jTabbedPane1.addTab("Errors", errorPanel);

    // Same icon for the normal/hover/pressed close states of view tabs.
    Icon closeIcon = new ImageIcon(Thread.currentThread().getContextClassLoader().getResource("close-tab.gif"));
    viewTabbedPane.setCloseIcons(closeIcon, closeIcon, closeIcon);
    viewTabbedPane.addCloseableTabbedPaneListener(this);

    jTabbedPane1.addTab("Log", new JPanel());

    jSplitPane2.add(jTabbedPane1, JSplitPane.BOTTOM);
    jSplitPane2.add(viewTabbedPane, JSplitPane.TOP);
    jSplitPane1.add(jSplitPane2, JSplitPane.RIGHT);
    jSplitPane1.add(navigationPanel, JSplitPane.LEFT);

    navigationPanel.addViewChangeListener(this);

    this.getContentPane().add(jSplitPane1, BorderLayout.CENTER);
    this.getContentPane().add(infoLabel, BorderLayout.SOUTH);

    exitMenuItem.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent event) {
          _this.exit();
        }
      });

    defaultsMenuItem.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent event) {
          new UmlDefaultsPanel().show();
        }
      });

    // Find: opens a search dialog wired to the navigation panel.
    findMenuItem.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent event) {
          SearchDialog sd = new SearchDialog(_this);
          UIUtil.putToCenter(sd);
          sd.addSearchListener(navigationPanel);
          sd.setVisible(true);
        }
      });
    // Ctrl-F (Cmd-F on macOS, via the platform menu shortcut mask).
    findMenuItem.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_F, Toolkit.getDefaultToolkit().getMenuShortcutKeyMask()));

    // Preference dialog is created once and re-shown on each use.
    final PreferenceDialog pd = new PreferenceDialog(_this);
    prefMenuItem.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent event) {
          UIUtil.putToCenter(pd);
          pd.setVisible(true);
        }
      });

    // Save: writes the element lists to the current file name. Not available
    // in Reviewer mode (shows a "not implemented" dialog instead).
    saveMenuItem.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent event) {
          if(runMode.equals(RunMode.Reviewer)) {
            JOptionPane.showMessageDialog(_this, "Sorry, Not Implemented Yet", "Not Implemented", JOptionPane.INFORMATION_MESSAGE);
            return;
          }
          ElementWriter writer = BeansAccessor.getWriter();
          writer.setOutput(saveFilename);
          writer.write(ElementsLists.getInstance());
          infoLabel.setText("File Saved");
        }
      });
    // Ctrl-S / Cmd-S.
    saveMenuItem.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_S, Toolkit.getDefaultToolkit().getMenuShortcutKeyMask()));

    // Save As: prompts for a target file, filtered by the mode's extension
    // (csv for Curator, xmi for Reviewer), writes, and remembers the path.
    saveAsMenuItem.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent event) {
          JFileChooser chooser = new JFileChooser();
          javax.swing.filechooser.FileFilter filter = new javax.swing.filechooser.FileFilter() {
              String fileExtension = null;
              {
                // Instance initializer: pick the extension from the run mode.
                if(runMode.equals(RunMode.Curator))
                  fileExtension = "csv";
                else if(runMode.equals(RunMode.Reviewer))
                  fileExtension = "xmi";
              }
              public boolean accept(File f) {
                if (f.isDirectory()) {
                  return true;
                }
                return f.getName().endsWith("." + fileExtension);
              }
              public String getDescription() {
                return fileExtension.toUpperCase() + " Files";
              }
            };
          chooser.setFileFilter(filter);
          int returnVal = chooser.showSaveDialog(null);
          if(returnVal == JFileChooser.APPROVE_OPTION) {
            String filePath = chooser.getSelectedFile().getAbsolutePath();
            // filePath = filePath + ".csv";
            ElementWriter writer = BeansAccessor.getWriter();
            writer.setOutput(filePath);
            saveFilename = filePath;
            writer.write(ElementsLists.getInstance());
            infoLabel.setText("File Saved");
          }
        }
      });

    exportErrorsMenuItem.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent event) {
          JOptionPane.showMessageDialog(_this, "Sorry, Not Implemented Yet", "Not Implemented", JOptionPane.INFORMATION_MESSAGE);
        }
      });

    // Validate: re-runs the UML validator and refreshes the error panel.
    // NOTE(review): still shows the "not implemented" dialog afterwards —
    // confirm whether that dialog is leftover and should be removed.
    validateMenuItem.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent event) {
          ValidationItems.getInstance().clear();
          Validator validator = new UMLValidator();
          validator.validate();
          errorPanel.update(TreeBuilder.getInstance().getRootNode());
          JOptionPane.showMessageDialog(_this, "Sorry, Not Implemented Yet", "Not Implemented", JOptionPane.INFORMATION_MESSAGE);
        }
      });

    uploadMenuItem.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent event) {
          JOptionPane.showMessageDialog(_this, "Sorry, Not Implemented Yet", "Not Implemented", JOptionPane.INFORMATION_MESSAGE);
        }
      });

    // Apply / Apply to All delegate to the currently selected view panel.
    applyMenuItem.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent evt) {
          UMLElementViewPanel viewPanel = (UMLElementViewPanel)viewTabbedPane
            .getSelectedComponent();
          viewPanel.apply(false);
        }
      });

    applyToAllMenuItem.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent evt) {
          UMLElementViewPanel viewPanel = (UMLElementViewPanel)viewTabbedPane
            .getSelectedComponent();
          viewPanel.apply(true);
        }
      });

    aboutMenuItem.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent evt) {
          new AboutPanel();
        }
      });
  }

  /**
   * Opens or refreshes a view for the node carried by the event.
   *
   * <p>VIEW_CONCEPTS: re-selects an existing tab for the node's full path,
   * otherwise opens a new tab (when requested, or when none exist yet) or
   * reuses the currently selected tab. VIEW_ASSOCIATION: lazily creates and
   * updates the single association tab.
   */
  public void viewChanged(ViewChangeEvent event) {
    if(event.getType() == ViewChangeEvent.VIEW_CONCEPTS) {
      UMLNode node = (UMLNode)event.getViewObject();
      // If concept is already showing, just bring it up front
      if(viewPanels.containsKey(node.getFullPath())) {
        UMLElementViewPanel pa = viewPanels.get(node.getFullPath());
        viewTabbedPane.setSelectedComponent(pa);
        return;
      }
      if((event.getInNewTab() == true) || (viewPanels.size() == 0)) {
        // Open a brand new tab and cross-wire it with the navigation panel.
        UMLElementViewPanel viewPanel = new UMLElementViewPanel(node);
        viewPanel.addPropertyChangeListener(this);
        viewPanel.addReviewListener(navigationPanel);
        viewPanel.addReviewListener(reviewTracker);
        viewPanel.addNavigationListener(navigationPanel);
        navigationPanel.addNavigationListener(viewPanel);
        // Attributes are shown as "Parent.attribute".
        String tabTitle = node.getDisplay();;
        if(node instanceof AttributeNode)
          tabTitle = node.getParent().getDisplay() + "." + tabTitle;
        viewTabbedPane.addTab(tabTitle, viewPanel);
        viewTabbedPane.setSelectedComponent(viewPanel);
        // The panel's name doubles as its key in viewPanels.
        viewPanel.setName(node.getFullPath());
        viewPanels.put(viewPanel.getName(), viewPanel);
        infoLabel.setText(tabTitle);
      } else {
        // Reuse the currently selected tab: re-key it to the new node.
        UMLElementViewPanel viewPanel = (UMLElementViewPanel)
          viewTabbedPane.getSelectedComponent();
        viewPanels.remove(viewPanel.getName());
        String tabTitle = node.getDisplay();;
        if(node instanceof AttributeNode)
          tabTitle = node.getParent().getDisplay() + "." + tabTitle;
        viewTabbedPane.setTitleAt(viewTabbedPane.getSelectedIndex(), tabTitle);
        infoLabel.setText(tabTitle);
        viewPanel.setName(node.getFullPath());
        viewPanel.updateNode(node);
        viewPanels.put(viewPanel.getName(), viewPanel);
      }
    } else if(event.getType() == ViewChangeEvent.VIEW_ASSOCIATION) {
      UMLNode node = (UMLNode)event.getViewObject();
      if(associationViewPanel == null) {
        // Single association tab, created on first use.
        associationViewPanel = new AssociationViewPanel((ObjectClassRelationship)node.getUserObject());
        viewTabbedPane.addTab("Association", associationViewPanel);
        associationViewPanel.setName("Association");
        infoLabel.setText("Association");
      } else
        associationViewPanel.update((ObjectClassRelationship)node.getUserObject());
      viewTabbedPane.setSelectedComponent(associationViewPanel);
    }
  }

  /**
   * Tab-close callback: drops the closed panel from the open-view registry.
   *
   * @param index index of the tab being closed
   * @return always {@code true} (the close is never vetoed)
   */
  public boolean closeTab(int index) {
    Component c = viewTabbedPane.getComponentAt(index);
    viewPanels.remove(c.getName());
    return true;
  }
}
package ibis.satin.impl.communication;

import ibis.ipl.AlreadyConnectedException;
import ibis.ipl.Ibis;
import ibis.ipl.IbisCapabilities;
import ibis.ipl.IbisFactory;
import ibis.ipl.IbisIdentifier;
import ibis.ipl.PortType;
import ibis.ipl.ReadMessage;
import ibis.ipl.ReceivePort;
import ibis.ipl.ReceivePortIdentifier;
import ibis.ipl.Registry;
import ibis.ipl.SendPort;
import ibis.ipl.SendPortIdentifier;
import ibis.ipl.WriteMessage;
import ibis.satin.impl.Config;
import ibis.satin.impl.Satin;
import ibis.satin.impl.Statistics;
import ibis.satin.impl.loadBalancing.Victim;
import ibis.satin.impl.sharedObjects.SharedObjects;
import java.io.IOException;
import java.net.InetAddress;

/**
 * Communication layer of the Satin runtime: owns the Ibis instance and the
 * receive port, and implements master election, broadcast, the closed-world
 * barrier, the two-stage exit protocol, and pause/resume of the pool.
 *
 * <p>Threading: cross-thread signalling uses volatile flags plus
 * {@code synchronized (s)} / {@code s.wait()} / {@code s.notifyAll()} on the
 * shared Satin object; upcall handlers set the flags, waiters poll them while
 * servicing {@code s.handleDelayedMessages()}.
 */
public final class Communication implements Config, Protocol {

    private Satin s;

    public PortType portType;

    public ReceivePort receivePort;

    // Set by handleExitStageTwoMessage; released in waitForExitStageTwo.
    private volatile boolean exitStageTwo = false;

    // Barrier bookkeeping (master counts requests, workers wait for a reply).
    private volatile int barrierRequests = 0;

    private volatile boolean gotBarrierReply = false;

    // Number of EXIT_REPLY acks received by the master.
    private volatile int exitReplies = 0;

    public Ibis ibis;

    public boolean paused = false;

    /**
     * Creates the Ibis instance and the "satin port" receive port. Any
     * failure to start Ibis is fatal and exits the JVM.
     *
     * @param s the owning Satin runtime
     */
    public Communication(Satin s) {
        this.s = s;

        IbisCapabilities ibisProperties = createIbisProperties();

        commLogger.debug("SATIN '" + "- " + "': init ibis");

        portType = createSatinPortType();

        try {
            ibis = IbisFactory.createIbis(ibisProperties, null, true,
                s.ft.getRegistryEventHandler(), portType,
                SharedObjects.getSOPortType());
        } catch (Exception e) {
            commLogger.fatal(
                "SATIN '" + "- " + "': Could not start ibis: " + e, e);
            System.exit(1); // Could not start ibis
        }

        IbisIdentifier ident = ibis.identifier();

        commLogger.debug("SATIN '" + "- " + "': init ibis DONE, "
            + "my cluster is '" + Victim.clusterOf(ident) + "'");

        try {
            MessageHandler messageHandler = new MessageHandler(s);

            receivePort = ibis.createReceivePort(portType, "satin port",
                messageHandler, s.ft.getReceivePortConnectHandler(), null);
        } catch (Exception e) {
            commLogger.fatal("SATIN '" + ident + "': Could not start ibis: "
                + e, e);
            System.exit(1); // Could not start ibis
        }

        if (CLOSED) {
            commLogger.info("SATIN '" + ident
                + "': running with closed world, "
                + ibis.registry().getPoolSize() + " host(s)");
        } else {
            commLogger.info("SATIN '" + ident + "': running with open world");
        }
    }

    /**
     * Elects (or looks up) the "satin master". If {@code MASTER_HOST} is set
     * and does not resolve to this host, this node only queries the election
     * result; otherwise it participates in the election. Election failure is
     * fatal.
     *
     * @return identifier of the elected master
     */
    public IbisIdentifier electMaster() {
        Registry r = ibis.registry();
        IbisIdentifier ident = ibis.identifier();
        IbisIdentifier masterIdent = null;

        String canonicalMasterHost = null;
        String localHostName = null;

        if (MASTER_HOST != null) {
            try {
                InetAddress a = InetAddress.getByName(MASTER_HOST);
                canonicalMasterHost = a.getCanonicalHostName();
            } catch (Exception e) {
                commLogger.warn("satin.masterhost is set to an unknown "
                    + "name: " + MASTER_HOST);
                commLogger.warn("continuing with default master election");
            }
            try {
                localHostName = InetAddress.getLocalHost()
                    .getCanonicalHostName();
            } catch (Exception e) {
                commLogger.warn("Could not get local hostname");
                canonicalMasterHost = null;
            }

            try {
                if (canonicalMasterHost == null
                    || !canonicalMasterHost.equals(localHostName)) {
                    // Not the designated host: just read the election result.
                    masterIdent = r.getElectionResult("satin master");
                } else {
                    masterIdent = r.elect("satin master");
                }
            } catch (Exception e) {
                commLogger.fatal("SATIN '" + ident
                    + "': Could not do an election for the master: " + e, e);
                System.exit(1); // Could not start ibis
            }
        } else {
            try {
                masterIdent = r.elect("satin master");
            } catch (Exception e) {
                commLogger.fatal("SATIN '" + ident
                    + "': Could not do an election for the master: " + e, e);
                System.exit(1); // Could not start ibis
            }
        }
        return masterIdent;
    }

    /** Starts accepting connections and upcalls on the receive port. */
    public void enableConnections() {
        receivePort.enableMessageUpcalls();
        receivePort.enableConnections();
    }

    /**
     * Builds the Ibis capabilities; closed-world runs additionally request
     * {@code CLOSEDWORLD}.
     */
    public IbisCapabilities createIbisProperties() {
        if (CLOSED) {
            return new IbisCapabilities(
                IbisCapabilities.CLOSEDWORLD,
                IbisCapabilities.MEMBERSHIP,
                IbisCapabilities.MEMBERSHIP_ORDERED,
                IbisCapabilities.MEMBERSHIP_RELIABLE,
                IbisCapabilities.ELECTIONS);
        }
        return new IbisCapabilities(
            IbisCapabilities.MEMBERSHIP,
            IbisCapabilities.MEMBERSHIP_ORDERED,
            IbisCapabilities.MEMBERSHIP_RELIABLE,
            IbisCapabilities.ELECTIONS);
    }

    /** Port type used for Satin control/work messages. */
    public PortType createSatinPortType() {
        return new PortType(
            PortType.SERIALIZATION_OBJECT,
            PortType.COMMUNICATION_RELIABLE,
            PortType.CONNECTION_MANY_TO_ONE,
            PortType.CONNECTION_UPCALLS,
            PortType.RECEIVE_EXPLICIT,
            PortType.RECEIVE_AUTO_UPCALLS);
    }

    /**
     * Sends a single-opcode message to every current victim. A send failure
     * is logged and the victim is reported to the registry as possibly dead;
     * the broadcast continues with the remaining victims.
     *
     * @param opcode protocol opcode to broadcast
     */
    public void bcastMessage(byte opcode) {
        Victim[] victims;

        synchronized (s) {
            // Snapshot under the lock; sends happen outside it.
            victims = s.victims.victims();
        }

        for (int i = 0; i < victims.length; i++) {
            WriteMessage writeMessage = null;
            Victim v = victims[i];
            commLogger.debug("SATIN '" + s.ident + "': sending "
                + opcodeToString(opcode) + " message to " + v.getIdent());
            try {
                writeMessage = v.newMessage();
                writeMessage.writeByte(opcode);
                v.finish(writeMessage);
            } catch (IOException e) {
                if (writeMessage != null) {
                    writeMessage.finish(e);
                }
                synchronized (s) {
                    ftLogger.info("SATIN '" + s.ident
                        + "': could not send bcast message to "
                        + v.getIdent(), e);
                    try {
                        ibis.registry().maybeDead(v.getIdent());
                    } catch (IOException e2) {
                        ftLogger.warn("SATIN '" + s.ident
                            + "': got exception in maybeDead", e2);
                    }
                }
            }
        }
    }

    /** Best-effort disconnect; an IOException is deliberately ignored. */
    public static void disconnect(SendPort s, ReceivePortIdentifier ident) {
        try {
            s.disconnect(ident);
        } catch (IOException e) {
            // ignored
        }
    }

    /**
     * Connects a send port to the named receive port on {@code ident},
     * retrying until success or until {@code timeoutMillis} elapses. When the
     * port reports it is already connected, the existing matching connection
     * is looked up and returned.
     *
     * @return the receive-port identifier, or {@code null} on timeout
     */
    public static ReceivePortIdentifier connect(SendPort s,
        IbisIdentifier ident, String name, long timeoutMillis) {
        // NOTE(review): this always prints to stderr and uses the *remote*
        // ident on both sides of "': connecting to " — looks like a leftover
        // debug line; confirm intent.
        System.err.println("SATIN '" + ident + "': connecting to " + ident);
        long startTime = System.currentTimeMillis();
        ReceivePortIdentifier r = null;
        do {
            try {
                r = s.connect(ident, name, timeoutMillis, false);
            } catch (AlreadyConnectedException x) {
                commLogger.info("SATIN '" + ident + "': already connected to "
                    + name + " at " + ident, x);
                ReceivePortIdentifier[] ports = s.connectedTo();
                for (int i = 0; i < ports.length; i++) {
                    commLogger.info("port " + i + " --> " + ports[i]);
                    if (ports[i].ibisIdentifier().equals(ident)
                        && ports[i].name().equals(name)) {
                        commLogger
                            .info("SATIN '" + ident
                                + "': the port was already connected, found it");
                        return ports[i];
                    }
                }
                commLogger
                    .info("SATIN '" + ident
                        + "': the port was already connected, but could not find it, retry!");
                // return null;
            } catch (IOException e) {
                commLogger.info(
                    "SATIN '" + ident + "': IOException in connect to "
                        + ident + ": " + e, e);
                try {
                    // Back off briefly before retrying.
                    Thread.sleep(500);
                } catch (InterruptedException e2) {
                    // ignore
                }
            }
        } while (r == null
            && System.currentTimeMillis() - startTime < timeoutMillis);
        if (r == null) {
            commLogger.info("SATIN '" + ident
                + "': could not connect port within given time ("
                + timeoutMillis + " ms)");
        }
        return r;
    }

    /* Only allowed when not stealing. And with a closed world */
    /**
     * Pool-wide barrier. The master waits (on {@code s}) until every victim
     * has sent BARRIER_REQUEST, then sends BARRIER_REPLY to each; workers
     * send BARRIER_REQUEST to the master and busy-poll delayed messages until
     * {@code gotBarrierReply} is set by the upcall handler.
     */
    private void barrier() {
        IbisIdentifier ident = ibis.identifier();
        commLogger.debug("SATIN '" + ident + "': barrier start");

        int size;
        synchronized (s) {
            size = s.victims.size();
        }

        try {
            if (s.isMaster()) {
                synchronized (s) {
                    while (barrierRequests != size) {
                        try {
                            s.wait();
                        } catch (Exception e) {
                            // ignore
                        }
                    }
                    // Reset for the next barrier round.
                    barrierRequests = 0;
                }

                for (int i = 0; i < size; i++) {
                    Victim v;
                    WriteMessage writeMessage = null;
                    synchronized (s) {
                        v = s.victims.getVictim(i);
                    }
                    if (v == null) {
                        commLogger.fatal("a machine crashed with closed world");
                        System.exit(1);
                    }

                    try {
                        writeMessage = v.newMessage();
                        writeMessage.writeByte(Protocol.BARRIER_REPLY);
                        v.finish(writeMessage);
                    } catch(IOException e) {
                        if (writeMessage != null) {
                            writeMessage.finish(e);
                        }
                        throw e;
                    }
                }
            } else {
                Victim v;
                synchronized (s) {
                    v = s.victims.getVictim(s.getMasterIdent());
                }
                if (v == null) {
                    commLogger.fatal("could not get master victim.");
                    System.exit(1);
                }

                WriteMessage writeMessage = null;
                try {
                    writeMessage = v.newMessage();
                    writeMessage.writeByte(Protocol.BARRIER_REQUEST);
                    writeMessage.finish();
                } catch(IOException e) {
                    if (writeMessage != null) {
                        writeMessage.finish(e);
                    }
                    throw e;
                }

                while (!gotBarrierReply/* && !exiting */) {
                    // Keep servicing messages so the reply can arrive.
                    s.handleDelayedMessages();
                }
                /*
                 * Immediately reset gotBarrierReply, we know that a reply has
                 * arrived.
                 */
                gotBarrierReply = false;
            }
        } catch (IOException e) {
            commLogger.warn("SATIN '" + ident + "': error in barrier", e);
        }

        commLogger.debug("SATIN '" + ident + "': barrier DONE");
    }

    /**
     * Master side of stage one of the exit protocol: waits until an
     * EXIT_REPLY ack has arrived from every victim, re-reading the victim
     * count each round (it can shrink on crashes).
     */
    public void waitForExitReplies() {
        int size;
        synchronized (s) {
            size = s.victims.size();
        }

        // wait until everybody has send an ACK
        synchronized (s) {
            while (exitReplies != size) {
                try {
                    s.handleDelayedMessages();
                    s.wait(250);
                } catch (Exception e) {
                    // Ignore.
                }
                size = s.victims.size();
            }
        }
    }

    /**
     * Worker side: sends EXIT_REPLY (with statistics when STATS is on) to the
     * master. A send failure is logged and the master is reported as possibly
     * dead; a missing master victim means it already crashed, and is ignored.
     */
    public void sendExitAck() {
        Victim v = null;
        WriteMessage writeMessage = null;

        synchronized (s) {
            v = s.victims.getVictim(s.getMasterIdent());
        }

        if (v == null) return; // node might have crashed

        try {
            commLogger.debug("SATIN '" + s.ident
                + "': sending exit ACK message to " + s.getMasterIdent());

            writeMessage = v.newMessage();
            writeMessage.writeByte(Protocol.EXIT_REPLY);
            if (STATS) {
                s.stats.fillInStats();
                writeMessage.writeObject(s.stats);
            }
            v.finish(writeMessage);
        } catch (IOException e) {
            if (writeMessage != null) {
                writeMessage.finish(e);
            }
            ftLogger.info("SATIN '" + s.ident
                + "': could not send exit message to " + s.getMasterIdent(),
                e);
            try {
                ibis.registry().maybeDead(s.getMasterIdent());
            } catch (IOException e2) {
                ftLogger.warn("SATIN '" + s.ident
                    + "': got exception in maybeDead", e2);
            }
        }
    }

    /**
     * Blocks (polling delayed messages every 250 ms) until the stage-two exit
     * message has been received.
     */
    public void waitForExitStageTwo() {
        synchronized (s) {
            while (!exitStageTwo) {
                try {
                    s.handleDelayedMessages();
                    s.wait(250);
                } catch (Exception e) {
                    // Ignore.
                }
            }
        }
    }

    /** Closes and removes all victim send ports, tolerating any error. */
    public void closeSendPorts() {
        // If not closed, free ports. Otherwise, ports will be freed in leave
        // calls.
        while (true) {
            try {
                Victim v;

                synchronized (s) {
                    if (s.victims.size() == 0) {
                        break;
                    }

                    v = s.victims.remove(0);

                    commLogger.debug("SATIN '" + s.ident
                        + "': closing sendport to " + v.getIdent());
                }

                if (v != null) {
                    v.close();
                }

            } catch (Throwable e) {
                commLogger.warn("SATIN '" + s.ident
                    + "': port.close() throws exception", e);
            }
        }
    }

    /** Closes the receive port, logging (not propagating) any error. */
    public void closeReceivePort() {
        try {
            receivePort.close();
        } catch (Throwable e) {
            commLogger.warn("SATIN '" + s.ident
                + "': port.close() throws exception", e);
        }
    }

    /** Shuts down the Ibis instance, logging (not propagating) any error. */
    public void end() {
        try {
            ibis.end();
        } catch (Throwable e) {
            commLogger.warn("SATIN '" + s.ident
                + "': ibis.end() throws exception", e);
        }
    }

    /**
     * Closed-world startup sync: waits until a victim exists for every other
     * pool member, then runs the {@link #barrier()}.
     */
    public void waitForAllNodes() {
        commLogger.debug("SATIN '" + s.ident + "': pre barrier");

        int poolSize = ibis.registry().getPoolSize();

        synchronized (s) {
            // All other members must have joined (self is not a victim).
            while (s.victims.size() != poolSize - 1) {
                try {
                    s.wait();
                } catch (InterruptedException e) {
                    // Ignore.
                }
            }
            commLogger.debug("SATIN '" + s.ident
                + "': barrier, everybody has joined");
        }

        barrier();

        commLogger.debug("SATIN '" + s.ident + "': post barrier");
    }

    /**
     * Upcall: counts an EXIT_REPLY ack, folding in the sender's statistics
     * when STATS is on, and wakes anyone in {@link #waitForExitReplies()}.
     */
    public void handleExitReply(ReadMessage m) {

        SendPortIdentifier ident = m.origin();

        commLogger.debug("SATIN '" + s.ident + "': got exit ACK message from "
            + ident.ibisIdentifier());

        if (STATS) {
            try {
                Statistics stats = (Statistics) m.readObject();
                s.totalStats.add(stats);
            } catch (Exception e) {
                commLogger.warn("SATIN '" + s.ident
                    + "': Got Exception while reading stats: " + e, e);
                // System.exit(1);
            }
        }

        try {
            m.finish();
        } catch (Exception e) {
            /* ignore */
        }

        synchronized (s) {
            exitReplies++;
            s.notifyAll();
        }
    }

    /** Upcall: master told us to exit — set the flag and wake waiters. */
    public void handleExitMessage(IbisIdentifier ident) {
        commLogger.debug("SATIN '" + s.ident + "': got exit message from "
            + ident);

        synchronized (s) {
            s.exiting = true;
            s.notifyAll();
        }
    }

    /** Upcall: stage-two exit — releases {@link #waitForExitStageTwo()}. */
    public void handleExitStageTwoMessage(IbisIdentifier ident) {
        commLogger.debug("SATIN '" + s.ident + "': got exit2 message from "
            + ident);

        synchronized (s) {
            exitStageTwo = true;
            s.notifyAll();
        }
    }

    /** Upcall on the master: one more worker reached the barrier. */
    public void handleBarrierRequestMessage() {
        synchronized (s) {
            barrierRequests++;
            s.notifyAll();
        }
    }

    /** Stops registry events (closed world keeps them) and FT upcalls. */
    public void disableUpcallsForExit() {
        if (!CLOSED) {
            ibis.registry().disableEvents();
        }
        s.ft.disableConnectionUpcalls();
    }

    /**
     * Upcall on a worker: barrier reply arrived. A duplicate reply while one
     * is already pending is a protocol violation and fatal under ASSERTS.
     */
    public void handleBarrierReply(IbisIdentifier sender) {
        commLogger.debug("SATIN '" + s.ident
            + "': got barrier reply message from " + sender);

        synchronized (s) {
            if (ASSERTS && gotBarrierReply) {
                commLogger.fatal("Got barrier reply while I already got "
                    + "one.");
                System.exit(1); // Failed assertion
            }
            gotBarrierReply = true;
            s.notifyAll();
        }
    }

    /**
     * Maps a protocol opcode to its symbolic name (for logging).
     *
     * @throws Error when the opcode is unknown
     */
    public static String opcodeToString(int opcode) {
        switch (opcode) {
        case EXIT:
            return "EXIT";
        case EXIT_REPLY:
            return "EXIT_REPLY";
        case BARRIER_REPLY:
            return "BARRIER_REPLY";
        case STEAL_REQUEST:
            return "STEAL_REQUEST";
        case STEAL_REPLY_FAILED:
            return "STEAL_REPLY_FAILED";
        case STEAL_REPLY_SUCCESS:
            return "STEAL_REPLY_SUCCESS";
        case ASYNC_STEAL_REQUEST:
            return "ASYNC_STEAL_REQUEST";
        case ASYNC_STEAL_REPLY_FAILED:
            return "ASYNC_STEAL_REPLY_FAILED";
        case ASYNC_STEAL_REPLY_SUCCESS:
            return "ASYNC_STEAL_REPLY_SUCCESS";
        case JOB_RESULT_NORMAL:
            return "JOB_RESULT_NORMAL";
        case JOB_RESULT_EXCEPTION:
            return "JOB_RESULT_EXCEPTION";
        case ABORT:
            return "ABORT";
        case BLOCKING_STEAL_REQUEST:
            return "BLOCKING_STEAL_REQUEST";
        case CRASH:
            return "CRASH";
        case ABORT_AND_STORE:
            return "ABORT_AND_STORE";
        case RESULT_REQUEST:
            return "RESULT_REQUEST";
        case STEAL_AND_TABLE_REQUEST:
            return "STEAL_AND_TABLE_REQUEST";
        case ASYNC_STEAL_AND_TABLE_REQUEST:
            return "ASYNC_STEAL_AND_TABLE_REQUEST";
        case STEAL_REPLY_FAILED_TABLE:
            return "STEAL_REPLY_FAILED_TABLE";
        case STEAL_REPLY_SUCCESS_TABLE:
            return "STEAL_REPLY_SUCCESS_TABLE";
        case ASYNC_STEAL_REPLY_FAILED_TABLE:
            return "ASYNC_STEAL_REPLY_FAILED_TABLE";
        case ASYNC_STEAL_REPLY_SUCCESS_TABLE:
            return "ASYNC_STEAL_REPLY_SUCCESS_TABLE";
        case RESULT_PUSH:
            return "RESULT_PUSH";
        case SO_INVOCATION:
            return "SO_INVOCATION";
        case SO_REQUEST:
            return "SO_REQUEST";
        case SO_TRANSFER:
            return "SO_TRANSFER";
        case EXIT_STAGE2:
            return "EXIT_STAGE2";
        case BARRIER_REQUEST:
            return "BARRIER_REQUEST";
        }

        throw new Error("unknown opcode in opcodeToString");
    }

    // FIXME send a resume after a crash
    /**
     * Broadcasts PAUSE to all victims and marks this node paused. Send
     * failures are logged and ignored (best effort).
     */
    public void pause() {
        paused = true;
        soBcastLogger.info("SATIN '" + s.ident + "': sending pause");
        Victim[] victims;
        synchronized (s) {
            victims = s.victims.victims();
        }

        for(int i=0; i<victims.length; i++) {
            try {
                WriteMessage m = victims[i].newMessage();
                m.writeByte(PAUSE);
                victims[i].finish(m);
            } catch (IOException e) {
                commLogger.warn("SATIN '" + s.ident
                    + "': could not send pause message: " + e);
                // ignore
            }
        }
    }

    /**
     * Broadcasts RESUME to all victims and clears the paused flag. Send
     * failures are logged and ignored (best effort).
     */
    public void resume() {
        soBcastLogger.info("SATIN '" + s.ident + "': sending resume");
        Victim[] victims;
        synchronized (s) {
            victims = s.victims.victims();
        }

        for(int i=0; i<victims.length; i++) {
            try {
                WriteMessage m = victims[i].newMessage();
                m.writeByte(RESUME);
                victims[i].finish(m);
            } catch (IOException e) {
                commLogger.info("SATIN '" + s.ident
                    + "': could not send pause message: " + e);
                // ignore
            }
        }
        paused = false;
    }

    /** Upcall: remote PAUSE received — set the flag and wake waiters. */
    void gotPause() {
        soBcastLogger.debug("SATIN '" + s.ident + "': got pause");
        synchronized (s) {
            paused = true;
            s.notifyAll();
        }
    }

    /** Upcall: remote RESUME received — clear the flag and wake waiters. */
    void gotResume() {
        soBcastLogger.debug("SATIN '" + s.ident + "': got resume");
        synchronized (s) {
            paused = false;
            s.notifyAll();
        }
    }
}
package org.openlca.app.editors.processes;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.stream.Collectors;

import org.eclipse.jface.action.Action;
import org.eclipse.jface.viewers.ArrayContentProvider;
import org.eclipse.jface.viewers.ColumnLabelProvider;
import org.eclipse.jface.viewers.ITableLabelProvider;
import org.eclipse.jface.viewers.ITreeContentProvider;
import org.eclipse.jface.viewers.TreeViewer;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.ui.forms.IManagedForm;
import org.eclipse.ui.forms.widgets.FormToolkit;
import org.eclipse.ui.forms.widgets.ScrolledForm;
import org.openlca.app.App;
import org.openlca.app.M;
import org.openlca.app.components.ContributionImage;
import org.openlca.app.db.Database;
import org.openlca.app.editors.ModelPage;
import org.openlca.app.rcp.images.Icon;
import org.openlca.app.rcp.images.Images;
import org.openlca.app.util.Actions;
import org.openlca.app.util.Controls;
import org.openlca.app.util.Labels;
import org.openlca.app.util.Numbers;
import org.openlca.app.util.UI;
import org.openlca.app.viewers.Viewers;
import org.openlca.app.viewers.combo.ImpactMethodViewer;
import org.openlca.app.viewers.trees.Trees;
import org.openlca.core.database.ImpactMethodDao;
import org.openlca.core.math.ReferenceAmount;
import org.openlca.core.matrix.FlowIndex;
import org.openlca.core.matrix.ImpactBuilder;
import org.openlca.core.matrix.ImpactIndex;
import org.openlca.core.matrix.IndexFlow;
import org.openlca.core.matrix.MatrixData;
import org.openlca.core.matrix.ParameterTable;
import org.openlca.core.matrix.ProcessProduct;
import org.openlca.core.matrix.TechIndex;
import org.openlca.core.matrix.format.JavaMatrix;
import org.openlca.core.matrix.format.MatrixBuilder;
import org.openlca.core.model.Exchange;
import org.openlca.core.model.FlowType;
import org.openlca.core.model.ModelType;
import org.openlca.core.model.Process;
import org.openlca.core.model.descriptors.Descriptor;
import org.openlca.core.model.descriptors.ImpactDescriptor;
import org.openlca.core.model.descriptors.ImpactMethodDescriptor;
import org.openlca.core.results.Contribution;
import org.openlca.core.results.ContributionResult;
import org.openlca.core.results.providers.EagerResultProvider;
import org.openlca.util.Strings;

/**
 * Editor page that shows a direct impact analysis of a single process: a
 * single-process LCIA result is computed over the process' elementary flow
 * exchanges and displayed as a tree of impact categories with their flow
 * contributions.
 */
class ImpactPage extends ModelPage<Process> {

	// selector for the impact assessment method
	private ImpactMethodViewer combo;
	// when checked, flow contributions with a value of 0 are hidden
	private Button zeroCheck;
	private TreeViewer tree;
	// cached result; null means "not computed yet" (or invalidated by Reload)
	private ContributionResult result;

	ImpactPage(ProcessEditor editor) {
		super(editor, "ProcessImpactPage", M.ImpactAnalysis);
	}

	@Override
	protected void createFormContent(IManagedForm mform) {
		ScrolledForm form = UI.formHeader(this);
		FormToolkit tk = mform.getToolkit();
		Composite body = UI.formBody(form, tk);
		Composite comp = tk.createComposite(body);
		UI.gridLayout(comp, 4);
		UI.formLabel(comp, tk, M.ImpactAssessmentMethod);
		combo = new ImpactMethodViewer(comp);
		// fill the method combo with all LCIA methods, sorted by name
		List<ImpactMethodDescriptor> list = new ImpactMethodDao(Database.get())
				.getDescriptors()
				.stream().sorted((m1, m2) -> Strings.compare(
						m1.name, m2.name))
				.collect(Collectors.toList());
		combo.setInput(list);
		combo.addSelectionChangedListener(this::setTreeInput);
		zeroCheck = tk.createButton(comp, M.ExcludeZeroValues, SWT.CHECK);
		zeroCheck.setSelection(true);
		Controls.onSelect(
				zeroCheck, e -> setTreeInput(combo.getSelected()));
		// Reload drops the cached result so it is recomputed on next selection
		Button reload = tk.createButton(comp, M.Reload, SWT.NONE);
		reload.setImage(Icon.REFRESH.get());
		Controls.onSelect(reload, _e -> {
			result = null;
			setTreeInput(combo.getSelected());
		});
		tree = Trees.createViewer(body, M.Name, M.Category, M.Amount, M.Result);
		UI.gridData(tree.getControl(), true, true);
		tree.setContentProvider(new Content());
		tree.setLabelProvider(new Label());
		Trees.bindColumnWidths(tree.getTree(), 0.35, 0.35, 0.15, 0.15);
		// right-align the numeric columns (amount, result)
		tree.getTree().getColumns()[2].setAlignment(SWT.RIGHT);
		tree.getTree().getColumns()[3].setAlignment(SWT.RIGHT);
		// double-click / context action: open the selected flow or impact category
		Action onOpen = Actions.onOpen(() -> {
			Contribution<?> c = Viewers.getFirstSelected(tree);
			if (c == null)
				return;
			if (c.item instanceof IndexFlow) {
				App.open(((IndexFlow) c.item).flow);
			}
			if (c.item instanceof ImpactDescriptor) {
				App.open((ImpactDescriptor) c.item);
			}
		});
		Actions.bind(tree, onOpen);
		Trees.onDoubleClick(tree, e -> onOpen.run());
		// preselect the first method so the page is not empty initially
		if (!list.isEmpty()) {
			ImpactMethodDescriptor m = list.get(0);
			combo.select(m);
			setTreeInput(m);
		}
		form.reflow(true);
	}

	/**
	 * Fills the tree with the LCIA category results of the given method. If no
	 * result has been computed yet, computation is scheduled and this method
	 * re-invokes itself once the result is available.
	 */
	private void setTreeInput(ImpactMethodDescriptor method) {
		if (tree == null)
			return;
		if (method == null) {
			tree.setInput(Collections.emptyList());
			return;
		}
		if (result == null) {
			// compute asynchronously, then recurse with the cached result
			App.runInUI("Compute LCIA results ...", () -> {
				result = compute();
				setTreeInput(method);
			});
			return;
		}
		if (!result.hasFlowResults()
				|| !result.hasImpactResults()) {
			tree.setInput(Collections.emptyList());
			return;
		}
		// one top-level contribution per impact category of the method
		List<Contribution<?>> cons = new ImpactMethodDao(Database.get())
				.getCategoryDescriptors(method.id)
				.stream()
				.sorted((d1, d2) -> Strings.compare(d1.name, d2.name))
				.map(d -> {
					var c = Contribution.of(d, result.getTotalImpactResult(d));
					c.unit = d.referenceUnit;
					return c;
				})
				.collect(Collectors.toList());
		tree.setInput(cons);
	}

	/**
	 * Computes a single-process contribution result: a 1x1 technosphere matrix
	 * with a demand of 1.0 and an intervention column built from the process'
	 * elementary flow exchanges.
	 */
	private ContributionResult compute() {
		var data = new MatrixData();

		// create a virtual demand of 1.0
		var refProduct = ProcessProduct.of(getModel());
		data.techIndex = new TechIndex(refProduct);
		data.techIndex.setDemand(1.0);
		data.techMatrix = JavaMatrix.of(
				new double[][] { { 1.0 } });

		// collect the elementary flow exchanges
		var elemFlows = new ArrayList<Exchange>();
		boolean regionalized = false;
		for (var e : getModel().exchanges) {
			if (e.flow == null
					|| e.flow.flowType != FlowType.ELEMENTARY_FLOW)
				continue;
			// any located exchange switches the whole index to regionalized mode
			if (e.location != null) {
				regionalized = true;
			}
			elemFlows.add(e);
		}
		if (elemFlows.isEmpty()) {
			// return an empty result if there are no elementary flows
			return new ContributionResult(
					EagerResultProvider.create(data));
		}

		// create the flow index and B matrix / vector
		data.flowIndex = regionalized
				? FlowIndex.createRegionalized()
				: FlowIndex.create();
		var enviBuilder = new MatrixBuilder();
		for (var e : elemFlows) {
			var flow = Descriptor.of(e.flow);
			var loc = e.location != null
					? Descriptor.of(e.location)
					: null;
			int i = e.isInput
					? data.flowIndex.add(IndexFlow.inputOf(flow, loc))
					: data.flowIndex.add(IndexFlow.outputOf(flow, loc))
;
			double amount = ReferenceAmount.get(e);
			// inputs are stored with a negative sign in the intervention matrix
			if (e.isInput && amount != 0) {
				amount = -amount;
			}
			enviBuilder.add(i, 0, amount);
		}
		data.flowMatrix = enviBuilder.finish();

		// build the impact index and matrix
		var db = Database.get();
		data.impactIndex = ImpactIndex.of(db);
		// parameter contexts: this process plus every impact category
		var contexts = new HashSet<Long>();
		contexts.add(getModel().id);
		data.impactIndex.each((i, d) -> contexts.add(d.id));
		var interpreter = ParameterTable.interpreter(
				db, contexts, Collections.emptySet());
		data.impactMatrix = ImpactBuilder.of(db, data.flowIndex)
				.withImpacts(data.impactIndex)
				.withInterpreter(interpreter)
				.build().impactMatrix;

		// create the result
		var provider = EagerResultProvider.create(data);
		var result = new ContributionResult(provider);
		return result;
	}

	/**
	 * Tree content: top level = impact category contributions; children = the
	 * per-flow contributions to that category (lazily computed and cached in
	 * {@code Contribution.childs}).
	 */
	private class Content extends ArrayContentProvider
			implements ITreeContentProvider {

		@Override
		public Object[] getChildren(Object obj) {
			if (!(obj instanceof Contribution))
				return null;
			Contribution<?> c = (Contribution<?>) obj;
			// children are computed once and cached on the contribution
			if (c.childs != null)
				return c.childs.toArray();
			if (!(c.item instanceof ImpactDescriptor))
				return null;
			var impact = (ImpactDescriptor) c.item;
			double total = result.getTotalImpactResult(impact);
			boolean withoutZeros = zeroCheck.getSelection();
			List<Contribution<?>> childs = new ArrayList<>();
			for (IndexFlow flow : result.getFlows()) {
				double value = result.getDirectFlowImpact(flow, impact);
				if (value == 0 && withoutZeros)
					continue;
				Contribution<?> child = Contribution.of(flow, value);
				child.computeShare(total);
				child.unit = impact.referenceUnit;
				childs.add(child);
			}
			// largest contribution first
			childs.sort((c1, c2) -> Double.compare(c2.amount, c1.amount));
			c.childs = childs;
			return childs.toArray();
		}

		@Override
		public Object getParent(Object elem) {
			return null;
		}

		@Override
		public boolean hasChildren(Object elem) {
			if (!(elem instanceof Contribution))
				return false;
			Contribution<?> c = (Contribution<?>) elem;
			if (c.childs != null)
				return true;
			// only impact category rows can be expanded
			return c.item instanceof ImpactDescriptor;
		}
	}

	/** Labels: name | category | amount (flows only) | result (+ share image). */
	private class Label extends ColumnLabelProvider
			implements ITableLabelProvider {

		private final ContributionImage img = new ContributionImage();

		@Override
		public void dispose() {
			img.dispose();
			super.dispose();
		}

		@Override
		public Image getColumnImage(Object obj, int col) {
			if (!(obj instanceof Contribution))
				return null;
			Contribution<?> c = (Contribution<?>) obj;
			if (col == 0) {
				return c.item instanceof ImpactDescriptor
						? Images.get(ModelType.IMPACT_CATEGORY)
						: Images.get(FlowType.ELEMENTARY_FLOW);
			}
			// contribution share bar in the result column for flow rows
			if (col == 3 && c.item instanceof IndexFlow)
				return img.getForTable(c.share);
			return null;
		}

		@Override
		public String getColumnText(Object obj, int col) {
			if (!(obj instanceof Contribution))
				return null;
			Contribution<?> c = (Contribution<?>) obj;
			switch (col) {
			case 0:
				if (c.item instanceof IndexFlow)
					return Labels.name((IndexFlow) c.item);
				if (c.item instanceof ImpactDescriptor)
					return Labels.name((ImpactDescriptor) c.item);
				return null;
			case 1:
				if (c.item instanceof IndexFlow)
					return Labels.category((IndexFlow) c.item);
				return null;
			case 2:
				// inventory amount only makes sense for flow rows
				if (!(c.item instanceof IndexFlow))
					return null;
				IndexFlow iFlow = (IndexFlow) c.item;
				double a = result.getTotalFlowResult(iFlow);
				return Numbers.format(a) + " " + Labels.refUnit(iFlow);
			case 3:
				return Strings.nullOrEmpty(c.unit)
						? Numbers.format(c.amount)
						: Numbers.format(c.amount) + " " + c.unit;
			default:
				return null;
			}
		}
	}
}
// samskivert library - useful routines for java programs
// This library is free software; you can redistribute it and/or modify it
// (at your option) any later version.
// This library is distributed in the hope that it will be useful,
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// You should have received a copy of the GNU Lesser General Public
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

package com.samskivert.jdbc.depot;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

import com.samskivert.jdbc.DatabaseLiaison;
import com.samskivert.jdbc.JDBCUtil;
import com.samskivert.jdbc.depot.clause.FieldOverride;
import com.samskivert.jdbc.depot.clause.QueryClause;
import com.samskivert.jdbc.depot.clause.SelectClause;
import com.samskivert.jdbc.depot.operator.Conditionals.*;

import static com.samskivert.jdbc.depot.Log.log;

/**
 * This class implements the functionality required by {@link DepotRepository#findAll}: fetch
 * a collection of persistent objects using one of two included strategies.
 */
public abstract class FindAllQuery<T extends PersistentRecord>
    implements Query<List<T>>
{
    /**
     * The two-pass collection query implementation: first select only primary keys, then
     * satisfy as many keys as possible from the cache and load the rest in a second query.
     * {@link DepotRepository#findAll} for details.
     */
    public static class WithCache<T extends PersistentRecord> extends FindAllQuery<T>
    {
        public WithCache (PersistenceContext ctx, Class<T> type,
                          Collection<? extends QueryClause> clauses)
            throws DatabaseException
        {
            super(ctx, type);

            // the key-based two-pass strategy cannot reconstruct computed records
            if (_marsh.getComputed() != null) {
                throw new IllegalArgumentException(
                    "This algorithm doesn't work on @Computed records.");
            }
            // field overrides would change the loaded rows, defeating key-based caching
            for (QueryClause clause : clauses) {
                if (clause instanceof FieldOverride) {
                    throw new IllegalArgumentException(
                        "This algorithm doesn't work with FieldOverrides.");
                }
            }

            // pass one selects only the primary key columns
            SelectClause<T> select = new SelectClause<T>(_type, _marsh.getPrimaryKeyFields(), clauses);
            _builder = _ctx.getSQLBuilder(DepotTypes.getDepotTypes(ctx, select));
            _builder.newQuery(select);
        }

        public List<T> invoke (Connection conn, DatabaseLiaison liaison)
            throws SQLException
        {
            // key -> record for cache hits; allKeys preserves the query's row order
            Map<Key<T>, T> entities = new HashMap<Key<T>, T>();
            List<Key<T>> allKeys = new ArrayList<Key<T>>();
            Set<Key<T>> fetchKeys = new HashSet<Key<T>>();
            PreparedStatement stmt = _builder.prepare(conn);
            try {
                ResultSet rs = stmt.executeQuery();
                while (rs.next()) {
                    Key<T> key = _marsh.makePrimaryKey(rs);
                    allKeys.add(key);
                    // TODO: All this cache fiddling needs to move to PersistenceContext?
                    CacheAdapter.CachedValue<T> hit = _ctx.cacheLookup(key);
                    if (hit != null) {
                        T value = hit.getValue();
                        if (value != null) {
                            // clone so callers cannot mutate the cached instance
                            @SuppressWarnings("unchecked") T newValue = (T) value.clone();
                            entities.put(key, newValue);
                            continue;
                        }
                    }
                    // cache miss: load it in the second pass
                    fetchKeys.add(key);
                }
            } finally {
                JDBCUtil.close(stmt);
            }

            return loadAndResolve(conn, allKeys, fetchKeys, entities);
        }
    }

    /**
     * The two-pass collection query implementation for an explicitly supplied set of keys
     * (pass one is skipped; the keys are already known). {@link DepotRepository#findAll}
     * for details.
     */
    public static class WithKeys<T extends PersistentRecord> extends FindAllQuery<T>
    {
        public WithKeys (PersistenceContext ctx, Collection<Key<T>> keys)
            throws DatabaseException
        {
            // NOTE(review): assumes keys is non-empty — an empty collection makes
            // keys.iterator().next() throw. Verify against callers.
            super(ctx, keys.iterator().next().getPersistentClass());
            _keys = keys;
            _builder = ctx.getSQLBuilder(new DepotTypes(ctx, _type));
        }

        public List<T> invoke (Connection conn, DatabaseLiaison liaison)
            throws SQLException
        {
            Map<Key<T>, T> entities = new HashMap<Key<T>, T>();
            Set<Key<T>> fetchKeys = new HashSet<Key<T>>();
            for (Key<T> key : _keys) {
                // TODO: All this cache fiddling needs to move to PersistenceContext?
                CacheAdapter.CachedValue<T> hit = _ctx.cacheLookup(key);
                if (hit != null) {
                    T value = hit.getValue();
                    if (value != null) {
                        // clone so callers cannot mutate the cached instance
                        @SuppressWarnings("unchecked") T newValue = (T) value.clone();
                        entities.put(key, newValue);
                        continue;
                    }
                }
                fetchKeys.add(key);
            }

            return loadAndResolve(conn, _keys, fetchKeys, entities);
        }

        protected Collection<Key<T>> _keys;
    }

    /**
     * The single-pass collection query implementation: one SELECT of all fields, no cache
     * consultation. {@link DepotRepository#findAll} for details.
     */
    public static class Explicitly<T extends PersistentRecord> extends FindAllQuery<T>
    {
        public Explicitly (PersistenceContext ctx, Class<T> type,
                           Collection<? extends QueryClause> clauses)
            throws DatabaseException
        {
            super(ctx, type);
            SelectClause<T> select = new SelectClause<T>(type, _marsh.getFieldNames(), clauses);
            _builder = ctx.getSQLBuilder(DepotTypes.getDepotTypes(ctx, select));
            _builder.newQuery(select);
        }

        public List<T> invoke (Connection conn, DatabaseLiaison liaison)
            throws SQLException
        {
            List<T> result = new ArrayList<T>();
            PreparedStatement stmt = _builder.prepare(conn);
            try {
                ResultSet rs = stmt.executeQuery();
                while (rs.next()) {
                    result.add(_marsh.createObject(rs));
                }
            } finally {
                JDBCUtil.close(stmt);
            }
            return result;
        }
    }

    public FindAllQuery (PersistenceContext ctx, Class<T> type)
        throws DatabaseException
    {
        _ctx = ctx;
        _type = type;
        _marsh = _ctx.getMarshaller(type);
    }

    // from Query
    public CacheKey getCacheKey ()
    {
        // collection queries are not themselves cached; individual records are
        return null;
    }

    // from Query
    public void updateCache (PersistenceContext ctx, List<T> result) {
        if (_marsh.hasPrimaryKey()) {
            for (T bit : result) {
                // store clones so later mutation of the results doesn't poison the cache
                ctx.cacheStore(_marsh.getPrimaryKey(bit), bit.clone());
            }
        }
    }

    // from Query
    public List<T> transformCacheHit (CacheKey key, List<T> bits)
    {
        if (bits == null) {
            return bits;
        }

        // hand out clones of the cached records, preserving null slots
        List<T> result = new ArrayList<T>();
        for (T bit : bits) {
            if (bit != null) {
                @SuppressWarnings("unchecked") T cbit = (T) bit.clone();
                result.add(cbit);
            } else {
                result.add(null);
            }
        }
        return result;
    }

    /**
     * Loads the records for fetchKeys (batching around the IN-clause key limit), merges
     * them into entities, and returns the records in allKeys order, skipping keys for
     * which no record was found.
     */
    protected List<T> loadAndResolve (Connection conn, Collection<Key<T>> allKeys,
                                      Set<Key<T>> fetchKeys, Map<Key<T>, T> entities)
        throws SQLException
    {
        // if we're fetching a huge number of records, we have to do it in multiple queries
        if (fetchKeys.size() > In.MAX_KEYS) {
            int keyCount = fetchKeys.size();
            do {
                // carve off up to MAX_KEYS keys per query, consuming fetchKeys as we go
                Set<Key<T>> keys = new HashSet<Key<T>>();
                Iterator<Key<T>> iter = fetchKeys.iterator();
                for (int ii = 0; ii < Math.min(keyCount, In.MAX_KEYS); ii++) {
                    keys.add(iter.next());
                    iter.remove();
                }
                keyCount -= keys.size();
                loadRecords(conn, keys, entities);
            } while (keyCount > 0);

        } else if (fetchKeys.size() > 0) {
            loadRecords(conn, fetchKeys, entities);
        }

        // reassemble in the original key order
        List<T> result = new ArrayList<T>();
        for (Key<T> key : allKeys) {
            T value = entities.get(key);
            if (value != null) {
                result.add(value);
            }
        }
        return result;
    }

    /**
     * Executes one full-field SELECT for the given keys and puts the loaded records into
     * entities, warning if the row count doesn't match the requested key count.
     */
    protected void loadRecords (Connection conn, Set<Key<T>> keys, Map<Key<T>, T> entities)
        throws SQLException
    {
        _builder.newQuery(new SelectClause<T>(_type, _marsh.getFieldNames(),
                                              new KeySet<T>(_type, keys)));
        PreparedStatement stmt = _builder.prepare(conn);
        try {
            ResultSet rs = stmt.executeQuery();
            int cnt = 0, dups = 0;
            while (rs.next()) {
                T obj = _marsh.createObject(rs);
                if (entities.put(_marsh.getPrimaryKey(obj), obj) != null) {
                    dups++;
                }
                cnt++;
            }
            if (cnt != keys.size()) {
                log.warning("Row count mismatch in second pass [query=" + stmt +
                            ", wanted=" + keys.size() + ", got=" + cnt +
                            ", dups=" + dups + "]");
            }
        } finally {
            JDBCUtil.close(stmt);
        }
    }

    protected PersistenceContext _ctx;
    protected SQLBuilder _builder;
    protected DepotMarshaller<T> _marsh;
    protected Class<T> _type;
}
// $Id: ResourceBundle.java,v 1.19 2003/07/14 22:18:18 ray Exp $

package com.threerings.resource;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Enumeration;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;

import com.samskivert.io.NestableIOException;
import com.samskivert.io.StreamUtil;
import com.samskivert.util.FileUtil;
import com.samskivert.util.StringUtil;

import org.apache.commons.io.StreamUtils;

/**
 * A resource bundle provides access to the resources in a jar file.
 */
public class ResourceBundle
{
    /**
     * Constructs a resource bundle with the supplied jar file.
     *
     * @param source a file object that references our source jar file.
     */
    public ResourceBundle (File source)
    {
        this(source, false, false);
    }

    /**
     * Constructs a resource bundle with the supplied jar file.
     *
     * @param source a file object that references our source jar file.
     * @param delay if true, the bundle will wait until someone calls
     * {@link #sourceIsReady} before allowing access to its resources.
     * @param unpack if true the bundle will unpack itself into a
     * temporary directory
     */
    public ResourceBundle (File source, boolean delay, boolean unpack)
    {
        _source = source;
        if (unpack) {
            // the unpack cache lives next to the jar: foo.jar -> foo/
            // with a foo.stamp file recording the jar's mod time
            String root = stripSuffix(source.getPath());
            _unpacked = new File(root + ".stamp");
            _cache = new File(root);
        }
        if (!delay) {
            sourceIsReady();
        }
    }

    /**
     * Returns the {@link File} from which resources are fetched for this
     * bundle.
     */
    public File getSource ()
    {
        return _source;
    }

    /**
     * @return true if the bundle is fully downloaded and successfully
     * unpacked.
     */
    public boolean isUnpacked ()
    {
        // the stamp file's mod time mirrors the jar's mod time when the
        // unpack completed successfully
        return (_source.exists() && (_unpacked != null) &&
                (_unpacked.lastModified() == _source.lastModified()));
    }

    /**
     * Called by the resource manager once it has ensured that our
     * resource jar file is up to date and ready for reading. If this
     * bundle was configured to unpack, the jar's contents are extracted
     * into the cache directory now (unless the stamp says they already
     * are).
     */
    public void sourceIsReady ()
    {
        // make a note of our source's last modification time
        _sourceLastMod = _source.lastModified();

        // if we are unpacking files, the time to do so is now
        if (_unpacked != null && _unpacked.lastModified() != _sourceLastMod) {
            boolean resolved = false;
            try {
                // resolveJarFile() returns true when the jar is NOT yet
                // available, hence the negation
                resolved = !resolveJarFile();
            } catch (IOException ioe) {
                Log.warning("Failure resolving jar file '" + _source +
                            "': " + ioe + ".");
            }
            if (!resolved) {
                String errmsg = "Source ready but failed to resolve jar " +
                    "[source=" + _source + "]";
                throw new IllegalStateException(errmsg);
            }

            Log.info("Unpacking into " + _cache + "...");
            if (!_cache.exists()) {
                if (!_cache.mkdir()) {
                    Log.warning("Failed to create bundle cache directory '" +
                                _cache + "'.");
                    // there's no way to recover from this; bail out
                    return;
                }
            } else {
                // clear out stale contents from a previous unpack
                FileUtil.recursiveClean(_cache);
            }

            boolean failure = false;
            Enumeration entries = _jarSource.entries();
            while (entries.hasMoreElements()) {
                JarEntry entry = (JarEntry)entries.nextElement();
                File efile = new File(_cache, entry.getName());

                // if we're unpacking a normal jar file, it will have
                // special path entries that allow us to create our
                // directories first
                if (entry.isDirectory()) {
                    if (!efile.exists() && !efile.mkdir()) {
                        Log.warning("Failed to create bundle entry path '" +
                                    efile + "'.");
                        failure = true;
                    }
                    continue;
                }

                // but some do not, so we want to ensure that our
                // directories exist prior to getting down and funky
                File parent = new File(efile.getParent());
                if (!parent.exists() && !parent.mkdirs()) {
                    Log.warning("Failed to create bundle entry parent '" +
                                parent + "'.");
                    failure = true;
                    continue;
                }

                BufferedOutputStream fout = null;
                InputStream jin = null;
                try {
                    fout = new BufferedOutputStream(
                        new FileOutputStream(efile));
                    jin = _jarSource.getInputStream(entry);
                    StreamUtils.pipe(jin, fout);
                } catch (IOException ioe) {
                    Log.warning("Failure unpacking " + efile + ": " + ioe);
                    failure = true;
                } finally {
                    StreamUtil.close(jin);
                    StreamUtil.close(fout);
                }
            }

            // if everything unpacked smoothly, create our unpack stamp
            if (!failure) {
                try {
                    _unpacked.createNewFile();
                    if (!_unpacked.setLastModified(_sourceLastMod)) {
                        Log.warning("Failed to set last mod on stamp file '" +
                                    _unpacked + "'.");
                    }
                } catch (IOException ioe) {
                    Log.warning("Failure creating stamp file '" + _unpacked +
                                "': " + ioe + ".");
                }
            }
        }
    }

    /**
     * Fetches the named resource from this bundle. The path should be
     * specified as a relative, platform independent path (forward
     * slashes). For example <code>sounds/scream.au</code>.
     *
     * @param path the path to the resource in this jar file.
     *
     * @return an input stream from which the resource can be loaded or
     * null if no such resource exists.
     *
     * @exception IOException thrown if an error occurs locating the
     * resource in the jar file.
     */
    public InputStream getResource (String path)
        throws IOException
    {
        // unpack our resources into a temp directory so that we can load
        // them quickly and the file system can cache them sensibly
        File rfile = getResourceFile(path);
        return (rfile == null) ? null : new FileInputStream(rfile);
    }

    /**
     * Returns a file from which the specified resource can be loaded.
     * This method will unpack the resource into a temporary directory and
     * return a reference to that file.
     *
     * @param path the path to the resource in this jar file.
     *
     * @return a file from which the resource can be loaded or null if no
     * such resource exists.
     */
    public File getResourceFile (String path)
        throws IOException
    {
        // true return means "not ready yet"
        if (resolveJarFile()) {
            return null;
        }

        // make sure said resource exists in the first place
        JarEntry entry = _jarSource.getJarEntry(path);
        if (entry == null) {
            // Log.info("Couldn't locate " + path + " in " + _jarSource + ".");
            return null;
        }

        // if we have been unpacked, return our unpacked file
        if (_cache != null) {
            return new File(_cache, path);
        }

        // otherwise, we unpack resources as needed into a temp directory
        String tpath = StringUtil.md5hex(_source.getPath() + "%" + path);
        File tfile = new File(getCacheDir(), tpath);
        if (tfile.exists() && (tfile.lastModified() > _sourceLastMod)) {
            return tfile;
        }

        // copy the resource into the temporary file; close both streams
        // even if the copy fails, so we don't leak file handles (this
        // mirrors the try/finally pattern used in sourceIsReady())
        BufferedOutputStream fout = null;
        InputStream jin = null;
        try {
            fout = new BufferedOutputStream(new FileOutputStream(tfile));
            jin = _jarSource.getInputStream(entry);
            StreamUtils.pipe(jin, fout);
        } finally {
            StreamUtil.close(jin);
            StreamUtil.close(fout);
        }

        return tfile;
    }

    /**
     * Returns true if this resource bundle contains the resource with the
     * specified path. This avoids actually loading the resource, in the
     * event that the caller only cares to know that the resource exists.
     */
    public boolean containsResource (String path)
    {
        try {
            if (resolveJarFile()) {
                return false;
            }
            return (_jarSource.getJarEntry(path) != null);
        } catch (IOException ioe) {
            return false;
        }
    }

    /**
     * Returns a string representation of this resource bundle.
     */
    public String toString ()
    {
        try {
            resolveJarFile();
            return (_jarSource == null) ? "[file=" + _source + "]" :
                "[path=" + _jarSource.getName() +
                ", entries=" + _jarSource.size() + "]";

        } catch (IOException ioe) {
            return "[file=" + _source + ", ioe=" + ioe + "]";
        }
    }

    /**
     * Creates the internal jar file reference if we've not already got
     * it; we do this lazily so as to avoid any jar- or zip-file-related
     * antics until and unless doing so is required, and because the
     * resource manager would like to be able to create bundles before the
     * associated files have been fully downloaded.
     *
     * @return true if the jar file could not yet be resolved because we
     * haven't yet heard from the resource manager that it is ready for us
     * to access, false if all is cool.
     */
    protected boolean resolveJarFile ()
        throws IOException
    {
        // if we don't yet have our resource bundle's last mod time, we
        // have not yet been notified that it is ready
        if (_sourceLastMod == -1) {
            return true;
        }

        try {
            if (_jarSource == null) {
                _jarSource = new JarFile(_source);
            }
            return false;

        } catch (IOException ioe) {
            Log.warning("Failure creating jar file '" + _source + "'.");
            Log.logStackTrace(ioe);
            throw new NestableIOException(
                "Failed to resolve resource bundle jar file '" +
                _source + "'", ioe);
        }
    }

    /**
     * Returns the cache directory used for unpacked resources.
     */
    public static File getCacheDir ()
    {
        if (_tmpdir == null) {
            String tmpdir = System.getProperty("java.io.tmpdir");
            if (tmpdir == null) {
                Log.info("No system defined temp directory. Faking it.");
                tmpdir = System.getProperty("user.home");
            }
            setCacheDir(new File(tmpdir, ".narcache"));
        }
        return _tmpdir;
    }

    /**
     * Specifies the directory in which our temporary resource files
     * should be stored.
     */
    public static void setCacheDir (File tmpdir)
    {
        // use a random subdirectory so concurrent VMs don't collide
        String rando = Long.toHexString((long)(Math.random() * Long.MAX_VALUE));
        _tmpdir = new File(tmpdir, rando);
        if (!_tmpdir.exists()) {
            Log.info("Creating narya temp cache directory '" + _tmpdir + "'.");
            _tmpdir.mkdirs();
        }

        // add a hook to blow away the temp directory when we exit
        Runtime.getRuntime().addShutdownHook(new Thread() {
            public void run () {
                Log.info("Clearing narya temp cache '" + _tmpdir + "'.");
                FileUtil.recursiveDelete(_tmpdir);
            }
        });
    }

    /** Strips the .jar off of jar file paths. */
    protected static String stripSuffix (String path)
    {
        if (path.endsWith(".jar")) {
            return path.substring(0, path.length()-4);
        } else {
            // we have to change the path somehow
            return path + "-cache";
        }
    }

    /** The file from which we construct our jar file. */
    protected File _source;

    /** The last modified time of our source jar file. */
    protected long _sourceLastMod = -1;

    /** A file whose timestamp indicates whether or not our existing jar
     * file has been unpacked. */
    protected File _unpacked;

    /** A directory into which we unpack files from our bundle. */
    protected File _cache;

    /** The jar file from which we load resources. */
    protected JarFile _jarSource;

    /** A directory in which we temporarily unpack our resource files. */
    protected static File _tmpdir;
}
package org.apache.commons.dbcp;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.List;

/**
 * Tracks creation and last-use information for pooled DBCP objects so that
 * abandoned objects can be detected and, when logAbandoned is enabled,
 * reported with the stack trace of the code that created them. Instances
 * form a parent/child tree: children register themselves with their parent
 * and last-use updates are delegated to the root.
 */
public class AbandonedTrace {

    // Message printed before the creating stack trace. SimpleDateFormat is
    // NOT thread-safe, so every use of this shared instance must be
    // synchronized on it.
    private static SimpleDateFormat format = new SimpleDateFormat
        ("'DBCP object created' yyyy-MM-dd HH:mm:ss " +
         "'by the following code was never closed:'");

    // DBCP AbandonedConfig
    private AbandonedConfig config = null;
    // Parent object (null for the root of a trace tree)
    private AbandonedTrace parent;
    // A stack trace of the code that created me (if in debug mode)
    private Exception createdBy;
    private Date createdDate;
    // Child objects being traced by this object
    private List trace = new ArrayList();
    // Last time this connection was used
    private long lastUsed = 0;

    /**
     * Create a new AbandonedTrace without config and
     * without doing abandoned tracing.
     */
    public AbandonedTrace() {
        init(null);
    }

    /**
     * Construct a new AbandonedTrace with no parent object.
     *
     * @param config the abandoned configuration to use
     */
    public AbandonedTrace(AbandonedConfig config) {
        this.config = config;
        init(null);
    }

    /**
     * Construct a new AbandonedTrace with a parent object.
     *
     * @param parent AbandonedTrace parent object; its config is inherited
     */
    public AbandonedTrace(AbandonedTrace parent) {
        this.config = parent.getConfig();
        init(parent);
    }

    /**
     * Initialize abandoned tracing for this object.
     *
     * @param parent AbandonedTrace parent object, or null for a root
     */
    private void init(AbandonedTrace parent) {
        // FIX: remember the parent. Previously this field was never
        // assigned, so getLastUsed()/setLastUsed() never actually
        // delegated to the parent as the rest of this class intends.
        this.parent = parent;
        if (parent != null) {
            parent.addTrace(this);
        }

        if (config == null) {
            return;
        }
        if (config.getLogAbandoned()) {
            createdBy = new Exception();
            createdDate = new Date();
        }
    }

    /**
     * Get the abandoned config for this object.
     *
     * @return AbandonedConfig for this object
     */
    protected AbandonedConfig getConfig() {
        return config;
    }

    /**
     * Get the last time this object was used in ms. Delegates to the
     * parent when one exists, so the whole trace tree shares one value.
     *
     * @return long time in ms
     */
    protected long getLastUsed() {
        if (parent != null) {
            return parent.getLastUsed();
        }
        return lastUsed;
    }

    /**
     * Set the time this object was last used to the
     * current time in ms.
     */
    protected void setLastUsed() {
        if (parent != null) {
            parent.setLastUsed();
        } else {
            lastUsed = new Date().getTime();
        }
    }

    /**
     * Set the time in ms this object was last used.
     *
     * @param time time in ms
     */
    protected void setLastUsed(long time) {
        if (parent != null) {
            parent.setLastUsed(time);
        } else {
            lastUsed = time;
        }
    }

    /**
     * If logAbandoned=true generate a stack trace
     * for this object then add this object to the parent
     * object trace list.
     */
    protected void setStackTrace() {
        if (config == null) {
            return;
        }
        if (config.getLogAbandoned()) {
            createdBy = new Exception();
            createdDate = new Date();
        }
        if (parent != null) {
            parent.addTrace(this);
        }
    }

    /**
     * Add an object to the list of objects being
     * traced.
     *
     * @param trace AbandonedTrace object to add
     */
    protected synchronized void addTrace(AbandonedTrace trace) {
        this.trace.add(trace);
        setLastUsed();
    }

    /**
     * Clear the list of objects being traced by this
     * object.
     */
    protected synchronized void clearTrace() {
        if (trace != null) {
            trace.clear();
        }
    }

    /**
     * Get a list of objects being traced by this object.
     *
     * @return List of objects
     */
    protected List getTrace() {
        return trace;
    }

    /**
     * If logAbandoned=true, print a stack trace of the code that
     * created this object, then recurse into all traced children.
     */
    public synchronized void printStackTrace() {
        if (createdBy != null) {
            // the shared SimpleDateFormat is not thread-safe across
            // instances; serialize access to it
            String header;
            synchronized (format) {
                header = format.format(createdDate);
            }
            System.err.println(header);
            createdBy.printStackTrace();
        }
        Iterator it = trace.iterator();
        while (it.hasNext()) {
            AbandonedTrace at = (AbandonedTrace)it.next();
            at.printStackTrace();
        }
    }

    /**
     * Remove a child object this object is tracing.
     *
     * @param trace AbandonedTrace object to remove
     */
    protected synchronized void removeTrace(AbandonedTrace trace) {
        if (this.trace != null) {
            this.trace.remove(trace);
        }
    }
}
package org.concord.otrunk.util;

import org.concord.framework.otrunk.OTObject;
import org.concord.framework.otrunk.OTObjectInterface;

/**
 * A single entry in an OTrunk lab book. Each entry carries the stored object,
 * the object it was originally derived from, the container it belongs to, and
 * string-typed metadata (timestamp, note, type). Implementations are generated
 * by the OTrunk framework from these bean-style accessor pairs.
 */
public interface OTLabbookEntry extends OTObjectInterface
{
	/** Returns the object stored by this entry. */
	public OTObject getOTObject();

	/** Sets the object stored by this entry. */
	public void setOTObject(OTObject object);

	/** Returns the object this entry was originally created from. */
	public OTObject getOriginalObject();

	/** Sets the object this entry was originally created from. */
	public void setOriginalObject(OTObject originalObject);

	/** Returns the container this entry belongs to. */
	public OTObject getContainer();

	/** Sets the container this entry belongs to. */
	public void setContainer(OTObject container);

	// NOTE(review): the timestamp is stored as a plain string; the expected
	// format is not visible here — confirm with writers of this field.
	/** Returns the entry's timestamp string. */
	public String getTimeStamp();

	/** Sets the entry's timestamp string. */
	public void setTimeStamp(String timeStamp);

	/** Returns the free-form note attached to this entry. */
	public String getNote();

	/** Sets the free-form note attached to this entry. */
	public void setNote(String note);

	/** Returns the entry's type string. */
	public String getType();

	/** Sets the entry's type string. */
	public void setType(String type);
}
package lbms.plugins.mldht.kad.tasks;

import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Queue;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.function.Consumer;

import lbms.plugins.mldht.kad.AnnounceNodeCache;
import lbms.plugins.mldht.kad.DBItem;
import lbms.plugins.mldht.kad.DHT;
import lbms.plugins.mldht.kad.DHT.DHTtype;
import lbms.plugins.mldht.kad.DHTConstants;
import lbms.plugins.mldht.kad.KBucketEntry;
import lbms.plugins.mldht.kad.KBucketEntryAndToken;
import lbms.plugins.mldht.kad.KClosestNodesSearch;
import lbms.plugins.mldht.kad.Key;
import lbms.plugins.mldht.kad.Node;
import lbms.plugins.mldht.kad.PeerAddressDBItem;
import lbms.plugins.mldht.kad.RPCCall;
import lbms.plugins.mldht.kad.RPCServer;
import lbms.plugins.mldht.kad.ScrapeResponseHandler;
import lbms.plugins.mldht.kad.messages.GetPeersRequest;
import lbms.plugins.mldht.kad.messages.GetPeersResponse;
import lbms.plugins.mldht.kad.messages.MessageBase;
import lbms.plugins.mldht.kad.messages.MessageBase.Method;
import lbms.plugins.mldht.kad.utils.AddressUtils;
import lbms.plugins.mldht.kad.utils.PackUtil;
import the8472.utils.concurrent.SerializedTaskExecutor;

/**
 * Iterative DHT get_peers lookup for an infohash: walks toward the target key,
 * collects peer addresses from responses, and (unless noAnnounce is set)
 * gathers write tokens from the closest responding nodes so a subsequent
 * announce can be performed.
 *
 * @author Damokles
 *
 */
public class PeerLookupTask extends Task {

	private boolean							noAnnounce;
	private boolean							lowPriority;
	private boolean							noSeeds;
	// terminate as soon as the closest set stabilizes instead of draining todo
	private boolean							fastTerminate;

	// nodes which have answered with tokens
	private Queue<KBucketEntryAndToken>		announceCanidates;
	private ScrapeResponseHandler			scrapeHandler;
	// callback invoked for every non-bogon peer item received
	Consumer<PeerAddressDBItem> resultHandler = (x) -> {};

	// de-duplicated set of peer items received so far
	private Set<PeerAddressDBItem>			returnedItems;
	// candidate closest-set, ordered by XOR distance to the target key;
	// guarded by synchronized(this)
	private SortedSet<KBucketEntryAndToken>	closestSet;
	// counts responses that did not improve the closest set (stability signal)
	int responsesSinceLastClosestSetModification;

	AnnounceNodeCache						cache;

	public PeerLookupTask (RPCServer rpc, Node node,
			Key info_hash) {
		super(info_hash, rpc, node);
		announceCanidates = new ConcurrentLinkedQueue<KBucketEntryAndToken>();
		returnedItems = Collections.newSetFromMap(new ConcurrentHashMap<PeerAddressDBItem, Boolean>());

		this.closestSet = new TreeSet<KBucketEntryAndToken>(new KBucketEntry.DistanceOrder(targetKey));
		cache = rpc.getDHT().getCache();
		// register key even before the task is started so the cache can already accumulate entries
		cache.register(targetKey,false);

		DHT.logDebug("PeerLookupTask started: " + getTaskID());

		addListener(t -> updatePopulationEstimator());
	}

	public void setScrapeHandler(ScrapeResponseHandler scrapeHandler) {
		this.scrapeHandler = scrapeHandler;
	}

	public void setResultHandler(Consumer<PeerAddressDBItem> handler) {
		resultHandler = handler;
	}

	public void setNoSeeds(boolean avoidSeeds) {
		noSeeds = avoidSeeds;
	}

	/**
	 * enabling this also enables noAnnounce
	 */
	public void setFastTerminate(boolean fastTerminate) {
		// mode must be fixed before any requests are in flight
		if(!isQueued())
			throw new IllegalStateException("cannot change lookup mode after startup");
		this.fastTerminate = fastTerminate;
		if(fastTerminate)
			setNoAnnounce(true);
	}

	public void setLowPriority(boolean lowPriority) {
		this.lowPriority = lowPriority;
	}

	public void setNoAnnounce(boolean noAnnounce) {
		this.noAnnounce = noAnnounce;
	}

	public boolean isNoAnnounce() {
		return noAnnounce;
	}

	/* (non-Javadoc)
	 * @see lbms.plugins.mldht.kad.Task#callFinished(lbms.plugins.mldht.kad.RPCCall, lbms.plugins.mldht.kad.messages.MessageBase)
	 */
	@Override
	void callFinished (RPCCall c, MessageBase rsp) {
		if (c.getMessageMethod() != Method.GET_PEERS) {
			return;
		}

		GetPeersResponse gpr = (GetPeersResponse) rsp;

		// harvest returned nodes: our own DHT type feeds this lookup's todo
		// list, other types are handed to the sibling DHT instances
		for (DHTtype type : DHTtype.values()) {
			byte[] nodes = gpr.getNodes(type);
			if (nodes == null)
				continue;
			int nval = nodes.length / type.NODES_ENTRY_LENGTH;
			if (type == rpc.getDHT().getType()) {
				for (int i = 0; i < nval; i++) {
					// add node to todo list
					KBucketEntry e = PackUtil.UnpackBucketEntry(nodes, i * type.NODES_ENTRY_LENGTH, type);
					if(!AddressUtils.isBogon(e.getAddress()) && !node.isLocalId(e.getID()) && !hasVisited(e))
						todo.add(e);
				}
			} else {
				rpc.getDHT().getSiblings().stream().filter(sib -> sib.getType() == type).forEach(sib -> {
					for (int i = 0; i < nval; i++) {
						KBucketEntry e = PackUtil.UnpackBucketEntry(nodes, i * type.NODES_ENTRY_LENGTH, type);
						sib.addDHTNode(e.getAddress().getAddress().getHostAddress(), e.getAddress().getPort());
					}
				});
			}
		}

		List<DBItem> items = gpr.getPeerItems();
		//if(items.size() > 0)
		//	System.out.println("unique:"+new HashSet<DBItem>(items).size()+" all:"+items.size()+" ver:"+gpr.getVersion()+" entries:"+items);
		for (DBItem item : items) {
			if(!(item instanceof PeerAddressDBItem))
				continue;
			PeerAddressDBItem it = (PeerAddressDBItem) item;
			// also add the items to the returned_items list
			if(!AddressUtils.isBogon(it)) {
				resultHandler.accept(it);
				returnedItems.add(it);
			}
		}

		// record when the first peer result arrived
		if(returnedItems.size() > 0 && firstResultTime == 0)
			firstResultTime = System.currentTimeMillis();

		KBucketEntry entry = new KBucketEntry(rsp.getOrigin(), rsp.getID());
		KBucketEntryAndToken toAdd = new KBucketEntryAndToken(entry, gpr.getToken());

		// if someone has peers he might have filters, collect for scrape
		if (!items.isEmpty() && scrapeHandler != null)
			synchronized (scrapeHandler) {
				scrapeHandler.addGetPeersRespone(gpr);
			}

		// add the peer who responded to the closest nodes list, so we can do an announce
		if (gpr.getToken() != null)
			announceCanidates.add(toAdd);

		// if we scrape we don't care about tokens.
		// otherwise we're only done if we have found the closest nodes that also returned tokens
		if (noAnnounce || gpr.getToken() != null) {
			synchronized (this) {
				closestSet.add(toAdd);
				// keep only the K closest; track whether this response changed the set
				if (closestSet.size() > DHTConstants.MAX_ENTRIES_PER_BUCKET) {
					KBucketEntryAndToken last = closestSet.last();
					closestSet.remove(last);
					if(last == toAdd)
						responsesSinceLastClosestSetModification++;
					else
						responsesSinceLastClosestSetModification = 0;
				}
			}
		}
	}

	/* (non-Javadoc)
	 * @see lbms.plugins.mldht.kad.Task#callTimeout(lbms.plugins.mldht.kad.RPCCall)
	 */
	@Override
	void callTimeout (RPCCall c) {
	}

	@Override
	boolean canDoRequest() {
		// low-priority lookups use a smaller concurrency window
		if(lowPriority)
			return getNumOutstandingRequestsExcludingStalled() < DHTConstants.MAX_CONCURRENT_REQUESTS_LOWPRIO;
		return super.canDoRequest();
	}

	// go over the todo list and send get_peers requests
	// until we have nothing left
	final Runnable exclusiveUpdate = SerializedTaskExecutor.whileTrue(() -> !todo.isEmpty() && canDoRequest() && !isClosestSetStable(), () -> {
		synchronized (this) {
			KBucketEntry e = todo.first();
			if(hasVisited(e)) {
				todo.remove(e);
				return;
			}

			// send a findNode to the node
			GetPeersRequest gpr = new GetPeersRequest(targetKey);
			// ask for the other address family too if a sibling DHT is starved for nodes
			gpr.setWant4(rpc.getDHT().getType() == DHTtype.IPV4_DHT || rpc.getDHT().getSiblings().stream().anyMatch(sib -> sib.getType() == DHTtype.IPV4_DHT && sib.getNode().getNumEntriesInRoutingTable() < DHTConstants.BOOTSTRAP_IF_LESS_THAN_X_PEERS));
			gpr.setWant6(rpc.getDHT().getType() == DHTtype.IPV6_DHT || rpc.getDHT().getSiblings().stream().anyMatch(sib -> sib.getType() == DHTtype.IPV6_DHT && sib.getNode().getNumEntriesInRoutingTable() < DHTConstants.BOOTSTRAP_IF_LESS_THAN_X_PEERS));
			gpr.setDestination(e.getAddress());
			gpr.setScrape(scrapeHandler != null);
			gpr.setNoSeeds(noSeeds);
			if(rpcCall(gpr, e.getID(), call -> {
				call.addListener(cache.getRPCListener());
				long rtt = e.getRTT();
				rtt = rtt + rtt / 2; // *1.5 since this is the average and not the 90th percentile like the timeout filter
				if(rtt < DHTConstants.RPC_CALL_TIMEOUT_MAX && rtt < rpc.getTimeoutFilter().getStallTimeout())
					call.setExpectedRTT(rtt); // only set a node-specific timeout if it's better than what the server would apply anyway
			})) {
				todo.remove(e);
				visited(e);
			}
		}
	});

	@Override
	void update () {
		// check if the cache has any closer nodes after the initial query
		Collection<KBucketEntry> cacheResults = cache.get(targetKey, lowPriority ? DHTConstants.MAX_CONCURRENT_REQUESTS_LOWPRIO : DHTConstants.MAX_CONCURRENT_REQUESTS);
		cacheResults.forEach(e -> {
			if(!hasVisited(e))
				todo.add(e);
		});

		exclusiveUpdate.run();
	}

	/**
	 * The closest set is considered stable once it is full, recent responses
	 * stopped changing it, and nothing in todo could still improve it.
	 */
	private boolean isClosestSetStable() {
		synchronized (this) {
			if(closestSet.size() < DHTConstants.MAX_ENTRIES_PER_BUCKET)
				return false;
			if(responsesSinceLastClosestSetModification < DHTConstants.MAX_CONCURRENT_REQUESTS && !fastTerminate)
				return false;
			boolean haveBetterTodosForClosestSet = !todo.isEmpty() && targetKey.threeWayDistance(todo.first().getID(), closestSet.last().getID()) < 0;
			return !haveBetterTodosForClosestSet;
		}
	}

	@Override
	protected boolean isDone() {
		int waitingFor = fastTerminate ?
getNumOutstandingRequestsExcludingStalled() : getNumOutstandingRequests(); if (todo.isEmpty() && waitingFor == 0) { return true; } return waitingFor == 0 && isClosestSetStable(); } private void updatePopulationEstimator() { synchronized (this) { // feed the estimator if we have usable results if(!todo.isEmpty() && isClosestSetStable()) { SortedSet<Key> toEstimate = new TreeSet<Key>(); for(KBucketEntryAndToken e : closestSet) toEstimate.add(e.getID()); rpc.getDHT().getEstimator().update(toEstimate,targetKey); } } } public Collection<KBucketEntryAndToken> getAnnounceCanidates() { if(fastTerminate || noAnnounce) throw new IllegalStateException("cannot use fast lookups for announces"); return announceCanidates; } /** * @return the returned_items */ public Set<PeerAddressDBItem> getReturnedItems () { return Collections.unmodifiableSet(returnedItems); } /** * @return the info_hash */ public Key getInfoHash () { return targetKey; } /* (non-Javadoc) * @see lbms.plugins.mldht.kad.Task#start() */ @Override public void start () { //delay the filling of the todo list until we actually start the task KClosestNodesSearch kns = new KClosestNodesSearch(targetKey, DHTConstants.MAX_ENTRIES_PER_BUCKET * 4,rpc.getDHT()); kns.fill(); todo.addAll(kns.getEntries()); // re-register once we actually started cache.register(targetKey,fastTerminate); todo.addAll(cache.get(targetKey,DHTConstants.MAX_CONCURRENT_REQUESTS * 2)); super.start(); } }
package java.awt;

import java.lang.reflect.Field;
import java.text.AttributedCharacterIterator;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;

import java.awt.image.AreaAveragingScaleFilter;
import java.awt.image.BufferedImage;
import java.awt.image.ImageConsumer;
import java.awt.image.ImageObserver;

import org.dvb.ui.DVBBufferedImage;

import sun.awt.ConstrainableGraphics;

import org.videolan.Logger;

/**
 * Software Graphics2D implementation rendering into an int-ARGB back buffer
 * (shared with a BDRootWindow or BDImage). Implements the subset of the AWT
 * graphics API required by BD-J; unsupported operations are logged via
 * {@code logger.unimplemented}.
 */
class BDGraphics extends Graphics2D implements ConstrainableGraphics {
    private static final Color DEFAULT_COLOR = Color.BLACK;
    private static final Font DEFAULT_FONT = new Font("Dialog", Font.PLAIN, 12);

    private int width;
    private int height;
    private int[] backBuffer;
    private Area dirty;
    private GraphicsConfiguration gc;
    private Color foreground;
    private Color background;
    private Font font;
    private BDFontMetrics fontMetrics;
    private AlphaComposite composite;

    /** The current xor color. If null then we are in paint mode. */
    private Color xorColor;

    /** Translated X, Y offset from native offset. */
    private int originX;
    private int originY;

    /** The actual clip rectangle that is intersection of user clip and constrained rectangle. */
    private Rectangle actualClip;

    /** The current user clip rectangle or null if no clip has been set. This is stored in the
        native coordinate system and not the (possibly) translated Java coordinate system. */
    private Rectangle clip = null;

    /** The rectangle this graphics object has been constrained too. This is stored in the
        native coordinate system and not the (possibly) translated Java coordinate system.
        If it is null then this graphics has not been constrained. The constrained rectangle
        is another layer of clipping independant of the user clip. */
    private Rectangle constrainedRect = null;

    /**
     * Copy constructor used by {@link #create()}.
     * NOTE(review): constrainedRect is intentionally not inherited here
     * (matches original behavior) — confirm that is the desired contract.
     */
    BDGraphics(BDGraphics g) {
        backBuffer = g.backBuffer;
        dirty = g.dirty;
        width = g.width;
        height = g.height;
        gc = g.gc;
        foreground = g.foreground;
        background = g.background;
        composite = g.composite;
        font = g.font;
        fontMetrics = g.fontMetrics;
        originX = g.originX;
        originY = g.originY;
        if (g.clip != null) {
            clip = new Rectangle(g.clip);
        }
        setupClip();
    }

    /** Graphics bound to a root window's back buffer. */
    BDGraphics(BDRootWindow window) {
        width = window.getWidth();
        height = window.getHeight();
        backBuffer = window.getBdBackBuffer();
        dirty = window.getDirtyArea();
        gc = window.getGraphicsConfiguration();
        foreground = window.getForeground();
        background = window.getBackground();
        font = window.getFont();
        if (foreground == null)
            foreground = DEFAULT_COLOR;
        if (background == null)
            background = DEFAULT_COLOR;
        if (font == null)
            font = DEFAULT_FONT;
        fontMetrics = BDFontMetrics.getFontMetrics(font);
        composite = AlphaComposite.SrcOver;
        setupClip();
    }

    /** Graphics bound to an offscreen image's back buffer. */
    BDGraphics(BDImage image) {
        width = image.getWidth();
        height = image.getHeight();
        backBuffer = image.getBdBackBuffer();
        dirty = image.getDirtyArea();
        gc = image.getGraphicsConfiguration();
        Component component = image.getComponent();
        if (component != null) {
            foreground = component.getForeground();
            background = component.getBackground();
            font = component.getFont();
        }
        if (foreground == null)
            foreground = DEFAULT_COLOR;
        if (background == null)
            background = new Color(0, 0, 0, 0); // fully transparent for images
        if (font == null)
            font = DEFAULT_FONT;
        fontMetrics = BDFontMetrics.getFontMetrics(font);
        composite = AlphaComposite.SrcOver;
        setupClip();
    }

    public Graphics create() {
        return new BDGraphics(this);
    }

    public void translate(int x, int y) {
        originX += x;
        originY += y;
    }

    public void setFont(Font font) {
        if (font != null && !font.equals(this.font)) {
            this.font = font;
            fontMetrics = BDFontMetrics.getFontMetrics(font);
        }
    }

    public Font getFont() {
        return font;
    }

    public FontMetrics getFontMetrics() {
        return fontMetrics;
    }

    public FontMetrics getFontMetrics(Font font) {
        return BDFontMetrics.getFontMetrics(font);
    }

    public void setColor(Color c) {
        if ((c != null) && (c != foreground))
            foreground = c;
    }

    public Color getColor() {
        return foreground;
    }

    public Composite getComposite() {
        return composite;
    }

    public GraphicsConfiguration getDeviceConfiguration() {
        return gc;
    }

    public void setComposite(Composite comp) {
        if ((comp != null) && (comp != composite)) {
            if (!(comp instanceof AlphaComposite))
                throw new IllegalArgumentException("Only AlphaComposite is supported");
            composite = (AlphaComposite) comp;
        }
    }

    public void setPaintMode() {
        xorColor = null;
        composite = AlphaComposite.SrcOver;
    }

    public void setXORMode(Color color) {
        xorColor = color;
    }

    /** Gets the current clipping area (in user/translated coordinates). */
    public Rectangle getClipBounds() {
        if (clip != null)
            return new Rectangle(clip.x - originX, clip.y - originY, clip.width, clip.height);
        return null;
    }

    public void constrain(int x, int y, int w, int h) {
        Rectangle rect;
        if (constrainedRect != null)
            rect = constrainedRect;
        else
            rect = new Rectangle(0, 0, width, height);
        constrainedRect = rect.intersection(new Rectangle(rect.x + x, rect.y + y, w, h));
        // constraining also moves the origin to the constrained corner
        originX = constrainedRect.x;
        originY = constrainedRect.y;
        setupClip();
    }

    /** Returns a Shape object representing the clip. */
    public Shape getClip() {
        return getClipBounds();
    }

    /** Crops the clipping rectangle. */
    public void clipRect(int x, int y, int w, int h) {
        Rectangle rect = new Rectangle(x + originX, y + originY, w, h);
        if (clip != null)
            clip = clip.intersection(rect);
        else
            clip = rect;
        setupClip();
    }

    /** Sets the clipping rectangle. */
    public void setClip(int x, int y, int w, int h) {
        clip = new Rectangle(x + originX, y + originY, w, h);
        setupClip();
    }

    /** Sets the clip to a Shape (only Rectangle allowed). */
    public void setClip(Shape clip) {
        if (clip == null) {
            this.clip = null;
            setupClip();
        } else if (clip instanceof Rectangle) {
            Rectangle rect = (Rectangle) clip;
            setClip(rect.x, rect.y, rect.width, rect.height);
        } else
            throw new IllegalArgumentException("setClip(Shape) only supports Rectangle objects");
    }

    /** Recomputes actualClip = user clip ∩ constrained rect (or device bounds). */
    private void setupClip() {
        Rectangle rect;
        if (constrainedRect != null)
            rect = constrainedRect;
        else
            rect = new Rectangle(0, 0, width, height);
        if (clip != null)
            actualClip = clip.intersection(rect);
        else
            actualClip = rect;
    }

    /**
     * Source-over blend of src onto dest (both non-premultiplied ARGB ints).
     * Fast paths for fully transparent / fully opaque src and transparent dest.
     */
    private int alphaBlend(int dest, int src) {
        int As = src >>> 24;
        if (As == 0)
            return dest;
        if (As == 255)
            return src;
        int Ad = (dest >>> 24);
        if (Ad == 0)
            return src;
        int R, G, B;
        R = ((src >>> 16) & 255) * As * 255;
        G = ((src >>> 8) & 255) * As * 255;
        B = (src & 255) * As * 255;
        Ad = Ad * (255 - As);
        As = As * 255 + Ad;
        R = (R + ((dest >>> 16) & 255) * Ad) / As;
        G = (G + ((dest >>> 8) & 255) * Ad) / As;
        B = (B + (dest & 255) * Ad) / As;
        R = Math.min(255, R);
        G = Math.min(255, G);
        B = Math.min(255, B);
        Ad = As / 255;
        Ad = Math.min(255, Ad);
        return (Ad << 24) | (R << 16) | (G << 8) | B;
    }

    /** Scales the pixel's alpha by the composite's constant alpha. */
    private int applyComposite(int rgb) {
        return ((int) ((rgb >>> 24) * composite.getAlpha()) << 24) | (rgb & 0x00FFFFFF);
    }

    /** Draws a clipped horizontal span of a single color at native (x, y). */
    private void drawSpanN(int x, int y, int length, int rgb) {
        Rectangle rect = new Rectangle(x, y, length, 1);
        rect = actualClip.intersection(rect);
        if (rect.width <= 0 || rect.height <= 0 || rect.x < 0 || rect.y < 0) {
            return;
        }
        x = rect.x;
        length = rect.width;
        dirty.add(rect);

        if (xorColor != null) {
            for (int i = 0; i < length; i++) {
                backBuffer[y * width + x + i] ^= xorColor.getRGB() ^ rgb;
            }
            return;
        }
        switch (composite.getRule()) {
            case AlphaComposite.CLEAR:
                for (int i = 0; i < length; i++) {
                    backBuffer[y * width + x + i] = 0;
                }
                break;
            case AlphaComposite.SRC:
                rgb = applyComposite(rgb);
                for (int i = 0; i < length; i++) {
                    backBuffer[y * width + x + i] = rgb;
                }
                break;
            case AlphaComposite.SRC_OVER:
                rgb = applyComposite(rgb);
                for (int i = 0; i < length; i++) {
                    backBuffer[y * width + x + i] = alphaBlend(backBuffer[y * width + x + i], rgb);
                }
                break;
        }
    }

    /** Draws a clipped horizontal span from a pixel array at native (x, y). */
    private void drawSpanN(int x, int y, int length, int src[], int srcOffset) {
        Rectangle rect = new Rectangle(x, y, length, 1);
        rect = actualClip.intersection(rect);
        if (rect.width <= 0 || rect.height <= 0 || rect.x < 0 || rect.y < 0) {
            return;
        }
        srcOffset += rect.x - x; // skip source pixels clipped away on the left
        x = rect.x;
        length = rect.width;
        dirty.add(rect);

        if (xorColor != null) {
            for (int i = 0; i < length; i++) {
                backBuffer[y * width + x + i] ^= xorColor.getRGB() ^ src[srcOffset + i];
            }
            return;
        }
        switch (composite.getRule()) {
            case AlphaComposite.CLEAR:
                for (int i = 0; i < length; i++) {
                    backBuffer[y * width + x + i] = 0;
                }
                break;
            case AlphaComposite.SRC:
                for (int i = 0; i < length; i++) {
                    backBuffer[y * width + x + i] = applyComposite(src[srcOffset + i]);
                }
                break;
            case AlphaComposite.SRC_OVER:
                for (int i = 0; i < length; i++) {
                    backBuffer[y * width + x + i] =
                        alphaBlend(backBuffer[y * width + x + i], applyComposite(src[srcOffset + i]));
                }
                break;
        }
    }

    private void drawSpan(int x, int y, int length, int rgb) {
        x += originX;
        y += originY;
        drawSpanN(x, y, length, rgb);
    }

    private void drawSpan(int x, int y, int length, int src[], int srcOffset) {
        x += originX;
        y += originY;
        drawSpanN(x, y, length, src, srcOffset);
    }

    private void drawPointN(int x, int y, int rgb) {
        drawSpanN(x, y, 1, rgb);
    }

    /** Blits a glyph bitmap (used by BDFontMetrics) point by point. */
    private void drawGlyph(int[] rgbArray, int x0, int y0, int w, int h) {
        for (int y = 0; y < h; y++)
            for (int x = 0; x < w; x++)
                drawPoint(x + x0, y + y0, rgbArray[y * w + x]);
    }

    private void drawPoint(int x, int y, int rgb) {
        x += originX;
        y += originY;
        if (actualClip.contains(x, y))
            drawPointN(x, y, rgb);
    }

    public void clearRect(int x, int y, int w, int h) {
        x += originX;
        y += originY;
        Rectangle rect = new Rectangle(x, y, w, h);
        rect = actualClip.intersection(rect);
        if (rect.isEmpty()) {
            return;
        }
        x = rect.x;
        y = rect.y;
        w = rect.width;
        h = rect.height;
        // clearRect writes the background color directly, ignoring the composite
        int rgb = background.getRGB();
        for (int i = 0; i < h; i++)
            Arrays.fill(backBuffer, (y + i) * width + x, (y + i) * width + x + w, rgb);
        dirty.add(rect);
    }

    public void fillRect(int x, int y, int w, int h) {
        x += originX;
        y += originY;
        Rectangle rect = new Rectangle(x, y, w, h);
        rect = actualClip.intersection(rect);
        x = rect.x;
        y = rect.y;
        w = rect.width;
        h = rect.height;
        int rgb = foreground.getRGB();
        for (int Y = y; Y < (y + h); Y++)
            drawSpanN(x, Y, w, rgb);
    }

    public void drawRect(int x, int y, int w, int h) {
        x += originX;
        y += originY;
        drawLineN(x, y, x + w, y);
        drawLineN(x, y + h, x + w, y + h);
        drawLineN(x, y, x, y + h);
        drawLineN(x + w, y, x + w, y + h);
    }

    /** Bresenham line in native coordinates. */
    private void drawLineN(int x1, int y1, int x2, int y2) {
        int rgb = foreground.getRGB();
        int dy = y2 - y1;
        int dx = x2 - x1;
        int stepx, stepy;
        int fraction;
        if (dy < 0) {
            dy = -dy;
            stepy = -1;
        } else {
            stepy = 1;
        }
        if (dx < 0) {
            dx = -dx;
            stepx = -1;
        } else {
            stepx = 1;
        }
        dy <<= 1;
        dx <<= 1;
        drawPointN(x1, y1, rgb);
        if (dx > dy) {
            fraction = dy - (dx >> 1);
            while (x1 != x2) {
                if (fraction >= 0) {
                    y1 += stepy;
                    fraction -= dx;
                }
                x1 += stepx;
                fraction += dy;
                drawPointN(x1, y1, rgb);
            }
        } else {
            fraction = dx - (dy >> 1);
            while (y1 != y2) {
                if (fraction >= 0) {
                    x1 += stepx;
                    fraction -= dy;
                }
                y1 += stepy;
                fraction += dx;
                drawPointN(x1, y1, rgb);
            }
        }
    }

    public void drawLine(int x1, int y1, int x2, int y2) {
        x1 += originX;
        y1 += originY;
        x2 += originX;
        y2 += originY;
        drawLineN(x1, y1, x2, y2);
    }

    /**
     * Copies an area of the canvas that this graphics context paints to.
     * @param x the x-coordinate of the source.
     * @param y the y-coordinate of the source.
     * @param w the width.
     * @param h the height.
     * @param dx the horizontal distance to copy the pixels.
     * @param dy the vertical distance to copy the pixels.
     */
    public void copyArea(int x, int y, int w, int h, int dx, int dy) {
        x += originX;
        y += originY;
        Rectangle rect = new Rectangle(x, y, w, h);
        rect = actualClip.intersection(rect);
        if (rect.width <= 0 || rect.height <= 0) {
            return;
        }
        x = rect.x;
        y = rect.y;
        w = rect.width;
        h = rect.height;
        int subImage[] = new int[w * h];

        // snapshot the source region first so overlapping copies are safe
        for (int i = 0; i < h; i++) {
            System.arraycopy(backBuffer, ((y + i) * width) + x, subImage, w * i, w);
        }
        // draw sub image
        for (int i = 0; i < h; i++) {
            drawSpanN(x + dx, y + i + dy, w, subImage, w * i);
        }
    }

    /** Draws lines defined by an array of x points and y points. */
    public void drawPolyline(int xPoints[], int yPoints[], int nPoints) {
        if (nPoints == 1) {
            drawPoint(xPoints[0], yPoints[0], foreground.getRGB());
        } else {
            // BUGFIX: the y coordinates previously passed xPoints, collapsing
            // every segment onto the x = y diagonal.
            for (int i = 0; i < (nPoints - 1); i++)
                drawLine(xPoints[i], yPoints[i], xPoints[i + 1], yPoints[i + 1]);
        }
    }

    /** Draws a polygon defined by an array of x points and y points. */
    public void drawPolygon(int xPoints[], int yPoints[], int nPoints) {
        if (nPoints == 1) {
            drawPoint(xPoints[0], yPoints[0], foreground.getRGB());
        } else {
            // BUGFIX: use yPoints for the y coordinates (see drawPolyline).
            for (int i = 0; i < (nPoints - 1); i++)
                drawLine(xPoints[i], yPoints[i], xPoints[i + 1], yPoints[i + 1]);
            if (nPoints > 2)
                drawLine(xPoints[0], yPoints[0], xPoints[nPoints - 1], yPoints[nPoints - 1]);
        }
    }

    /** Fills a polygon using a scanline/even-odd algorithm. */
    public void fillPolygon(int xPoints[], int yPoints[], int nPoints) {
        int minY = Integer.MAX_VALUE;
        int maxY = Integer.MIN_VALUE;
        int colour = foreground.getRGB();

        if (nPoints < 3) {
            return;
        }
        for (int i = 0; i < nPoints; i++) {
            if (yPoints[i] > maxY) {
                maxY = yPoints[i];
            }
            if (yPoints[i] < minY) {
                minY = yPoints[i];
            }
        }

        // check the last point to see if its the same as the first
        // BUGFIX: restored the decrement that drops the duplicated closing
        // vertex (the statement had degenerated into a bare expression).
        if (xPoints[0] == xPoints[nPoints - 1] && yPoints[0] == yPoints[nPoints - 1]) {
            nPoints--;
        }

        PolyEdge[] polyEdges = new PolyEdge[nPoints];
        for (int i = 0; i < nPoints - 1; i++) {
            polyEdges[i] = new PolyEdge(xPoints[i], yPoints[i], xPoints[i + 1], yPoints[i + 1]);
        }
        // add the last one (closing edge back to the first vertex)
        polyEdges[nPoints - 1] = new PolyEdge(xPoints[nPoints - 1], yPoints[nPoints - 1], xPoints[0], yPoints[0]);

        ArrayList xList = new ArrayList();
        for (int i = minY; i <= maxY; i++) {
            for (int j = 0; j < nPoints; j++) {
                if (polyEdges[j].intersects(i)) {
                    int x = polyEdges[j].intersectionX(i);
                    xList.add(new Integer(x));
                }
            }
            // probably a better way of doing this (removing duplicates);
            HashSet hs = new HashSet();
            hs.addAll(xList);
            xList.clear();
            xList.addAll(hs);
            if (xList.size() % 2 > 0) {
                xList.clear();
                continue; // this should be impossible unless the poly is open somewhere
            }
            Collections.sort(xList);
            // fill between alternating pairs of intersections (even-odd rule)
            for (int j = 0; j < xList.size(); j += 2) {
                int x1 = ((Integer) xList.get(j)).intValue();
                int x2 = ((Integer) xList.get(j + 1)).intValue();
                drawSpan(x1, i, x2 - x1, colour);
            }
            xList.clear();
        }
    }

    /** Draws an oval to fit in the given rectangle. */
    public void drawOval(int x, int y, int w, int h) {
        int startX;
        int endX;
        int offset;
        int[] xList;
        int[] yList;
        int numPoints;
        int count;
        float as;
        float bs;

        if (w <= 0 || h <= 0) {
            return;
        }

        count = 0;
        numPoints = ((h / 2) + (h / 2) + 1) * 2;
        numPoints += 1; // to close

        xList = new int[numPoints];
        yList = new int[numPoints];

        as = (w / 2.0f) * (w / 2.0f);
        bs = (h / 2.0f) * (h / 2.0f);

        // left half: top to bottom
        for (int i = -h / 2; i <= h / 2; i++) {
            offset = (int) Math.sqrt((1.0 - ((i * i) / bs)) * as);
            startX = x - offset + w / 2;
            xList[count] = startX;
            yList[count] = y + i + h / 2;
            count++;
        }
        // right half: bottom back to top
        // BUGFIX: the loop update had been garbled ("i" with no decrement).
        for (int i = h / 2; i >= -h / 2; i--) {
            offset = (int) Math.sqrt((1.0 - ((i * i) / bs)) * as);
            endX = x + offset + w / 2;
            xList[count] = endX;
            yList[count] = y + i + h / 2;
            count++;
        }

        xList[count] = xList[0]; // close the loop
        yList[count] = yList[0]; // close the loop

        drawPolyline(xList, yList, numPoints);
    }

    /** Fills an oval to fit in the given rectangle. */
    public void fillOval(int x, int y, int w, int h) {
        int startX;
        int endX;
        int offset;
        int colour;
        float as;
        float bs;

        if (w <= 0 || h <= 0) {
            return;
        }

        as = (w / 2.0f) * (w / 2.0f);
        bs = (h / 2.0f) * (h / 2.0f);
        colour = foreground.getRGB();

        for (int i = -h / 2; i <= h / 2; i++) {
            offset = (int) Math.sqrt((1.0 - ((i * i) / bs)) * as);
            startX = x - offset + w / 2;
            endX = x + offset + w / 2;
            drawSpan(startX, y + i + h / 2, endX - startX + 1, colour);
        }
    }

    /**
     * Draws an arc bounded by the given rectangle from startAngle to
     * endAngle. 0 degrees is a vertical line straight up from the
     * center of the rectangle. Positive start angle indicate clockwise
     * rotations, negative angle are counter-clockwise.
     */
    public void drawArc(int x, int y, int w, int h, int startAngle, int endAngle) {
        logger.unimplemented("drawArc");
    }

    /** fills an arc. arguments are the same as drawArc. */
    public void fillArc(int x, int y, int w, int h, int startAngle, int endAngle) {
        logger.unimplemented("fillArc");
    }

    /** Draws a rounded rectangle. */
    public void drawRoundRect(int x, int y, int w, int h, int arcWidth, int arcHeight) {
        int[] xList;
        int[] yList;
        int numPoints;
        int count;
        int startX;
        int endX;
        int offset;

        if (w <= 0 || h <= 0) {
            return;
        }
        if (arcWidth == 0 || arcHeight == 0) {
            drawRect(x, y, w, h);
            return;
        }
        if (arcWidth < 0) { // matches behaviour of normal java version
            arcWidth *= -1;
        }
        if (arcHeight < 0) {
            arcHeight *= -1;
        }

        count = 0;
        numPoints = ((arcHeight / 2) + 1) * 2;
        numPoints += ((arcHeight / 2) + 1) * 2;
        numPoints += 1; // last point to close the loop

        xList = new int[numPoints];
        yList = new int[numPoints];

        float as = (arcWidth / 2.0f) * (arcWidth / 2.0f);
        float bs = (arcHeight / 2.0f) * (arcHeight / 2.0f);

        // top-left arc, walking upward along the left edge
        // BUGFIX: the loop update had been garbled (missing "i--").
        for (int i = 0; i >= -arcHeight / 2; i--) {
            offset = (int) Math.sqrt((1.0 - ((i * i) / bs)) * as);
            startX = x - offset + arcWidth / 2;
            xList[count] = startX;
            yList[count] = y + i + (arcHeight / 2);
            count++;
        }
        // top-right arc
        for (int i = -arcHeight / 2; i <= 0; i++) {
            offset = (int) Math.sqrt((1.0 - ((i * i) / bs)) * as);
            endX = x + offset + (w - arcWidth) + arcWidth / 2;
            xList[count] = endX;
            yList[count] = y + i + (arcHeight / 2);
            count++;
        }
        // bottom-right arc
        for (int i = 0; i <= arcHeight / 2; i++) {
            offset = (int) Math.sqrt((1.0 - ((i * i) / bs)) * as);
            endX = x + offset + (w - arcWidth) + arcWidth / 2;
            xList[count] = endX;
            yList[count] = y + i + h - arcHeight / 2;
            count++;
        }
        // bottom-left arc
        // BUGFIX: the loop update had been garbled (missing "i--").
        for (int i = arcHeight / 2; i >= 0; i--) {
            offset = (int) Math.sqrt((1.0 - ((i * i) / bs)) * as);
            startX = x - offset + arcWidth / 2;
            xList[count] = startX;
            yList[count] = y + i + h - arcHeight / 2;
            count++;
        }

        xList[count] = xList[0];
        yList[count] = yList[0];

        drawPolyline(xList, yList, numPoints);
    }

    /** Draws a filled rounded rectangle. */
    public void fillRoundRect(int x, int y, int w, int h, int arcWidth, int arcHeight) {
        int startX;
        int endX;
        int offset;
        int colour;

        if (w <= 0 || h <= 0) {
            return;
        }
        if (arcWidth == 0 || arcHeight == 0) {
            fillRect(x, y, w, h);
            return;
        }
        if (arcWidth < 0) { // matches behaviour of normal java version
            arcWidth *= -1;
        }
        if (arcHeight < 0) {
            arcHeight *= -1;
        }

        float as = (arcWidth / 2.0f) * (arcWidth / 2.0f);
        float bs = (arcHeight / 2.0f) * (arcHeight / 2.0f);
        colour = foreground.getRGB();

        // draw top curved half of box
        for (int i = -arcHeight / 2; i < 0; i++) {
            offset = (int) Math.sqrt((1.0 - ((i * i) / bs)) * as);
            startX = x - offset + arcWidth / 2;
            endX = x + offset + (w - arcWidth) + arcWidth / 2;
            drawSpan(startX, y + i + (arcHeight / 2), endX - startX + 1, colour);
        }
        // draw middle section
        for (int i = 0; i < h - arcHeight; i++) {
            drawSpan(x, y + i + arcHeight / 2, w, colour);
        }
        // draw bottom box
        for (int i = 0; i <= arcHeight / 2; i++) {
            offset = (int) Math.sqrt((1.0 - ((i * i) / bs)) * as);
            startX = x - offset + arcWidth / 2;
            endX = x + offset + (w - arcWidth) + arcWidth / 2;
            drawSpan(startX, y + i + h - 1 - arcHeight / 2, endX - startX + 1, colour);
        }
    }

    protected native void drawStringN(long ftFace, String string, int x, int y, int rgb);

    /** Draws the given string. */
    public void drawString(String string, int x, int y) {
        if (fontMetrics != null) {
            fontMetrics.drawString(this, string, x, y, foreground.getRGB());
        }
    }

    /** Draws the given character array. */
    public void drawChars(char chars[], int offset, int length, int x, int y) {
        drawString(new String(chars, offset, length), x, y);
    }

    public void drawString(AttributedCharacterIterator arg0, int arg1, int arg2) {
        logger.unimplemented("drawString");
    }

    /**
     * Draws an image at x,y in nonblocking mode with a callback object.
     */
    public boolean drawImage(Image img, int x, int y, ImageObserver observer) {
        return drawImage(img, x, y, null, observer);
    }

    /**
     * Draws an image at x,y in nonblocking mode with a solid background
     * color and a callback object.
     */
    public boolean drawImage(Image img, int x, int y, Color bg, ImageObserver observer) {
        return drawImageN(img, x, y, -1, -1, 0, 0, -1, -1, bg, observer);
    }

    /**
     * Draws an image scaled to x,y,w,h in nonblocking mode with a
     * callback object.
     */
    public boolean drawImage(Image img, int x, int y, int w, int h, ImageObserver observer) {
        return drawImage(img, x, y, w, h, null, observer);
    }

    /**
     * Draws an image scaled to x,y,w,h in nonblocking mode with a
     * solid background color and a callback object.
     */
    public boolean drawImage(Image img, int x, int y, int w, int h, Color bg, ImageObserver observer) {
        return drawImageN(img, x, y, w, h, 0, 0, -1, -1, bg, observer);
    }

    /**
     * Draws a subrectangle of an image scaled to a destination rectangle
     * in nonblocking mode with a callback object.
     */
    public boolean drawImage(Image img,
                             int dx1, int dy1, int dx2, int dy2,
                             int sx1, int sy1, int sx2, int sy2,
                             ImageObserver observer) {
        return drawImage(img, dx1, dy1, dx2, dy2, sx1, sy1, sx2, sy2, null, observer);
    }

    /**
     * Draws a subrectangle of an image scaled to a destination rectangle in
     * nonblocking mode with a solid background color and a callback object.
     */
    public boolean drawImage(Image img,
                             int dx1, int dy1, int dx2, int dy2,
                             int sx1, int sy1, int sx2, int sy2,
                             Color bg, ImageObserver observer) {
        // normalize the corner pairs so width/height are non-negative
        if (dx1 > dx2) {
            int swap = dx1;
            dx1 = dx2;
            dx2 = swap;
        }
        if (dy1 > dy2) {
            int swap = dy1;
            dy1 = dy2;
            dy2 = swap;
        }
        if (sx1 > sx2) {
            int swap = sx1;
            sx1 = sx2;
            sx2 = swap;
        }
        if (sy1 > sy2) {
            int swap = sy1;
            sy1 = sy2;
            sy2 = swap;
        }
        return drawImageN(img, dx1, dy1, dx2 - dx1, dy2 - dy1,
                          sx1, sy1, sx2 - sx1, sy2 - sy1, bg, observer);
    }

    /**
     * Workhorse for all drawImage variants: resolves the image to a BDImage,
     * crops/scales as requested, then blits span by span. A source or
     * destination extent of -1 means "use the image's natural size".
     */
    protected boolean drawImageN(Image img,
                                 int dx, int dy, int dw, int dh,
                                 int sx, int sy, int sw, int sh,
                                 Color bg, ImageObserver observer) {
        if ((sx < 0) || (sy < 0) ||
            (sw == 0) || (sh == 0) ||
            (dw == 0) || (dh == 0))
            return false;

        BDImage bdImage;
        if (img instanceof BDImage) {
            bdImage = (BDImage) img;
        } else if (img instanceof DVBBufferedImage) {
            bdImage = (BDImage) getBufferedImagePeer(
                (BufferedImage) (((DVBBufferedImage) img).getImage()));
        } else if (img instanceof BufferedImage) {
            bdImage = (BDImage) getBufferedImagePeer((BufferedImage) img);
        } else {
            logger.unimplemented("drawImageN: unsupported image type " + img.getClass().getName());
            return false;
        }

        if (bdImage instanceof BDImageConsumer) {
            BDImageConsumer consumer = (BDImageConsumer) bdImage;
            if (!consumer.isComplete(observer)) {
                return false;
            }
        }

        // requested source rect exceeds the image: copy what exists into a
        // correctly-sized scratch image (missing area stays transparent)
        if (sx + sw > bdImage.width || sy + sh > bdImage.height) {
            logger.info("drawImageN: fixing too small src image (src " + sx + "," + sy + " " + sw + "x" + sh + " ; img " + bdImage.width + "x" + bdImage.height + ")");

            BDImage subImage = new BDImage(null, sw, sh, null);
            bdImage.getRGB(sx, sy,
                           Math.min(sw, bdImage.width - sx), Math.min(sh, bdImage.height - sy),
                           subImage.getBdBackBuffer(), 0, sw);
            bdImage = subImage;
            sx = 0;
            sy = 0;
        }

        // crop to the requested source sub-rectangle
        if ((sw > 0) && (sh > 0) &&
            ((sx != 0) || (sy != 0) || (sw != bdImage.width) || (sh != bdImage.height))) {
            BDImage subImage = new BDImage(null, sw, sh, null);
            bdImage.getRGB(sx, sy, sw, sh, subImage.getBdBackBuffer(), 0, sw);
            bdImage = subImage;
        }
        // scale to the requested destination size
        if ((dw > 0) && (dh > 0) &&
            ((dw != bdImage.width) || (dh != bdImage.height))) {
            BDImageConsumer scaledImage = new BDImageConsumer(null);
            AreaAveragingScaleFilter scaleFilter = new AreaAveragingScaleFilter(dw, dh);
            scaleFilter = (AreaAveragingScaleFilter) scaleFilter.getFilterInstance(scaledImage);
            scaleFilter.setDimensions(bdImage.width, bdImage.height);
            scaleFilter.setPixels(0, 0, bdImage.width, bdImage.height,
                                  bdImage.getColorModel(), bdImage.getBdBackBuffer(),
                                  0, bdImage.width);
            scaleFilter.imageComplete(ImageConsumer.STATICIMAGEDONE);
            bdImage = scaledImage;
        }

        int[] rgbArray = bdImage.getBdBackBuffer();
        int bgColor;
        if (bg != null)
            bgColor = bg.getRGB();
        else
            bgColor = 0;

        for (int y = dy; y < (dy + bdImage.height); y++) {
            if (bg != null) {
                drawSpan(dx, y, bdImage.width, bgColor);
            }
            drawSpan(dx, y, bdImage.width, rgbArray, (y - dy) * bdImage.width);
        }
        return true;
    }

    public Stroke getStroke() {
        logger.unimplemented("getStroke");
        throw new Error();
    }

    public void setStroke(Stroke stroke) {
        logger.unimplemented("setStroke");
    }

    public void dispose() {
    }

    public String toString() {
        return getClass().getName() + "[" + originX + "," + originY + "]";
    }

    /** Reflectively fetches a BufferedImage's private "peer" (the backing BDImage). */
    private static Image getBufferedImagePeer(BufferedImage image) {
        try {
            return (Image) bufferedImagePeer.get(image);
        } catch (IllegalArgumentException e) {
            e.printStackTrace();
        } catch (IllegalAccessException e) {
            e.printStackTrace();
        }
        return null;
    }

    private static Field bufferedImagePeer;

    static {
        try {
            Class c = Class.forName("java.awt.image.BufferedImage");
            bufferedImagePeer = c.getDeclaredField("peer");
            bufferedImagePeer.setAccessible(true);
        } catch (ClassNotFoundException e) {
            throw new AWTError("java.awt.image.BufferedImage not found");
        } catch (SecurityException e) {
            throw new AWTError("java.awt.image.BufferedImage.peer not accessible");
        } catch (NoSuchFieldException e) {
            throw new AWTError("java.awt.image.BufferedImage.peer not found");
        }
    }

    private static final Logger logger = Logger.getLogger(BDGraphics.class.getName());
}
package ProblemeDesReines.chessPiece; import ProblemeDesReines.chessBoard.ChessBoard; public class Pawn implements IChessPiece { @Override public void applyPattern (Object sender, int row, int col) { if(!(sender instanceof ChessBoard)) return; ChessBoard board = (ChessBoard) sender; //Check top left if(row > 0 && col > 0 && board.getStatus(row - 1, col - 1) == 0) board.setStatus(row - 1, col - 1, -1); //Check bottom right if(row > 0 && col < board.width - 1 && board.getStatus(row - 1, col + 1) == 0) board.setStatus(row - 1, col + 1, -1); //Check bottom left if(row < board.height - 1 && col > 0 && board.getStatus(row + 1, col - 1) == 0) board.setStatus(row + 1, col - 1, -1); //Check bottom right if(row < board.height - 1 && col < board.width - 1 && board.getStatus(row + 1, col + 1) == 0) board.setStatus(row + 1, col + 1, -1); } }
package bj.pranie.controller; import bj.pranie.dao.ReservationDao; import bj.pranie.dao.UserDao; import bj.pranie.dao.WashTimeDao; import bj.pranie.entity.Reservation; import bj.pranie.entity.User; import bj.pranie.entity.WashTime; import bj.pranie.entity.myEnum.ReservationType; import bj.pranie.model.WmModel; import bj.pranie.util.TimeUtil; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.security.core.Authentication; import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.web.bind.annotation.*; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import java.util.List; @Controller @RequestMapping(value = "/wm") public class WmController { private static final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd"); private static final SimpleDateFormat timeFormat = new SimpleDateFormat("HH:mm"); private static final SimpleDateFormat format = new SimpleDateFormat("yyyy/MM/dd HH:mm"); @Autowired private ReservationDao reservationDao; @Autowired private WashTimeDao washTimeDao; @Autowired private UserDao userDao; @RequestMapping(path = "/{year}/{month}/{day}/{washTimeId}", method = RequestMethod.GET) public String wm(@PathVariable int year, @PathVariable int month, @PathVariable int day, @PathVariable long washTimeId, Model model) throws ParseException { Date date = dateFormat.parse(year + "/" + month + "/" + day); WashTime washTime = washTimeDao.findOne(washTimeId); List<Reservation> reservationList = reservationDao.findByWashTimeIdAndDate(washTimeId, new java.sql.Date(date.getTime())); int wmFree = 3 - reservationList.size(); model.addAttribute("washTimeId", washTimeId); model.addAttribute("dayName", getDayName(date)); model.addAttribute("date", dateFormat.format(date)); 
model.addAttribute("time", getWashTime(washTime)); model.addAttribute("wmFree", wmFree); model.addAttribute("reservations", getWmModels(reservationList, date, washTime)); model.addAttribute("user", SecurityContextHolder.getContext().getAuthentication().getPrincipal()); return "wm/wm"; } @RequestMapping(path = "/{year}/{month}/{day}/{washTimeId}/unregister", method = RequestMethod.POST) public String unregisterWm(@PathVariable int year, @PathVariable int month, @PathVariable int day, @PathVariable long washTimeId, @RequestParam long reservationId) { reservationDao.delete(reservationId); User user = (User) SecurityContextHolder.getContext().getAuthentication().getPrincipal(); user.setTokens(user.getTokens() + 1); userDao.save(user); return "redirect:/wm/" + year + "/" + month + "/" + day + "/" + washTimeId; } @RequestMapping(path = "/{year}/{month}/{day}/{washTimeId}/remove", method = RequestMethod.POST) public String removeWm(@PathVariable int year, @PathVariable int month, @PathVariable int day, @PathVariable long washTimeId, @RequestParam long reservationId) { Reservation reservation = reservationDao.findOne(reservationId); reservationDao.delete(reservationId); User user = reservation.getUser(); user.setTokens(user.getTokens() + 1); userDao.save(user); return "redirect:/wm/" + year + "/" + month + "/" + day + "/" + washTimeId; } @RequestMapping(path = "/{year}/{month}/{day}/{washTimeId}/block", method = RequestMethod.POST) public String blockWm(@PathVariable int year, @PathVariable int month, @PathVariable int day, @PathVariable long washTimeId, @RequestParam int wmNumber) { User user = (User) SecurityContextHolder.getContext().getAuthentication().getPrincipal(); makeReservation(user, year, month, day, washTimeId, wmNumber, ReservationType.BLOCKED); return "redirect:/wm/" + year + "/" + month + "/" + day + "/" + washTimeId; } @PostMapping(path = "/{year}/{month}/{day}/{washTimeId}/register") public String registerWm(@PathVariable int year, @PathVariable int month, 
@PathVariable int day, @PathVariable long washTimeId, @RequestParam int wmNumber) { User user = (User) SecurityContextHolder.getContext().getAuthentication().getPrincipal(); makeReservation(user, year, month, day, washTimeId, wmNumber, ReservationType.USER); user.setTokens(user.getTokens() - 1); userDao.save(user); return "redirect:/wm/" + year + "/" + month + "/" + day + "/" + washTimeId; } private void makeReservation(User user, int year, int month, int day, long washTimeId, int wmNumber, ReservationType reservationType) { Reservation reservation = new Reservation(); reservation.setDate(getSqlDate(year, month, day)); reservation.setUser(user); reservation.setWashTime(washTimeDao.findOne(washTimeId)); reservation.setWm(wmNumber); reservation.setType(reservationType); reservationDao.save(reservation); } private java.sql.Date getSqlDate(int year, int month, int day) { try { Date date = dateFormat.parse(year + "/" + month + "/" + day); return new java.sql.Date(date.getTime()); } catch (ParseException e) { e.printStackTrace(); } return null; } private String getWashTime(WashTime washTime) { return timeFormat.format(washTime.getFromTime()) + " - " + timeFormat.format(washTime.getToTime()); } private List<WmModel> getWmModels(List<Reservation> reservationList, Date date, WashTime washTime) { List<WmModel> wmModels = new ArrayList<>(); List<Integer> brokenWm = getBrokenWm(); boolean isPast = TimeUtil.isPast(timeFormat.format(washTime.getFromTime()), dateFormat.format(date)); for (int i = 0; i != 3; i++) { WmModel wmModel = new WmModel(); Reservation currentReservation = null; for (Reservation reservation : reservationList ) { if (reservation.getWm() == i) { currentReservation = reservation; break; } } if (currentReservation == null) { if (isPast) { wmModel.setType(WmModel.TYPE.PAST); wmModel.setColor("#FF0000"); } else if (brokenWm.contains(i)) { wmModel.setType(WmModel.TYPE.UNAVAILABLE); wmModel.setColor("#FF0000"); } else { wmModel.setType(WmModel.TYPE.FREE); 
wmModel.setColor("#1E9600"); } } else { wmModel.setReservationId(currentReservation.getId()); if (currentReservation.getType() == ReservationType.BLOCKED) { wmModel.setColor("#FF0000"); wmModel.setType(WmModel.TYPE.UNAVAILABLE); } else if (isMyReservation(currentReservation.getUser())) { wmModel.setType(WmModel.TYPE.MY); wmModel.setColor("#FFF200"); wmModel.setUser(currentReservation.getUser()); } else { wmModel.setColor("#FF0000"); wmModel.setType(WmModel.TYPE.RESERVED); wmModel.setUser(currentReservation.getUser()); } } wmModels.add(wmModel); } return wmModels; } private boolean isMyReservation(User reservationUser) { Authentication authentication = SecurityContextHolder.getContext().getAuthentication(); if (authentication.getPrincipal() instanceof User) { User currentUser = (User) authentication.getPrincipal(); if (currentUser.getId() == reservationUser.getId()) { return true; } } return false; } private List<Integer> getBrokenWm() { return new ArrayList<>(); } private boolean isRegistrationAvailable() { return false; } private boolean isPast() { return false; } private String getDayName(Date date) { Calendar calendar = TimeUtil.getCalendar(); calendar.setTime(date); switch (calendar.get(Calendar.DAY_OF_WEEK)) { case 2: return "Poniedziałek"; case 3: return "Wtorek"; case 4: return "Środa"; case 5: return "Czwartek"; case 6: return "Piątek"; case 7: return "Sobota"; case 1: return "Niedziela"; default: return "Null"; } } }
package br.gov.servicos.v3.schema;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.FieldDefaults;
import lombok.experimental.Wither;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;

import javax.xml.bind.annotation.*;

import static br.gov.servicos.config.PortalDeServicosIndex.IMPORTADOR;
import static lombok.AccessLevel.PRIVATE;

/**
 * Data holder for a public service ("Servico") as both a JAXB XML element
 * (field order fixed by {@code propOrder}) and an Elasticsearch document in
 * the importer index. All fields are private (Lombok {@code @FieldDefaults})
 * with generated accessors/withers; no hand-written logic.
 */
@XmlAccessorType(XmlAccessType.NONE)
@XmlType(name = "Servico", propOrder = {
        "nome",
        "sigla",
        "nomesPopulares",
        "descricao",
        "gratuito",
        "solicitantes",
        "tempoTotalEstimado",
        "etapas",
        "orgao",
        "segmentosDaSociedade",
        "areasDeInteresse",
        "palavrasChave",
        "legislacoes",
})
@Document(indexName = IMPORTADOR, type = "servico")
@Data
@Wither
@NoArgsConstructor
@AllArgsConstructor
@FieldDefaults(level = PRIVATE)
public class Servico {

    // Elasticsearch document id; excluded from the XML payload.
    @Id
    @XmlTransient
    String id;

    // Service name is the only mandatory XML element.
    @XmlElement(required = true)
    String nome;

    @XmlElement
    String sigla;

    @XmlElement(name = "nomes-populares")
    NomesPopulares nomesPopulares;

    @XmlElement
    String descricao;

    // NOTE(review): modeled as String rather than boolean — presumably to
    // carry free-form source values; confirm against the importer schema.
    @XmlElement
    String gratuito;

    @XmlElement
    Solicitantes solicitantes;

    @XmlElement(name = "tempo-total-estimado")
    TempoTotalEstimado tempoTotalEstimado;

    @XmlElement
    Etapas etapas;

    @XmlElement
    Orgao orgao;

    @XmlElement(name = "segmentos-da-sociedade")
    SegmentosDaSociedade segmentosDaSociedade;

    @XmlElement(name = "areas-de-interesse")
    AreasDeInteresse areasDeInteresse;

    @XmlElement(name = "palavras-chave")
    PalavrasChave palavrasChave;

    @XmlElement
    Legislacoes legislacoes;
}
package cn.wizzer.modules.sys; import java.util.ArrayList; import java.util.Hashtable; import java.util.List; import java.util.Map; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpSession; import cn.wizzer.common.action.BaseAction; import cn.wizzer.common.filter.GlobalsFilter; import cn.wizzer.common.filter.UserLoginFilter; import cn.wizzer.modules.sys.bean.Sys_resource; import cn.wizzer.modules.sys.bean.Sys_user; import org.apache.commons.lang.math.NumberUtils; import org.nutz.dao.Chain; import org.nutz.dao.Cnd; import org.nutz.dao.Dao; import org.nutz.dao.Sqls; import org.nutz.dao.sql.Sql; import org.nutz.ioc.loader.annotation.Inject; import org.nutz.ioc.loader.annotation.IocBean; import org.nutz.lang.Lang; import org.nutz.lang.Strings; import org.nutz.mvc.annotation.At; import org.nutz.mvc.annotation.By; import org.nutz.mvc.annotation.Filters; import org.nutz.mvc.annotation.Ok; import org.nutz.mvc.annotation.Param; /** * @author Wizzer.cn * @time 2012-9-13 10:54:04 */ @IocBean @At("/private") @Filters({@By(type = GlobalsFilter.class), @By(type = UserLoginFilter.class)}) public class IndexAction extends BaseAction { @Inject protected Dao dao; @At public void dolock(HttpServletRequest req, HttpSession session) { session.setAttribute("validate", "openLockWindow();"); } @At @Ok("vm:template.private.lock") public void lock(HttpServletRequest req, HttpSession session) { } @At @Ok("raw") public boolean reload(@Param("resid") String resid, HttpSession session) { Sys_user user = (Sys_user) session.getAttribute("userSession"); if (daoCtl.update(dao, Sys_user.class, Chain.make("loginresid", resid), Cnd.where("id", "=", user.getId()))) { user.setLoginresid(resid); session.setAttribute("userSession", user); return true; } return false; } @At @Ok("raw") public String dounlock(@Param("password") String password, HttpServletRequest req, HttpSession session) { Sys_user user = (Sys_user) session.getAttribute("userSession"); if (!Lang.digest("MD5", 
Strings.sNull(password).getBytes(), Strings.sNull(user.getSalt()).getBytes(), 3).equals(user.getPassword())) { return ""; } else { session.setAttribute("validate", ""); return "true"; } } @At @Ok("vm:template.private.index") public void index(HttpServletRequest req, HttpSession session) { Sys_user user = (Sys_user) session.getAttribute("userSession"); Sql sql = Sqls.create("select * from sys_role where id in(select roleid from sys_user_role where userid=@userid)"); sql.params().set("userid", user.getId()); List<Record> rolelist = daoCtl.list(dao, sql); List<Integer> rolelist1 = new ArrayList<Integer>(); List<Integer> plist = new ArrayList<Integer>(); for (Record map : rolelist) { rolelist1.add(NumberUtils.toInt(Strings.sNull(map.get("id")))); int pid = NumberUtils.toInt(Strings.sNull(map.get("pid"))); if (!plist.contains(pid)) plist.add(pid); } if (rolelist1.contains(2)) { user.setSysrole(true); } else { user.setSysrole(false); } user.setRolelist(rolelist1); user.setProlist(plist); session.setAttribute("userSession", user); String resid = Strings.sNull(user.getLoginresid()); Sql sql1 = Sqls .create("select distinct resourceid from sys_role_resource where ( roleid in(select roleid from sys_user_role where userid=@userid) or roleid=1) and resourceid not in(select id from sys_resource where state=1)"); sql1.params().set("userid", user.getId()); user.setReslist(daoCtl.getStrRowValues(dao, sql1)); if (user.getReslist() != null && user.getReslist().size() > 0) { List<Sys_resource> moduleslist = daoCtl.list(dao, Sys_resource.class, Cnd.where("id", "like", "____").and("id", "in", user.getReslist()).asc("location") ); req.setAttribute("moduleslist", moduleslist); if ("".equals(resid)) { for (Sys_resource res : moduleslist) { resid = res.getId(); break; } } List<Sys_resource> modulessublist = daoCtl.list(dao, Sys_resource.class, Cnd.where("id", "like", resid + "____").and("id", "in", user.getReslist()).asc("location") ); req.setAttribute("modulessublist", modulessublist); } 
req.setAttribute("resid", resid); // button HashMap List<List<String>> reslist = daoCtl .getMulRowValue(dao, Sqls .create("SELECT a.url,b.button FROM sys_resource a,sys_role_resource b WHERE a.ID=b.RESOURCEID " + " AND (b.button<>'' or b.button is not null) AND ( b.roleid IN(SELECT roleid FROM sys_user_role WHERE userid=" + user.getId() + ") OR roleid=1) " + " AND b.resourceid NOT IN(SELECT id FROM sys_resource WHERE state=1)")); Hashtable<String, String> btnmap = new Hashtable<String, String>(); for (List<String> obj : reslist) { String key = Strings.sNull(obj.get(0)); String value = Strings.sNull(btnmap.get(key)) + Strings.sNull(obj.get(1)); btnmap.put(key, value); } user.setBtnmap(btnmap); req.setAttribute("validate", session.getAttribute("validate")); } @At @Ok("vm:template.private.left") public void left(@Param("sys_menuid") String sys_menuid, HttpServletRequest req, HttpSession session) { Sys_user user = (Sys_user) session.getAttribute("userSession"); List<Sys_resource> menulist = daoCtl.list(dao, Sys_resource.class, Cnd.where("id", "like", sys_menuid + "____").and("id", "in", user.getReslist()).asc("LOCATION")); Hashtable<String, List<Sys_resource>> threemenu = new Hashtable<String, List<Sys_resource>>(); for (int i = 0; i < menulist.size(); i++) { List<Sys_resource> threemenulist = daoCtl.list(dao, Sys_resource.class, Cnd.where("id", "like", menulist.get(i).getId() + "____").and("id", "in", user.getReslist()).asc("LOCATION") ); threemenu.put(menulist.get(i).getId(), threemenulist); } req.setAttribute("menulist", menulist); req.setAttribute("threemenulist", threemenu); } @At @Ok("vm:template.private.welcome") public void welcome() { } }
package com.almasb.fxgl.asset; import com.almasb.fxgl.app.FXGL; import com.almasb.fxgl.audio.Music; import com.almasb.fxgl.audio.Sound; import com.almasb.fxgl.logging.Logger; import com.almasb.fxgl.parser.KVFile; import com.almasb.fxgl.scene.CSS; import com.almasb.fxgl.texture.Texture; import com.almasb.fxgl.ui.FontFactory; import com.almasb.fxgl.ui.UIController; import com.almasb.fxgl.util.LRUCache; import com.badlogic.gdx.ai.btree.BehaviorTree; import com.badlogic.gdx.ai.btree.utils.BehaviorTreeParser; import com.google.inject.Inject; import com.google.inject.Singleton; import com.google.inject.name.Named; import javafx.fxml.FXMLLoader; import javafx.scene.Parent; import javafx.scene.image.Image; import javafx.scene.media.AudioClip; import javafx.scene.media.Media; import javafx.scene.text.Font; import java.io.*; import java.net.URL; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.security.CodeSource; import java.util.ArrayList; import java.util.List; import java.util.PropertyResourceBundle; import java.util.ResourceBundle; import java.util.stream.Collectors; import java.util.stream.Stream; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; /** * Handles all resource (asset) loading operations. * <p> * "assets" directory must be located in source folder ("src" by default). 
* <p> * Resources (assets) will be searched for in these specified directories: * <ul> * <li>Texture - /assets/textures/</li> * <li>Sound - /assets/sounds/</li> * <li>Music - /assets/music/</li> * <li>Text (List&lt;String&gt;) - /assets/text/</li> * <li>KVFile - /assets/kv/</li> * <li>Data - /assets/data/</li> * <li>Scripts - /assets/scripts/</li> * <li>CSS - /assets/ui/css/</li> * <li>Font - /assets/ui/fonts/</li> * <li>App icons - /assets/ui/icons/</li> * <li>Cursors - /assets/ui/cursors/</li> * </ul> * * @author Almas Baimagambetov (AlmasB) (almaslvl@gmail.com) */ @Singleton public class AssetLoader { private static final String ASSETS_DIR = "/assets/"; private static final String TEXTURES_DIR = ASSETS_DIR + "textures/"; private static final String SOUNDS_DIR = ASSETS_DIR + "sounds/"; private static final String MUSIC_DIR = ASSETS_DIR + "music/"; private static final String TEXT_DIR = ASSETS_DIR + "text/"; private static final String KV_DIR = ASSETS_DIR + "kv/"; private static final String BINARY_DIR = ASSETS_DIR + "data/"; private static final String SCRIPTS_DIR = ASSETS_DIR + "scripts/"; private static final String PROPERTIES_DIR = ASSETS_DIR + "properties/"; private static final String AI_DIR = ASSETS_DIR + "ai/"; private static final String UI_DIR = ASSETS_DIR + "ui/"; private static final String CSS_DIR = UI_DIR + "css/"; private static final String FONTS_DIR = UI_DIR + "fonts/"; private static final String ICON_DIR = UI_DIR + "icons/"; private static final String CURSORS_DIR = UI_DIR + "cursors/"; private static final Logger log = FXGL.getLogger("FXGL.AssetLoader"); private final LRUCache<String, Object> cachedAssets; @Inject private AssetLoader(@Named("asset.cache.size") int cacheSize) { cachedAssets = new LRUCache<>(cacheSize); log.debug("Service [AssetLoader] initialized"); } public Texture loadTexture(String name) { Object asset = getAssetFromCache(TEXTURES_DIR + name); if (asset != null) { return Texture.class.cast(asset).copy(); } try (InputStream is 
= getStream(TEXTURES_DIR + name)) { Texture texture = new Texture(new Image(is)); cachedAssets.put(TEXTURES_DIR + name, texture); return texture; } catch (Exception e) { throw loadFailed(name, e); } } public Sound loadSound(String name) { Object asset = getAssetFromCache(SOUNDS_DIR + name); if (asset != null) { return Sound.class.cast(asset); } try { Sound sound = new Sound(new AudioClip(getURL(SOUNDS_DIR + name).toExternalForm())); cachedAssets.put(SOUNDS_DIR + name, sound); return sound; } catch (Exception e) { throw loadFailed(name, e); } } public Music loadMusic(String name) { Object asset = getAssetFromCache(MUSIC_DIR + name); if (asset != null) { return Music.class.cast(asset); } try { Music music = new Music(new Media(getURL(MUSIC_DIR + name).toExternalForm())); cachedAssets.put(MUSIC_DIR + name, music); return music; } catch (Exception e) { throw loadFailed(name, e); } } public List<String> loadText(String name) { Object asset = getAssetFromCache(TEXT_DIR + name); if (asset != null) { return (List<String>)asset; } List<String> text = readAllLines(TEXT_DIR + name); cachedAssets.put(TEXT_DIR + name, text); return text; } public KVFile loadKV(String name) { return new KVFile(readAllLines(KV_DIR + name)); } public String loadScript(String name) { StringBuilder builder = new StringBuilder(); readAllLines(SCRIPTS_DIR + name) .forEach(line -> builder.append(line).append('\n')); return builder.toString(); } public ResourceBundle loadResourceBundle(String name) { try (InputStream is = getStream(PROPERTIES_DIR + name)) { return new PropertyResourceBundle(is); } catch (Exception e) { throw loadFailed(name, e); } } public Image loadCursorImage(String name) { try (InputStream is = getStream(CURSORS_DIR + name)) { return new Image(is); } catch (Exception e) { throw loadFailed(name, e); } } public Parent loadFXML(String name, UIController controller) { try (InputStream is = getStream(UI_DIR + name)) { FXMLLoader loader = new FXMLLoader(); loader.setController(controller); 
Parent ui = loader.load(is); controller.init(); return ui; } catch (Exception e) { throw loadFailed(name, e); } } public CSS loadCSS(String name) { try { return new CSS(getURL(CSS_DIR + name).toExternalForm()); } catch (Exception e) { throw loadFailed(name, e); } } public FontFactory loadFont(String name) { Object asset = getAssetFromCache(FONTS_DIR + name); if (asset != null) { return FontFactory.class.cast(asset); } try (InputStream is = getStream(FONTS_DIR + name)) { Font font = Font.loadFont(is, 12); if (font == null) font = Font.font(12); FontFactory fontFactory = new FontFactory(font); cachedAssets.put(FONTS_DIR + name, fontFactory); return fontFactory; } catch (Exception e) { throw loadFailed(name, e); } } public Image loadAppIcon(String name) { try (InputStream is = getStream(ICON_DIR + name)) { return new Image(is); } catch (Exception e) { throw loadFailed(name, e); } } public <T> BehaviorTree<T> loadBehaviorTree(String name) { try (InputStream is = getStream(AI_DIR + name)) { return new BehaviorTreeParser<T>().parse(is, null); } catch (Exception e) { throw loadFailed(name, e); } } @SuppressWarnings("unchecked") private <T> T loadDataInternal(String name) { try (ObjectInputStream ois = new ObjectInputStream(getStream(BINARY_DIR + name))) { return (T) ois.readObject(); } catch (Exception e) { throw loadFailed(name, e); } } private URL getURL(String name) { log.debug("Loading from disk: " + name); URL url = getClass().getResource(name); if (url == null) { throw new IllegalArgumentException("Asset \"" + name + "\" was not found!"); } return url; } public InputStream getStream(String name) { try { InputStream is = getURL(name).openStream(); if (is == null) throw new IOException("Input stream to \"" + name + "\" is null!"); return is; } catch (IOException e) { throw new IllegalArgumentException("Failed to obtain input stream to URL: " + e.getMessage()); } } /** * Load an asset from cache. 
* * @param name asset name * @return asset object or null if not found */ private Object getAssetFromCache(String name) { Object asset = cachedAssets.get(name); if (asset != null) { log.debug("Loading from cache: " + name); return asset; } else { return null; } } /** * Read all lines from a file. Bytes from the file are decoded into characters * using the {@link StandardCharsets#UTF_8 UTF-8} {@link Charset charset}. * * @param name resource name * @return the lines from the file as a {@code List} */ private List<String> readAllLines(String name) { try (BufferedReader reader = new BufferedReader(new InputStreamReader(getStream(name)))) { List<String> result = new ArrayList<>(); for (; ; ) { String line = reader.readLine(); if (line == null) break; result.add(line); } return result; } catch (Exception e) { throw loadFailed(name, e); } } /** * Pre-loads all textures / sounds / music / text / fonts and binary data * from their respective folders. */ public void cache() { try { loadFileNames(TEXTURES_DIR).forEach(this::loadTexture); loadFileNames(SOUNDS_DIR).forEach(this::loadSound); loadFileNames(MUSIC_DIR).forEach(this::loadMusic); loadFileNames(TEXT_DIR).forEach(this::loadText); loadFileNames(FONTS_DIR).forEach(this::loadFont); loadFileNames(BINARY_DIR).forEach(this::loadDataInternal); } catch (Exception e) { throw loadFailed("Caching Failed", e); } } /** * Release all cached assets. */ public void clearCache() { log.debug("Clearing assets cache"); cachedAssets.clear(); } /** * Loads file names from a directory. * Note: directory name must be in the format "/assets/...". * Returned file names are relativized to the given directory name. 
* * @param directory name of directory * @return list of file names * @throws Exception */ public List<String> loadFileNames(String directory) throws Exception { URL url = getClass().getResource(directory); if (url != null) { if (url.toString().startsWith("jar")) return loadFileNamesJar(directory.substring(1)); Path dir = Paths.get(url.toURI()); if (Files.exists(dir)) { try (Stream<Path> files = Files.walk(dir)) { return files.filter(Files::isRegularFile) .map(file -> dir.relativize(file).toString().replace("\\", "/")) .collect(Collectors.toList()); } } } return loadFileNamesJar(directory.substring(1)); } /** * Loads file names from a directory when running within a jar. * If it contains other folders they'll be searched too. * * @param folderName folder files of which need to be retrieved * @return list of file names */ private static List<String> loadFileNamesJar(String folderName) { List<String> fileNames = new ArrayList<>(); CodeSource src = AssetLoader.class.getProtectionDomain().getCodeSource(); if (src != null) { URL jar = src.getLocation(); try (InputStream is = jar.openStream(); ZipInputStream zip = new ZipInputStream(is)) { ZipEntry ze; while ((ze = zip.getNextEntry()) != null) { String entryName = ze.getName(); if (entryName.startsWith(folderName)) { if (entryName.endsWith("/")) continue; fileNames.add(entryName.substring(entryName.indexOf(folderName) + folderName.length())); } } } catch (IOException e) { log.warning("Failed to load file names from jar - " + e.getMessage()); } } else { log.warning("Failed to load file names from jar - No code source"); } return fileNames; } private IllegalArgumentException loadFailed(String assetName, Throwable error) { log.debug("Loading failed for asset: " + assetName + ". Cause: " + error.getMessage()); return new IllegalArgumentException("Failed to load asset: " + assetName + ". Cause: " + error.getMessage()); } }
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package com.areen.jlib.util; /** * Various (static) methods for all kind of String manipulation. * * @author Dejan */ public class StringUtility { /** * Get a "proper" String object that contains a commonly accepted format for user e-mail in the form * "Fname Sname <email@example.com>" . * * @param argFullName * @param argEmail * @return */ public static String toEmailString(String argFullName, String argEmail) { return argFullName + "<" + argEmail + ">"; } // toEmailString() method /** * Same as the method above, however here we expect argData to be in the form: * "Fname Sname, email@example.com" * * @param argData * @return */ public static String toEmailString(String argData) { String[] parts = argData.split(","); return toEmailString(parts[0].trim(), parts[1].trim()); } // toEmailString() method } // StringUtility class
package com.bio4j.model;

import java.util.Arrays;
import java.util.Optional;
import java.util.Locale;

import com.bio4j.angulillos.*;
import com.bio4j.angulillos.Arity.*;

/**
 * Typed-graph schema for the NCBI taxonomy: {@code Taxon} vertices with
 * id/name/rank properties, linked by {@code Parent} edges (each taxon has at
 * most one parent). Built on the angulillos typed-graph API over an arbitrary
 * raw graph with vertex type {@code V} and edge type {@code E}.
 */
public final class NCBITaxonomyGraph<V,E> extends TypedGraph<NCBITaxonomyGraph<V,E>,V,E> {

  public NCBITaxonomyGraph(UntypedGraph<V,E> graph) { super(graph); }

  @Override
  public final NCBITaxonomyGraph<V,E> self() { return this; }

  /* ## Taxon */
  public final class Taxon extends Vertex<Taxon> {

    private Taxon(V vertex) { super(vertex, taxon); }

    @Override
    public final Taxon self() { return this; }
  }

  // Singleton vertex-type descriptor for Taxon.
  public final TaxonType taxon = new TaxonType();

  public final class TaxonType extends VertexType<Taxon> {

    @Override
    public final Taxon fromRaw(V vertex) { return new Taxon(vertex); }

    // NCBI taxid; unique-indexed, so lookups by id return at most one taxon.
    public final ID id = new ID();

    public final class ID extends Property<String> implements FromAtMostOne, ToOne {

      private ID() { super(String.class); }

      public final Index index = new Index();

      public final class Index extends UniqueIndex<ID, String> {

        private Index() { super(id); }
      }
    }

    // Scientific name; not unique across taxa.
    public final Name name = new Name();

    public final class Name extends Property<String> implements FromAny, ToOne {

      private Name() { super(String.class); }
    }

    // Taxonomic rank; optional (ToAtMostOne) because not every node has one.
    public final Rank rank = new Rank();

    public final class Rank extends Property<TaxonomicRank> implements FromAny, ToAtMostOne {

      private Rank() { super(TaxonomicRank.class); }
    }
  }

  /*
    This is the set of ranks stored in Bio4j. There is no general list of valid
    ranks, because there is an indeterminate number of ranks as a taxonomist may
    invent a new rank at will at any time if they feel this is necessary.
  */
  public static enum TaxonomicRank {

    Superkingdom,
    Kingdom,
    Superphylum,
    Phylum,
    Subphylum,
    Class,
    Subclass,
    Superclass,
    Infraclass,
    Order,
    Parvorder,
    Suborder,
    Infraorder,
    Family,
    Subfamily,
    Superfamily,
    Tribe,
    Subtribe,
    Genus,
    Subgenus,
    SpeciesGroup,
    SpeciesSubgroup,
    Species,
    Subspecies,
    Varietas,
    Forma;

    /* Converts values of the enum to lower-case strings (locale-independent). */
    @Override
    public String toString() { return this.name().toLowerCase(Locale.ENGLISH); }

    /* Converts strings to enum values _ignoring case_, returns `Optional` (empty when unknown). */
    public static Optional<TaxonomicRank> fromString(String name) {

      return Arrays.stream(TaxonomicRank.values())
        .filter(rank -> rank.toString().equalsIgnoreCase(name))
        .findFirst();
    }
  }

  // Edge from child taxon to its parent taxon.
  public final class Parent extends Edge<Taxon, Parent, Taxon> {

    private Parent(E edge) { super(edge, parent); }

    @Override
    public final Parent self() { return this; }
  }

  // Singleton edge-type descriptor: any number of children, at most one parent.
  public final ParentType parent = new ParentType();

  public final class ParentType extends EdgeType<Taxon, Parent, Taxon> implements FromAny, ToAtMostOne {

    private ParentType() { super(taxon, taxon); }

    @Override
    public final Parent fromRaw(E edge) { return new Parent(edge); }
  }
}
package com.cronutils.model; import com.cronutils.model.field.*; import com.google.common.collect.Lists; import org.joda.time.DateTime; import java.util.Collections; import java.util.List; import java.util.Map; //Approach 1: [Discarded] brute force, iterate over all possible future dates until finding first matching cron. //Approach 2: [...] precalculate possible values for each field and take nearest ones //Approach 3: [...] nearestValue(FieldExpression, int timeValue): return plus -> method to retrieve value for field an sum //Aproach 4: similar to previous one, but holding data that would contain possible values in structure and set them to date class ExecutionTime { private CronTimes cronTimes; private ExecutionTime(Map<CronFieldName, CronField> fields){ cronTimes.setSeconds( fromFieldToTimeValues( fields.get(CronFieldName.SECOND).getExpression(), getMaxForCronField(CronFieldName.SECOND) ) ); } public static ExecutionTime forCron(Cron cron){ return new ExecutionTime(cron.retrieveFieldsAsMap()); } // public DateTime afterDate(DateTime date){ // date.plus() // public DateTime beforeDate(DateTime date){ private List<Integer> fromFieldToTimeValues(FieldExpression fieldExpression, int max){ List<Integer> values = Lists.newArrayList(); if(fieldExpression instanceof And){ values = fromFieldToTimeValues((And)fieldExpression, max); } if(fieldExpression instanceof Between){ values = fromFieldToTimeValues((Between)fieldExpression, max); } if(fieldExpression instanceof On){ values = fromFieldToTimeValues((On)fieldExpression, max); } if(fieldExpression instanceof Always){ values = fromFieldToTimeValues((Always)fieldExpression, max); } Collections.sort(values); return values; } private List<Integer> fromFieldToTimeValues(And fieldExpression, int max){ List<Integer> values = Lists.newArrayList(); for(FieldExpression expression : fieldExpression.getExpressions()){ values.addAll(fromFieldToTimeValues(expression, max)); } return values; } private List<Integer> 
fromFieldToTimeValues(Between fieldExpression, int max){ List<Integer> values = Lists.newArrayList(); int every = fieldExpression.getEvery().getTime(); for(int j = fieldExpression.getFrom(); j < fieldExpression.getTo() + 1; j+=every){ values.add(j); } return values; } private List<Integer> fromFieldToTimeValues(On fieldExpression, int max){ List<Integer> values = Lists.newArrayList(); values.add(fieldExpression.getTime()); return values; } private List<Integer> fromFieldToTimeValues(Always fieldExpression, int max){ List<Integer> values = Lists.newArrayList(); int every = fieldExpression.getEvery().getTime(); for(int j = 1; j <= max; j+=every){ values.add(j); } return values; } private int getMaxForCronField(CronFieldName cronFieldName){ switch (cronFieldName){ case YEAR: return DateTime.now().getYear() + 60; case MONTH: return 12; case DAY_OF_MONTH: return 31; case DAY_OF_WEEK: return 7; default: return 60; } } private static class CronTimes { //universal private List<Integer> seconds; private List<Integer> minutes; private List<Integer> hours; private List<Integer> months; private List<Integer> years; //specific to year and/or month. Should be evaluated after universal values are set. private List<Integer> daysOfMonth; private List<Integer> daysOfWeek; private CronTimes() {} public void setSeconds(List<Integer> seconds) { this.seconds = seconds; } public void setMinutes(List<Integer> minutes) { this.minutes = minutes; } public void setHours(List<Integer> hours) { this.hours = hours; } public void setMonths(List<Integer> months) { this.months = months; } public void setYears(List<Integer> years) { this.years = years; } public void setDaysOfMonth(List<Integer> daysOfMonth) { this.daysOfMonth = daysOfMonth; } public void setDaysOfWeek(List<Integer> daysOfWeek) { this.daysOfWeek = daysOfWeek; } } }
package com.easypost.model; import com.easypost.exception.EasyPostException; import com.easypost.net.EasyPostResource; import java.util.Arrays; import java.util.HashMap; import java.util.Map; import java.util.List; public class CarrierAccount extends EasyPostResource { public String id; String readable; String description; String reference; Map<String, Object> credentials; public String getId() { return id; } public void setId(String id) { this.id = id; } public String getReadable() { return readable; } public void setReadable(String readable) { this.readable = readable; } public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } public String getReference() { return reference; } public void setReference(String reference) { this.reference = reference; } public Map<String, Object> getCredentials() { return credentials; } public void setCredentials(Map<String, Object> credentials) { this.credentials = credentials; } // update public CarrierAccount update(Map<String, Object> params) throws EasyPostException { return this.update(params, null); } public CarrierAccount update(Map<String, Object> params, String apiKey) throws EasyPostException { Map<String, Object> wrappedParams = new HashMap<String, Object>(); wrappedParams.put("carrier_account", params); CarrierAccount response = request(RequestMethod.PUT, instanceURL(CarrierAccount.class, this.getId()), wrappedParams, CarrierAccount.class, apiKey); this.merge(this, response); return this; } // delete public void delete() throws EasyPostException { this.delete(null); } public void delete(String apiKey) throws EasyPostException { request(RequestMethod.DELETE, instanceURL(CarrierAccount.class, this.getId()), null, CarrierAccount.class, apiKey); } // create public static CarrierAccount create(Map<String, Object> params) throws EasyPostException { return create(params, null); } public static CarrierAccount create(Map<String, Object> params, 
String apiKey) throws EasyPostException { Map<String, Object> wrappedParams = new HashMap<String, Object>(); wrappedParams.put("carrier_account", params); return request(RequestMethod.POST, classURL(CarrierAccount.class), wrappedParams, CarrierAccount.class, apiKey); } // retrieve public static CarrierAccount retrieve(String id) throws EasyPostException { return retrieve(id, null); } public static CarrierAccount retrieve(String id, String apiKey) throws EasyPostException { return request(RequestMethod.GET, instanceURL(CarrierAccount.class, id), null, CarrierAccount.class, apiKey); } // all public static List<CarrierAccount> all(Map<String, Object> params) throws EasyPostException { return all(params, null); } public static List<CarrierAccount> all(Map<String, Object> params, String apiKey) throws EasyPostException { CarrierAccount[] response = request(RequestMethod.GET, classURL(CarrierAccount.class), params, CarrierAccount[].class, apiKey); return Arrays.asList(response); } }
package com.frostwire.jlibtorrent; import com.frostwire.jlibtorrent.alerts.*; import com.frostwire.jlibtorrent.plugins.Plugin; import com.frostwire.jlibtorrent.plugins.SwigPlugin; import com.frostwire.jlibtorrent.swig.*; import com.frostwire.jlibtorrent.swig.session_handle.options_t; import java.io.File; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; /** * The session holds all state that spans multiple torrents. Among other * things it runs the network loop and manages all torrents. Once it's * created, the session object will spawn the main thread that will do all * the work. The main thread will be idle as long it doesn't have any * torrents to participate in. * <p/> * This class belongs to a middle logical layer of abstraction. It's a wrapper * of the underlying swig session object (from libtorrent), but it does not * expose all the raw features, not expose a very high level interface * like {@link com.frostwire.jlibtorrent.DHT DHT} or * {@link com.frostwire.jlibtorrent.Downloader Downloader}. * * @author gubatron * @author aldenml */ public final class Session extends SessionHandle { private static final Logger LOG = Logger.getLogger(Session.class); private static final long REQUEST_STATS_RESOLUTION_MILLIS = 1000; private static final long ALERTS_LOOP_WAIT_MILLIS = 500; private static final LruCache<String, byte[]> MAGNET_CACHE = new LruCache<String, byte[]>(50); private static final Object MAGNET_LOCK = new Object(); private final session s; private final JavaStat stat; private final SessionStats stats; private long lastStatsRequestTime; private long lastStatSecondTick; private final SparseArray<ArrayList<AlertListener>> listeners; private final SparseArray<AlertListener[]> listenerSnapshots; private boolean running; private final LinkedList<SwigPlugin> plugins; /** * The flag alert_mask is always set to all_categories. 
* * @param settings * @param logging */ public Session(SettingsPack settings, boolean logging, AlertListener listener) { super(createSession(settings, logging)); this.s = (session) super.s; this.stat = new JavaStat(); this.stats = new SessionStats(stat); this.listeners = new SparseArray<ArrayList<AlertListener>>(); this.listenerSnapshots = new SparseArray<AlertListener[]>(); if (listener != null) { addListener(listener); } this.running = true; alertsLoop(); for (Pair<String, Integer> router : defaultRouters()) { s.add_dht_router(router.to_string_int_pair()); } this.plugins = new LinkedList<SwigPlugin>(); } public Session() { this(new SettingsPack(), false, null); } /** * This constructor allow you to specify the listen interfaces in the * same format libtorrent accepts. Like for example, IPv4+IPv6 in the * first available port: "0.0.0.0:0,[::]:0". * <p/> * The {@code retries} parameter correspond to the internal libtorrent * setting of {@code max_retry_port_bind}. That is: if binding to a * specific port fails, should the port be incremented by one and tried * again. This setting specifies how many times to retry a failed port * bind. * * @param retries * @param interfaces * @param logging * @param listener */ public Session(String interfaces, int retries, boolean logging, AlertListener listener) { this(createSettings(interfaces, retries), logging, listener); } public session getSwig() { return s; } public void addListener(AlertListener listener) { modifyListeners(true, listener); } public void removeListener(AlertListener listener) { modifyListeners(false, listener); } /** * You add torrents through the add_torrent() function where you give an * object with all the parameters. The add_torrent() overloads will block * until the torrent has been added (or failed to be added) and returns * an error code and a torrent_handle. 
In order to add torrents more * efficiently, consider using async_add_torrent() which returns * immediately, without waiting for the torrent to add. Notification of * the torrent being added is sent as add_torrent_alert. * <p/> * The overload that does not take an error_code throws an exception on * error and is not available when building without exception support. * The torrent_handle returned by add_torrent() can be used to retrieve * information about the torrent's progress, its peers etc. It is also * used to abort a torrent. * <p/> * If the torrent you are trying to add already exists in the session (is * either queued for checking, being checked or downloading) * ``add_torrent()`` will throw libtorrent_exception which derives from * ``std::exception`` unless duplicate_is_error is set to false. In that * case, add_torrent() will return the handle to the existing torrent. * <p/> * all torrent_handles must be destructed before the session is destructed! * * @param ti * @param saveDir * @param priorities * @param resumeFile * @return */ public TorrentHandle addTorrent(TorrentInfo ti, File saveDir, Priority[] priorities, File resumeFile) { return addTorrentSupport(ti, saveDir, priorities, resumeFile, false); } /** * You add torrents through the add_torrent() function where you give an * object with all the parameters. The add_torrent() overloads will block * until the torrent has been added (or failed to be added) and returns * an error code and a torrent_handle. In order to add torrents more * efficiently, consider using async_add_torrent() which returns * immediately, without waiting for the torrent to add. Notification of * the torrent being added is sent as add_torrent_alert. * <p/> * The overload that does not take an error_code throws an exception on * error and is not available when building without exception support. * The torrent_handle returned by add_torrent() can be used to retrieve * information about the torrent's progress, its peers etc. 
It is also * used to abort a torrent. * <p/> * If the torrent you are trying to add already exists in the session (is * either queued for checking, being checked or downloading) * ``add_torrent()`` will throw libtorrent_exception which derives from * ``std::exception`` unless duplicate_is_error is set to false. In that * case, add_torrent() will return the handle to the existing torrent. * <p/> * all torrent_handles must be destructed before the session is destructed! * * @param torrent * @param saveDir * @param resumeFile * @return */ public TorrentHandle addTorrent(File torrent, File saveDir, File resumeFile) { return addTorrent(new TorrentInfo(torrent), saveDir, null, resumeFile); } /** * You add torrents through the add_torrent() function where you give an * object with all the parameters. The add_torrent() overloads will block * until the torrent has been added (or failed to be added) and returns * an error code and a torrent_handle. In order to add torrents more * efficiently, consider using async_add_torrent() which returns * immediately, without waiting for the torrent to add. Notification of * the torrent being added is sent as add_torrent_alert. * <p/> * The overload that does not take an error_code throws an exception on * error and is not available when building without exception support. * The torrent_handle returned by add_torrent() can be used to retrieve * information about the torrent's progress, its peers etc. It is also * used to abort a torrent. * <p/> * If the torrent you are trying to add already exists in the session (is * either queued for checking, being checked or downloading) * ``add_torrent()`` will throw libtorrent_exception which derives from * ``std::exception`` unless duplicate_is_error is set to false. In that * case, add_torrent() will return the handle to the existing torrent. * <p/> * all torrent_handles must be destructed before the session is destructed! 
* * @param torrent * @param saveDir * @return */ public TorrentHandle addTorrent(File torrent, File saveDir) { return addTorrent(torrent, saveDir, null); } /** * In order to add torrents more efficiently, consider using this which returns * immediately, without waiting for the torrent to add. Notification of * the torrent being added is sent as {@link com.frostwire.jlibtorrent.alerts.AddTorrentAlert}. * <p/> * If the torrent you are trying to add already exists in the session (is * either queued for checking, being checked or downloading) * ``add_torrent()`` will throw libtorrent_exception which derives from * ``std::exception`` unless duplicate_is_error is set to false. In that * case, add_torrent() will return the handle to the existing torrent. * * @param ti * @param saveDir * @param priorities * @param resumeFile */ public void asyncAddTorrent(TorrentInfo ti, File saveDir, Priority[] priorities, File resumeFile) { addTorrentSupport(ti, saveDir, priorities, resumeFile, true); } /** * You add torrents through the add_torrent() function where you give an * object with all the parameters. The add_torrent() overloads will block * until the torrent has been added (or failed to be added) and returns * an error code and a torrent_handle. In order to add torrents more * efficiently, consider using async_add_torrent() which returns * immediately, without waiting for the torrent to add. Notification of * the torrent being added is sent as add_torrent_alert. * <p/> * The overload that does not take an error_code throws an exception on * error and is not available when building without exception support. * The torrent_handle returned by add_torrent() can be used to retrieve * information about the torrent's progress, its peers etc. It is also * used to abort a torrent. 
* <p/> * If the torrent you are trying to add already exists in the session (is * either queued for checking, being checked or downloading) * ``add_torrent()`` will throw libtorrent_exception which derives from * ``std::exception`` unless duplicate_is_error is set to false. In that * case, add_torrent() will return the handle to the existing torrent. * <p/> * all torrent_handles must be destructed before the session is destructed! * * @param torrent * @param saveDir * @param resumeFile */ public void asyncAddTorrent(File torrent, File saveDir, File resumeFile) { asyncAddTorrent(new TorrentInfo(torrent), saveDir, null, resumeFile); } /** * You add torrents through the add_torrent() function where you give an * object with all the parameters. The add_torrent() overloads will block * until the torrent has been added (or failed to be added) and returns * an error code and a torrent_handle. In order to add torrents more * efficiently, consider using async_add_torrent() which returns * immediately, without waiting for the torrent to add. Notification of * the torrent being added is sent as add_torrent_alert. * <p/> * The overload that does not take an error_code throws an exception on * error and is not available when building without exception support. * The torrent_handle returned by add_torrent() can be used to retrieve * information about the torrent's progress, its peers etc. It is also * used to abort a torrent. * <p/> * If the torrent you are trying to add already exists in the session (is * either queued for checking, being checked or downloading) * ``add_torrent()`` will throw libtorrent_exception which derives from * ``std::exception`` unless duplicate_is_error is set to false. In that * case, add_torrent() will return the handle to the existing torrent. * <p/> * all torrent_handles must be destructed before the session is destructed! 
* * @param torrent * @param saveDir */ public void asyncAddTorrent(File torrent, File saveDir) { asyncAddTorrent(torrent, saveDir, null); } /** * This method will close all peer connections associated with the torrent and tell the * tracker that we've stopped participating in the swarm. This operation cannot fail. * When it completes, you will receive a torrent_removed_alert. * <p/> * The optional second argument options can be used to delete all the files downloaded * by this torrent. To do so, pass in the value session::delete_files. The removal of * the torrent is asyncronous, there is no guarantee that adding the same torrent immediately * after it was removed will not throw a libtorrent_exception exception. Once the torrent * is deleted, a torrent_deleted_alert is posted. * * @param th */ public void removeTorrent(TorrentHandle th, Options options) { s.remove_torrent(th.getSwig(), options.getSwig()); } /** * This method will close all peer connections associated with the torrent and tell the * tracker that we've stopped participating in the swarm. This operation cannot fail. * When it completes, you will receive a torrent_removed_alert. * * @param th */ public void removeTorrent(TorrentHandle th) { if (th.isValid()) { s.remove_torrent(th.getSwig()); } } /** * Applies the settings specified by the settings pack {@code sp}. This is an * asynchronous operation that will return immediately and actually apply * the settings to the main thread of libtorrent some time later. * * @param sp */ public void applySettings(SettingsPack sp) { s.apply_settings(sp.getSwig()); } /** * In case you want to destruct the session asynchronously, you can * request a session destruction proxy. If you don't do this, the * destructor of the session object will block while the trackers are * contacted. 
If you keep one ``session_proxy`` to the session when * destructing it, the destructor will not block, but start to close down * the session, the destructor of the proxy will then synchronize the * threads. So, the destruction of the session is performed from the * ``session`` destructor call until the ``session_proxy`` destructor * call. The ``session_proxy`` does not have any operations on it (since * the session is being closed down, no operations are allowed on it). * The only valid operation is calling the destructor:: * * @return */ public SessionProxy abort() { running = false; return new SessionProxy(s.abort()); } /** * Pausing the session has the same effect as pausing every torrent in * it, except that torrents will not be resumed by the auto-manage * mechanism. */ public void pause() { s.pause(); } /** * Resuming will restore the torrents to their previous paused * state. i.e. the session pause state is separate from the torrent pause * state. A torrent is inactive if it is paused or if the session is * paused. */ public void resume() { s.resume(); } public boolean isPaused() { return s.is_paused(); } /** * returns the port we ended up listening on. Since you * just pass a port-range to the constructor and to ``listen_on()``, to * know which port it ended up using, you have to ask the session using * this function. * * @return */ public int getListenPort() { return s.listen_port(); } public int getSslListenPort() { return s.ssl_listen_port(); } /** * will tell you whether or not the session has * successfully opened a listening port. If it hasn't, this function will * return false, and then you can use ``listen_on()`` to make another * attempt. * * @return */ public boolean isListening() { return s.is_listening(); } /** * Loads and saves all session settings, including dht settings, * encryption settings and proxy settings. 
{@link #saveState()} * internally writes all keys to an {@link entry} that's passed in, * which needs to either not be initialized, or initialized as a dictionary. * <p/> * The {@code flags} arguments passed in to this method can be used to * filter which parts of the session state to save. By default, all state * is saved (except for the individual torrents). * See {@link com.frostwire.jlibtorrent.swig.session_handle.save_state_flags_t} * * @return */ public byte[] saveState(long flags) { entry e = new entry(); s.save_state(e); return Vectors.byte_vector2bytes(e.bencode()); } /** * Same as calling {@link #saveState(long)} with all save state flags. * * @return * @see #saveState(long) */ public byte[] saveState() { entry e = new entry(); s.save_state(e); return Vectors.byte_vector2bytes(e.bencode()); } /** * Loads and saves all session settings, including dht_settings, * encryption settings and proxy settings. ``save_state`` writes all keys * to the ``entry`` that's passed in, which needs to either not be * initialized, or initialized as a dictionary. * <p/> * ``load_state`` expects a lazy_entry which can be built from a bencoded * buffer with lazy_bdecode(). * <p/> * The ``flags`` arguments passed in to ``save_state`` can be used to * filter which parts of the session state to save. By default, all state * is saved (except for the individual torrents). see save_state_flags_t * * @param data */ public void loadState(byte[] data) { byte_vector buffer = Vectors.bytes2byte_vector(data); bdecode_node n = new bdecode_node(); error_code ec = new error_code(); int ret = bdecode_node.bdecode(buffer, n, ec); if (ret == 0) { s.load_state(n); } else { LOG.error("failed to decode torrent: " + ec.message()); } } /** * This functions instructs the session to post the state_update_alert, * containing the status of all torrents whose state changed since the * last time this function was called. * <p/> * Only torrents who has the state subscription flag set will be * included. 
This flag is on by default. See add_torrent_params. * the ``flags`` argument is the same as for torrent_handle::status(). * see torrent_handle::status_flags_t. * * @param flags */ public void postTorrentUpdates(TorrentHandle.StatusFlags flags) { s.post_torrent_updates(flags.getSwig()); } /** * This functions instructs the session to post the state_update_alert, * containing the status of all torrents whose state changed since the * last time this function was called. * <p/> * Only torrents who has the state subscription flag set will be * included. */ public void postTorrentUpdates() { s.post_torrent_updates(); } /** * This function will post a {@link com.frostwire.jlibtorrent.alerts.SessionStatsAlert} object, containing a * snapshot of the performance counters from the internals of libtorrent. * To interpret these counters, query the session via * session_stats_metrics(). */ public void postSessionStats() { s.post_session_stats(); } /** * This will cause a dht_stats_alert to be posted. */ public void postDHTStats() { s.post_dht_stats(); } /** * Looks for a torrent with the given info-hash. In * case there is such a torrent in the session, a torrent_handle to that * torrent is returned. * <p/> * In case the torrent cannot be found, a null is returned. * * @param infoHash * @return */ public TorrentHandle findTorrent(Sha1Hash infoHash) { torrent_handle th = s.find_torrent(infoHash.swig()); return th != null && th.is_valid() ? new TorrentHandle(th) : null; } /** * Returns a list of torrent handles to all the * torrents currently in the session. 
* * @return */ public List<TorrentHandle> getTorrents() { torrent_handle_vector v = s.get_torrents(); long size = v.size(); List<TorrentHandle> l = new ArrayList<TorrentHandle>((int) size); for (int i = 0; i < size; i++) { l.add(new TorrentHandle(v.get(i))); } return l; } // starts/stops UPnP, NATPMP or LSD port mappers they are stopped by // default These functions are not available in case // ``TORRENT_DISABLE_DHT`` is defined. ``start_dht`` starts the dht node // and makes the trackerless service available to torrents. The startup // state is optional and can contain nodes and the node id from the // previous session. The dht node state is a bencoded dictionary with the // following entries: // nodes // A list of strings, where each string is a node endpoint encoded in // binary. If the string is 6 bytes long, it is an IPv4 address of 4 // bytes, encoded in network byte order (big endian), followed by a 2 // byte port number (also network byte order). If the string is 18 // bytes long, it is 16 bytes of IPv6 address followed by a 2 bytes // port number (also network byte order). // node-id // The node id written as a readable string as a hexadecimal number. // ``dht_state`` will return the current state of the dht node, this can // be used to start up the node again, passing this entry to // ``start_dht``. It is a good idea to save this to disk when the session // is closed, and read it up again when starting. // If the port the DHT is supposed to listen on is already in use, and // exception is thrown, ``asio::error``. // ``stop_dht`` stops the dht node. // ``add_dht_node`` adds a node to the routing table. This can be used if // your client has its own source of bootstrapping nodes. // ``set_dht_settings`` sets some parameters availavle to the dht node. // See dht_settings for more information. // ``is_dht_running()`` returns true if the DHT support has been started // and false // otherwise. 
void setDHTSettings(DhtSettings settings) { s.set_dht_settings(settings.swig()); } public boolean isDHTRunning() { return s.is_dht_running(); } /** * takes a host name and port pair. That endpoint will be * pinged, and if a valid DHT reply is received, the node will be added to * the routing table. * * @param node */ public void addDHTNode(Pair<String, Integer> node) { s.add_dht_node(node.to_string_int_pair()); } /** * adds the given endpoint to a list of DHT router nodes. * If a search is ever made while the routing table is empty, those nodes will * be used as backups. Nodes in the router node list will also never be added * to the regular routing table, which effectively means they are only used * for bootstrapping, to keep the load off them. * <p/> * An example routing node that you could typically add is * ``router.bittorrent.com``. * * @param node */ public void addDHTRouter(Pair<String, Integer> node) { s.add_dht_router(node.to_string_int_pair()); } /** * Query the DHT for an immutable item at the target hash. * the result is posted as a {@link DhtImmutableItemAlert}. * * @param target */ public void dhtGetItem(Sha1Hash target) { s.dht_get_item(target.swig()); } /** * Query the DHT for a mutable item under the public key {@code key}. * this is an ed25519 key. The {@code salt} argument is optional and may be left * as an empty string if no salt is to be used. * <p/> * if the item is found in the DHT, a {@link DhtMutableItemAlert} is * posted. * * @param key * @param salt */ public void dhtGetItem(byte[] key, byte[] salt) { s.dht_get_item(Vectors.bytes2byte_vector(key), Vectors.bytes2byte_vector(salt)); } /** * Store the given bencoded data as an immutable item in the DHT. * the returned hash is the key that is to be used to look the item * up agan. It's just the sha-1 hash of the bencoded form of the * structure. 
* * @param entry * @return */ public Sha1Hash dhtPutItem(Entry entry) { return new Sha1Hash(s.dht_put_item(entry.getSwig())); } // store an immutable item. The ``key`` is the public key the blob is // to be stored under. The optional ``salt`` argument is a string that // is to be mixed in with the key when determining where in the DHT // the value is to be stored. The callback function is called from within // the libtorrent network thread once we've found where to store the blob, // possibly with the current value stored under the key. // The values passed to the callback functions are: // entry& value // the current value stored under the key (may be empty). Also expected // to be set to the value to be stored by the function. // boost::array<char,64>& signature // the signature authenticating the current value. This may be zeroes // if there is currently no value stored. The functon is expected to // fill in this buffer with the signature of the new value to store. // To generate the signature, you may want to use the // ``sign_mutable_item`` function. // boost::uint64_t& seq // current sequence number. May be zero if there is no current value. // The function is expected to set this to the new sequence number of // the value that is to be stored. Sequence numbers must be monotonically // increasing. Attempting to overwrite a value with a lower or equal // sequence number will fail, even if the signature is correct. // std::string const& salt // this is the salt that was used for this put call. // Since the callback function ``cb`` is called from within libtorrent, // it is critical to not perform any blocking operations. Ideally not // even locking a mutex. Pass any data required for this function along // with the function object's context and make the function entirely // self-contained. The only reason data blobs' values are computed // via a function instead of just passing in the new value is to avoid // race conditions. 
If you want to *update* the value in the DHT, you // must first retrieve it, then modify it, then write it back. The way // the DHT works, it is natural to always do a lookup before storing and // calling the callback in between is convenient. public void dhtPutItem(byte[] publicKey, byte[] privateKey, Entry entry, byte[] salt) { s.dht_put_item(Vectors.bytes2byte_vector(publicKey), Vectors.bytes2byte_vector(privateKey), entry.getSwig(), Vectors.bytes2byte_vector(salt)); } public void dhtGetPeers(Sha1Hash infoHash) { s.dht_get_peers(infoHash.swig()); } public void dhtAnnounce(Sha1Hash infoHash, int port, int flags) { s.dht_announce(infoHash.swig(), port, flags); } public void dhtAnnounce(Sha1Hash infoHash) { s.dht_announce(infoHash.swig()); } public void dhtDirectRequest(UdpEndpoint endp, Entry entry) { s.dht_direct_request(endp.swig(), entry.getSwig()); } public void addExtension(Plugin p) { SwigPlugin sp = new SwigPlugin(p); s.add_swig_extension(sp); plugins.add(sp); } /** * add_port_mapping adds a port forwarding on UPnP and/or NAT-PMP, * whichever is enabled. The return value is a handle referring to the * port mapping that was just created. Pass it to delete_port_mapping() * to remove it. * * @param t * @param externalPort * @param localPort * @return */ public int addPortMapping(ProtocolType t, int externalPort, int localPort) { return s.add_port_mapping(t.getSwig(), externalPort, localPort); } public void deletePortMapping(int handle) { s.delete_port_mapping(handle); } public SessionStats getStats() { return stats; } public SettingsPack getSettingsPack() { return new SettingsPack(s.get_settings()); } // You add torrents through the add_torrent() function where you give an // object with all the parameters. The add_torrent() overloads will block // until the torrent has been added (or failed to be added) and returns // an error code and a torrent_handle. 
In order to add torrents more
// efficiently, consider using async_add_torrent() which returns
// immediately, without waiting for the torrent to add. Notification of
// the torrent being added is sent as add_torrent_alert.
//
// The overload that does not take an error_code throws an exception on
// error and is not available when building without exception support.
// The torrent_handle returned by add_torrent() can be used to retrieve
// information about the torrent's progress, its peers etc. It is also
// used to abort a torrent.
//
// If the torrent you are trying to add already exists in the session (is
// either queued for checking, being checked or downloading)
// ``add_torrent()`` will throw libtorrent_exception which derives from
// ``std::exception`` unless duplicate_is_error is set to false. In that
// case, add_torrent() will return the handle to the existing torrent.
//
// all torrent_handles must be destructed before the session is destructed!
public TorrentHandle addTorrent(AddTorrentParams params, ErrorCode ec) {
    return new TorrentHandle(s.add_torrent(params.getSwig(), ec.getSwig()));
}

public void asyncAddTorrent(AddTorrentParams params) {
    s.async_add_torrent(params.getSwig());
}

/**
 * Resolves a magnet link to its bencoded torrent metadata by joining the
 * swarm with storage disabled and polling the shared metadata cache
 * (filled by {@link #saveMagnetData} when a metadata_received alert
 * arrives) until the data shows up or the timeout expires.
 *
 * @param uri     the magnet uri to resolve
 * @param timeout maximum time to wait, in seconds
 * @return the bencoded torrent metadata, or {@code null} if it was not
 * received before the timeout (or the waiting thread was interrupted)
 * @throws IllegalArgumentException if the uri is not a valid magnet link
 */
public byte[] fetchMagnet(String uri, int timeout) {
    add_torrent_params p = add_torrent_params.create_instance_disabled_storage();

    error_code ec = new error_code();
    libtorrent.parse_magnet_uri(uri, p, ec);

    if (ec.value() != 0) {
        throw new IllegalArgumentException(ec.message());
    }

    final sha1_hash info_hash = p.getInfo_hash();
    String sha1 = info_hash.to_hex();

    // fast path: metadata already cached by a previous fetch or download
    byte[] data = MAGNET_CACHE.get(sha1);
    if (data != null) {
        return data;
    }

    boolean add;
    torrent_handle th;

    synchronized (MAGNET_LOCK) {
        th = s.find_torrent(info_hash);
        if (th != null && th.is_valid()) {
            // we already have a download with the same info-hash, just wait
            // for its metadata instead of adding a helper torrent
            add = false;
        } else {
            add = true;
        }

        if (add) {
            p.setName("fetch_magnet:" + uri);
            p.setSave_path("fetch_magnet/" + uri);

            // opt out of auto-management so the helper torrent starts right away
            long flags = p.get_flags();
            flags &= ~add_torrent_params.flags_t.flag_auto_managed.swigValue();
            p.set_flags(flags);

            ec.clear();
            th = s.add_torrent(p, ec);
            th.resume();
        }
    }

    // poll the cache once per second until the metadata arrives or we run
    // out of time (timeout is expressed in seconds, matching the 1s sleep)
    int n = 0;
    do {
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            // restore the interrupt status and stop waiting
            Thread.currentThread().interrupt();
            break;
        }

        data = MAGNET_CACHE.get(sha1);
        n++;
        // BUGFIX: was `while (data != null || n < timeout)`, which kept
        // looping forever once data was found and bailed out immediately
        // (after one sleep) when it wasn't.
    } while (data == null && n < timeout);

    synchronized (MAGNET_LOCK) {
        // only remove the helper torrent we added ourselves; never touch a
        // pre-existing download with the same info-hash
        if (add && th != null && th.is_valid()) {
            s.remove_torrent(th);
        }
    }

    return data;
}

@Override
protected void finalize() throws Throwable {
    // stop the alerts loop thread when this session is garbage collected
    this.running = false;
    super.finalize();
}

// Dispatches the alert to the snapshot of listeners registered for the
// given type (-1 is the catch-all bucket). Listener failures are logged
// and swallowed so one bad listener can't break alert delivery.
private void fireAlert(Alert<?> a, int type) {
    AlertListener[] listeners = listenerSnapshots.get(type);
    if (listeners != null) {
        for (int i = 0; i < listeners.length; i++) {
            try {
                AlertListener l = listeners[i];
                if (l != null) {
                    l.alert(a);
                }
            } catch (Throwable e) {
                LOG.warn("Error calling alert listener", e);
            }
        }
    }
}

// Shared implementation behind the blocking and asynchronous addTorrent
// overloads: builds the add_torrent_params from the metadata, optional
// per-file priorities and optional fast-resume data, then either adds
// synchronously (returning a handle) or asynchronously (returning null).
private TorrentHandle addTorrentSupport(TorrentInfo ti, File saveDir, Priority[] priorities, File resumeFile, boolean async) {
    String savePath = null;
    if (saveDir != null) {
        savePath = saveDir.getAbsolutePath();
    } else if (resumeFile == null) {
        // without a save dir we need at least resume data to know where files go
        throw new IllegalArgumentException("Both saveDir and resumeFile can't be null at the same time");
    }

    add_torrent_params p = add_torrent_params.create_instance();
    p.set_ti(ti.getSwig());
    if (savePath != null) {
        p.setSave_path(savePath);
    }

    if (priorities != null) {
        byte_vector v = new byte_vector();
        for (int i = 0; i < priorities.length; i++) {
            v.push_back((byte) priorities[i].getSwig());
        }
        p.set_file_priorities(v);
    }
    p.setStorage_mode(storage_mode_t.storage_mode_sparse);

    long flags = p.get_flags();
    flags &= ~add_torrent_params.flags_t.flag_auto_managed.swigValue();

    if (resumeFile != null) {
        try {
            byte[] data = Files.bytes(resumeFile);
            p.set_resume_data(Vectors.bytes2byte_vector(data));
            flags |= add_torrent_params.flags_t.flag_use_resume_save_path.swigValue();
        } catch (Throwable e) {
            // best-effort: an unreadable resume file just means a fresh start
            LOG.warn("Unable to set resume data", e);
        }
    }

    p.set_flags(flags);

    if (async) {
        s.async_add_torrent(p);
        return null;
    } else {
        error_code ec = new error_code();
        torrent_handle th = s.add_torrent(p, ec);
        return new TorrentHandle(th);
    }
}

// Spawns the daemon thread that pumps libtorrent's alert queue: session
// stats and metadata alerts are handled internally, everything else is
// forwarded to registered listeners. Also requests fresh session stats
// roughly once per REQUEST_STATS_RESOLUTION_MILLIS.
private void alertsLoop() {
    Runnable r = new Runnable() {
        @Override
        public void run() {
            alert_ptr_vector vector = new alert_ptr_vector();

            high_resolution_clock.duration max_wait = libtorrent.to_milliseconds(ALERTS_LOOP_WAIT_MILLIS);

            while (running) {
                alert ptr = s.wait_for_alert(max_wait);

                if (ptr != null) {
                    s.pop_alerts(vector);
                    long size = vector.size();
                    for (int i = 0; i < size; i++) {
                        alert swigAlert = vector.get(i);
                        int type = swigAlert.type();

                        Alert<?> alert = null;

                        if (type == AlertType.SESSION_STATS.getSwig()) {
                            alert = Alerts.cast(swigAlert);
                            updateSessionStat((SessionStatsAlert) alert);
                        }

                        if (type == AlertType.METADATA_RECEIVED.getSwig()) {
                            alert = Alerts.cast(swigAlert);
                            saveMagnetData((MetadataReceivedAlert) alert);
                        }

                        if (listeners.indexOfKey(type) >= 0) {
                            if (alert == null) {
                                alert = Alerts.cast(swigAlert);
                            }
                            fireAlert(alert, type);
                        }

                        // session stats are internal bookkeeping; don't spam
                        // the catch-all (-1) listeners with them
                        if (type != AlertType.SESSION_STATS.getSwig() && listeners.indexOfKey(-1) >= 0) {
                            if (alert == null) {
                                alert = Alerts.cast(swigAlert);
                            }
                            fireAlert(alert, -1);
                        }
                    }
                    vector.clear();
                }

                long now = System.currentTimeMillis();
                if ((now - lastStatsRequestTime) >= REQUEST_STATS_RESOLUTION_MILLIS) {
                    lastStatsRequestTime = now;
                    postSessionStats();
                }
            }
        }
    };

    Thread t = new Thread(r, "Session-alertsLoop");
    t.setDaemon(true);
    t.start();
}

// Adds or removes the listener from every alert-type bucket it declares
// interest in; a null types() array means the catch-all bucket (-1).
private void modifyListeners(boolean adding, AlertListener listener) {
    if (listener != null) {
        int[] types = listener.types();

        // null types means the listener wants all alert types
        if (types == null) {
            modifyListeners(adding, -1, listener);
        } else {
            for (int i = 0; i < types.length; i++) {
                if (types[i] == -1) {
                    throw new IllegalArgumentException("Type can't be the key of all (-1)");
                }
                modifyListeners(adding, types[i], listener);
            }
        }
    }
}

// Updates a single type bucket and refreshes its immutable snapshot array,
// which is what fireAlert iterates (so delivery never sees a list that is
// being modified).
private void modifyListeners(boolean adding, int type, AlertListener listener) {
    ArrayList<AlertListener> l = listeners.get(type);
    if (l == null) {
        l = new ArrayList<AlertListener>();
        listeners.append(type, l);
    }

    if (adding) {
        l.add(listener);
    } else {
        l.remove(listener);
    }

    listenerSnapshots.append(type, l.toArray(new AlertListener[0]));
}

// Well-known public DHT bootstrap routers used by the default constructors.
private static List<Pair<String, Integer>> defaultRouters() {
    List<Pair<String, Integer>> list = new LinkedList<Pair<String, Integer>>();

    list.add(new Pair<String, Integer>("router.bittorrent.com", 6881));
    list.add(new Pair<String, Integer>("dht.transmissionbt.com", 6881));

    return list;
}

// Folds a session_stats alert into the cumulative JavaStat: the alert
// carries absolute counters, so deltas are computed against the totals
// already accumulated in `stat`.
private void updateSessionStat(SessionStatsAlert alert) {
    long now = System.currentTimeMillis();
    long tickIntervalMs = now - lastStatSecondTick;
    lastStatSecondTick = now;

    long received = alert.value(counters.stats_counter_t.recv_bytes.swigValue());
    long payload = alert.value(counters.stats_counter_t.recv_payload_bytes.swigValue());
    long protocol = received - payload;
    long ip = alert.value(counters.stats_counter_t.recv_ip_overhead_bytes.swigValue());

    payload -= stat.downloadPayload();
    protocol -= stat.downloadProtocol();
    ip -= stat.downloadIPProtocol();
    stat.received(payload, protocol, ip);

    long sent = alert.value(counters.stats_counter_t.sent_bytes.swigValue());
    payload = alert.value(counters.stats_counter_t.sent_payload_bytes.swigValue());
    protocol = sent - payload;
    ip = alert.value(counters.stats_counter_t.sent_ip_overhead_bytes.swigValue());

    payload -= stat.uploadPayload();
    protocol -= stat.uploadProtocol();
    ip -= stat.uploadIPProtocol();
    stat.sent(payload, protocol, ip);

    stat.secondTick(tickIntervalMs);
}

// Stores freshly received torrent metadata in the magnet cache, keyed by
// info-hash hex; fetchMagnet polls this cache.
private void saveMagnetData(MetadataReceivedAlert alert) {
    try {
        torrent_handle th = alert.getHandle().getSwig();
        TorrentInfo ti = new TorrentInfo(th.get_torrent_copy());
        String sha1 = ti.getInfoHash().toHex();
        byte[] data = ti.bencode();

        MAGNET_CACHE.put(sha1, data);
    } catch (Throwable e) {
        LOG.error("Error in saving magnet in internal cache", e);
    }
}

// Builds the native session. The alert mask is always forced to
// all_categories (minus the log categories unless logging is requested)
// because the alerts loop depends on receiving stats/metadata alerts.
private static session createSession(SettingsPack settings, boolean logging) {
    settings_pack sp = settings.getSwig();

    int alert_mask = alert.category_t.all_categories.swigValue();
    if (!logging) {
        int log_mask = alert.category_t.session_log_notification.swigValue() |
                alert.category_t.torrent_log_notification.swigValue() |
                alert.category_t.peer_log_notification.swigValue() |
                alert.category_t.dht_log_notification.swigValue() |
                alert.category_t.port_mapping_log_notification.swigValue();
        alert_mask = alert_mask & ~log_mask;
    }

    // we always override alert_mask since we use it for our internal operations
    sp.set_int(settings_pack.int_types.alert_mask.swigValue(), alert_mask);

    return new session(sp);
}

// Minimal settings pack with the listen interfaces and port-bind retry count.
private static SettingsPack createSettings(String interfaces, int retries) {
    settings_pack sp = new settings_pack();

    sp.set_str(settings_pack.string_types.listen_interfaces.swigValue(), interfaces);
    sp.set_int(settings_pack.int_types.max_retry_port_bind.swigValue(), retries);

    return new SettingsPack(sp);
}

/**
 * Flags to be passed in to remove_torrent().
 */
public enum Options {

    /**
     * Delete the files belonging to the torrent from disk.
     */
    DELETE_FILES(options_t.delete_files.swigValue()),

    /**
     * Unknown/unmapped native value.
     */
    UNKNOWN(-1);

    Options(int swigValue) {
        this.swigValue = swigValue;
    }

    private final int swigValue;

    public int getSwig() {
        return swigValue;
    }
}

/**
 * Protocols used by add_port_mapping().
 */
public enum ProtocolType {

    UDP(session.protocol_type.udp),
    TCP(session.protocol_type.tcp);

    ProtocolType(session.protocol_type swigObj) {
        this.swigObj = swigObj;
    }

    private final session.protocol_type swigObj;

    public session.protocol_type getSwig() {
        return swigObj;
    }
}
}
package com.frostwire.jlibtorrent;

import com.frostwire.jlibtorrent.alerts.*;
import com.frostwire.jlibtorrent.plugins.Plugin;
import com.frostwire.jlibtorrent.plugins.SwigPlugin;
import com.frostwire.jlibtorrent.swig.*;
import com.frostwire.jlibtorrent.swig.session_handle.options_t;

import java.io.File;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;

/**
 * The session holds all state that spans multiple torrents. Among other
 * things it runs the network loop and manages all torrents. Once it's
 * created, the session object will spawn the main thread that will do all
 * the work. The main thread will be idle as long it doesn't have any
 * torrents to participate in.
 * <p/>
 * This class belongs to a middle logical layer of abstraction. It's a wrapper
 * of the underlying swig session object (from libtorrent), but it does not
 * expose all the raw features, nor does it expose a very high level interface
 * like {@link com.frostwire.jlibtorrent.DHT DHT} or
 * {@link com.frostwire.jlibtorrent.Downloader Downloader}.
 *
 * @author gubatron
 * @author aldenml
 */
public final class Session extends SessionHandle {

    private static final Logger LOG = Logger.getLogger(Session.class);

    // how often we ask libtorrent for fresh session stats (from alertsLoop)
    private static final long REQUEST_STATS_RESOLUTION_MILLIS = 1000;
    // max time wait_for_alert blocks per iteration of the alerts loop
    private static final long ALERTS_LOOP_WAIT_MILLIS = 500;

    // the underlying swig session (same object as super.s, pre-cast)
    private final session s;

    // cumulative transfer statistics, updated from session_stats alerts
    private final JavaStat stat;
    private final SessionStats stats;

    private long lastStatsRequestTime;
    private long lastStatSecondTick;

    // listeners keyed by alert type (-1 = catch-all); snapshots are the
    // immutable arrays actually iterated during alert dispatch
    private final SparseArray<ArrayList<AlertListener>> listeners;
    private final SparseArray<AlertListener[]> listenerSnapshots;
    // flag read by the alerts loop thread; cleared by abort()/finalize()
    private boolean running;

    // keeps strong references to installed swig plugins so they aren't GC'd
    private final LinkedList<SwigPlugin> plugins;

    /**
     * Creates a session. The flag alert_mask is always overridden and set to
     * all_categories (internal alerts are required for stats and magnet
     * fetching); log categories are masked out unless {@code logging} is true.
     *
     * @param settings the initial settings to apply
     * @param logging  whether libtorrent log alerts should be enabled
     */
    public Session(SettingsPack settings, boolean logging) {
        super(createSession(settings, logging));

        this.s = (session) super.s;

        this.stat = new JavaStat();
        this.stats = new SessionStats(stat);

        this.listeners = new SparseArray<ArrayList<AlertListener>>();
        this.listenerSnapshots = new SparseArray<AlertListener[]>();

        this.running = true;
        alertsLoop();

        // seed the DHT routing table with well-known bootstrap routers
        for (Pair<String, Integer> router : defaultRouters()) {
            s.add_dht_router(router.to_string_int_pair());
        }

        this.plugins = new LinkedList<SwigPlugin>();
    }

    /**
     * @deprecated use {@link #Session(SettingsPack, boolean)} instead; this
     * constructor uses the legacy fingerprint/port-range based setup.
     */
    @Deprecated
    public Session(Fingerprint print, Pair<Integer, Integer> prange, String iface, List<Pair<String, Integer>> routers, boolean logging) {
        super(createSessionDeprecated(print, prange, iface, routers, logging));

        this.s = (session) super.s;

        this.stat = new JavaStat();
        this.stats = new SessionStats(stat);

        this.listeners = new SparseArray<ArrayList<AlertListener>>();
        this.listenerSnapshots = new SparseArray<AlertListener[]>();

        this.running = true;
        alertsLoop();

        for (Pair<String, Integer> router : routers) {
            s.add_dht_router(router.to_string_int_pair());
        }

        this.plugins = new LinkedList<SwigPlugin>();
    }

    public Session(Fingerprint print, Pair<Integer, Integer> prange, String iface, List<Pair<String, Integer>> routers) {
        this(print, prange, iface, routers, false);
    }

    public Session(Fingerprint print, Pair<Integer, Integer> prange, String iface) {
        this(print, prange, iface, defaultRouters());
    }

    public Session(Fingerprint print) {
        this(print, new Pair<Integer, Integer>(0, 0), "0.0.0.0");
    }

    public Session(Pair<Integer, Integer> prange, String iface) {
        this(new Fingerprint(), prange, iface);
    }

    public Session() {
        this(new SettingsPack(), false);
    }

    /**
     * @return the underlying swig session object.
     */
    public session getSwig() {
        return s;
    }

    public void addListener(AlertListener listener) {
        modifyListeners(true, listener);
    }

    public void removeListener(AlertListener listener) {
        modifyListeners(false, listener);
    }

    /**
     * You add torrents through the add_torrent() function where you give an
     * object with all the parameters. The add_torrent() overloads will block
     * until the torrent has been added (or failed to be added) and returns
     * an error code and a torrent_handle. In order to add torrents more
     * efficiently, consider using async_add_torrent() which returns
     * immediately, without waiting for the torrent to add. Notification of
     * the torrent being added is sent as add_torrent_alert.
     * <p/>
     * The torrent_handle returned by add_torrent() can be used to retrieve
     * information about the torrent's progress, its peers etc. It is also
     * used to abort a torrent.
     * <p/>
     * If the torrent you are trying to add already exists in the session (is
     * either queued for checking, being checked or downloading)
     * ``add_torrent()`` will throw libtorrent_exception which derives from
     * ``std::exception`` unless duplicate_is_error is set to false. In that
     * case, add_torrent() will return the handle to the existing torrent.
     * <p/>
     * all torrent_handles must be destructed before the session is destructed!
     *
     * @param ti         the torrent metadata
     * @param saveDir    destination directory (may be null if resumeFile is given)
     * @param priorities optional per-file priorities (may be null)
     * @param resumeFile optional fast-resume data file (may be null)
     * @return a handle to the added torrent
     */
    public TorrentHandle addTorrent(TorrentInfo ti, File saveDir, Priority[] priorities, File resumeFile) {
        return addTorrentSupport(ti, saveDir, priorities, resumeFile, false);
    }

    /**
     * Blocking add of a .torrent file with optional fast-resume data.
     * See {@link #addTorrent(TorrentInfo, File, Priority[], File)} for the
     * full contract (blocking semantics, duplicate handling).
     *
     * @param torrent    the .torrent file
     * @param saveDir    destination directory
     * @param resumeFile optional fast-resume data file (may be null)
     * @return a handle to the added torrent
     */
    public TorrentHandle addTorrent(File torrent, File saveDir, File resumeFile) {
        return addTorrent(new TorrentInfo(torrent), saveDir, null, resumeFile);
    }

    /**
     * Blocking add of a .torrent file.
     * See {@link #addTorrent(TorrentInfo, File, Priority[], File)} for the
     * full contract (blocking semantics, duplicate handling).
     *
     * @param torrent the .torrent file
     * @param saveDir destination directory
     * @return a handle to the added torrent
     */
    public TorrentHandle addTorrent(File torrent, File saveDir) {
        return addTorrent(torrent, saveDir, null);
    }

    /**
     * In order to add torrents more efficiently, consider using this which returns
     * immediately, without waiting for the torrent to add. Notification of
     * the torrent being added is sent as {@link com.frostwire.jlibtorrent.alerts.AddTorrentAlert}.
     * <p/>
     * If the torrent you are trying to add already exists in the session (is
     * either queued for checking, being checked or downloading)
     * ``add_torrent()`` will throw libtorrent_exception which derives from
     * ``std::exception`` unless duplicate_is_error is set to false. In that
     * case, add_torrent() will return the handle to the existing torrent.
     *
     * @param ti         the torrent metadata
     * @param saveDir    destination directory (may be null if resumeFile is given)
     * @param priorities optional per-file priorities (may be null)
     * @param resumeFile optional fast-resume data file (may be null)
     */
    public void asyncAddTorrent(TorrentInfo ti, File saveDir, Priority[] priorities, File resumeFile) {
        addTorrentSupport(ti, saveDir, priorities, resumeFile, true);
    }

    /**
     * Asynchronous add of a .torrent file with optional fast-resume data.
     * Completion is signaled with an add_torrent_alert. See
     * {@link #asyncAddTorrent(TorrentInfo, File, Priority[], File)}.
     *
     * @param torrent    the .torrent file
     * @param saveDir    destination directory
     * @param resumeFile optional fast-resume data file (may be null)
     */
    public void asyncAddTorrent(File torrent, File saveDir, File resumeFile) {
        asyncAddTorrent(new TorrentInfo(torrent), saveDir, null, resumeFile);
    }

    /**
     * Asynchronous add of a .torrent file. Completion is signaled with an
     * add_torrent_alert. See
     * {@link #asyncAddTorrent(TorrentInfo, File, Priority[], File)}.
     *
     * @param torrent the .torrent file
     * @param saveDir destination directory
     */
    public void asyncAddTorrent(File torrent, File saveDir) {
        asyncAddTorrent(torrent, saveDir, null);
    }

    /**
     * This method will close all peer connections associated with the torrent and tell the
     * tracker that we've stopped participating in the swarm. This operation cannot fail.
     * When it completes, you will receive a torrent_removed_alert.
     * <p/>
     * The optional second argument options can be used to delete all the files downloaded
     * by this torrent. To do so, pass in the value session::delete_files. The removal of
     * the torrent is asynchronous, there is no guarantee that adding the same torrent immediately
     * after it was removed will not throw a libtorrent_exception exception. Once the torrent
     * is deleted, a torrent_deleted_alert is posted.
     *
     * @param th      the torrent to remove
     * @param options pass {@link Options#DELETE_FILES} to also delete the data on disk
     */
    public void removeTorrent(TorrentHandle th, Options options) {
        s.remove_torrent(th.getSwig(), options.getSwig());
    }

    /**
     * This method will close all peer connections associated with the torrent and tell the
     * tracker that we've stopped participating in the swarm. This operation cannot fail.
     * When it completes, you will receive a torrent_removed_alert.
     *
     * @param th the torrent to remove (silently ignored if no longer valid)
     */
    public void removeTorrent(TorrentHandle th) {
        if (th.isValid()) {
            s.remove_torrent(th.getSwig());
        }
    }

    /**
     * Applies the settings specified by the settings_pack ``sp``. This is an
     * asynchronous operation that will return immediately and actually apply
     * the settings to the main thread of libtorrent some time later.
     *
     * @param sp the settings to apply
     */
    public void applySettings(SettingsPack sp) {
        s.apply_settings(sp.getSwig());
    }

    /**
     * In case you want to destruct the session asynchronously, you can
     * request a session destruction proxy. If you don't do this, the
     * destructor of the session object will block while the trackers are
     * contacted. If you keep one ``session_proxy`` to the session when
     * destructing it, the destructor will not block, but start to close down
     * the session, the destructor of the proxy will then synchronize the
     * threads. So, the destruction of the session is performed from the
     * ``session`` destructor call until the ``session_proxy`` destructor
     * call. The ``session_proxy`` does not have any operations on it (since
     * the session is being closed down, no operations are allowed on it).
     * The only valid operation is calling the destructor.
     * <p/>
     * Also stops this wrapper's internal alerts loop thread.
     *
     * @return a proxy that finishes the shutdown when destructed
     */
    public SessionProxy abort() {
        running = false;
        return new SessionProxy(s.abort());
    }

    /**
     * Pausing the session has the same effect as pausing every torrent in
     * it, except that torrents will not be resumed by the auto-manage
     * mechanism.
     */
    public void pause() {
        s.pause();
    }

    /**
     * Resuming will restore the torrents to their previous paused
     * state. i.e. the session pause state is separate from the torrent pause
     * state. A torrent is inactive if it is paused or if the session is
     * paused.
     */
    public void resume() {
        s.resume();
    }

    public boolean isPaused() {
        return s.is_paused();
    }

    /**
     * returns the port we ended up listening on. Since you
     * just pass a port-range to the constructor and to ``listen_on()``, to
     * know which port it ended up using, you have to ask the session using
     * this function.
     *
     * @return the listen port in use
     */
    public int getListenPort() {
        return s.listen_port();
    }

    public int getSslListenPort() {
        return s.ssl_listen_port();
    }

    /**
     * will tell you whether or not the session has
     * successfully opened a listening port. If it hasn't, this function will
     * return false, and then you can use ``listen_on()`` to make another
     * attempt.
     *
     * @return true if a listening port is open
     */
    public boolean isListening() {
        return s.is_listening();
    }

    /**
     * Loads and saves all session settings, including dht_settings,
     * encryption settings and proxy settings. ``save_state`` writes all keys
     * to the ``entry`` that's passed in, which needs to either not be
     * initialized, or initialized as a dictionary.
     * <p/>
     * ``load_state`` expects a lazy_entry which can be built from a bencoded
     * buffer with lazy_bdecode().
     * <p/>
     * The ``flags`` arguments passed in to ``save_state`` can be used to
     * filter which parts of the session state to save. By default, all state
     * is saved (except for the individual torrents). see save_state_flags_t
     *
     * @return the session state as a bencoded byte array
     */
    public byte[] saveState() {
        entry e = new entry();
        s.save_state(e);
        return Vectors.char_vector2bytes(e.bencode());
    }

    /**
     * Restores session state previously produced by {@link #saveState()}
     * (dht_settings, encryption settings, proxy settings, etc.). Decode
     * failures are logged, not thrown.
     *
     * @param data the bencoded session state
     */
    public void loadState(byte[] data) {
        char_vector buffer = Vectors.bytes2char_vector(data);
        bdecode_node n = new bdecode_node();
        error_code ec = new error_code();
        int ret = bdecode_node.bdecode(buffer, n, ec);

        if (ret == 0) {
            s.load_state(n);
        } else {
            LOG.error("failed to decode torrent: " + ec.message());
        }
    }

    /**
     * This functions instructs the session to post the state_update_alert,
     * containing the status of all torrents whose state changed since the
     * last time this function was called.
     * <p/>
     * Only torrents who has the state subscription flag set will be
     * included. This flag is on by default. See add_torrent_params.
     * the ``flags`` argument is the same as for torrent_handle::status().
     * see torrent_handle::status_flags_t.
     *
     * @param flags status fields to include, see torrent_handle::status_flags_t
     */
    public void postTorrentUpdates(TorrentHandle.StatusFlags flags) {
        s.post_torrent_updates(flags.getSwig());
    }

    /**
     * This functions instructs the session to post the state_update_alert,
     * containing the status of all torrents whose state changed since the
     * last time this function was called.
     * <p/>
     * Only torrents who has the state subscription flag set will be
     * included.
     */
    public void postTorrentUpdates() {
        s.post_torrent_updates();
    }

    /**
     * This function will post a {@link com.frostwire.jlibtorrent.alerts.SessionStatsAlert} object, containing a
     * snapshot of the performance counters from the internals of libtorrent.
     * To interpret these counters, query the session via
     * session_stats_metrics().
     */
    public void postSessionStats() {
        s.post_session_stats();
    }

    /**
     * This will cause a dht_stats_alert to be posted.
     */
    public void postDHTStats() {
        s.post_dht_stats();
    }

    /**
     * Looks for a torrent with the given info-hash. In
     * case there is such a torrent in the session, a torrent_handle to that
     * torrent is returned.
     * <p/>
     * In case the torrent cannot be found, a null is returned.
     *
     * @param infoHash the torrent's info-hash
     * @return a handle to the torrent, or null if not present
     */
    public TorrentHandle findTorrent(Sha1Hash infoHash) {
        torrent_handle th = s.find_torrent(infoHash.getSwig());

        return th != null && th.is_valid() ? new TorrentHandle(th) : null;
    }

    /**
     * Returns a list of torrent handles to all the
     * torrents currently in the session.
     *
     * @return all torrents in this session
     */
    public List<TorrentHandle> getTorrents() {
        torrent_handle_vector v = s.get_torrents();
        long size = v.size();

        List<TorrentHandle> l = new ArrayList<TorrentHandle>((int) size);

        for (int i = 0; i < size; i++) {
            l.add(new TorrentHandle(v.get(i)));
        }

        return l;
    }

    // DHT control. These functions are not available in case
    // ``TORRENT_DISABLE_DHT`` is defined. ``start_dht`` starts the dht node
    // and makes the trackerless service available to torrents. The startup
    // state is optional and can contain nodes and the node id from the
    // previous session. The dht node state is a bencoded dictionary with the
    // following entries:
    //
    // nodes
    //   A list of strings, where each string is a node endpoint encoded in
    //   binary. If the string is 6 bytes long, it is an IPv4 address of 4
    //   bytes, encoded in network byte order (big endian), followed by a 2
    //   byte port number (also network byte order). If the string is 18
    //   bytes long, it is 16 bytes of IPv6 address followed by a 2 bytes
    //   port number (also network byte order).
    //
    // node-id
    //   The node id written as a readable string as a hexadecimal number.
    //
    // ``dht_state`` will return the current state of the dht node, this can
    // be used to start up the node again, passing this entry to
    // ``start_dht``. It is a good idea to save this to disk when the session
    // is closed, and read it up again when starting.
    //
    // If the port the DHT is supposed to listen on is already in use, an
    // exception is thrown, ``asio::error``.
    //
    // ``stop_dht`` stops the dht node.
    //
    // ``add_dht_node`` adds a node to the routing table. This can be used if
    // your client has its own source of bootstrapping nodes.
    //
    // ``set_dht_settings`` sets some parameters available to the dht node.
    // See dht_settings for more information.
    //
    // ``is_dht_running()`` returns true if the DHT support has been started
    // and false otherwise.
    void setDHTSettings(DHTSettings settings) {
        s.set_dht_settings(settings.getSwig());
    }

    public boolean isDHTRunning() {
        return s.is_dht_running();
    }

    /**
     * takes a host name and port pair. That endpoint will be
     * pinged, and if a valid DHT reply is received, the node will be added to
     * the routing table.
     *
     * @param node host/port pair of the node to ping
     */
    public void addDHTNode(Pair<String, Integer> node) {
        s.add_dht_node(node.to_string_int_pair());
    }

    /**
     * adds the given endpoint to a list of DHT router nodes.
     * If a search is ever made while the routing table is empty, those nodes will
     * be used as backups. Nodes in the router node list will also never be added
     * to the regular routing table, which effectively means they are only used
     * for bootstrapping, to keep the load off them.
     * <p/>
     * An example routing node that you could typically add is
     * ``router.bittorrent.com``.
     *
     * @param node host/port pair of the router node
     */
    public void addDHTRouter(Pair<String, Integer> node) {
        s.add_dht_router(node.to_string_int_pair());
    }

    /**
     * Query the DHT for an immutable item at the target hash.
     * the result is posted as a {@link DhtImmutableItemAlert}.
     *
     * @param target sha1 hash of the item to look up
     */
    public void dhtGetItem(Sha1Hash target) {
        s.dht_get_item(target.getSwig());
    }

    /**
     * Query the DHT for a mutable item under the public key ``key``.
     * this is an ed25519 key. ``salt`` is optional and may be left
     * as an empty string if no salt is to be used.
     * if the item is found in the DHT, a dht_mutable_item_alert is
     * posted.
     *
     * @param key the ed25519 public key
     */
    public void dhtGetItem(byte[] key) {
        s.dht_get_item(Vectors.bytes2char_vector(key));
    }

    /**
     * Query the DHT for a mutable item under the public key ``key``.
     * this is an ed25519 key. ``salt`` is optional and may be left
     * as an empty string if no salt is to be used.
     * if the item is found in the DHT, a dht_mutable_item_alert is
     * posted.
     *
     * @param key  the ed25519 public key
     * @param salt salt mixed into the storage location
     */
    public void dhtGetItem(byte[] key, String salt) {
        s.dht_get_item(Vectors.bytes2char_vector(key), salt);
    }

    /**
     * Store the given bencoded data as an immutable item in the DHT.
     * the returned hash is the key that is to be used to look the item
     * up again. It's just the sha-1 hash of the bencoded form of the
     * structure.
     *
     * @param entry the bencoded data to store
     * @return the sha1 hash under which the item is stored
     */
    public Sha1Hash dhtPutItem(Entry entry) {
        return new Sha1Hash(s.dht_put_item(entry.getSwig()));
    }

    // store a mutable item. The ``key`` is the public key the blob is
    // to be stored under. The optional ``salt`` argument is a string that
    // is to be mixed in with the key when determining where in the DHT
    // the value is to be stored. NOTE(review): in the underlying libtorrent
    // API the new value is produced by a callback invoked on the network
    // thread (and must not block); this wrapper passes the keys and entry
    // directly instead, so the callback caveats below describe the native
    // API, not this Java signature. To *update* a value in the DHT, you
    // must first retrieve it, then modify it, then write it back — sequence
    // numbers must be monotonically increasing, and attempting to overwrite
    // a value with a lower or equal sequence number will fail even if the
    // signature is correct.
public void dhtPutItem(byte[] publicKey, byte[] privateKey, Entry entry) { s.dht_put_item(Vectors.bytes2char_vector(publicKey), Vectors.bytes2char_vector(privateKey), entry.getSwig()); } // store an immutable item. The ``key`` is the public key the blob is // to be stored under. The optional ``salt`` argument is a string that // is to be mixed in with the key when determining where in the DHT // the value is to be stored. The callback function is called from within // the libtorrent network thread once we've found where to store the blob, // possibly with the current value stored under the key. // The values passed to the callback functions are: // entry& value // the current value stored under the key (may be empty). Also expected // to be set to the value to be stored by the function. // boost::array<char,64>& signature // the signature authenticating the current value. This may be zeroes // if there is currently no value stored. The functon is expected to // fill in this buffer with the signature of the new value to store. // To generate the signature, you may want to use the // ``sign_mutable_item`` function. // boost::uint64_t& seq // current sequence number. May be zero if there is no current value. // The function is expected to set this to the new sequence number of // the value that is to be stored. Sequence numbers must be monotonically // increasing. Attempting to overwrite a value with a lower or equal // sequence number will fail, even if the signature is correct. // std::string const& salt // this is the salt that was used for this put call. // Since the callback function ``cb`` is called from within libtorrent, // it is critical to not perform any blocking operations. Ideally not // even locking a mutex. Pass any data required for this function along // with the function object's context and make the function entirely // self-contained. 
The only reason data blobs' values are computed // via a function instead of just passing in the new value is to avoid // race conditions. If you want to *update* the value in the DHT, you // must first retrieve it, then modify it, then write it back. The way // the DHT works, it is natural to always do a lookup before storing and // calling the callback in between is convenient. public void dhtPutItem(byte[] publicKey, byte[] privateKey, Entry entry, String salt) { s.dht_put_item(Vectors.bytes2char_vector(publicKey), Vectors.bytes2char_vector(privateKey), entry.getSwig(), salt); } public void dhtGetPeers(Sha1Hash infoHash) { s.dht_get_peers(infoHash.getSwig()); } public void dhtAnnounce(Sha1Hash infoHash, int port, int flags) { s.dht_announce(infoHash.getSwig(), port, flags); } public void dhtAnnounce(Sha1Hash infoHash) { s.dht_announce(infoHash.getSwig()); } public void dhtDirectRequest(UdpEndpoint endp, Entry entry) { s.dht_direct_request(endp.getSwig(), entry.getSwig()); } public void addExtension(Plugin p) { SwigPlugin sp = new SwigPlugin(p); s.add_swig_extension(sp); plugins.add(sp); } /** * add_port_mapping adds a port forwarding on UPnP and/or NAT-PMP, * whichever is enabled. The return value is a handle referring to the * port mapping that was just created. Pass it to delete_port_mapping() * to remove it. 
* * @param t * @param externalPort * @param localPort * @return */ public int addPortMapping(ProtocolType t, int externalPort, int localPort) { return s.add_port_mapping(t.getSwig(), externalPort, localPort); } public void deletePortMapping(int handle) { s.delete_port_mapping(handle); } public SessionStats getStats() { return stats; } @Deprecated public SessionSettings getSettings() { return new SessionSettings(s.get_settings()); } @Deprecated public ProxySettings getProxy() { return new ProxySettings(new settings_pack()); } @Deprecated public void setProxy(ProxySettings s) { this.s.apply_settings(s.getSwig()); } @Deprecated public void setSettings(SessionSettings s) { this.applySettings(s.toPack()); } @Deprecated public SessionStatus getStatus() { return new SessionStatus(stats); } // You add torrents through the add_torrent() function where you give an // object with all the parameters. The add_torrent() overloads will block // until the torrent has been added (or failed to be added) and returns // an error code and a torrent_handle. In order to add torrents more // efficiently, consider using async_add_torrent() which returns // immediately, without waiting for the torrent to add. Notification of // the torrent being added is sent as add_torrent_alert. // The overload that does not take an error_code throws an exception on // error and is not available when building without exception support. // The torrent_handle returned by add_torrent() can be used to retrieve // information about the torrent's progress, its peers etc. It is also // used to abort a torrent. // If the torrent you are trying to add already exists in the session (is // either queued for checking, being checked or downloading) // ``add_torrent()`` will throw libtorrent_exception which derives from // ``std::exception`` unless duplicate_is_error is set to false. In that // case, add_torrent() will return the handle to the existing torrent. 
// all torrent_handles must be destructed before the session is destructed! public TorrentHandle addTorrent(AddTorrentParams params) { return new TorrentHandle(s.add_torrent(params.getSwig())); } public TorrentHandle addTorrent(AddTorrentParams params, ErrorCode ec) { return new TorrentHandle(s.add_torrent(params.getSwig(), ec.getSwig())); } public void asyncAddTorrent(AddTorrentParams params) { s.async_add_torrent(params.getSwig()); } @Override protected void finalize() throws Throwable { this.running = false; super.finalize(); } void fireAlert(Alert<?> a) { int type = a.getSwig() != null ? a.getSwig().type() : a.getType().getSwig(); fireAlert(a, type); fireAlert(a, -1); } private void fireAlert(Alert<?> a, int type) { AlertListener[] listeners = listenerSnapshots.get(type); if (listeners != null) { for (int i = 0; i < listeners.length; i++) { try { AlertListener l = listeners[i]; if (l != null) { l.alert(a); } } catch (Throwable e) { LOG.warn("Error calling alert listener", e); } } } } private TorrentHandle addTorrentSupport(TorrentInfo ti, File saveDir, Priority[] priorities, File resumeFile, boolean async) { String savePath = null; if (saveDir != null) { savePath = saveDir.getAbsolutePath(); } else if (resumeFile == null) { throw new IllegalArgumentException("Both saveDir and resumeFile can't be null at the same time"); } add_torrent_params p = add_torrent_params.create_instance(); p.set_ti(ti.getSwig()); if (savePath != null) { p.setSave_path(savePath); } if (priorities != null) { p.setFile_priorities(Vectors.priorities2unsigned_char_vector(priorities)); } p.setStorage_mode(storage_mode_t.storage_mode_sparse); long flags = p.getFlags(); flags &= ~add_torrent_params.flags_t.flag_auto_managed.swigValue(); if (resumeFile != null) { try { byte[] data = Files.bytes(resumeFile); p.setResume_data(Vectors.bytes2char_vector(data)); flags |= add_torrent_params.flags_t.flag_use_resume_save_path.swigValue(); } catch (Throwable e) { LOG.warn("Unable to set resume data", 
e); } } p.setFlags(flags); if (async) { s.async_add_torrent(p); return null; } else { torrent_handle th = s.add_torrent(p); return new TorrentHandle(th); } } private void alertsLoop() { Runnable r = new Runnable() { @Override public void run() { alert_ptr_vector vector = new alert_ptr_vector(); high_resolution_clock.duration max_wait = libtorrent.to_milliseconds(ALERTS_LOOP_WAIT_MILLIS); while (running) { alert ptr = s.wait_for_alert(max_wait); if (ptr != null) { s.pop_alerts(vector); long size = vector.size(); for (int i = 0; i < size; i++) { alert swigAlert = vector.get(i); int type = swigAlert.type(); Alert<?> alert = null; if (type == AlertType.SESSION_STATS.getSwig()) { alert = Alerts.cast(swigAlert); updateSessionStat((SessionStatsAlert) alert); } if (listeners.indexOfKey(type) >= 0) { if (alert == null) { alert = Alerts.cast(swigAlert); } fireAlert(alert, type); } if (type != AlertType.SESSION_STATS.getSwig() && listeners.indexOfKey(-1) >= 0) { if (alert == null) { alert = Alerts.cast(swigAlert); } fireAlert(alert, -1); } } vector.clear(); } long now = System.currentTimeMillis(); if ((now - lastStatsRequestTime) >= REQUEST_STATS_RESOLUTION_MILLIS) { lastStatsRequestTime = now; postSessionStats(); } } } }; Thread t = new Thread(r, "Session-alertsLoop"); t.setDaemon(true); t.start(); } private void modifyListeners(boolean adding, AlertListener listener) { if (listener != null) { int[] types = listener.types(); //all alert-type including listener if (types == null) { modifyListeners(adding, -1, listener); } else { for (int i = 0; i < types.length; i++) { if (types[i] == -1) { throw new IllegalArgumentException("Type can't be the key of all (-1)"); } modifyListeners(adding, types[i], listener); } } } } private void modifyListeners(boolean adding, int type, AlertListener listener) { ArrayList<AlertListener> l = listeners.get(type); if (l == null) { l = new ArrayList<AlertListener>(); listeners.append(type, l); } if (adding) { l.add(listener); } else { 
l.remove(listener); } listenerSnapshots.append(type, l.toArray(new AlertListener[0])); } private static List<Pair<String, Integer>> defaultRouters() { List<Pair<String, Integer>> list = new LinkedList<Pair<String, Integer>>(); list.add(new Pair<String, Integer>("router.bittorrent.com", 6881)); list.add(new Pair<String, Integer>("dht.transmissionbt.com", 6881)); return list; } private void updateSessionStat(SessionStatsAlert alert) { long now = System.currentTimeMillis(); long tickIntervalMs = now - lastStatSecondTick; lastStatSecondTick = now; long received = alert.value(counters.stats_counter_t.recv_bytes.swigValue()); long payload = alert.value(counters.stats_counter_t.recv_payload_bytes.swigValue()); long protocol = received - payload; long ip = alert.value(counters.stats_counter_t.recv_ip_overhead_bytes.swigValue()); payload -= stat.downloadPayload(); protocol -= stat.downloadProtocol(); ip -= stat.downloadIPProtocol(); stat.received(payload, protocol, ip); long sent = alert.value(counters.stats_counter_t.sent_bytes.swigValue()); payload = alert.value(counters.stats_counter_t.sent_payload_bytes.swigValue()); protocol = sent - payload; ip = alert.value(counters.stats_counter_t.sent_ip_overhead_bytes.swigValue()); payload -= stat.uploadPayload(); protocol -= stat.uploadProtocol(); ip -= stat.uploadIPProtocol(); stat.sent(payload, protocol, ip); stat.secondTick(tickIntervalMs); } private static session createSession(SettingsPack settings, boolean logging) { settings_pack sp = settings.getSwig(); int alert_mask = alert.category_t.all_categories.swigValue(); if (!logging) { int log_mask = alert.category_t.session_log_notification.swigValue() | alert.category_t.torrent_log_notification.swigValue() | alert.category_t.peer_log_notification.swigValue() | alert.category_t.dht_log_notification.swigValue() | alert.category_t.port_mapping_log_notification.swigValue(); alert_mask = alert_mask & ~log_mask; } // we always override alert_mask since we use it for our internal 
operations sp.set_int(settings_pack.int_types.alert_mask.swigValue(), alert_mask); return new session(sp); } private static session createSessionDeprecated(Fingerprint print, Pair<Integer, Integer> prange, String iface, List<Pair<String, Integer>> routers, boolean logging) { int alert_mask = alert.category_t.all_categories.swigValue(); if (!logging) { int log_mask = alert.category_t.session_log_notification.swigValue() | alert.category_t.torrent_log_notification.swigValue() | alert.category_t.peer_log_notification.swigValue() | alert.category_t.dht_log_notification.swigValue() | alert.category_t.port_mapping_log_notification.swigValue(); alert_mask = alert_mask & ~log_mask; } settings_pack sp = new settings_pack(); sp.set_int(settings_pack.int_types.alert_mask.swigValue(), alert_mask); sp.set_int(settings_pack.int_types.max_retry_port_bind.swigValue(), prange.second - prange.first); sp.set_str(settings_pack.string_types.peer_fingerprint.swigValue(), print.toString()); String if_string = String.format("%s:%d", iface, prange.first); sp.set_str(settings_pack.string_types.listen_interfaces.swigValue(), if_string); return new session(sp); } /** * Flags to be passed in to remove_torrent(). */ public enum Options { /** * Delete the files belonging to the torrent from disk. */ DELETE_FILES(options_t.delete_files.swigValue()), UNKNOWN(-1); Options(int swigValue) { this.swigValue = swigValue; } private final int swigValue; public int getSwig() { return swigValue; } } /** * protocols used by add_port_mapping(). */ public enum ProtocolType { UDP(session.protocol_type.udp), TCP(session.protocol_type.tcp); ProtocolType(session.protocol_type swigObj) { this.swigObj = swigObj; } private final session.protocol_type swigObj; public session.protocol_type getSwig() { return swigObj; } } }
package com.gmail.nossr50.util; import org.bukkit.CropState; import org.bukkit.block.BlockState; import org.bukkit.material.CocoaPlant; import org.bukkit.material.CocoaPlant.CocoaPlantSize; import com.gmail.nossr50.config.Config; public final class BlockUtils { private BlockUtils() {} /** * Checks to see if a given block awards XP. * * @param blockState The {@link BlockState} of the block to check * @return true if the block awards XP, false otherwise */ public static boolean shouldBeWatched(BlockState blockState) { switch (blockState.getType()) { case BROWN_MUSHROOM: case CACTUS: case CLAY: case COAL_ORE: case DIAMOND_ORE: case DIRT: case ENDER_STONE: case GLOWING_REDSTONE_ORE: case GLOWSTONE: case GOLD_ORE: case GRASS: case GRAVEL: case IRON_ORE: case LAPIS_ORE: case LOG: case MELON_BLOCK: case MOSSY_COBBLESTONE: case MYCEL: case NETHERRACK: case OBSIDIAN: case PUMPKIN: case QUARTZ_ORE: case RED_MUSHROOM: case RED_ROSE: case REDSTONE_ORE: case SAND: case SANDSTONE: case SOUL_SAND: case STONE: case SUGAR_CANE_BLOCK: case VINE: case WATER_LILY: case YELLOW_FLOWER: case COCOA: case EMERALD_ORE: return true; default: return ModUtils.getCustomBlock(blockState) != null; } } /** * Check if a given block should allow for the activation of abilities * * @param blockState The {@link BlockState} of the block to check * @return true if the block should allow ability activation, false otherwise */ public static boolean canActivateAbilities(BlockState blockState) { switch (blockState.getType()) { case BED_BLOCK: case BREWING_STAND: case BOOKSHELF: case BURNING_FURNACE: case CAKE_BLOCK: case CHEST: case DISPENSER: case ENCHANTMENT_TABLE: case ENDER_CHEST: case FENCE_GATE: case FURNACE: case IRON_DOOR_BLOCK: case JUKEBOX: case LEVER: case NOTE_BLOCK: case STONE_BUTTON: case WOOD_BUTTON: case TRAP_DOOR: case WALL_SIGN: case WOODEN_DOOR: case WORKBENCH: case BEACON: case ANVIL: case DROPPER: case HOPPER: case TRAPPED_CHEST: return false; default: int blockId = 
blockState.getTypeId(); if (blockId == Config.getInstance().getRepairAnvilId() || blockId == Config.getInstance().getSalvageAnvilId()) { return false; } if (ModUtils.isCustomAbilityBlock(blockState)) { return false; } return true; } } /** * Check if a given block is an ore * * @param blockState The {@link BlockState} of the block to check * @return true if the block is an ore, false otherwise */ public static boolean isOre(BlockState blockState) { switch (blockState.getType()) { case COAL_ORE: case DIAMOND_ORE: case GLOWING_REDSTONE_ORE: case GOLD_ORE: case IRON_ORE: case LAPIS_ORE: case QUARTZ_ORE: case REDSTONE_ORE: case EMERALD_ORE: return true; default: return ModUtils.isCustomOreBlock(blockState); } } /** * Determine if a given block can be made mossy * * @param blockState The {@link BlockState} of the block to check * @return true if the block can be made mossy, false otherwise */ public static boolean canMakeMossy(BlockState blockState) { switch (blockState.getType()) { case COBBLESTONE: case DIRT: return true; case SMOOTH_BRICK: case COBBLE_WALL: return blockState.getRawData() == (byte) 0x0; default: return false; } } /** * Determine if a given block should be affected by Green Terra * * @param blockState The {@link BlockState} of the block to check * @return true if the block should affected by Green Terra, false otherwise */ public static boolean affectedByGreenTerra(BlockState blockState) { switch (blockState.getType()) { case BROWN_MUSHROOM: case CACTUS: case MELON_BLOCK: case PUMPKIN: case RED_MUSHROOM: case RED_ROSE: case SUGAR_CANE_BLOCK: case VINE: case WATER_LILY: case YELLOW_FLOWER: return true; case CARROT: case CROPS: case POTATO: return blockState.getRawData() == CropState.RIPE.getData(); case NETHER_WARTS: return blockState.getRawData() == (byte) 0x3; case COCOA: return ((CocoaPlant) blockState.getData()).getSize() == CocoaPlantSize.LARGE; default: return ModUtils.isCustomHerbalismBlock(blockState); } } /** * Determine if a given block should 
be affected by Super Breaker * * @param blockState The {@link BlockState} of the block to check * @return true if the block should affected by Super Breaker, false otherwise */ public static Boolean affectedBySuperBreaker(BlockState blockState) { switch (blockState.getType()) { case COAL_ORE: case DIAMOND_ORE: case ENDER_STONE: case GLOWING_REDSTONE_ORE: case GLOWSTONE: case GOLD_ORE: case IRON_ORE: case LAPIS_ORE: case MOSSY_COBBLESTONE: case NETHERRACK: case OBSIDIAN: case QUARTZ_ORE: case REDSTONE_ORE: case SANDSTONE: case STONE: case EMERALD_ORE: return true; default: return ModUtils.isCustomMiningBlock(blockState); } } /** * Determine if a given block should be affected by Giga Drill Breaker * * @param blockState The {@link BlockState} of the block to check * @return true if the block should affected by Giga Drill Breaker, false otherwise */ public static boolean affectedByGigaDrillBreaker(BlockState blockState) { switch (blockState.getType()) { case CLAY: case DIRT: case GRASS: case GRAVEL: case MYCEL: case SAND: case SOUL_SAND: return true; default: return ModUtils.isCustomExcavationBlock(blockState); } } /** * Determine if a given block should be affected by Tree Feller * * @param blockState The {@link BlockState} of the block to check * @return true if the block should affected by Tree Feller, false otherwise */ public static boolean affectedByTreeFeller(BlockState blockState) { switch (blockState.getType()) { case LOG: case LEAVES: case HUGE_MUSHROOM_1: case HUGE_MUSHROOM_2: return true; default: return ModUtils.isCustomWoodcuttingBlock(blockState); } } /** * Check if a given block is a log * * @param blockState The {@link BlockState} of the block to check * @return true if the block is a log, false otherwise */ public static boolean isLog(BlockState blockState) { switch (blockState.getType()) { case LOG: case HUGE_MUSHROOM_1: case HUGE_MUSHROOM_2: return true; default: return ModUtils.isCustomLogBlock(blockState); } } /** * Check if a given block is a 
leaf * * @param blockState The {@link BlockState} of the block to check * @return true if the block is a leaf, false otherwise */ public static boolean isLeaves(BlockState blockState) { switch (blockState.getType()) { case LEAVES: return true; default: return ModUtils.isCustomLeafBlock(blockState); } } /** * Determine if a given block should be affected by Flux Mining * * @param blockState The {@link BlockState} of the block to check * @return true if the block should affected by Flux Mining, false otherwise */ public static boolean affectedByFluxMining(BlockState blockState) { switch (blockState.getType()) { case IRON_ORE: case GOLD_ORE: return true; default: return false; } } /** * Determine if a given block can activate Herbalism abilities * * @param blockState The {@link BlockState} of the block to check * @return true if the block can be activate Herbalism abilities, false otherwise */ public static boolean canActivateHerbalism(BlockState blockState) { switch (blockState.getType()) { case DIRT: case GRASS: case SOIL: return false; default: return true; } } /** * Determine if a given block should be affected by Block Cracker * * @param blockState The {@link BlockState} of the block to check * @return true if the block should affected by Block Cracker, false otherwise */ public static boolean affectedByBlockCracker(BlockState blockState) { switch (blockState.getType()) { case SMOOTH_BRICK: return blockState.getRawData() == (byte) 0x0; default: return false; } } /** * Determine if a given block can be made into Mycelium * * @param blockState The {@link BlockState} of the block to check * @return true if the block can be made in Mycelium, false otherwise */ public static boolean canMakeShroomy(BlockState blockState) { switch (blockState.getType()) { case DIRT: case GRASS: return true; default: return false; } } }
package com.groupbyinc.api; import com.groupbyinc.api.config.ConnectionConfiguration; import com.groupbyinc.api.model.RefinementsResult; import com.groupbyinc.api.model.Results; import com.groupbyinc.api.request.RefinementsRequest; import com.groupbyinc.api.request.Request; import com.groupbyinc.common.apache.commons.collections4.MapUtils; import com.groupbyinc.common.apache.commons.lang3.StringUtils; import com.groupbyinc.common.apache.http.ConnectionClosedException; import com.groupbyinc.common.apache.http.Header; import com.groupbyinc.common.apache.http.HttpEntity; import com.groupbyinc.common.apache.http.NoHttpResponseException; import com.groupbyinc.common.apache.http.StatusLine; import com.groupbyinc.common.apache.http.client.config.RequestConfig; import com.groupbyinc.common.apache.http.client.methods.CloseableHttpResponse; import com.groupbyinc.common.apache.http.client.methods.HttpPost; import com.groupbyinc.common.apache.http.client.utils.URIBuilder; import com.groupbyinc.common.apache.http.entity.StringEntity; import com.groupbyinc.common.apache.http.impl.client.CloseableHttpClient; import com.groupbyinc.common.apache.http.impl.client.HttpClientBuilder; import com.groupbyinc.common.apache.http.impl.conn.PoolingHttpClientConnectionManager; import com.groupbyinc.common.apache.http.message.BasicHeader; import com.groupbyinc.common.apache.http.util.EntityUtils; import com.groupbyinc.common.jackson.Mappers; import com.groupbyinc.common.security.AesContent; import com.groupbyinc.common.security.AesEncryption; import com.groupbyinc.common.util.ThreadUtils; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.net.SocketException; import java.net.URI; import java.net.URISyntaxException; import java.nio.charset.Charset; import java.security.GeneralSecurityException; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; import 
java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.logging.Logger; /** * <code> * The Bridge is the class responsible for marshalling a query to and from the search service. * Because the bridge holds a connection pool that is expensive to create, it is highly recommended * that the bridge is held in the application memory scope and reused where appropriate. * <b>Do not create a new bridge object for each request as you will incur overhead that will * bring down your UI servers when under heavy load!</b> * </code> */ public abstract class AbstractBridge { private static final Logger LOG = Logger.getLogger(AbstractBridge.class.getName()); public static final int DEFAULT_RETRY_TIMEOUT = 80; public static final int DEFAULT_MAX_TRIES = 3; public static final String CLUSTER = "/cluster"; protected static final String COLON = ":"; protected static final String HTTP = "http: protected static final String HTTPS = "https: private static final String SEARCH = "/search"; private static final String REFINEMENTS = "/refinements"; private static final String BODY = "\nbody:\n"; private static final String EXCEPTION_FROM_BRIDGE = "Exception from bridge: "; public static final Charset UTF_8 = Charset.forName("UTF-8"); private final ConnectionConfiguration config; private final RequestConfig requestConfig; private final String bridgeUrl; private final String bridgeRefinementsUrl; private final String bridgeClusterUrl; protected String clientKey; private CloseableHttpClient httpClient; private long retryTimeout = DEFAULT_RETRY_TIMEOUT; private long maxTries = DEFAULT_MAX_TRIES; private volatile List<Header> headers = new ArrayList<Header>(); private ScheduledExecutorService idleConnectionMonitor = Executors.newSingleThreadScheduledExecutor(ThreadUtils.defaultThreadFactory("idle-connections", false, false)); /** * <code> * Constructor to create a bridge object that connects to the search api. 
* * JSON Reference: * The key as found in your key management page in the command center * * {"clientKey": "<client key>"} * * </code> * * @param clientKey * The key as found in your key management page in the command * center. * @param baseUrl * The base url the bridge is serving on. */ public AbstractBridge(String clientKey, String baseUrl) { this(clientKey, baseUrl, true, new ConnectionConfiguration()); } /** * <code> * Constructor to create a bridge object that connects to the search api. * * JSON Reference: * The key as found in your key management page in the command center * * {"clientKey": "<client key>"} * * </code> * * @param clientKey * The key as found in your key management page in the command * center. * @param baseUrl * The base url the bridge is serving on. * @param compressResponse * true to compress the response content, false to send uncompressed response. * @param config * Configuration for the underlying HttpClient instance. */ public AbstractBridge(String clientKey, String baseUrl, boolean compressResponse, ConnectionConfiguration config) { try { new URI(baseUrl); } catch (URISyntaxException e) { throw new IllegalStateException("Invalid url: " + baseUrl); } this.config = config; requestConfig = RequestConfig.custom().setConnectTimeout(config.getConnectTimeout()).setConnectionRequestTimeout(config.getConnectionRequestTimeout()).setSocketTimeout(config.getSocketTimeout()).build(); this.clientKey = clientKey; createClient(compressResponse); bridgeUrl = baseUrl + SEARCH; bridgeRefinementsUrl = bridgeUrl + REFINEMENTS; bridgeClusterUrl = baseUrl + CLUSTER; } private void createClient(boolean compressResponse) { final PoolingHttpClientConnectionManager cm = new PoolingHttpClientConnectionManager(); cm.setMaxTotal(config.getMaxConnections()); cm.setDefaultMaxPerRoute(config.getMaxConnectionsPerRoute()); HttpClientBuilder b = HttpClientBuilder.create(); if (!compressResponse) { b.disableContentCompression(); } httpClient = 
b.setConnectionManager(cm).setDefaultRequestConfig(requestConfig).build(); idleConnectionMonitor.scheduleAtFixedRate(new Runnable() { @Override public void run() { cm.closeExpiredConnections(); cm.closeIdleConnections(20, TimeUnit.SECONDS); } }, 10, 30, TimeUnit.SECONDS); } /** * <code> * Constructor to create a bridge object that connects to the search api. * * JSON Reference: * The key as found in your key management page in the command center * * {"clientKey": "<client key>"} * * </code> * * @param clientKey * The key as found in your key management page in the command * center. * @param baseUrl * The base url the bridge is serving on. * @param config * Configuration for the underlying HttpClient instance. */ public AbstractBridge(String clientKey, String baseUrl, ConnectionConfiguration config) { this(clientKey, baseUrl, true, config); } /** * <code> * Constructor to create a bridge object that connects to the search api. * * JSON Reference: * The key as found in your key management page in the command center * * {"clientKey": "<client key>"} * * </code> * * @param clientKey * The key as found in your key management page in the command * center. * @param baseUrl * The base url the bridge is serving on. * @param compressResponse * true to compress the response content, false to send uncompressed response. */ public AbstractBridge(String clientKey, String baseUrl, boolean compressResponse) { this(clientKey, baseUrl, compressResponse, new ConnectionConfiguration()); } /** * @internal */ public String getClusterBridgeUrl() { return bridgeClusterUrl; } /** * <code> * Connects to the search service, parses the response into a model * </code> * * @param query * A query representing the search. 
* * @return Results object from the search service */ public Results search(Query query) throws IOException { return search(clientKey, Collections.<String, String>emptyMap(), query); } protected Results search(String clientKey, Map<String, String> headers, Query query) throws IOException { InputStream data = fireRequest(getBridgeUrl(), query.getQueryUrlParams(), query.getBridgeJson(clientKey), query.isReturnBinary()); return map(data, query.isReturnBinary()); } protected InputStream fireRequest(String url, Map<String, String> urlParams, String body, boolean returnBinary) throws IOException { return fireRequest(url, urlParams, Collections.<String, String>emptyMap(), body, returnBinary); } protected InputStream fireRequest(String url, Map<String, String> urlParams, Map<String, String> headers, String body, boolean returnBinary) throws IOException { CloseableHttpResponse response = postToBridge(url, urlParams, headers, body); HttpEntity entity = response.getEntity(); if (response.getStatusLine().getStatusCode() == 200) { return entity.getContent(); } else { String status = response.getStatusLine().toString(); handleErrorStatus(status, EntityUtils.toByteArray(entity), returnBinary); return null; } } /** * @internal */ public String getBridgeUrl() { return bridgeUrl; } protected Results map(InputStream data, boolean returnBinary) { return Mappers.readValue(data, Results.class, returnBinary); } protected static void addHeader(List<Header> headers, String key, String value) { removeHeader(headers, key); headers.add(new BasicHeader(key, value)); } protected static void removeHeader(List<Header> headers, String key) { Iterator<Header> iterator = headers.iterator(); while (iterator.hasNext()) { Header header = iterator.next(); if (header.getName().equalsIgnoreCase(key)) { iterator.remove(); } } } protected static boolean containsHeader(List<Header> headers, String key) { for (Header header : headers) { if (header.getName().equalsIgnoreCase(key)) { return true; } } return 
false; } private CloseableHttpResponse postToBridge(String url, Map<String, String> urlParams, Map<String, String> headers, String bridgeJson) throws IOException { StringEntity entity = new StringEntity(bridgeJson, UTF_8); entity.setContentType("application/json"); CloseableHttpResponse response = null; boolean successful = false; int tries = 0; Exception lastError = null; List<Header> finalHeaders = new ArrayList<Header>(); finalHeaders.addAll(this.headers); for (Map.Entry<String, String> header : headers.entrySet()) { addHeader(finalHeaders, header.getKey(), header.getValue()); } while (!successful && tries < maxTries) { try { HttpPost httpPost = new HttpPost(generateURI(url, urlParams, tries)); for (Header header : finalHeaders) { httpPost.addHeader(header); } httpPost.setEntity(entity); response = httpClient.execute(httpPost); StatusLine statusLine = response.getStatusLine(); if (statusLine.getStatusCode() == 502) { ThreadUtils.sleep(retryTimeout); LOG.warning("Connection failed, retrying"); lastError = new IOException(statusLine.getReasonPhrase()); tries++; } else { successful = true; } } catch (URISyntaxException e) { LOG.severe("Invalid request, failing"); break; } catch (SocketException e) { ThreadUtils.sleep(retryTimeout); LOG.warning("Connection failed, retrying"); lastError = e; tries++; } catch (ConnectionClosedException e) { ThreadUtils.sleep(retryTimeout); LOG.warning("Connection failed, retrying"); lastError = e; tries++; } catch (NoHttpResponseException e) { ThreadUtils.sleep(retryTimeout); LOG.warning("Connection failed, retrying"); lastError = e; tries++; } } if (tries < maxTries) { return response; } throw new IOException("Tried to connect three times to: " + url, lastError); } protected void handleErrorStatus(String status, byte[] bytes, boolean returnBinary) throws IOException { StringBuilder msg = new StringBuilder(); try { String errors = map(new ByteArrayInputStream(bytes), returnBinary).getErrors(); if (StringUtils.isNotBlank(errors)) { 
msg.append(", ").append(errors); } } catch (Exception e) { LOG.warning("unable to parse error from response."); } finally { if (StringUtils.isBlank(msg)) { msg.append(BODY).append(StringUtils.toEncodedString(bytes, UTF_8)); } } throw new IOException(EXCEPTION_FROM_BRIDGE + status + msg.toString()); } protected URI generateURI(String url, Map<String, String> params, int tries) throws URISyntaxException { URIBuilder u = new URIBuilder(url); if (MapUtils.isNotEmpty(params)) { for (Map.Entry<String, String> e : params.entrySet()) { u.addParameter(e.getKey(), e.getValue()); } } u.addParameter("retry", Integer.toString(tries)); return u.build(); } /** * @internal * using the request object instead of the query object. */ public Results search(Request request) throws IOException { return search(request, Collections.<String, String>emptyMap()); } /** * @internal * using the request object instead of the query object. */ public Results search(Request request, Map<String, String> headers) throws IOException { makeBackwardsCompatible(request); String json = getJson(request); Boolean returnBinary = request.getReturnBinary() == null ? false : request.getReturnBinary(); InputStream data = fireRequest(getBridgeUrl(), request.getQueryUrlParams(), headers, json, returnBinary); return map(data, returnBinary); } private void makeBackwardsCompatible(Request request) { request.setClientKey(clientKey); if (request.getSkip() == null) { request.setSkip(0); } if (request.getPageSize() == null) { request.setPageSize(10); } if (request.getReturnBinary() != null && !request.getReturnBinary()) { request.setReturnBinary(null); } } private String getJson(Object request) { String json; try { json = Mappers.writeValueAsString(request); } catch (IllegalArgumentException e) { json = "{}"; } return json; } /** * <code> * Connects to the refinements service, parses the response into a model * Retrieves at most 10,000 refinements for the navigation specified. 
* </code> * * @param query * A query representing the search. * @param navigationName * The name of the navigation to get more refinements for. * * @return RefinementsResult object from the refinements service * * @throws IOException */ public RefinementsResult refinements(Query query, String navigationName) throws IOException { InputStream data = fireRequest(getBridgeRefinementsUrl(), query.getQueryUrlParams(), query.getBridgeRefinementsJson(clientKey, navigationName), query.isReturnBinary()); return mapRefinements(data, query.isReturnBinary()); } /** * @internal */ public String getBridgeRefinementsUrl() { return bridgeRefinementsUrl; } protected RefinementsResult mapRefinements(InputStream data, boolean returnBinary) { return Mappers.readValue(data, RefinementsResult.class, returnBinary); } /** * @internal * use RefinementsRequest object for refinement searches */ public RefinementsResult refinements(RefinementsRequest request) throws IOException { return refinements(request, Collections.<String, String>emptyMap()); } /** * @internal * use RefinementsRequest object for refinement searches */ public RefinementsResult refinements(RefinementsRequest request, Map<String, String> headers) throws IOException { makeBackwardsCompatible(request.getOriginalQuery()); String json = getJson(request); Boolean returnBinary = request.getOriginalQuery().getReturnBinary() == null ? false : request.getOriginalQuery().getReturnBinary(); InputStream data = fireRequest(getBridgeRefinementsUrl(), request.getOriginalQuery().getQueryUrlParams(), headers, json, returnBinary); return mapRefinements(data, returnBinary); } /** * <code> * Cleanup HTTP connection pool. * </code> */ public void shutdown() { try { idleConnectionMonitor.shutdown(); httpClient.close(); } catch (IOException e) { // silently close } } /** * <code> * Sets the retry timeout for a failed request. 
* </code> * * @param retryTimeout the retry timeout */ public void setRetryTimeout(long retryTimeout) { this.retryTimeout = retryTimeout; } /** * <code> * Sets the maximum number of times to try a request before returning an error. * </code> * * @param maxTries the maximum number of request attempts */ public void setMaxTries(long maxTries) { this.maxTries = maxTries; } public List<Header> getHeaders() { return headers; } /** * <code> * Set a list of headers. Use `getHeaders().add(new BasicHeader())` * </code> * @param headers The list of headers */ public void setHeaders(List<Header> headers) { this.headers = headers; } /** * <code> * Generates a secured payload * </code> * @param customerId The customerId as seen in Command Center. Ensure this is not the subdomain, which can be `customerId-cors.groupbycloud.com` * @param clientKey The customerId as seen in Command Center * @param query The query to encrypt */ public static AesContent generateSecuredPayload(String customerId, String clientKey, Query query) throws GeneralSecurityException { return generateSecuredPayload(customerId, clientKey, query.getBridgeJson(null)); } /** * <code> * Generates a secured payload * </code> * @param customerId The customerId as seen in Command Center. Ensure this is not the subdomain, which can be `customerId-cors.groupbycloud.com` * @param clientKey The customerId as seen in Command Center * @param requestJson The query to encrypt */ public static AesContent generateSecuredPayload(String customerId, String clientKey, String requestJson) throws GeneralSecurityException { AesEncryption encryption = new AesEncryption(clientKey, customerId); return encryption.encrypt(requestJson); } /** * <code> * Generates a secured payload * </code> * @param customerId The customerId as seen in Command Center. 
Ensure this is not the subdomain, which can be `customerId-cors.groupbycloud.com` * @param query The query to encrypt */ public AesContent generateSecuredPayload(String customerId, Query query) throws GeneralSecurityException { return generateSecuredPayload(customerId, clientKey, query); } }
package org.eclipse.persistence.internal.oxm.record;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.net.URL;
import java.util.Map;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import javax.xml.transform.Source;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.dom.DOMResult;
import javax.xml.transform.sax.SAXSource;
import javax.xml.transform.sax.SAXResult;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Schema;
import javax.xml.validation.ValidatorHandler;
import org.eclipse.persistence.exceptions.EclipseLinkException;
import org.eclipse.persistence.exceptions.XMLMarshalException;
import org.eclipse.persistence.internal.helper.ClassConstants;
import org.eclipse.persistence.internal.oxm.XMLConversionManager;
import org.eclipse.persistence.internal.sessions.AbstractSession;
import org.eclipse.persistence.oxm.XMLContext;
import org.eclipse.persistence.oxm.XMLDescriptor;
import org.eclipse.persistence.oxm.mappings.UnmarshalKeepAsElementPolicy;
import org.eclipse.persistence.oxm.record.XMLRootRecord;
import org.eclipse.persistence.oxm.XMLUnmarshaller;
import org.eclipse.persistence.oxm.record.UnmarshalRecord;
import org.eclipse.persistence.platform.xml.DefaultErrorHandler;
import org.eclipse.persistence.platform.xml.SAXDocumentBuilder;
import org.eclipse.persistence.platform.xml.XMLParser;
import org.eclipse.persistence.platform.xml.XMLPlatformFactory;
import org.eclipse.persistence.oxm.XMLUnmarshallerHandler;
import org.eclipse.persistence.platform.xml.XMLTransformer;
import org.w3c.dom.Node;
import org.xml.sax.ContentHandler;
import org.xml.sax.EntityResolver;
import org.xml.sax.ErrorHandler;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.eclipse.persistence.internal.oxm.record.XMLReader;

/**
 * INTERNAL:
 * <p><b>Purpose:</b>Provide an implementation of PlatformUnmarshaller that makes use of the SAX parser
 * to build Java Objects from SAX Events.
 * <p><b>Responsibilities:</b><ul>
 * <li>Implement the required unmarshal methods from PlatformUnmarshaller</li>
 * <li>Check to see if document preservation is enabled, and if so, always unmarshal from a node</li>
 * </ul>
 *
 * @author bdoughan
 * @see org.eclipse.persistence.oxm.platform.SAXPlatform
 */
public class SAXUnmarshaller implements PlatformUnmarshaller {
    private static final String VALIDATING = "http://xml.org/sax/features/validation";
    private static final String SCHEMA_LANGUAGE = "http://java.sun.com/xml/jaxp/properties/schemaLanguage";
    private static final String SCHEMA_SOURCE = "http://java.sun.com/xml/jaxp/properties/schemaSource";
    // BUG FIX: this literal was truncated to "http: (unterminated string) - restored to the
    // W3C XML Schema namespace URI expected by the JAXP schemaLanguage property.
    private static final String XML_SCHEMA = "http://www.w3.org/2001/XMLSchema";
    private static SAXParserFactory SHARED_PARSER_FACTORY;
    private int validationMode;
    private Object[] schemas;
    private SAXParser saxParser;
    private XMLReader xmlReader;
    private XMLUnmarshaller xmlUnmarshaller;
    private XMLParser xmlParser;
    private boolean isResultAlwaysXMLRoot;
    private SAXParserFactory saxParserFactory;

    static {
        SHARED_PARSER_FACTORY = createSAXParserFactory();
    }

    /**
     * Builds the SAX and DOM parsing infrastructure. A private factory is created only when
     * per-instance parser features are requested; otherwise the shared factory is used.
     *
     * @throws XMLMarshalException if the underlying parsers cannot be instantiated
     */
    public SAXUnmarshaller(XMLUnmarshaller xmlUnmarshaller, Map<String, Boolean> parserFeatures) throws XMLMarshalException {
        super();
        try {
            if(null != parserFeatures) {
                saxParserFactory = createSAXParserFactory();
                for(Map.Entry<String, Boolean> parserFeature : parserFeatures.entrySet()) {
                    try {
                        saxParserFactory.setFeature(parserFeature.getKey(), parserFeature.getValue());
                    } catch(org.xml.sax.SAXNotRecognizedException ex) {
                        //ignore if the parser doesn't recognize or support this feature
                    } catch(org.xml.sax.SAXNotSupportedException ex) {
                        //ignore if the parser doesn't recognize or support this feature
                    }
                }
            }
            saxParser = getSAXParserFactory().newSAXParser();
            xmlReader = new XMLReader(saxParser.getXMLReader());
            xmlReader.setErrorHandler(new DefaultErrorHandler());
            xmlParser = XMLPlatformFactory.getInstance().getXMLPlatform().newXMLParser();
            xmlParser.setNamespaceAware(true);
            xmlParser.setValidationMode(XMLParser.NONVALIDATING);
            this.xmlUnmarshaller = xmlUnmarshaller;
        } catch (Exception e) {
            throw XMLMarshalException.errorInstantiatingSchemaPlatform(e);
        }
    }

    public EntityResolver getEntityResolver() {
        return xmlReader.getEntityResolver();
    }

    /** Propagates the entity resolver to both the SAX reader and the DOM parser. */
    public void setEntityResolver(EntityResolver entityResolver) {
        xmlReader.setEntityResolver(entityResolver);
        xmlParser.setEntityResolver(entityResolver);
    }

    public ErrorHandler getErrorHandler() {
        return xmlParser.getErrorHandler();
    }

    /** Propagates the error handler to both the SAX reader and the DOM parser. */
    public void setErrorHandler(ErrorHandler errorHandler) {
        xmlReader.setErrorHandler(errorHandler);
        xmlParser.setErrorHandler(errorHandler);
    }

    public int getValidationMode() {
        return validationMode;
    }

    /**
     * Applies a validation mode (NONVALIDATING, DTD_VALIDATION, SCHEMA_VALIDATION)
     * to both the SAX reader and DOM parser; the mode is left unchanged on failure.
     */
    public void setValidationMode(int validationMode) {
        try {
            this.validationMode = validationMode;
            xmlParser.setValidationMode(validationMode);
            switch (validationMode) {
            case XMLParser.NONVALIDATING: {
                xmlReader.setFeature(VALIDATING, false);
                break;
            }
            case XMLParser.DTD_VALIDATION: {
                xmlReader.setFeature(VALIDATING, true);
                break;
            }
            case XMLParser.SCHEMA_VALIDATION: {
                try {
                    xmlReader.setFeature(VALIDATING, true);
                    saxParser.setProperty(SCHEMA_LANGUAGE, XML_SCHEMA);
                    saxParser.setProperty(SCHEMA_SOURCE, schemas);
                } catch (Exception e) {
                    // Schema validation unsupported; fall back to non-validating.
                    xmlReader.setFeature(VALIDATING, false);
                }
                break;
            }
            }
        } catch (Exception e) {
            // Don't change the validation mode.
        }
    }

    public void setWhitespacePreserving(boolean isWhitespacePreserving) {
        xmlParser.setWhitespacePreserving(isWhitespacePreserving);
    }

    public void setSchemas(Object[] schemas) {
        this.schemas = schemas;
    }

    /**
     * Installs a javax.xml.validation.Schema, rebuilding the SAX parser so the schema takes
     * effect while carrying over the current validating feature, entity resolver and error handler.
     */
    public void setSchema(Schema schema) {
        xmlParser.setXMLSchema(schema);
        if(saxParserFactory == null) {
            saxParserFactory = createSAXParserFactory();
        }
        saxParserFactory.setSchema(schema);
        try {
            saxParser = saxParserFactory.newSAXParser();
            XMLReader newXmlReader = new XMLReader(saxParser.getXMLReader());
            newXmlReader.setFeature(VALIDATING, xmlReader.getFeature(VALIDATING));
            newXmlReader.setEntityResolver(xmlReader.getEntityResolver());
            newXmlReader.setErrorHandler(xmlReader.getErrorHandler());
            xmlReader = newXmlReader;
            xmlParser.setXMLSchema(schema);
        } catch (Exception e) {
            throw XMLMarshalException.errorInstantiatingSchemaPlatform(e);
        }
    }

    public Schema getSchema() {
        Schema schema = null;
        try {
            schema = xmlParser.getXMLSchema();
        } catch(UnsupportedOperationException ex) {
            //if this parser doesn't support the setSchema/getSchema API, just return null;
        }
        return schema;
    }

    public Object unmarshal(File file) {
        try {
            if (xmlUnmarshaller.getXMLContext().hasDocumentPreservation()) {
                Node domElement = xmlParser.parse(file).getDocumentElement();
                return unmarshal(domElement);
            }
            FileInputStream inputStream = new FileInputStream(file);
            try {
                return unmarshal(inputStream);
            } finally {
                inputStream.close();
            }
        } catch (FileNotFoundException e) {
            throw XMLMarshalException.unmarshalException(e);
        } catch (IOException e) {
            throw XMLMarshalException.unmarshalException(e);
        }
    }

    public Object unmarshal(File file, Class clazz) {
        try {
            if (xmlUnmarshaller.getXMLContext().hasDocumentPreservation()) {
                Node domElement = xmlParser.parse(file).getDocumentElement();
                return unmarshal(domElement, clazz);
            }
            FileInputStream inputStream = new FileInputStream(file);
            try {
                return unmarshal(inputStream, clazz);
            } finally {
                inputStream.close();
            }
        } catch (FileNotFoundException e) {
            throw XMLMarshalException.unmarshalException(e);
        } catch (IOException e) {
            throw XMLMarshalException.unmarshalException(e);
        }
    }

    public Object unmarshal(InputStream inputStream) {
        if (xmlUnmarshaller.getXMLContext().hasDocumentPreservation()) {
            Node domElement = xmlParser.parse(inputStream).getDocumentElement();
            return unmarshal(domElement);
        }
        InputSource inputSource = new InputSource(inputStream);
        return unmarshal(inputSource);
    }

    public Object unmarshal(InputStream inputStream, Class clazz) {
        if (xmlUnmarshaller.getXMLContext().hasDocumentPreservation()) {
            Node domElement = xmlParser.parse(inputStream).getDocumentElement();
            return unmarshal(domElement, clazz);
        }
        InputSource inputSource = new InputSource(inputStream);
        return unmarshal(inputSource, clazz);
    }

    public Object unmarshal(InputSource inputSource) {
        return unmarshal(xmlReader, inputSource);
    }

    public Object unmarshal(InputSource inputSource, XMLReader xmlReader) {
        try {
            SAXUnmarshallerHandler saxUnmarshallerHandler = new SAXUnmarshallerHandler(xmlUnmarshaller.getXMLContext());
            saxUnmarshallerHandler.setXMLReader(xmlReader);
            saxUnmarshallerHandler.setUnmarshaller(xmlUnmarshaller);
            xmlReader.setContentHandler(saxUnmarshallerHandler);
            xmlReader.parse(inputSource);
            // resolve any mapping references
            saxUnmarshallerHandler.resolveReferences();
            return saxUnmarshallerHandler.getObject();
        } catch (IOException e) {
            throw XMLMarshalException.unmarshalException(e);
        } catch (SAXException e) {
            throw convertSAXException(e);
        }
    }

    public Object unmarshal(InputSource inputSource, Class clazz) {
        return unmarshal(xmlReader, inputSource, clazz);
    }

    /**
     * Unmarshals into the given reference class. Primitive-wrapper classes are handled via
     * XMLRootRecord; Object.class keeps unknown content as elements; otherwise the descriptor
     * for the class drives the unmarshal.
     */
    public Object unmarshal(InputSource inputSource, Class clazz, XMLReader xmlReader) {
        boolean isPrimitiveWrapper = isPrimitiveWrapper(clazz);
        UnmarshalRecord unmarshalRecord;
        XMLDescriptor xmlDescriptor = null;
        // for XMLObjectReferenceMappings we need a non-shared cache, so
        // try and get a Unit Of Work from the XMLContext
        AbstractSession session = null;
        // check for case where the reference class is a primitive wrapper - in this case, we
        // need to use the conversion manager to convert the node's value to the primitive
        // wrapper class, then create, populate and return an XMLRoot. This will be done
        // via XMLRootRecord.
        if (isPrimitiveWrapper) {
            unmarshalRecord = new XMLRootRecord(clazz);
            unmarshalRecord.setSession((AbstractSession) xmlUnmarshaller.getXMLContext().getSession(0));
        } else if(clazz == ClassConstants.OBJECT) {
            try{
                SAXUnmarshallerHandler saxUnmarshallerHandler = new SAXUnmarshallerHandler(xmlUnmarshaller.getXMLContext());
                saxUnmarshallerHandler.setXMLReader(xmlReader);
                saxUnmarshallerHandler.setUnmarshaller(xmlUnmarshaller);
                saxUnmarshallerHandler.setKeepAsElementPolicy(UnmarshalKeepAsElementPolicy.KEEP_UNKNOWN_AS_ELEMENT);
                xmlReader.setContentHandler(saxUnmarshallerHandler);
                xmlReader.parse(inputSource);
                // resolve any mapping references
                saxUnmarshallerHandler.resolveReferences();
                return saxUnmarshallerHandler.getObject();
            } catch (IOException e) {
                throw XMLMarshalException.unmarshalException(e);
            } catch (SAXException e) {
                throw convertSAXException(e);
            }
        } else {
            // for XMLObjectReferenceMappings we need a non-shared cache, so
            // try and get a Unit Of Work from the XMLContext
            session = xmlUnmarshaller.getXMLContext().getReadSession(clazz);
            xmlDescriptor = (XMLDescriptor) session.getDescriptor(clazz);
            unmarshalRecord = (UnmarshalRecord) xmlDescriptor.getObjectBuilder().createRecord(session);
        }
        try {
            unmarshalRecord.setXMLReader(xmlReader);
            unmarshalRecord.setUnmarshaller(xmlUnmarshaller);
            xmlReader.setContentHandler(unmarshalRecord);
            xmlReader.setLexicalHandler(unmarshalRecord);
            xmlReader.parse(inputSource);
        } catch (IOException e) {
            throw XMLMarshalException.unmarshalException(e);
        } catch (SAXException e) {
            throw convertSAXException(e);
        }
        // resolve mapping references
        xmlUnmarshaller.resolveReferences(session);
        if (isPrimitiveWrapper) {
            return unmarshalRecord.getCurrentObject();
        }
        return xmlDescriptor.wrapObjectInXMLRoot(unmarshalRecord, this.isResultAlwaysXMLRoot);
    }

    public Object unmarshal(Node node) {
        DOMReader reader = new DOMReader(xmlUnmarshaller);
        return unmarshal(reader, node);
    }

    public Object unmarshal(DOMReader reader, Node node) {
        try {
            SAXUnmarshallerHandler handler = new SAXUnmarshallerHandler(xmlUnmarshaller.getXMLContext());
            reader.setContentHandler(handler);
            handler.setXMLReader(reader);
            handler.setUnmarshaller(xmlUnmarshaller);
            reader.parse(node);
            handler.resolveReferences();
            return handler.getObject();
        } catch (SAXException e) {
            throw convertSAXException(e);
        }
    }

    public Object unmarshal(Node node, Class clazz) {
        DOMReader reader = new DOMReader(xmlUnmarshaller);
        return unmarshal(reader, node, clazz);
    }

    public Object unmarshal(DOMReader domReader, Node node, Class clazz) {
        boolean isPrimitiveWrapper = isPrimitiveWrapper(clazz);
        UnmarshalRecord unmarshalRecord;
        XMLDescriptor xmlDescriptor = null;
        AbstractSession session = null;
        // check for case where the reference class is a primitive wrapper - in this case, we
        // need to use the conversion manager to convert the node's value to the primitive
        // wrapper class, then create, populate and return an XMLRoot. This will be done
        // via XMLRootRecord.
        if (isPrimitiveWrapper) {
            unmarshalRecord = new XMLRootRecord(clazz);
            unmarshalRecord.setSession((AbstractSession)xmlUnmarshaller.getXMLContext().getSession(0));
        } else if(clazz == ClassConstants.OBJECT) {
            SAXUnmarshallerHandler saxUnmarshallerHandler = new SAXUnmarshallerHandler(xmlUnmarshaller.getXMLContext());
            saxUnmarshallerHandler.setXMLReader(domReader);
            saxUnmarshallerHandler.setUnmarshaller(xmlUnmarshaller);
            saxUnmarshallerHandler.setKeepAsElementPolicy(UnmarshalKeepAsElementPolicy.KEEP_UNKNOWN_AS_ELEMENT);
            domReader.setContentHandler(saxUnmarshallerHandler);
            try{
                domReader.parse(node);
            } catch (SAXException e) {
                throw convertSAXException(e);
            }
            // resolve any mapping references
            saxUnmarshallerHandler.resolveReferences();
            return saxUnmarshallerHandler.getObject();
        } else {
            // for XMLObjectReferenceMappings we need a non-shared cache, so
            // try and get a Unit Of Work from the XMLContext
            session = xmlUnmarshaller.getXMLContext().getReadSession(clazz);
            xmlDescriptor = (XMLDescriptor) session.getDescriptor(clazz);
            unmarshalRecord = (UnmarshalRecord) xmlDescriptor.getObjectBuilder().createRecord(session);
        }
        try {
            unmarshalRecord.setXMLReader(domReader);
            unmarshalRecord.setUnmarshaller(xmlUnmarshaller);
            domReader.setContentHandler(unmarshalRecord);
            domReader.setLexicalHandler(unmarshalRecord);
            domReader.parse(node);
        } catch (SAXException e) {
            throw convertSAXException(e);
        }
        // resolve mapping references
        xmlUnmarshaller.resolveReferences(session);
        if (isPrimitiveWrapper) {
            return unmarshalRecord.getCurrentObject();
        }
        return xmlDescriptor.wrapObjectInXMLRoot(unmarshalRecord, this.isResultAlwaysXMLRoot);
    }

    public Object unmarshal(Reader reader) {
        if (xmlUnmarshaller.getXMLContext().hasDocumentPreservation()) {
            Node domElement = xmlParser.parse(reader).getDocumentElement();
            return unmarshal(domElement);
        }
        InputSource inputSource = new InputSource(reader);
        return unmarshal(inputSource);
    }

    public Object unmarshal(Reader reader, Class clazz) {
        if (xmlUnmarshaller.getXMLContext().hasDocumentPreservation()) {
            Node domElement = xmlParser.parse(reader).getDocumentElement();
            return unmarshal(domElement, clazz);
        }
        InputSource inputSource = new InputSource(reader);
        return unmarshal(inputSource, clazz);
    }

    /** Dispatches on the concrete Source type (SAX, DOM, stream, or generic via transform). */
    public Object unmarshal(Source source) {
        if (source instanceof SAXSource) {
            SAXSource saxSource = (SAXSource) source;
            XMLReader xmlReader = null;
            if (saxSource.getXMLReader() != null) {
                if(saxSource.getXMLReader() instanceof XMLReader) {
                    xmlReader = (XMLReader) saxSource.getXMLReader();
                } else {
                    xmlReader = new XMLReader(saxSource.getXMLReader());
                }
                setValidatorHandler(xmlReader);
            }
            if (null == xmlReader) {
                return unmarshal(saxSource.getInputSource());
            } else {
                return unmarshal(saxSource.getInputSource(), xmlReader);
            }
        } else if (source instanceof DOMSource) {
            DOMSource domSource = (DOMSource) source;
            return unmarshal(domSource.getNode());
        } else if (source instanceof StreamSource) {
            StreamSource streamSource = (StreamSource) source;
            if (null != streamSource.getReader()) {
                return unmarshal(streamSource.getReader());
            } else if (null != streamSource.getInputStream()) {
                return unmarshal(streamSource.getInputStream());
            } else {
                return unmarshal(streamSource.getSystemId());
            }
        } else {
            // Unknown Source type: funnel it through an identity transform into our handler.
            XMLUnmarshallerHandler handler = this.xmlUnmarshaller.getUnmarshallerHandler();
            XMLTransformer transformer = XMLPlatformFactory.getInstance().getXMLPlatform().newXMLTransformer();
            SAXResult result = new SAXResult(handler);
            transformer.transform(source, result);
            return handler.getResult();
        }
    }

    /** Dispatches on the concrete Source type, unmarshalling into the given class. */
    public Object unmarshal(Source source, Class clazz) {
        if (source instanceof SAXSource) {
            SAXSource saxSource = (SAXSource) source;
            XMLReader xmlReader = null;
            if (saxSource.getXMLReader() != null) {
                if(saxSource.getXMLReader() instanceof XMLReader) {
                    xmlReader = (XMLReader) saxSource.getXMLReader();
                } else {
                    xmlReader = new XMLReader(saxSource.getXMLReader());
                }
                setValidatorHandler(xmlReader);
            }
            if (null == saxSource.getXMLReader()) {
                return unmarshal(saxSource.getInputSource(), clazz);
            } else {
                return unmarshal(saxSource.getInputSource(), clazz, xmlReader);
            }
        } else if (source instanceof DOMSource) {
            DOMSource domSource = (DOMSource) source;
            return unmarshal(domSource.getNode(), clazz);
        } else if (source instanceof StreamSource) {
            StreamSource streamSource = (StreamSource) source;
            if (null != streamSource.getReader()) {
                return unmarshal(streamSource.getReader(), clazz);
            } else if (null != streamSource.getInputStream()) {
                return unmarshal(streamSource.getInputStream(), clazz);
            } else {
                return unmarshal(streamSource.getSystemId(), clazz);
            }
        } else {
            // Unknown Source type: transform to DOM first, then unmarshal the node.
            DOMResult result = new DOMResult();
            XMLTransformer transformer = XMLPlatformFactory.getInstance().getXMLPlatform().newXMLTransformer();
            transformer.transform(source, result);
            return unmarshal(result.getNode(), clazz);
        }
    }

    public Object unmarshal(URL url) {
        InputStream inputStream = null;
        try {
            inputStream = url.openStream();
        } catch (Exception e) {
            throw XMLMarshalException.unmarshalException(e);
        }
        boolean hasThrownException = false;
        try {
            return unmarshal(inputStream);
        } catch (RuntimeException runtimeException) {
            hasThrownException = true;
            throw runtimeException;
        } finally {
            try {
                inputStream.close();
            } catch (IOException e) {
                // Don't mask an in-flight unmarshal exception with a close failure.
                if (!hasThrownException) {
                    throw XMLMarshalException.unmarshalException(e);
                }
            }
        }
    }

    public Object unmarshal(URL url, Class clazz) {
        try {
            InputStream inputStream = url.openStream();
            // BUG FIX: the stream was previously leaked when unmarshal threw;
            // close it unconditionally (matches unmarshal(URL)).
            try {
                return unmarshal(inputStream, clazz);
            } finally {
                inputStream.close();
            }
        } catch (IOException e) {
            throw XMLMarshalException.unmarshalException(e);
        }
    }

    public Object unmarshal(String systemId) {
        try {
            SAXUnmarshallerHandler saxUnmarshallerHandler = new SAXUnmarshallerHandler(xmlUnmarshaller.getXMLContext());
            saxUnmarshallerHandler.setXMLReader(xmlReader);
            saxUnmarshallerHandler.setUnmarshaller(xmlUnmarshaller);
            xmlReader.setContentHandler(saxUnmarshallerHandler);
            xmlReader.parse(systemId);
            // resolve mapping references
            saxUnmarshallerHandler.resolveReferences();
            return saxUnmarshallerHandler.getObject();
        } catch (IOException e) {
            throw XMLMarshalException.unmarshalException(e);
        } catch (SAXException e) {
            throw convertSAXException(e);
        }
    }

    public Object unmarshal(String systemId, Class clazz) {
        boolean isPrimitiveWrapper = isPrimitiveWrapper(clazz);
        UnmarshalRecord unmarshalRecord;
        XMLDescriptor xmlDescriptor = null;
        AbstractSession session = null;
        // check for case where the reference class is a primitive wrapper - in this case, we
        // need to use the conversion manager to convert the node's value to the primitive
        // wrapper class, then create, populate and return an XMLRoot. This will be done
        // via XMLRootRecord.
        if (isPrimitiveWrapper) {
            unmarshalRecord = new XMLRootRecord(clazz);
            // CONSISTENCY FIX: every other overload sets the session on the root record;
            // this one previously did not.
            unmarshalRecord.setSession((AbstractSession) xmlUnmarshaller.getXMLContext().getSession(0));
        } else if(clazz == ClassConstants.OBJECT) {
            SAXUnmarshallerHandler saxUnmarshallerHandler = new SAXUnmarshallerHandler(xmlUnmarshaller.getXMLContext());
            try {
                saxUnmarshallerHandler.setXMLReader(xmlReader);
                saxUnmarshallerHandler.setUnmarshaller(xmlUnmarshaller);
                saxUnmarshallerHandler.setKeepAsElementPolicy(UnmarshalKeepAsElementPolicy.KEEP_UNKNOWN_AS_ELEMENT);
                xmlReader.setContentHandler(saxUnmarshallerHandler);
                xmlReader.parse(systemId);
            } catch (IOException e) {
                throw XMLMarshalException.unmarshalException(e);
            } catch (SAXException e) {
                throw convertSAXException(e);
            }
            // resolve any mapping references
            saxUnmarshallerHandler.resolveReferences();
            return saxUnmarshallerHandler.getObject();
        } else {
            // for XMLObjectReferenceMappings we need a non-shared cache, so
            // try and get a Unit Of Work from the XMLContext
            session = xmlUnmarshaller.getXMLContext().getReadSession(clazz);
            xmlDescriptor = (XMLDescriptor) session.getDescriptor(clazz);
            unmarshalRecord = (UnmarshalRecord) xmlDescriptor.getObjectBuilder().createRecord(session);
        }
        try {
            unmarshalRecord.setXMLReader(xmlReader);
            unmarshalRecord.setUnmarshaller(xmlUnmarshaller);
            xmlReader.setContentHandler(unmarshalRecord);
            xmlReader.setLexicalHandler(unmarshalRecord);
            xmlReader.parse(systemId);
        } catch (IOException e) {
            throw XMLMarshalException.unmarshalException(e);
        } catch (SAXException e) {
            throw convertSAXException(e);
        }
        // resolve mapping references
        xmlUnmarshaller.resolveReferences(session);
        if (isPrimitiveWrapper) {
            return unmarshalRecord.getCurrentObject();
        }
        return xmlDescriptor.wrapObjectInXMLRoot(unmarshalRecord, this.isResultAlwaysXMLRoot);
    }

    public Object unmarshal(org.xml.sax.XMLReader xmlReader, InputSource inputSource) {
        try {
            XMLContext xmlContext = xmlUnmarshaller.getXMLContext();
            if (xmlContext.hasDocumentPreservation()) {
                SAXDocumentBuilder saxDocumentBuilder = new SAXDocumentBuilder();
                xmlReader.setContentHandler(saxDocumentBuilder);
                xmlReader.parse(inputSource);
                return unmarshal(saxDocumentBuilder.getDocument().getDocumentElement());
            }
            // Wrap a plain SAX reader in the extended XMLReader if necessary.
            XMLReader extendedXMLReader;
            if(xmlReader instanceof XMLReader) {
                extendedXMLReader = (XMLReader) xmlReader;
            } else {
                extendedXMLReader = new XMLReader(xmlReader);
            }
            SAXUnmarshallerHandler saxUnmarshallerHandler = new SAXUnmarshallerHandler(xmlContext);
            saxUnmarshallerHandler.setXMLReader(extendedXMLReader);
            saxUnmarshallerHandler.setUnmarshaller(xmlUnmarshaller);
            extendedXMLReader.setContentHandler(saxUnmarshallerHandler);
            extendedXMLReader.parse(inputSource);
            // resolve any mapping references
            saxUnmarshallerHandler.resolveReferences();
            return saxUnmarshallerHandler.getObject();
        } catch (IOException e) {
            throw XMLMarshalException.unmarshalException(e);
        } catch (SAXException e) {
            throw convertSAXException(e);
        }
    }

    public Object unmarshal(org.xml.sax.XMLReader xmlReader, InputSource inputSource, Class clazz) {
        try {
            XMLContext xmlContext = xmlUnmarshaller.getXMLContext();
            if (xmlContext.hasDocumentPreservation()) {
                SAXDocumentBuilder saxDocumentBuilder = new SAXDocumentBuilder();
                xmlReader.setContentHandler(saxDocumentBuilder);
                xmlReader.parse(inputSource);
                return unmarshal(saxDocumentBuilder.getDocument().getDocumentElement(), clazz);
            }
            boolean isPrimitiveWrapper = isPrimitiveWrapper(clazz);
            UnmarshalRecord unmarshalRecord;
            XMLDescriptor xmlDescriptor = null;
            AbstractSession session = null;
            // check for case where the reference class is a primitive wrapper - in this case, we
            // need to use the conversion manager to convert the node's value to the primitive
            // wrapper class, then create, populate and return an XMLRoot. This will be done
            // via XMLRootRecord.
            if (isPrimitiveWrapper) {
                unmarshalRecord = new XMLRootRecord(clazz);
                unmarshalRecord.setSession((AbstractSession) xmlUnmarshaller.getXMLContext().getSession(0));
            } else if(clazz == ClassConstants.OBJECT) {
                SAXUnmarshallerHandler saxUnmarshallerHandler = new SAXUnmarshallerHandler(xmlUnmarshaller.getXMLContext());
                saxUnmarshallerHandler.setXMLReader((XMLReader)xmlReader);
                saxUnmarshallerHandler.setUnmarshaller(xmlUnmarshaller);
                saxUnmarshallerHandler.setKeepAsElementPolicy(UnmarshalKeepAsElementPolicy.KEEP_UNKNOWN_AS_ELEMENT);
                xmlReader.setContentHandler(saxUnmarshallerHandler);
                xmlReader.parse(inputSource);
                // resolve any mapping references
                saxUnmarshallerHandler.resolveReferences();
                return saxUnmarshallerHandler.getObject();
            } else {
                // for XMLObjectReferenceMappings we need a non-shared cache, so
                // try and get a Unit Of Work from the XMLContext
                session = xmlContext.getReadSession(clazz);
                xmlDescriptor = (XMLDescriptor) session.getDescriptor(clazz);
                unmarshalRecord = (UnmarshalRecord) xmlDescriptor.getObjectBuilder().createRecord(session);
            }
            XMLReader extendedXMLReader;
            if(xmlReader instanceof XMLReader) {
                extendedXMLReader = (XMLReader) xmlReader;
            } else {
                extendedXMLReader = new XMLReader(xmlReader);
            }
            unmarshalRecord.setXMLReader(extendedXMLReader);
            unmarshalRecord.setUnmarshaller(xmlUnmarshaller);
            extendedXMLReader.setContentHandler(unmarshalRecord);
            extendedXMLReader.setLexicalHandler(unmarshalRecord);
            extendedXMLReader.parse(inputSource);
            // resolve mapping references
            xmlUnmarshaller.resolveReferences(session);
            if (isPrimitiveWrapper || clazz == ClassConstants.OBJECT) {
                return unmarshalRecord.getCurrentObject();
            }
            return xmlDescriptor.wrapObjectInXMLRoot(unmarshalRecord, this.isResultAlwaysXMLRoot);
        } catch (IOException e) {
            throw XMLMarshalException.unmarshalException(e);
        } catch (SAXException e) {
            throw convertSAXException(e);
        }
    }

    /** Unwraps an EclipseLinkException nested in a SAXException, else wraps the SAXException. */
    private EclipseLinkException convertSAXException(SAXException saxException) {
        Exception internalException = saxException.getException();
        if (internalException != null) {
            if (EclipseLinkException.class.isAssignableFrom(internalException.getClass())) {
                return (EclipseLinkException) internalException;
            } else {
                return XMLMarshalException.unmarshalException(internalException);
            }
        }
        return XMLMarshalException.unmarshalException(saxException);
    }

    public boolean isResultAlwaysXMLRoot() {
        return this.isResultAlwaysXMLRoot;
    }

    public void setResultAlwaysXMLRoot(boolean alwaysReturnRoot) {
        this.isResultAlwaysXMLRoot = alwaysReturnRoot;
    }

    /** True for JDK default-conversion types plus XMLGregorianCalendar and Duration. */
    private boolean isPrimitiveWrapper(Class clazz){
        return XMLConversionManager.getDefaultJavaTypes().get(clazz) != null
            ||ClassConstants.XML_GREGORIAN_CALENDAR.isAssignableFrom(clazz)
            ||ClassConstants.DURATION.isAssignableFrom(clazz);
    }

    /**
     * If a Schema was set on the unmarshaller then wrap the ContentHandler in
     * a ValidatorHandler.
     */
    private void setContentHandler(XMLReader xmlReader, ContentHandler contentHandler) {
        setValidatorHandler(xmlReader);
        xmlReader.setContentHandler(contentHandler);
    }

    private void setValidatorHandler(XMLReader xmlReader) {
        Schema schema = null;
        try {
            schema = getSAXParserFactory().getSchema();
        } catch (UnsupportedOperationException e) {
            // Oracle XDK does not support getSchema()
        }
        if (null != schema) {
            ValidatorHandler validatorHandler = schema.newValidatorHandler();
            xmlReader.setValidatorHandler(validatorHandler);
            validatorHandler.setErrorHandler(getErrorHandler());
        }
    }

    /** Returns the per-instance factory when one exists, else the shared static factory. */
    public SAXParserFactory getSAXParserFactory() {
        if(this.saxParserFactory == null) {
            return SHARED_PARSER_FACTORY;
        }
        return this.saxParserFactory;
    }

    private static SAXParserFactory createSAXParserFactory() {
        SAXParserFactory factory = SAXParserFactory.newInstance();
        factory.setNamespaceAware(true);
        try {
            factory.setFeature(XMLReader.NAMESPACE_PREFIXES_FEATURE, true);
            factory.setFeature(XMLReader.REPORT_IGNORED_ELEMENT_CONTENT_WHITESPACE_FEATURE, true);
        } catch(org.xml.sax.SAXNotRecognizedException ex) {
            // ignore if the parser doesn't recognize or support this feature
        } catch(org.xml.sax.SAXNotSupportedException ex) {
            // ignore if the parser doesn't recognize or support this feature
        } catch (ParserConfigurationException e) {
            // ignore if the parser doesn't recognize or support this feature
        }
        return factory;
    }
}
package org.jetel.data; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.jetel.metadata.DataRecordMetadata; /** * This class serves the role of DataRecords comparator.<br> * It can compare two records with different structure based on * specified fields. It is used when sorting, hashing or (in general) * comparing data.<br> * <br> * <i>Usage:</i><br> * <code> * key = new RecordKey(keyFieldNames,recordMetadata);<br> * key.init();<br> * key.compare(recordA,recordB); * </code> * * @author dpavlis * @since May 2, 2002 * @revision $Revision$ * @created January 26, 2003 */ public class RecordKey { private int keyFields[]; private DataRecordMetadata metadata; private String keyFieldNames[]; private final static char KEY_ITEMS_DELIMITER = ':'; private final static int DEFAULT_STRING_KEY_LENGTH = 32; private StringBuffer keyStr; private boolean equalNULLs = false; // specifies whether two NULLs are deemed equal /** * Constructor for the RecordKey object * * @param keyFieldNames names of individual fields composing the key * @param metadata metadata describing structure of DataRecord for which the key is built * @since May 2, 2002 */ public RecordKey(String keyFieldNames[], DataRecordMetadata metadata) { this.metadata = metadata; this.keyFieldNames = keyFieldNames; } /** * @param keyFields indices of fields composing the key * @param metadata metadata describing structure of DataRecord for which the key is built */ public RecordKey(int keyFields[], DataRecordMetadata metadata) { this.metadata = metadata; this.keyFields = keyFields; } // end init /** * Assembles string representation of the key based on current record's value. * * @param record DataRecord whose field's values will be used to create key string. 
* @return The KeyString value * @since May 2, 2002 */ public String getKeyString(DataRecord record) { if (keyStr == null){ keyStr = new StringBuffer(DEFAULT_STRING_KEY_LENGTH); }else{ keyStr.setLength(0); } for (int i = 0; i < keyFields.length; i++) { keyStr.append(record.getField(keyFields[i]).toString()); // not used for now keyStr.append(KEY_ITEMS_DELIMITER); } return keyStr.toString(); } /** * Performs initialization of internal data structures * * @since May 2, 2002 */ public void init() { if (keyFields == null) { Integer position; keyFields = new int[keyFieldNames.length]; Map fields = metadata.getFieldNames(); for (int i = 0; i < keyFieldNames.length; i++) { if ((position = (Integer) fields.get(keyFieldNames[i])) != null) { keyFields[i] = position.intValue(); } else { throw new RuntimeException( "Field name specified as a key doesn't exist: " + keyFieldNames[i]); } } }else if (keyFieldNames==null){ keyFieldNames=new String[keyFields.length]; for (int i=0;i<keyFields.length;i++){ keyFieldNames[i]=metadata.getField(keyFields[i]).getName(); } } } /** * Gets the keyFields attribute of the RecordKey object * * @return The keyFields value */ public int[] getKeyFields() { return keyFields; } /** * Gets fields (indexes) which are not part of the key * * @return * @since 31.1.2007 */ public int[] getNonKeyFields(){ Set<Integer> allFields=new LinkedHashSet<Integer>(); for(int i=0;i<metadata.getNumFields();i++){ allFields.add(new Integer(i)); } allFields.removeAll(Arrays.asList(keyFields)); int[] nonKey=new int[allFields.size()]; int counter=0; for(Integer index : allFields){ nonKey[counter++]=index.intValue(); } return nonKey; } /** * Gets number of fields defined by this key. 
* @return length of key */ public int getLenght() { return keyFields.length; } /** * Compares two records (of the same layout) based on defined key-fields and returns (-1;0;1) if (< ; = ; >) * * @param record1 Description of the Parameter * @param record2 Description of the Parameter * @return -1 ; 0 ; 1 */ public int compare(DataRecord record1, DataRecord record2) { int compResult; if (record1.getMetadata() != record2.getMetadata()) { throw new RuntimeException("Can't compare - records have different metadata associated." + " Possibly different structure"); } if (equalNULLs){ for (int i = 0; i < keyFields.length; i++) { compResult = record1.getField(keyFields[i]).compareTo(record2.getField(keyFields[i])); if (compResult != 0) { if (!(record1.getField(keyFields[i]).isNull&&record2.getField(keyFields[i]).isNull)){ return compResult; } } } }else { for (int i = 0; i < keyFields.length; i++) { compResult = record1.getField(keyFields[i]).compareTo(record2.getField(keyFields[i])); if (compResult != 0) { return compResult; } } } return 0; // seem to be the same } /** * Compares two records (can have different layout) based on defined key-fields * and returns (-1;0;1) if (< ; = ; >).<br> * The particular fields to be compared have to be of the same type ! * * @param secondKey RecordKey defined for the second record * @param record1 First record * @param record2 Second record * @return -1 ; 0 ; 1 */ public int compare(RecordKey secondKey, DataRecord record1, DataRecord record2) { int compResult; int[] record2KeyFields = secondKey.getKeyFields(); if (keyFields.length != record2KeyFields.length) { throw new RuntimeException("Can't compare. 
keys have different number of DataFields"); } if (equalNULLs){ for (int i = 0; i < keyFields.length; i++) { compResult = record1.getField(keyFields[i]).compareTo(record2.getField(record2KeyFields[i])); if (compResult != 0) { if (!(record1.getField(keyFields[i]).isNull&&record2.getField(keyFields[i]).isNull)){ return compResult; } } } }else{ for (int i = 0; i < keyFields.length; i++) { compResult = record1.getField(keyFields[i]).compareTo(record2.getField(record2KeyFields[i])); if (compResult != 0) { return compResult; } } } return 0; // seem to be the same } /** * Description of the Method * * @param record1 Description of the Parameter * @param record2 Description of the Parameter * @return Description of the Return Value */ public boolean equals(DataRecord record1, DataRecord record2) { if (record1.getMetadata() != record2.getMetadata()) { throw new RuntimeException("Can't compare - records have different metadata associated." + " Possibly different structure"); } if (equalNULLs){ for (int i = 0; i < keyFields.length; i++) { if (!record1.getField(keyFields[i]).equals(record2.getField(keyFields[i]))) { if (!(record1.getField(keyFields[i]).isNull&&record2.getField(keyFields[i]).isNull)){ return false; } } } }else{ for (int i = 0; i < keyFields.length; i++) { if (!record1.getField(keyFields[i]).equals(record2.getField(keyFields[i]))) { return false; } } } return true; } /** * This method serializes (saves) content of key fields only (for specified record) into * buffer. * * @param buffer ByteBuffer into which serialize key fields * @param record data record from which key fields will be serialized into ByteBuffer */ public void serializeKeyFields(ByteBuffer buffer,DataRecord record) { for (int i = 0; i < keyFields.length; i++) { record.getField(keyFields[i]).serialize(buffer); } } /** * This method deserializes (restores) content of key fields only (for specified record) from * buffer. 
* * @param buffer ByteBuffer from which deserialize key fields * @param record data record whose key fields will be deserialized from ByteBuffer * @since 29.1.2007 */ public void deserializeKeyFileds(ByteBuffer buffer,DataRecord record){ for (int i = 0; i < keyFields.length; i++) { record.getField(keyFields[i]).deserialize(buffer); } } /** * This method creates DataRecordMetadata object which represents fields composing this key. It can * be used for creating data record composed from key fields only. * @return DataRecordMetadata object */ public DataRecordMetadata generateKeyRecordMetadata(){ DataRecordMetadata metadata = new DataRecordMetadata(this.metadata.getName()+"key"); for (int i = 0; i < keyFields.length; i++) { metadata.addField(this.metadata.getField(keyFields[i])); } return metadata; } /** * toString method: creates a String representation of the object * @return the String representation */ public String toString() { StringBuffer buffer = new StringBuffer(); buffer.append("RecordKey["); if (keyFields == null) { buffer.append("keyFields = ").append("null"); } else { buffer.append("keyFields = ").append("["); for (int i = 0; i < keyFields.length; i++) { if (i != 0) { buffer.append(", "); } buffer.append(keyFields[i]); } buffer.append("]"); } buffer.append(", metadata = ").append(metadata.toString()); if (keyFieldNames == null) { buffer.append(", keyFieldNames = ").append("null"); } else { buffer.append(", keyFieldNames = ").append(Arrays.asList(keyFieldNames).toString()); } buffer.append(", KEY_ITEMS_DELIMITER = ").append(KEY_ITEMS_DELIMITER); buffer.append(", DEFAULT_KEY_LENGTH = ").append(DEFAULT_STRING_KEY_LENGTH); buffer.append(", EQUAL_NULLS = ").append(equalNULLs); buffer.append(", keyStr = ").append(keyStr); buffer.append("]"); return buffer.toString(); } /** * True if two NULL values (fields with NULL flag set) are considered equal * * @return Returns the equalNULLs. 
*/ public boolean isEqualNULLs() { return equalNULLs; } /** * Sets whether two NULL values (fields with NULL flag set) are considered equal.<br> * Default is false. * * @param equalNULLs The equalNULLs to set. */ public void setEqualNULLs(boolean equalNULLs) { this.equalNULLs = equalNULLs; } /** * This method checks if two RecordKeys are comparable * * @param secondKey * @return Integer array with numbers of incomparable fields, odd numbers are from this key * and even numbers are from second key. When lenghts of the keys differ, proper numbers are * returned as null. When keys are comparable there is returned array of length zero. */ public Integer[] getIncomparableFields(RecordKey secondKey){ List<Integer> incomparable = new ArrayList<Integer>(); int[] record2KeyFields = secondKey.getKeyFields(); DataRecordMetadata secondMetadata = secondKey.metadata; for (int i = 0; i < Math.max(keyFields.length, record2KeyFields.length); i++) { if (i<keyFields.length && i<record2KeyFields.length) { if (metadata.getFieldType(keyFields[i]) != secondMetadata.getFieldType(record2KeyFields[i])) { incomparable.add(keyFields[i]); incomparable.add(record2KeyFields[i]); } }else if (i>=keyFields.length) { incomparable.add(null); incomparable.add(record2KeyFields[i]); }else { incomparable.add(keyFields[i]); incomparable.add(null); } } return incomparable.toArray(new Integer[0]); } } // end RecordKey
package com.ideaheap; import java.util.Comparator; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; public class LoopsBasedCalculator { public Map<String, String> getStringTransition( final Set<String> stringSet, final Map<String, Double> stringCosts) { String[] stringArray = stringSet.toArray(new String[0]); Map.Entry<String, Double>[] stringCostArray = stringCosts .entrySet() .toArray(new Map.Entry[0]); return stringCosts.keySet().parallelStream().collect( Collectors.toConcurrentMap( (state) -> state, (state) -> stringTransitionCost( state, stringArray, stringCostArray ) ) ); } private String stringTransitionCost( final String state, final String[] stringSet, final Map.Entry<String, Double>[] expectedUtilities) { String maxString = null; double maxStringValue = Double.NEGATIVE_INFINITY; for (String str : stringSet) { double stringValue = 0.0; for (Map.Entry<String, Double> entry : expectedUtilities) { stringValue += getStateProbability(state, str, entry.getKey()) * entry.getValue(); } if (maxString == null || stringValue > maxStringValue) { maxStringValue = stringValue; maxString = str; } } return maxString; } private Double getStateProbability(String state, String act, String key) { return 5.0; } }
package com.imsweb.naaccrxml; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; /** * A format encapsulates a NAACCR version and a record type. It also makes the flat-file line length available based on those two fields. */ public final class NaaccrFormat { // version constants public static final String NAACCR_VERSION_160 = "160"; public static final String NAACCR_VERSION_150 = "150"; public static final String NAACCR_VERSION_140 = "140"; // list of supported versions private static final List<String> _SUPPORTED_VERSIONS = new ArrayList<>(); static { _SUPPORTED_VERSIONS.add(NAACCR_VERSION_160); _SUPPORTED_VERSIONS.add(NAACCR_VERSION_150); _SUPPORTED_VERSIONS.add(NAACCR_VERSION_140); } public static boolean isVersionSupported(String version) { return _SUPPORTED_VERSIONS.contains(version); } public static Set<String> getSupportedVersions() { return new HashSet<>(_SUPPORTED_VERSIONS); } // format constants public static final String NAACCR_FORMAT_16_ABSTRACT = "naaccr-160-abstract"; public static final String NAACCR_FORMAT_16_MODIFIED = "naaccr-160-modified"; public static final String NAACCR_FORMAT_16_CONFIDENTIAL = "naaccr-160-confidential"; public static final String NAACCR_FORMAT_16_INCIDENCE = "naaccr-160-incidence"; public static final String NAACCR_FORMAT_15_ABSTRACT = "naaccr-150-abstract"; public static final String NAACCR_FORMAT_15_MODIFIED = "naaccr-150-modified"; public static final String NAACCR_FORMAT_15_CONFIDENTIAL = "naaccr-150-confidential"; public static final String NAACCR_FORMAT_15_INCIDENCE = "naaccr-150-incidence"; public static final String NAACCR_FORMAT_14_ABSTRACT = "naaccr-140-abstract"; public static final String NAACCR_FORMAT_14_MODIFIED = "naaccr-140-modified"; public static final String NAACCR_FORMAT_14_CONFIDENTIAL = "naaccr-140-confidential"; public static final String NAACCR_FORMAT_14_INCIDENCE = "naaccr-140-incidence"; // list of supported formats private static final List<String> 
_SUPPORTED_FORMATS = new ArrayList<>(); static { _SUPPORTED_FORMATS.add(NAACCR_FORMAT_16_ABSTRACT); _SUPPORTED_FORMATS.add(NAACCR_FORMAT_16_MODIFIED); _SUPPORTED_FORMATS.add(NAACCR_FORMAT_16_CONFIDENTIAL); _SUPPORTED_FORMATS.add(NAACCR_FORMAT_16_INCIDENCE); _SUPPORTED_FORMATS.add(NAACCR_FORMAT_15_ABSTRACT); _SUPPORTED_FORMATS.add(NAACCR_FORMAT_15_MODIFIED); _SUPPORTED_FORMATS.add(NAACCR_FORMAT_15_CONFIDENTIAL); _SUPPORTED_FORMATS.add(NAACCR_FORMAT_15_INCIDENCE); _SUPPORTED_FORMATS.add(NAACCR_FORMAT_14_ABSTRACT); _SUPPORTED_FORMATS.add(NAACCR_FORMAT_14_MODIFIED); _SUPPORTED_FORMATS.add(NAACCR_FORMAT_14_CONFIDENTIAL); _SUPPORTED_FORMATS.add(NAACCR_FORMAT_14_INCIDENCE); } public static boolean isFormatSupported(String format) { return _SUPPORTED_FORMATS.contains(format); } public static Set<String> getSupportedFormats() { return new HashSet<>(_SUPPORTED_FORMATS); } // record type constants public static final String NAACCR_REC_TYPE_ABSTRACT = "A"; public static final String NAACCR_REC_TYPE_MODIFIED = "M"; public static final String NAACCR_REC_TYPE_CONFIDENTIAL = "C"; public static final String NAACCR_REC_TYPE_INCIDENCE = "I"; // list of supported record types private static final List<String> _SUPPORTED_REC_TYPES = new ArrayList<>(); static { _SUPPORTED_REC_TYPES.add(NAACCR_REC_TYPE_ABSTRACT); _SUPPORTED_REC_TYPES.add(NAACCR_REC_TYPE_MODIFIED); _SUPPORTED_REC_TYPES.add(NAACCR_REC_TYPE_CONFIDENTIAL); _SUPPORTED_REC_TYPES.add(NAACCR_REC_TYPE_INCIDENCE); } public static boolean isRecordTypeSupported(String recordType) { return _SUPPORTED_REC_TYPES.contains(recordType); } public static Set<String> getSupportedRecordTypes() { return new HashSet<>(_SUPPORTED_REC_TYPES); } public static NaaccrFormat getInstance(String format) { return new NaaccrFormat(format); } public static NaaccrFormat getInstance(String naaccrVersion, String recordType) { return new NaaccrFormat(getFormatFromVersionAndType(naaccrVersion, recordType)); } private String _naaccrVersion; private String 
_recordType; private int _lineLength; private NaaccrFormat(String format) { if (!isFormatSupported(format)) throw new RuntimeException("Unsupported format: " + format); String[] parts = format.split("\\-"); if (!isVersionSupported(parts[1])) throw new RuntimeException("Unsupported version: " + parts[1]); _naaccrVersion = parts[1]; switch (parts[2]) { case "abstract": _recordType = "A"; _lineLength = 22824; break; case "modified": _recordType = "M"; _lineLength = 22824; break; case "confidential": _recordType = "C"; _lineLength = 5564; break; case "incidence": _recordType = "I"; _lineLength = 3339; break; default: throw new RuntimeException("Unsupported format: " + parts[2]); } } private static String getFormatFromVersionAndType(String version, String type) { String format; switch (type) { case "A": format = "naaccr-" + version + "-abstract"; break; case "M": format = "naaccr-" + version + "-modified"; break; case "C": format = "naaccr-" + version + "-confidential"; break; case "I": format = "naaccr-" + version + "-incidence"; break; default: format = null; } return format; } public String getNaaccrVersion() { return _naaccrVersion; } public String getRecordType() { return _recordType; } public int getLineLength() { return _lineLength; } @Override public String toString() { return getFormatFromVersionAndType(_naaccrVersion, _recordType); } public String getDisplayName() { String formattedType; switch (_recordType) { case "A": formattedType = "Abstract"; break; case "M": formattedType = "Modified"; break; case "C": formattedType = "Confidential"; break; case "I": formattedType = "Incidence"; break; default: formattedType = "?"; } return "NAACCR " + _naaccrVersion.substring(0, 2) + "." 
+ _naaccrVersion.substring(2) + " " + formattedType; } @Override public boolean equals(Object o) { if (this == o) return true; if (!(o instanceof NaaccrFormat)) return false; NaaccrFormat that = (NaaccrFormat)o; if (!_naaccrVersion.equals(that._naaccrVersion)) return false; return _recordType.equals(that._recordType); } @Override public int hashCode() { int result = _naaccrVersion.hashCode(); result = 31 * result + _recordType.hashCode(); return result; } }
package org.intermine.dwr; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.lang.reflect.Constructor; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.TreeSet; import javax.servlet.ServletContext; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpSession; import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; import org.apache.lucene.queryParser.ParseException; import org.apache.struts.Globals; import org.apache.struts.util.MessageResources; import org.directwebremoting.WebContext; import org.directwebremoting.WebContextFactory; import org.intermine.InterMineException; import org.intermine.api.InterMineAPI; import org.intermine.api.bag.BagManager; import org.intermine.api.bag.TypeConverter; import org.intermine.api.mines.FriendlyMineManager; import org.intermine.api.profile.InterMineBag; import org.intermine.api.profile.Profile; import org.intermine.api.profile.ProfileAlreadyExistsException; import org.intermine.api.profile.ProfileManager; import org.intermine.api.profile.SavedQuery; import org.intermine.api.profile.TagManager; import org.intermine.api.query.WebResultsExecutor; import org.intermine.api.results.WebTable; import org.intermine.api.search.Scope; import org.intermine.api.search.SearchFilterEngine; import org.intermine.api.search.SearchRepository; import org.intermine.api.search.WebSearchable; import org.intermine.api.tag.TagNames; import org.intermine.api.template.TemplateManager; import org.intermine.api.template.TemplateQuery; import org.intermine.api.template.TemplateSummariser; import 
org.intermine.api.util.NameUtil; import org.intermine.metadata.FieldDescriptor; import org.intermine.metadata.Model; import org.intermine.objectstore.ObjectStore; import org.intermine.objectstore.ObjectStoreException; import org.intermine.objectstore.query.Query; import org.intermine.objectstore.query.ResultsRow; import org.intermine.pathquery.OrderDirection; import org.intermine.pathquery.Path; import org.intermine.pathquery.PathConstraint; import org.intermine.pathquery.PathException; import org.intermine.pathquery.PathQuery; import org.intermine.util.StringUtil; import org.intermine.util.TypeUtil; import org.intermine.web.autocompletion.AutoCompleter; import org.intermine.web.logic.Constants; import org.intermine.web.logic.config.Type; import org.intermine.web.logic.config.WebConfig; import org.intermine.web.logic.query.PageTableQueryMonitor; import org.intermine.web.logic.query.QueryMonitorTimeout; import org.intermine.web.logic.results.PagedTable; import org.intermine.web.logic.results.WebState; import org.intermine.web.logic.session.QueryCountQueryMonitor; import org.intermine.web.logic.session.SessionMethods; import org.intermine.web.logic.widget.EnrichmentWidget; import org.intermine.web.logic.widget.GraphWidget; import org.intermine.web.logic.widget.HTMLWidget; import org.intermine.web.logic.widget.TableWidget; import org.intermine.web.logic.widget.config.EnrichmentWidgetConfig; import org.intermine.web.logic.widget.config.GraphWidgetConfig; import org.intermine.web.logic.widget.config.HTMLWidgetConfig; import org.intermine.web.logic.widget.config.TableWidgetConfig; import org.intermine.web.logic.widget.config.WidgetConfig; import org.intermine.web.util.InterMineLinkGenerator; import org.json.JSONException; import org.json.JSONObject; /** * This class contains the methods called through DWR Ajax * * @author Xavier Watkins * */ public class AjaxServices { protected static final Logger LOG = Logger.getLogger(AjaxServices.class); private static final Object 
ERROR_MSG = "Error happened during DWR ajax service."; private static final String INVALID_NAME_MSG = "Invalid name. Names may only contain letters, " + "numbers, spaces, and underscores."; /** * Creates a favourite Tag for the given templateName * * @param name the name of the template we want to set as a favourite * @param type type of tag (bag or template) * @param isFavourite whether or not this item is currently a favourite */ public void setFavourite(String name, String type, boolean isFavourite) { try { WebContext ctx = WebContextFactory.get(); HttpSession session = ctx.getSession(); Profile profile = SessionMethods.getProfile(session); String nameCopy = name.replaceAll("#039;", "'"); TagManager tagManager = getTagManager(); // already a favourite. turning off. if (isFavourite) { tagManager.deleteTag(TagNames.IM_FAVOURITE, nameCopy, type, profile.getUsername()); // not a favourite. turning on. } else { tagManager.addTag(TagNames.IM_FAVOURITE, nameCopy, type, profile.getUsername()); } } catch (RuntimeException e) { processException(e); } } private static void processWidgetException(Exception e, String widgetId) { String msg = "Failed to render widget: " + widgetId; LOG.error(msg, e); } private static void processException(Exception e) { LOG.error(ERROR_MSG, e); if (e instanceof RuntimeException) { throw (RuntimeException) e; } throw new RuntimeException(e); } /** * Precomputes the given template query * @param templateName the template query name * @return a String to guarantee the service ran properly */ public String preCompute(String templateName) { try { WebContext ctx = WebContextFactory.get(); HttpSession session = ctx.getSession(); final InterMineAPI im = SessionMethods.getInterMineAPI(session); Profile profile = SessionMethods.getProfile(session); Map<String, TemplateQuery> templates = profile.getSavedTemplates(); TemplateQuery t = templates.get(templateName); WebResultsExecutor executor = im.getWebResultsExecutor(profile); try { 
session.setAttribute("precomputing_" + templateName, "true"); executor.precomputeTemplate(t); } catch (ObjectStoreException e) { LOG.error("Error while precomputing", e); } finally { session.removeAttribute("precomputing_" + templateName); } } catch (RuntimeException e) { processException(e); } return "precomputed"; } /** * Summarises the given template query. * * @param templateName the template query name * @return a String to guarantee the service ran properly */ public String summarise(String templateName) { try { WebContext ctx = WebContextFactory.get(); HttpSession session = ctx.getSession(); final InterMineAPI im = SessionMethods.getInterMineAPI(session); Profile profile = SessionMethods.getProfile(session); Map<String, TemplateQuery> templates = profile.getSavedTemplates(); TemplateQuery template = templates.get(templateName); TemplateSummariser summariser = im.getTemplateSummariser(); try { session.setAttribute("summarising_" + templateName, "true"); summariser.summarise(template); } catch (ObjectStoreException e) { LOG.error("Failed to summarise " + templateName, e); } catch (NullPointerException e) { NullPointerException e2 = new NullPointerException("No such template " + templateName); e2.initCause(e); throw e2; } finally { session.removeAttribute("summarising_" + templateName); } } catch (RuntimeException e) { processException(e); } return "summarised"; } /** * Rename a element such as history, name, bag * @param name the name of the element * @param type history, saved, bag * @param reName the new name for the element * @return the new name of the element as a String * @exception Exception if the application business logic throws * an exception */ public String rename(String name, String type, String reName) throws Exception { String newName; try { newName = reName.trim(); WebContext ctx = WebContextFactory.get(); HttpSession session = ctx.getSession(); Profile profile = SessionMethods.getProfile(session); SavedQuery sq; if (name.equals(newName) || 
StringUtils.isEmpty(newName)) { return name; } // TODO get error text from properties file if (!NameUtil.isValidName(newName)) { return INVALID_NAME_MSG; } if ("history".equals(type)) { if (profile.getHistory().get(name) == null) { return "<i>" + name + " does not exist</i>"; } if (profile.getHistory().get(newName) != null) { return "<i>" + newName + " already exists</i>"; } profile.renameHistory(name, newName); } else if ("saved".equals(type)) { if (profile.getSavedQueries().get(name) == null) { return "<i>" + name + " does not exist</i>"; } if (profile.getSavedQueries().get(newName) != null) { return "<i>" + newName + " already exists</i>"; } sq = profile.getSavedQueries().get(name); profile.deleteQuery(sq.getName()); sq = new SavedQuery(newName, sq.getDateCreated(), sq.getPathQuery()); profile.saveQuery(sq.getName(), sq); } else if ("bag".equals(type)) { try { profile.renameBag(name, newName); } catch (IllegalArgumentException e) { return "<i>" + name + " does not exist</i>"; } catch (ProfileAlreadyExistsException e) { return "<i>" + newName + " already exists</i>"; } } else { return "Type unknown"; } return newName; } catch (RuntimeException e) { processException(e); return null; } } /** * Generate a new API key for a given user. * @param username the user to generate the key for. * @return A new API key, or null if something untoward happens. * @throws Exception an exception. */ public String generateApiKey(String username) throws Exception { try { WebContext ctx = WebContextFactory.get(); HttpSession session = ctx.getSession(); final InterMineAPI im = SessionMethods.getInterMineAPI(session); final ProfileManager pm = im.getProfileManager(); Profile p = pm.getProfile(username); return pm.generateApiKey(p); } catch (RuntimeException e) { processException(e); return null; } } /** * Delete a user's API key, thus disabling webservice access. A message "deleted" * is returned to confirm success. * @param username The user whose key we should delete. 
* @return A confirmation string. * @throws Exception if somethign bad happens */ public String deleteApiKey(String username) throws Exception { try { WebContext ctx = WebContextFactory.get(); HttpSession session = ctx.getSession(); final InterMineAPI im = SessionMethods.getInterMineAPI(session); final ProfileManager pm = im.getProfileManager(); Profile p = pm.getProfile(username); p.setApiKey(null); return "deleted"; } catch (RuntimeException e) { processException(e); return null; } } /** * For a given bag, set its description * @param bagName the bag * @param description the description as entered by the user * @return the description for display on the jsp page * @throws Exception an exception */ public String saveBagDescription(String bagName, String description) throws Exception { try { WebContext ctx = WebContextFactory.get(); HttpSession session = ctx.getSession(); Profile profile = SessionMethods.getProfile(session); InterMineBag bag = profile.getSavedBags().get(bagName); if (bag == null) { throw new InterMineException("List \"" + bagName + "\" not found."); } bag.setDescription(description); profile.getSearchRepository().descriptionChanged(bag); return description; } catch (RuntimeException e) { processException(e); return null; } } /** * Set the description of a view path. 
* @param pathString the string representation of the path * @param description the new description * @return the description, or null if the description was empty */ public String changeViewPathDescription(String pathString, String description) { try { String descr = description; if (description.trim().length() == 0) { descr = null; } WebContext ctx = WebContextFactory.get(); HttpSession session = ctx.getSession(); PathQuery query = SessionMethods.getQuery(session); Path path = query.makePath(pathString); Path prefixPath = path.getPrefix(); if (descr == null) { // setting to null removes the description query.setDescription(prefixPath.getNoConstraintsString(), null); } else { query.setDescription(prefixPath.getNoConstraintsString(), descr); } if (descr == null) { return null; } return descr.replaceAll("&", "&amp;").replaceAll("<", "&lt;").replaceAll(">", "&gt;"); } catch (RuntimeException e) { processException(e); return null; } catch (PathException e) { processException(e); return null; } } /* * Cannot be refactored from AjaxServices, else WebContextFactory.get() returns null */ private static WebState getWebState() { HttpSession session = WebContextFactory.get().getSession(); return SessionMethods.getWebState(session); } /** * Get the summary for the given column * @param summaryPath the path for the column as a String * @param tableName name of column-owning table * @return a collection of rows * @throws Exception an exception */ public static List getColumnSummary(String tableName, String summaryPath) throws Exception { try { WebContext ctx = WebContextFactory.get(); HttpSession session = ctx.getSession(); final InterMineAPI im = SessionMethods.getInterMineAPI(session); Profile profile = SessionMethods.getProfile(session); WebResultsExecutor webResultsExecutor = im.getWebResultsExecutor(profile); WebTable webTable = (SessionMethods.getResultsTable(session, tableName)) .getWebTable(); PathQuery pathQuery = webTable.getPathQuery(); List<ResultsRow> results = 
(List) webResultsExecutor.summariseQuery(pathQuery, summaryPath); // Start the count of results Query countQuery = webResultsExecutor.makeSummaryQuery(pathQuery, summaryPath); QueryCountQueryMonitor clientState = new QueryCountQueryMonitor(Constants.QUERY_TIMEOUT_SECONDS * 1000, countQuery); MessageResources messages = (MessageResources) ctx.getHttpServletRequest() .getAttribute(Globals.MESSAGES_KEY); String qid = SessionMethods.startQueryCount(clientState, session, messages); List<ResultsRow> pageSizeResults = new ArrayList<ResultsRow>(); int rowCount = 0; for (ResultsRow row : results) { rowCount++; if (rowCount > 10) { break; } pageSizeResults.add(row); } return Arrays.asList(new Object[] {pageSizeResults, qid, new Integer(rowCount)}); } catch (RuntimeException e) { processException(e); return null; } } /** * Return the number of rows of results from the query with the given query id. If the size * isn't yet available, return null. The query must be started with * SessionMethods.startPagedTableCount(). 
* @param qid the id
 * @return the row count or null if not yet available
 */
public static Integer getResultsSize(String qid) {
    try {
        WebContext ctx = WebContextFactory.get();
        HttpSession session = ctx.getSession();
        QueryMonitorTimeout controller = (QueryMonitorTimeout)
            SessionMethods.getRunningQueryController(qid, session);

        // this could happen if the user navigates away then back to the page
        if (controller == null) {
            return null;
        }

        // First tickle the controller to avoid timeout
        controller.tickle();

        if (controller.isCancelledWithError()) {
            LOG.debug("query qid " + qid + " error");
            return null;
        } else if (controller.isCancelled()) {
            LOG.debug("query qid " + qid + " cancelled");
            return null;
        } else if (controller.isCompleted()) {
            LOG.debug("query qid " + qid + " complete");
            // Two monitor flavours: paged-table counts and plain count queries.
            if (controller instanceof PageTableQueryMonitor) {
                PagedTable pt = ((PageTableQueryMonitor) controller).getPagedTable();
                return new Integer(pt.getExactSize());
            }
            if (controller instanceof QueryCountQueryMonitor) {
                return new Integer(((QueryCountQueryMonitor) controller).getCount());
            }
            LOG.debug("query qid " + qid + " - unknown controller type");
            return null;
        } else {
            // query still running
            LOG.debug("query qid " + qid + " still running, making client wait");
            return null;
        }
    } catch (RuntimeException e) {
        processException(e);
        return null;
    }
}

/**
 * Given a scope, type, tags and some filter text, produce a list of matching WebSearchable, in
 * a format useful in JavaScript. Each element of the returned List is a List containing a
 * WebSearchable name, a score (from Lucene) and a string with the matching parts of the
 * description highlighted.
* @param scope the scope (from TemplateHelper.GLOBAL_TEMPLATE or TemplateHelper.USER_TEMPLATE,
 * even though not all WebSearchables are templates)
 * @param type the type (from TagTypes)
 * @param tags the tags to filter on
 * @param filterText the text to pass to Lucene
 * @param filterAction toggles favourites filter off an on; will be blank or 'favourites'
 *        (NOTE(review): this parameter is currently unused in the method body)
 * @param callId unique id
 * @return a List of Lists
 */
public static List<String> filterWebSearchables(String scope, String type,
        List<String> tags, String filterText, String filterAction, String callId) {
    try {
        ServletContext servletContext = WebContextFactory.get().getServletContext();
        HttpSession session = WebContextFactory.get().getSession();
        final InterMineAPI im = SessionMethods.getInterMineAPI(session);
        ProfileManager pm = im.getProfileManager();
        Profile profile = SessionMethods.getProfile(session);
        Map<String, WebSearchable> wsMap;
        Map<WebSearchable, Float> hitMap = new LinkedHashMap<WebSearchable, Float>();
        Map<WebSearchable, String> highlightedDescMap = new HashMap<WebSearchable, String>();

        // Filter text of at least two characters triggers a Lucene search;
        // otherwise fall through to a plain scope-based listing below.
        if (filterText != null && filterText.length() > 1) {
            wsMap = new LinkedHashMap<String, WebSearchable>();
            //Map<WebSearchable, String> scopeMap = new LinkedHashMap<WebSearchable, String>();
            SearchRepository globalSearchRepository =
                SessionMethods.getGlobalSearchRepository(servletContext);
            try {
                long time = SearchRepository.runLeuceneSearch(filterText, scope, type,
                        profile, globalSearchRepository, hitMap, null, highlightedDescMap);
                LOG.info("Lucene search took " + time + " milliseconds");
            } catch (ParseException e) {
                // A search failure returns just the callId so the client can
                // match the (empty) response to its request.
                LOG.error("couldn't run lucene filter", e);
                ArrayList<String> emptyList = new ArrayList<String>();
                emptyList.add(callId);
                return emptyList;
            } catch (IOException e) {
                LOG.error("couldn't run lucene filter", e);
                ArrayList<String> emptyList = new ArrayList<String>();
                emptyList.add(callId);
                return emptyList;
            }

            //long time = System.currentTimeMillis();
            for (WebSearchable ws: hitMap.keySet()) {
                wsMap.put(ws.getName(), ws);
            }
        } else {
            if (scope.equals(Scope.USER)) {
                SearchRepository searchRepository = profile.getSearchRepository();
                wsMap = (Map<String, WebSearchable>) searchRepository.getWebSearchableMap(type);
            } else {
                SearchRepository globalRepository = SessionMethods
                    .getGlobalSearchRepository(servletContext);
                if (scope.equals(Scope.GLOBAL)) {
                    wsMap = (Map<String, WebSearchable>) globalRepository.
                        getWebSearchableMap(type);
                } else {
                    // must be "all"
                    // Merge user and global maps; global entries win on name clash
                    // because putAll overwrites.
                    SearchRepository userSearchRepository = profile.getSearchRepository();
                    Map<String, ? extends WebSearchable> userWsMap =
                        userSearchRepository.getWebSearchableMap(type);
                    Map<String, ? extends WebSearchable> globalWsMap =
                        globalRepository.getWebSearchableMap(type);
                    wsMap = new HashMap<String, WebSearchable>(userWsMap);
                    wsMap.putAll(globalWsMap);
                }
            }
        }
        Map<String, ? extends WebSearchable> filteredWsMap =
            new LinkedHashMap<String, WebSearchable>();
        //Filter by aspects (defined in superuser account)
        List<String> aspectTags = new ArrayList<String>();
        List<String> userTags = new ArrayList<String>();
        for (String tag :tags) {
            if (tag.startsWith(TagNames.IM_ASPECT_PREFIX)) {
                aspectTags.add(tag);
            } else {
                userTags.add(tag);
            }
        }
        if (aspectTags.size() > 0) {
            wsMap = new SearchFilterEngine().filterByTags(wsMap, aspectTags, type,
                    pm.getSuperuser(), getTagManager());
        }

        // User-tag filtering only applies to logged-in users.
        if (profile.getUsername() != null && userTags.size() > 0) {
            filteredWsMap = new SearchFilterEngine().filterByTags(wsMap, userTags, type,
                    profile.getUsername(), getTagManager());
        } else {
            filteredWsMap = wsMap;
        }

        List returnList = new ArrayList<String>();
        returnList.add(callId);
        // We need a modifiable map so we can filter out invalid templates
        LinkedHashMap<String, ? extends WebSearchable> modifiableWsMap =
            new LinkedHashMap(filteredWsMap);
        SearchRepository.filterOutInvalidTemplates(modifiableWsMap);
        // Each row: [name, highlightedDescription, score] for searches,
        // or [name, description] for plain listings.
        for (WebSearchable ws: modifiableWsMap.values()) {
            List row = new ArrayList();
            row.add(ws.getName());
            if (filterText != null && filterText.length() > 1) {
                row.add(highlightedDescMap.get(ws));
                row.add(hitMap.get(ws));
            } else {
                row.add(ws.getDescription());
            }
            returnList.add(row);
        }
        return returnList;
    } catch (RuntimeException e) {
        processException(e);
        return null;
    }
}

/**
 * For a given bag name and a type different from the bag type, give the number of
 * converted objects
 *
 * @param bagName the name of the bag
 * @param type the type to convert to
 * @return the number of converted objects
 */
public static int getConvertCountForBag(String bagName, String type) {
    try {
        HttpSession session = WebContextFactory.get().getSession();
        final InterMineAPI im = SessionMethods.getInterMineAPI(session);
        String pckName = im.getModel().getPackageName();
        Profile profile = SessionMethods.getProfile(session);
        BagManager bagManager = im.getBagManager();
        TemplateManager templateManager = im.getTemplateManager();
        WebResultsExecutor webResultsExecutor = im.getWebResultsExecutor(profile);
        InterMineBag imBag = null;
        int count = 0;
        try {
            imBag = bagManager.getUserOrGlobalBag(profile, bagName);
            // Build a conversion query from the bag's type to the requested type
            // using the configured conversion templates, then count its results.
            List<TemplateQuery> conversionTemplates = templateManager.getConversionTemplates();
            PathQuery pathQuery = TypeConverter.getConversionQuery(conversionTemplates,
                    TypeUtil.instantiate(pckName + "." + imBag.getType()),
                    TypeUtil.instantiate(pckName + "." + type), imBag);
            count = webResultsExecutor.count(pathQuery);
        } catch (Exception e) {
            // Re-wrap so the outer handler records it through processException.
            throw new RuntimeException(e);
        }
        return count;
    } catch (RuntimeException e) {
        processException(e);
        return 0;
    }
}

/**
 * used on REPORT page
 *
 * For a gene, generate links to other intermines. Include gene and orthologues.
 *
 * Returns NULL if no values found.
It's possible that the identifier in the local mine will
 * match more than one entry in the remote mine but this will be handled by the portal of the
 * remote mine.
 *
 * @param organismName gene.organism
 * @param primaryIdentifier identifier for gene
 * @param symbol identifier for gene or NULL
 *        (NOTE(review): currently not passed to the link generator — confirm intended)
 * @return the links to friendly intermines
 */
public static String getFriendlyMineReportLinks(String organismName, String primaryIdentifier,
        String symbol) {
    ServletContext servletContext = WebContextFactory.get().getServletContext();
    HttpSession session = WebContextFactory.get().getSession();
    final InterMineAPI im = SessionMethods.getInterMineAPI(session);
    Properties webProperties = SessionMethods.getWebProperties(servletContext);
    FriendlyMineManager olm = FriendlyMineManager.getInstance(im, webProperties);
    InterMineLinkGenerator linkGen = null;
    // The generator lives in the bio web package; load it reflectively so this
    // class has no compile-time dependency on it.
    Class<?> clazz =
        TypeUtil.instantiate("org.intermine.bio.web.util.FriendlyMineReportLinkGenerator");
    Constructor<?> constructor;
    try {
        constructor = clazz.getConstructor(new Class[] {});
        linkGen = (InterMineLinkGenerator) constructor.newInstance(new Object[] {});
    } catch (Exception e) {
        LOG.error("Failed to instantiate FriendlyMineReportLinkGenerator because: " + e);
        return null;
    }
    return linkGen.getLinks(olm, null, organismName, primaryIdentifier).toString();
}

/**
 * For LIST ANALYSIS page - For a mine, test if that mine has orthologues
 *
 * @param mineName name of a friendly mine
 * @param organisms list of organisms for genes in this list
 * @param identifiers list of identifiers of genes in this list
 * @return the links to friendly intermines or an error message
 */
public static String getFriendlyMineListLinks(String mineName, String organisms,
        String identifiers) {
    // All three inputs are required; bail out quietly otherwise.
    if (StringUtils.isEmpty(mineName) || StringUtils.isEmpty(organisms)
            || StringUtils.isEmpty(identifiers)) {
        return null;
    }
    ServletContext servletContext = WebContextFactory.get().getServletContext();
    HttpSession session = WebContextFactory.get().getSession();
    final InterMineAPI im = SessionMethods.getInterMineAPI(session);
    Properties webProperties = SessionMethods.getWebProperties(servletContext);
    FriendlyMineManager linkManager = FriendlyMineManager.getInstance(im, webProperties);
    InterMineLinkGenerator linkGen = null;
    // Reflective load, same rationale as getFriendlyMineReportLinks above.
    Class<?> clazz =
        TypeUtil.instantiate("org.intermine.bio.web.util.FriendlyMineListLinkGenerator");
    Constructor<?> constructor;
    Collection<JSONObject> results = null;
    try {
        constructor = clazz.getConstructor(new Class[] {});
        linkGen = (InterMineLinkGenerator) constructor.newInstance(new Object[] {});
        // runs remote templates (possibly)
        results = linkGen.getLinks(linkManager, mineName, organisms, identifiers);
    } catch (Exception e) {
        LOG.error("Failed to instantiate FriendlyMineListLinkGenerator because: " + e);
        return null;
    }
    if (results == null || results.isEmpty()) {
        return null;
    }
    return results.toString();
}

/**
 * Saves information, that some element was toggled - displayed or hidden.
 *
 * @param elementId element id
 * @param opened new aspect state
 */
public static void saveToggleState(String elementId, boolean opened) {
    try {
        AjaxServices.getWebState().getToggledElements().put(elementId,
                Boolean.valueOf(opened));
    } catch (RuntimeException e) {
        processException(e);
    }
}

/**
 * Set state that should be saved during the session.
* @param name name of state
 * @param value value of state
 */
public static void setState(String name, String value) {
    try {
        AjaxServices.getWebState().setState(name, value);
    } catch (RuntimeException e) {
        processException(e);
    }
}

/**
 * validate bag upload
 * @param bagName name of new bag to be validated
 * @return error msg to display, if any
 */
public static String validateBagName(String bagName) {
    try {
        HttpSession session = WebContextFactory.get().getSession();
        final InterMineAPI im = SessionMethods.getInterMineAPI(session);
        Profile profile = SessionMethods.getProfile(session);
        BagManager bagManager = im.getBagManager();

        bagName = bagName.trim();
        // TODO get message text from the properties file
        if ("".equals(bagName)) {
            return "You cannot save a list with a blank name";
        }

        if (!NameUtil.isValidName(bagName)) {
            return INVALID_NAME_MSG;
        }

        // Name must be unique across the user's own bags and the public bags.
        if (profile.getSavedBags().get(bagName) != null) {
            return "The list name you have chosen is already in use";
        }
        if (bagManager.getGlobalBag(bagName) != null) {
            return "The list name you have chosen is already in use -"
                + " there is a public list called " + bagName;
        }
        // Empty string signals "no validation error" to the caller.
        return "";
    } catch (RuntimeException e) {
        processException(e);
        return null;
    }
}

/**
 * validation that happens before new bag is saved
 * @param bagName name of new bag
 * @param selectedBags bags involved in operation
 * @param operation which operation is taking place - delete, union, intersect or subtract
 * @return error msg, if any
 */
public static String validateBagOperations(String bagName, String[] selectedBags,
        String operation) {
    try {
        ServletContext servletContext = WebContextFactory.get().getServletContext();
        HttpSession session = WebContextFactory.get().getSession();
        Profile profile = SessionMethods.getProfile(session);

        // TODO get error text from the properties file
        if (selectedBags.length == 0) {
            return "No lists are selected";
        }
        if ("delete".equals(operation)) {
            // A bag referenced by any saved or historical query must not be deleted.
            for (int i = 0; i < selectedBags.length; i++) {
                Set<String> queries = new HashSet<String>();
                queries.addAll(queriesThatMentionBag(profile.getSavedQueries(),
                        selectedBags[i]));
                queries.addAll(queriesThatMentionBag(profile.getHistory(), selectedBags[i]));
                if (queries.size() > 0) {
                    return "List " + selectedBags[i] + " cannot be deleted as it is referenced "
                        + "by other queries " + queries;
                }
            }
            // NOTE(review): absence from the profile's saved bags is reported as
            // "shared list" — presumably such bags are global/shared; confirm.
            for (int i = 0; i < selectedBags.length; i++) {
                if (profile.getSavedBags().get(selectedBags[i]) == null) {
                    return "List " + selectedBags[i] + " cannot be deleted as it is a shared "
                        + "list";
                }
            }
        } else if (!"copy".equals(operation)) {
            // union/intersect/subtract need a non-default new name.
            Properties properties = SessionMethods.getWebProperties(servletContext);
            String defaultName = properties.getProperty("lists.input.example");
            if (("".equals(bagName) || (bagName.equalsIgnoreCase(defaultName)))) {
                return "New list name is required";
            } else if (!NameUtil.isValidName(bagName)) {
                return INVALID_NAME_MSG;
            }
        }
        return "";
    } catch (RuntimeException e) {
        processException(e);
        return null;
    }
}

/**
 * Provide a list of queries that mention a named bag
 * @param savedQueries a saved queries map (name -&gt; query)
 * @param bagName the name of a bag
 * @return the list of queries
 */
private static List<String> queriesThatMentionBag(Map<String, SavedQuery> savedQueries,
        String bagName) {
    try {
        List<String> queries = new ArrayList<String>();
        for (Iterator<String> i = savedQueries.keySet().iterator(); i.hasNext();) {
            String queryName = (String) i.next();
            SavedQuery query = (SavedQuery) savedQueries.get(queryName);
            if (query.getPathQuery().getBagNames().contains(bagName)) {
                queries.add(queryName);
            }
        }
        return queries;
    } catch (RuntimeException e) {
        processException(e);
        return null;
    }
}

/**
 * @param widgetId unique id for this widget
 * @param bagName name of list
 * @param selectedExtraAttribute extra attribute (like organism)
 * @return graph widget
 */
public static GraphWidget getProcessGraphWidget(String widgetId, String bagName,
        String selectedExtraAttribute) {
    try {
        ServletContext servletContext = WebContextFactory.get().getServletContext();
        HttpSession session = WebContextFactory.get().getSession();
        final InterMineAPI im = SessionMethods.getInterMineAPI(session);
        WebConfig webConfig = SessionMethods.getWebConfig(servletContext);
        ObjectStore os = im.getObjectStore();
        Model model = os.getModel();
        Profile profile = SessionMethods.getProfile(session);
        BagManager bagManager = im.getBagManager();
        InterMineBag imBag = bagManager.getUserOrGlobalBag(profile, bagName);

        // Widget configuration is keyed on the fully qualified type of the bag.
        Type type = webConfig.getTypes().get(model.getPackageName()
                + "." + imBag.getType());
        List<WidgetConfig> widgets = type.getWidgets();
        for (WidgetConfig widget: widgets) {
            if (widget.getId().equals(widgetId)) {
                GraphWidgetConfig graphWidgetConf = (GraphWidgetConfig) widget;
                graphWidgetConf.setSession(session);
                GraphWidget graphWidget = new GraphWidget(graphWidgetConf, imBag, os,
                        selectedExtraAttribute);
                return graphWidget;
            }
        }
    } catch (RuntimeException e) {
        processWidgetException(e, widgetId);
    }
    // No widget with the requested id configured for this bag type.
    return null;
}

/**
 * @param widgetId unique id for this widget
 * @param bagName name of list
 * @return HTML widget
 */
public static HTMLWidget getProcessHTMLWidget(String widgetId, String bagName) {
    try {
        ServletContext servletContext = WebContextFactory.get().getServletContext();
        HttpSession session = WebContextFactory.get().getSession();
        final InterMineAPI im = SessionMethods.getInterMineAPI(session);
        WebConfig webConfig = SessionMethods.getWebConfig(servletContext);
        Model model = im.getModel();
        Profile profile = SessionMethods.getProfile(session);
        BagManager bagManager = im.getBagManager();
        InterMineBag imBag = bagManager.getUserOrGlobalBag(profile, bagName);
        Type type = webConfig.getTypes().get(model.getPackageName() + "."
                + imBag.getType());
        List<WidgetConfig> widgets = type.getWidgets();
        for (WidgetConfig widget: widgets) {
            if (widget.getId().equals(widgetId)) {
                HTMLWidgetConfig htmlWidgetConf = (HTMLWidgetConfig) widget;
                HTMLWidget htmlWidget = new HTMLWidget(htmlWidgetConf);
                return htmlWidget;
            }
        }
    } catch (RuntimeException e) {
        processWidgetException(e, widgetId);
    }
    return null;
}

/**
 *
 * @param widgetId unique ID for this widget
 * @param bagName name of list
 * @return table widget
 */
public static TableWidget getProcessTableWidget(String widgetId, String bagName) {
    try {
        ServletContext servletContext = WebContextFactory.get().getServletContext();
        HttpSession session = WebContextFactory.get().getSession();
        final InterMineAPI im = SessionMethods.getInterMineAPI(session);
        WebConfig webConfig = SessionMethods.getWebConfig(servletContext);
        ObjectStore os = im.getObjectStore();
        Model model = os.getModel();
        Profile profile = SessionMethods.getProfile(session);
        BagManager bagManager = im.getBagManager();
        InterMineBag imBag = bagManager.getUserOrGlobalBag(profile, bagName);
        Map<String, List<FieldDescriptor>> classKeys = im.getClassKeys();
        Type type = webConfig.getTypes().get(model.getPackageName() + "."
                + imBag.getType());
        List<WidgetConfig> widgets = type.getWidgets();
        for (WidgetConfig widgetConfig: widgets) {
            if (widgetConfig.getId().equals(widgetId)) {
                TableWidgetConfig tableWidgetConfig = (TableWidgetConfig) widgetConfig;
                tableWidgetConfig.setClassKeys(classKeys);
                tableWidgetConfig.setWebConfig(webConfig);
                TableWidget tableWidget = new TableWidget(tableWidgetConfig, imBag, os, null);
                return tableWidget;
            }
        }
    } catch (RuntimeException e) {
        processWidgetException(e, widgetId);
    }
    return null;
}

/**
 *
 * @param widgetId unique ID for each widget
 * @param bagName name of list
 * @param errorCorrection error correction method to use
 * @param max maximum value to display
 * @param filters list of strings used to filter widget results, ie Ontology
 * @param externalLink link to external datasource
 * @param externalLinkLabel name of external datasource.
 * @return enrichment widget
 */
public static EnrichmentWidget getProcessEnrichmentWidget(String widgetId, String bagName,
        String errorCorrection, String max, String filters, String externalLink,
        String externalLinkLabel) {
    try {
        ServletContext servletContext = WebContextFactory.get().getServletContext();
        HttpSession session = WebContextFactory.get().getSession();
        final InterMineAPI im = SessionMethods.getInterMineAPI(session);
        WebConfig webConfig = SessionMethods.getWebConfig(servletContext);
        ObjectStore os = im.getObjectStore();
        Model model = os.getModel();
        Profile profile = SessionMethods.getProfile(session);
        BagManager bagManager = im.getBagManager();
        InterMineBag imBag = bagManager.getUserOrGlobalBag(profile, bagName);
        Type type = webConfig.getTypes().get(model.getPackageName() + "."
                + imBag.getType());
        List<WidgetConfig> widgets = type.getWidgets();
        for (WidgetConfig widgetConfig : widgets) {
            if (widgetConfig.getId().equals(widgetId)) {
                EnrichmentWidgetConfig enrichmentWidgetConfig =
                    (EnrichmentWidgetConfig) widgetConfig;
                enrichmentWidgetConfig.setExternalLink(externalLink);
                enrichmentWidgetConfig.setExternalLinkLabel(externalLinkLabel);
                EnrichmentWidget enrichmentWidget = new EnrichmentWidget(
                        enrichmentWidgetConfig, imBag, os, filters, max, errorCorrection);
                return enrichmentWidget;
            }
        }
    } catch (RuntimeException e) {
        processWidgetException(e, widgetId);
    }
    return null;
}

/**
 * Add an ID to the PagedTable selection
 * @param selectedId the id
 * @param tableId the identifier for the PagedTable
 * @param columnIndex the column of the selected id
 * @return the field values of the first selected objects
 */
public static List<String> selectId(String selectedId, String tableId, String columnIndex) {
    WebContext ctx = WebContextFactory.get();
    HttpSession session = ctx.getSession();
    final InterMineAPI im = SessionMethods.getInterMineAPI(session);
    PagedTable pt = SessionMethods.getResultsTable(session, tableId);
    pt.selectId(new Integer(selectedId), (new Integer(columnIndex)).intValue());
    Map<String, List<FieldDescriptor>> classKeys = im.getClassKeys();
    ObjectStore os = im.getObjectStore();
    return pt.getFirstSelectedFields(os, classKeys);
}

/**
 * remove an Id from the PagedTable
 * @param deSelectId the ID to remove from the selection
 * @param tableId the PagedTable identifier
 * @return the field values of the first selected objects
 */
public static List<String> deSelectId(String deSelectId, String tableId) {
    WebContext ctx = WebContextFactory.get();
    HttpSession session = ctx.getSession();
    final InterMineAPI im = SessionMethods.getInterMineAPI(session);
    PagedTable pt = SessionMethods.getResultsTable(session, tableId);
    pt.deSelectId(new Integer(deSelectId));
    Map<String, List<FieldDescriptor>> classKeys = im.getClassKeys();
    ObjectStore os =
im.getObjectStore();
    return pt.getFirstSelectedFields(os, classKeys);
}

/**
 * Select all the elements in a PagedTable
 * @param index the index of the selected column
 * @param tableId the PagedTable identifier
 */
public static void selectAll(int index, String tableId) {
    HttpSession session = WebContextFactory.get().getSession();
    PagedTable pt = SessionMethods.getResultsTable(session, tableId);
    // Switch from per-id selection to whole-column selection.
    pt.clearSelectIds();
    pt.setAllSelectedColumn(index);
}

/**
 * AJAX request - reorder view.
 * @param newOrder the new order as a String
 * @param oldOrder the previous order as a String
 */
public void reorder(String newOrder, String oldOrder) {
    HttpSession session = WebContextFactory.get().getSession();
    List<String> newOrderList =
        new LinkedList<String>(StringUtil.serializedSortOrderToMap(newOrder).values());
    List<String> oldOrderList =
        new LinkedList<String>(StringUtil.serializedSortOrderToMap(oldOrder).values());

    List<String> view = SessionMethods.getEditingView(session);
    ArrayList<String> newView = new ArrayList<String>();

    // Rebuild the view by mapping each position in the new order back to the
    // element that held it in the old order.
    for (int i = 0; i < view.size(); i++) {
        String newi = newOrderList.get(i);
        int oldi = oldOrderList.indexOf(newi);
        newView.add(view.get(oldi));
    }

    PathQuery query = SessionMethods.getQuery(session);
    query.clearView();
    query.addViews(newView);
}

/**
 * AJAX request - reorder the constraints.
 * @param newOrder the new order as a String
 * @param oldOrder the previous order as a String
 */
public void reorderConstraints(String newOrder, String oldOrder) {
    HttpSession session = WebContextFactory.get().getSession();
    List<String> newOrderList =
        new LinkedList<String>(StringUtil.serializedSortOrderToMap(newOrder).values());
    List<String> oldOrderList =
        new LinkedList<String>(StringUtil.serializedSortOrderToMap(oldOrder).values());

    // Only template queries have editable constraints to reorder.
    PathQuery query = SessionMethods.getQuery(session);
    if (query instanceof TemplateQuery) {
        TemplateQuery template = (TemplateQuery) query;
        // Find the first position that changed, move that single constraint,
        // then stop — the client sends one move at a time.
        for (int index = 0; index < newOrderList.size() - 1; index++) {
            String newi = newOrderList.get(index);
            int oldi = oldOrderList.indexOf(newi);

            if (index != oldi) {
                List<PathConstraint> editableConstraints =
                    template.getModifiableEditableConstraints();
                PathConstraint editableConstraint = editableConstraints.remove(oldi);
                editableConstraints.add(index, editableConstraint);
                template.setEditableConstraints(editableConstraints);
                break;
            }
        }
    }
}

/**
 * Add a Node from the sort order
 * @param path the Path as a String
 * @param direction the direction to sort by
 * @exception Exception if the application business logic throws
 */
public void addToSortOrder(String path, String direction) throws Exception {
    HttpSession session = WebContextFactory.get().getSession();
    PathQuery query = SessionMethods.getQuery(session);
    OrderDirection orderDirection = OrderDirection.ASC;
    // NOTE(review): toUpperCase() uses the default locale; consider
    // equalsIgnoreCase or Locale.ROOT to be safe with e.g. Turkish locales.
    if ("DESC".equals(direction.toUpperCase())) {
        orderDirection = OrderDirection.DESC;
    }
    // A single-key sort order: previous ordering is discarded.
    query.clearOrderBy();
    query.addOrderBy(path, orderDirection);
}

/**
 * Work as a proxy for fetching remote file (RSS)
 * @param rssURL the url
 * @return String representation of a file
 */
public static String getNewsPreview(String rssURL) {
    try {
        URL url = new URL(rssURL);

        // NOTE(review): the reader is not closed if readLine() throws, and the
        // platform default charset is used for decoding — worth confirming.
        BufferedReader in = new BufferedReader(new InputStreamReader(url.openStream()));
        String str;
        StringBuffer sb = new StringBuffer();
        // append to string buffer
        while ((str = in.readLine()) != null) {
            sb.append(str);
        }
        in.close();
        return sb.toString();
    } catch (MalformedURLException e) {
        // Fall back to an empty preview rather than surfacing the error.
        return "";
    } catch (IOException e) {
        return "";
    }
}

/**
 * Adds tag and assures that there is only one tag for this combination of tag name, tagged
 * Object and type.
 * @param tag tag name
 * @param taggedObject object id that is tagged by this tag
 * @param type tag type
 * @return 'ok' string if succeeded else error string
 */
public static String addTag(String tag, String taggedObject, String type) {
    String tagName = tag;
    LOG.info("Called addTag(). tagName:" + tagName + " taggedObject:"
            + taggedObject + " type: " + type);
    try {
        HttpServletRequest request = getRequest();
        Profile profile = getProfile(request);
        tagName = tagName.trim();
        HttpSession session = request.getSession();
        // Tagging requires a logged-in user and non-empty name/type/object id.
        if (profile.getUsername() != null
                && !StringUtils.isEmpty(tagName)
                && !StringUtils.isEmpty(type)
                && !StringUtils.isEmpty(taggedObject)) {
            if (tagExists(tagName, taggedObject, type)) {
                return "Already tagged with this tag.";
            }
            if (!TagManager.isValidTagName(tagName)) {
                return INVALID_NAME_MSG;
            }
            // The "im:" style prefix is reserved for superuser-managed tags.
            if (tagName.startsWith(TagNames.IM_PREFIX)
                    && !SessionMethods.isSuperUser(session)) {
                return "You cannot add a tag starting with " + TagNames.IM_PREFIX + ", "
                    + "that is a reserved word.";
            }
            TagManager tagManager = getTagManager();
            tagManager.addTag(tagName, taggedObject, type, profile.getUsername());
            ServletContext servletContext = session.getServletContext();
            // Superuser tags affect global search results, so refresh the repository.
            if (SessionMethods.isSuperUser(session)) {
                SearchRepository tr = SessionMethods.
                    getGlobalSearchRepository(servletContext);
                tr.webSearchableTagChange(type, tagName);
            }
            return "ok";
        }
        return "Adding tag failed.";
    } catch (Throwable e) {
        LOG.error("Adding tag failed", e);
        return "Adding tag failed.";
    }
}

/**
 * Deletes tag.
* @param tagName tag name
 * @param tagged id of tagged object
 * @param type tag type
 * @return 'ok' string if succeeded else error string
 */
public static String deleteTag(String tagName, String tagged, String type) {
    LOG.info("Called deleteTag(). tagName:" + tagName + " taggedObject:"
            + tagged + " type: " + type);
    try {
        HttpServletRequest request = getRequest();
        HttpSession session = request.getSession();
        final InterMineAPI im = SessionMethods.getInterMineAPI(session);
        Profile profile = getProfile(request);
        TagManager manager = im.getTagManager();
        manager.deleteTag(tagName, tagged, type, profile.getUsername());
        ServletContext servletContext = session.getServletContext();
        // Superuser tags affect global search results, so refresh the repository.
        if (SessionMethods.isSuperUser(session)) {
            SearchRepository tr = SessionMethods.getGlobalSearchRepository(servletContext);
            tr.webSearchableTagChange(type, tagName);
        }
        return "ok";
    } catch (Throwable e) {
        LOG.error("Deleting tag failed", e);
        return "Deleting tag failed.";
    }
}

/**
 * Returns all tags of specified tag type together with prefixes of these tags.
 * For instance: for tag 'bio:experiment' it automatically adds 'bio' tag.
 * @param type tag type
 * @return tags
 */
public static Set<String> getTags(String type) {
    HttpServletRequest request = getRequest();
    final InterMineAPI im = SessionMethods.getInterMineAPI(request.getSession());
    TagManager tagManager = im.getTagManager();
    Profile profile = getProfile(request);
    if (profile.isLoggedIn()) {
        return tagManager.getUserTagNames(type, profile.getUsername());
    }
    // Anonymous users have no tags.
    return new TreeSet<String>();
}

/**
 * Returns all tags by which is specified object tagged.
 * @param type tag type
 * @param tagged id of tagged object
 * @return tags
 */
public static Set<String> getObjectTags(String type, String tagged) {
    HttpServletRequest request = getRequest();
    final InterMineAPI im = SessionMethods.getInterMineAPI(request.getSession());
    TagManager tagManager = im.getTagManager();
    Profile profile = getProfile(request);
    if (profile.isLoggedIn()) {
        return tagManager.getObjectTagNames(tagged, type, profile.getUsername());
    }
    return new TreeSet<String>();
}

// Returns true if the current user already has this exact tag on the given object.
private static boolean tagExists(String tag, String taggedObject, String type) {
    HttpServletRequest request = getRequest();
    final InterMineAPI im = SessionMethods.getInterMineAPI(request.getSession());
    TagManager tagManager = im.getTagManager();
    String userName = getProfile(request).getUsername();
    return tagManager.getObjectTagNames(taggedObject, type, userName)
        .contains(tag);
}

// Profile of the user owning the current session.
private static Profile getProfile(HttpServletRequest request) {
    return SessionMethods.getProfile(request.getSession());
}

// Current HTTP request, obtained through the DWR web context.
private static HttpServletRequest getRequest() {
    return WebContextFactory.get().getHttpServletRequest();
}

// TagManager for the InterMineAPI bound to the current session.
private static TagManager getTagManager() {
    HttpServletRequest request = getRequest();
    final InterMineAPI im = SessionMethods.getInterMineAPI(request.getSession());
    return im.getTagManager();
}

/**
 * Set the constraint logic on a query to be the given expression.
*
 * @param expression the constraint logic for the query
 * @throws PathException if the query is invalid
 */
public static void setConstraintLogic(String expression) throws PathException {
    WebContext ctx = WebContextFactory.get();
    HttpSession session = ctx.getSession();
    PathQuery query = SessionMethods.getQuery(session);
    query.setConstraintLogic(expression);
    // Applying new logic may adjust join styles; surface any resulting
    // messages to the user via the session.
    List<String> messages = query.fixUpForJoinStyle();
    for (String message : messages) {
        SessionMethods.recordMessage(message, session);
    }
}

/**
 * Get the grouped constraint logic
 * @return a list representing the grouped constraint logic
 */
public static String getConstraintLogic() {
    WebContext ctx = WebContextFactory.get();
    HttpSession session = ctx.getSession();
    PathQuery query = SessionMethods.getQuery(session);
    return (query.getConstraintLogic());
}

/**
 * @param suffix string of input before request for more results
 * @param wholeList whether or not to show the entire list or a truncated version
 * @param field field name from the table for the lucene search
 * @param className class name (table in the database) for lucene search
 * @return an array of values for this classname.field
 */
public String[] getContent(String suffix, boolean wholeList, String field, String className) {
    ServletContext servletContext = WebContextFactory.get().getServletContext();
    AutoCompleter ac = SessionMethods.getAutoCompleter(servletContext);
    ac.createRAMIndex(className + "." + field);
    if (!wholeList && suffix.length() > 0) {
        // Truncated auto-complete: at most 31 entries.
        String[] shortList = ac.getFastList(suffix, field, 31);
        return shortList;
    } else if (suffix.length() > 2 && wholeList) {
        String[] longList = ac.getList(suffix, field);
        return longList;
    }
    // Fall-through: nothing to suggest.
    String[] defaultList = {""};
    return defaultList;
}

/**
 * Report the status of each saved bag in the session as a JSON-style string of
 * objects with "bagName", "status" and "size" fields.
 * @return string representation of the saved-bag status collection
 */
public String getSavedBagStatus() {
    Collection<JSONObject> jsonSavedBagStatus = new HashSet<JSONObject>();
    HttpSession session = WebContextFactory.get().getSession();
    Profile profile = SessionMethods.getProfile(session);
    Map<String, String> savedBagStatus =
        (Map<String, String>) session.getAttribute(Constants.SAVED_BAG_STATUS);
    JSONObject jsonSavedBag = null;
    String bagName = null;
    String status = null;
    try {
        for (Map.Entry<String, String> entry : savedBagStatus.entrySet()) {
            jsonSavedBag = new JSONObject();
            bagName = entry.getKey();
            status = entry.getValue();
            jsonSavedBag.put("bagName", bagName);
            jsonSavedBag.put("status", status);
            try {
                // Size is only meaningful for fully-saved ("current") bags.
                if (status.equals(Constants.CURRENT_BAG)) {
                    jsonSavedBag.put("size", profile.getSavedBags().get(bagName).getSize());
                } else {
                    jsonSavedBag.put("size", 0);
                }
            } catch (ObjectStoreException ose) {
                // NOTE(review): log message contains typos ("retriving", "beg")
                // left unchanged to preserve behavior.
                LOG.error("Error retriving the size of beg: " + bagName, ose);
            }
            jsonSavedBagStatus.add(jsonSavedBag);
        }
    } catch (JSONException jse) {
        LOG.error("Errors generating json objects", jse);
    }
    return jsonSavedBagStatus.toString();
}
}
package com.imsweb.seerapi.client;

import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.List;
import java.util.TimeZone;

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.Invocation.Builder;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.GenericType;
import javax.ws.rs.core.MediaType;

import org.codehaus.jackson.annotate.JsonAutoDetect.Visibility;
import org.codehaus.jackson.annotate.JsonMethod;
import org.codehaus.jackson.jaxrs.JacksonJsonProvider;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.SerializationConfig.Feature;
import org.codehaus.jackson.map.annotate.JsonSerialize.Inclusion;
import org.glassfish.jersey.message.GZipEncoder;

import com.imsweb.seerapi.client.disease.Disease;
import com.imsweb.seerapi.client.disease.DiseaseChangelogResults;
import com.imsweb.seerapi.client.disease.DiseaseSearch;
import com.imsweb.seerapi.client.disease.DiseaseSearchResults;
import com.imsweb.seerapi.client.disease.DiseaseVersion;
import com.imsweb.seerapi.client.disease.PrimarySite;
import com.imsweb.seerapi.client.disease.SamePrimaries;
import com.imsweb.seerapi.client.disease.SiteCategory;
import com.imsweb.seerapi.client.glossary.Glossary;
import com.imsweb.seerapi.client.glossary.GlossaryChangelogResults;
import com.imsweb.seerapi.client.glossary.GlossarySearch;
import com.imsweb.seerapi.client.glossary.GlossarySearchResults;
import com.imsweb.seerapi.client.glossary.GlossaryVersion;
import com.imsweb.seerapi.client.naaccr.NaaccrField;
import com.imsweb.seerapi.client.naaccr.NaaccrFieldName;
import com.imsweb.seerapi.client.naaccr.NaaccrVersion;
import com.imsweb.seerapi.client.rx.Rx;
import com.imsweb.seerapi.client.rx.RxChangelogResults;
import com.imsweb.seerapi.client.rx.RxSearch;
import com.imsweb.seerapi.client.rx.RxSearchResults;
import com.imsweb.seerapi.client.rx.RxVersion;
import com.imsweb.seerapi.client.shared.Version;
import com.imsweb.seerapi.client.siterecode.SiteRecode;
import com.imsweb.seerapi.client.staging.SchemaLookup;
import com.imsweb.seerapi.client.staging.StagingAlgorithm;
import com.imsweb.seerapi.client.staging.StagingData;
import com.imsweb.seerapi.client.staging.StagingSchema;
import com.imsweb.seerapi.client.staging.StagingSchemaInfo;
import com.imsweb.seerapi.client.staging.StagingTable;
import com.imsweb.seerapi.client.staging.StagingVersion;
import com.imsweb.seerapi.client.surgery.SurgeryTable;

/**
 * Entry point for Java API into SEER*API.
 */
public final class SeerApi {

    // output all dates in ISO-8061 format and UTC time
    // NOTE(review): SimpleDateFormat is not thread-safe; it appears to be used only to
    // configure the Jackson ObjectMapper below (never for direct formatting) — confirm
    // before adding any direct use of this field.
    private static final DateFormat _DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");

    // define the JSON provider which uses Jackson and a customized ObjectMapper
    private static final JacksonJsonProvider _JACKSON_PROVIDER = new JacksonJsonProvider();

    static {
        _DATE_FORMAT.setTimeZone(TimeZone.getTimeZone("UTC"));

        ObjectMapper mapper = new ObjectMapper();

        // do not write null values
        mapper.configure(Feature.WRITE_NULL_MAP_VALUES, false);
        mapper.setSerializationInclusion(Inclusion.NON_NULL);

        // set Date objects to output in readable customized format
        mapper.configure(Feature.WRITE_DATES_AS_TIMESTAMPS, false);
        mapper.setDateFormat(_DATE_FORMAT);

        // serialize fields only (no getter/setter auto-detection)
        mapper.setVisibility(JsonMethod.ALL, Visibility.NONE);
        mapper.setVisibility(JsonMethod.FIELD, Visibility.ANY);

        _JACKSON_PROVIDER.setMapper(mapper);
    }

    private Client _client;
    private String _baseUrl;
    private String _apiKey;

    /**
     * Creates a client API root object
     * @param baseUrl base URL for API
     * @param apiKey API key
     */
    private SeerApi(String baseUrl, String apiKey) {
        // normalize away a trailing slash so path concatenation is consistent
        if (baseUrl.endsWith("/"))
            baseUrl = baseUrl.substring(0, baseUrl.length() - 1);

        this._baseUrl = baseUrl;
        this._apiKey = apiKey;

        _client = ClientBuilder.newClient()
                .register(_JACKSON_PROVIDER)
                .register(GZipEncoder.class)
                .register(ErrorResponseFilter.class);
    }

    /**
     * Creates a connection to the API
     * @param baseUrl base URL for API
     * @param apiKey API key
     * @return a new SeerApi instance
     */
    protected static SeerApi connect(String baseUrl, String apiKey) {
        return new SeerApi(baseUrl, apiKey);
    }

    /**
     * Helper method to return a base web target
     * @param path the API path which is added onto the base URL
     * @return a WebTarget using the base URL and the passed path
     * @throws IllegalStateException if no API key was supplied to the constructor
     */
    private WebTarget createTarget(String path) {
        if (_apiKey == null || _apiKey.isEmpty())
            throw new IllegalStateException("This operation requires a credential but none is given to the SeerApi constructor");

        return _client.target(_baseUrl).path(path);
    }

    /**
     * Builds the default invocation builder.  All requests are currently JSON and are GZIP encoded.
     * @param target the WebTarget for the API call
     * @return a Builder instance using the passed target and including default header information that is used on all our calls
     */
    private Builder getBuilder(WebTarget target) {
        return target.request(MediaType.APPLICATION_JSON_TYPE).header("X-SEERAPI-Key", _apiKey).acceptEncoding("gzip");
    }

    /**
     * Return the version of the SEER Site Recode database.
     * @return a String representing the database version
     */
    public String siteRecodeVersion() {
        WebTarget target = createTarget("/recode/version");
        return getBuilder(target).get(Version.class).getVersion();
    }

    /**
     * Return the SEER Site Group for the site/histology combination, or 99999 if the combination is unknown.
     * @param site Primary Site O3
     * @param histology Histology O3
     * @return a SiteRecode object based on the site and histology
     */
    public SiteRecode siteRecode(String site, String histology) {
        WebTarget target = createTarget("/recode/sitegroup").queryParam("site", site).queryParam("hist", histology);
        return getBuilder(target).get(SiteRecode.class);
    }

    /**
     * Return a collection of NaaccrVersion objects which describe the available versions
     * @return a list of the available NAACCR versions and information about each of them
     */
    public List<NaaccrVersion> naaccrVersions() {
        WebTarget target = createTarget("/naaccr/versions");
        return getBuilder(target).get(new GenericType<List<NaaccrVersion>>() {});
    }

    /**
     * Return a list of all the field identifiers and names from a specified NAACCR version
     * @param version NAACCR version
     * @return a list of NaaccrFieldName objects
     */
    public List<NaaccrFieldName> naaccrFieldNames(String version) {
        WebTarget target = createTarget("/naaccr/{version}").resolveTemplate("version", version);
        return getBuilder(target).get(new GenericType<List<NaaccrFieldName>>() {});
    }

    /**
     * Return a single NAACCR field by item number from a specified NAACCR version
     * @param version NAACCR version
     * @param item NAACCR item number
     * @return a NaaccrField object
     */
    public NaaccrField naaccrField(String version, Integer item) {
        WebTarget target = createTarget("/naaccr/{version}/item/{item}").resolveTemplate("version", version).resolveTemplate("item", item);
        return getBuilder(target).get(NaaccrField.class);
    }

    /**
     * Return a list of all disease versions and information about them
     * @return a list of DiseaseVersion objects
     */
    public List<DiseaseVersion> diseaseVersions() {
        WebTarget target = createTarget("/disease/versions");
        return getBuilder(target).get(new GenericType<List<DiseaseVersion>>() {});
    }

    /**
     * Return the changelog entries for the passed database version
     * @param version Disease version
     * @param fromDate if not null, only include changes from this date forward (YYYY-MM-DD)
     * @param toDate if not null, only include changes prior to this date (YYYY-MM-DD)
     * @param count if not null, limit the number returned
     * @return a DiseaseChangelogResults object
     */
    public DiseaseChangelogResults diseaseChangelogs(String version, String fromDate, String toDate, Integer count) {
        WebTarget target = createTarget("/disease/{version}/changelog")
                .resolveTemplate("version", version)
                .queryParam("from", fromDate)
                .queryParam("to", toDate)
                .queryParam("count", count);
        return getBuilder(target).get(DiseaseChangelogResults.class);
    }

    /**
     * Return a list of matching diseases
     * @param version Disease version
     * @param search DiseaseSearch object
     * @return a DiseaseSearchResults object
     */
    public DiseaseSearchResults diseaseSearch(String version, DiseaseSearch search) {
        WebTarget target = createTarget("/disease/{version}").resolveTemplate("version", version);

        target = target.queryParam("q", search.getQuery())
                .queryParam("type", search.getType())
                .queryParam("site_category", search.getSiteCategory())
                .queryParam("mode", search.getMode())
                .queryParam("status", search.getStatus())
                .queryParam("assigned_to", search.getAssignedTo())
                .queryParam("modified_from", search.getModifiedFrom())
                .queryParam("modified_to", search.getModifiedTo())
                .queryParam("published_from", search.getPublishedFrom())
                .queryParam("published_to", search.getPublishedTo())
                .queryParam("been_published", search.getBeenPublished())
                .queryParam("hidden", search.getHidden())
                .queryParam("count", search.getCount())
                .queryParam("offset", search.getOffset())
                .queryParam("order", search.getOrderBy())
                .queryParam("output_type", search.getOutputType());

        return getBuilder(target).get(DiseaseSearchResults.class);
    }

    /**
     * Return a complete disease entity based on identifier.  Note that by default the disease entity does not include relevant glossary references.
     * @param version Disease version
     * @param id Disease identifier
     * @return a Disease object
     */
    public Disease diseaseById(String version, String id) {
        return diseaseById(version, id, false);
    }

    /**
     * Return a complete disease entity based on identifier
     * @param version Disease version
     * @param id Disease identifier
     * @param includeGlossary if true, include the glossary
     * @return a Disease object
     */
    public Disease diseaseById(String version, String id, boolean includeGlossary) {
        WebTarget target = createTarget("/disease/{version}/id/{id}")
                .resolveTemplate("version", version)
                .resolveTemplate("id", id)
                .queryParam("glossary", includeGlossary);
        return getBuilder(target).get(Disease.class);
    }

    /**
     * Return a list of all primary sites and labels
     * @return a List of PrimarySite objects
     */
    public List<PrimarySite> diseasePrimarySites() {
        WebTarget target = createTarget("/disease/primary_site");
        return getBuilder(target).get(new GenericType<List<PrimarySite>>() {});
    }

    /**
     * Return a single primary site and label
     * @param primarySite Primary Site O3
     * @return a list of PrimarySite objects matching the passed code
     */
    public List<PrimarySite> diseasePrimarySiteCode(String primarySite) {
        WebTarget target = createTarget("/disease/primary_site/{code}").resolveTemplate("code", primarySite);
        return getBuilder(target).get(new GenericType<List<PrimarySite>>() {});
    }

    /**
     * Return a complete list of site categories and definitions
     * @return a list of SiteCategory objects
     */
    public List<SiteCategory> diseaseSiteCategories() {
        WebTarget target = createTarget("/disease/site_categories");
        return getBuilder(target).get(new GenericType<List<SiteCategory>>() {});
    }

    /**
     * Return whether the 2 morphologies represent the same primary for the given year.
     * @param version Disease version
     * @param morphology1 ICD O3 Morphology
     * @param morphology2 ICD O3 Morphology
     * @param year Year of Diagnosis
     * @return a SamePrimaries object
     */
    public SamePrimaries diseaseSamePrimaries(String version, String morphology1, String morphology2, String year) {
        WebTarget target = createTarget("/disease/{version}/same_primary")
                .resolveTemplate("version", version)
                .queryParam("d1", morphology1)
                .queryParam("d2", morphology2)
                .queryParam("year", year);
        return getBuilder(target).get(SamePrimaries.class);
    }

    /**
     * Returns the reportable year range of the supplied disease.
     * @param disease Disease object
     * @return a Disease object with the reportability field filled in
     */
    public Disease diseaseReportability(Disease disease) {
        WebTarget target = createTarget("/disease/reportability");
        return getBuilder(target).post(Entity.json(disease), Disease.class);
    }

    /**
     * Return a list of all glossary versions and information about them
     * @return a list of GlossaryVersion objects
     */
    public List<GlossaryVersion> glossaryVersions() {
        WebTarget target = createTarget("/glossary/versions");
        return getBuilder(target).get(new GenericType<List<GlossaryVersion>>() {});
    }

    /**
     * Return a complete glossary entity based on identifier
     * @param version Glossary version
     * @param id Glossary identifier
     * @return a Glossary object
     */
    public Glossary glossaryById(String version, String id) {
        WebTarget target = createTarget("/glossary/{version}/id/{id}").resolveTemplate("version", version).resolveTemplate("id", id);
        return getBuilder(target).get(Glossary.class);
    }

    /**
     * Return a list of matching glossaries
     * @param version Glossary version
     * @param search GlossarySearch object
     * @return a GlossarySearchResults object
     */
    public GlossarySearchResults glossarySearch(String version, GlossarySearch search) {
        WebTarget target = createTarget("/glossary/{version}").resolveTemplate("version", version);

        target = target.queryParam("q", search.getQuery())
                .queryParam("mode", search.getMode())
                .queryParam("status", search.getStatus())
                .queryParam("assigned_to", search.getAssignedTo())
                .queryParam("modified_from", search.getModifiedFrom())
                .queryParam("modified_to", search.getModifiedTo())
                .queryParam("published_from", search.getPublishedFrom())
                .queryParam("published_to", search.getPublishedTo())
                .queryParam("been_published", search.getBeenPublished())
                .queryParam("hidden", search.getHidden())
                .queryParam("count", search.getCount())
                .queryParam("offset", search.getOffset())
                .queryParam("order", search.getOrderBy())
                .queryParam("output_type", search.getOutputType());

        // list parameters need to passed as an object array to get multiple query parameters; otherwise there is a single query
        // parameter with a list of values, which the API won't understand
        if (search.getCategory() != null)
            target = target.queryParam("category", search.getCategory().toArray());

        return getBuilder(target).get(GlossarySearchResults.class);
    }

    /**
     * Return the changelog entries for the passed database version
     * @param version Glossary version
     * @param fromDate if not null, only include changes from this date forward (YYYY-MM-DD)
     * @param toDate if not null, only include changes prior to this date (YYYY-MM-DD)
     * @param count if not null, limit the number returned
     * @return a GlossaryChangelogResults object
     */
    public GlossaryChangelogResults glossaryChangelogs(String version, String fromDate, String toDate, Integer count) {
        WebTarget target = createTarget("/glossary/{version}/changelog")
                .resolveTemplate("version", version)
                .queryParam("from", fromDate)
                .queryParam("to", toDate)
                .queryParam("count", count);
        return getBuilder(target).get(GlossaryChangelogResults.class);
    }

    /**
     * Return a collection of Version objects which describe the available versions
     * @return a list of the available site-specific surgery versions and information about each of them
     */
    public List<Version> siteSpecificSurgeryVersions() {
        WebTarget target = createTarget("/surgery/versions");
        return getBuilder(target).get(new GenericType<List<Version>>() {});
    }

    /**
     * Return a list of all the site-specific surgery table titles from a specific version
     * @param version version
     * @return a list of site-specific surgery table titles
     */
    public List<String> siteSpecificSurgeryTables(String version) {
        WebTarget target = createTarget("/surgery/{version}/tables").resolveTemplate("version", version);
        return getBuilder(target).get(new GenericType<List<String>>() {});
    }

    /**
     * Return a specific site-specific surgery table from a specific version
     * @param version version
     * @param title site title (optional if the site/histology is provided)
     * @param site primary site (optional if the title is provided)
     * @param histology histology (optional if the title is provided)
     * @return a site-specific surgery table
     */
    public SurgeryTable siteSpecificSurgeryTable(String version, String title, String site, String histology) {
        WebTarget target = createTarget("/surgery/{version}/table")
                .resolveTemplate("version", version)
                .queryParam("title", title)
                .queryParam("site", site)
                .queryParam("hist", histology);
        return getBuilder(target).get(SurgeryTable.class);
    }

    /**
     * Return a list of all Rx versions and information about them.  Note that by default the Rx entity does not include relevant glossary references.
     * @return a list of RxVersion objects
     */
    public List<RxVersion> rxVersions() {
        WebTarget target = createTarget("/rx/versions");
        return getBuilder(target).get(new GenericType<List<RxVersion>>() {});
    }

    /**
     * Return a complete Rx entity based on identifier
     * @param version Rx version
     * @param id Rx identifier
     * @return a Rx object
     */
    public Rx rxById(String version, String id) {
        return rxById(version, id, false);
    }

    /**
     * Return a complete Rx entity based on identifier
     * @param version Rx version
     * @param id Rx identifier
     * @param includeGlossary if true, include the glossary
     * @return a Rx object
     */
    public Rx rxById(String version, String id, boolean includeGlossary) {
        WebTarget target = createTarget("/rx/{version}/id/{id}")
                .resolveTemplate("version", version)
                .resolveTemplate("id", id)
                .queryParam("glossary", includeGlossary);
        return getBuilder(target).get(Rx.class);
    }

    /**
     * Return a list of matching Rx entities
     * @param version Rx version
     * @param search RxSearch object
     * @return a RxSearchResults object
     */
    public RxSearchResults rxSearch(String version, RxSearch search) {
        WebTarget target = createTarget("/rx/{version}").resolveTemplate("version", version);

        target = target.queryParam("q", search.getQuery())
                .queryParam("type", search.getType())
                .queryParam("do_not_code", search.getDoNotCode())
                .queryParam("category", search.getCategory())
                .queryParam("mode", search.getMode())
                .queryParam("status", search.getStatus())
                .queryParam("assigned_to", search.getAssignedTo())
                .queryParam("modified_from", search.getModifiedFrom())
                .queryParam("modified_to", search.getModifiedTo())
                .queryParam("published_from", search.getPublishedFrom())
                .queryParam("published_to", search.getPublishedTo())
                .queryParam("been_published", search.getBeenPublished())
                .queryParam("hidden", search.getHidden())
                .queryParam("count", search.getCount())
                .queryParam("offset", search.getOffset())
                .queryParam("order", search.getOrderBy())
                .queryParam("output_type", search.getOutputType());

        return getBuilder(target).get(RxSearchResults.class);
    }

    /**
     * Return the changelog entries for the passed database version
     * @param version Rx version
     * @param fromDate if not null, only include changes from this date forward (YYYY-MM-DD)
     * @param toDate if not null, only include changes prior to this date (YYYY-MM-DD)
     * @param count if not null, limit the number returned
     * @return a RxChangelogResults object
     */
    public RxChangelogResults rxChangelogs(String version, String fromDate, String toDate, Integer count) {
        WebTarget target = createTarget("/rx/{version}/changelog")
                .resolveTemplate("version", version)
                .queryParam("from", fromDate)
                .queryParam("to", toDate)
                .queryParam("count", count);
        return getBuilder(target).get(RxChangelogResults.class);
    }

    /**
     * Return a list of all supported staging algorithms
     * @return a list of StagingAlgorithm objects
     */
    public List<StagingAlgorithm> stagingAlgorithms() {
        WebTarget target = createTarget("/staging/algorithms");
        return getBuilder(target).get(new GenericType<List<StagingAlgorithm>>() {});
    }

    /**
     * Return a list of supported versions for the passed algorithm
     * @param algorithm an algorithm identifier
     * @return a list of StagingVersion objects
     */
    public List<StagingVersion> stagingAlgorithmVersions(String algorithm) {
        WebTarget target = createTarget("/staging/{algorithm}/versions").resolveTemplate("algorithm", algorithm);
        return getBuilder(target).get(new GenericType<List<StagingVersion>>() {});
    }

    /**
     * Return a list of matching schemas
     * @param algorithm an algorithm identifier
     * @param version a version
     * @param query an optional text query
     * @return a list of schemas
     */
    public List<StagingSchemaInfo> stagingSchemas(String algorithm, String version, String query) {
        WebTarget target = createTarget("/staging/{algorithm}/{version}/schemas")
                .resolveTemplate("algorithm", algorithm)
                .resolveTemplate("version", version)
                .queryParam("q", query);
        return getBuilder(target).get(new GenericType<List<StagingSchemaInfo>>() {});
    }

    /**
     * Perform a schema lookup
     * @param algorithm an algorithm identifier
     * @param version a version
     * @param data a StagingData object containing the input for the lookup
     * @return a list of StagingSchemaInfo objects matching the lookup inputs
     */
    public List<StagingSchemaInfo> stagingSchemaLookup(String algorithm, String version, SchemaLookup data) {
        WebTarget target = createTarget("/staging/{algorithm}/{version}/schemas/lookup")
                .resolveTemplate("algorithm", algorithm)
                .resolveTemplate("version", version);
        return getBuilder(target).post(Entity.json(data.getInputs()), new GenericType<List<StagingSchemaInfo>>() {});
    }

    /**
     * Return a single schema definition by schema identifier
     * @param algorithm an algorithm identifier
     * @param version a version
     * @param id a schema identifier
     * @return a schema object
     */
    public StagingSchema stagingSchemaById(String algorithm, String version, String id) {
        WebTarget target = createTarget("/staging/{algorithm}/{version}/schema/{id}")
                .resolveTemplate("algorithm", algorithm)
                .resolveTemplate("version", version)
                .resolveTemplate("id", id);
        return getBuilder(target).get(StagingSchema.class);
    }

    /**
     * Return a list of tables which are involved in the specified schema
     * @param algorithm an algorithm identifier
     * @param version a version
     * @param id a schema identifier
     * @return a list of StagingTable objects involved in the schema
     */
    public List<StagingTable> stagingSchemaInvolvedTables(String algorithm, String version, String id) {
        WebTarget target = createTarget("/staging/{algorithm}/{version}/schema/{id}/tables")
                .resolveTemplate("algorithm", algorithm)
                .resolveTemplate("version", version)
                .resolveTemplate("id", id);
        return getBuilder(target).get(new GenericType<List<StagingTable>>() {});
    }

    /**
     * Return a list of matching tables
     * @param algorithm an algorithm identifier
     * @param version a version
     * @param query an optional text query
     * @return a list of matching StagingTable objects
     */
    public List<StagingTable> stagingTables(String algorithm, String version, String query) {
        WebTarget target = createTarget("/staging/{algorithm}/{version}/tables")
                .resolveTemplate("algorithm", algorithm)
                .resolveTemplate("version", version)
                .queryParam("q", query);
        return getBuilder(target).get(new GenericType<List<StagingTable>>() {});
    }

    /**
     * Return a single table definition by table identifier
     * @param algorithm an algorithm identifier
     * @param version a version
     * @param id a table identifier
     * @return a StagingTable object
     */
    public StagingTable stagingTableById(String algorithm, String version, String id) {
        WebTarget target = createTarget("/staging/{algorithm}/{version}/table/{id}")
                .resolveTemplate("algorithm", algorithm)
                .resolveTemplate("version", version)
                .resolveTemplate("id", id);
        return getBuilder(target).get(StagingTable.class);
    }

    /**
     * Return a list of schemas which the specified table is involved in
     * @param algorithm an algorithm identifier
     * @param version a version
     * @param id a table identifier
     * @return a list of StagingSchema objects the table is involved in
     */
    public List<StagingSchema> stagingTableInvolvedSchemas(String algorithm, String version, String id) {
        WebTarget target = createTarget("/staging/{algorithm}/{version}/table/{id}/schemas")
                .resolveTemplate("algorithm", algorithm)
                .resolveTemplate("version", version)
                .resolveTemplate("id", id);
        return getBuilder(target).get(new GenericType<List<StagingSchema>>() {});
    }

    /**
     * Stage the passed input
     * @param algorithm an algorithm identifier
     * @param version a version
     * @param data a StagingData object containing the input for the staging call
     * @return a StagingData object with the staging results filled in
     */
    public StagingData stagingStage(String algorithm, String version, StagingData data) {
        WebTarget target = createTarget("/staging/{algorithm}/{version}/stage")
                .resolveTemplate("algorithm", algorithm)
                .resolveTemplate("version", version);
        return getBuilder(target).post(Entity.json(data.getInput()), StagingData.class);
    }

    /**
     * Create a new staging schema
     * @param schema the StagingSchema to create (algorithm/version taken from the schema itself)
     * @param comment a comment describing the change
     * @return the created StagingSchema as returned by the server
     */
    public StagingSchema stagingCreateSchema(StagingSchema schema, String comment) {
        WebTarget target = createTarget("/staging/{algorithm}/{version}/schema")
                .resolveTemplate("algorithm", schema.getAlgorithm())
                .resolveTemplate("version", schema.getVersion());
        target = target.queryParam("comment", comment);
        return getBuilder(target).post(Entity.json(schema), StagingSchema.class);
    }

    /**
     * Update an existing staging schema
     * @param schema the StagingSchema to update (algorithm/version/id taken from the schema itself)
     * @param comment a comment describing the change
     * @return the updated StagingSchema as returned by the server
     */
    public StagingSchema stagingUpdateSchema(StagingSchema schema, String comment) {
        WebTarget target = createTarget("/staging/{algorithm}/{version}/schema/{id}")
                .resolveTemplate("algorithm", schema.getAlgorithm())
                .resolveTemplate("version", schema.getVersion())
                .resolveTemplate("id", schema.getId());
        target = target.queryParam("comment", comment);
        return getBuilder(target).put(Entity.json(schema), StagingSchema.class);
    }

    /**
     * Delete a staging schema
     * @param algorithm an algorithm identifier
     * @param version a version
     * @param id a schema identifier
     * @param comment a comment describing the change
     */
    public void stagingDeleteSchema(String algorithm, String version, String id, String comment) {
        WebTarget target = createTarget("/staging/{algorithm}/{version}/schema/{id}")
                .resolveTemplate("algorithm", algorithm)
                .resolveTemplate("version", version)
                .resolveTemplate("id", id);
        target = target.queryParam("comment", comment);
        getBuilder(target).delete();
    }

    /**
     * Create a new staging table
     * @param table the StagingTable to create (algorithm/version taken from the table itself)
     * @param comment a comment describing the change
     * @return the created StagingTable as returned by the server
     */
    public StagingTable stagingCreateTable(StagingTable table, String comment) {
        WebTarget target = createTarget("/staging/{algorithm}/{version}/table")
                .resolveTemplate("algorithm", table.getAlgorithm())
                .resolveTemplate("version", table.getVersion());
        target = target.queryParam("comment", comment);
        return getBuilder(target).post(Entity.json(table), StagingTable.class);
    }

    /**
     * Update an existing staging table
     * @param table the StagingTable to update (algorithm/version/id taken from the table itself)
     * @param comment a comment describing the change
     * @return the updated StagingTable as returned by the server
     */
    public StagingTable stagingUpdateTable(StagingTable table, String comment) {
        WebTarget target = createTarget("/staging/{algorithm}/{version}/table/{id}")
                .resolveTemplate("algorithm", table.getAlgorithm())
                .resolveTemplate("version", table.getVersion())
                .resolveTemplate("id", table.getId());
        target = target.queryParam("comment", comment);
        return getBuilder(target).put(Entity.json(table), StagingTable.class);
    }

    /**
     * Delete a staging table
     * @param algorithm an algorithm identifier
     * @param version a version
     * @param id a table identifier
     * @param comment a comment describing the change
     */
    public void stagingDeleteTable(String algorithm, String version, String id, String comment) {
        WebTarget target = createTarget("/staging/{algorithm}/{version}/table/{id}")
                .resolveTemplate("algorithm", algorithm)
                .resolveTemplate("version", version)
                .resolveTemplate("id", id);
        target = target.queryParam("comment", comment);
        getBuilder(target).delete();
    }
}
package org.intermine.dwr; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.lang.reflect.Constructor; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.TreeSet; import javax.mail.MessagingException; import javax.servlet.ServletContext; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpSession; import org.apache.commons.beanutils.PropertyUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; import org.apache.lucene.queryParser.ParseException; import org.apache.struts.Globals; import org.apache.struts.util.MessageResources; import org.directwebremoting.WebContext; import org.directwebremoting.WebContextFactory; import org.intermine.InterMineException; import org.intermine.api.InterMineAPI; import org.intermine.api.bag.BagManager; import org.intermine.api.bag.TypeConverter; import org.intermine.api.bag.UnknownBagTypeException; import org.intermine.api.mines.FriendlyMineManager; import org.intermine.api.mines.FriendlyMineQueryRunner; import org.intermine.api.mines.Mine; import org.intermine.api.profile.BagDoesNotExistException; import org.intermine.api.profile.BagState; import org.intermine.api.profile.InterMineBag; import org.intermine.api.profile.Profile; import org.intermine.api.profile.ProfileAlreadyExistsException; import org.intermine.api.profile.ProfileManager; import org.intermine.api.profile.SavedQuery; import org.intermine.api.profile.TagManager; import org.intermine.api.profile.UserAlreadyShareBagException; import org.intermine.api.profile.UserNotFoundException; import 
org.intermine.api.query.WebResultsExecutor; import org.intermine.api.results.WebTable; import org.intermine.api.search.SearchRepository; import org.intermine.api.search.SearchResults; import org.intermine.api.search.SearchTarget; import org.intermine.api.search.TagFilter; import org.intermine.api.search.WebSearchable; import org.intermine.api.tag.TagNames; import org.intermine.api.tag.TagTypes; import org.intermine.api.template.ApiTemplate; import org.intermine.api.template.TemplateManager; import org.intermine.api.template.TemplateSummariser; import org.intermine.api.util.NameUtil; import org.intermine.metadata.ClassDescriptor; import org.intermine.metadata.FieldDescriptor; import org.intermine.metadata.ReferenceDescriptor; import org.intermine.objectstore.ObjectStore; import org.intermine.objectstore.ObjectStoreException; import org.intermine.objectstore.query.Query; import org.intermine.objectstore.query.ResultsRow; import org.intermine.pathquery.OrderDirection; import org.intermine.pathquery.Path; import org.intermine.pathquery.PathConstraint; import org.intermine.pathquery.PathException; import org.intermine.pathquery.PathQuery; import org.intermine.template.TemplateQuery; import org.intermine.util.MailUtils; import org.intermine.util.StringUtil; import org.intermine.util.TypeUtil; import org.intermine.web.autocompletion.AutoCompleter; import org.intermine.web.displayer.InterMineLinkGenerator; import org.intermine.web.logic.Constants; import org.intermine.web.logic.PortalHelper; import org.intermine.web.logic.bag.BagConverter; import org.intermine.web.logic.config.WebConfig; import org.intermine.web.logic.profile.UpgradeBagList; import org.intermine.web.logic.query.PageTableQueryMonitor; import org.intermine.web.logic.query.QueryMonitorTimeout; import org.intermine.web.logic.results.PagedTable; import org.intermine.web.logic.results.WebState; import org.intermine.web.logic.session.QueryCountQueryMonitor; import org.intermine.web.logic.session.SessionMethods; 
import org.json.JSONException; import org.json.JSONObject; /** * This class contains the methods called through DWR Ajax * * @author Xavier Watkins * @author Daniela Butano * */ public class AjaxServices { protected static final Logger LOG = Logger.getLogger(AjaxServices.class); private static final Object ERROR_MSG = "Error happened during DWR ajax service."; private static final Set<String> NON_WS_TAG_TYPES = new HashSet<String>(Arrays.asList( TagTypes.CLASS, TagTypes.COLLECTION, TagTypes.REFERENCE)); /** * Creates a favourite Tag for the given templateName * * @param name the name of the template we want to set as a favourite * @param type type of tag (bag or template) * @param isFavourite whether or not this item is currently a favourite */ public void setFavourite(String name, String type, boolean isFavourite) { String nameCopy = name.replaceAll("#039;", "'"); try { // already a favourite. turning off. if (isFavourite) { AjaxServices.deleteTag(TagNames.IM_FAVOURITE, nameCopy, type); // not a favourite. turning on. 
} else { AjaxServices.addTag(TagNames.IM_FAVOURITE, nameCopy, type); } } catch (Exception e) { processException(e); } } private static void processException(Exception e) { LOG.error(ERROR_MSG, e); if (e instanceof RuntimeException) { throw (RuntimeException) e; } throw new RuntimeException(e); } /** * Precomputes the given template query * @param templateName the template query name * @return a String to guarantee the service ran properly */ public String preCompute(String templateName) { try { WebContext ctx = WebContextFactory.get(); HttpSession session = ctx.getSession(); final InterMineAPI im = SessionMethods.getInterMineAPI(session); Profile profile = SessionMethods.getProfile(session); Map<String, ApiTemplate> templates = profile.getSavedTemplates(); TemplateQuery t = templates.get(templateName); WebResultsExecutor executor = im.getWebResultsExecutor(profile); try { session.setAttribute("precomputing_" + templateName, "true"); executor.precomputeTemplate(t); } catch (ObjectStoreException e) { LOG.error("Error while precomputing", e); } finally { session.removeAttribute("precomputing_" + templateName); } } catch (RuntimeException e) { processException(e); } return "precomputed"; } /** * Summarises the given template query. 
* * @param templateName the template query name * @return a String to guarantee the service ran properly */ public String summarise(String templateName) { try { WebContext ctx = WebContextFactory.get(); HttpSession session = ctx.getSession(); final InterMineAPI im = SessionMethods.getInterMineAPI(session); Profile profile = SessionMethods.getProfile(session); Map<String, ApiTemplate> templates = profile.getSavedTemplates(); ApiTemplate template = templates.get(templateName); TemplateSummariser summariser = im.getTemplateSummariser(); try { session.setAttribute("summarising_" + templateName, "true"); summariser.summarise(template); } catch (ObjectStoreException e) { LOG.error("Failed to summarise " + templateName, e); } catch (NullPointerException e) { NullPointerException e2 = new NullPointerException("No such template " + templateName); e2.initCause(e); throw e2; } finally { session.removeAttribute("summarising_" + templateName); } } catch (RuntimeException e) { processException(e); } return "summarised"; } /** * Rename a element such as history, name, bag * @param name the name of the element * @param type history, saved, bag * @param reName the new name for the element * @return the new name of the element as a String * @exception Exception if the application business logic throws * an exception */ public String rename(String name, String type, String reName) throws Exception { String newName; try { newName = reName.trim(); WebContext ctx = WebContextFactory.get(); HttpSession session = ctx.getSession(); Profile profile = SessionMethods.getProfile(session); SavedQuery sq; if (name.equals(newName) || StringUtils.isEmpty(newName)) { return name; } // TODO get error text from properties file if (!NameUtil.isValidName(newName)) { return "<i>" + NameUtil.INVALID_NAME_MSG + "</i>"; } if ("history".equals(type)) { if (profile.getHistory().get(name) == null) { return "<i>" + name + " does not exist</i>"; } if (profile.getHistory().get(newName) != null) { return "<i>" + 
newName + " already exists</i>"; } profile.renameHistory(name, newName); } else if ("saved".equals(type)) { if (profile.getSavedQueries().get(name) == null) { return "<i>" + name + " does not exist</i>"; } if (profile.getSavedQueries().get(newName) != null) { return "<i>" + newName + " already exists</i>"; } sq = profile.getSavedQueries().get(name); profile.deleteQuery(sq.getName()); sq = new SavedQuery(newName, sq.getDateCreated(), sq.getPathQuery()); profile.saveQuery(sq.getName(), sq); } else if ("bag".equals(type)) { try { profile.renameBag(name, newName); } catch (IllegalArgumentException e) { return "<i>" + name + " does not exist</i>"; } catch (ProfileAlreadyExistsException e) { return "<i>" + newName + " already exists</i>"; } } else if ("invalid.bag.type".equals(type)) { try { profile.fixInvalidBag(name, newName); InterMineAPI im = SessionMethods.getInterMineAPI(session); new Thread(new UpgradeBagList(profile, im.getBagQueryRunner(), session)) .start(); } catch (UnknownBagTypeException e) { return "<i>" + e.getMessage() + "</i>"; } catch (ObjectStoreException e) { return "<i>Error fixing type</i>"; } } else { return "Type unknown"; } return newName; } catch (RuntimeException e) { processException(e); return null; } } /** * Generate a new API key for a given user. * @param username the user to generate the key for. * @return A new API key, or null if something untoward happens. * @throws Exception an exception. */ public String generateApiKey(String username) throws Exception { try { WebContext ctx = WebContextFactory.get(); HttpSession session = ctx.getSession(); final InterMineAPI im = SessionMethods.getInterMineAPI(session); final ProfileManager pm = im.getProfileManager(); Profile p = pm.getProfile(username); return pm.generateApiKey(p); } catch (RuntimeException e) { processException(e); return null; } } /** * Delete a user's API key, thus disabling webservice access. A message "deleted" * is returned to confirm success. 
* @param username The user whose key we should delete. * @return A confirmation string. * @throws Exception if somethign bad happens */ public String deleteApiKey(String username) throws Exception { try { WebContext ctx = WebContextFactory.get(); HttpSession session = ctx.getSession(); final InterMineAPI im = SessionMethods.getInterMineAPI(session); final ProfileManager pm = im.getProfileManager(); Profile p = pm.getProfile(username); p.setApiKey(null); return "deleted"; } catch (RuntimeException e) { processException(e); return null; } } /** * For a given bag, set its description * @param bagName the bag * @param description the description as entered by the user * @return the description for display on the jsp page * @throws Exception an exception */ public String saveBagDescription(String bagName, String description) throws Exception { try { WebContext ctx = WebContextFactory.get(); HttpSession session = ctx.getSession(); Profile profile = SessionMethods.getProfile(session); InterMineBag bag = profile.getSavedBags().get(bagName); if (bag == null) { throw new InterMineException("List \"" + bagName + "\" not found."); } bag.setDescription(description); return description; } catch (RuntimeException e) { processException(e); return null; } } /** * Set the description of a view path. 
* @param pathString the string representation of the path * @param description the new description * @return the description, or null if the description was empty */ public String changeViewPathDescription(String pathString, String description) { try { String descr = description; if (description.trim().length() == 0) { descr = null; } WebContext ctx = WebContextFactory.get(); HttpSession session = ctx.getSession(); PathQuery query = SessionMethods.getQuery(session); Path path = query.makePath(pathString); Path prefixPath = path.getPrefix(); if (descr == null) { // setting to null removes the description query.setDescription(prefixPath.getNoConstraintsString(), null); } else { query.setDescription(prefixPath.getNoConstraintsString(), descr); } if (descr == null) { return null; } return descr.replaceAll("&", "&amp;").replaceAll("<", "&lt;").replaceAll(">", "&gt;"); } catch (RuntimeException e) { processException(e); return null; } catch (PathException e) { processException(e); return null; } } /* * Cannot be refactored from AjaxServices, else WebContextFactory.get() returns null */ private static WebState getWebState() { HttpSession session = WebContextFactory.get().getSession(); return SessionMethods.getWebState(session); } /** * This method gets a map of ids of elements that were in the past (during session) toggled and * returns them in JSON * @return JSON serialized to a String * @throws JSONException */ public static String getToggledElements() { HttpSession session = WebContextFactory.get().getSession(); WebState webState = SessionMethods.getWebState(session); Collection<JSONObject> lists = new HashSet<JSONObject>(); try { for (Map.Entry<String, Boolean> entry : webState.getToggledElements().entrySet()) { JSONObject list = new JSONObject(); list.put("id", entry.getKey()); list.put("opened", entry.getValue().toString()); lists.add(list); } } catch (JSONException jse) { LOG.error("Errors generating json objects", jse); } return lists.toString(); } /** * Get the 
summary for the given column * @param summaryPath the path for the column as a String * @param tableName name of column-owning table * @return a collection of rows * @throws Exception an exception */ public static List getColumnSummary(String tableName, String summaryPath) throws Exception { try { WebContext ctx = WebContextFactory.get(); HttpSession session = ctx.getSession(); final InterMineAPI im = SessionMethods.getInterMineAPI(session); Profile profile = SessionMethods.getProfile(session); WebResultsExecutor webResultsExecutor = im.getWebResultsExecutor(profile); WebTable webTable = (SessionMethods.getResultsTable(session, tableName)) .getWebTable(); PathQuery pathQuery = webTable.getPathQuery(); List<ResultsRow> results = (List) webResultsExecutor.summariseQuery(pathQuery, summaryPath); // Start the count of results Query countQuery = webResultsExecutor.makeSummaryQuery(pathQuery, summaryPath); QueryCountQueryMonitor clientState = new QueryCountQueryMonitor(Constants.QUERY_TIMEOUT_SECONDS * 1000, countQuery); MessageResources messages = (MessageResources) ctx.getHttpServletRequest() .getAttribute(Globals.MESSAGES_KEY); String qid = SessionMethods.startQueryCount(clientState, session, messages); List<ResultsRow> pageSizeResults = new ArrayList<ResultsRow>(); int rowCount = 0; for (ResultsRow row : results) { rowCount++; if (rowCount > 10) { break; } pageSizeResults.add(row); } return Arrays.asList(new Object[] {pageSizeResults, qid, new Integer(rowCount)}); } catch (RuntimeException e) { processException(e); return null; } } /** * Return the number of rows of results from the query with the given query id. If the size * isn't yet available, return null. The query must be started with * SessionMethods.startPagedTableCount(). 
* @param qid the id * @return the row count or null if not yet available */ public static Integer getResultsSize(String qid) { try { WebContext ctx = WebContextFactory.get(); HttpSession session = ctx.getSession(); QueryMonitorTimeout controller = (QueryMonitorTimeout) SessionMethods.getRunningQueryController(qid, session); // this could happen if the user navigates away then back to the page if (controller == null) { return null; } // First tickle the controller to avoid timeout controller.tickle(); if (controller.isCancelledWithError()) { LOG.debug("query qid " + qid + " error"); return null; } else if (controller.isCancelled()) { LOG.debug("query qid " + qid + " cancelled"); return null; } else if (controller.isCompleted()) { LOG.debug("query qid " + qid + " complete"); if (controller instanceof PageTableQueryMonitor) { PagedTable pt = ((PageTableQueryMonitor) controller).getPagedTable(); return new Integer(pt.getExactSize()); } if (controller instanceof QueryCountQueryMonitor) { return new Integer(((QueryCountQueryMonitor) controller).getCount()); } LOG.debug("query qid " + qid + " - unknown controller type"); return null; } else { // query still running LOG.debug("query qid " + qid + " still running, making client wait"); return null; } } catch (RuntimeException e) { processException(e); return null; } } /** * Given a scope, type, tags and some filter text, produce a list of matching WebSearchable, in * a format useful in JavaScript. * <p> * Each element of the returned List is a List containing a * WebSearchable name, a score (from Lucene) and a string with the matching parts of the * description highlighted. * <p> * ie - search for "<code>me</code>": * <pre> * [ * [ "Some name", 0.123, "So&lt;i&gt;me&lt;/i&gt; name" ], * ... * ] * </pre> * * @param scope the scope (either Scope.GLOBAL or Scope.USER). * @param type the type (from TagTypes). * @param tags the tags to filter on. * @param filterText the text to pass to Lucene. 
* @param filterAction toggles favourites filter off and on; will be blank or 'favourites' * @param callId unique id * @return a List of Lists */ public static List<String> filterWebSearchables(String scope, String type, List<String> tags, String filterText, String filterAction, String callId) { try { final HttpSession session = WebContextFactory.get().getSession(); final InterMineAPI im = SessionMethods.getInterMineAPI(session); final ProfileManager pm = im.getProfileManager(); final Profile profile = SessionMethods.getProfile(session); final SearchRepository userRepository = profile.getSearchRepository(); final SearchTarget target = new SearchTarget(scope, type); final SearchResults results; try { results = SearchResults.runLuceneSearch(filterText, target, userRepository); } catch (ParseException e) { LOG.error("couldn't run lucene filter", e); ArrayList<String> emptyList = new ArrayList<String>(); emptyList.add(callId); return emptyList; } catch (IOException e) { LOG.error("couldn't run lucene filter", e); ArrayList<String> emptyList = new ArrayList<String>(); emptyList.add(callId); return emptyList; } //Filter by aspects (defined in superuser account) List<String> aspectTags = new ArrayList<String>(); List<String> userTags = new ArrayList<String>(); for (String tag :tags) { if (tag.startsWith(TagNames.IM_ASPECT_PREFIX)) { aspectTags.add(tag); } else { if (profile.getUsername() != null) { // Only allow filtering from registered users. 
userTags.add(tag); } } } TagFilter aspects = new TagFilter(aspectTags, pm.getSuperuserProfile(), type); TagFilter requiredTags = new TagFilter(userTags, profile, type); List returnList = new ArrayList(); returnList.add(callId); for (org.intermine.api.search.SearchResult sr: results) { WebSearchable ws = sr.getItem(); if (SearchResults.isInvalidTemplate(ws)) { continue; } if (!(aspects.hasAllTags(ws) && requiredTags.hasAllTags(ws))) { continue; } returnList.add(sr.asList()); } return returnList; } catch (RuntimeException e) { processException(e); return null; } } /** * For a given bag name and a type different from the bag type, give the number of * converted objects * * @param bagName the name of the bag * @param type the type to convert to * @return the number of converted objects */ public static int getConvertCountForBag(String bagName, String type) { try { HttpSession session = WebContextFactory.get().getSession(); final InterMineAPI im = SessionMethods.getInterMineAPI(session); String pckName = im.getModel().getPackageName(); Profile profile = SessionMethods.getProfile(session); BagManager bagManager = im.getBagManager(); TemplateManager templateManager = im.getTemplateManager(); WebResultsExecutor webResultsExecutor = im.getWebResultsExecutor(profile); int count = 0; InterMineBag imBag = bagManager.getBag(profile, bagName); List<ApiTemplate> conversionTemplates = templateManager.getConversionTemplates(); PathQuery pathQuery = TypeConverter.getConversionQuery(conversionTemplates, TypeUtil.instantiate(pckName + "." + imBag.getType()), TypeUtil.instantiate(pckName + "." 
+ type), imBag); count = webResultsExecutor.count(pathQuery); return count; } catch (Exception e) { LOG.error("failed to get type converted counts", e); return 0; } } /** * For a list and a converter, return types and related counts * * @param bagName the name of the bag * @param converterName Java class that processes the data * @return the number of converted objects */ public static String getCustomConverterCounts(String bagName, String converterName) { try { final HttpSession session = WebContextFactory.get().getSession(); final InterMineAPI im = SessionMethods.getInterMineAPI(session); final Profile profile = SessionMethods.getProfile(session); final BagManager bagManager = im.getBagManager(); final InterMineBag imBag = bagManager.getBag(profile, bagName); final ServletContext servletContext = WebContextFactory.get().getServletContext(); final WebConfig webConfig = SessionMethods.getWebConfig(servletContext); final BagConverter bagConverter = PortalHelper.getBagConverter(im, webConfig, converterName); // should be ordered Map<String, String> results = bagConverter.getCounts(profile, imBag); List<JSONObject> jsonResults = new LinkedList<JSONObject>(); for (Map.Entry<String, String> entry : results.entrySet()) { JSONObject organism = new JSONObject(); organism.put("name", entry.getKey()); organism.put("count", entry.getValue()); jsonResults.add(organism); } return jsonResults.toString(); } catch (Exception e) { LOG.error("failed to get custom converter counts", e); return null; } } /** * used on REPORT page * * For a gene, generate links to other intermines. Include gene and orthologues. * * Returns NULL if no values found. It's possible that the identifier in the local mine will * match more than one entry in the remote mine but this will be handled by the portal of the * remote mine. 
* * @param mineName name of mine to query * @param organisms gene.organism * @param identifiers identifiers for gene * @return the links to friendly intermines */ public static String getFriendlyMineLinks(String mineName, String organisms, String identifiers) { if (StringUtils.isEmpty(mineName) || StringUtils.isEmpty(organisms) || StringUtils.isEmpty(identifiers)) { return null; } final HttpSession session = WebContextFactory.get().getSession(); final InterMineAPI im = SessionMethods.getInterMineAPI(session); final ServletContext servletContext = WebContextFactory.get().getServletContext(); final Properties webProperties = SessionMethods.getWebProperties(servletContext); final FriendlyMineManager fmm = FriendlyMineManager.getInstance(im, webProperties); InterMineLinkGenerator linkGen = null; Constructor<?> constructor; try { Class<?> clazz = TypeUtil.instantiate( "org.intermine.bio.web.displayer.FriendlyMineLinkGenerator"); constructor = clazz.getConstructor(new Class[] {}); linkGen = (InterMineLinkGenerator) constructor.newInstance(new Object[] {}); } catch (Exception e) { LOG.error("Failed to instantiate FriendlyMineLinkGenerator because: " + e); return null; } Collection<JSONObject> results = linkGen.getLinks(fmm, mineName, organisms, identifiers); if (results == null || results.isEmpty()) { return null; } return results.toString(); } /** * used on REPORT page * * For a gene, display pathways found in other mines for orthologous genes * * @param mineName mine to query * @param orthologues list of genes to query for * @return the links to friendly intermines */ public static String getFriendlyMinePathways(String mineName, String orthologues) { if (StringUtils.isEmpty(orthologues)) { return null; } Mine mine; HttpSession session = WebContextFactory.get().getSession(); final InterMineAPI im = SessionMethods.getInterMineAPI(session); final ServletContext servletContext = WebContextFactory.get().getServletContext(); final Properties webProperties = 
SessionMethods.getWebProperties(servletContext); final FriendlyMineManager linkManager = FriendlyMineManager.getInstance(im, webProperties); mine = linkManager.getMine(mineName); if (mine == null || mine.getReleaseVersion() == null) { // mine is dead return null; } final String xmlQuery = getXMLQuery("FriendlyMinesPathways.xml", orthologues); try { JSONObject results = FriendlyMineQueryRunner.runJSONWebServiceQuery(mine, xmlQuery); if (results == null) { LOG.error("Couldn't query " + mine.getName() + " for pathways"); return null; } results.put("mineURL", mine.getUrl()); return results.toString(); } catch (IOException e) { LOG.error("Couldn't query " + mine.getName() + " for pathways", e); return null; } catch (JSONException e) { LOG.error("Error adding Mine URL to pathways results", e); return null; } catch (Throwable t) { LOG.error(t); return null; } } private static String getXMLQuery(String filename, Object... positionalArgs) { try { return String.format( IOUtils.toString( AjaxServices.class.getResourceAsStream(filename)), positionalArgs); } catch (IOException e) { LOG.error(e); throw new RuntimeException("Could not read " + filename, e); } catch (NullPointerException npe) { LOG.error(npe); throw new RuntimeException(filename + " not found", npe); } catch (Throwable e) { LOG.error(e); throw new RuntimeException("Unexpected exception", e); } } /** * Return list of disease ontology terms associated with list of provided rat genes. 
Returns * JSONObject as string with ID (intermine ID) and name (ontologyTerm.name) * * @param orthologues list of rat genes * @return JSONobject.toString of JSON object */ @SuppressWarnings("unchecked") public static String getRatDiseases(String orthologues) { if (StringUtils.isEmpty(orthologues)) { return null; } HttpSession session = WebContextFactory.get().getSession(); final InterMineAPI im = SessionMethods.getInterMineAPI(session); final ServletContext servletContext = WebContextFactory.get().getServletContext(); final Properties webProperties = SessionMethods.getWebProperties(servletContext); final FriendlyMineManager linkManager = FriendlyMineManager.getInstance(im, webProperties); Mine mine = linkManager.getMine("RatMine"); HashMap<String, Object> map = new HashMap<String, Object>(); if (mine == null || mine.getReleaseVersion() == null) { // mine is dead map.put("status", "offline"); return new JSONObject(map).toString(); } final String xmlQuery = getXMLQuery("RatDiseases.xml", orthologues); try { JSONObject results = FriendlyMineQueryRunner.runJSONWebServiceQuery(mine, xmlQuery); if (results != null) { results.put("mineURL", mine.getUrl()); results.put("status", "online"); return results.toString(); } } catch (IOException e) { LOG.error("Couldn't query ratmine for diseases", e); } catch (JSONException e) { LOG.error("Couldn't process ratmine disease results", e); } return null; } /** * Saves information, that some element was toggled - displayed or hidden. * * @param elementId element id * @param opened new aspect state */ public static void saveToggleState(String elementId, boolean opened) { try { AjaxServices.getWebState().getToggledElements().put(elementId, Boolean.valueOf(opened)); } catch (RuntimeException e) { processException(e); } } /** * Set state that should be saved during the session. 
* @param name name of state * @param value value of state */ public static void setState(String name, String value) { try { AjaxServices.getWebState().setState(name, value); } catch (RuntimeException e) { processException(e); } } /** * validate bag upload * @param bagName name of new bag to be validated * @return error msg to display, if any */ public static String validateBagName(String bagName) { try { HttpSession session = WebContextFactory.get().getSession(); final InterMineAPI im = SessionMethods.getInterMineAPI(session); Profile profile = SessionMethods.getProfile(session); BagManager bagManager = im.getBagManager(); bagName = bagName.trim(); // TODO get message text from the properties file if ("".equals(bagName)) { return "You cannot save a list with a blank name"; } if (!NameUtil.isValidName(bagName)) { return TagManager.INVALID_NAME_MSG; } if (profile.getSavedBags().get(bagName) != null) { return "The list name you have chosen is already in use"; } if (bagManager.getGlobalBag(bagName) != null) { return "The list name you have chosen is already in use -" + " there is a public list called " + bagName; } return ""; } catch (RuntimeException e) { processException(e); return null; } } /** * validation that happens before new bag is saved * @param bagName name of new list * @param selectedBags bags involved in operation * @param operation which operation is taking place - delete, union, intersect or subtract * @return error msg, if any */ public static String validateBagOperations(String bagName, String[] selectedBags, String operation) { try { ServletContext servletContext = WebContextFactory.get().getServletContext(); HttpSession session = WebContextFactory.get().getSession(); Profile profile = SessionMethods.getProfile(session); // TODO get error text from the properties file if (selectedBags.length == 0) { return "No lists are selected"; } if ("delete".equals(operation)) { for (int i = 0; i < selectedBags.length; i++) { Set<String> queries = new 
HashSet<String>(); queries.addAll(queriesThatMentionBag(profile.getSavedQueries(), selectedBags[i])); queries.addAll(queriesThatMentionBag(profile.getHistory(), selectedBags[i])); if (queries.size() > 0) { // TODO the javascript method relies on the content of this message. // which is dumb and should be fixed. in the meantime, don't change this. final String msg = "You are trying to delete the list: `" + selectedBags[i] + "`, which is used by these queries: " + queries + ". Select OK to delete the list and queries or Cancel " + "to cancel this operation."; return msg; } } for (int i = 0; i < selectedBags.length; i++) { Map allBags = profile.getAllBags(); if (!allBags.containsKey(selectedBags[i])) { return "List `" + selectedBags[i] + "` cannot be deleted as it is a shared " + "list"; } } } else if (!"copy".equals(operation)) { Properties properties = SessionMethods.getWebProperties(servletContext); String defaultName = properties.getProperty("lists.input.example"); if (bagName.equalsIgnoreCase(defaultName)) { return "New list name is required"; } else if (!NameUtil.isValidName(bagName)) { return NameUtil.INVALID_NAME_MSG; } } return ""; } catch (RuntimeException e) { processException(e); return null; } } /** * Provide a list of queries that mention a named bag * @param savedQueries a saved queries map (name -&gt; query) * @param bagName the name of a bag * @return the list of queries */ private static List<String> queriesThatMentionBag(Map<String, SavedQuery> savedQueries, String bagName) { try { List<String> queries = new ArrayList<String>(); for (Iterator<String> i = savedQueries.keySet().iterator(); i.hasNext();) { String queryName = (String) i.next(); SavedQuery query = (SavedQuery) savedQueries.get(queryName); if (query.getPathQuery().getBagNames().contains(bagName)) { queries.add(queryName); } } return queries; } catch (RuntimeException e) { processException(e); return null; } } /** * Add an ID to the PagedTable selection * @param selectedId the id * @param 
tableId the identifier for the PagedTable * @param columnIndex the column of the selected id * @return the field values of the first selected objects */ public static List<String> selectId(String selectedId, String tableId, String columnIndex) { WebContext ctx = WebContextFactory.get(); HttpSession session = ctx.getSession(); final InterMineAPI im = SessionMethods.getInterMineAPI(session); PagedTable pt = SessionMethods.getResultsTable(session, tableId); pt.selectId(new Integer(selectedId), (new Integer(columnIndex)).intValue()); Map<String, List<FieldDescriptor>> classKeys = im.getClassKeys(); ObjectStore os = im.getObjectStore(); return pt.getFirstSelectedFields(os, classKeys); } /** * remove an Id from the PagedTable * @param deSelectId the ID to remove from the selection * @param tableId the PagedTable identifier * @return the field values of the first selected objects */ public static List<String> deSelectId(String deSelectId, String tableId) { WebContext ctx = WebContextFactory.get(); HttpSession session = ctx.getSession(); final InterMineAPI im = SessionMethods.getInterMineAPI(session); PagedTable pt = SessionMethods.getResultsTable(session, tableId); pt.deSelectId(new Integer(deSelectId)); Map<String, List<FieldDescriptor>> classKeys = im.getClassKeys(); ObjectStore os = im.getObjectStore(); return pt.getFirstSelectedFields(os, classKeys); } /** * Select all the elements in a PagedTable * @param index the index of the selected column * @param tableId the PagedTable identifier */ public static void selectAll(int index, String tableId) { HttpSession session = WebContextFactory.get().getSession(); PagedTable pt = SessionMethods.getResultsTable(session, tableId); pt.clearSelectIds(); pt.setAllSelectedColumn(index); } /** * AJAX request - reorder view. 
* @param newOrder the new order as a String * @param oldOrder the previous order as a String */ public void reorder(String newOrder, String oldOrder) { HttpSession session = WebContextFactory.get().getSession(); List<String> newOrderList = new LinkedList<String>(StringUtil.serializedSortOrderToMap(newOrder).values()); List<String> oldOrderList = new LinkedList<String>(StringUtil.serializedSortOrderToMap(oldOrder).values()); List<String> view = SessionMethods.getEditingView(session); ArrayList<String> newView = new ArrayList<String>(); for (int i = 0; i < view.size(); i++) { String newi = newOrderList.get(i); int oldi = oldOrderList.indexOf(newi); newView.add(view.get(oldi)); } PathQuery query = SessionMethods.getQuery(session); query.clearView(); query.addViews(newView); } /** * AJAX request - reorder the constraints. * @param newOrder the new order as a String * @param oldOrder the previous order as a String */ public void reorderConstraints(String newOrder, String oldOrder) { HttpSession session = WebContextFactory.get().getSession(); List<String> newOrderList = new LinkedList<String>(StringUtil.serializedSortOrderToMap(newOrder).values()); List<String> oldOrderList = new LinkedList<String>(StringUtil.serializedSortOrderToMap(oldOrder).values()); PathQuery query = SessionMethods.getQuery(session); if (query instanceof TemplateQuery) { TemplateQuery template = (TemplateQuery) query; for (int index = 0; index < newOrderList.size() - 1; index++) { String newi = newOrderList.get(index); int oldi = oldOrderList.indexOf(newi); if (index != oldi) { List<PathConstraint> editableConstraints = template.getModifiableEditableConstraints(); PathConstraint editableConstraint = editableConstraints.remove(oldi); editableConstraints.add(index, editableConstraint); template.setEditableConstraints(editableConstraints); break; } } } } /** * Add a Node from the sort order * @param path the Path as a String * @param direction the direction to sort by * @exception Exception if the 
application business logic throws */ public void addToSortOrder(String path, String direction) throws Exception { HttpSession session = WebContextFactory.get().getSession(); PathQuery query = SessionMethods.getQuery(session); OrderDirection orderDirection = OrderDirection.ASC; if ("DESC".equals(direction.toUpperCase())) { orderDirection = OrderDirection.DESC; } query.clearOrderBy(); query.addOrderBy(path, orderDirection); } /** * Work as a proxy for fetching remote file (RSS) * @param rssURL the url * @return String representation of a file */ public static String getNewsPreview(String rssURL) { try { URL url = new URL(rssURL); BufferedReader in = new BufferedReader(new InputStreamReader(url.openStream())); String str; StringBuffer sb = new StringBuffer(); // append to string buffer while ((str = in.readLine()) != null) { sb.append(str); } in.close(); return sb.toString(); } catch (MalformedURLException e) { return ""; } catch (IOException e) { return ""; } } /** * Adds tag and assures that there is only one tag for this combination of tag name, tagged * Object and type. * @param tag tag name * @param taggedObject object id that is tagged by this tag * @param type tag type * @return 'ok' string if succeeded else error string */ public static String addTag(String tag, String taggedObject, String type) { String tagName = tag; LOG.info("Called addTag(). 
tagName:" + tagName + " taggedObject:" + taggedObject + " type: " + type); if (StringUtils.isBlank(tagName)) { LOG.error("Adding tag failed"); return "tag must not be blank"; } if (StringUtils.isBlank(taggedObject)) { LOG.error("Adding tag failed"); return "object to tag must not be blank"; } try { final HttpServletRequest request = getRequest(); final Profile profile = getProfile(request); final InterMineAPI im = SessionMethods.getInterMineAPI(request); tagName = tagName.trim(); if (profile.getUsername() != null && !StringUtils.isEmpty(tagName) && !StringUtils.isEmpty(type) && !StringUtils.isEmpty(taggedObject)) { if (tagExists(tagName, taggedObject, type)) { return "Already tagged with this tag."; } TagManager tagManager = getTagManager(); BagManager bm = im.getBagManager(); TemplateManager tm = im.getTemplateManager(); if (NON_WS_TAG_TYPES.contains(type)) { if (TagTypes.CLASS.equals(type)) { ClassDescriptor cd = im.getModel().getClassDescriptorByName(taggedObject); tagManager.addTag(tagName, cd, profile); } else { String[] bits = taggedObject.split("\\."); ClassDescriptor cd = im.getModel().getClassDescriptorByName(bits[0]); FieldDescriptor fd = cd.getFieldDescriptorByName(bits[1]); if (fd.isCollection() || fd.isReference()) { tagManager.addTag(tagName, (ReferenceDescriptor) fd, profile); } } } else { WebSearchable ws = null; if (TagTypes.BAG.equals(type)) { ws = bm.getBag(profile, taggedObject); } else if (TagTypes.TEMPLATE.equals(type)) { ws = tm.getUserOrGlobalTemplate(profile, taggedObject); } if (ws == null) { throw new RuntimeException("Could not find " + type + " " + taggedObject); } else { tagManager.addTag(tagName, ws, profile); } } return "ok"; } LOG.error("Adding tag failed: tag='" + tag + "', taggedObject='" + taggedObject + "', type='" + type + "'"); return "Adding tag failed."; } catch (TagManager.TagNamePermissionException e) { LOG.error("Adding tag failed", e); return e.getMessage(); } catch (TagManager.TagNameException e) { LOG.error("Adding tag 
failed", e); return e.getMessage(); } catch (Throwable e) { LOG.error("Adding tag failed", e); return "Adding tag failed."; } } /** * Deletes tag. * @param tagName tag name * @param tagged id of tagged object * @param type tag type * @return 'ok' string if succeeded else error string */ public static String deleteTag(String tagName, String tagged, String type) { LOG.info("Called deleteTag(). tagName:" + tagName + " taggedObject:" + tagged + " type: " + type); try { HttpServletRequest request = getRequest(); HttpSession session = request.getSession(); final InterMineAPI im = SessionMethods.getInterMineAPI(session); Profile profile = getProfile(request); TagManager manager = im.getTagManager(); BagManager bm = im.getBagManager(); if (NON_WS_TAG_TYPES.contains(type)) { if (TagTypes.CLASS.equals(type)) { ClassDescriptor cd = im.getModel().getClassDescriptorByName(tagged); manager.deleteTag(tagName, cd, profile); } else { String[] bits = tagged.split("\\."); ClassDescriptor cd = im.getModel().getClassDescriptorByName(bits[0]); FieldDescriptor fd = cd.getFieldDescriptorByName(bits[1]); if (fd.isCollection() || fd.isReference()) { manager.deleteTag(tagName, (ReferenceDescriptor) fd, profile); } } return "ok"; } else { WebSearchable ws = null; if (TagTypes.BAG.equals(type)) { ws = bm.getUserBag(profile, tagged); } else if (TagTypes.TEMPLATE.equals(type)) { ws = profile.getTemplate(tagged); } if (ws == null) { throw new RuntimeException("Could not find " + type + " " + tagged); } manager.deleteTag(tagName, ws, profile); } return "ok"; } catch (Throwable e) { LOG.error("Deleting tag failed", e); return "Deleting tag failed."; } } /** * Returns all tags of specified tag type together with prefixes of these tags. * For instance: for tag 'bio:experiment' it automatically adds 'bio' tag. 
* @param type tag type * @return tags */ public static Set<String> getTags(String type) { final HttpServletRequest request = getRequest(); final InterMineAPI im = SessionMethods.getInterMineAPI(request.getSession()); final TagManager tagManager = im.getTagManager(); final Profile profile = getProfile(request); if (profile.isLoggedIn()) { return tagManager.getUserTagNames(type, profile.getUsername()); } return new TreeSet<String>(); } /** * Returns all tags by which is specified object tagged. * @param type tag type * @param tagged id of tagged object * @return tags */ public static Set<String> getObjectTags(String type, String tagged) { HttpServletRequest request = getRequest(); final InterMineAPI im = SessionMethods.getInterMineAPI(request.getSession()); TagManager tagManager = im.getTagManager(); Profile profile = getProfile(request); if (profile.isLoggedIn()) { return tagManager.getObjectTagNames(tagged, type, profile.getUsername()); } return new TreeSet<String>(); } private static boolean tagExists(String tag, String taggedObject, String type) { HttpServletRequest request = getRequest(); final InterMineAPI im = SessionMethods.getInterMineAPI(request.getSession()); TagManager tagManager = im.getTagManager(); String userName = getProfile(request).getUsername(); return tagManager.getObjectTagNames(taggedObject, type, userName).contains(tag); } private static Profile getProfile(HttpServletRequest request) { return SessionMethods.getProfile(request.getSession()); } /** * Return the single use API key for the current profile * @return the single use APi key */ public static String getSingleUseKey() { HttpServletRequest request = getRequest(); Profile profile = SessionMethods.getProfile(request.getSession()); return profile.getSingleUseKey(); } /** * Return the request retrieved from the web contest * @return the request */ private static HttpServletRequest getRequest() { return WebContextFactory.get().getHttpServletRequest(); } /** * Return the TagManager * @return 
the tag manager */
private static TagManager getTagManager() {
    HttpServletRequest request = getRequest();
    final InterMineAPI im = SessionMethods.getInterMineAPI(request.getSession());
    return im.getTagManager();
}

/**
 * Set the constraint logic on a query to be the given expression.
 *
 * @param expression the constraint logic for the query
 * @return messages to display in the jsp page
 * @throws PathException if the query is invalid
 */
public static String setConstraintLogic(String expression) throws PathException {
    WebContext ctx = WebContextFactory.get();
    HttpSession session = ctx.getSession();
    PathQuery query = SessionMethods.getQuery(session);
    query.setConstraintLogic(expression);
    // Changing the logic may force join-style fixes; surface any messages.
    List<String> messages = query.fixUpForJoinStyle();
    StringBuilder messagesToDisplay = new StringBuilder();
    for (String message : messages) {
        messagesToDisplay.append(message);
        //SessionMethods.recordMessage(message, session);
    }
    return messagesToDisplay.toString();
}

/**
 * Get the grouped constraint logic
 * @return a list representing the grouped constraint logic
 */
public static String getConstraintLogic() {
    WebContext ctx = WebContextFactory.get();
    HttpSession session = ctx.getSession();
    PathQuery query = SessionMethods.getQuery(session);
    return query.getConstraintLogic();
}

/**
 * @param suffix string of input before request for more results
 * @param wholeList whether or not to show the entire list or a truncated version
 * @param field field name from the table for the lucene search
 * @param className class name (table in the database) for lucene search
 * @return an array of values for this classname.field
 */
public String[] getContent(String suffix, boolean wholeList, String field, String className) {
    ServletContext servletContext = WebContextFactory.get().getServletContext();
    AutoCompleter ac = SessionMethods.getAutoCompleter(servletContext);
    ac.createRAMIndex(className + "." + field);
    // swap "-" for spaces, ticket #2357
    suffix = suffix.replace("-", " ");
    if (!wholeList && suffix.length() > 0) {
        // Truncated mode: fast lookup capped at 31 entries.
        String[] shortList = ac.getFastList(suffix, field, 31);
        return shortList;
    } else if (suffix.length() > 2 && wholeList) {
        String[] longList = ac.getList(suffix, field);
        return longList;
    }
    String[] defaultList = {""};
    return defaultList;
}

/**
 * This method gets the latest bags from the session (SessionMethods) and returns them in JSON
 * @return JSON serialized to a String
 * @throws JSONException json exception
 */
public String getSavedBagStatus() throws JSONException {
    HttpSession session = WebContextFactory.get().getSession();
    // Cast is unavoidable: session attributes are untyped. (The previously
    // duplicated method-level @SuppressWarnings was removed; this local
    // annotation covers the cast.)
    @SuppressWarnings("unchecked")
    Map<String, Map<String, Object>> savedBagStatus =
        (Map<String, Map<String, Object>>) session.getAttribute(Constants.SAVED_BAG_STATUS);
    // this is where my lists go
    Collection<JSONObject> lists = new HashSet<JSONObject>();
    try {
        for (Map.Entry<String, Map<String, Object>> entry : savedBagStatus.entrySet()) {
            Map<String, Object> listAttributes = entry.getValue();
            // save to the resulting JSON object only if these are 'actionable' lists
            if (listAttributes.get("status").equals(BagState.CURRENT.toString())
                    || listAttributes.get("status").equals(BagState.TO_UPGRADE.toString())) {
                JSONObject list = new JSONObject();
                list.put("name", entry.getKey());
                list.put("status", listAttributes.get("status"));
                if (listAttributes.containsKey("size")) {
                    list.put("size", listAttributes.get("size"));
                }
                lists.add(list);
            }
        }
    } catch (JSONException jse) {
        LOG.error("Errors generating json objects", jse);
    }
    return lists.toString();
}

/**
 * Update with the value given in input the field of the previous template
 * saved into the session
 * @param field the field to update
 * @param value the value
 */
public void updateTemplate(String field, String value) {
    HttpSession session = WebContextFactory.get().getSession();
    boolean isNewTemplate = session.getAttribute(Constants.NEW_TEMPLATE) != null;
    TemplateQuery templateQuery = (TemplateQuery) SessionMethods.getQuery(session);
    // Remember the original name once, so a later rename can still find the
    // template being edited.
    if (!isNewTemplate && session.getAttribute(Constants.PREV_TEMPLATE_NAME) == null) {
        session.setAttribute(Constants.PREV_TEMPLATE_NAME, templateQuery.getName());
    }
    try {
        PropertyUtils.setSimpleProperty(templateQuery, field, value);
    } catch (Exception ex) {
        // Previously printed to stderr via printStackTrace(); log it instead.
        LOG.error("Failed to set template property '" + field + "'", ex);
    }
}

/**
 * Share the bag given in input with the user which userName is input and send email
 * @param userName the user which the bag has to be shared with
 * @param bagName the bag name to share
 * @return 'ok' string if succeeded else error string
 */
public String addUserToShareBag(String userName, String bagName) {
    HttpSession session = WebContextFactory.get().getSession();
    final InterMineAPI im = SessionMethods.getInterMineAPI(session);
    Profile profile = SessionMethods.getProfile(session);
    BagManager bagManager = im.getBagManager();
    // Sharing with yourself is a no-op, reported like a duplicate share.
    if (profile.getUsername().equals(userName)) {
        return "The user already shares the bag.";
    }
    try {
        bagManager.shareBagWithUser(bagName, profile.getUsername(), userName);
    } catch (UserNotFoundException e1) {
        return "User not found.";
    } catch (UserAlreadyShareBagException e2) {
        return "The user already shares the bag.";
    }
    Properties webProperties = SessionMethods.getWebProperties(session.getServletContext());
    InterMineBag bag = profile.getSavedBags().get(bagName);
    try {
        MailUtils.emailSharingList(userName, profile.getUsername(), bag, webProperties);
    } catch (Exception ex) {
        // Best-effort notification: a mail failure must not undo the share.
        LOG.warn("Problems sending sharing list mail.", ex);
    }
    return "ok";
}

/**
 * Un-share the bag given in input with the user which userName is input
 * @param userName the user which the bag has to be un-shared with
 * @param bagName the bag name to un-share
 * @return 'ok' string if succeeded else error string
 */
public String deleteUserToShareBag(String userName, String bagName) {
    HttpSession session = WebContextFactory.get().getSession();
    final InterMineAPI im = SessionMethods.getInterMineAPI(session);
    Profile profile = SessionMethods.getProfile(session);
    BagManager bagManager = im.getBagManager();
    try {
        bagManager.unshareBagWithUser(bagName, profile.getUsername(), userName);
    } catch (UserNotFoundException unfe) {
        return "User not found.";
    } catch (BagDoesNotExistException bnee) {
        // Fixed user-visible typo ("Tha list does not exist.").
        return "The list does not exist.";
    }
    return "ok";
}

/**
 * Return the list of users sharing the bag in input
 * @param bagName the bag name that the users share
 * @return the list of users
 */
public List<String> getUsersSharingBag(String bagName) {
    HttpSession session = WebContextFactory.get().getSession();
    final InterMineAPI im = SessionMethods.getInterMineAPI(session);
    Profile profile = SessionMethods.getProfile(session);
    BagManager bagManager = im.getBagManager();
    return bagManager.getUsersSharingBag(bagName, profile.getUsername());
}
}
package org.innovateuk.ifs.assessment.transactional; import org.innovateuk.ifs.assessment.mapper.CompetitionInviteMapper; import org.innovateuk.ifs.category.domain.Category; import org.innovateuk.ifs.category.domain.InnovationArea; import org.innovateuk.ifs.category.mapper.InnovationAreaMapper; import org.innovateuk.ifs.category.repository.InnovationAreaRepository; import org.innovateuk.ifs.category.resource.InnovationAreaResource; import org.innovateuk.ifs.commons.error.Error; import org.innovateuk.ifs.commons.service.ServiceResult; import org.innovateuk.ifs.competition.domain.Competition; import org.innovateuk.ifs.competition.repository.CompetitionRepository; import org.innovateuk.ifs.invite.domain.*; import org.innovateuk.ifs.invite.mapper.ParticipantStatusMapper; import org.innovateuk.ifs.invite.repository.CompetitionInviteRepository; import org.innovateuk.ifs.invite.repository.CompetitionParticipantRepository; import org.innovateuk.ifs.invite.repository.RejectionReasonRepository; import org.innovateuk.ifs.invite.resource.*; import org.innovateuk.ifs.notifications.resource.ExternalUserNotificationTarget; import org.innovateuk.ifs.notifications.resource.Notification; import org.innovateuk.ifs.notifications.resource.NotificationTarget; import org.innovateuk.ifs.notifications.resource.SystemNotificationSource; import org.innovateuk.ifs.notifications.service.NotificationTemplateRenderer; import org.innovateuk.ifs.notifications.service.senders.NotificationSender; import org.innovateuk.ifs.security.LoggedInUserSupplier; import org.innovateuk.ifs.profile.domain.Profile; import org.innovateuk.ifs.user.domain.Role; import org.innovateuk.ifs.user.domain.User; import org.innovateuk.ifs.profile.repository.ProfileRepository; import org.innovateuk.ifs.user.repository.RoleRepository; import org.innovateuk.ifs.user.repository.UserRepository; import org.innovateuk.ifs.user.resource.UserResource; import org.innovateuk.ifs.user.resource.UserRoleType; import 
org.innovateuk.ifs.util.EntityLookupCallbacks; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.security.access.method.P; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; import java.util.*; import static java.lang.Boolean.TRUE; import static java.lang.String.format; import static java.time.format.DateTimeFormatter.ofPattern; import static java.util.Collections.singletonList; import static java.util.stream.Collectors.toList; import static org.apache.commons.lang3.StringUtils.lowerCase; import static org.innovateuk.ifs.category.resource.CategoryType.INNOVATION_AREA; import static org.innovateuk.ifs.commons.error.CommonErrors.internalServerErrorError; import static org.innovateuk.ifs.commons.error.CommonErrors.notFoundError; import static org.innovateuk.ifs.commons.error.CommonFailureKeys.*; import static org.innovateuk.ifs.commons.service.ServiceResult.*; import static org.innovateuk.ifs.competition.resource.CompetitionStatus.*; import static org.innovateuk.ifs.invite.constant.InviteStatus.*; import static org.innovateuk.ifs.invite.domain.CompetitionParticipantRole.ASSESSOR; import static org.innovateuk.ifs.invite.domain.Invite.generateInviteHash; import static org.innovateuk.ifs.invite.domain.ParticipantStatus.*; import static org.innovateuk.ifs.util.CollectionFunctions.mapWithIndex; import static org.innovateuk.ifs.util.CollectionFunctions.simpleMap; import static org.innovateuk.ifs.util.EntityLookupCallbacks.find; import static org.innovateuk.ifs.util.MapFunctions.asMap; import static org.innovateuk.ifs.util.StringFunctions.plainTextToHtml; import static org.innovateuk.ifs.util.StringFunctions.stripHtml; /** * Service for managing {@link 
org.innovateuk.ifs.invite.domain.CompetitionInvite}s.
 */
@Service
@Transactional
public class CompetitionInviteServiceImpl implements CompetitionInviteService {

    // Path segment appended to the web base URL when building invite links.
    private static final String WEB_CONTEXT = "/assessment";

    // Long-form date used in invite emails (e.g. "1 January 2017").
    private static final DateTimeFormatter inviteFormatter = ofPattern("d MMMM yyyy");

    // Short-form date used in the overview "details" column (e.g. "01 Jan 2017").
    private static final DateTimeFormatter detailsFormatter = ofPattern("dd MMM yyyy");

    @Autowired
    private CompetitionInviteRepository competitionInviteRepository;

    @Autowired
    private CompetitionParticipantRepository competitionParticipantRepository;

    @Autowired
    private RejectionReasonRepository rejectionReasonRepository;

    @Autowired
    private CompetitionRepository competitionRepository;

    @Autowired
    private InnovationAreaRepository innovationAreaRepository;

    @Autowired
    private CompetitionInviteMapper competitionInviteMapper;

    @Autowired
    private InnovationAreaMapper innovationAreaMapper;

    @Autowired
    private ParticipantStatusMapper participantStatusMapper;

    @Autowired
    private UserRepository userRepository;

    @Autowired
    private ProfileRepository profileRepository;

    @Autowired
    private NotificationSender notificationSender;

    @Autowired
    private NotificationTemplateRenderer renderer;

    @Autowired
    private SystemNotificationSource systemNotificationSource;

    @Autowired
    private LoggedInUserSupplier loggedInUserSupplier;

    @Autowired
    private RoleRepository roleRepository;

    // Externally-configured base URL used to build absolute invite links.
    @Value("${ifs.web.baseURL}")
    private String webBaseUrl;

    // Email template keys: single-assessor invite vs. batch group invite.
    enum Notifications {
        INVITE_ASSESSOR,
        INVITE_ASSESSOR_GROUP
    }

    /**
     * Builds the "invites to send" summary for a competition: the sorted
     * recipient names of all invites still in CREATED state, plus a preview
     * of the email content.
     */
    @Override
    public ServiceResult<AssessorInvitesToSendResource> getAllInvitesToSend(long competitionId) {
        return getCompetition(competitionId).andOnSuccess(competition -> {
            List<CompetitionInvite> invites = competitionInviteRepository.getByCompetitionIdAndStatus(competition.getId(), CREATED);
            List<String> recipients = simpleMap(invites, CompetitionInvite::getName);
            recipients.sort(String::compareTo);
            return serviceSuccess(new AssessorInvitesToSendResource(
                    recipients,
                    competition.getId(),
                    competition.getName(),
                    getInvitePreviewContent(competition)
            ));
        });
    }

    /**
     * Builds the summary for resending a single invite, with the content
     * rendered for that specific recipient.
     */
    @Override
    public ServiceResult<AssessorInvitesToSendResource> getInviteToSend(long inviteId) {
        // getById(..) is defined elsewhere in this class (outside this view).
        return getById(inviteId).andOnSuccess(invite ->
                serviceSuccess(new AssessorInvitesToSendResource(
                        singletonList(invite.getName()),
                        invite.getTarget().getId(),
                        invite.getTarget().getName(),
                        getInviteContent(invite)
                ))
        );
    }

    // Renders the invite email body for one recipient. Delegates to a
    // two-argument getInviteContent overload defined elsewhere in this class.
    private String getInviteContent(CompetitionInvite invite) {
        NotificationTarget notificationTarget = new ExternalUserNotificationTarget("", "");
        Competition competition = invite.getTarget();
        return getInviteContent(notificationTarget, asMap(
                "competitionName", competition.getName(),
                "acceptsDate", competition.getAssessorAcceptsDate().format(inviteFormatter),
                "deadlineDate", competition.getAssessorDeadlineDate().format(inviteFormatter),
                "name", invite.getName(),
                "inviteUrl", format("%s/invite/competition/%s", webBaseUrl + WEB_CONTEXT, invite.getHash())
        ));
    }

    // Renders a recipient-agnostic preview of the invite email (no name/URL).
    private String getInvitePreviewContent(Competition competition) {
        NotificationTarget notificationTarget = new ExternalUserNotificationTarget("", "");
        return getInvitePreviewContent(notificationTarget, asMap(
                "competitionName", competition.getName(),
                "acceptsDate", competition.getAssessorAcceptsDate().format(inviteFormatter),
                "deadlineDate", competition.getAssessorDeadlineDate().format(inviteFormatter)
        ));
    }

    /** Looks up an invite by hash, failing if it is not in an openable state. */
    @Override
    public ServiceResult<CompetitionInviteResource> getInvite(String inviteHash) {
        // getByHashIfOpen(..) is defined elsewhere in this class.
        return getByHashIfOpen(inviteHash)
                .andOnSuccessReturn(competitionInviteMapper::mapToResource);
    }

    /** As getInvite, but also marks the invite as opened (entity-level openInvite). */
    @Override
    public ServiceResult<CompetitionInviteResource> openInvite(String inviteHash) {
        return getByHashIfOpen(inviteHash)
                .andOnSuccessReturn(this::openInvite)
                .andOnSuccessReturn(competitionInviteMapper::mapToResource);
    }

    /** Accepts an invite on behalf of the currently logged-in user. */
    @Override
    public ServiceResult<Void> acceptInvite(String inviteHash, UserResource currentUser) {
        final User user = userRepository.findOne(currentUser.getId());
        // getParticipantByInviteHash(..) and accept(..) are defined elsewhere
        // in this class (outside this view).
        return getParticipantByInviteHash(inviteHash)
                .andOnSuccess(p -> accept(p, user))
.andOnSuccessReturnVoid();
    }

    /**
     * Rejects an invite with a reason and optional free-text comment.
     */
    @Override
    public ServiceResult<Void> rejectInvite(String inviteHash, RejectionReasonResource rejectionReason, Optional<String> rejectionComment) {
        // getRejectionReason(..), getParticipantByInviteHash(..) and reject(..)
        // are defined elsewhere in this class (outside this view).
        return getRejectionReason(rejectionReason)
                .andOnSuccess(reason -> getParticipantByInviteHash(inviteHash)
                        .andOnSuccess(invite -> reject(invite, reason, rejectionComment)))
                .andOnSuccessReturnVoid();
    }

    /**
     * Whether the invite is already linked to a user, or a user with the
     * invited email address exists.
     */
    @Override
    public ServiceResult<Boolean> checkExistingUser(@P("inviteHash") String inviteHash) {
        return getByHash(inviteHash).andOnSuccessReturn(invite -> {
            if (invite.getUser() != null) {
                return TRUE;
            }
            return userRepository.findByEmail(invite.getEmail()).isPresent();
        });
    }

    /**
     * Pages assessors available for a competition, optionally filtered by
     * innovation area.
     */
    @Override
    public ServiceResult<AvailableAssessorPageResource> getAvailableAssessors(long competitionId, Pageable pageable, Optional<Long> innovationArea) {
        // Use orElseGet so the unfiltered query is only executed when no
        // innovation-area filter is present. The previous orElse(..) evaluated
        // its argument eagerly, running the fallback repository query on every
        // call even when the filtered branch was taken.
        final Page<User> pagedResult = innovationArea
                .map(i -> userRepository.findAssessorsByCompetitionAndInnovationArea(competitionId, i, pageable))
                .orElseGet(() -> userRepository.findAssessorsByCompetition(competitionId, pageable));

        return serviceSuccess(new AvailableAssessorPageResource(
                pagedResult.getTotalElements(),
                pagedResult.getTotalPages(),
                simpleMap(pagedResult.getContent(), this::mapToAvailableAssessorResource),
                pagedResult.getNumber(),
                pagedResult.getSize()
        ));
    }

    /**
     * Unpaged variant of the above: all available assessors, optionally
     * filtered by innovation area.
     */
    @Override
    public ServiceResult<List<AvailableAssessorResource>> getAvailableAssessors(long competitionId, Optional<Long> innovationArea) {
        final List<User> result;
        if (innovationArea.isPresent()) {
            result = userRepository.findAssessorsByCompetitionAndInnovationArea(
                    competitionId,
                    innovationArea.get()
            );
        } else {
            result = userRepository.findAssessorsByCompetition(competitionId);
        }
        return serviceSuccess(simpleMap(result, this::mapToAvailableAssessorResource));
    }

    // Maps a User (plus their Profile) onto the available-assessor DTO.
    private AvailableAssessorResource mapToAvailableAssessorResource(User assessor) {
        Profile profile = profileRepository.findOne(assessor.getProfileId());
        AvailableAssessorResource availableAssessor = new AvailableAssessorResource();
        availableAssessor.setId(assessor.getId());
        availableAssessor.setEmail(assessor.getEmail());
        availableAssessor.setName(assessor.getName());
        availableAssessor.setBusinessType(profile.getBusinessType());
        availableAssessor.setCompliant(profile.isCompliant(assessor));
        availableAssessor.setInnovationAreas(simpleMap(profile.getInnovationAreas(), innovationAreaMapper::mapToResource));
        return availableAssessor;
    }

    /**
     * Pages invites still in CREATED state for a competition, mapped onto the
     * created-invite DTO.
     */
    @Override
    public ServiceResult<AssessorCreatedInvitePageResource> getCreatedInvites(long competitionId, Pageable pageable) {
        Page<CompetitionInvite> pagedResult = competitionInviteRepository.getByCompetitionIdAndStatus(competitionId, CREATED, pageable);

        List<AssessorCreatedInviteResource> createdInvites = simpleMap(
                pagedResult.getContent(),
                competitionInvite -> {
                    AssessorCreatedInviteResource assessorCreatedInvite = new AssessorCreatedInviteResource();
                    assessorCreatedInvite.setName(competitionInvite.getName());
                    // getInnovationAreasForInvite(..) and isUserCompliant(..)
                    // are defined elsewhere in this class (outside this view).
                    assessorCreatedInvite.setInnovationAreas(getInnovationAreasForInvite(competitionInvite));
                    assessorCreatedInvite.setCompliant(isUserCompliant(competitionInvite));
                    assessorCreatedInvite.setEmail(competitionInvite.getEmail());
                    assessorCreatedInvite.setInviteId(competitionInvite.getId());

                    // Only invites bound to an existing user carry a user id.
                    if (competitionInvite.getUser() != null) {
                        assessorCreatedInvite.setId(competitionInvite.getUser().getId());
                    }
                    return assessorCreatedInvite;
                }
        );

        return serviceSuccess(new AssessorCreatedInvitePageResource(
                pagedResult.getTotalElements(),
                pagedResult.getTotalPages(),
                createdInvites,
                pagedResult.getNumber(),
                pagedResult.getSize()
        ));
    }

    /**
     * Aggregates invite counts for a competition: sent, still on the invite
     * list, accepted and declined.
     */
    @Override
    public ServiceResult<CompetitionInviteStatisticsResource> getInviteStatistics(long competitionId) {
        CompetitionInviteStatisticsResource statisticsResource = new CompetitionInviteStatisticsResource();
        statisticsResource.setInvited(competitionInviteRepository.countByCompetitionIdAndStatusIn(competitionId, EnumSet.of(OPENED, SENT)));
        statisticsResource.setInviteList(competitionInviteRepository.countByCompetitionIdAndStatusIn(competitionId,
EnumSet.of(CREATED)));
        statisticsResource.setAccepted(competitionParticipantRepository.countByCompetitionIdAndRoleAndStatus(competitionId, ASSESSOR, ACCEPTED));
        statisticsResource.setDeclined(competitionParticipantRepository.countByCompetitionIdAndRoleAndStatus(competitionId, ASSESSOR, REJECTED));
        return serviceSuccess(statisticsResource);
    }

    /**
     * Pages the invitation overview for a competition, with optional filters
     * on innovation area, participant status and compliance.
     */
    @Override
    public ServiceResult<AssessorInviteOverviewPageResource> getInvitationOverview(long competitionId,
                                                                                  Pageable pageable,
                                                                                  Optional<Long> innovationArea,
                                                                                  Optional<ParticipantStatus> status,
                                                                                  Optional<Boolean> compliant) {
        Page<CompetitionParticipant> pagedResult;

        if (innovationArea.isPresent() || compliant.isPresent()) {
            // We want to avoid performing the potentially expensive join on Profile if possible
            pagedResult = competitionParticipantRepository.getAssessorsByCompetitionAndInnovationAreaAndStatusAndCompliant(
                    competitionId,
                    innovationArea.orElse(null),
                    status.orElse(null),
                    compliant.orElse(null),
                    pageable
            );
        } else {
            pagedResult = competitionParticipantRepository.getAssessorsByCompetitionAndStatus(
                    competitionId,
                    status.orElse(null),
                    pageable
            );
        }

        List<AssessorInviteOverviewResource> inviteOverviews = simpleMap(
                pagedResult.getContent(),
                participant -> {
                    AssessorInviteOverviewResource assessorInviteOverview = new AssessorInviteOverviewResource();
                    assessorInviteOverview.setName(participant.getInvite().getName());
                    assessorInviteOverview.setStatus(participantStatusMapper.mapToResource(participant.getStatus()));
                    assessorInviteOverview.setDetails(getDetails(participant));
                    assessorInviteOverview.setInviteId(participant.getInvite().getId());

                    if (participant.getUser() != null) {
                        // Existing users: enrich from their Profile.
                        Profile profile = profileRepository.findOne(participant.getUser().getProfileId());
                        assessorInviteOverview.setId(participant.getUser().getId());
                        assessorInviteOverview.setBusinessType(profile.getBusinessType());
                        assessorInviteOverview.setCompliant(profile.isCompliant(participant.getUser()));
                        assessorInviteOverview.setInnovationAreas(simpleMap(profile.getInnovationAreas(), innovationAreaMapper::mapToResource));
                    } else {
                        // New (not-yet-registered) assessors: only the invite's
                        // single innovation area is known.
                        assessorInviteOverview.setInnovationAreas(singletonList(
                                innovationAreaMapper.mapToResource(participant.getInvite().getInnovationArea())
                        ));
                    }
                    return assessorInviteOverview;
                });

        return serviceSuccess(new AssessorInviteOverviewPageResource(
                pagedResult.getTotalElements(),
                pagedResult.getTotalPages(),
                inviteOverviews,
                pagedResult.getNumber(),
                pagedResult.getSize()
        ));
    }

    /**
     * Stages an invite for a new (not-yet-registered) user, failing if an
     * invite for that email already exists on the competition.
     */
    @Override
    public ServiceResult<CompetitionInviteResource> inviteUser(NewUserStagedInviteResource stagedInvite) {
        // getByEmailAndCompetition(..) is defined elsewhere in this class;
        // here a lookup FAILURE means the email is free to invite.
        return getByEmailAndCompetition(stagedInvite.getEmail(), stagedInvite.getCompetitionId()).handleSuccessOrFailure(
                failure -> getCompetition(stagedInvite.getCompetitionId())
                        .andOnSuccess(competition -> getInnovationArea(stagedInvite.getInnovationAreaId())
                                .andOnSuccess(innovationArea -> inviteUserToCompetition(
                                        stagedInvite.getName(),
                                        stagedInvite.getEmail(),
                                        competition,
                                        innovationArea
                                ))
                        )
                        .andOnSuccessReturn(competitionInviteMapper::mapToResource),
                success -> serviceFailure(Error.globalError(
                        "validation.competitionInvite.create.email.exists",
                        singletonList(stagedInvite.getEmail())
                ))
        );
    }

    /**
     * Stages invites for a batch of new users, collecting per-index field
     * errors for duplicate emails and missing innovation areas.
     */
    @Override
    public ServiceResult<Void> inviteNewUsers(List<NewUserStagedInviteResource> newUserStagedInvites, long competitionId) {
        return getCompetition(competitionId).andOnSuccessReturn(competition ->
                mapWithIndex(newUserStagedInvites, (index, invite) ->
                        getByEmailAndCompetition(invite.getEmail(), competitionId).handleSuccessOrFailure(
                                failure -> getInnovationArea(invite.getInnovationAreaId())
                                        .andOnSuccess(innovationArea ->
                                                inviteUserToCompetition(invite.getName(), invite.getEmail(), competition, innovationArea)
                                        )
                                        .andOnFailure(() -> serviceFailure(Error.fieldError(
                                                "invites[" + index + "].innovationArea",
                                                invite.getInnovationAreaId(),
                                                "validation.competitionInvite.create.innovationArea.required"
                                        ))),
                                success -> serviceFailure(Error.fieldError(
                                        "invites[" + index +
"].email",
                                        invite.getEmail(),
                                        "validation.competitionInvite.create.email.exists"
                                ))
                        )
                ))
                // aggregate(..) is presumably a statically-imported ServiceResult
                // combiner — not visible in this view; verify its import.
                .andOnSuccess(list -> aggregate(list))
                .andOnSuccessReturnVoid();
    }

    // Human-readable status details for the overview table: rejection reason
    // for declined invites, sent date for pending ones, otherwise null.
    private String getDetails(CompetitionParticipant participant) {
        String details = null;

        if (participant.getStatus() == REJECTED) {
            details = format("Invite declined as %s", lowerCase(participant.getRejectionReason().getReason()));
        } else if (participant.getStatus() == PENDING) {
            if (participant.getInvite().getSentOn() != null) {
                details = format("Invite sent: %s", participant.getInvite().getSentOn().format(detailsFormatter));
            }
        }

        return details;
    }

    // Looks up an innovation area by id, failing with a not-found error.
    private ServiceResult<InnovationArea> getInnovationArea(long innovationCategoryId) {
        return find(innovationAreaRepository.findOne(innovationCategoryId), notFoundError(Category.class, innovationCategoryId, INNOVATION_AREA));
    }

    // Persists a new invite for a not-yet-registered user (name + email).
    private ServiceResult<CompetitionInvite> inviteUserToCompetition(String name, String email, Competition competition, InnovationArea innovationArea) {
        return serviceSuccess(
                competitionInviteRepository.save(new CompetitionInvite(name, email, generateInviteHash(), competition, innovationArea))
        );
    }

    /** Stages an invite for an existing user by id. */
    @Override
    public ServiceResult<CompetitionInviteResource> inviteUser(ExistingUserStagedInviteResource stagedInvite) {
        return getUserById(stagedInvite.getUserId())
                .andOnSuccess(user -> inviteUserToCompetition(user, stagedInvite.getCompetitionId()))
                .andOnSuccessReturn(competitionInviteMapper::mapToResource);
    }

    /**
     * Stages invites for a batch of existing users; users who already have an
     * invite on the competition are skipped (the andOnFailure branch only
     * invites when the email lookup fails, i.e. no invite exists yet).
     */
    @Override
    public ServiceResult<Void> inviteUsers(List<ExistingUserStagedInviteResource> stagedInvites) {
        return serviceSuccess(mapWithIndex(stagedInvites, (i, invite) ->
                getUserById(invite.getUserId()).andOnSuccess(user ->
                        getByEmailAndCompetition(user.getEmail(), invite.getCompetitionId()).andOnFailure(() ->
                                inviteUserToCompetition(user, invite.getCompetitionId())
                        )))).andOnSuccessReturnVoid();
    }

    // Persists a new invite bound to an existing user.
    private ServiceResult<CompetitionInvite> inviteUserToCompetition(User user, long competitionId) {
        return getCompetition(competitionId)
                .andOnSuccessReturn(
                        competition -> competitionInviteRepository.save(new CompetitionInvite(user, generateInviteHash(), competition))
                );
    }

    // Entity lookups wrapped as ServiceResults with not-found errors.
    private ServiceResult<Competition> getCompetition(long competitionId) {
        return find(competitionRepository.findOne(competitionId), notFoundError(Competition.class, competitionId));
    }

    private ServiceResult<User> getUserByEmail(String email) {
        return find(userRepository.findByEmail(email), notFoundError(User.class, email));
    }

    private ServiceResult<User> getUserById(long id) {
        return find(userRepository.findOne(id), notFoundError(User.class, id));
    }

    /**
     * Sends every CREATED invite for the competition: creates a participant
     * per invite, grants the assessor role to brand-new assessors with a
     * matching user account, and emails each recipient.
     */
    @Override
    public ServiceResult<Void> sendAllInvites(long competitionId, AssessorInviteSendResource assessorInviteSendResource) {
        return getCompetition(competitionId).andOnSuccess(competition -> {

            // Strip any user-supplied HTML, then re-add safe line breaks.
            String customTextPlain = stripHtml(assessorInviteSendResource.getContent());
            String customTextHtml = plainTextToHtml(customTextPlain);

            return ServiceResult.processAnyFailuresOrSucceed(simpleMap(
                    competitionInviteRepository.getByCompetitionIdAndStatus(competition.getId(), CREATED),
                    invite -> {
                        // Marking the invite sent also creates its participant row.
                        competitionParticipantRepository.save(
                                new CompetitionParticipant(invite.send(loggedInUserSupplier.get(), ZonedDateTime.now()))
                        );

                        if (invite.isNewAssessorInvite()) {
                            userRepository.findByEmail(invite.getEmail()).ifPresent(this::addAssessorRoleToUser);
                        }

                        return sendInviteNotification(
                                assessorInviteSendResource.getSubject(),
                                inviteFormatter,
                                customTextPlain,
                                customTextHtml,
                                invite,
                                Notifications.INVITE_ASSESSOR_GROUP
                        );
                    }
            ));
        });
    }

    /** Re-sends a single invite's notification email. */
    @Override
    public ServiceResult<Void> resendInvite(long inviteId, AssessorInviteSendResource assessorInviteSendResource) {
        // getParticipantByInviteId(..) is defined elsewhere in this class.
        return getParticipantByInviteId(inviteId)
                .andOnSuccess(participant ->
                        resendInviteNotification(participant.getInvite().sendOrResend(loggedInUserSupplier.get(), ZonedDateTime.now()), assessorInviteSendResource)
                )
                .andOnSuccessReturnVoid();
    }

    private ServiceResult<Notification> resendInviteNotification(CompetitionInvite invite, AssessorInviteSendResource
assessorInviteSendResource) { // Strip any HTML that may have been added to the content by the user. String bodyPlain = stripHtml(assessorInviteSendResource.getContent()); // HTML'ify the plain content to add line breaks. String bodyHtml = plainTextToHtml(bodyPlain); NotificationTarget recipient = new ExternalUserNotificationTarget(invite.getName(), invite.getEmail()); Notification notification = new Notification(systemNotificationSource, singletonList(recipient), Notifications.INVITE_ASSESSOR, asMap( "subject", assessorInviteSendResource.getSubject(), "bodyPlain", bodyPlain, "bodyHtml", bodyHtml )); return notificationSender.sendNotification(notification); } private ServiceResult<Void> sendInviteNotification(String subject, DateTimeFormatter formatter, String customTextPlain, String customTextHtml, CompetitionInvite invite, Notifications notificationType) { NotificationTarget recipient = new ExternalUserNotificationTarget(invite.getName(), invite.getEmail()); Notification notification = new Notification( systemNotificationSource, recipient, notificationType, asMap( "subject", subject, "name", invite.getName(), "competitionName", invite.getTarget().getName(), "acceptsDate", invite.getTarget().getAssessorAcceptsDate().format(formatter), "deadlineDate", invite.getTarget().getAssessorDeadlineDate().format(formatter), "inviteUrl", format("%s/invite/competition/%s", webBaseUrl + WEB_CONTEXT, invite.getHash()), "customTextPlain", customTextPlain, "customTextHtml", customTextHtml )); return notificationSender.sendNotification(notification).andOnSuccessReturnVoid(); } private void addAssessorRoleToUser(User user) { Role assessorRole = roleRepository.findOneByName(UserRoleType.ASSESSOR.getName()); user.addRole(assessorRole); } @Override public ServiceResult<Void> deleteInvite(String email, long competitionId) { return getByEmailAndCompetition(email, competitionId).andOnSuccess(this::deleteInvite); } @Override public ServiceResult<Void> deleteAllInvites(long competitionId) { 
return find(competitionRepository.findOne(competitionId), notFoundError(Competition.class, competitionId)) .andOnSuccessReturnVoid(competition -> competitionInviteRepository.deleteByCompetitionIdAndStatus(competition.getId(), CREATED)); } private ServiceResult<CompetitionInvite> getByHash(String inviteHash) { return find(competitionInviteRepository.getByHash(inviteHash), notFoundError(CompetitionInvite.class, inviteHash)); } private ServiceResult<CompetitionInvite> getById(long id) { return find(competitionInviteRepository.findOne(id), notFoundError(CompetitionInvite.class, id)); } private ServiceResult<CompetitionParticipant> getParticipantByInviteId(long inviteId) { return find(competitionParticipantRepository.getByInviteId(inviteId), notFoundError(CompetitionParticipant.class, inviteId)); } private String getInviteContent(NotificationTarget notificationTarget, Map<String, Object> arguments) { return renderer.renderTemplate(systemNotificationSource, notificationTarget, "invite_assessor_editable_text.txt", arguments).getSuccessObject(); } private String getInvitePreviewContent(NotificationTarget notificationTarget, Map<String, Object> arguments) { return renderer.renderTemplate(systemNotificationSource, notificationTarget, "invite_assessor_preview_text.txt", arguments).getSuccessObject(); } private ServiceResult<CompetitionInvite> getByEmailAndCompetition(String email, long competitionId) { return find(competitionInviteRepository.getByEmailAndCompetitionId(email, competitionId), notFoundError(CompetitionInvite.class, email, competitionId)); } private ServiceResult<Void> deleteInvite(CompetitionInvite invite) { if (invite.getStatus() != CREATED) { return ServiceResult.serviceFailure(new Error(COMPETITION_INVITE_CANNOT_DELETE_ONCE_SENT, invite.getEmail())); } competitionInviteRepository.delete(invite); return serviceSuccess(); } private ServiceResult<CompetitionInvite> getByHashIfOpen(String inviteHash) { return getByHash(inviteHash).andOnSuccess(invite -> { if 
(!EnumSet.of(READY_TO_OPEN, IN_ASSESSMENT, CLOSED, OPEN).contains(invite.getTarget().getCompetitionStatus())) { return ServiceResult.serviceFailure(new Error(COMPETITION_INVITE_EXPIRED, invite.getTarget().getName())); } CompetitionParticipant participant = competitionParticipantRepository.getByInviteHash(inviteHash); if (participant == null) { return serviceSuccess(invite); } if (participant.getStatus() == ACCEPTED || participant.getStatus() == REJECTED) { return ServiceResult.serviceFailure(new Error(COMPETITION_INVITE_CLOSED, invite.getTarget().getName())); } return serviceSuccess(invite); }); } private CompetitionInvite openInvite(CompetitionInvite invite) { return competitionInviteRepository.save(invite.open()); } private ServiceResult<CompetitionParticipant> getParticipantByInviteHash(String inviteHash) { return find(competitionParticipantRepository.getByInviteHash(inviteHash), notFoundError(CompetitionParticipant.class, inviteHash)); } private ServiceResult<CompetitionParticipant> accept(CompetitionParticipant participant, User user) { if (participant.getInvite().getStatus() != OPENED) { return ServiceResult.serviceFailure(new Error(COMPETITION_PARTICIPANT_CANNOT_ACCEPT_UNOPENED_INVITE, getInviteCompetitionName(participant))); } else if (participant.getStatus() == ACCEPTED) { return ServiceResult.serviceFailure(new Error(COMPETITION_PARTICIPANT_CANNOT_ACCEPT_ALREADY_ACCEPTED_INVITE, getInviteCompetitionName(participant))); } else if (participant.getStatus() == REJECTED) { return ServiceResult.serviceFailure(new Error(COMPETITION_PARTICIPANT_CANNOT_ACCEPT_ALREADY_REJECTED_INVITE, getInviteCompetitionName(participant))); } else { return applyInnovationAreaToUserProfile(participant, user) .andOnSuccessReturn(() -> participant.acceptAndAssignUser(user)); } } private ServiceResult<Participant> applyInnovationAreaToUserProfile(CompetitionParticipant participant, User user) { if (participant.getInvite().isNewAssessorInvite()) { return 
getProfileForUser(user).andOnSuccessReturn( profile -> { profile.addInnovationArea(participant.getInvite().getInnovationArea()); return participant; } ); } else { return serviceSuccess(participant); } } private ServiceResult<Profile> getProfileForUser(User user) { return find(profileRepository.findOne(user.getProfileId()), notFoundError(Profile.class, user.getProfileId())); } private ServiceResult<CompetitionParticipant> reject(CompetitionParticipant participant, RejectionReason rejectionReason, Optional<String> rejectionComment) { if (participant.getInvite().getStatus() != OPENED) { return ServiceResult.serviceFailure(new Error(COMPETITION_PARTICIPANT_CANNOT_REJECT_UNOPENED_INVITE, getInviteCompetitionName(participant))); } else if (participant.getStatus() == ACCEPTED) { return ServiceResult.serviceFailure(new Error(COMPETITION_PARTICIPANT_CANNOT_REJECT_ALREADY_ACCEPTED_INVITE, getInviteCompetitionName(participant))); } else if (participant.getStatus() == REJECTED) { return ServiceResult.serviceFailure(new Error(COMPETITION_PARTICIPANT_CANNOT_REJECT_ALREADY_REJECTED_INVITE, getInviteCompetitionName(participant))); } else { return serviceSuccess(participant.reject(rejectionReason, rejectionComment)); } } private ServiceResult<RejectionReason> getRejectionReason(final RejectionReasonResource rejectionReason) { return find(rejectionReasonRepository.findOne(rejectionReason.getId()), notFoundError(RejectionReason.class, rejectionReason.getId())); } private String getInviteCompetitionName(CompetitionParticipant participant) { return participant.getInvite().getTarget().getName(); } private boolean isUserCompliant(CompetitionInvite competitionInvite) { if (competitionInvite == null || competitionInvite.getUser() == null) { return false; } Profile profile = profileRepository.findOne(competitionInvite.getUser().getProfileId()); return profile.isCompliant(competitionInvite.getUser()); } private List<InnovationAreaResource> getInnovationAreasForInvite(CompetitionInvite 
competitionInvite) { if (competitionInvite.isNewAssessorInvite()) { return singletonList(innovationAreaMapper.mapToResource(competitionInvite.getInnovationArea())); } else { return profileRepository.findOne(competitionInvite.getUser().getProfileId()).getInnovationAreas().stream() .map(innovationAreaMapper::mapToResource) .collect(toList()); } } }
package com.jorgemf.util; public abstract class ResourcesFactory<k> { private static final int LIST_INCREMENT = 10; private k[] resources; private int lastFreeResource; private int increment; public ResourcesFactory() { this(LIST_INCREMENT); } public ResourcesFactory(int listIncrement) { if (listIncrement <= 0) { throw new RuntimeException("Increment has to be greater than 0"); } increment = listIncrement; //noinspection unchecked resources = (k[]) (new Object[0]); lastFreeResource = -1; } protected abstract k createResource(); public void releaseResource(k resource) { if (resources.length == lastFreeResource + 1) { //noinspection unchecked k[] newlist = (k[]) (new Object[resources.length + increment]); System.arraycopy(resources, 0, newlist, 0, resources.length); resources = newlist; } lastFreeResource++; resources[lastFreeResource] = resource; } public k getResource() { k resource; if (lastFreeResource < 0) { resource = createResource(); } else { resource = resources[lastFreeResource]; resources[lastFreeResource] = null; lastFreeResource } return resource; } }
package com.kodcu.service;

import com.kodcu.component.EditorPane;
import com.kodcu.component.MyTab;
import com.kodcu.controller.ApplicationController;
import com.kodcu.other.Current;
import com.kodcu.other.IOHelper;
import com.kodcu.service.ui.EditorService;
import com.kodcu.service.ui.TabService;
import com.kodcu.service.ui.WebviewService;
import javafx.collections.ObservableList;
import javafx.scene.Node;
import javafx.scene.control.TabPane;
import javafx.scene.layout.AnchorPane;
import javafx.stage.FileChooser;
import netscape.javascript.JSObject;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.util.*;

import static java.nio.file.StandardOpenOption.CREATE;
import static java.nio.file.StandardOpenOption.TRUNCATE_EXISTING;

/**
 * Document-level operations for the editor UI: saving the current document,
 * creating new editor tabs, and opening files chosen by the user.
 */
@Component
public class DocumentService {

    private final DirectoryService directoryService;
    private final ApplicationController controller;
    private final WebviewService webviewService;
    private final EditorService editorService;
    private final TabService tabService;
    private final Current current;

    @Autowired
    public DocumentService(DirectoryService directoryService, ApplicationController controller, WebviewService webviewService,
                           EditorService editorService, TabService tabService, Current current) {
        this.directoryService = directoryService;
        this.controller = controller;
        this.webviewService = webviewService;
        this.editorService = editorService;
        this.tabService = tabService;
        this.current = current;
        // Back-reference so the webview layer can call back into document operations.
        webviewService.setDocumentService(this);
    }

    /**
     * Saves the current editor content to its backing file. A no-op unless a
     * current path exists and the tab is marked dirty (title contains " *").
     * On success the tab title is refreshed, the file is promoted to the top
     * of the recent-files list, and the initial directory is updated.
     */
    public void saveDoc() {
        Path currentPath = directoryService.currentPath();
        if (Objects.isNull(currentPath) || !current.getCurrentTabText().contains(" *"))
            return;
        // Write the editor buffer; a present Optional signals a write failure, so bail out silently.
        Optional<IOException> exception = IOHelper.writeToFile(currentPath, current.currentEditorValue(), TRUNCATE_EXISTING, CREATE);
        if (exception.isPresent())
            return;
        current.setCurrentTabText(currentPath.getFileName().toString());
        ObservableList<String> recentFiles = controller.getRecentFilesList();
        // Move the saved file to the front of the recent-files list.
        recentFiles.remove(currentPath.toString());
        recentFiles.add(0, currentPath.toString());
        // NOTE(review): this second setCurrentTabText is effectively redundant —
        // the title was just set to the bare file name, which should not contain " *".
        current.setCurrentTabText(current.getCurrentTabText().replace(" *", ""));
        directoryService.setInitialDirectory(Optional.ofNullable(currentPath.toFile()));
        current.currentTab().setPath(currentPath);
    }

    /** Creates a new, empty editor tab. */
    public void newDoc() {
        newDoc(null);
    }

    /**
     * Creates a new editor tab, optionally pre-populated with content.
     * The editor webview signals readiness via the "command:ready" confirm
     * message, at which point JS-side bindings and shortcuts are wired up.
     *
     * @param content initial editor content, or null for an empty document
     */
    public void newDoc(final String content) {
        EditorPane editorPane = webviewService.createWebView();
        editorPane.confirmHandler(param -> {
            if ("command:ready".equals(param)) {
                // Expose the controller to JS and push options/shortcuts into the editor.
                JSObject window = editorPane.getWindow();
                window.setMember("afx", controller);
                window.call("updateOptions", new Object[]{});
                Map<String, String> shortCuts = controller.getShortCuts();
                Set<String> keySet = shortCuts.keySet();
                for (String key : keySet) {
                    window.call("addNewCommand", new Object[]{key, shortCuts.get(key)});
                }
                window.call("setInitialized");
                String finalContent = content;
                if (Objects.isNull(finalContent))
                    finalContent = "";
                window.call("setEditorValue", new Object[]{finalContent});
            }
            // Always suppress the default confirm dialog.
            return false;
        });
        AnchorPane anchorPane = new AnchorPane();
        MyTab tab = tabService.createTab();
        Node editorVBox = editorService.createEditorVBox(editorPane, tab);
        controller.fitToParent(editorVBox);
        anchorPane.getChildren().add(editorVBox);
        tab.setEditorPane(editorPane);
        tab.setContent(anchorPane);
        // New documents start dirty, hence the " *" marker.
        tab.setTabText("new *");
        TabPane tabPane = controller.getTabPane();
        tabPane.getTabs().add(tab);
        tab.select();
        editorPane.focus();
    }

    /**
     * Shows a multi-select open dialog, opens each chosen file, and records
     * newly seen files in the recent-files list.
     */
    public void openDoc() {
        FileChooser fileChooser = directoryService.newFileChooser("Open Asciidoc File");
        fileChooser.getExtensionFilters().add(new FileChooser.ExtensionFilter("Asciidoc", "*.adoc", "*.asciidoc", "*.asc", "*.ad", "*.txt", "*.*"));
        fileChooser.getExtensionFilters().add(new FileChooser.ExtensionFilter("Markdown", "*.md", "*.markdown", "*.txt", "*.*"));
        List<File> chosenFiles = fileChooser.showOpenMultipleDialog(controller.getStage());
        if (chosenFiles != null) {
            chosenFiles.stream().map(e -> e.toPath()).forEach(directoryService.getOpenFileConsumer()::accept);
            // NOTE(review): addAll with a single String works via the ObservableList
            // varargs overload, but `add` would express the intent more clearly.
            chosenFiles.stream()
                    .map(File::toString).filter(file -> !controller.getRecentFilesList().contains(file))
                    .forEach(controller.getRecentFilesList()::addAll);
            // get(0) — presumably the dialog returns null or a non-empty list; confirm.
            directoryService.setInitialDirectory(Optional.ofNullable(chosenFiles.get(0)));
        }
    }
}
package com.minespaceships.mod; import com.minespaceships.mod.blocks.EnergyBlock; import com.minespaceships.mod.blocks.EngineBlock; import com.minespaceships.mod.blocks.NavigatorBlock; import com.minespaceships.mod.blocks.PhaserBlock; import com.minespaceships.mod.blocks.ShieldBlock; import com.minespaceships.mod.events.BlockEvent; import com.minespaceships.mod.overhead.ChatRegisterEntity; import net.minecraft.block.Block; import net.minecraft.client.Minecraft; import net.minecraft.init.Blocks; import net.minecraft.item.Item; import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.fml.common.Mod; import net.minecraftforge.fml.common.Mod.EventHandler; import net.minecraftforge.fml.common.event.FMLInitializationEvent; import net.minecraftforge.fml.common.event.FMLPostInitializationEvent; import net.minecraftforge.fml.common.event.FMLPreInitializationEvent; import net.minecraftforge.fml.common.network.NetworkRegistry; import net.minecraftforge.fml.common.registry.GameRegistry; @Mod(modid = ExampleMod.MODID, version = ExampleMod.VERSION) public class ExampleMod { private static ExampleMod Singleton; public static final String MODID = "minespaceships"; public static final String VERSION = "1.0"; private static int navigatorBlockId = 1000; /** * Event that gets called in an early initialization state of Minecraft * @param event */ @EventHandler public void preInit(FMLPreInitializationEvent event) { Singleton = this; //register our game objects so Minecraft knows how to use them. 
GameRegistry.registerBlock(new NavigatorBlock(), "NavigatorBlock"); GameRegistry.registerBlock(new EnergyBlock(), "EnergyBlock"); GameRegistry.registerBlock(new PhaserBlock(), "PhaserBlock"); GameRegistry.registerBlock(new EngineBlock(), "EngineBlock"); GameRegistry.registerBlock(new ShieldBlock(), "ShieldBlock"); GameRegistry.registerTileEntity(ChatRegisterEntity.class, "ChatRegisterEntity"); // Register event listener MinecraftForge.EVENT_BUS.register(new BlockEvent()); } @EventHandler public void init(FMLInitializationEvent event) { } @EventHandler public void postInit(FMLPostInitializationEvent event){ } public static ExampleMod instance(){ return Singleton; } }
package com.servioticy.api.internal; import java.io.IOException; import java.util.Date; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.core.Context; import javax.ws.rs.core.HttpHeaders; import javax.ws.rs.core.Response; import com.fasterxml.jackson.core.JsonProcessingException; import com.servioticy.api.commons.data.CouchBase; import com.servioticy.api.commons.data.Group; import com.servioticy.api.commons.data.SO; import com.servioticy.api.commons.datamodel.Data; import com.servioticy.api.commons.elasticsearch.SearchEngine; import com.servioticy.api.commons.exceptions.ServIoTWebApplicationException; import com.servioticy.api.commons.utils.Config; @Path("/") public class Paths { @Path("/{soId}") @GET @Produces("application/json") public Response getSO(@Context HttpHeaders hh, @PathParam("soId") String soId) { // Get the Service Object CouchBase cb = new CouchBase(); SO so = cb.getSO(soId); if (so == null) throw new ServIoTWebApplicationException(Response.Status.NOT_FOUND, "The Service Object was not found."); return Response.ok(so.responseGetSO()) .header("Server", "api.compose") .header("Date", new Date(System.currentTimeMillis())) .build(); } @Path("/{soId}/streams/{streamId}/subscriptions") @GET @Produces("application/json") public Response getSubscriptions(@Context HttpHeaders hh, @PathParam("soId") String soId, @PathParam("streamId") String streamId) { // Get the Service Object CouchBase cb = new CouchBase(); SO so = cb.getSO(soId); if (so == null) throw new ServIoTWebApplicationException(Response.Status.NOT_FOUND, "The Service Object was not found."); String response = so.responseSubscriptions(streamId); // Generate response if (response == null) return Response.noContent() .header("Server", "api.servIoTicy") .header("Date", new Date(System.currentTimeMillis())) .build(); return Response.ok(response) .header("Server", 
"api.servIoTicy") .header("Date", new Date(System.currentTimeMillis())) .build(); } @Path("/{soId}/streams/{streamId}/lastUpdate") @GET @Produces("application/json") public Response getLastUpdate(@Context HttpHeaders hh, @PathParam("soId") String soId, @PathParam("streamId") String streamId) { // Get the Service Object CouchBase cb = new CouchBase(); SO so = cb.getSO(soId); if (so == null) throw new ServIoTWebApplicationException(Response.Status.NOT_FOUND, "The Service Object was not found."); // Get the Service Object Data long lastUpdate = SearchEngine.getLastUpdateTimeStamp(soId,streamId); Data data = cb.getData(soId,streamId,lastUpdate); if (data == null) return Response.noContent() .header("Server", "api.servIoTicy") .header("Date", new Date(System.currentTimeMillis())) .build(); return Response.ok(data.responseLastUpdate()) .header("Server", "api.servIoTicy") .header("Date", new Date(System.currentTimeMillis())) .build(); } @Path("/groups/lastUpdate") @POST @Produces("application/json") public Response getLastGroupUpdate(@Context HttpHeaders hh, String body) throws JsonProcessingException, IOException { // Check if exists request data if (body.isEmpty()) throw new ServIoTWebApplicationException(Response.Status.BAD_REQUEST, "No data in the request"); // Create Group petition Group group = new Group(body); String response = group.lastUpdate(); if (response.equals("{}")) return Response.noContent() .header("Server", "api.servIoTicy") .header("Date", new Date(System.currentTimeMillis())) .build(); return Response.ok(response) .header("Server", "api.compose") .header("Date", new Date(System.currentTimeMillis())) .build(); } @Path("/opid/{opId}") @GET @Produces("application/json") public Response getOpId(@Context HttpHeaders hh, @PathParam("opId") String opId, @PathParam("streamId") String streamId) { // Get the Service Object CouchBase cb = new CouchBase(); String res = cb.getOpId(opId); if (res == null) throw new 
ServIoTWebApplicationException(Response.Status.NOT_FOUND, "The OpId was not found."); return Response.ok(res) .header("Server", "api.servIoTicy") .header("Date", new Date(System.currentTimeMillis())) .build(); } @Path("/{soId}/streams/{streamId}/{opId}") @PUT @Produces("application/json") public Response updateInternalSOData(@Context HttpHeaders hh, @PathParam("soId") String soId, @PathParam("streamId") String streamId, @PathParam("opId") String opId, String body) { // Check if exists request data if (body.isEmpty()) throw new ServIoTWebApplicationException(Response.Status.BAD_REQUEST, "No data in the request"); // Get the Service Object CouchBase cb = new CouchBase(); SO so = cb.getSO(soId); if (so == null) throw new ServIoTWebApplicationException(Response.Status.NOT_FOUND, "The Service Object was not found."); // Create Data Data data = new Data(so, streamId, body); // Store in Couchbase cb.setData(data); // Set the opId cb.setOpId(opId, Config.getOpIdExpiration()); return Response.ok(body) .header("Server", "api.compose") .header("Date", new Date(System.currentTimeMillis())) .build(); } }