idx int64 0 41.2k | question stringlengths 73 5.81k | target stringlengths 5 918 |
|---|---|---|
7,000 | private void checkOrMarkPrivateAccess ( Expression source , FieldNode fn , boolean lhsOfAssignment ) { if ( fn == null ) return ; ClassNode enclosingClassNode = typeCheckingContext . getEnclosingClassNode ( ) ; ClassNode declaringClass = fn . getDeclaringClass ( ) ; if ( fn . isPrivate ( ) && ( declaringClass != enclosingClassNode || typeCheckingContext . getEnclosingClosure ( ) != null ) && declaringClass . getModule ( ) == enclosingClassNode . getModule ( ) ) { if ( ! lhsOfAssignment && enclosingClassNode . isDerivedFrom ( declaringClass ) ) { boolean isPrimBool = fn . getOriginType ( ) . equals ( ClassHelper . boolean_TYPE ) ; String suffix = Verifier . capitalize ( fn . getName ( ) ) ; MethodNode getterNode = findValidGetter ( enclosingClassNode , "get" + suffix ) ; if ( getterNode == null && isPrimBool ) { getterNode = findValidGetter ( enclosingClassNode , "is" + suffix ) ; } if ( getterNode != null ) { source . putNodeMetaData ( INFERRED_TYPE , getterNode . getReturnType ( ) ) ; return ; } } StaticTypesMarker marker = lhsOfAssignment ? StaticTypesMarker . PV_FIELDS_MUTATION : StaticTypesMarker . PV_FIELDS_ACCESS ; addPrivateFieldOrMethodAccess ( source , declaringClass , marker , fn ) ; } } | Given a field node checks if we are accessing or setting a private field from an inner class . |
7,001 | private String checkOrMarkInnerFieldAccess ( Expression source , FieldNode fn , boolean lhsOfAssignment , String delegationData ) { if ( fn == null || fn . isStatic ( ) ) return delegationData ; ClassNode enclosingClassNode = typeCheckingContext . getEnclosingClassNode ( ) ; ClassNode declaringClass = fn . getDeclaringClass ( ) ; if ( ( fn . isPublic ( ) || fn . isProtected ( ) ) && ( declaringClass != enclosingClassNode || typeCheckingContext . getEnclosingClosure ( ) != null ) && declaringClass . getModule ( ) == enclosingClassNode . getModule ( ) && ! lhsOfAssignment && enclosingClassNode . isDerivedFrom ( declaringClass ) ) { if ( source instanceof PropertyExpression ) { PropertyExpression pe = ( PropertyExpression ) source ; if ( "this" . equals ( pe . getPropertyAsString ( ) ) || source instanceof AttributeExpression ) return delegationData ; pe . getObjectExpression ( ) . putNodeMetaData ( StaticTypesMarker . IMPLICIT_RECEIVER , "owner" ) ; } return "owner" ; } return delegationData ; } | Given a field node checks if we are accessing or setting a public or protected field from an inner class . |
7,002 | protected void silentlyVisitMethodNode ( final MethodNode directMethodCallCandidate ) { ErrorCollector collector = new ErrorCollector ( typeCheckingContext . getErrorCollector ( ) . getConfiguration ( ) ) ; startMethodInference ( directMethodCallCandidate , collector ) ; } | visit a method call target to infer the type . Don t report errors right away that will be done by a later visitMethod call |
7,003 | private static ClassNode createUsableClassNodeFromGenericsType ( final GenericsType genericsType ) { ClassNode value = genericsType . getType ( ) ; if ( genericsType . isPlaceholder ( ) ) { value = OBJECT_TYPE ; } ClassNode lowerBound = genericsType . getLowerBound ( ) ; if ( lowerBound != null ) { value = lowerBound ; } else { ClassNode [ ] upperBounds = genericsType . getUpperBounds ( ) ; if ( upperBounds != null ) { value = WideningCategories . lowestUpperBound ( Arrays . asList ( upperBounds ) ) ; } } return value ; } | Given a GenericsType instance returns a ClassNode which can be used as an inferred type . |
7,004 | private static ClassNode adjustWithTraits ( final MethodNode directMethodCallCandidate , final ClassNode receiver , final ClassNode [ ] args , final ClassNode returnType ) { if ( directMethodCallCandidate instanceof ExtensionMethodNode ) { ExtensionMethodNode emn = ( ExtensionMethodNode ) directMethodCallCandidate ; if ( "withTraits" . equals ( emn . getName ( ) ) && "DefaultGroovyMethods" . equals ( emn . getExtensionMethodNode ( ) . getDeclaringClass ( ) . getNameWithoutPackage ( ) ) ) { List < ClassNode > nodes = new LinkedList < ClassNode > ( ) ; Collections . addAll ( nodes , receiver . getInterfaces ( ) ) ; for ( ClassNode arg : args ) { if ( isClassClassNodeWrappingConcreteType ( arg ) ) { nodes . add ( arg . getGenericsTypes ( ) [ 0 ] . getType ( ) ) ; } else { nodes . add ( arg ) ; } } return new LowestUpperBoundClassNode ( returnType . getName ( ) + "Composed" , OBJECT_TYPE , nodes . toArray ( ClassNode . EMPTY_ARRAY ) ) ; } } return returnType ; } | A special method handling the withTrait call for which the type checker knows more than what the type signature is able to tell . If withTrait is detected then a new class node is created representing the list of trait interfaces . |
7,005 | protected List < MethodNode > findMethodsWithGenerated ( ClassNode receiver , String name ) { List < MethodNode > methods = receiver . getMethods ( name ) ; if ( methods . isEmpty ( ) || receiver . isResolved ( ) ) return methods ; List < MethodNode > result = addGeneratedMethods ( receiver , methods ) ; return result ; } | This method returns the list of methods named against the supplied parameter that are defined on the specified receiver but it will also add non existing methods that will be generated afterwards by the compiler for example if a method is using default values and that the specified class node isn t compiled yet . |
7,006 | public static Object asType ( Object object , Class type ) throws Throwable { if ( object == null ) object = NullObject . getNullObject ( ) ; return invokeMethodN ( object . getClass ( ) , object , "asType" , new Object [ ] { type } ) ; } | Provides a hook for type coercion of the given object to the required type |
7,007 | public static Object castToType ( Object object , Class type ) throws Throwable { return DefaultTypeTransformation . castToType ( object , type ) ; } | Provides a hook for type casting of the given object to the required type |
7,008 | public static < K , V > Map < V , K > inverse ( Map < K , V > map ) { return inverse ( map , false ) ; } | Returns the inverse view of this map and duplicated key is not allowed |
7,009 | public static < K , V > Map < V , K > inverse ( Map < K , V > map , boolean force ) { Map < V , K > resultMap = new LinkedHashMap < > ( ) ; for ( Map . Entry < K , V > entry : map . entrySet ( ) ) { V value = entry . getValue ( ) ; if ( ! force && resultMap . containsKey ( value ) ) { throw new IllegalArgumentException ( "duplicated key found: " + value ) ; } resultMap . put ( value , entry . getKey ( ) ) ; } return Collections . < V , K > unmodifiableMap ( resultMap ) ; } | Returns the inverse view of this map |
7,010 | public QName get ( String localName ) { if ( uri != null && uri . length ( ) > 0 ) { if ( prefix != null ) { return new QName ( uri , localName , prefix ) ; } else { return new QName ( uri , localName ) ; } } else { return new QName ( localName ) ; } } | Returns the QName for the given localName . |
7,011 | public Object invokeMethod ( String methodName , Object args ) { Map attributes = findAttributes ( args ) ; for ( Iterator < Map . Entry > iter = attributes . entrySet ( ) . iterator ( ) ; iter . hasNext ( ) ; ) { Map . Entry entry = iter . next ( ) ; String key = String . valueOf ( entry . getKey ( ) ) ; if ( key . startsWith ( "xmlns:" ) ) { String prefix = key . substring ( 6 ) ; String uri = String . valueOf ( entry . getValue ( ) ) ; namespace ( uri , prefix ) ; iter . remove ( ) ; } } return super . invokeMethod ( methodName , args ) ; } | Allow automatic detection of namespace declared in the attributes |
7,012 | public void dup ( ) { ClassNode type = getTopOperand ( ) ; stack . add ( type ) ; MethodVisitor mv = controller . getMethodVisitor ( ) ; if ( type == ClassHelper . double_TYPE || type == ClassHelper . long_TYPE ) { mv . visitInsn ( DUP2 ) ; } else { mv . visitInsn ( DUP ) ; } } | duplicate top element |
7,013 | public void remove ( int amount ) { int size = stack . size ( ) ; for ( int i = size - 1 ; i > size - 1 - amount ; i -- ) { popWithMessage ( i ) ; } } | Remove amount elements from the operand stack without using pop . For example after a method invocation |
7,014 | public void swap ( ) { MethodVisitor mv = controller . getMethodVisitor ( ) ; int size = stack . size ( ) ; ClassNode b = stack . get ( size - 1 ) ; ClassNode a = stack . get ( size - 2 ) ; if ( isTwoSlotType ( a ) ) { if ( isTwoSlotType ( b ) ) { mv . visitInsn ( DUP2_X2 ) ; mv . visitInsn ( POP2 ) ; } else { mv . visitInsn ( DUP_X2 ) ; mv . visitInsn ( POP ) ; } } else { if ( isTwoSlotType ( b ) ) { mv . visitInsn ( DUP2_X1 ) ; mv . visitInsn ( POP2 ) ; } else { mv . visitInsn ( SWAP ) ; } } stack . set ( size - 1 , a ) ; stack . set ( size - 2 , b ) ; } | swap two top level operands |
7,015 | protected boolean writeStdCompare ( int type , boolean simulate ) { type = type - COMPARE_NOT_EQUAL ; if ( type < 0 || type > 7 ) return false ; if ( ! simulate ) { MethodVisitor mv = getController ( ) . getMethodVisitor ( ) ; OperandStack operandStack = getController ( ) . getOperandStack ( ) ; int bytecode = stdCompareCodes [ type ] ; Label l1 = new Label ( ) ; mv . visitJumpInsn ( bytecode , l1 ) ; mv . visitInsn ( ICONST_1 ) ; Label l2 = new Label ( ) ; mv . visitJumpInsn ( GOTO , l2 ) ; mv . visitLabel ( l1 ) ; mv . visitInsn ( ICONST_0 ) ; mv . visitLabel ( l2 ) ; operandStack . replace ( ClassHelper . boolean_TYPE , 2 ) ; } return true ; } | writes a std compare . This involves the tokens IF_ICMPEQ IF_ICMPNE IF_ICMPEQ IF_ICMPNE IF_ICMPGE IF_ICMPGT IF_ICMPLE and IF_ICMPLT |
7,016 | protected boolean writeSpaceship ( int type , boolean simulate ) { if ( type != COMPARE_TO ) return false ; if ( ! simulate ) { MethodVisitor mv = getController ( ) . getMethodVisitor ( ) ; mv . visitInsn ( DUP2 ) ; Label l1 = new Label ( ) ; mv . visitJumpInsn ( IF_ICMPGE , l1 ) ; mv . visitInsn ( POP2 ) ; mv . visitInsn ( ICONST_M1 ) ; Label l2 = new Label ( ) ; mv . visitJumpInsn ( GOTO , l2 ) ; mv . visitLabel ( l1 ) ; Label l3 = new Label ( ) ; mv . visitJumpInsn ( IF_ICMPNE , l3 ) ; mv . visitInsn ( ICONST_0 ) ; mv . visitJumpInsn ( GOTO , l2 ) ; mv . visitLabel ( l3 ) ; mv . visitInsn ( ICONST_1 ) ; getController ( ) . getOperandStack ( ) . replace ( ClassHelper . int_TYPE , 2 ) ; } return true ; } | writes the spaceship operator type should be COMPARE_TO |
7,017 | public final void chopMap ( ) { if ( mapChopped ) { return ; } mapChopped = true ; if ( this . map == null ) { for ( int index = 0 ; index < len ; index ++ ) { MapItemValue entry = ( MapItemValue ) items [ index ] ; Value value = entry . getValue ( ) ; if ( value == null ) continue ; if ( value . isContainer ( ) ) { chopContainer ( value ) ; } else { value . chop ( ) ; } } } else { for ( Map . Entry < String , Object > entry : map . entrySet ( ) ) { Object object = entry . getValue ( ) ; if ( object instanceof Value ) { Value value = ( Value ) object ; if ( value . isContainer ( ) ) { chopContainer ( value ) ; } else { value . chop ( ) ; } } else if ( object instanceof LazyValueMap ) { LazyValueMap m = ( LazyValueMap ) object ; m . chopMap ( ) ; } else if ( object instanceof ValueList ) { ValueList list = ( ValueList ) object ; list . chopList ( ) ; } } } } | Chop this map . |
7,018 | protected void convertGroovy ( AST node ) { while ( node != null ) { int type = node . getType ( ) ; switch ( type ) { case PACKAGE_DEF : packageDef ( node ) ; break ; case STATIC_IMPORT : case IMPORT : importDef ( node ) ; break ; case TRAIT_DEF : case CLASS_DEF : classDef ( node ) ; break ; case INTERFACE_DEF : interfaceDef ( node ) ; break ; case METHOD_DEF : methodDef ( node ) ; break ; case ENUM_DEF : enumDef ( node ) ; break ; case ANNOTATION_DEF : annotationDef ( node ) ; break ; default : { Statement statement = statement ( node ) ; output . addStatement ( statement ) ; } } node = node . getNextSibling ( ) ; } } | Converts the Antlr AST to the Groovy AST |
7,019 | protected Expression mapExpression ( AST mapNode ) { List expressions = new ArrayList ( ) ; AST elist = mapNode . getFirstChild ( ) ; if ( elist != null ) { assertNodeType ( ELIST , elist ) ; for ( AST node = elist . getFirstChild ( ) ; node != null ; node = node . getNextSibling ( ) ) { switch ( node . getType ( ) ) { case LABELED_ARG : case SPREAD_MAP_ARG : break ; case SPREAD_ARG : assertNodeType ( SPREAD_MAP_ARG , node ) ; break ; default : assertNodeType ( LABELED_ARG , node ) ; break ; } expressions . add ( mapEntryExpression ( node ) ) ; } } MapExpression mapExpression = new MapExpression ( expressions ) ; configureAST ( mapExpression , mapNode ) ; return mapExpression ; } | Typically only used for map constructors I think? |
7,020 | protected ClassNode buildName ( AST node ) { if ( isType ( TYPE , node ) ) { node = node . getFirstChild ( ) ; } ClassNode answer = null ; if ( isType ( DOT , node ) || isType ( OPTIONAL_DOT , node ) ) { answer = ClassHelper . make ( qualifiedName ( node ) ) ; } else if ( isPrimitiveTypeLiteral ( node ) ) { answer = ClassHelper . make ( node . getText ( ) ) ; } else if ( isType ( INDEX_OP , node ) || isType ( ARRAY_DECLARATOR , node ) ) { AST child = node . getFirstChild ( ) ; answer = buildName ( child ) . makeArray ( ) ; configureAST ( answer , node ) ; return answer ; } else { String identifier = node . getText ( ) ; answer = ClassHelper . make ( identifier ) ; } AST nextSibling = node . getNextSibling ( ) ; if ( isType ( INDEX_OP , nextSibling ) || isType ( ARRAY_DECLARATOR , node ) ) { answer = answer . makeArray ( ) ; configureAST ( answer , node ) ; return answer ; } else { configureAST ( answer , node ) ; return answer ; } } | Extracts an identifier from the Antlr AST and then performs a name resolution to see if the given name is a type from imports aliases or newly created classes |
7,021 | private static CallSite createPojoSite ( CallSite callSite , Object receiver , Object [ ] args ) { final Class klazz = receiver . getClass ( ) ; MetaClass metaClass = InvokerHelper . getMetaClass ( receiver ) ; if ( ! GroovyCategorySupport . hasCategoryInCurrentThread ( ) && metaClass instanceof MetaClassImpl ) { final MetaClassImpl mci = ( MetaClassImpl ) metaClass ; final ClassInfo info = mci . getTheCachedClass ( ) . classInfo ; if ( info . hasPerInstanceMetaClasses ( ) ) { return new PerInstancePojoMetaClassSite ( callSite , info ) ; } else { return mci . createPojoCallSite ( callSite , receiver , args ) ; } } ClassInfo info = ClassInfo . getClassInfo ( klazz ) ; if ( info . hasPerInstanceMetaClasses ( ) ) return new PerInstancePojoMetaClassSite ( callSite , info ) ; else return new PojoMetaClassSite ( callSite , metaClass ) ; } | otherwise or if method doesn t exist we make call via POJO meta class |
7,022 | public Map < K , V > clearAll ( ) { Map < K , V > result = new LinkedHashMap < K , V > ( map . size ( ) ) ; for ( Map . Entry < K , V > entry : map . entrySet ( ) ) { K key = entry . getKey ( ) ; V value = entry . getValue ( ) ; boolean removed = map . remove ( key , value ) ; if ( removed ) { result . put ( key , value ) ; } } return result ; } | Clear the cache |
7,023 | protected void dispatch ( Throwable object , boolean child ) { if ( object instanceof CompilationFailedException ) { report ( ( CompilationFailedException ) object , child ) ; } else if ( object instanceof GroovyExceptionInterface ) { report ( ( GroovyExceptionInterface ) object , child ) ; } else if ( object instanceof GroovyRuntimeException ) { report ( ( GroovyRuntimeException ) object , child ) ; } else if ( object instanceof Exception ) { report ( ( Exception ) object , child ) ; } else { report ( object , child ) ; } } | Runs the report once all initialization is complete . |
7,024 | protected void report ( CompilationFailedException e , boolean child ) { println ( e . toString ( ) ) ; stacktrace ( e , false ) ; } | For CompilationFailedException . |
7,025 | protected void report ( GroovyExceptionInterface e , boolean child ) { println ( ( ( Exception ) e ) . getMessage ( ) ) ; stacktrace ( ( Exception ) e , false ) ; } | For GroovyException . |
7,026 | protected void report ( Throwable e , boolean child ) { println ( ">>> a serious error occurred: " + e . getMessage ( ) ) ; stacktrace ( e , true ) ; } | For everything else . |
7,027 | public void setPadding ( int leftPadding , int topPadding , int rightPadding , int bottomPadding ) { mPaddingLeft = leftPadding ; mPaddingRight = rightPadding ; mPaddingTop = topPadding ; mPaddingBottom = bottomPadding ; } | set paddings for this layoutHelper |
7,028 | public void setMargin ( int leftMargin , int topMargin , int rightMargin , int bottomMargin ) { this . mMarginLeft = leftMargin ; this . mMarginTop = topMargin ; this . mMarginRight = rightMargin ; this . mMarginBottom = bottomMargin ; } | Set margins for this layoutHelper |
7,029 | public static OrientationHelperEx createOrientationHelper ( ExposeLinearLayoutManagerEx layoutManager , int orientation ) { switch ( orientation ) { case HORIZONTAL : return createHorizontalHelper ( layoutManager ) ; case VERTICAL : return createVerticalHelper ( layoutManager ) ; } throw new IllegalArgumentException ( "invalid orientation" ) ; } | Creates an OrientationHelper for the given LayoutManager and orientation . |
7,030 | public static OrientationHelperEx createHorizontalHelper ( ExposeLinearLayoutManagerEx layoutManager ) { return new OrientationHelperEx ( layoutManager ) { public int getEndAfterPadding ( ) { return mLayoutManager . getWidth ( ) - mLayoutManager . getPaddingRight ( ) ; } public int getEnd ( ) { return mLayoutManager . getWidth ( ) ; } public void offsetChildren ( int amount ) { mLayoutManager . offsetChildrenHorizontal ( amount ) ; } public int getStartAfterPadding ( ) { return mLayoutManager . getPaddingLeft ( ) ; } public int getDecoratedMeasurement ( View view ) { final RecyclerView . LayoutParams params = ( RecyclerView . LayoutParams ) view . getLayoutParams ( ) ; return ! mLayoutManager . isEnableMarginOverLap ( ) ? mLayoutManager . getDecoratedMeasuredWidth ( view ) + params . leftMargin + params . rightMargin : mLayoutManager . getDecoratedMeasuredWidth ( view ) ; } public int getDecoratedMeasurementInOther ( View view ) { final RecyclerView . LayoutParams params = ( RecyclerView . LayoutParams ) view . getLayoutParams ( ) ; return mLayoutManager . getDecoratedMeasuredHeight ( view ) + params . topMargin + params . bottomMargin ; } public int getDecoratedEnd ( View view ) { final RecyclerView . LayoutParams params = ( RecyclerView . LayoutParams ) view . getLayoutParams ( ) ; return ! mLayoutManager . isEnableMarginOverLap ( ) ? mLayoutManager . getDecoratedRight ( view ) + params . rightMargin : mLayoutManager . getDecoratedRight ( view ) ; } public int getDecoratedStart ( View view ) { final RecyclerView . LayoutParams params = ( RecyclerView . LayoutParams ) view . getLayoutParams ( ) ; return ! mLayoutManager . isEnableMarginOverLap ( ) ? mLayoutManager . getDecoratedLeft ( view ) - params . leftMargin : mLayoutManager . getDecoratedLeft ( view ) ; } public int getTotalSpace ( ) { return mLayoutManager . getWidth ( ) - mLayoutManager . getPaddingLeft ( ) - mLayoutManager . 
getPaddingRight ( ) ; } public void offsetChild ( View view , int offset ) { view . offsetLeftAndRight ( offset ) ; } public int getEndPadding ( ) { return mLayoutManager . getPaddingRight ( ) ; } } ; } | Creates a horizontal OrientationHelper for the given LayoutManager . |
7,031 | public static OrientationHelperEx createVerticalHelper ( ExposeLinearLayoutManagerEx layoutManager ) { return new OrientationHelperEx ( layoutManager ) { public int getEndAfterPadding ( ) { return mLayoutManager . getHeight ( ) - mLayoutManager . getPaddingBottom ( ) ; } public int getEnd ( ) { return mLayoutManager . getHeight ( ) ; } public void offsetChildren ( int amount ) { mLayoutManager . offsetChildrenVertical ( amount ) ; } public int getStartAfterPadding ( ) { return mLayoutManager . getPaddingTop ( ) ; } public int getDecoratedMeasurement ( View view ) { final RecyclerView . LayoutParams params = ( RecyclerView . LayoutParams ) view . getLayoutParams ( ) ; return ! mLayoutManager . isEnableMarginOverLap ( ) ? mLayoutManager . getDecoratedMeasuredHeight ( view ) + params . topMargin + params . bottomMargin : mLayoutManager . getDecoratedMeasuredHeight ( view ) ; } public int getDecoratedMeasurementInOther ( View view ) { final RecyclerView . LayoutParams params = ( RecyclerView . LayoutParams ) view . getLayoutParams ( ) ; return mLayoutManager . getDecoratedMeasuredWidth ( view ) + params . leftMargin + params . rightMargin ; } public int getDecoratedEnd ( View view ) { final RecyclerView . LayoutParams params = ( RecyclerView . LayoutParams ) view . getLayoutParams ( ) ; return ! mLayoutManager . isEnableMarginOverLap ( ) ? mLayoutManager . getDecoratedBottom ( view ) + params . bottomMargin : mLayoutManager . getDecoratedBottom ( view ) ; } public int getDecoratedStart ( View view ) { final RecyclerView . LayoutParams params = ( RecyclerView . LayoutParams ) view . getLayoutParams ( ) ; return ! mLayoutManager . isEnableMarginOverLap ( ) ? mLayoutManager . getDecoratedTop ( view ) - params . topMargin : mLayoutManager . getDecoratedTop ( view ) ; } public int getTotalSpace ( ) { return mLayoutManager . getHeight ( ) - mLayoutManager . getPaddingTop ( ) - mLayoutManager . 
getPaddingBottom ( ) ; } public void offsetChild ( View view , int offset ) { view . offsetTopAndBottom ( offset ) ; } public int getEndPadding ( ) { return mLayoutManager . getPaddingBottom ( ) ; } } ; } | Creates a vertical OrientationHelper for the given LayoutManager . |
7,032 | public final View nextView ( RecyclerView . Recycler recycler , LayoutStateWrapper layoutState , LayoutManagerHelper helper , LayoutChunkResult result ) { View view = layoutState . next ( recycler ) ; if ( view == null ) { if ( DEBUG && ! layoutState . hasScrapList ( ) ) { throw new RuntimeException ( "received null view when unexpected" ) ; } result . mFinished = true ; return null ; } helper . addChildView ( layoutState , view ) ; return view ; } | Retrieve next view and add it into layout this is to make sure that view are added by order |
7,033 | protected void handleStateOnResult ( LayoutChunkResult result , View [ ] views ) { if ( views == null ) return ; for ( int i = 0 ; i < views . length ; i ++ ) { View view = views [ i ] ; if ( view == null ) { continue ; } RecyclerView . LayoutParams params = ( RecyclerView . LayoutParams ) view . getLayoutParams ( ) ; if ( params . isItemRemoved ( ) || params . isItemChanged ( ) ) { result . mIgnoreConsumed = true ; } result . mFocusable = result . mFocusable || view . isFocusable ( ) ; if ( result . mFocusable && result . mIgnoreConsumed ) { break ; } } } | Helper methods to handle focus states for views |
7,034 | public Object instantiateItem ( ViewGroup container , int position ) { int itemViewType = getItemViewType ( position ) ; RecyclerView . ViewHolder holder = mRecycledViewPool . getRecycledView ( itemViewType ) ; if ( holder == null ) { holder = mAdapter . createViewHolder ( container , itemViewType ) ; } onBindViewHolder ( ( VH ) holder , position ) ; container . addView ( holder . itemView , new ViewPager . LayoutParams ( ) ) ; return holder ; } | Get view from position |
7,035 | public void endBatchedUpdates ( ) { if ( mCallback instanceof BatchedCallback ) { ( ( BatchedCallback ) mCallback ) . dispatchLastEvent ( ) ; } if ( mCallback == mBatchedCallback ) { mCallback = mBatchedCallback . mWrappedCallback ; } } | Ends the update transaction and dispatches any remaining event to the callback . |
7,036 | public T get ( int index ) throws IndexOutOfBoundsException { if ( index >= mSize || index < 0 ) { throw new IndexOutOfBoundsException ( "Asked to get item at " + index + " but size is " + mSize ) ; } return mData [ index ] ; } | Returns the item at the given index . |
7,037 | private Span getNextSpan ( int defaultLine , LayoutStateWrapper layoutState , LayoutManagerHelper helper ) { OrientationHelperEx orientationHelper = helper . getMainOrientationHelper ( ) ; boolean preferLastSpan = false ; if ( helper . getOrientation ( ) == HORIZONTAL ) { preferLastSpan = ( layoutState . getLayoutDirection ( ) == LAYOUT_START ) != helper . getReverseLayout ( ) ; } else { preferLastSpan = ( ( layoutState . getLayoutDirection ( ) == LAYOUT_START ) == helper . getReverseLayout ( ) ) == helper . isDoLayoutRTL ( ) ; } final int startIndex , endIndex , diff ; if ( preferLastSpan ) { startIndex = mNumLanes - 1 ; endIndex = - 1 ; diff = - 1 ; } else { startIndex = 0 ; endIndex = mNumLanes ; diff = 1 ; } if ( layoutState . getLayoutDirection ( ) == LAYOUT_END ) { Span min = null ; int minLine = Integer . MAX_VALUE ; for ( int i = startIndex ; i != endIndex ; i += diff ) { final Span other = mSpans [ i ] ; int otherLine = other . getEndLine ( defaultLine , orientationHelper ) ; if ( BuildConfig . DEBUG ) { Log . d ( TAG , "end starIndex " + i + " end otherLine " + otherLine ) ; } if ( otherLine < minLine ) { min = other ; minLine = otherLine ; } if ( BuildConfig . DEBUG ) { Log . d ( TAG , "end min " + min + " end minLine " + minLine ) ; } } if ( BuildConfig . DEBUG ) { Log . d ( TAG , "final end min " + min + " final end minLine " + minLine ) ; } return min ; } else { Span max = null ; int maxLine = Integer . MIN_VALUE ; for ( int i = startIndex ; i != endIndex ; i += diff ) { final Span other = mSpans [ i ] ; int otherLine = other . getStartLine ( defaultLine , orientationHelper ) ; if ( BuildConfig . DEBUG ) { Log . d ( TAG , "start starIndex " + i + " start otherLine " + otherLine ) ; } if ( otherLine > maxLine ) { max = other ; maxLine = otherLine ; } if ( BuildConfig . DEBUG ) { Log . d ( TAG , "start max " + max + " start maxLine " + maxLine ) ; } } if ( BuildConfig . DEBUG ) { Log . 
d ( TAG , "final start max " + max + " final start maxLine " + maxLine ) ; } return max ; } } | Finds the span for the next view . |
7,038 | protected int scrollInternalBy ( int dy , RecyclerView . Recycler recycler , RecyclerView . State state ) { if ( getChildCount ( ) == 0 || dy == 0 ) { return 0 ; } mLayoutState . mRecycle = true ; ensureLayoutStateExpose ( ) ; final int layoutDirection = dy > 0 ? LayoutState . LAYOUT_END : LayoutState . LAYOUT_START ; final int absDy = Math . abs ( dy ) ; updateLayoutStateExpose ( layoutDirection , absDy , true , state ) ; final int freeScroll = mLayoutState . mScrollingOffset ; mLayoutState . mOnRefresLayout = false ; final int consumed = freeScroll + fill ( recycler , mLayoutState , state , false ) ; if ( consumed < 0 ) { if ( DEBUG ) { Log . d ( TAG , "Don't have any more elements to scroll" ) ; } return 0 ; } final int scrolled = absDy > consumed ? layoutDirection * consumed : dy ; mOrientationHelper . offsetChildren ( - scrolled ) ; if ( DEBUG ) { Log . d ( TAG , "scroll req: " + dy + " scrolled: " + scrolled ) ; } return scrolled ; } | Handle scroll event internally cover both horizontal and vertical |
7,039 | protected void recycleChildren ( RecyclerView . Recycler recycler , int startIndex , int endIndex ) { if ( startIndex == endIndex ) { return ; } if ( DEBUG ) { Log . d ( TAG , "Recycling " + Math . abs ( startIndex - endIndex ) + " items" ) ; } if ( endIndex > startIndex ) { for ( int i = endIndex - 1 ; i >= startIndex ; i -- ) { removeAndRecycleViewAt ( i , recycler ) ; } } else { for ( int i = startIndex ; i > endIndex ; i -- ) { removeAndRecycleViewAt ( i , recycler ) ; } } } | Recycles children between given indices . |
7,040 | private void logChildren ( ) { Log . d ( TAG , "internal representation of views on the screen" ) ; for ( int i = 0 ; i < getChildCount ( ) ; i ++ ) { View child = getChildAt ( i ) ; Log . d ( TAG , "item " + getPosition ( child ) + ", coord:" + mOrientationHelper . getDecoratedStart ( child ) ) ; } Log . d ( TAG , "==============" ) ; } | Used for debugging . Logs the internal representation of children to default logger . |
7,041 | public int size ( ) { int count = 0 ; for ( int i = 0 , size = mScrapLength . size ( ) ; i < size ; i ++ ) { int val = mScrapLength . valueAt ( i ) ; count += val ; } return count ; } | Get all items size in current pool |
7,042 | @ SuppressWarnings ( "unchecked" ) public void putRecycledView ( RecyclerView . ViewHolder scrap ) { int viewType = scrap . getItemViewType ( ) ; if ( mMaxScrap . indexOfKey ( viewType ) < 0 ) { mMaxScrap . put ( viewType , DEFAULT_MAX_SIZE ) ; setMaxRecycledViews ( viewType , DEFAULT_MAX_SIZE ) ; } int scrapHeapSize = mScrapLength . indexOfKey ( viewType ) >= 0 ? this . mScrapLength . get ( viewType ) : 0 ; if ( this . mMaxScrap . get ( viewType ) > scrapHeapSize ) { mInnerPool . putRecycledView ( scrap ) ; mScrapLength . put ( viewType , scrapHeapSize + 1 ) ; } else { destroyViewHolder ( scrap ) ; } } | This will be only run in UI Thread |
7,043 | public int getOffsetToStart ( ) { if ( getChildCount ( ) == 0 ) return - 1 ; final View view = getChildAt ( 0 ) ; if ( view == null ) { return - 1 ; } int position = getPosition ( view ) ; final int idx = findRangeLength ( Range . create ( position , position ) ) ; if ( idx < 0 || idx >= mRangeLengths . size ( ) ) { return - 1 ; } int offset = - mOrientationHelper . getDecoratedStart ( view ) ; for ( int i = 0 ; i < idx ; i ++ ) { Pair < Range < Integer > , Integer > pair = mRangeLengths . get ( i ) ; if ( pair != null ) { offset += pair . second ; } } return offset ; } | Return current position related to the top only works when scrolling from the top |
7,044 | private int updateSpecWithExtra ( int spec , int startInset , int endInset ) { if ( startInset == 0 && endInset == 0 ) { return spec ; } final int mode = View . MeasureSpec . getMode ( spec ) ; if ( mode == View . MeasureSpec . AT_MOST || mode == View . MeasureSpec . EXACTLY ) { int size = View . MeasureSpec . getSize ( spec ) ; if ( size - startInset - endInset < 0 ) { return View . MeasureSpec . makeMeasureSpec ( 0 , mode ) ; } else { return View . MeasureSpec . makeMeasureSpec ( View . MeasureSpec . getSize ( spec ) - startInset - endInset , mode ) ; } } return spec ; } | Update measure spec with insets |
7,045 | public int getItemViewType ( int position ) { Pair < AdapterDataObserver , Adapter > p = findAdapterByPosition ( position ) ; if ( p == null ) { return RecyclerView . INVALID_TYPE ; } int subItemType = p . second . getItemViewType ( position - p . first . mStartPosition ) ; if ( subItemType < 0 ) { return subItemType ; } if ( mHasConsistItemType ) { mItemTypeAry . put ( subItemType , p . second ) ; return subItemType ; } int index = p . first . mIndex ; return ( int ) Cantor . getCantor ( subItemType , index ) ; } | Big integer of itemType returned by delegated adapter may lead to failed |
7,046 | synchronized void trim ( long startKey , long endKey ) { final Iterator < Chunk > descendingIterator = chunks . descendingIterator ( ) ; while ( descendingIterator . hasNext ( ) ) { final Chunk currentTail = descendingIterator . next ( ) ; if ( isFirstElementIsEmptyOrGreaterEqualThanKey ( currentTail , endKey ) ) { freeChunk ( currentTail ) ; descendingIterator . remove ( ) ; } else { currentTail . cursor = findFirstIndexOfGreaterEqualElements ( currentTail . keys , currentTail . startIndex , currentTail . cursor , endKey ) ; break ; } } final Iterator < Chunk > iterator = chunks . iterator ( ) ; while ( iterator . hasNext ( ) ) { final Chunk currentHead = iterator . next ( ) ; if ( isLastElementIsLessThanKey ( currentHead , startKey ) ) { freeChunk ( currentHead ) ; iterator . remove ( ) ; } else { final int newStartIndex = findFirstIndexOfGreaterEqualElements ( currentHead . keys , currentHead . startIndex , currentHead . cursor , startKey ) ; if ( currentHead . startIndex != newStartIndex ) { currentHead . startIndex = newStartIndex ; currentHead . chunkSize = currentHead . cursor - currentHead . startIndex ; } break ; } } } | Try to trim all beyond specified boundaries . |
/**
 * Adds a value with an explicit timestamp to the decaying reservoir,
 * weighting it by its age relative to the reservoir start time. Uses
 * priority sampling: once the reservoir is full, a new sample only replaces
 * the lowest-priority entry if it out-prioritises it.
 */
public void update(long value, long timestamp) {
    rescaleIfNeeded();
    lockForRegularUsage();
    try {
        final double itemWeight = weight(timestamp - startTime);
        final WeightedSample sample = new WeightedSample(value, itemWeight);
        // Random priority proportional to weight; higher weight => more
        // likely the sample survives in the reservoir.
        final double priority = itemWeight / ThreadLocalRandom.current().nextDouble();
        final long newCount = count.incrementAndGet();
        if (newCount <= size) {
            values.put(priority, sample);
        } else {
            // Reservoir full: evict the current minimum-priority sample if
            // the new one ranks higher. The remove loop re-reads firstKey()
            // in case another writer already removed `first`.
            Double first = values.firstKey();
            if (first < priority && values.putIfAbsent(priority, sample) == null) {
                while (values.remove(first) == null) {
                    first = values.firstKey();
                }
            }
        }
    } finally {
        unlockForRegularUsage();
    }
}
7,048 | public static String name ( Class < ? > klass , String ... names ) { return name ( klass . getName ( ) , names ) ; } | Concatenates a class name and elements to form a dotted name eliding any null values or empty strings . |
7,049 | public void registerAll ( String prefix , MetricSet metrics ) throws IllegalArgumentException { for ( Map . Entry < String , Metric > entry : metrics . getMetrics ( ) . entrySet ( ) ) { if ( entry . getValue ( ) instanceof MetricSet ) { registerAll ( name ( prefix , entry . getKey ( ) ) , ( MetricSet ) entry . getValue ( ) ) ; } else { register ( name ( prefix , entry . getKey ( ) ) , entry . getValue ( ) ) ; } } } | Given a metric set registers them with the given prefix prepended to their names . |
7,050 | private void printIfEnabled ( MetricAttribute type , String status ) { if ( getDisabledMetricAttributes ( ) . contains ( type ) ) { return ; } output . println ( status ) ; } | Print only if the attribute is enabled |
7,051 | static String sanitize ( String string ) { return WHITESPACE . matcher ( string . trim ( ) ) . replaceAll ( DASH ) ; } | Trims the string and replaces all whitespace characters with the provided symbol |
7,052 | public synchronized static HealthCheckRegistry setDefault ( String name ) { final HealthCheckRegistry registry = getOrCreate ( name ) ; return setDefault ( name , registry ) ; } | Creates a new registry and sets it as the default one under the provided name . |
7,053 | public static HealthCheckRegistry setDefault ( String name , HealthCheckRegistry healthCheckRegistry ) { if ( defaultRegistryName . compareAndSet ( null , name ) ) { add ( name , healthCheckRegistry ) ; return healthCheckRegistry ; } throw new IllegalStateException ( "Default health check registry is already set." ) ; } | Sets the provided registry as the default one under the provided name |
7,054 | synchronized void start ( long initialDelay , long period , TimeUnit unit , Runnable runnable ) { if ( this . scheduledFuture != null ) { throw new IllegalArgumentException ( "Reporter already started" ) ; } this . scheduledFuture = executor . scheduleAtFixedRate ( runnable , initialDelay , period , unit ) ; } | Starts the reporter polling at the given period with the specific runnable action . Visible only for testing . |
7,055 | public HealthCheck . Result runHealthCheck ( String name ) throws NoSuchElementException { final HealthCheck healthCheck = healthChecks . get ( name ) ; if ( healthCheck == null ) { throw new NoSuchElementException ( "No health check named " + name + " exists" ) ; } return healthCheck . execute ( ) ; } | Runs the health check with the given name . |
7,056 | public SortedMap < String , HealthCheck . Result > runHealthChecks ( HealthCheckFilter filter ) { final SortedMap < String , HealthCheck . Result > results = new TreeMap < > ( ) ; for ( Map . Entry < String , HealthCheck > entry : healthChecks . entrySet ( ) ) { final String name = entry . getKey ( ) ; final HealthCheck healthCheck = entry . getValue ( ) ; if ( filter . matches ( name , healthCheck ) ) { final Result result = entry . getValue ( ) . execute ( ) ; results . put ( entry . getKey ( ) , result ) ; } } return Collections . unmodifiableSortedMap ( results ) ; } | Runs the registered health checks matching the filter and returns a map of the results . |
7,057 | public SortedMap < String , HealthCheck . Result > runHealthChecks ( ExecutorService executor ) { return runHealthChecks ( executor , HealthCheckFilter . ALL ) ; } | Runs the registered health checks in parallel and returns a map of the results . |
7,058 | public SortedMap < String , HealthCheck . Result > runHealthChecks ( ExecutorService executor , HealthCheckFilter filter ) { final Map < String , Future < HealthCheck . Result > > futures = new HashMap < > ( ) ; for ( final Map . Entry < String , HealthCheck > entry : healthChecks . entrySet ( ) ) { final String name = entry . getKey ( ) ; final HealthCheck healthCheck = entry . getValue ( ) ; if ( filter . matches ( name , healthCheck ) ) { futures . put ( name , executor . submit ( ( ) -> healthCheck . execute ( ) ) ) ; } } final SortedMap < String , HealthCheck . Result > results = new TreeMap < > ( ) ; for ( Map . Entry < String , Future < Result > > entry : futures . entrySet ( ) ) { try { results . put ( entry . getKey ( ) , entry . getValue ( ) . get ( ) ) ; } catch ( Exception e ) { LOGGER . warn ( "Error executing health check {}" , entry . getKey ( ) , e ) ; results . put ( entry . getKey ( ) , HealthCheck . Result . unhealthy ( e ) ) ; } } return Collections . unmodifiableSortedMap ( results ) ; } | Runs the registered health checks matching the filter in parallel and returns a map of the results . |
7,059 | public void shutdown ( ) { asyncExecutorService . shutdown ( ) ; try { if ( ! asyncExecutorService . awaitTermination ( 1 , TimeUnit . SECONDS ) ) { asyncExecutorService . shutdownNow ( ) ; } } catch ( InterruptedException ie ) { asyncExecutorService . shutdownNow ( ) ; Thread . currentThread ( ) . interrupt ( ) ; } } | Shuts down the scheduled executor for async health checks |
7,060 | public RequestCreator resizeDimen ( int targetWidthResId , int targetHeightResId ) { Resources resources = picasso . context . getResources ( ) ; int targetWidth = resources . getDimensionPixelSize ( targetWidthResId ) ; int targetHeight = resources . getDimensionPixelSize ( targetHeightResId ) ; return resize ( targetWidth , targetHeight ) ; } | Resize the image to the specified dimension size . Use 0 as desired dimension to resize keeping aspect ratio . |
7,061 | public RequestCreator rotate ( float degrees , float pivotX , float pivotY ) { data . rotate ( degrees , pivotX , pivotY ) ; return this ; } | Rotate the image by the specified degrees around a pivot point . |
/**
 * Synchronously fulfills this request on the calling thread. Must not be
 * called from the main thread.
 *
 * @return the loaded bitmap, or null if no image source was set
 * @throws IOException if the image could not be loaded
 * @throws IllegalStateException if used with fit() or called on the main thread
 */
public Bitmap get() throws IOException {
    long started = System.nanoTime();
    checkNotMain();
    if (deferred) {
        // fit() needs a measured view, which a blocking get() never has.
        throw new IllegalStateException("Fit cannot be used with get.");
    }
    if (!data.hasImage()) {
        return null;
    }
    Request request = createRequest(started);
    // Hunt the bitmap directly on this thread instead of dispatching.
    Action action = new GetAction(picasso, request);
    RequestHandler.Result result =
        forRequest(picasso, picasso.dispatcher, picasso.cache, picasso.stats, action).hunt();
    Bitmap bitmap = result.getBitmap();
    if (bitmap != null && shouldWriteToMemoryCache(request.memoryPolicy)) {
        picasso.cache.set(request.key, bitmap);
    }
    return bitmap;
}
7,063 | private Request createRequest ( long started ) { int id = nextId . getAndIncrement ( ) ; Request request = data . build ( ) ; request . id = id ; request . started = started ; boolean loggingEnabled = picasso . loggingEnabled ; if ( loggingEnabled ) { log ( OWNER_MAIN , VERB_CREATED , request . plainId ( ) , request . toString ( ) ) ; } Request transformed = picasso . transformRequest ( request ) ; if ( transformed != request ) { transformed . id = id ; transformed . started = started ; if ( loggingEnabled ) { log ( OWNER_MAIN , VERB_CHANGED , transformed . logId ( ) , "into " + transformed ) ; } } return transformed ; } | Create the request optionally passing it through the request transformer . |
7,064 | static void flushStackLocalLeaks ( Looper looper ) { Handler handler = new Handler ( looper ) { public void handleMessage ( Message msg ) { sendMessageDelayed ( obtainMessage ( ) , THREAD_LEAK_CLEANING_MS ) ; } } ; handler . sendMessageDelayed ( handler . obtainMessage ( ) , THREAD_LEAK_CLEANING_MS ) ; } | Prior to Android 5 HandlerThread always keeps a stack local reference to the last message that was sent to it . This method makes sure that stack local reference never stays there for too long by sending new messages to it every second . |
7,065 | private void fix ( ) { Set < String > nodeGroups = appContext . getTaskTrackerManager ( ) . getNodeGroups ( ) ; if ( CollectionUtils . isEmpty ( nodeGroups ) ) { return ; } for ( String nodeGroup : nodeGroups ) { List < JobPo > deadJobPo = appContext . getExecutableJobQueue ( ) . getDeadJob ( nodeGroup , SystemClock . now ( ) - MAX_TIME_OUT ) ; if ( CollectionUtils . isNotEmpty ( deadJobPo ) ) { for ( JobPo jobPo : deadJobPo ) { appContext . getExecutableJobQueue ( ) . resume ( jobPo ) ; LOGGER . info ( "Fix executable job : {} " , JSON . toJSONString ( jobPo ) ) ; } } } } | fix the job that running is true and gmtModified too old |
/**
 * Copies fields flagged as changed on the incoming rule into this rule,
 * marking each copied field in the local change map.
 *
 * @param rule the rule to copy from; must target the same tree, level and order
 * @param overwrite when true, copy every field regardless of its change flag
 * @return true if at least one field on this rule is now flagged as changed
 * @throws IllegalArgumentException if rule is null or tree id/level/order differ
 */
public boolean copyChanges(final TreeRule rule, final boolean overwrite) {
    if (rule == null) {
        throw new IllegalArgumentException("Cannot copy a null rule");
    }
    if (tree_id != rule.tree_id) {
        throw new IllegalArgumentException("Tree IDs do not match");
    }
    if (level != rule.level) {
        throw new IllegalArgumentException("Levels do not match");
    }
    if (order != rule.order) {
        throw new IllegalArgumentException("Orders do not match");
    }
    // For each field: copy when overwriting, or when the source marked it
    // changed AND the value actually differs; record the local change flag.
    if (overwrite || (rule.changed.get("type") && type != rule.type)) {
        type = rule.type;
        changed.put("type", true);
    }
    if (overwrite || (rule.changed.get("field") && !field.equals(rule.field))) {
        field = rule.field;
        changed.put("field", true);
    }
    if (overwrite || (rule.changed.get("custom_field") && !custom_field.equals(rule.custom_field))) {
        custom_field = rule.custom_field;
        changed.put("custom_field", true);
    }
    if (overwrite || (rule.changed.get("regex") && !regex.equals(rule.regex))) {
        // setRegex() compiles the pattern and sets its own change flag.
        setRegex(rule.regex);
    }
    if (overwrite || (rule.changed.get("separator") && !separator.equals(rule.separator))) {
        separator = rule.separator;
        changed.put("separator", true);
    }
    if (overwrite || (rule.changed.get("description") && !description.equals(rule.description))) {
        description = rule.description;
        changed.put("description", true);
    }
    if (overwrite || (rule.changed.get("notes") && !notes.equals(rule.notes))) {
        notes = rule.notes;
        changed.put("notes", true);
    }
    if (overwrite || (rule.changed.get("regex_group_idx") && regex_group_idx != rule.regex_group_idx)) {
        regex_group_idx = rule.regex_group_idx;
        changed.put("regex_group_idx", true);
    }
    if (overwrite || (rule.changed.get("display_format") && !display_format.equals(rule.display_format))) {
        display_format = rule.display_format;
        changed.put("display_format", true);
    }
    // Report whether anything on this rule is now flagged as changed.
    for (boolean has_changes : changed.values()) {
        if (has_changes) {
            return true;
        }
    }
    return false;
}
7,067 | public static TreeRule parseFromStorage ( final KeyValue column ) { if ( column . value ( ) == null ) { throw new IllegalArgumentException ( "Tree rule column value was null" ) ; } final TreeRule rule = JSON . parseToObject ( column . value ( ) , TreeRule . class ) ; rule . initializeChangedMap ( ) ; return rule ; } | Parses a rule from the given column . Used by the Tree class when scanning a row for rules . |
/**
 * Asynchronously fetches the rule at the given tree/level/order from storage.
 *
 * @return a deferred resolving to the rule, or null if it does not exist
 * @throws IllegalArgumentException if the tree id, level or order is invalid
 */
public static Deferred<TreeRule> fetchRule(final TSDB tsdb, final int tree_id,
    final int level, final int order) {
    if (tree_id < 1 || tree_id > 65535) {
        throw new IllegalArgumentException("Invalid Tree ID");
    }
    if (level < 0) {
        throw new IllegalArgumentException("Invalid rule level");
    }
    if (order < 0) {
        throw new IllegalArgumentException("Invalid rule order");
    }
    // Fetch just the single rule column from the tree row.
    final GetRequest get = new GetRequest(tsdb.treeTable(), Tree.idToBytes(tree_id));
    get.family(Tree.TREE_FAMILY());
    get.qualifier(getQualifier(level, order));

    /** Parses the row into a TreeRule, or yields null when the row is empty. */
    final class FetchCB implements Callback<Deferred<TreeRule>, ArrayList<KeyValue>> {
        public Deferred<TreeRule> call(final ArrayList<KeyValue> row) {
            if (row == null || row.isEmpty()) {
                return Deferred.fromResult(null);
            }
            return Deferred.fromResult(parseFromStorage(row.get(0)));
        }
    }

    return tsdb.getClient().get(get).addCallbackDeferring(new FetchCB());
}
7,069 | public static Deferred < Object > deleteRule ( final TSDB tsdb , final int tree_id , final int level , final int order ) { if ( tree_id < 1 || tree_id > 65535 ) { throw new IllegalArgumentException ( "Invalid Tree ID" ) ; } if ( level < 0 ) { throw new IllegalArgumentException ( "Invalid rule level" ) ; } if ( order < 0 ) { throw new IllegalArgumentException ( "Invalid rule order" ) ; } final DeleteRequest delete = new DeleteRequest ( tsdb . treeTable ( ) , Tree . idToBytes ( tree_id ) , Tree . TREE_FAMILY ( ) , getQualifier ( level , order ) ) ; return tsdb . getClient ( ) . delete ( delete ) ; } | Attempts to delete the specified rule from storage |
/**
 * Deletes every rule column belonging to the given tree in a single call.
 *
 * @return a deferred tracking the delete, resolving to null if no row exists
 * @throws IllegalArgumentException if the tree id is out of range
 */
public static Deferred<Object> deleteAllRules(final TSDB tsdb, final int tree_id) {
    if (tree_id < 1 || tree_id > 65535) {
        throw new IllegalArgumentException("Invalid Tree ID");
    }
    // Scan the whole tree row so we can pick out just the rule columns.
    final GetRequest get = new GetRequest(tsdb.treeTable(), Tree.idToBytes(tree_id));
    get.family(Tree.TREE_FAMILY());

    /** Collects rule qualifiers from the row and issues one batched delete. */
    final class GetCB implements Callback<Deferred<Object>, ArrayList<KeyValue>> {
        public Deferred<Object> call(final ArrayList<KeyValue> row) throws Exception {
            if (row == null || row.isEmpty()) {
                return Deferred.fromResult(null);
            }
            final ArrayList<byte[]> qualifiers = new ArrayList<byte[]>(row.size());
            for (KeyValue column : row) {
                // Only columns whose qualifier starts with the rule prefix are rules.
                if (column.qualifier().length > RULE_PREFIX.length &&
                    Bytes.memcmp(RULE_PREFIX, column.qualifier(), 0, RULE_PREFIX.length) == 0) {
                    qualifiers.add(column.qualifier());
                }
            }
            final DeleteRequest delete = new DeleteRequest(tsdb.treeTable(),
                Tree.idToBytes(tree_id), Tree.TREE_FAMILY(),
                qualifiers.toArray(new byte[qualifiers.size()][]));
            return tsdb.getClient().delete(delete);
        }
    }

    return tsdb.getClient().get(get).addCallbackDeferring(new GetCB());
}
7,071 | public static TreeRuleType stringToType ( final String type ) { if ( type == null || type . isEmpty ( ) ) { throw new IllegalArgumentException ( "Rule type was empty" ) ; } else if ( type . toLowerCase ( ) . equals ( "metric" ) ) { return TreeRuleType . METRIC ; } else if ( type . toLowerCase ( ) . equals ( "metric_custom" ) ) { return TreeRuleType . METRIC_CUSTOM ; } else if ( type . toLowerCase ( ) . equals ( "tagk" ) ) { return TreeRuleType . TAGK ; } else if ( type . toLowerCase ( ) . equals ( "tagk_custom" ) ) { return TreeRuleType . TAGK_CUSTOM ; } else if ( type . toLowerCase ( ) . equals ( "tagv_custom" ) ) { return TreeRuleType . TAGV_CUSTOM ; } else { throw new IllegalArgumentException ( "Unrecognized rule type" ) ; } } | Parses a string into a rule type enumerator |
7,072 | public static byte [ ] getQualifier ( final int level , final int order ) { final byte [ ] suffix = ( level + ":" + order ) . getBytes ( CHARSET ) ; final byte [ ] qualifier = new byte [ RULE_PREFIX . length + suffix . length ] ; System . arraycopy ( RULE_PREFIX , 0 , qualifier , 0 , RULE_PREFIX . length ) ; System . arraycopy ( suffix , 0 , qualifier , RULE_PREFIX . length , suffix . length ) ; return qualifier ; } | Completes the column qualifier given a level and order using the configured prefix |
7,073 | public DataPoints [ ] evaluate ( TSQuery data_query , List < DataPoints [ ] > results , List < String > params ) { if ( results == null || results . isEmpty ( ) ) { return new DataPoints [ ] { } ; } if ( params == null || params . isEmpty ( ) ) { throw new IllegalArgumentException ( "Need amount of timeshift to perform timeshift" ) ; } String param = params . get ( 0 ) ; if ( param == null || param . length ( ) == 0 ) { throw new IllegalArgumentException ( "Invalid timeshift='" + param + "'" ) ; } param = param . trim ( ) ; long timeshift = - 1 ; if ( param . startsWith ( "'" ) && param . endsWith ( "'" ) ) { timeshift = parseParam ( param ) ; } else { throw new RuntimeException ( "Invalid timeshift parameter: eg '10min'" ) ; } if ( timeshift <= 0 ) { throw new RuntimeException ( "timeshift <= 0" ) ; } return performShift ( results . get ( 0 ) , timeshift ) ; } | in place modify of TsdbResult array to increase timestamps by timeshift |
7,074 | DataPoints shift ( final DataPoints points , final long timeshift ) { final List < DataPoint > dps = new ArrayList < DataPoint > ( ) ; for ( DataPoint pt : points ) { dps . add ( shift ( pt , timeshift ) ) ; } final DataPoint [ ] results = new DataPoint [ dps . size ( ) ] ; dps . toArray ( results ) ; return new PostAggregatedDataPoints ( points , results ) ; } | Adjusts the timestamp of each datapoint by timeshift |
/**
 * Generates a row key from a TSUID and an absolute timestamp. The timestamp
 * is normalized down to the start of its hourly span. When salting is
 * enabled the key is prefixed with salt bytes, filled in at the end.
 *
 * @throws IllegalArgumentException if the TSUID is shorter than a metric UID
 */
public static byte[] rowKeyFromTSUID(final TSDB tsdb, final byte[] tsuid, final long timestamp) {
    if (tsuid.length < tsdb.metrics.width()) {
        throw new IllegalArgumentException("TSUID appears to be missing the metric");
    }
    final long base_time;
    if ((timestamp & Const.SECOND_MASK) != 0) {
        // Millisecond timestamp: convert to seconds before normalizing.
        base_time = ((timestamp / 1000) - ((timestamp / 1000) % Const.MAX_TIMESPAN));
    } else {
        base_time = (timestamp - (timestamp % Const.MAX_TIMESPAN));
    }
    // Key layout: [salt][metric uid][base time][tag uids]
    final byte[] row = new byte[Const.SALT_WIDTH() + tsuid.length + Const.TIMESTAMP_BYTES];
    System.arraycopy(tsuid, 0, row, Const.SALT_WIDTH(), tsdb.metrics.width());
    Bytes.setInt(row, (int) base_time, Const.SALT_WIDTH() + tsdb.metrics.width());
    System.arraycopy(tsuid, tsdb.metrics.width(), row,
        Const.SALT_WIDTH() + tsdb.metrics.width() + Const.TIMESTAMP_BYTES,
        tsuid.length - tsdb.metrics.width());
    RowKey.prefixKeyWithSalt(row);
    return row;
}
/**
 * Computes and writes the salt bytes at the front of the given row key.
 * The salt bucket is derived from a hash of the metric and tag UIDs
 * (excluding the timestamp) modulo the bucket count. No-op when salting is
 * disabled, when the key is too short, or when the metric portion is still
 * zeroed out.
 */
public static void prefixKeyWithSalt(final byte[] row_key) {
    if (Const.SALT_WIDTH() > 0) {
        if (row_key.length < (Const.SALT_WIDTH() + TSDB.metrics_width()) ||
            (Bytes.memcmp(row_key, new byte[Const.SALT_WIDTH() + TSDB.metrics_width()],
                Const.SALT_WIDTH(), TSDB.metrics_width()) == 0)) {
            // Too short, or the metric UID has not been written yet.
            return;
        }
        final int tags_start = Const.SALT_WIDTH() + TSDB.metrics_width() + Const.TIMESTAMP_BYTES;
        // Hash input is metric UID + tag UIDs, skipping salt and timestamp so
        // every row of a time series lands in the same bucket.
        final byte[] salt_base =
            new byte[row_key.length - Const.SALT_WIDTH() - Const.TIMESTAMP_BYTES];
        System.arraycopy(row_key, Const.SALT_WIDTH(), salt_base, 0, TSDB.metrics_width());
        System.arraycopy(row_key, tags_start, salt_base, TSDB.metrics_width(),
            row_key.length - tags_start);
        int modulo = Arrays.hashCode(salt_base) % Const.SALT_BUCKETS();
        if (modulo < 0) {
            // Java's % operator can return a negative remainder.
            modulo = modulo * -1;
        }
        final byte[] salt = getSaltBytes(modulo);
        System.arraycopy(salt, 0, row_key, 0, Const.SALT_WIDTH());
    }
}
7,077 | public static int rowKeyContainsMetric ( final byte [ ] metric , final byte [ ] row_key ) { int idx = Const . SALT_WIDTH ( ) ; for ( int i = 0 ; i < metric . length ; i ++ , idx ++ ) { if ( metric [ i ] != row_key [ idx ] ) { return ( metric [ i ] & 0xFF ) - ( row_key [ idx ] & 0xFF ) ; } } return 0 ; } | Checks a row key to determine if it contains the metric UID . If salting is enabled we skip the salt bytes . |
7,078 | public static long getRandomUID ( final int width ) { if ( width > MAX_WIDTH ) { throw new IllegalArgumentException ( "Expecting to return an unsigned long " + "random integer, it can not be larger than " + MAX_WIDTH + " bytes wide" ) ; } final byte [ ] bytes = new byte [ width ] ; random_generator . nextBytes ( bytes ) ; long value = 0 ; for ( int i = 0 ; i < bytes . length ; i ++ ) { value <<= 8 ; value |= bytes [ i ] & 0xFF ; } return value != 0 ? value : value + 1 ; } | Get the next random UID . It creates random bytes then convert it to an unsigned long . |
7,079 | public static ExpressionTree parse ( final String expression , final List < String > metric_queries , final TSQuery data_query ) { if ( expression == null || expression . isEmpty ( ) ) { throw new IllegalArgumentException ( "Expression may not be null or empty" ) ; } if ( expression . indexOf ( '(' ) == - 1 || expression . indexOf ( ')' ) == - 1 ) { throw new IllegalArgumentException ( "Invalid Expression: " + expression ) ; } final ExpressionReader reader = new ExpressionReader ( expression . toCharArray ( ) ) ; reader . skipWhitespaces ( ) ; final String function_name = reader . readFuncName ( ) ; final Expression root_expression = ExpressionFactory . getByName ( function_name ) ; final ExpressionTree root = new ExpressionTree ( root_expression , data_query ) ; reader . skipWhitespaces ( ) ; if ( reader . peek ( ) == '(' ) { reader . next ( ) ; parse ( reader , metric_queries , root , data_query ) ; } return root ; } | Parses an expression into a tree |
7,080 | private static void parseParam ( final String param , final List < String > metric_queries , final ExpressionTree root , final TSQuery data_query , final int index ) { if ( param == null || param . length ( ) == 0 ) { throw new IllegalArgumentException ( "Parameter cannot be null or empty" ) ; } if ( param . indexOf ( '(' ) > 0 && param . indexOf ( ')' ) > 0 ) { final ExpressionTree sub_tree = parse ( param , metric_queries , data_query ) ; root . addSubExpression ( sub_tree , index ) ; } else if ( param . indexOf ( ':' ) >= 0 ) { metric_queries . add ( param ) ; root . addSubMetricQuery ( param , metric_queries . size ( ) - 1 , index ) ; } else { root . addFunctionParameter ( param ) ; } } | Helper that parses out the parameter from the expression |
/**
 * Resolves the given tag key UIDs to their string names, caching them in
 * {@code aggregated_tags}. Returns a deferred yielding null immediately if
 * the names were already resolved.
 */
private Deferred<List<String>> resolveAggTags(final Set<byte[]> tagks) {
    if (aggregated_tags != null) {
        // Already resolved on a previous call.
        return Deferred.fromResult(null);
    }
    aggregated_tags = new ArrayList<String>(tagks.size());
    // Kick off all name lookups in parallel.
    final List<Deferred<String>> names = new ArrayList<Deferred<String>>(tagks.size());
    for (final byte[] tagk : tagks) {
        names.add(tsdb.tag_names.getNameAsync(tagk));
    }

    /** Copies the resolved names into the cached list. */
    final class ResolveCB implements Callback<List<String>, ArrayList<String>> {
        public List<String> call(final ArrayList<String> names) throws Exception {
            for (final String name : names) {
                aggregated_tags.add(name);
            }
            return aggregated_tags;
        }
    }

    return Deferred.group(names).addCallback(new ResolveCB());
}
/**
 * Advances every sub-iterator to the current aligned timestamp, then
 * computes the next timestamp across all series.
 *
 * @throws IllegalDataException if there is no more data
 */
public void next() {
    if (!hasNext()) {
        throw new IllegalDataException("No more data");
    }
    for (final ITimeSyncedIterator sub : queries.values()) {
        sub.next(timestamp);
    }
    timestamp = nextTimestamp();
}
/**
 * Flattens tag key/value UID pairs (and optionally aggregated tag keys) into
 * a single byte array, optionally restricted to the tag keys present in the
 * sub-query.
 *
 * @return the packed tag bytes, or an empty array if there are no tags
 */
static byte[] flattenTags(final boolean use_query_tags, final boolean include_agg_tags,
    final ByteMap<byte[]> tags, final ByteSet agg_tags, final ITimeSyncedIterator sub) {
    if (tags.isEmpty()) {
        return HBaseClient.EMPTY_ARRAY;
    }
    final ByteSet query_tagks;
    final int tag_size;
    if (use_query_tags) {
        // Count only the tags that appear in the query's tag key set so we
        // can size the output array exactly.
        int i = 0;
        if (sub.getQueryTagKs() != null && !sub.getQueryTagKs().isEmpty()) {
            query_tagks = sub.getQueryTagKs();
            for (final Map.Entry<byte[], byte[]> pair : tags.entrySet()) {
                if (query_tagks.contains(pair.getKey())) {
                    i++;
                }
            }
        } else {
            query_tagks = new ByteSet();
        }
        tag_size = i;
    } else {
        query_tagks = new ByteSet();
        tag_size = tags.size();
    }
    // Output: tag_size (tagk,tagv) pairs, plus bare tagk's for agg tags.
    int len = (tag_size * (TSDB.tagk_width() + TSDB.tagv_width())) +
        (include_agg_tags ? (agg_tags.size() * TSDB.tagk_width()) : 0);
    final byte[] tagks = new byte[len];
    int i = 0;
    for (final Map.Entry<byte[], byte[]> pair : tags.entrySet()) {
        if (use_query_tags && !query_tagks.contains(pair.getKey())) {
            continue;
        }
        System.arraycopy(pair.getKey(), 0, tagks, i, TSDB.tagk_width());
        i += TSDB.tagk_width();
        System.arraycopy(pair.getValue(), 0, tagks, i, TSDB.tagv_width());
        i += TSDB.tagv_width();
    }
    if (include_agg_tags) {
        // Aggregated-away tag keys are appended without values.
        for (final byte[] tagk : agg_tags) {
            System.arraycopy(tagk, 0, tagks, i, TSDB.tagk_width());
            i += TSDB.tagk_width();
        }
    }
    return tagks;
}
/**
 * Marks the query complete with the given HTTP status and optional
 * exception, removes it from the running map, and stores it in (or updates)
 * the completed-query cache.
 */
public void markSerialized(final HttpResponseStatus response, final Throwable exception) {
    this.exception = exception;
    this.response = response;
    query_completed_ts = DateTime.currentTimeMillis();
    overall_stats.put(QueryStat.PROCESSING_PRE_WRITE_TIME, DateTime.nanoTime() - query_start_ns);
    synchronized (running_queries) {
        if (!running_queries.containsKey(this.hashCode())) {
            if (!ENABLE_DUPLICATES) {
                LOG.warn("Query was already marked as complete: " + this);
            }
        }
        running_queries.remove(hashCode());
        if (LOG.isDebugEnabled()) {
            LOG.debug("Removed completed query " + remote_address + " with hash " + hashCode() +
                " on thread " + Thread.currentThread().getId());
        }
    }
    aggQueryStats();
    // Cache key mixes the query hash with the response status so different
    // outcomes of the same query are cached as separate entries.
    final int cache_hash = this.hashCode() ^ response.toString().hashCode();
    synchronized (completed_queries) {
        final QueryStats old_query = completed_queries.getIfPresent(cache_hash);
        if (old_query == null) {
            completed_queries.put(cache_hash, this);
        } else {
            // Same query/status seen before: just bump the execution count.
            old_query.executed++;
        }
    }
}
7,085 | public void markSent ( ) { sent_to_client = true ; overall_stats . put ( QueryStat . TOTAL_TIME , DateTime . nanoTime ( ) - query_start_ns ) ; LOG . info ( "Completing query=" + JSON . serializeToString ( this ) ) ; QUERY_LOG . info ( this . toString ( ) ) ; } | Marks the query as complete and logs it to the proper logs . This is called after the data has been sent to the client . |
7,086 | public void markSendFailed ( ) { overall_stats . put ( QueryStat . TOTAL_TIME , DateTime . nanoTime ( ) - query_start_ns ) ; LOG . info ( "Completing query=" + JSON . serializeToString ( this ) ) ; QUERY_LOG . info ( this . toString ( ) ) ; } | Leaves the sent_to_client field as false when we were unable to write to the client end point . |
7,087 | public static Map < String , Object > getRunningAndCompleteStats ( ) { Map < String , Object > root = new TreeMap < String , Object > ( ) ; if ( running_queries . isEmpty ( ) ) { root . put ( "running" , Collections . emptyList ( ) ) ; } else { final List < Object > running = new ArrayList < Object > ( running_queries . size ( ) ) ; root . put ( "running" , running ) ; for ( final QueryStats stats : running_queries . values ( ) ) { final Map < String , Object > obj = new HashMap < String , Object > ( 10 ) ; obj . put ( "query" , stats . query ) ; obj . put ( "remote" , stats . remote_address ) ; obj . put ( "user" , stats . user ) ; obj . put ( "headers" , stats . headers ) ; ; obj . put ( "queryStart" , stats . query_start_ms ) ; obj . put ( "elapsed" , DateTime . msFromNanoDiff ( DateTime . nanoTime ( ) , stats . query_start_ns ) ) ; running . add ( obj ) ; } } final Map < Integer , QueryStats > completed = completed_queries . asMap ( ) ; if ( completed . isEmpty ( ) ) { root . put ( "completed" , Collections . emptyList ( ) ) ; } else { root . put ( "completed" , completed . values ( ) ) ; } return root ; } | Builds a serializable map from the running and cached query maps to be returned to a caller . |
7,088 | public void addStat ( final int query_index , final QueryStat name , final long value ) { Map < QueryStat , Long > qs = query_stats . get ( query_index ) ; if ( qs == null ) { qs = new HashMap < QueryStat , Long > ( ) ; query_stats . put ( query_index , qs ) ; } qs . put ( name , value ) ; } | Adds a stat for a sub query replacing it if it exists . Times must be in nanoseconds . |
7,089 | public void updateStat ( final int query_index , final QueryStat name , final long value ) { Map < QueryStat , Long > qs = query_stats . get ( query_index ) ; long cum_time = value ; if ( qs == null ) { qs = new HashMap < QueryStat , Long > ( ) ; query_stats . put ( query_index , qs ) ; } if ( qs . containsKey ( name ) ) { cum_time += qs . get ( name ) ; } qs . put ( name , cum_time ) ; } | Increments the cumulative value for a cumulative stat . If it s a time then it must be in nanoseconds |
7,090 | public void addScannerStat ( final int query_index , final int id , final QueryStat name , final long value ) { Map < Integer , Map < QueryStat , Long > > qs = scanner_stats . get ( query_index ) ; if ( qs == null ) { qs = new ConcurrentHashMap < Integer , Map < QueryStat , Long > > ( Const . SALT_WIDTH ( ) > 0 ? Const . SALT_BUCKETS ( ) : 1 ) ; scanner_stats . put ( query_index , qs ) ; } Map < QueryStat , Long > scanner_stat_map = qs . get ( id ) ; if ( scanner_stat_map == null ) { scanner_stat_map = new HashMap < QueryStat , Long > ( ) ; qs . put ( id , scanner_stat_map ) ; } scanner_stat_map . put ( name , value ) ; } | Adds a value for a specific scanner for a specific sub query . If it s a time then it must be in nanoseconds . |
7,091 | public void addScannerServers ( final int query_index , final int id , final Set < String > servers ) { Map < Integer , Set < String > > query_servers = scanner_servers . get ( query_index ) ; if ( query_servers == null ) { query_servers = new ConcurrentHashMap < Integer , Set < String > > ( Const . SALT_WIDTH ( ) > 0 ? Const . SALT_BUCKETS ( ) : 1 ) ; scanner_servers . put ( query_index , query_servers ) ; } query_servers . put ( id , servers ) ; } | Adds or overwrites the list of servers scanned by a scanner |
7,092 | public void updateScannerStat ( final int query_index , final int id , final QueryStat name , final long value ) { Map < Integer , Map < QueryStat , Long > > qs = scanner_stats . get ( query_index ) ; long cum_time = value ; if ( qs == null ) { qs = new ConcurrentHashMap < Integer , Map < QueryStat , Long > > ( ) ; scanner_stats . put ( query_index , qs ) ; } Map < QueryStat , Long > scanner_stat_map = qs . get ( id ) ; if ( scanner_stat_map == null ) { scanner_stat_map = new HashMap < QueryStat , Long > ( ) ; qs . put ( id , scanner_stat_map ) ; } if ( scanner_stat_map . containsKey ( name ) ) { cum_time += scanner_stat_map . get ( name ) ; } scanner_stat_map . put ( name , cum_time ) ; } | Updates or adds a stat for a specific scanner . IF it s a time it must be in nanoseconds |
7,093 | public void addScannerId ( final int query_index , final int id , final String string_id ) { Map < Integer , String > scanners = scanner_ids . get ( query_index ) ; if ( scanners == null ) { scanners = new ConcurrentHashMap < Integer , String > ( ) ; scanner_ids . put ( query_index , scanners ) ; } scanners . put ( id , string_id ) ; } | Adds a scanner for a sub query to the stats along with the description of the scanner . |
7,094 | public Map < String , Object > getStats ( final boolean with_sub_queries , final boolean with_scanners ) { final Map < String , Object > map = new TreeMap < String , Object > ( ) ; for ( final Entry < QueryStat , Long > entry : overall_stats . entrySet ( ) ) { if ( entry . getKey ( ) . is_time ) { map . put ( entry . getKey ( ) . toString ( ) , DateTime . msFromNano ( entry . getValue ( ) ) ) ; } else { map . put ( entry . getKey ( ) . toString ( ) , entry . getValue ( ) ) ; } } if ( with_sub_queries ) { final Iterator < Entry < Integer , Map < QueryStat , Long > > > it = query_stats . entrySet ( ) . iterator ( ) ; while ( it . hasNext ( ) ) { final Entry < Integer , Map < QueryStat , Long > > entry = it . next ( ) ; final Map < String , Object > qs = new HashMap < String , Object > ( 1 ) ; qs . put ( String . format ( "queryIdx_%02d" , entry . getKey ( ) ) , getQueryStats ( entry . getKey ( ) , with_scanners ) ) ; map . putAll ( qs ) ; } } return map ; } | Returns measurements of the given query |
/**
 * Compiles a scanner with the given salt ID if salting is enabled AND we're
 * not scanning the meta table.
 * @param salt The salt bucket to prefix row keys with (ignored for meta
 * scans or when salting is disabled)
 * @return A new scanner over the meta or data table, bounded to the metric
 * UID's row range when a metric UID was resolved
 */
private Scanner getScanner(final int salt) {
  final Scanner scanner = tsdb.getClient().newScanner(
      query.useMeta() ? tsdb.metaTable() : tsdb.dataTable());
  scanner.setFamily(query.useMeta() ? TSMeta.FAMILY : TSDB.FAMILY());
  if (metric_uid != null) {
    byte[] key;
    // Start key: the raw metric UID, prefixed with the salt bytes when
    // salting is enabled and we're on the data table.
    if (query.useMeta() || Const.SALT_WIDTH() < 1) {
      key = metric_uid;
    } else {
      key = new byte[Const.SALT_WIDTH() + TSDB.metrics_width()];
      System.arraycopy(RowKey.getSaltBytes(salt), 0, key, 0,
          Const.SALT_WIDTH());
      System.arraycopy(metric_uid, 0, key, Const.SALT_WIDTH(),
          metric_uid.length);
    }
    scanner.setStartKey(key);
    long uid = UniqueId.uidToLong(metric_uid, TSDB.metrics_width());
    // Stop key is exclusive, so bump to the next metric UID.
    uid++;
    // Only set a stop key if the incremented UID still fits in the metric
    // width; otherwise scan to the end of the table.
    if (uid < Internal.getMaxUnsignedValueOnBytes(TSDB.metrics_width())) {
      if (query.useMeta() || Const.SALT_WIDTH() < 1) {
        key = UniqueId.longToUID(uid, TSDB.metrics_width());
      } else {
        key = new byte[Const.SALT_WIDTH() + TSDB.metrics_width()];
        System.arraycopy(RowKey.getSaltBytes(salt), 0, key, 0,
            Const.SALT_WIDTH());
        System.arraycopy(UniqueId.longToUID(uid, TSDB.metrics_width()), 0,
            key, Const.SALT_WIDTH(), metric_uid.length);
      }
      scanner.setStopKey(key);
    }
  }
  if (rowkey_regex != null) {
    scanner.setKeyRegexp(rowkey_regex, CHARSET);
    if (LOG.isDebugEnabled()) {
      LOG.debug("Scanner regex: " + QueryUtil.byteRegexToString(rowkey_regex));
    }
  }
  return scanner;
}
7,096 | public void validate ( ) { if ( metric == null || metric . isEmpty ( ) ) { throw new IllegalArgumentException ( "missing or empty metric" ) ; } if ( id == null || id . isEmpty ( ) ) { throw new IllegalArgumentException ( "missing or empty id" ) ; } Query . validateId ( id ) ; if ( time_offset != null ) { DateTime . parseDateTimeString ( time_offset , null ) ; } if ( aggregator != null && ! aggregator . isEmpty ( ) ) { try { Aggregators . get ( aggregator . toLowerCase ( ) ) ; } catch ( final NoSuchElementException e ) { throw new IllegalArgumentException ( "Invalid aggregator" ) ; } } if ( fill_policy != null ) { fill_policy . validate ( ) ; } } | Validates the metric |
/**
 * Allows for modifying properties after creation or loading.
 * The override is written to the in-memory property map and the cached
 * static variables are immediately reloaded so the change takes effect.
 * @param property Name of the property to set
 * @param value Value to store for the property
 */
public void overrideConfig(final String property, final String value) {
  properties.put(property, value);
  // Refresh cached fields derived from the property map.
  loadStaticVariables();
}
7,098 | public final boolean getBoolean ( final String property ) { final String val = properties . get ( property ) . trim ( ) . toUpperCase ( ) ; if ( val . equals ( "1" ) ) return true ; if ( val . equals ( "TRUE" ) ) return true ; if ( val . equals ( "YES" ) ) return true ; return false ; } | Returns the given property as a boolean |
7,099 | public final String getDirectoryName ( final String property ) { String directory = properties . get ( property ) ; if ( directory == null || directory . isEmpty ( ) ) { return null ; } if ( IS_WINDOWS ) { if ( directory . charAt ( directory . length ( ) - 1 ) == '\\' || directory . charAt ( directory . length ( ) - 1 ) == '/' ) { return directory ; } if ( directory . contains ( "/" ) ) { return directory + "/" ; } return directory + "\\" ; } if ( directory . contains ( "\\" ) ) { throw new IllegalArgumentException ( "Unix path names cannot contain a back slash" ) ; } if ( directory == null || directory . isEmpty ( ) ) { return null ; } if ( directory . charAt ( directory . length ( ) - 1 ) == '/' ) { return directory ; } return directory + "/" ; } | Returns the directory name making sure the end is an OS dependent slash |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.