| input (string, 28 to 18.7k chars) | output (string, 39 to 1.69k chars) |
|---|---|
testWrongIndexOfMultilineM_Message ( ) { java . lang . String line1 = "M:00,02,VgIMAQpXb2huemltbWVyCvMrAgtUb2lsZXR0ZSBFRwrenQMOVG9pbGV0dGUgMS4gT0cK3rgECkJhZGV6aW1tZXIK3qoFDFNjaGxhZnppbW1lcgresQYDSmFuD4lCBwlDaHJpc3RpbmEPiTYIBEZsdXIPiT0KEEJhZGV6aW1tZXIgMi4gT0cPiRwLBULDvHJvD4k/DAxHw6RzdGV6aW1tZXIPiRoJC1dhc2Noa8O8Y2hlD4lXNgQHOCtLRVEwMTg4NjczCFRlcnJhc3NlAQQHMblLRVEwMTg3MTkwCEZsdXJ0w7xyAQIK8ytLRVEwMzc5NTg3C1dhbmRoZWl6dW5nAQIK9P9LRVEwMzgwMDU1DkZlbnN0ZXJoZWl6dW5nAQQHMbtLRVEwMTg3MTg4CEZsdXJ0w7xyAgQHMuxLRVEwMTg2ODg0B0ZlbnN0ZXICAQrenUtFUTA0MDY5NjIHSGVpenVuZwIBCt64S0VRMDQwNjk4OQdIZWl6dW5nAwQIFGdLRVEwMTkwNTc3B0ZlbnN0ZXIDBAc2l0tFUTAxODU5NDUIRmx1cnTDvHIEAQreqktFUTA0MDY5NzUHSGVpenVuZwQBCt8JS0VRMDQwNzA3MA5IYW5kdHVjaGVpenVuZwQEBzhTS0VRMDE4ODcxMAdGZW5zdGVyBAQIFIxLRVEwMTkwNTQzFkZlbnN0ZXIgU3RyYcOfZSByZWNodHMFAQresUtFUTA0MDY5ODIHSGVpenVuZwUEBzHmS0VRMDE4NzE0NhVGZW5zdGVyIFN0cmHDn2UgbGlua3MFAxBXqUxFUTA5ODIxNTYOV2FuZHRoZXJtb3N0YXQBBA/u1ExFUTA3OTQ3NTIIRmx1cnTDvHIGBA/v6kxFUTA3OTQ0NzQNRmVuc3RlciBsaW5rcwYED/HnTEVRMDc5Mzk2NA5GZW5zdGVyIHJlY2h0cwYBD4lCTEVRMTAwNDYwMAdIZWl6dW5nBgQP9BVMRVEwNzkzNDA2CEZsdXJ0w7xyBwQP79FMRVEwNzk0NDk5B0ZlbnN0ZXIHAQ+JNkxFUTEwMDQ1ODgHSGVpenVuZwcBD4k9TEVRMTAwNDU5NQ1IZWl6dW5nIHVudGVuCAEPiRxMRVExMDA0NTYyB0hlaXp1bmcKBA/yTUxFUTA3OTM4NjIHRmVuc3RlcgoED/F+TEVRMDc5NDA2OQhGbHVydMO8cgoBD4k/TEVRMTAwNDU5NwdIZWl6dW5nCwQP8YdMRVEwNzk0MDYwB0ZlbnN0ZXILBA/xSExFUTA3OTQxMjQIRmx1cnTDvHILBA/yVkxFUTA3OTM4NTMURmVuc3RlciBHYXJ0ZW4gbGlua3MMBA/yI0xFUTA3OTM5MDQVRmVuc3RlciBHYXJ0ZW4gcmVjaHRzDAEPiRpMRVExMDA0NTYwB0hlaXp1bmcMBA/vj0xFUTA3OTQ1NjUPRmVuc3RlciBTdHJhw59lDAQP8CtMRVEwNzk0NDA5BFTDvHIDBAgUa0tFUTAxODcwNjkNRmVuc3RlciBTZWl0ZQUEBzagS0VRMDE4NTkzNhVGZW5zdGVyIFN0cmHDn2UgbGlua3MBBA/wI0xFUTA3OTQ0MTYORmVuc3RlciBLw7xjaGUBAxBV50xFUTA5ODI2NzYOV2FuZHRoZXJtb3N0YXQFAxBW2kxFUTA5ODIzNjgOV2FuZHRoZXJtb3N0YXQEAxBV4kxFUTA5ODI2NzEOV2FuZHRoZXJtb3N0YXQHAxBZWExFUTA5ODE3MjkOV2FuZHRoZXJtb3N0YXQMAxBV6ExFUTA5ODI2NzcOV2FuZHRoZXJtb3N0YXQGAxBV40xFUTA5ODI2NzIOV2FuZHRoZXJtb3N0YXQKBAcxoEtFUTAxODcyMTYLV2FzY2hrw7xjaGUF" ; java . lang . String line2 = "M:02,02,AxBV8ExFUTA5ODI2ODUOV2FuZHRoZXJtb3N0YXQJBA/v50xFUTA3OTQ0NzcNQmFsa29uZmVuc3RlcgkBD4lXTEVRMTAwNDYyMRZIZWl6dW5nIHVudGVybSBGZW5zdGVyCQQP8llMRVEwNzkzODUwDkZlbnN0ZXIgcmVjaHRzCQQP8bxMRVEwNzk0MDA3DUZlbnN0ZXIgbGlua3MJAQ+JOExFUTEwMDQ1OTAOSGVpenVuZyBCYWxrb24JBA/yLExFUTA3OTM4OTUKQmFsa29udMO8cgkED++zTEVRMDc5NDUyOQhGbHVydMO8cgkB" ; try { this . processor . addReceivedLine ( line2 ) ; org . junit . Assert . fail ( "Expected<sp>exception<sp>was<sp>not<sp>thrown." ) ; } catch ( org . openhab . binding . maxcube . internal . exceptions . IncorrectMultilineIndexException e ) { } catch ( java . lang . Exception e ) { e . printStackTrace ( ) ; org . junit . Assert . fail ( "Unexpected<sp>error" ) ; } try { this . processor . reset ( ) ; "<AssertPlaceHolder>" ; this . processor . addReceivedLine ( line2 ) ; org . junit . Assert . fail ( "Expected<sp>exception<sp>was<sp>not<sp>thrown." ) ; } catch ( org . openhab . binding . maxcube . internal . exceptions . IncorrectMultilineIndexException e ) { } catch ( java . lang . Exception e ) { e . printStackTrace ( ) ; org . junit . Assert . fail ( "Unexpected<sp>error" ) ; } } addReceivedLine ( java . lang . String ) { if ( ( this . currentMessage ) != null ) { throw new org . openhab . binding . maxcube . internal . exceptions . MessageIsWaitingException ( ) ; } org . openhab . binding . maxcube . internal . message . MessageType messageType = org . openhab . binding . maxcube . internal . message . MessageProcessor . 
getMessageType ( line ) ; if ( messageType == null ) { throw new org . openhab . binding . maxcube . internal . exceptions . UnsupportedMessageTypeException ( ) ; } if ( ( ( this . currentMessageType ) != null ) && ( ! ( messageType . equals ( this . currentMessageType ) ) ) ) { throw new org . openhab . binding . maxcube . internal . exceptions . IncompleteMessageException ( ) ; } java . lang . Boolean result = true ; switch ( messageType ) { case H : this . currentMessage = new org . openhab . binding . maxcube . internal . message . H_Message ( line ) ; break ; case C : this . currentMessage = new org . openhab . binding . maxcube . internal . message . C_Message ( line ) ; break ; case L : this . currentMessage = new org . openhab . binding . maxcube . internal . message . L_Message ( line ) ; break ; case S : this . currentMessage = new org . openhab . binding . maxcube . internal . message . S_Message ( line ) ; break ; case M : result = handle_M_MessageLine ( line ) ; break ; default : } return result ; }
|
org . junit . Assert . assertFalse ( this . processor . addReceivedLine ( line1 ) )
|
testInvalidHexCodeString ( ) { java . awt . Color c = au . gov . ga . worldwind . common . layers . model . gocad . GocadColor . gocadLineToColor ( "*solid*color:#00GG00" ) ; "<AssertPlaceHolder>" ; } gocadLineToColor ( java . lang . String ) { if ( gocadLine == null ) { return null ; } java . util . regex . Matcher matcher = au . gov . ga . worldwind . common . layers . model . gocad . GocadColor . color4Pattern . matcher ( gocadLine ) ; if ( matcher . matches ( ) ) { double r = java . lang . Double . parseDouble ( matcher . group ( 1 ) ) ; double g = java . lang . Double . parseDouble ( matcher . group ( 2 ) ) ; double b = java . lang . Double . parseDouble ( matcher . group ( 3 ) ) ; double a = java . lang . Double . parseDouble ( matcher . group ( 4 ) ) ; return new java . awt . Color ( ( ( float ) ( r ) ) , ( ( float ) ( g ) ) , ( ( float ) ( b ) ) , ( ( float ) ( a ) ) ) ; } matcher = au . gov . ga . worldwind . common . layers . model . gocad . GocadColor . color3Pattern . matcher ( gocadLine ) ; if ( matcher . matches ( ) ) { double r = java . lang . Double . parseDouble ( matcher . group ( 1 ) ) ; double g = java . lang . Double . parseDouble ( matcher . group ( 2 ) ) ; double b = java . lang . Double . parseDouble ( matcher . group ( 3 ) ) ; return new java . awt . Color ( ( ( float ) ( r ) ) , ( ( float ) ( g ) ) , ( ( float ) ( b ) ) , 1.0F ) ; } matcher = au . gov . ga . worldwind . common . layers . model . gocad . GocadColor . colorNamePattern . matcher ( gocadLine ) ; if ( matcher . matches ( ) ) { java . lang . String name = matcher . group ( 1 ) . trim ( ) . toLowerCase ( ) ; au . gov . ga . worldwind . common . layers . model . gocad . GocadColor gc = au . gov . ga . worldwind . common . layers . model . gocad . GocadColor . prettyToColor . get ( name ) ; if ( gc == null ) { gc = au . gov . ga . worldwind . common . layers . model . gocad . GocadColor . nameToColor . get ( name ) ; } if ( gc != null ) { return gc . color ; } } matcher = au . gov . ga . worldwind . common . layers . model . gocad . GocadColor . hexCodePattern . matcher ( gocadLine ) ; if ( matcher . matches ( ) ) { try { int hex = java . lang . Integer . parseInt ( matcher . group ( 1 ) , 16 ) ; return new java . awt . Color ( hex ) ; } catch ( java . lang . NumberFormatException e ) { return null ; } } return null ; }
|
org . junit . Assert . assertNull ( c )
|
testGetReferencedColumnFunction ( ) { org . sagebionetworks . table . query . model . DerivedColumn element = org . sagebionetworks . table . query . util . SqlElementUntils . createDerivedColumn ( "max(bar)" ) ; "<AssertPlaceHolder>" ; } getReferencedColumnName ( ) { org . sagebionetworks . table . query . model . ColumnNameReference hasQuotedValue = getReferencedColumn ( ) ; if ( hasQuotedValue != null ) { return hasQuotedValue . toSqlWithoutQuotes ( ) ; } return null ; }
|
org . junit . Assert . assertEquals ( "bar" , element . getReferencedColumnName ( ) )
|
testQtz395_CopyConstructorMustPreserveTimeZone ( ) { java . util . TimeZone nonDefault = java . util . TimeZone . getTimeZone ( "Europe/Brussels" ) ; if ( nonDefault . equals ( java . util . TimeZone . getDefault ( ) ) ) { nonDefault = org . jboss . elasticsearch . river . remote . CronExpressionTest . EST_TIME_ZONE ; } org . jboss . elasticsearch . river . remote . CronExpression cronExpression = new org . jboss . elasticsearch . river . remote . CronExpression ( "0<sp>15<sp>10<sp>*<sp>*<sp>?<sp>2005" ) ; cronExpression . setTimeZone ( nonDefault ) ; org . jboss . elasticsearch . river . remote . CronExpression copyCronExpression = new org . jboss . elasticsearch . river . remote . CronExpression ( cronExpression ) ; "<AssertPlaceHolder>" ; } getTimeZone ( ) { if ( ( timeZone ) == null ) { timeZone = java . util . TimeZone . getDefault ( ) ; } return timeZone ; }
|
org . junit . Assert . assertEquals ( nonDefault , copyCronExpression . getTimeZone ( ) )
|
testFindByPrimaryKeyExisting ( ) { com . liferay . polls . model . PollsChoice newPollsChoice = addPollsChoice ( ) ; com . liferay . polls . model . PollsChoice existingPollsChoice = _persistence . findByPrimaryKey ( newPollsChoice . getPrimaryKey ( ) ) ; "<AssertPlaceHolder>" ; } getPrimaryKey ( ) { return _amImageEntryId ; }
|
org . junit . Assert . assertEquals ( existingPollsChoice , newPollsChoice )
|
usesFirstCacheAbsolutePath ( ) { com . facebook . buck . io . filesystem . ProjectFilesystem filesystem = com . facebook . buck . io . filesystem . impl . FakeProjectFilesystem . createJavaOnlyFilesystem ( ) ; java . nio . file . Path path = filesystem . getPath ( "world.txt" ) ; filesystem . touch ( path ) ; java . nio . file . Path fullPath = filesystem . resolve ( path ) ; com . facebook . buck . util . cache . ProjectFileHashCache innerCache = com . facebook . buck . util . cache . impl . DefaultFileHashCache . createDefaultFileHashCache ( filesystem , fileHashCacheMode ) ; com . facebook . buck . util . cache . impl . StackedFileHashCache cache = new com . facebook . buck . util . cache . impl . StackedFileHashCache ( com . google . common . collect . ImmutableList . of ( innerCache ) ) ; cache . get ( fullPath ) ; "<AssertPlaceHolder>" ; } willGet ( com . facebook . buck . core . io . ArchiveMemberPath ) { return ( remoteArchiveHashes . containsKey ( relPath . withArchivePath ( filesystem . resolve ( relPath . getArchivePath ( ) ) ) ) ) || ( delegate . willGet ( relPath ) ) ; }
|
org . junit . Assert . assertTrue ( innerCache . willGet ( path ) )
|
testGetBuffer ( ) { io . netty . buffer . ByteBuf buffer = io . netty . buffer . Unpooled . buffer ( 1024 ) ; io . vertx . proton . impl . ProtonWritableBufferImpl writable = new io . vertx . proton . impl . ProtonWritableBufferImpl ( buffer ) ; "<AssertPlaceHolder>" ; } getBuffer ( ) { return nettyBuffer ; }
|
org . junit . Assert . assertSame ( buffer , writable . getBuffer ( ) )
|
buildFromGetReponse ( ) { org . elasticsearch . action . get . GetResponse response = mock ( org . elasticsearch . action . get . GetResponse . class , Mockito . RETURNS_DEEP_STUBS ) ; when ( response . getType ( ) ) . thenReturn ( ESEntityType . NODE . getIndiceName ( ) ) ; when ( response . getId ( ) ) . thenReturn ( "1" ) ; java . util . Map < java . lang . String , java . lang . String > tags = new java . util . HashMap < java . lang . String , java . lang . String > ( ) ; tags . put ( "highway" , "primary" ) ; when ( response . getSource ( ) . get ( "tags" ) ) . thenReturn ( tags ) ; java . util . List < java . lang . Double > location = java . util . Arrays . asList ( new java . lang . Double [ ] { 2.0 , 1.0 } ) ; @ org . openstreetmap . osmosis . plugin . elasticsearch . model . entity . SuppressWarnings ( "unchecked" ) java . util . Map < java . lang . String , java . lang . Object > shape = mock ( java . util . Map . class ) ; when ( shape . get ( "coordinates" ) ) . thenReturn ( location ) ; when ( response . getSource ( ) . get ( "shape" ) ) . thenReturn ( shape ) ; org . openstreetmap . osmosis . plugin . elasticsearch . model . entity . ESNode expected = ESNode . Builder . create ( ) . id ( 1L ) . location ( 1.0 , 2.0 ) . addTag ( "highway" , "primary" ) . build ( ) ; org . openstreetmap . osmosis . plugin . elasticsearch . model . entity . ESNode actual = ESNode . Builder . buildFromGetReponse ( response ) ; "<AssertPlaceHolder>" ; } buildFromGetReponse ( org . elasticsearch . action . get . GetResponse ) { if ( ! ( response . getType ( ) . equals ( ESEntityType . NODE . getIndiceName ( ) ) ) ) throw new java . lang . IllegalArgumentException ( "Provided<sp>GetResponse<sp>is<sp>not<sp>a<sp>Node" ) ; org . openstreetmap . osmosis . plugin . elasticsearch . model . entity . ESNode . Builder builder = new org . openstreetmap . osmosis . plugin . elasticsearch . model . entity . ESNode . Builder ( ) ; builder . id = java . lang . Long . valueOf ( response . getId ( ) ) ; builder . tags = ( ( java . util . Map < java . lang . String , java . lang . String > ) ( response . getSource ( ) . get ( "tags" ) ) ) ; java . util . Map < java . lang . String , java . lang . Object > shape = ( ( java . util . Map < java . lang . String , java . lang . Object > ) ( response . getSource ( ) . get ( "shape" ) ) ) ; java . util . List < java . lang . Double > location = ( ( java . util . List < java . lang . Double > ) ( shape . get ( "coordinates" ) ) ) ; builder . latitude = location . get ( 1 ) ; builder . longitude = location . get ( 0 ) ; return builder . build ( ) ; }
|
org . junit . Assert . assertEquals ( expected , actual )
|
testAsInt4 ( ) { org . apache . tajo . datum . Datum d = org . apache . tajo . datum . DatumFactory . createDate ( org . apache . tajo . datum . TestDateDatum . DATE ) ; org . apache . tajo . datum . Datum copy = org . apache . tajo . datum . DatumFactory . createDate ( d . asInt4 ( ) ) ; "<AssertPlaceHolder>" ; } asInt4 ( ) { throw new org . apache . tajo . exception . TajoRuntimeException ( new org . apache . tajo . exception . InvalidValueForCastException ( type , INT4 ) ) ; }
|
org . junit . Assert . assertEquals ( d , copy )
|
getActive_shouldReturnTrueIfNotVoidedAndDateInRangeWithNullStartDate ( ) { org . openmrs . PatientState patientState = new org . openmrs . PatientState ( ) ; patientState . setStartDate ( null ) ; patientState . setEndDate ( rightRange ) ; patientState . setVoided ( false ) ; boolean active = patientState . getActive ( inRange ) ; "<AssertPlaceHolder>" ; } getActive ( java . util . Date ) { if ( onDate == null ) { onDate = new java . util . Date ( ) ; } return ( ( ! ( getVoided ( ) ) ) && ( ( org . openmrs . util . OpenmrsUtil . compareWithNullAsEarliest ( startDate , onDate ) ) <= 0 ) ) && ( ( org . openmrs . util . OpenmrsUtil . compareWithNullAsLatest ( endDate , onDate ) ) > 0 ) ; }
|
org . junit . Assert . assertTrue ( active )
|
testIndexZero ( ) { java . lang . String regex = "/([^/]{4})/" ; org . apache . druid . query . extraction . ExtractionFn extractionFn = new org . apache . druid . query . extraction . RegexDimExtractionFn ( regex , 0 , true , null ) ; java . util . Set < java . lang . String > extracted = new java . util . LinkedHashSet ( ) ; for ( java . lang . String path : org . apache . druid . query . extraction . RegexDimExtractionFnTest . paths ) { extracted . add ( extractionFn . apply ( path ) ) ; } java . util . Set < java . lang . String > expected = com . google . common . collect . Sets . newLinkedHashSet ( com . google . common . collect . ImmutableList . of ( "/prod/" , "/demo/" , "/dash/" ) ) ; "<AssertPlaceHolder>" ; } of ( org . apache . druid . collections . bitmap . ImmutableBitmap , boolean , long ) { return new org . apache . druid . segment . BitmapOffset ( bitmapIndex , descending , numRows ) ; }
|
org . junit . Assert . assertEquals ( expected , extracted )
|
existsWithExistingUser ( ) { "<AssertPlaceHolder>" ; } exists ( ) { return false ; }
|
org . junit . Assert . assertTrue ( this . user . exists ( ) )
|
shouldRejectImmutableAttributesForReassigningHost ( ) { host = new org . openstack . atlas . docs . loadbalancers . api . management . v1 . Host ( ) ; host . setId ( 23 ) ; host . setName ( "name" ) ; host . setClusterId ( 2 ) ; host . setCoreDeviceId ( "43" ) ; host . setManagementIp ( "119.9.9.9" ) ; host . setMaxConcurrentConnections ( 34 ) ; host . setType ( HostType . FAILOVER ) ; res = hv . validate ( host , ReassignHostContext . REASSIGN_HOST ) ; "<AssertPlaceHolder>" ; } resultMessage ( org . openstack . atlas . api . validation . results . ValidatorResult , java . lang . Enum ) { java . lang . StringBuilder sb = new java . lang . StringBuilder ( ) ; if ( ! ( result . passedValidation ( ) ) ) { java . util . List < org . openstack . atlas . api . validation . results . ExpectationResult > ers = result . getValidationResults ( ) ; sb . append ( java . lang . String . format ( "ON<sp>%s<sp>result.withMessage([" , ctx . toString ( ) ) ) ; for ( org . openstack . atlas . api . validation . results . ExpectationResult er : ers ) { sb . append ( java . lang . String . format ( "%s" , er . getMessage ( ) ) ) ; sb . append ( "])" ) ; } } else { sb . append ( java . lang . String . format ( "On<sp>%s<sp>All<sp>Expectations<sp>PASSED\n" , ctx . toString ( ) ) ) ; } return sb . toString ( ) ; }
|
org . junit . Assert . assertFalse ( resultMessage ( res , ReassignHostContext . REASSIGN_HOST ) , res . passedValidation ( ) )
|
mustFlushAsFastAsPossibleDuringTryCheckPoint ( ) { java . util . concurrent . atomic . AtomicBoolean doneDisablingLimits = new java . util . concurrent . atomic . AtomicBoolean ( ) ; limiter = new org . neo4j . io . pagecache . IOLimiter ( ) { @ org . neo4j . kernel . impl . transaction . log . checkpoint . Override public long maybeLimitIO ( long previousStamp , int recentlyCompletedIOs , java . io . Flushable flushable ) { return 0 ; } @ org . neo4j . kernel . impl . transaction . log . checkpoint . Override public void enableLimit ( ) { doneDisablingLimits . set ( true ) ; } @ org . neo4j . kernel . impl . transaction . log . checkpoint . Override public boolean isLimited ( ) { return doneDisablingLimits . get ( ) ; } } ; mockTxIdStore ( ) ; org . neo4j . kernel . impl . transaction . log . checkpoint . CheckPointerImpl checkPointer = checkPointer ( ) ; checkPointer . tryCheckPoint ( org . neo4j . kernel . impl . transaction . log . checkpoint . CheckPointerImplTest . INFO ) ; "<AssertPlaceHolder>" ; } get ( ) { org . neo4j . kernel . recovery . LogTailScanner . LogTailInformation logTailInformation = logTailScanner . getTailInformation ( ) ; org . neo4j . kernel . impl . transaction . log . entry . CheckPoint lastCheckPoint = logTailInformation . lastCheckPoint ; long txIdAfterLastCheckPoint = logTailInformation . firstTxIdAfterLastCheckPoint ; if ( ! ( logTailInformation . commitsAfterLastCheckpoint ( ) ) ) { monitor . noCommitsAfterLastCheckPoint ( ( lastCheckPoint != null ? lastCheckPoint . getLogPosition ( ) : null ) ) ; return createRecoveryInformation ( LogPosition . UNSPECIFIED , txIdAfterLastCheckPoint ) ; } if ( lastCheckPoint != null ) { monitor . commitsAfterLastCheckPoint ( lastCheckPoint . getLogPosition ( ) , txIdAfterLastCheckPoint ) ; return createRecoveryInformation ( lastCheckPoint . getLogPosition ( ) , txIdAfterLastCheckPoint ) ; } else { if ( ( logTailInformation . oldestLogVersionFound ) != ( INITIAL_LOG_VERSION ) ) { long fromLogVersion = java . lang . Math . max ( org . neo4j . kernel . recovery . INITIAL_LOG_VERSION , logTailInformation . oldestLogVersionFound ) ; throw new org . neo4j . kernel . impl . store . UnderlyingStorageException ( ( ( ( "No<sp>check<sp>point<sp>found<sp>in<sp>any<sp>log<sp>file<sp>from<sp>version<sp>" + fromLogVersion ) + "<sp>to<sp>" ) + ( logTailInformation . currentLogVersion ) ) ) ; } monitor . noCheckPointFound ( ) ; return createRecoveryInformation ( org . neo4j . kernel . impl . transaction . log . LogPosition . start ( 0 ) , txIdAfterLastCheckPoint ) ; } }
|
org . junit . Assert . assertTrue ( doneDisablingLimits . get ( ) )
|
testCheckInvalidCRLF1 ( ) { com . erudika . para . utils . filters . MockHttpServletRequest request = new com . erudika . para . utils . filters . MockHttpServletRequest ( ) ; request . setHeader ( CORSFilter . REQUEST_HEADER_ORIGIN , "http://www.w3.org\r\n" ) ; request . setMethod ( "GET" ) ; com . erudika . para . utils . filters . CORSFilter corsFilter = new com . erudika . para . utils . filters . CORSFilter ( ) ; corsFilter . init ( com . erudika . para . utils . filters . TestConfigs . getDefaultFilterConfig ( ) ) ; com . erudika . para . utils . filters . CORSFilter . CORSRequestType requestType = corsFilter . checkRequestType ( request ) ; "<AssertPlaceHolder>" ; } checkRequestType ( javax . servlet . http . HttpServletRequest ) { com . erudika . para . utils . filters . CORSFilter . CORSRequestType requestType = com . erudika . para . utils . filters . CORSFilter . CORSRequestType . INVALID_CORS ; if ( request == null ) { throw new java . lang . IllegalArgumentException ( "HttpServletRequest<sp>object<sp>is<sp>null" ) ; } java . lang . String originHeader = request . getHeader ( com . erudika . para . utils . filters . CORSFilter . REQUEST_HEADER_ORIGIN ) ; if ( originHeader != null ) { if ( originHeader . isEmpty ( ) ) { requestType = com . erudika . para . utils . filters . CORSFilter . CORSRequestType . INVALID_CORS ; } else if ( ! ( com . erudika . para . utils . filters . CORSFilter . isValidOrigin ( originHeader ) ) ) { requestType = com . erudika . para . utils . filters . CORSFilter . CORSRequestType . INVALID_CORS ; } else { java . lang . String method = org . apache . commons . lang3 . StringUtils . trimToEmpty ( request . getMethod ( ) ) ; if ( com . erudika . para . utils . filters . CORSFilter . HTTP_METHODS . contains ( method ) ) { if ( "OPTIONS" . equals ( method ) ) { java . lang . String accessControlRequestMethodHeader = request . getHeader ( com . erudika . para . utils . filters . CORSFilter . REQUEST_HEADER_ACCESS_CONTROL_REQUEST_METHOD ) ; if ( org . apache . commons . lang3 . StringUtils . isNotBlank ( accessControlRequestMethodHeader ) ) { requestType = com . erudika . para . utils . filters . CORSFilter . CORSRequestType . PRE_FLIGHT ; } else if ( org . apache . commons . lang3 . StringUtils . isWhitespace ( accessControlRequestMethodHeader ) ) { requestType = com . erudika . para . utils . filters . CORSFilter . CORSRequestType . INVALID_CORS ; } else { requestType = com . erudika . para . utils . filters . CORSFilter . CORSRequestType . ACTUAL ; } } else if ( ( "GET" . equals ( method ) ) || ( "HEAD" . equals ( method ) ) ) { requestType = com . erudika . para . utils . filters . CORSFilter . CORSRequestType . SIMPLE ; } else if ( "POST" . equals ( method ) ) { java . lang . String contentType = request . getContentType ( ) ; if ( contentType != null ) { contentType = contentType . toLowerCase ( ) . trim ( ) ; if ( com . erudika . para . utils . filters . CORSFilter . SIMPLE_HTTP_REQUEST_CONTENT_TYPE_VALUES . contains ( contentType ) ) { requestType = com . erudika . para . utils . filters . CORSFilter . CORSRequestType . SIMPLE ; } else { requestType = com . erudika . para . utils . filters . CORSFilter . CORSRequestType . ACTUAL ; } } } else if ( com . erudika . para . utils . filters . CORSFilter . COMPLEX_HTTP_METHODS . contains ( method ) ) { requestType = com . erudika . para . utils . filters . CORSFilter . CORSRequestType . ACTUAL ; } } } } else { requestType = com . erudika . para . utils . filters . CORSFilter . CORSRequestType . 
NOT_CORS ; } return requestType ; }
|
org . junit . Assert . assertEquals ( CORSFilter . CORSRequestType . INVALID_CORS , requestType )
|
addSameElementTwiceAfterTransformation ( ) { filter . transform ( from , to ) ; java . lang . String s = "aa" ; from . add ( s ) ; from . add ( "bb" ) ; from . add ( s ) ; "<AssertPlaceHolder>" ; } add ( jetbrains . jetpad . model . event . EventSource ) { myEventSources . add ( source ) ; }
|
org . junit . Assert . assertEquals ( java . util . Arrays . asList ( "aa" , "bb" , "aa" ) , to )
|
twoIRIs ( ) { org . apache . clerezza . commons . rdf . Graph mGraph = new org . apache . clerezza . commons . rdf . impl . utils . simple . SimpleGraph ( ) ; org . apache . clerezza . commons . rdf . IRI mbox1 = new org . apache . clerezza . commons . rdf . IRI ( "mailto:foo@example.org" ) ; final org . apache . clerezza . commons . rdf . IRI resource1 = new org . apache . clerezza . commons . rdf . IRI ( "http://example.org/" ) ; mGraph . add ( new org . apache . clerezza . commons . rdf . impl . utils . TripleImpl ( resource1 , org . apache . clerezza . rdf . ontologies . FOAF . mbox , mbox1 ) ) ; mGraph . add ( new org . apache . clerezza . commons . rdf . impl . utils . TripleImpl ( resource1 , org . apache . clerezza . rdf . ontologies . RDFS . comment , new org . apache . clerezza . commons . rdf . impl . utils . PlainLiteralImpl ( "a<sp>comment" ) ) ) ; final org . apache . clerezza . commons . rdf . IRI resource2 = new org . apache . clerezza . commons . rdf . IRI ( "http://2.example.org/" ) ; mGraph . add ( new org . apache . clerezza . commons . rdf . impl . utils . TripleImpl ( resource2 , org . apache . clerezza . rdf . ontologies . FOAF . mbox , mbox1 ) ) ; mGraph . add ( new org . apache . clerezza . commons . rdf . impl . utils . TripleImpl ( resource2 , org . apache . clerezza . rdf . ontologies . RDFS . comment , new org . apache . clerezza . commons . rdf . impl . utils . PlainLiteralImpl ( "another<sp>comment" ) ) ) ; org . apache . clerezza . rdf . utils . Smusher . smush ( mGraph , ontology ) ; "<AssertPlaceHolder>" ; } size ( ) { expandTill ( Integer . MAX_VALUE ) ; return valueList . size ( ) ; }
|
org . junit . Assert . assertEquals ( 4 , mGraph . size ( ) )
|
testSubtraction ( ) { org . hipparchus . analysis . polynomials . PolynomialFunction p1 = new org . hipparchus . analysis . polynomials . PolynomialFunction ( new double [ ] { - 2 , 1 } ) ; checkNullPolynomial ( p1 . subtract ( p1 ) ) ; org . hipparchus . analysis . polynomials . PolynomialFunction p2 = new org . hipparchus . analysis . polynomials . PolynomialFunction ( new double [ ] { - 2 , 6 } ) ; p2 = p2 . subtract ( p1 ) ; checkPolynomial ( p2 , "5<sp>x" ) ; p1 = new org . hipparchus . analysis . polynomials . PolynomialFunction ( new double [ ] { 1 , - 4 , 2 } ) ; p2 = new org . hipparchus . analysis . polynomials . PolynomialFunction ( new double [ ] { - 1 , 3 , 2 } ) ; p1 = p1 . subtract ( p2 ) ; "<AssertPlaceHolder>" ; checkPolynomial ( p1 , "2<sp>-<sp>7<sp>x" ) ; } degree ( ) { return ( coefficients . length ) - 1 ; }
|
org . junit . Assert . assertEquals ( 1 , p1 . degree ( ) )
|
check_platform_and_db_with_different_maintenance_version_is_accepted ( ) { given_DB_Jar_Home_Versions ( "6.2.0" , "6.2.1" ) ; java . lang . Boolean sameVersion = checkPlatformVersion . call ( ) ; "<AssertPlaceHolder>" ; } call ( ) { try { org . bonitasoft . engine . commons . JavaMethodInvoker jmi = new org . bonitasoft . engine . commons . JavaMethodInvoker ( ) ; jmi . invokeJavaMethod ( "au.edu.sydney.faas.applicationstudent.StudentInformation" , valueToSetObjectWith , objectToInvokeJavaMethodOn , "setStudentInformation" , "au.edu.sydney.faas.applicationstudent.StudentInformation" ) ; } catch ( java . lang . Exception e ) { throw new java . lang . RuntimeException ( e ) ; } return null ; }
|
org . junit . Assert . assertTrue ( sameVersion )
|
shouldBeAbleToRetrieveAllKeys ( ) { uk . gov . gchq . gaffer . cache . impl . JcsDistributedCacheTest . cache1 . put ( "test1" , 1 ) ; uk . gov . gchq . gaffer . cache . impl . JcsDistributedCacheTest . cache1 . put ( "test2" , 2 ) ; uk . gov . gchq . gaffer . cache . impl . JcsDistributedCacheTest . cache2 . put ( "test3" , 3 ) ; java . util . Set < java . lang . String > keys = uk . gov . gchq . gaffer . cache . impl . JcsDistributedCacheTest . cache2 . getAllKeys ( ) ; "<AssertPlaceHolder>" ; assert keys . contains ( "test1" ) ; assert keys . contains ( "test2" ) ; assert keys . contains ( "test3" ) ; } size ( ) { return map . size ( ) ; }
|
org . junit . Assert . assertEquals ( 3 , keys . size ( ) )
|
shouldLogAppropriateMessagesIfLog4jSetToErrorLevel ( ) { log4jLogger . setLevel ( org . apache . log4j . Level . ERROR ) ; logger . error ( org . modeshape . common . logging . LoggerTest . errorMessageWithNoParameters ) ; logger . warn ( org . modeshape . common . logging . LoggerTest . warningMessageWithNoParameters ) ; logger . info ( org . modeshape . common . logging . LoggerTest . infoMessageWithNoParameters ) ; logger . debug ( "This<sp>is<sp>a<sp>debug<sp>message<sp>with<sp>no<sp>parameters" ) ; logger . trace ( "This<sp>is<sp>a<sp>trace<sp>message<sp>with<sp>no<sp>parameters" ) ; log . removeFirst ( Logger . Level . ERROR , "This<sp>is<sp>an<sp>error<sp>message<sp>with<sp>no<sp>parameters" ) ; "<AssertPlaceHolder>" ; } hasEvents ( ) { return ( this . events . size ( ) ) != 0 ; }
|
org . junit . Assert . assertEquals ( false , log . hasEvents ( ) )
|
testRetrieve ( ) { java . lang . String [ ] expected = new java . lang . String [ ] { "the" , "quick" , "brown" , "fox" , "jumps" , "over" , "lazy" , "dog" } ; for ( int i = 0 ; i < ( expected . length ) ; i ++ ) { "<AssertPlaceHolder>" ; } } get ( int ) { return new nl . inl . blacklab . search . Hit ( doc [ i ] , start [ i ] , end [ i ] ) ; }
|
org . junit . Assert . assertEquals ( expected [ i ] , t . get ( i ) )
|
testHudsonDiffBackup ( ) { final org . jvnet . hudson . plugins . thinbackup . ThinBackupPluginImpl mockPlugin = org . jvnet . hudson . plugins . thinbackup . TestHelper . createMockPlugin ( jenkinsHome , backupDir ) ; performHudsonDiffBackup ( mockPlugin ) ; final java . io . File lastDiffBackup = backupDir . listFiles ( ( ( java . io . FileFilter ) ( org . apache . commons . io . filefilter . FileFilterUtils . prefixFileFilter ( BackupType . DIFF . toString ( ) ) ) ) ) [ 0 ] ; "<AssertPlaceHolder>" ; } toString ( ) { return ( ( ( super . toString ( ) ) + "[plugin=" ) + ( plugin . title ) ) + "]" ; }
|
org . junit . Assert . assertEquals ( 1 , lastDiffBackup . list ( ) . length )
|
testViewInitialization ( ) { org . dashbuilder . dataset . date . TimeInstant beginMonth = new org . dashbuilder . dataset . date . TimeInstant ( TimeInstant . TimeMode . BEGIN , org . dashbuilder . dataset . group . DateIntervalType . MONTH , org . dashbuilder . dataset . date . Month . JANUARY , new org . dashbuilder . dataset . date . TimeAmount ( 10 , org . dashbuilder . dataset . group . DateIntervalType . DAY ) ) ; org . dashbuilder . displayer . client . widgets . filter . TimeInstantEditor timeInstantEditor = new org . dashbuilder . displayer . client . widgets . filter . TimeInstantEditor ( timeInstantView , timeAmountEditor ) ; timeInstantEditor . init ( beginMonth , changeCommand ) ; "<AssertPlaceHolder>" ; verify ( timeInstantView ) . init ( timeInstantEditor ) ; verify ( timeInstantView ) . clearTimeModeSelector ( ) ; verify ( timeInstantView , times ( TimeInstant . TimeMode . values ( ) . length ) ) . addTimeModeItem ( any ( TimeInstant . TimeMode . class ) ) ; verify ( timeInstantView ) . setSelectedTimeModeIndex ( TimeInstant . TimeMode . BEGIN . getIndex ( ) ) ; verify ( timeInstantView ) . enableIntervalTypeSelector ( ) ; verify ( timeInstantView ) . clearIntervalTypeSelector ( ) ; verify ( timeInstantView , times ( TimeInstantEditor . INTERVAL_TYPES . size ( ) ) ) . addIntervalTypeItem ( any ( org . dashbuilder . dataset . group . DateIntervalType . class ) ) ; verify ( timeInstantView ) . setSelectedIntervalTypeIndex ( 3 ) ; }
|
org . junit . Assert . assertEquals ( timeInstantView , timeInstantEditor . view )
|
testGradientWeightDecay ( ) { org . nd4j . linalg . activations . Activation [ ] activFns = new org . nd4j . linalg . activations . Activation [ ] { org . nd4j . linalg . activations . Activation . SIGMOID , org . nd4j . linalg . activations . Activation . TANH , org . nd4j . linalg . activations . Activation . THRESHOLDEDRELU } ; boolean [ ] characteristic = new boolean [ ] { false , true } ; org . nd4j . linalg . lossfunctions . LossFunctions . LossFunction [ ] lossFunctions = new org . nd4j . linalg . lossfunctions . LossFunctions . LossFunction [ ] { org . nd4j . linalg . lossfunctions . LossFunctions . LossFunction . MCXENT , org . nd4j . linalg . lossfunctions . LossFunctions . LossFunction . MSE } ; org . nd4j . linalg . activations . Activation [ ] outputActivations = new org . nd4j . linalg . activations . Activation [ ] { org . nd4j . linalg . activations . Activation . SOFTMAX , org . nd4j . linalg . activations . Activation . TANH } ; org . nd4j . linalg . dataset . api . preprocessor . DataNormalization scaler = new org . nd4j . linalg . dataset . api . preprocessor . NormalizerMinMaxScaler ( ) ; org . nd4j . linalg . dataset . api . iterator . DataSetIterator iter = new org . deeplearning4j . datasets . iterator . impl . IrisDataSetIterator ( 150 , 150 ) ; scaler . fit ( iter ) ; iter . setPreProcessor ( scaler ) ; org . nd4j . linalg . dataset . DataSet ds = iter . next ( ) ; org . nd4j . linalg . api . ndarray . INDArray input = ds . getFeatures ( ) ; org . nd4j . linalg . api . ndarray . INDArray labels = ds . getLabels ( ) ; double [ ] l2vals = new double [ ] { 0.4 , 0.0 , 0.4 , 0.4 , 0.0 , 0.0 } ; double [ ] l1vals = new double [ ] { 0.0 , 0.0 , 0.5 , 0.0 , 0.5 , 0.0 } ; double [ ] biasL2 = new double [ ] { 0.0 , 0.0 , 0.0 , 0.2 , 0.0 , 0.0 } ; double [ ] biasL1 = new double [ ] { 0.0 , 0.0 , 0.6 , 0.0 , 0.0 , 0.5 } ; double [ ] wdVals = new double [ ] { 0.0 , 0.0 , 0.0 , 0.0 , 0.4 , 0.0 } ; double [ ] wdBias = new double [ ] { 0.0 , 0.0 , 0.0 , 0.0 , 0.0 , 0.4 } ; for ( org . nd4j . linalg . activations . Activation afn : activFns ) { for ( int i = 0 ; i < ( lossFunctions . length ) ; i ++ ) { for ( int k = 0 ; k < ( l2vals . length ) ; k ++ ) { org . nd4j . linalg . lossfunctions . LossFunctions . LossFunction lf = lossFunctions [ i ] ; org . nd4j . linalg . activations . Activation outputActivation = outputActivations [ i ] ; double l2 = l2vals [ k ] ; double l1 = l1vals [ k ] ; org . deeplearning4j . nn . conf . MultiLayerConfiguration conf = new org . deeplearning4j . nn . conf . NeuralNetConfiguration . Builder ( ) . l2 ( l2 ) . l1 ( l1 ) . l2Bias ( biasL2 [ k ] ) . l1Bias ( biasL1 [ k ] ) . weightDecay ( wdVals [ k ] ) . weightDecayBias ( wdBias [ k ] ) . optimizationAlgo ( OptimizationAlgorithm . CONJUGATE_GRADIENT ) . seed ( 12345L ) . list ( ) . layer ( 0 , new org . deeplearning4j . gradientcheck . DenseLayer . Builder ( ) . nIn ( 4 ) . nOut ( 3 ) . dist ( new org . deeplearning4j . nn . conf . distribution . NormalDistribution ( 0 , 1 ) ) . updater ( new org . nd4j . linalg . learning . config . NoOp ( ) ) . activation ( afn ) . build ( ) ) . layer ( 1 , new org . deeplearning4j . gradientcheck . OutputLayer . Builder ( lf ) . nIn ( 3 ) . nOut ( 3 ) . dist ( new org . deeplearning4j . nn . conf . distribution . NormalDistribution ( 0 , 1 ) ) . updater ( new org . nd4j . linalg . learning . config . NoOp ( ) ) . activation ( outputActivation ) . build ( ) ) . build ( ) ; org . deeplearning4j . nn . multilayer . MultiLayerNetwork mln = new org . 
deeplearning4j . nn . multilayer . MultiLayerNetwork ( conf ) ; mln . init ( ) ; boolean gradOK1 = org . deeplearning4j . gradientcheck . GradientCheckUtil . checkGradients ( mln , org . deeplearning4j . gradientcheck . GradientCheckTests . DEFAULT_EPS , org . deeplearning4j . gradientcheck . GradientCheckTests . DEFAULT_MAX_REL_ERROR , org . deeplearning4j . gradientcheck . GradientCheckTests . DEFAULT_MIN_ABS_ERROR , org . deeplearning4j . gradientcheck . GradientCheckTests . PRINT_RESULTS , org . deeplearning4j . gradientcheck . GradientCheckTests . RETURN_ON_FIRST_FAILURE , input , labels ) ; java . lang . String msg = ( ( ( ( ( ( ( ( "testGradientWeightDecay()<sp>-<sp>activationFn=" + afn ) + ",<sp>lossFn=" ) + lf ) + ",<sp>outputActivation=" ) + outputActivation ) + ",<sp>l2=" ) + l2 ) + ",<sp>l1=" ) + l1 ; "<AssertPlaceHolder>" ; org . deeplearning4j . TestUtils . testModelSerialization ( mln ) ; } } } } checkGradients ( org . deeplearning4j . nn . multilayer . MultiLayerNetwork , double , double , double , boolean , boolean , org . nd4j . linalg . api .
|
org . junit . Assert . assertTrue ( msg , gradOK1 )
|
testShouldCallSpecCreateForEachMethod ( ) { jdave . runner . SpecRunnerTest . BooleanSpec . specCreateCalled = 0 ; runner . run ( jdave . runner . SpecRunnerTest . BooleanSpec . class , new jdave . SpecVisitorAdapter ( new jdave . ResultsAdapter ( ) ) ) ; "<AssertPlaceHolder>" ; } run ( java . lang . Class , jdave . runner . ISpecVisitor ) { runOnceBefores ( specType ) ; runContexts ( specType , callback ) ; runOnceAfters ( specType ) ; }
|
org . junit . Assert . assertEquals ( 5 , jdave . runner . SpecRunnerTest . BooleanSpec . specCreateCalled )
|
testExonic ( ) { java . lang . String [ ] legacyVariantStrings = new java . lang . String [ ] { "XXX:EX3+3ins" , "XXX:EX3-3insT" , "XXX:EX3-3insTCT" , "XXX:EX3-3ins3" } ; for ( java . lang . String legacyVariantString : legacyVariantStrings ) { de . charite . compbio . jannovar . hgvs . legacy . LegacyVariant variant = driver . parseLegacyChangeString ( legacyVariantString ) ; "<AssertPlaceHolder>" ; } } toLegacyString ( ) { return com . google . common . base . Joiner . on ( "" ) . join ( location . toLegacyString ( ) , "del" , deletedSeq . toHGVSString ( ) ) ; }
|
org . junit . Assert . assertEquals ( legacyVariantString , variant . toLegacyString ( ) )
|
testSort ( ) { double [ ] vals = new double [ ] { 12 , 32 , 31 , 11 , 52 , 13 , - 1 , - 222 , 2 } ; org . apache . hama . commons . math . DoubleVector vec = new org . apache . hama . commons . math . DenseDoubleVector ( vals ) ; java . util . Comparator < java . lang . Double > comparator = new java . util . Comparator < java . lang . Double > ( ) { @ org . apache . hama . commons . math . Override public int compare ( java . lang . Double arg0 , java . lang . Double arg1 ) { return java . lang . Double . compare ( arg0 , arg1 ) ; } } ; java . util . List < org . apache . hama . commons . math . Tuple < java . lang . Double , java . lang . Integer > > sorted = org . apache . hama . commons . math . DenseDoubleVector . sort ( vec , comparator ) ; for ( int i = 1 ; i < ( sorted . size ( ) ) ; ++ i ) { org . apache . hama . commons . math . Tuple < java . lang . Double , java . lang . Integer > previous = sorted . get ( ( i - 1 ) ) ; org . apache . hama . commons . math . Tuple < java . lang . Double , java . lang . Integer > cur = sorted . get ( i ) ; "<AssertPlaceHolder>" ; } } getFirst ( ) { return first ; }
|
org . junit . Assert . assertTrue ( ( ( previous . getFirst ( ) ) <= ( cur . getFirst ( ) ) ) )
|
correctlySetsLongVersionProperty ( ) { org . springframework . data . mongodb . core . PersonWithVersionPropertyOfTypeLong person = new org . springframework . data . mongodb . core . PersonWithVersionPropertyOfTypeLong ( ) ; person . firstName = "Dave" ; template . save ( person ) ; "<AssertPlaceHolder>" ; }
|
org . junit . Assert . assertThat ( person . version , is ( 0L ) )
|
customCollector_whenResultContainsAllElementsFrSource_thenCorrect ( ) { com . baeldung . java8 . Collector < com . baeldung . stream . Product , ? , com . baeldung . java8 . LinkedList < com . baeldung . stream . Product > > toLinkedList = com . baeldung . java8 . Collector . of ( LinkedList :: new , LinkedList :: add , ( first , second ) -> { first . addAll ( second ) ; return first ; } ) ; com . baeldung . java8 . LinkedList < com . baeldung . stream . Product > linkedListOfPersons = productList . stream ( ) . collect ( toLinkedList ) ; "<AssertPlaceHolder>" ; } containsAll ( java . util . Collection ) { for ( java . lang . Object o : collection ) { if ( ! ( contains ( o ) ) ) { return false ; } } return true ; }
|
org . junit . Assert . assertTrue ( linkedListOfPersons . containsAll ( productList ) )
|
startProcess ( ) { ninja . build . RunClassInSeparateJvmMachine rcsjm = new ninja . build . RunClassInSeparateJvmMachine ( "FakeDaemon" , ninja . build . FakeDaemonMain . class . getName ( ) , getOurClassPath ( ) , java . util . Collections . EMPTY_LIST , new java . io . File ( "." ) ) ; final java . io . ByteArrayOutputStream baos = new java . io . ByteArrayOutputStream ( ) ; rcsjm . setOutput ( baos ) ; org . zeroturnaround . exec . StartedProcess startedProcess = rcsjm . startProcess ( ) ; "<AssertPlaceHolder>" ; waitOrTimeout ( fakeDaemonCondition ( baos ) , com . google . code . tempusfugit . temporal . Timeout . timeout ( com . google . code . tempusfugit . temporal . Duration . millis ( 10000 ) ) ) ; startedProcess . getProcess ( ) . destroy ( ) ; org . zeroturnaround . exec . ProcessResult processResult = startedProcess . getFuture ( ) . get ( ) ; } isAlive ( java . lang . Process ) { try { p . exitValue ( ) ; return false ; } catch ( java . lang . Exception e ) { return true ; } }
|
org . junit . Assert . assertTrue ( isAlive ( startedProcess . getProcess ( ) ) )
|
shouldReturnEmptyPropertyInfosWhenSuppliedValidUrlAndAllPropertiesWithRepositoriesInHTTP ( ) { java . util . Properties validProperties = new java . util . Properties ( ) ; validProperties . put ( JcrDriver . WORKSPACE_PROPERTY_NAME , org . modeshape . jdbc . delegate . HttpRepositoryDelegateTest . WORKSPACE ) ; validProperties . put ( JcrDriver . USERNAME_PROPERTY_NAME , org . modeshape . jdbc . delegate . HttpRepositoryDelegateTest . USER_NAME ) ; validProperties . put ( JcrDriver . PASSWORD_PROPERTY_NAME , org . modeshape . jdbc . delegate . HttpRepositoryDelegateTest . PASSWORD ) ; validProperties . put ( JcrDriver . REPOSITORY_PROPERTY_NAME , org . modeshape . jdbc . delegate . HttpRepositoryDelegateTest . REPOSITORY_NAME ) ; delegate = factory ( ) . createRepositoryDelegate ( org . modeshape . jdbc . delegate . HttpRepositoryDelegateTest . VALID_HTTP_URL , validProperties , null ) ; java . sql . DriverPropertyInfo [ ] infos = delegate . getConnectionInfo ( ) . getPropertyInfos ( ) ; "<AssertPlaceHolder>" ; } getPropertyInfos ( ) { java . util . List < java . sql . DriverPropertyInfo > results = new java . util . ArrayList < java . sql . DriverPropertyInfo > ( ) ; addUrlPropertyInfo ( results ) ; addUserNamePropertyInfo ( results ) ; addPasswordPropertyInfo ( results ) ; addWorkspacePropertyInfo ( results ) ; addRepositoryNamePropertyInfo ( results ) ; return results . toArray ( new java . sql . DriverPropertyInfo [ results . size ( ) ] ) ; }
|
org . junit . Assert . assertThat ( infos . length , org . hamcrest . core . Is . is ( 0 ) )
|
getFirst_A$ ( ) { java . lang . String _1 = "foo" ; java . lang . Integer _2 = 123 ; java . lang . Long _3 = 456L ; java . lang . Boolean _4 = true ; com . m3 . scalaflavor4j . Tuple4 < java . lang . String , java . lang . Integer , java . lang . Long , java . lang . Boolean > tuple = com . m3 . scalaflavor4j . Tuple . apply ( _1 , _2 , _3 , _4 ) ; java . lang . String actual = tuple . getFirst ( ) ; java . lang . String expected = "foo" ; "<AssertPlaceHolder>" ; } getFirst ( ) { return _1 ( ) ; }
|
org . junit . Assert . assertThat ( actual , org . hamcrest . CoreMatchers . is ( org . hamcrest . CoreMatchers . equalTo ( expected ) ) )
|
testReset ( ) { this . dupIndicator . reset ( ) ; "<AssertPlaceHolder>" ; } getDuplicateMap ( ) { return this . distinctMap ; }
|
org . junit . Assert . assertTrue ( ( ( dupIndicator . getDuplicateMap ( ) . size ( ) ) < 1 ) )
|
testReadUuid ( ) { final com . allanbank . mongodb . bson . Document seed = com . allanbank . mongodb . bson . builder . BuilderFactory . start ( ) . addBinary ( "juuid" , ( ( byte ) ( 3 ) ) , com . allanbank . mongodb . bson . io . BsonInputStreamTest . LEGACY_UUID_BYTES ) . addBinary ( "uuid" , ( ( byte ) ( 4 ) ) , com . allanbank . mongodb . bson . io . BsonInputStreamTest . STANDARD_UUID_BYTES ) . build ( ) ; final com . allanbank . mongodb . bson . Document expected = new com . allanbank . mongodb . bson . impl . RootDocument ( new com . allanbank . mongodb . bson . element . UuidElement ( "juuid" , ( ( byte ) ( 3 ) ) , com . allanbank . mongodb . bson . io . BsonInputStreamTest . TEST_UUID ) , new com . allanbank . mongodb . bson . element . UuidElement ( "uuid" , ( ( byte ) ( 4 ) ) , com . allanbank . mongodb . bson . io . BsonInputStreamTest . TEST_UUID ) ) ; final java . io . ByteArrayOutputStream out = new java . io . ByteArrayOutputStream ( ) ; final com . allanbank . mongodb . bson . io . BsonOutputStream bout = new com . allanbank . mongodb . bson . io . BsonOutputStream ( out ) ; bout . writeDocument ( seed ) ; final java . io . ByteArrayInputStream in = new java . io . ByteArrayInputStream ( out . toByteArray ( ) ) ; final com . allanbank . mongodb . bson . io . BsonInputStream reader = new com . allanbank . mongodb . bson . io . BsonInputStream ( in ) ; final com . allanbank . mongodb . bson . Document doc = reader . readDocument ( ) ; reader . close ( ) ; "<AssertPlaceHolder>" ; } close ( ) { synchronized ( myForwardCallback ) { myClosed = true ; sendKill ( ) ; } }
|
org . junit . Assert . assertEquals ( expected , doc )
|
testCantDeleteDirWithOpenDirStreamAndChild ( ) { java . nio . file . Path dir = root . resolve ( "dir" ) ; java . nio . file . Files . createDirectories ( dir ) ; java . nio . file . Path child = dir . resolve ( "child" ) ; java . nio . file . Files . createDirectories ( child ) ; try ( java . nio . file . DirectoryStream < java . nio . file . Path > stream = java . nio . file . Files . newDirectoryStream ( dir ) ) { java . nio . file . Files . delete ( child ) ; "<AssertPlaceHolder>" ; java . nio . file . Files . delete ( dir ) ; } } delete ( java . nio . file . Path ) { getFs ( path ) . delete ( toEfsPath ( path ) ) ; }
|
org . junit . Assert . assertFalse ( java . nio . file . Files . exists ( child ) )
|
testDeleteWithoutItem ( ) { eu . abc4trust . db . SimpleParamTypes table = SimpleParamTypes . TEST_TABLE ; java . lang . String prefix = java . util . UUID . randomUUID ( ) . toString ( ) ; java . net . URI key = java . net . URI . create ( ( "test-" + prefix ) ) ; eu . abc4trust . db . TestJdbcPersistentStorage . ps . clearTestTable ( ) ; "<AssertPlaceHolder>" ; } deleteItem ( eu . abc4trust . db . SimpleParamTypes , java . net . URI ) { return ( storage . get ( table ) . remove ( key ) ) != null ; }
|
org . junit . Assert . assertFalse ( eu . abc4trust . db . TestJdbcPersistentStorage . ps . deleteItem ( table , key ) )
|
testCreateScan ( ) { final org . csstudio . scan . log . derby . DerbyDataLogger log = new org . csstudio . scan . log . derby . DerbyDataLogger ( ) ; org . csstudio . scan . log . derby . DerbyDataLogUnitTest . setScan ( log . createScan ( "Demo" ) ) ; log . close ( ) ; System . out . println ( ( "New<sp>scan:<sp>" + ( org . csstudio . scan . log . derby . DerbyDataLogUnitTest . scan ) ) ) ; "<AssertPlaceHolder>" ; } close ( ) { if ( ( log_writer ) != null ) { log_writer . stop ( ) ; log_writer = null ; } }
|
org . junit . Assert . assertNotNull ( org . csstudio . scan . log . derby . DerbyDataLogUnitTest . scan )
|
testMonitorRunningAnalysesSuccessErrorStillRunning ( ) { analysisSubmission . setAnalysisState ( AnalysisState . RUNNING ) ; java . util . Map < ca . corefacility . bioinformatics . irida . model . workflow . execution . galaxy . GalaxyWorkflowState , java . util . Set < java . lang . String > > stateIds = ca . corefacility . bioinformatics . irida . pipeline . upload . galaxy . integration . Util . buildStateIdsWithStateFilled ( GalaxyWorkflowState . ERROR , com . google . common . collect . Sets . newHashSet ( "1" ) ) ; ca . corefacility . bioinformatics . irida . model . workflow . execution . galaxy . GalaxyWorkflowStatus galaxyWorkflowStatus = new ca . corefacility . bioinformatics . irida . model . workflow . execution . galaxy . GalaxyWorkflowStatus ( ca . corefacility . bioinformatics . irida . model . workflow . execution . galaxy . GalaxyWorkflowState . RUNNING , stateIds ) ; when ( analysisSubmissionRepository . findByAnalysisState ( AnalysisState . RUNNING ) ) . thenReturn ( java . util . Arrays . asList ( analysisSubmission ) ) ; when ( analysisExecutionService . getWorkflowStatus ( analysisSubmission ) ) . thenReturn ( galaxyWorkflowStatus ) ; analysisExecutionScheduledTask . monitorRunningAnalyses ( ) ; "<AssertPlaceHolder>" ; verify ( analysisSubmissionRepository ) . save ( analysisSubmission ) ; } getAnalysisState ( ) { return analysisState ; }
|
org . junit . Assert . assertEquals ( AnalysisState . ERROR , analysisSubmission . getAnalysisState ( ) )
|
testIsDocumentExists_DocumentNominalLabelIdDTO ( ) { boolean result = qa . qcri . aidr . dbmanager . ejb . remote . facade . imp . TestDocumentNominalLabelResourceFacadeImp . documentNominalLabelResourceFacadeImp . isDocumentExists ( qa . qcri . aidr . dbmanager . ejb . remote . facade . imp . TestDocumentNominalLabelResourceFacadeImp . documentNominalLabel . getIdDTO ( ) ) ; "<AssertPlaceHolder>" ; } getIdDTO ( ) { return this . idDTO ; }
|
org . junit . Assert . assertEquals ( true , result )
|
transform_testXmlToXml_xsl ( ) { org . ikasan . component . converter . xml . XsltConverter < java . lang . String , java . lang . String > uut = new org . ikasan . component . converter . xml . XsltConverter < java . lang . String , java . lang . String > ( org . apache . xalan . xsltc . trax . TransformerFactoryImpl . newInstance ( ) ) ; org . ikasan . component . converter . xml . XsltConverterConfiguration XsltConverterConfiguration = new org . ikasan . component . converter . xml . XsltConverterConfiguration ( ) ; XsltConverterConfiguration . setStylesheetLocation ( ( ( org . ikasan . component . converter . xml . XsltConverterTest . CLASSPATH_URL_PREFIX ) + "testXmlToXml.xsl" ) ) ; uut . setConfiguration ( XsltConverterConfiguration ) ; final byte [ ] inboundPayloadContent = new java . lang . String ( "<sourceRoot><sourceElement1>element1Value</sourceElement1><sourceElement2>element2Value</sourceElement2></sourceRoot>" ) . getBytes ( ) ; java . lang . String outboundPayloadContent = new java . lang . String ( "<?xml<sp>version=\"1.0\"<sp>encoding=\"UTF-8\"?><targetRoot><targetElement1>element1Value</targetElement1><targetElement2>element2Value</targetElement2></targetRoot>" ) ; uut . startManagedResource ( ) ; java . lang . String result = uut . convert ( new java . lang . String ( inboundPayloadContent ) ) ; "<AssertPlaceHolder>" ; } convert ( java . lang . Object ) { return payload ; }
|
org . junit . Assert . assertEquals ( outboundPayloadContent , result )
|
getAllLocations_shouldReturnAllLocationsWhenIncludeRetiredIsTrue ( ) { java . util . List < org . openmrs . Location > locations = org . openmrs . api . context . Context . getLocationService ( ) . getAllLocations ( true ) ; "<AssertPlaceHolder>" ; } size ( ) { return getMemberships ( ) . stream ( ) . filter ( ( m ) -> ! ( m . getVoided ( ) ) ) . collect ( java . util . stream . Collectors . toList ( ) ) . size ( ) ; }
|
org . junit . Assert . assertEquals ( 6 , locations . size ( ) )
|
execute ( ) { java . io . File in = folder . newFile ( "in" ) ; java . io . File out = folder . newFile ( "out" ) ; put ( in , "aaa" , "bbb" , "ccc" ) ; try ( com . asakusafw . windgate . core . GateTask task = new com . asakusafw . windgate . core . GateTask ( profile ( ) , script ( p ( "testing" , "fs1" , in , "fs2" , out ) ) , "testing" , true , true , new com . asakusafw . windgate . core . ParameterList ( ) ) ) { task . execute ( ) ; } java . util . List < java . lang . String > results = get ( out ) ; "<AssertPlaceHolder>" ; } is ( java . lang . CharSequence , int , char ) { return ( string . charAt ( column ) ) == value ; }
|
org . junit . Assert . assertThat ( results , is ( java . util . Arrays . asList ( "aaa" , "bbb" , "ccc" ) ) )
|
stateMachineTraits058Test ( ) { cruise . umple . compiler . UmpleModel model = getModelByFilename ( "trait_test_data_0016.ump" ) ; boolean result = false ; try { model . run ( ) ; } catch ( java . lang . Exception e ) { result = e . getMessage ( ) . contains ( "236" ) ; } finally { "<AssertPlaceHolder>" ; cruise . umple . util . SampleFileWriter . destroy ( "traitTest.ump" ) ; } } contains ( java . lang . Object ) { if ( ( parent ) != null ) { return ( super . contains ( obj ) ) || ( parent . contains ( obj ) ) ; } else { return super . contains ( obj ) ; } }
|
org . junit . Assert . assertTrue ( result )
|
testPutReadOnly ( ) { setStackStatus ( StatusEnum . READ_ONLY , org . sagebionetworks . repo . web . StackStatusInterceptorTest . MSG_FORMAT ) ; "<AssertPlaceHolder>" ; servletTestHelper . updateEntity ( dispatchServlet , sampleProject , adminUserId ) ; org . junit . Assert . fail ( ) ; } getCurrentStatus ( ) { return stackStatusDao . getFullCurrentStatus ( ) ; }
|
org . junit . Assert . assertEquals ( StatusEnum . READ_ONLY , stackStatusDao . getCurrentStatus ( ) )
|
testListAttributeLocaleInheritance ( ) { java . util . List < org . apache . tiles . request . ApplicationResource > urls = new java . util . ArrayList < org . apache . tiles . request . ApplicationResource > ( ) ; urls . add ( url21 ) ; definitionDao . setSources ( urls ) ; org . apache . tiles . request . ApplicationContext applicationContext = createMock ( org . apache . tiles . request . ApplicationContext . class ) ; definitionDao . setReader ( new org . apache . tiles . definition . digester . DigesterDefinitionsReader ( ) ) ; replay ( applicationContext ) ; org . apache . tiles . Definition definition = definitionDao . getDefinition ( "test.inherit.list" , Locale . ITALIAN ) ; org . apache . tiles . ListAttribute listAttribute = ( ( org . apache . tiles . ListAttribute ) ( definition . getAttribute ( "list" ) ) ) ; java . util . List < org . apache . tiles . Attribute > attributes = listAttribute . getValue ( ) ; "<AssertPlaceHolder>" ; verify ( applicationContext ) ; } getValue ( ) { return value ; }
|
org . junit . Assert . assertEquals ( 2 , attributes . size ( ) )
|
testZstdFileIsNotSplittable ( ) { java . lang . String baseName = "test-input" ; java . io . File compressedFile = tmpFolder . newFile ( ( baseName + ".zst" ) ) ; writeFile ( compressedFile , generateInput ( 10 ) , CompressionMode . ZSTD ) ; org . apache . beam . sdk . io . CompressedSource < java . lang . Byte > source = org . apache . beam . sdk . io . CompressedSource . from ( new org . apache . beam . sdk . io . CompressedSourceTest . ByteSource ( compressedFile . getPath ( ) , 1 ) ) ; "<AssertPlaceHolder>" ; } isSplittable ( ) { return signature . processElement ( ) . isSplittable ( ) ; }
|
org . junit . Assert . assertFalse ( source . isSplittable ( ) )
|
testIsRequestedSessionIdValidFalse ( ) { javax . servlet . http . HttpServletRequest wrappedSimple = mock ( javax . servlet . http . HttpServletRequest . class ) ; com . amadeus . session . servlet . HttpRequestWrapper req = spy ( new com . amadeus . session . servlet . HttpRequestWrapper ( wrappedSimple , servletContext ) ) ; req . setRequestedSessionId ( com . amadeus . session . servlet . TestHttpRequestWrapper . SESSION_ID , true ) ; "<AssertPlaceHolder>" ; verify ( req , never ( ) ) . getSession ( false ) ; } isRequestedSessionIdValid ( ) { retrieveSessionId ( ) ; return ( ( retrievedId ) != null ) && ( isRepositoryChecked ( ) ) ; }
|
org . junit . Assert . assertFalse ( req . isRequestedSessionIdValid ( ) )
|
storeKey ( ) { eu . abc4trust . xml . SecretKey inspectorSecretKey = new eu . abc4trust . xml . SecretKey ( ) ; eu . abc4trust . xml . CryptoParams cryptoParams = new eu . abc4trust . xml . CryptoParams ( ) ; cryptoParams . getContent ( ) . add ( "TestString1" ) ; inspectorSecretKey . setCryptoParams ( cryptoParams ) ; eu . abc4trust . abce . integrationtests . inspector . credentialmanager . CredentialManagerImplTest . credMng . storeInspectorSecretKey ( eu . abc4trust . abce . integrationtests . inspector . credentialmanager . CredentialManagerImplTest . EXPECTED_UUID , inspectorSecretKey ) ; eu . abc4trust . xml . SecretKey storedInspectorSecretKey = eu . abc4trust . abce . integrationtests . inspector . credentialmanager . CredentialManagerImplTest . credMng . getInspectorSecretKey ( eu . abc4trust . abce . integrationtests . inspector . credentialmanager . CredentialManagerImplTest . EXPECTED_UUID ) ; "<AssertPlaceHolder>" ; } getInspectorSecretKey ( java . net . URI ) { final eu . abc4trust . xml . SecretKey ret = ( ( eu . abc4trust . xml . SecretKey ) ( eu . abc4trust . util . ByteSerializer . readFromBytes ( ps . getItem ( SimpleParamTypes . INSPECTOR_SECRET_KEY , inspectorKeyUID ) ) ) ) ; if ( ( ret != null ) && ( ( ret . getCryptoParams ( ) ) != null ) ) { eu . abc4trust . xml . util . XmlUtils . fixNestedContent ( ret . getCryptoParams ( ) ) ; } return ret ; }
|
org . junit . Assert . assertEquals ( inspectorSecretKey . getCryptoParams ( ) . getContent ( ) . get ( 0 ) , storedInspectorSecretKey . getCryptoParams ( ) . getContent ( ) . get ( 0 ) )
|
shouldMatchMetadataByStreamId ( ) { final uk . gov . justice . services . messaging . Metadata metadata = metadataWithRandomUUID ( uk . gov . justice . services . test . utils . core . matchers . JsonEnvelopeMetadataMatcherTest . EVENT_NAME ) . withStreamId ( uk . gov . justice . services . test . utils . core . matchers . JsonEnvelopeMetadataMatcherTest . STREAM_ID ) . build ( ) ; "<AssertPlaceHolder>" ; } metadata ( ) { return new uk . gov . justice . services . test . utils . core . matchers . JsonEnvelopeMetadataMatcher ( ) ; }
|
org . junit . Assert . assertThat ( metadata , uk . gov . justice . services . test . utils . core . matchers . JsonEnvelopeMetadataMatcher . metadata ( ) . withStreamId ( uk . gov . justice . services . test . utils . core . matchers . JsonEnvelopeMetadataMatcherTest . STREAM_ID ) )
|
testDao ( ) { "<AssertPlaceHolder>" ; } getContadorDao ( ) { return fwktd_sir_contadorDao ; }
|
org . junit . Assert . assertNotNull ( getContadorDao ( ) )
|
testEnsureSetIndexes ( ) { java . lang . String result = org . slc . sli . ingestion . util . MongoCommander . ensureIndexes ( indexes , dbName , mockedMongoTemplate ) ; "<AssertPlaceHolder>" ; for ( java . lang . String collection : shardCollections ) { com . mongodb . DBObject asskeys = new com . mongodb . BasicDBObject ( ) ; asskeys . put ( "creationTime" , 1 ) ; com . mongodb . DBObject options = buildOpts ( ( ( ( dbName ) + "." ) + collection ) , collectionOrder . get ( collection ) ) ; org . mockito . Mockito . verify ( collectionIns . get ( collection ) , org . mockito . Mockito . times ( 1 ) ) . createIndex ( asskeys , options ) ; } } ensureIndexes ( java . lang . String , java . lang . String , org . springframework . data . mongodb . core . MongoTemplate ) { java . util . Set < org . slc . sli . ingestion . util . MongoIndex > indexes = org . slc . sli . ingestion . util . MongoCommander . indexTxtFileParser . parse ( indexFile ) ; com . mongodb . DB dbConn = org . slc . sli . ingestion . util . MongoCommander . getDB ( db , mongoTemplate ) ; return org . slc . sli . ingestion . util . MongoCommander . ensureIndexes ( indexes , dbConn ) ; }
|
org . junit . Assert . assertNull ( result )
|
createWritesItemSource ( ) { org . appenders . log4j2 . elasticsearch . StringItemSourceFactory factory = createDefaultTestStringItemSourceFactory ( ) ; org . apache . logging . log4j . core . LogEvent logEvent = mock ( org . apache . logging . log4j . core . LogEvent . class ) ; com . fasterxml . jackson . databind . ObjectWriter objectWriter = spy ( new com . fasterxml . jackson . databind . ObjectMapper ( ) . writerFor ( org . apache . logging . log4j . core . LogEvent . class ) ) ; org . appenders . log4j2 . elasticsearch . ItemSource itemSource = factory . create ( logEvent , objectWriter ) ; "<AssertPlaceHolder>" ; verify ( objectWriter ) . writeValueAsString ( eq ( logEvent ) ) ; } create ( java . lang . Object , com . fasterxml . jackson . databind . ObjectWriter ) { try { return new org . appenders . log4j2 . elasticsearch . StringItemSource ( objectWriter . writeValueAsString ( event ) ) ; } catch ( com . fasterxml . jackson . core . JsonProcessingException e ) { org . appenders . log4j2 . elasticsearch . StringItemSourceFactory . LOGGER . error ( ( "Cannot<sp>write<sp>item<sp>source:<sp>" + ( e . getMessage ( ) ) ) ) ; return null ; } }
|
org . junit . Assert . assertNotNull ( itemSource )
|
testFindAttribute ( ) { com . freedomotic . rules . PayloadTest . LOG . info ( "Produce<sp>a<sp>list<sp>of<sp>statements<sp>searching<sp>by<sp>statement<sp>attribute<sp>name" ) ; com . freedomotic . rules . Payload payload = new com . freedomotic . rules . Payload ( ) ; payload . addStatement ( "no" , "value1" ) ; payload . addStatement ( "yes" , "value2" ) ; payload . addStatement ( "yes" , "value3" ) ; payload . addStatement ( "no" , "value4" ) ; payload . addStatement ( "yes" , "value5" ) ; payload . addStatement ( "yes" , "value6" ) ; "<AssertPlaceHolder>" ; } getStatements ( java . lang . String ) { java . util . List < com . freedomotic . rules . Statement > statements = new java . util . ArrayList ( ) ; synchronized ( payloadLst ) { payloadLst . stream ( ) . filter ( ( i ) -> i . getAttribute ( ) . equalsIgnoreCase ( attribute ) ) . forEachOrdered ( ( i ) -> { statements . add ( i ) ; } ) ; } return statements ; }
|
org . junit . Assert . assertEquals ( 4 , payload . getStatements ( "yes" ) . size ( ) )
|
testExecDescribeTriplesForZeroResult ( ) { java . lang . String queryString = "describe<sp>?s<sp>?p<sp>?o<sp>where<sp>{<sp>?s<sp>?p<sp>'madhu'<sp>.<sp>}<sp>limit<sp>1" ; com . franz . agraph . jena . AGQuery query = com . franz . agraph . jena . AGQueryFactory . create ( queryString ) ; try ( com . franz . agraph . jena . AGQueryExecution qe = com . franz . agraph . jena . AGQueryExecutionFactory . create ( query , test . AGQueryExecutionTest . model ) ) { java . util . Iterator < org . apache . jena . graph . Triple > iter = qe . execDescribeTriples ( ) ; "<AssertPlaceHolder>" ; } } hasNext ( ) { try { if ( ( next ) == null ) { next = parseNext ( ) ; } } catch ( java . io . IOException e ) { throw new org . eclipse . rdf4j . query . resultio . UnsupportedQueryResultFormatException ( e ) ; } return ( next ) != null ; }
|
org . junit . Assert . assertFalse ( iter . hasNext ( ) )
|
iteratorStep_6 ( ) { org . apache . jena . atlas . iterator . Iterator < ? > iter = org . apache . jena . atlas . iterator . TestIter . iterator ( 5 ) ; int x = org . apache . jena . atlas . iterator . Iter . step ( iter , 6 ) ; "<AssertPlaceHolder>" ; } iterator ( int ) { return java . util . stream . IntStream . range ( 1 , ( n + 1 ) ) . iterator ( ) ; }
|
org . junit . Assert . assertEquals ( 5 , x )
|
containerShouldStopGracefullyOnUnsubscribeErrors ( ) { when ( connectionFactoryMock . getConnection ( ) ) . thenReturn ( connectionMock ) ; doThrow ( new java . lang . IllegalStateException ( ) ) . when ( subscriptionMock ) . pUnsubscribe ( ) ; doAnswer ( ( it ) -> { java . lang . Runnable r = it . getArgument ( 0 ) ; new java . lang . Thread ( r ) . start ( ) ; return null ; } ) . when ( executorMock ) . execute ( any ( ) ) ; doAnswer ( ( it ) -> { when ( connectionMock . isSubscribed ( ) ) . thenReturn ( true ) ; return null ; } ) . when ( connectionMock ) . subscribe ( any ( ) , any ( ) ) ; container . addMessageListener ( adapter , new org . springframework . data . redis . listener . ChannelTopic ( "a" ) ) ; container . start ( ) ; when ( connectionMock . getSubscription ( ) ) . thenReturn ( subscriptionMock ) ; container . stop ( ) ; "<AssertPlaceHolder>" ; verify ( subscriptionMock ) . close ( ) ; } isRunning ( ) { synchronized ( this . lifecycleMonitor ) { return running ; } }
|
org . junit . Assert . assertThat ( container . isRunning ( ) , org . hamcrest . core . Is . is ( false ) )
|
testFindByPrimaryKeyExisting ( ) { com . liferay . asset . list . model . AssetListEntryUsage newAssetListEntryUsage = addAssetListEntryUsage ( ) ; com . liferay . asset . list . model . AssetListEntryUsage existingAssetListEntryUsage = _persistence . findByPrimaryKey ( newAssetListEntryUsage . getPrimaryKey ( ) ) ; "<AssertPlaceHolder>" ; } getPrimaryKey ( ) { return _amImageEntryId ; }
|
org . junit . Assert . assertEquals ( existingAssetListEntryUsage , newAssetListEntryUsage )
|
testCommentDefault ( ) { com . mitchellbosecke . pebble . PebbleEngine pebble = new com . mitchellbosecke . pebble . PebbleEngine . Builder ( ) . loader ( new com . mitchellbosecke . pebble . loader . StringLoader ( ) ) . build ( ) ; com . mitchellbosecke . pebble . template . PebbleTemplate template = pebble . getTemplate ( "{#<sp>comment1<sp>#}\n{#<sp>comment2<sp>#}" ) ; java . io . Writer writer = new java . io . StringWriter ( ) ; template . evaluate ( writer ) ; "<AssertPlaceHolder>" ; } toString ( ) { return sb . toString ( ) ; }
|
org . junit . Assert . assertEquals ( "" , writer . toString ( ) )
|
testRemoveFromEmpty ( ) { org . hipparchus . util . OpenIntToFieldHashMap < org . hipparchus . fraction . Fraction > map = new org . hipparchus . util . OpenIntToFieldHashMap < org . hipparchus . fraction . Fraction > ( field ) ; "<AssertPlaceHolder>" ; } getZero ( ) { return org . hipparchus . geometry . euclidean . twod . Vector2D . ZERO ; }
|
org . junit . Assert . assertTrue ( field . getZero ( ) . equals ( map . remove ( 50 ) ) )
|
testDynamicQueryByPrimaryKeyMissing ( ) { com . liferay . portal . kernel . dao . orm . DynamicQuery dynamicQuery = com . liferay . portal . kernel . dao . orm . DynamicQueryFactoryUtil . forClass ( com . liferay . asset . list . model . AssetListEntryUsage . class , _dynamicQueryClassLoader ) ; dynamicQuery . add ( com . liferay . portal . kernel . dao . orm . RestrictionsFactoryUtil . eq ( "assetListEntryUsageId" , com . liferay . portal . kernel . test . util . RandomTestUtil . nextLong ( ) ) ) ; java . util . List < com . liferay . asset . list . model . AssetListEntryUsage > result = _persistence . findWithDynamicQuery ( dynamicQuery ) ; "<AssertPlaceHolder>" ; } size ( ) { if ( ( _workflowTaskAssignees ) != null ) { return _workflowTaskAssignees . size ( ) ; } return _kaleoTaskAssignmentInstanceLocalService . getKaleoTaskAssignmentInstancesCount ( _kaleoTaskInstanceToken . getKaleoTaskInstanceTokenId ( ) ) ; }
|
org . junit . Assert . assertEquals ( 0 , result . size ( ) )
|
copyAttributes_PARTNER_SUBSCRIPTION ( ) { org . oscm . domobjects . Product prod = new org . oscm . domobjects . Product ( ) ; prod . setType ( ServiceType . PARTNER_SUBSCRIPTION ) ; prod . setConfiguratorUrl ( "some<sp>value" ) ; org . oscm . serviceprovisioningservice . assembler . ProductAssembler . copyAttributes ( prod , voService ) ; verifyCopiedAttributes ( prod ) ; "<AssertPlaceHolder>" ; } getConfiguratorUrl ( ) { return vo . getConfiguratorUrl ( ) ; }
|
org . junit . Assert . assertNull ( prod . getConfiguratorUrl ( ) )
|
createBotTrigger ( ) { ai . labs . models . BotTriggerConfiguration expected = createExpectedBotTriggerConfiguration ( ) ; javax . ws . rs . core . Response response = restBotTriggerStore . createBotTrigger ( expected ) ; "<AssertPlaceHolder>" ; org . mockito . Mockito . verify ( botTriggerStore , times ( 1 ) ) . createBotTrigger ( eq ( expected ) ) ; org . mockito . Mockito . verify ( botTriggersCache , times ( 1 ) ) . put ( eq ( intent ) , eq ( expected ) ) ; } createBotTrigger ( ai . labs . models . BotTriggerConfiguration ) { try { botTriggerStore . createBotTrigger ( botTriggerConfiguration ) ; botTriggersCache . put ( botTriggerConfiguration . getIntent ( ) , botTriggerConfiguration ) ; return javax . ws . rs . core . Response . ok ( ) . build ( ) ; } catch ( ai . labs . persistence . IResourceStore e ) { throw new javax . ws . rs . WebApplicationException ( e . getLocalizedMessage ( ) , Response . Status . CONFLICT ) ; } catch ( ai . labs . persistence . IResourceStore e ) { log . error ( e . getLocalizedMessage ( ) , e ) ; throw new javax . ws . rs . InternalServerErrorException ( e . getLocalizedMessage ( ) ) ; } }
|
org . junit . Assert . assertEquals ( 200 , response . getStatus ( ) )
|
testUnCapitalize1 ( ) { java . lang . Object target = "ABC" ; java . lang . String expResult = "aBC" ; java . lang . String result = org . thymeleaf . util . StringUtils . unCapitalize ( target ) ; "<AssertPlaceHolder>" ; }
|
org . junit . Assert . assertEquals ( expResult , result )
|
testSetFontListWithTwoElements ( ) { org . eclipse . swt . widgets . FontDialog fontDialog = new org . eclipse . swt . widgets . FontDialog ( shell ) ; fontDialog . setFontList ( new org . eclipse . swt . graphics . FontData [ 0 ] ) ; "<AssertPlaceHolder>" ; } getFontList ( ) { org . eclipse . swt . graphics . FontData [ ] result ; synchronized ( lock ) { result = new org . eclipse . swt . graphics . FontData [ fontDatas . size ( ) ] ; fontDatas . toArray ( result ) ; } return result ; }
|
org . junit . Assert . assertNull ( fontDialog . getFontList ( ) )
|
builderShouldCreatePopulatedOperation ( ) { final uk . gov . gchq . gaffer . operation . impl . job . GetJobResults op = new uk . gov . gchq . gaffer . operation . impl . job . GetJobResults . Builder ( ) . jobId ( "jobId" ) . build ( ) ; "<AssertPlaceHolder>" ; } getJobId ( ) { return jobId ; }
|
org . junit . Assert . assertEquals ( "jobId" , op . getJobId ( ) )
|
errorMarkerMessageArgs ( ) { uk . org . lidalia . slf4jtest . LoggingEvent event = uk . org . lidalia . slf4jtest . LoggingEvent . error ( marker , message , arg1 , arg2 ) ; uk . org . lidalia . slf4jtest . LoggingEvent expected = new uk . org . lidalia . slf4jtest . LoggingEvent ( ERROR , marker , message , arg1 , arg2 ) ; "<AssertPlaceHolder>" ; } error ( org . slf4j . Marker , java . lang . Throwable , java . lang . String , java . lang . Object [ ] ) { return new uk . org . lidalia . slf4jtest . LoggingEvent ( uk . org . lidalia . slf4jext . Level . ERROR , marker , throwable , message , arguments ) ; }
|
org . junit . Assert . assertThat ( event , org . hamcrest . core . Is . is ( expected ) )
|
testPDDeferredReqSetInboundStandardMode ( ) { gov . hhs . fha . nhinc . configuration . jmx . PassthruMXBeanRegistry registry = gov . hhs . fha . nhinc . configuration . jmx . PassthruMXBeanRegistry . getInstance ( ) ; gov . hhs . fha . nhinc . configuration . IConfiguration . serviceEnum serviceName = gov . hhs . fha . nhinc . configuration . IConfiguration . serviceEnum . DocumentSubmissionDeferredRequest ; gov . hhs . fha . nhinc . configuration . IConfiguration . directionEnum direction = gov . hhs . fha . nhinc . configuration . IConfiguration . directionEnum . Inbound ; boolean status = true ; gov . hhs . fha . nhinc . docsubmission . configuration . jmx . DocumentSubmissionDefRequest11WebServices docSubmissionDeferredReq = mock ( gov . hhs . fha . nhinc . docsubmission . configuration . jmx . DocumentSubmissionDefRequest11WebServices . class ) ; when ( docSubmissionDeferredReq . isInboundStandard ( ) ) . thenReturn ( status ) ; when ( docSubmissionDeferredReq . getServiceName ( ) ) . thenReturn ( serviceEnum . DocumentSubmissionDeferredRequest ) ; registry . registerWebServiceMXBean ( docSubmissionDeferredReq ) ; boolean standard = registry . isStandard ( serviceName , direction ) ; "<AssertPlaceHolder>" ; } isStandard ( gov . hhs . fha . nhinc . configuration . IConfiguration . serviceEnum , gov . hhs . fha . nhinc . configuration . IConfiguration . directionEnum ) { boolean standardMode = false ; for ( gov . hhs . fha . nhinc . configuration . jmx . WebServicesMXBean b : registeredBeans ) { if ( ( ( gov . hhs . fha . nhinc . configuration . jmx . PassthruMXBeanRegistry . isOutbound ( direction ) ) && ( b . getServiceName ( ) . equals ( serviceName ) ) ) && ( b . isOutboundStandard ( ) ) ) { standardMode = true ; } if ( ( ( gov . hhs . fha . nhinc . configuration . jmx . PassthruMXBeanRegistry . isInbound ( direction ) ) && ( b . getServiceName ( ) . equals ( serviceName ) ) ) && ( b . isInboundStandard ( ) ) ) { standardMode = true ; } } return standardMode ; }
|
org . junit . Assert . assertEquals ( true , standard )
|
getLocationManagerForPartitionedTableWithBaseOverride ( ) { org . apache . hadoop . hive . metastore . api . StorageDescriptor sd = new org . apache . hadoop . hive . metastore . api . StorageDescriptor ( ) ; sd . setLocation ( ( ( com . hotels . bdp . circustrain . core . source . SourceTest . TABLE_BASE_PATH ) + "/partition" ) ) ; partition . setSd ( sd ) ; com . hotels . bdp . circustrain . core . source . Source source = new com . hotels . bdp . circustrain . core . source . Source ( sourceCatalog , hiveConf , metaStoreClientSupplier , sourceCatalogListener , true , com . hotels . bdp . circustrain . core . source . SourceTest . TABLE_BASE_PATH ) ; com . hotels . bdp . circustrain . api . SourceLocationManager locationManager = source . getLocationManager ( table , partitions , com . hotels . bdp . circustrain . core . source . SourceTest . EVENT_ID , copierOptions ) ; "<AssertPlaceHolder>" ; } getTableLocation ( ) { return tableLocation ; }
|
org . junit . Assert . assertThat ( locationManager . getTableLocation ( ) , org . hamcrest . CoreMatchers . is ( new org . apache . hadoop . fs . Path ( com . hotels . bdp . circustrain . core . source . SourceTest . TABLE_BASE_PATH ) ) )
|
testZero ( ) { tested = new codeine . command_peer . ExponentialProgressiveRateClaculator ( 0 , 9 ) ; "<AssertPlaceHolder>" ; } getTimeToSleep ( long ) { return getTimeToSleep ( ratio ( ) , loopTime ) ; }
|
org . junit . Assert . assertEquals ( 0 , tested . getTimeToSleep ( 0 ) )
|
testLevenstein ( ) { annotatorResult = org . aksw . gerbil . utils . DocumentTextEditRevoker . revokeTextEdits ( annotatorResult , documentWithOrigText . getText ( ) ) ; "<AssertPlaceHolder>" ; } revokeTextEdits ( org . aksw . gerbil . transfer . nif . Document , java . lang . String ) { if ( ! ( document . getText ( ) . equals ( originalText ) ) ) { org . aksw . gerbil . utils . DocumentTextComparison . DocumentTextComparisonResult comparison = org . aksw . gerbil . utils . DocumentTextComparison . getLevensteinDistance ( document . getText ( ) , originalText ) ; org . aksw . gerbil . utils . DocumentTextEditRevoker . updateNEPositions ( document , comparison ) ; document . setText ( originalText ) ; } return document ; }
|
org . junit . Assert . assertEquals ( documentWithOrigText , annotatorResult )
|
nullValue ( ) { final org . jboss . msc . value . ImmediateValue < java . lang . Object > immediateValue = new org . jboss . msc . value . ImmediateValue < java . lang . Object > ( null ) ; "<AssertPlaceHolder>" ; } getValue ( ) { synchronized ( this ) { if ( ( injector ) != null ) return injector . getValue ( ) ; if ( ( instance ) != null ) return instance . getValue ( ) ; } throw new java . lang . IllegalStateException ( "Service<sp>is<sp>not<sp>installed" ) ; }
|
org . junit . Assert . assertNull ( immediateValue . getValue ( ) )
|
testInsertRetrieveEmptyList ( ) { net . vz . mongodb . jackson . mock . MockObject object = new net . vz . mongodb . jackson . mock . MockObject ( ) ; object . simpleList = java . util . Collections . emptyList ( ) ; coll . insert ( object ) ; net . vz . mongodb . jackson . mock . MockObject result = coll . findOne ( ) ; "<AssertPlaceHolder>" ; } findOne ( ) { return findOne ( new net . vz . mongodb . jackson . BasicDBObject ( ) ) ; }
|
org . junit . Assert . assertEquals ( object . simpleList , result . simpleList )
|
listHostsFilterBySelectors ( ) { final java . util . List < java . lang . String > hosts = com . google . common . collect . ImmutableList . of ( "foo1" , "foo2" , "foo3" ) ; mockResponse ( "GET" , org . hamcrest . Matchers . allOf ( com . spotify . helios . client . HeliosClientTest . hasPath ( "/hosts/" ) , com . spotify . helios . client . HeliosClientTest . containsQuery ( "selector=foo%3Dbar" ) , com . spotify . helios . client . HeliosClientTest . containsQuery ( "selector=site%3Dabc" ) ) , com . spotify . helios . client . HeliosClientTest . response ( "GET" , 200 , hosts ) ) ; final java . util . Set < java . lang . String > selectors = com . google . common . collect . ImmutableSet . of ( "foo=bar" , "site=abc" ) ; "<AssertPlaceHolder>" ; } listHosts ( java . lang . String ) { return listHosts ( com . google . common . collect . ImmutableMultimap . of ( "namePattern" , namePattern ) ) ; }
|
org . junit . Assert . assertThat ( client . listHosts ( selectors ) . get ( ) , org . hamcrest . Matchers . equalTo ( hosts ) )
|
textDB_8_bnode_subject ( ) { org . apache . jena . query . text . Dataset ds = create ( ) ; org . apache . jena . query . text . TestTextNonTxn . dataTurtle ( ds , org . apache . jena . atlas . lib . StrUtils . strjoinNL ( "PREFIX<sp>rdfs:<sp><http://www.w3.org/2000/01/rdf-schema#>" , "[]<sp>a<sp><http://example.org/Entity>;<sp>rdfs:label<sp>'foo'<sp>." ) ) ; java . lang . String qs = org . apache . jena . atlas . lib . StrUtils . strjoinNL ( "PREFIX<sp>text:<sp><http://jena.apache.org/text#>" , "PREFIX<sp>rdfs:<sp><http://www.w3.org/2000/01/rdf-schema#>" , "SELECT<sp>*" , "{<sp>?s<sp>text:query<sp>'foo'<sp>.<sp>?s<sp>a<sp><http://example.org/Entity><sp>}" ) ; org . apache . jena . query . text . Query q = org . apache . jena . query . text . QueryFactory . create ( qs ) ; org . apache . jena . query . text . QueryExecution qexec = org . apache . jena . query . text . QueryExecutionFactory . create ( q , ds ) ; org . apache . jena . query . text . ResultSet rs = qexec . execSelect ( ) ; java . util . List < org . apache . jena . query . text . QuerySolution > x = org . apache . jena . atlas . iterator . Iter . toList ( rs ) ; "<AssertPlaceHolder>" ; } size ( ) { return prefixes . size ( ) ; }
|
org . junit . Assert . assertEquals ( 1 , x . size ( ) )
|
test_return_octet ( ) { byte result = server . return_octet ( ) ; "<AssertPlaceHolder>" ; } return_octet ( ) { return ( ( byte ) ( 240 ) ) ; }
|
org . junit . Assert . assertEquals ( ( ( byte ) ( 240 ) ) , result )
|
testLowWatermark ( ) { final int childType = 3 ; com . twitter . crunch . Node child = mockChildNode ( childType ) ; com . twitter . crunch . Node root = mockRootNode ( childType , child ) ; com . twitter . crunch . AssignmentTracker tracker = new com . twitter . crunch . AssignmentTrackerImpl ( root , ( ( int ) ( ( AssignmentTrackerImpl . LOW_WATERMARK ) - 1 ) ) , 0.25 ) ; "<AssertPlaceHolder>" ; } trackAssignment ( com . twitter . crunch . Node ) { return false ; }
|
org . junit . Assert . assertFalse ( tracker . trackAssignment ( child ) )
|
testDynamicQueryByProjectionMissing ( ) { com . liferay . portal . kernel . dao . orm . DynamicQuery dynamicQuery = com . liferay . portal . kernel . dao . orm . DynamicQueryFactoryUtil . forClass ( com . liferay . marketplace . model . Module . class , _dynamicQueryClassLoader ) ; dynamicQuery . setProjection ( com . liferay . portal . kernel . dao . orm . ProjectionFactoryUtil . property ( "moduleId" ) ) ; dynamicQuery . add ( com . liferay . portal . kernel . dao . orm . RestrictionsFactoryUtil . in ( "moduleId" , new java . lang . Object [ ] { com . liferay . portal . kernel . test . util . RandomTestUtil . nextLong ( ) } ) ) ; java . util . List < java . lang . Object > result = _persistence . findWithDynamicQuery ( dynamicQuery ) ; "<AssertPlaceHolder>" ; } size ( ) { if ( ( _workflowTaskAssignees ) != null ) { return _workflowTaskAssignees . size ( ) ; } return _kaleoTaskAssignmentInstanceLocalService . getKaleoTaskAssignmentInstancesCount ( _kaleoTaskInstanceToken . getKaleoTaskInstanceTokenId ( ) ) ; }
|
org . junit . Assert . assertEquals ( 0 , result . size ( ) )
|
upgradeActivitiesToVrc3Test ( ) { javax . xml . bind . JAXBContext jaxbContext1 = javax . xml . bind . JAXBContext . newInstance ( org . orcid . jaxb . model . record . summary_rc4 . ActivitiesSummary . class ) ; javax . xml . bind . JAXBContext jaxbContext2 = javax . xml . bind . JAXBContext . newInstance ( org . orcid . jaxb . model . record . summary_rc4 . ActivitiesSummary . class ) ; javax . xml . bind . Unmarshaller jaxbUnmarshaller1 = jaxbContext1 . createUnmarshaller ( ) ; javax . xml . bind . Unmarshaller jaxbUnmarshaller2 = jaxbContext2 . createUnmarshaller ( ) ; java . io . InputStream rc3Stream = org . orcid . record_2_0 . ConvertVrc4ToV2Test . class . getClassLoader ( ) . getResourceAsStream ( "test-activities-2.0_rc4.xml" ) ; java . io . InputStream rc4Stream = org . orcid . record_2_0 . ConvertVrc4ToV2Test . class . getClassLoader ( ) . getResourceAsStream ( "test-activities-2.0.xml" ) ; org . orcid . jaxb . model . record . summary_rc4 . ActivitiesSummary rc3Activities = ( ( org . orcid . jaxb . model . record . summary_rc4 . ActivitiesSummary ) ( jaxbUnmarshaller1 . unmarshal ( rc3Stream ) ) ) ; org . orcid . jaxb . model . record . summary_v2 . ActivitiesSummary rc4Activities1 = ( ( org . orcid . jaxb . model . record . summary_v2 . ActivitiesSummary ) ( jaxbUnmarshaller2 . unmarshal ( rc4Stream ) ) ) ; org . orcid . core . version . V2Convertible result = versionConverterV2_0_rc4ToV2_0 . upgrade ( new org . orcid . core . version . V2Convertible ( rc3Activities , "v2_rc4" ) ) ; org . orcid . jaxb . model . record . summary_v2 . ActivitiesSummary rc4Activities2 = ( ( org . orcid . jaxb . model . record . summary_v2 . ActivitiesSummary ) ( result . getObjectToConvert ( ) ) ) ; "<AssertPlaceHolder>" ; } getObjectToConvert ( ) { return objectToConvert ; }
|
org . junit . Assert . assertEquals ( rc4Activities1 , rc4Activities2 )
|
testRuleEventListener ( ) { java . lang . String str = ( ( ( ( ( ( ( "import<sp>" + ( org . drools . compiler . Person . class . getCanonicalName ( ) ) ) + "onDeleteMatch:<sp>20" 7 ) + "onDeleteMatch:<sp>20" 0 ) + "rule<sp>R<sp>when\n" ) + "<sp>$p:<sp>Person(<sp>$age:<sp>age<sp><<sp>20<sp>)\n" ) + "then\n" ) + "onDeleteMatch:<sp>20" 9 ) + "end\n" ; org . kie . api . runtime . KieSession ksession = new org . kie . internal . utils . KieHelper ( ) . addContent ( str , ResourceType . DRL ) . build ( ) . newKieSession ( ) ; java . util . List < java . lang . String > list = new java . util . ArrayList ( ) ; ( ( org . kie . internal . event . rule . RuleEventManager ) ( ksession ) ) . addEventListener ( new org . kie . internal . event . rule . RuleEventListener ( ) { @ org . drools . compiler . integrationtests . Override public void onBeforeMatchFire ( org . kie . api . runtime . rule . Match match ) { list . add ( ( "onDeleteMatch:<sp>20" 8 + ( match . getDeclarationValue ( "onDeleteMatch:<sp>20" 3 ) ) ) ) ; } @ org . drools . compiler . integrationtests . Override public void onAfterMatchFire ( org . kie . api . runtime . rule . Match match ) { list . add ( ( "onDeleteMatch:<sp>20" 1 + ( match . getDeclarationValue ( "onDeleteMatch:<sp>20" 3 ) ) ) ) ; } @ org . drools . compiler . integrationtests . Override public void onDeleteMatch ( org . kie . api . runtime . rule . Match match ) { list . add ( ( "onDeleteMatch:<sp>" + ( match . getDeclarationValue ( "onDeleteMatch:<sp>20" 3 ) ) ) ) ; } @ org . drools . compiler . integrationtests . Override public void onUpdateMatch ( org . kie . api . runtime . rule . Match match ) { list . add ( ( "onDeleteMatch:<sp>20" 6 + ( match . getDeclarationValue ( "onDeleteMatch:<sp>20" 3 ) ) ) ) ; } } ) ; ksession . insert ( new org . drools . compiler . Person ( "John<sp>Smith" , 18 ) ) ; ksession . fireAllRules ( ) ; java . util . List < java . lang . String > expected = java . util . Arrays . asList ( "onDeleteMatch:<sp>20" 4 , "onDeleteMatch:<sp>20" 2 , "onUpdateMatch:<sp>19" , "onBeforeMatchFire:<sp>19" , "onDeleteMatch:<sp>20" 5 , "onDeleteMatch:<sp>20" ) ; "<AssertPlaceHolder>" ; } fireAllRules ( ) { return 0 ; }
|
org . junit . Assert . assertEquals ( expected , list )
|
unquote_3 ( ) { final java . lang . String input = "h" ; final java . lang . String expected = "h" ; final java . lang . String actual = org . erlide . util . StringUtils . unquote ( input ) ; "<AssertPlaceHolder>" ; } unquote ( java . lang . String ) { final int length = s . length ( ) ; if ( ( ( length > 2 ) && ( ( s . charAt ( 0 ) ) == '\'' ) ) && ( ( s . charAt ( ( length - 1 ) ) ) == '\'' ) ) { return s . substring ( 1 , ( length - 1 ) ) ; } return s ; }
|
org . junit . Assert . assertEquals ( expected , actual )
|
testNegativeIntegerKey ( ) { java . io . File file = net . openhft . chronicle . map . ChronicleMapTest . getPersistenceFile ( ) ; try ( net . openhft . chronicle . map . ChronicleMap < java . lang . Integer , java . lang . Integer > map = net . openhft . chronicle . map . ChronicleMap . of ( net . openhft . chronicle . map . Integer . class , net . openhft . chronicle . map . Integer . class ) . entries ( 1 ) . createPersistedTo ( file ) ) { map . put ( ( - 1 ) , ( - 1 ) ) ; } try ( net . openhft . chronicle . map . ChronicleMap < java . lang . Integer , java . lang . Integer > map = net . openhft . chronicle . map . ChronicleMap . of ( net . openhft . chronicle . map . Integer . class , net . openhft . chronicle . map . Integer . class ) . entries ( 1 ) . recoverPersistedTo ( file , true ) ) { "<AssertPlaceHolder>" ; } } get ( java . lang . Object ) { return check ( new net . openhft . chronicle . map . ReplicationCheckingMap . Call < K , V > ( ) { @ net . openhft . chronicle . map . Override public java . lang . Object method ( java . util . concurrent . ConcurrentMap < K , V > map ) { return map . get ( key ) ; } } ) ; }
|
org . junit . Assert . assertEquals ( java . lang . Integer . valueOf ( ( - 1 ) ) , map . get ( ( - 1 ) ) )
|
CallInstanceInterfaceGenericInterface ( ) { java . lang . String fromClass = "Domain.Direct.Violating.CallInstanceInterfaceGenericInterface" ; java . lang . String toClass = "Technology.Direct.Dao.CallInstanceInterfaceDAO<p1>" ; java . util . ArrayList < java . lang . String > typesToFind = new java . util . ArrayList < java . lang . String > ( ) ; typesToFind . add ( "Call" ) ; "<AssertPlaceHolder>" ; } areDependencyTypesDetected ( java . lang . String , java . lang . String , java . util . ArrayList , boolean ) { return areDependencyTypesDetected ( classFrom , classTo , dependencyTypes , "" , isIndirect ) ; }
|
org . junit . Assert . assertTrue ( areDependencyTypesDetected ( fromClass , toClass , typesToFind , false ) )
|
testConvertToDataModelTypeWithoutGtCount ( ) { stats . getGenotypeCount ( ) . clear ( ) ; stats . getGenotypeFreq ( ) . clear ( ) ; stats . setAlleleCount ( ( - 1 ) ) ; stats . setRefAlleleCount ( ( - 1 ) ) ; stats . setAltAlleleCount ( ( - 1 ) ) ; org . opencb . opencga . storage . mongodb . variant . converters . DocumentToVariantStatsConverter converter = new org . opencb . opencga . storage . mongodb . variant . converters . DocumentToVariantStatsConverter ( ) ; mongoStats . put ( DocumentToVariantStatsConverter . NUMGT_FIELD , new org . bson . Document ( ) ) ; org . opencb . biodata . models . variant . stats . VariantStats converted = converter . convertToDataModelType ( mongoStats ) ; "<AssertPlaceHolder>" ; } convertToDataModelType ( org . bson . Document ) { org . bson . Document projects = ( ( org . bson . Document ) ( object . get ( "projects" ) ) ) ; return super . convertToDataModelType ( projects ) ; }
|
org . junit . Assert . assertEquals ( stats , converted )
|
testGetTagsWithExtraTagFields ( ) { com . streamsets . pipeline . stage . destination . influxdb . GenericRecordConverterConfigBean conf = new com . streamsets . pipeline . stage . destination . influxdb . GenericRecordConverterConfigBean ( ) ; conf . tagFields . add ( "/extraTag" ) ; java . util . Map < java . lang . String , java . lang . String > expectedTags = new ImmutableMap . Builder < java . lang . String , java . lang . String > ( ) . put ( "host" , com . streamsets . pipeline . stage . destination . influxdb . TestCollectdRecordConverter . HOSTNAME ) . put ( "instance" , com . streamsets . pipeline . stage . destination . influxdb . TestCollectdRecordConverter . PLUGIN_INSTANCE ) . put ( "type" , com . streamsets . pipeline . stage . destination . influxdb . TestCollectdRecordConverter . TYPE ) . put ( "extraTag" , "tagValue" ) . build ( ) ; record . set ( "/extraTag" , com . streamsets . pipeline . api . Field . create ( "tagValue" ) ) ; java . util . Map < java . lang . String , java . lang . String > tags = com . streamsets . pipeline . stage . destination . influxdb . CollectdRecordConverter . getTags ( conf . tagFields , record ) ; "<AssertPlaceHolder>" ; } getTags ( java . util . List , com . streamsets . pipeline . api . Record ) { java . util . Map < java . lang . String , java . lang . String > tags = new java . util . HashMap ( com . streamsets . pipeline . stage . destination . influxdb . CollectdRecordConverter . TAG_FIELDS . size ( ) ) ; for ( java . lang . String tag : com . streamsets . pipeline . stage . destination . influxdb . CollectdRecordConverter . TAG_FIELDS ) { com . streamsets . pipeline . stage . destination . influxdb . CollectdRecordConverter . putIfTag ( record , tags , tag ) ; } tags . putAll ( com . streamsets . pipeline . stage . destination . influxdb . RecordConverterUtil . getTags ( tagFields , record ) ) ; return tags ; }
|
org . junit . Assert . assertEquals ( expectedTags , tags )
|
mapCancelOrderFailureReturnsFalse ( ) { java . lang . String data = "{\"success\":0}" ; boolean result = mapper . mapCancelOrder ( data ) ; "<AssertPlaceHolder>" ; } mapCancelOrder ( java . lang . String ) { int success = gson . fromJson ( cancelOrderResult , com . cf . data . map . poloniex . JsonObject . class ) . get ( "success" ) . getAsInt ( ) ; return success == 1 ; }
|
org . junit . Assert . assertFalse ( result )
|
setCertificateSavesCerts ( ) { com . microsoft . azure . sdk . iot . deps . auth . IotHubCertificateManager testCertManager = mockit . Deencapsulation . newInstance ( com . microsoft . azure . sdk . iot . deps . auth . IotHubCertificateManager . class ) ; mockit . Deencapsulation . invoke ( testCertManager , "setCertificates" , someSingleValidCertificate ) ; java . util . Collection < ? extends java . security . cert . Certificate > certificates = mockit . Deencapsulation . getField ( testCertManager , "certificates" ) ; "<AssertPlaceHolder>" ; } size ( ) { return this . property . size ( ) ; }
|
org . junit . Assert . assertTrue ( ( ( certificates . size ( ) ) == 1 ) )
|
testEvaluationError ( ) { java . lang . String errorMessage = "something<sp>failed" ; mockQueryPlanner . setQueryPlan ( createQueryPlan ( ( previousTaskResult ) -> { throw new java . sql . SQLException ( errorMessage ) ; } , QueryType . SELECT ) ) ; java . util . List < org . meridor . perspective . sql . impl . QueryResult > results = queryProcessor . process ( org . meridor . perspective . sql . impl . QueryProcessorImplTest . createQuery ( org . meridor . perspective . sql . impl . QueryProcessorImplTest . QUERY ) ) ; org . meridor . perspective . sql . impl . QueryResult evaluationErrorResult = new org . meridor . perspective . sql . impl . QueryResult ( ) ; evaluationErrorResult . setStatus ( QueryStatus . EVALUATION_ERROR ) ; evaluationErrorResult . setData ( org . meridor . perspective . sql . impl . DataContainer . empty ( ) . toData ( ) ) ; evaluationErrorResult . setMessage ( errorMessage ) ; "<AssertPlaceHolder>" ; } contains ( java . lang . String ) { for ( org . meridor . perspective . shell . common . validator . Setting c : org . meridor . perspective . shell . common . validator . Setting . values ( ) ) { if ( c . name ( ) . equals ( name ) ) { return true ; } } return false ; }
|
org . junit . Assert . assertThat ( results , contains ( evaluationErrorResult ) )
|
testGetVirtualServersList ( ) { java . util . List < org . rackspace . stingray . client . list . Child > children = client . getVirtualServers ( ) ; "<AssertPlaceHolder>" ; } size ( ) { return size ; }
|
org . junit . Assert . assertTrue ( ( ( children . size ( ) ) > 0 ) )
|
testInterleavedCancellationBarriers ( ) { org . apache . flink . runtime . io . network . partition . consumer . BufferOrEvent [ ] sequence = new org . apache . flink . runtime . io . network . partition . consumer . BufferOrEvent [ ] { org . apache . flink . streaming . runtime . io . BarrierTrackerTest . createBarrier ( 1L , 0 ) , org . apache . flink . streaming . runtime . io . BarrierTrackerTest . createCancellationBarrier ( 2L , 0 ) , org . apache . flink . streaming . runtime . io . BarrierTrackerTest . createCancellationBarrier ( 1L , 1 ) , org . apache . flink . streaming . runtime . io . BarrierTrackerTest . createCancellationBarrier ( 2L , 1 ) , org . apache . flink . streaming . runtime . io . BarrierTrackerTest . createCancellationBarrier ( 1L , 2 ) , org . apache . flink . streaming . runtime . io . BarrierTrackerTest . createCancellationBarrier ( 2L , 2 ) , org . apache . flink . streaming . runtime . io . BarrierTrackerTest . createBuffer ( 0 ) } ; org . apache . flink . streaming . runtime . io . MockInputGate gate = new org . apache . flink . streaming . runtime . io . MockInputGate ( org . apache . flink . streaming . runtime . io . BarrierTrackerTest . PAGE_SIZE , 3 , java . util . Arrays . asList ( sequence ) ) ; org . apache . flink . streaming . runtime . io . BarrierTracker tracker = new org . apache . flink . streaming . runtime . io . BarrierTracker ( gate ) ; org . apache . flink . runtime . jobgraph . tasks . AbstractInvokable statefulTask = mock ( org . apache . flink . runtime . jobgraph . tasks . AbstractInvokable . class ) ; tracker . registerCheckpointEventHandler ( statefulTask ) ; for ( org . apache . flink . runtime . io . network . partition . consumer . BufferOrEvent boe : sequence ) { if ( ( boe . isBuffer ( ) ) || ( ( ( boe . getEvent ( ) . getClass ( ) ) != ( org . apache . flink . runtime . io . network . api . CheckpointBarrier . class ) ) && ( ( boe . getEvent ( ) . getClass ( ) ) != ( org . apache . flink . runtime . io . network . api . CancelCheckpointMarker . class ) ) ) ) { "<AssertPlaceHolder>" ; } } verify ( statefulTask , times ( 1 ) ) . abortCheckpointOnBarrier ( eq ( 1L ) , any ( java . lang . Throwable . class ) ) ; verify ( statefulTask , times ( 1 ) ) . abortCheckpointOnBarrier ( eq ( 2L ) , any ( java . lang . Throwable . class ) ) ; } getNextNonBlocked ( ) { while ( true ) { java . util . Optional < org . apache . flink . runtime . io . network . partition . consumer . BufferOrEvent > next = inputGate . getNextBufferOrEvent ( ) ; if ( ! ( next . isPresent ( ) ) ) { return null ; } org . apache . flink . runtime . io . network . partition . consumer . BufferOrEvent bufferOrEvent = next . get ( ) ; if ( bufferOrEvent . isBuffer ( ) ) { return bufferOrEvent ; } else if ( ( bufferOrEvent . getEvent ( ) . getClass ( ) ) == ( org . apache . flink . runtime . io . network . api . CheckpointBarrier . class ) ) { processBarrier ( ( ( org . apache . flink . runtime . io . network . api . CheckpointBarrier ) ( bufferOrEvent . getEvent ( ) ) ) , bufferOrEvent . getChannelIndex ( ) ) ; } else if ( ( bufferOrEvent . getEvent ( ) . getClass ( ) ) == ( org . apache . flink . runtime . io . network . api . CancelCheckpointMarker . class ) ) { processCheckpointAbortBarrier ( ( ( org . apache . flink . runtime . io . network . api . CancelCheckpointMarker ) ( bufferOrEvent . getEvent ( ) ) ) , bufferOrEvent . getChannelIndex ( ) ) ; } else { return bufferOrEvent ; } } }
|
org . junit . Assert . assertEquals ( boe , tracker . getNextNonBlocked ( ) )
|
shouldFindTestUnitsInCustomJUnit3Class ( ) { final java . util . Collection < org . pitest . testapi . TestUnit > actual = this . testee . findTestUnits ( org . pitest . junit . JUnitCustomRunnerTestUnitFinderTest . JMockTest . class ) ; "<AssertPlaceHolder>" ; } isEmpty ( ) { return this . members . isEmpty ( ) ; }
|
org . junit . Assert . assertFalse ( actual . isEmpty ( ) )
|
testRegistrationOfExecutionsFailing ( ) { try { final org . apache . flink . runtime . jobgraph . JobVertexID jid1 = new org . apache . flink . runtime . jobgraph . JobVertexID ( ) ; final org . apache . flink . runtime . jobgraph . JobVertexID jid2 = new org . apache . flink . runtime . jobgraph . JobVertexID ( ) ; org . apache . flink . runtime . jobgraph . JobVertex v1 = new org . apache . flink . runtime . jobgraph . JobVertex ( "v1" , jid1 ) ; org . apache . flink . runtime . jobgraph . JobVertex v2 = new org . apache . flink . runtime . jobgraph . JobVertex ( "v2" , jid2 ) ; java . util . Map < org . apache . flink . runtime . executiongraph . ExecutionAttemptID , org . apache . flink . runtime . executiongraph . Execution > executions = setupExecution ( v1 , 7 , v2 , 6 ) . f1 ; for ( org . apache . flink . runtime . executiongraph . Execution e : executions . values ( ) ) { e . markFailed ( null ) ; } "<AssertPlaceHolder>" ; } catch ( java . lang . Exception e ) { e . printStackTrace ( ) ; org . junit . Assert . fail ( e . getMessage ( ) ) ; } } size ( ) { return routes . size ( ) ; }
|
org . junit . Assert . assertEquals ( 0 , executions . size ( ) )
|
testUnwrapDecryptPlainText ( ) { final java . lang . String wrappedEncryptedValue = "plaintext" ; ddf . security . encryption . impl . EncryptionServiceImplTest . LOGGER . debug ( "Original<sp>value<sp>is:<sp>{}" , wrappedEncryptedValue ) ; final ddf . security . encryption . impl . EncryptionServiceImpl encryptionService = new ddf . security . encryption . impl . EncryptionServiceImpl ( ) ; final java . lang . String decryptedValue = encryptionService . decryptValue ( wrappedEncryptedValue ) ; ddf . security . encryption . impl . EncryptionServiceImplTest . LOGGER . debug ( "Unwrapped<sp>decrypted<sp>value<sp>is:<sp>{}" , decryptedValue ) ; "<AssertPlaceHolder>" ; } decryptValue ( java . lang . String ) { if ( org . apache . commons . lang3 . StringUtils . isEmpty ( wrappedEncryptedValue ) ) { return wrappedEncryptedValue ; } java . lang . String encryptedValue = unwrapEncryptedValue ( wrappedEncryptedValue ) ; if ( wrappedEncryptedValue . equals ( encryptedValue ) ) { return wrappedEncryptedValue ; } return decrypt ( encryptedValue ) ; }
|
org . junit . Assert . assertEquals ( wrappedEncryptedValue , decryptedValue )
|
testGetAndSetJMSCorrelationID ( ) { org . apache . qpid . jms . message . JmsMessage msg = factory . createMessage ( ) ; msg . setJMSCorrelationID ( this . jmsCorrelationID ) ; "<AssertPlaceHolder>" ; } getJMSCorrelationID ( ) { return correlationId ; }
|
org . junit . Assert . assertTrue ( msg . getJMSCorrelationID ( ) . equals ( this . jmsCorrelationID ) )
|
testGetTargetClusterEnd ( ) { java . util . Map < java . lang . String , java . lang . String > props = new java . util . HashMap ( ) ; org . apache . ambari . server . controller . ivory . Feed feed = new org . apache . ambari . server . controller . ivory . Feed ( "Feed1" , "d" , "s" , "sch" , "source" , "st" , "send" , "l" , "a" , "target" , "st" , "d" 0 , "l" , "a" , props ) ; "<AssertPlaceHolder>" ; } getTargetClusterEnd ( ) { return targetClusterEnd ; }
|
org . junit . Assert . assertEquals ( "d" 0 , feed . getTargetClusterEnd ( ) )
|
testBaseEquality1 ( ) { org . wildfly . security . auth . client . AuthenticationConfiguration c1 ; org . wildfly . security . auth . client . AuthenticationConfiguration c2 ; c1 = org . wildfly . security . auth . client . AuthenticationConfiguration . empty ( ) . useName ( "name1" ) . usePort ( 1234 ) . useProtocol ( "abcd" ) ; c2 = org . wildfly . security . auth . client . AuthenticationConfiguration . empty ( ) . useName ( "name1" ) . usePort ( 1234 ) . useProtocol ( "abcd" ) ; "<AssertPlaceHolder>" ; } useProtocol ( java . lang . String ) { if ( ( protocol == null ) || ( protocol . isEmpty ( ) ) ) { protocol = null ; } if ( java . util . Objects . equals ( this . setProtocol , protocol ) ) { return this ; } else { return new org . wildfly . security . auth . client . AuthenticationConfiguration ( this , org . wildfly . security . auth . client . AuthenticationConfiguration . SET_PROTOCOL , protocol ) ; } }
|
org . junit . Assert . assertEquals ( c1 , c2 )
|
testGetAllData1 ( ) { int samplingRate = 16000 ; javax . sound . sampled . AudioFormat af = new javax . sound . sampled . AudioFormat ( AudioFormat . Encoding . PCM_SIGNED , samplingRate , 16 , 1 , 2 , samplingRate , false ) ; double [ ] testSignal = marytts . util . math . FFTTest . getSampleSignal ( 16000 ) ; marytts . util . data . audio . DDSAudioInputStream ais = new marytts . util . data . audio . DDSAudioInputStream ( new marytts . util . data . BufferedDoubleDataSource ( testSignal ) , af ) ; double [ ] result = new marytts . util . data . audio . AudioDoubleDataSource ( ais ) . getAllData ( ) ; "<AssertPlaceHolder>" ; } getAllData ( ) { if ( ( leafNode ) != null ) return leafNode . getAllData ( ) ; else if ( ( decisionNode ) != null ) return decisionNode . getAllData ( ) ; return null ; }
|
org . junit . Assert . assertTrue ( ( ( result . length ) == ( testSignal . length ) ) )
|
ackTransaction ( ) { eu . icolumbo . breeze . TestBean . Data record1 = new eu . icolumbo . breeze . TestBean . Data ( ) ; record1 . setId ( 0 ) ; record1 . setMessage ( "ding" ) ; eu . icolumbo . breeze . TestBean . Data record2 = new eu . icolumbo . breeze . TestBean . Data ( ) ; record2 . setId ( 1 ) ; record2 . setMessage ( "dong" ) ; java . util . List < java . lang . Object > bean = new java . util . ArrayList ( ) ; bean . add ( record1 ) ; bean . add ( record2 ) ; doReturn ( bean ) . when ( applicationContextMock ) . getBean ( bean . getClass ( ) ) ; eu . icolumbo . breeze . SpringSpout subject = new eu . icolumbo . breeze . SpringSpout ( bean . getClass ( ) , "toArray()" , "g" ) ; subject . setScatterOutput ( true ) ; subject . setAckSignature ( "set(id,<sp>message)" ) ; subject . setApplicationContext ( applicationContextMock ) ; subject . open ( stormConf , contextMock , collectorMock ) ; subject . nextTuple ( ) ; org . mockito . ArgumentCaptor < java . lang . Object > messageIdCaptor = org . mockito . ArgumentCaptor . forClass ( java . lang . Object . class ) ; verify ( collectorMock ) . emit ( eq ( "default" ) , eq ( bean . subList ( 0 , 1 ) ) , messageIdCaptor . capture ( ) ) ; verify ( collectorMock ) . emit ( eq ( "default" ) , eq ( bean . subList ( 1 , 2 ) ) , messageIdCaptor . capture ( ) ) ; verifyNoMoreInteractions ( collectorMock ) ; subject . ack ( messageIdCaptor . getAllValues ( ) . get ( 0 ) ) ; subject . ack ( messageIdCaptor . getAllValues ( ) . get ( 1 ) ) ; "<AssertPlaceHolder>" ; } ack ( java . lang . Object ) { if ( ! ( o instanceof eu . icolumbo . breeze . TransactionContext ) ) { eu . icolumbo . breeze . SpringSpout . logger . warn ( "Ack<sp>with<sp>unknown<sp>message<sp>ID:<sp>{}" , o ) ; return ; } eu . icolumbo . breeze . TransactionContext context = ( ( eu . icolumbo . breeze . TransactionContext ) ( o ) ) ; java . lang . Object [ ] values = context . getAckParams ( ) ; eu . icolumbo . breeze . SpringSpout . logger . trace ( "Ack<sp>with:<sp>{}" , values ) ; try { invoke ( ackMethod , values ) ; } catch ( java . lang . Exception e ) { eu . icolumbo . breeze . SpringSpout . logger . error ( "Ack<sp>notification<sp>abort" , e ) ; } }
|
org . junit . Assert . assertEquals ( asList ( ( ( java . lang . Object ) ( "ding" ) ) , "dong" ) , bean )
|
getNumRows ( ) { cn . edu . hit . ir . JNN . Dim dim = new cn . edu . hit . ir . JNN . Dim ( java . util . Arrays . asList ( 100 , 200 ) , 1 ) ; "<AssertPlaceHolder>" ; } getNumRows ( ) { return d [ 0 ] ; }
|
org . junit . Assert . assertEquals ( dim . getNumRows ( ) , 100 )
|
testSetMaxRowsHigherThanQueryLimit ( ) { try ( java . sql . Statement stmt = org . apache . drill . jdbc . StatementMaxRowsTest . connection . createStatement ( ) ) { int valueToSet = ( org . apache . drill . jdbc . StatementMaxRowsTest . RANDOMIZER . nextInt ( 10 ) ) + 11 ; stmt . setMaxRows ( valueToSet ) ; stmt . executeQuery ( org . apache . drill . jdbc . StatementMaxRowsTest . SYS_OPTIONS_SQL_LIMIT_10 ) ; java . sql . ResultSet rs = stmt . getResultSet ( ) ; int rowCount = 0 ; while ( rs . next ( ) ) { rs . getBytes ( 1 ) ; rowCount ++ ; } rs . close ( ) ; "<AssertPlaceHolder>" ; } } close ( ) { if ( ( client ) != null ) { client . close ( ) ; } }
|
org . junit . Assert . assertTrue ( ( valueToSet > rowCount ) )
|