| input (string, 28–18.7k chars) | output (string, 39–1.69k chars) |
|---|---|
preserveTimestampDirCanBeDisabled ( ) { org . springframework . context . annotation . AnnotationConfigApplicationContext context = new org . springframework . context . annotation . AnnotationConfigApplicationContext ( ) ; org . springframework . boot . test . EnvironmentTestUtils . addEnvironment ( context , "ftp.preserveTimestamp:false" ) ; context . register ( org . springframework . cloud . stream . app . ftp . source . FtpSourcePropertiesTests . Conf . class ) ; context . refresh ( ) ; org . springframework . cloud . stream . app . ftp . source . FtpSourceProperties properties = context . getBean ( org . springframework . cloud . stream . app . ftp . source . FtpSourceProperties . class ) ; "<AssertPlaceHolder>" ; } isPreserveTimestamp ( ) { return preserveTimestamp ; }
|
org . junit . Assert . assertTrue ( ( ! ( properties . isPreserveTimestamp ( ) ) ) )
|
testCreateStrictQueryBlank ( ) { final java . util . List < org . apache . lucene . search . Query > query = new org . yes . cart . search . query . impl . ProductIdSearchQueryBuilder ( ) . createQueryChain ( null , "productId" , "<sp>" ) ; "<AssertPlaceHolder>" ; } createQueryChain ( org . yes . cart . search . dto . NavigationContext , java . lang . String , java . lang . Object ) { java . lang . String strValue = "1" ; if ( value instanceof java . util . Collection ) { if ( org . apache . commons . collections . CollectionUtils . isNotEmpty ( ( ( java . util . Collection ) ( value ) ) ) ) { strValue = ( "0" . equals ( ( ( java . util . Collection ) ( value ) ) . iterator ( ) . next ( ) ) ) ? "0" : "1" ; } } else { strValue = ( "0" . equals ( value ) ) ? "0" : "1" ; } return java . util . Collections . singletonList ( createNumericQuery ( ( ( PRODUCT_SHOP_INSTOCK_FLAG_FIELD ) + strValue ) , navigationContext . getCustomerShopId ( ) ) ) ; }
|
org . junit . Assert . assertNull ( query )
|
serializeDeserializeMetadata ( ) { org . apache . kafka . clients . consumer . internals . PartitionAssignor . Subscription subscription = new org . apache . kafka . clients . consumer . internals . PartitionAssignor . Subscription ( java . util . Arrays . asList ( "foo" , "bar" ) ) ; java . nio . ByteBuffer buffer = org . apache . kafka . clients . consumer . internals . ConsumerProtocol . serializeSubscription ( subscription ) ; org . apache . kafka . clients . consumer . internals . PartitionAssignor . Subscription parsedSubscription = org . apache . kafka . clients . consumer . internals . ConsumerProtocol . deserializeSubscription ( buffer ) ; "<AssertPlaceHolder>" ; } topics ( ) { return java . util . Arrays . copyOf ( org . apache . kafka . streams . tests . SmokeTestDriver . TOPICS , org . apache . kafka . streams . tests . SmokeTestDriver . TOPICS . length ) ; }
|
org . junit . Assert . assertEquals ( subscription . topics ( ) , parsedSubscription . topics ( ) )
|
givenFilePath_whenUsingFilesLines_thenFileData ( ) { java . lang . String expectedData = "Hello<sp>World<sp>from<sp>fileTest.txt!!!" ; java . nio . file . Path path = java . nio . file . Paths . get ( getClass ( ) . getClassLoader ( ) . getResource ( "fileTest.txt" ) . toURI ( ) ) ; java . util . stream . Stream < java . lang . String > lines = java . nio . file . Files . lines ( path ) ; java . lang . String data = lines . collect ( java . util . stream . Collectors . joining ( "\n" ) ) ; lines . close ( ) ; "<AssertPlaceHolder>" ; } trim ( ) { engine . eval ( new java . io . InputStreamReader ( com . baeldung . scripting . NashornUnitTest . class . getResourceAsStream ( "/js/trim.js" ) ) ) ; }
|
org . junit . Assert . assertEquals ( expectedData , data . trim ( ) )
|
testOwnerPoolEntitlementCountProductOnly ( ) { org . candlepin . model . ConsumerType type = consumerTypeCurator . getByLabel ( "system" ) ; org . candlepin . model . Product prod = new org . candlepin . model . Product ( "sysProd" , "sysProd" ) ; prod . setAttribute ( Pool . Attributes . ENABLED_CONSUMER_TYPES , type . getLabel ( ) ) ; createProduct ( prod , owner ) ; pool1 . setProduct ( prod ) ; owner . addEntitlementPool ( pool1 ) ; org . candlepin . model . OwnerInfo info = ownerInfoCurator . getByOwner ( owner ) ; java . util . Map < java . lang . String , java . lang . Integer > expectedPoolCount = new java . util . HashMap < java . lang . String , java . lang . Integer > ( ) { { put ( "system" , 1 ) ; put ( "domain" , 0 ) ; put ( "uebercert" , 0 ) ; } } ; "<AssertPlaceHolder>" ; } getConsumerTypeCountByPool ( ) { return consumerTypeCountByPool ; }
|
org . junit . Assert . assertEquals ( expectedPoolCount , info . getConsumerTypeCountByPool ( ) )
|
doTrimAll_A$String_null ( ) { org . junithelper . core . filter . TrimFilterManager target = new org . junithelper . core . filter . TrimFilterManager ( ) ; target . addFilter ( new org . junithelper . core . filter . impl . TrimCommentFilter ( ) , new org . junithelper . core . filter . impl . TrimQuotationFilter ( ) ) ; java . lang . String src = null ; java . lang . String actual = target . doTrimAll ( src ) ; java . lang . String expected = null ; "<AssertPlaceHolder>" ; } doTrimAll ( java . lang . String ) { java . lang . String dest = src ; for ( org . junithelper . core . filter . TrimFilter filter : filters ) { dest = filter . trimAll ( dest ) ; } return dest ; }
|
org . junit . Assert . assertEquals ( expected , actual )
|
testGetParameters ( ) { java . lang . Long specOfferId = 1L ; org . lnu . is . domain . specoffer . SpecOffer specOffer = new org . lnu . is . domain . specoffer . SpecOffer ( ) ; specOffer . setId ( specOfferId ) ; java . lang . Long waveTypeId = 2L ; org . lnu . is . domain . wave . type . WaveType waveType = new org . lnu . is . domain . wave . type . WaveType ( ) ; waveType . setId ( waveTypeId ) ; java . lang . Integer licCount = 1 ; java . lang . Integer stateCount = 2 ; java . lang . Integer benefitCount = 3 ; java . lang . Integer targetCount = 4 ; java . util . Date beginDate = new java . util . Date ( ) ; java . util . Date endDate = new java . util . Date ( ) ; org . lnu . is . domain . specoffer . SpecOfferWave entity = new org . lnu . is . domain . specoffer . SpecOfferWave ( ) ; entity . setSpecOffer ( specOffer ) ; entity . setWaveType ( waveType ) ; entity . setLicCount ( licCount ) ; entity . setStateCount ( stateCount ) ; entity . setBenefitCount ( benefitCount ) ; entity . setTargetCount ( targetCount ) ; entity . setBeginDate ( beginDate ) ; entity . setEndDate ( endDate ) ; java . util . Map < java . lang . String , java . lang . Object > expected = new java . util . HashMap < java . lang . String , java . lang . Object > ( ) ; expected . put ( "specOffer" , specOffer ) ; expected . put ( "waveType" , waveType ) ; expected . put ( "licCount" , licCount ) ; expected . put ( "stateCount" , stateCount ) ; expected . put ( "benefitCount" , benefitCount ) ; expected . put ( "targetCount" , targetCount ) ; expected . put ( "status" , RowStatus . ACTIVE ) ; expected . put ( "userGroups" , groups ) ; expected . put ( "beginDate" , beginDate ) ; expected . put ( "endDate" , endDate ) ; when ( specOfferDao . getEntityById ( anyLong ( ) ) ) . thenReturn ( specOffer ) ; when ( waveTypeDao . getEntityById ( anyLong ( ) ) ) . thenReturn ( waveType ) ; java . util . Map < java . lang . String , java . lang . Object > actual = unit . getParameters ( entity ) ; "<AssertPlaceHolder>" ; } getParameters ( org . springframework . web . context . request . NativeWebRequest ) { java . util . Map < java . lang . String , java . lang . Object > resultMap = new java . util . HashMap < java . lang . String , java . lang . Object > ( ) ; java . util . Map < java . lang . String , java . lang . String > pathVariables = ( ( java . util . Map < java . lang . String , java . lang . String > ) ( webRequest . getAttribute ( HandlerMapping . URI_TEMPLATE_VARIABLES_ATTRIBUTE , RequestAttributes . SCOPE_REQUEST ) ) ) ; java . util . Map < java . lang . String , java . lang . Object > requestParams = getRequestParameterMap ( webRequest ) ; for ( Map . Entry < java . lang . String , java . lang . Object > entry : requestParams . entrySet ( ) ) { resultMap . put ( entry . getKey ( ) , entry . getValue ( ) ) ; } resultMap . putAll ( pathVariables ) ; return resultMap ; }
|
org . junit . Assert . assertEquals ( expected , actual )
|
testGetHiddenNode ( ) { org . apache . jackrabbit . oak . plugins . tree . impl . ImmutableTree hidden = org . apache . jackrabbit . oak . plugins . tree . impl . ImmutableTreeTest . getHiddenTree ( immutable ) ; "<AssertPlaceHolder>" ; } getHiddenTree ( org . apache . jackrabbit . oak . plugins . tree . impl . ImmutableTree ) { return ( ( org . apache . jackrabbit . oak . plugins . tree . impl . ImmutableTree ) ( org . apache . jackrabbit . oak . plugins . tree . TreeUtil . getTree ( immutable , org . apache . jackrabbit . oak . plugins . tree . impl . ImmutableTreeTest . HIDDEN_PATH ) ) ) ; }
|
org . junit . Assert . assertNotNull ( hidden )
|
thePluginCanBeChanged ( ) { final org . jivesoftware . util . SystemProperty < java . lang . Long > longProperty = SystemProperty . Builder . ofType ( org . jivesoftware . util . Long . class ) . setKey ( "a-plugin-property" ) . setDefaultValue ( 42L ) . setPlugin ( "TestPluginName" ) . setDynamic ( false ) . build ( ) ; "<AssertPlaceHolder>" ; } getPlugin ( ) { return plugin ; }
|
org . junit . Assert . assertThat ( longProperty . getPlugin ( ) , org . hamcrest . CoreMatchers . is ( "TestPluginName" ) )
|
givenAnBoxedIntegerArray_whenUsingStream_thenFindSum ( ) { java . lang . Integer [ ] array = new java . lang . Integer [ ] { 1 , 3 , 4 , 8 , 19 , 20 } ; int expectedSumOfArray = 55 ; int actualSumOfArray = com . baeldung . array . SumAndAverageInArray . findSumUsingStream ( array ) ; "<AssertPlaceHolder>" ; } findSumUsingStream ( int [ ] ) { return java . util . Arrays . stream ( array ) . sum ( ) ; }
|
org . junit . Assert . assertEquals ( expectedSumOfArray , actualSumOfArray )
|
givenBiWeeklyFrequencyAndChildCreatedThreeWeeksAfterParentFirstCustomerScheduleForChildSynchsWithNearestScheduleOfParent ( ) { applicableCalendarEvents = new org . mifos . domain . builders . CalendarEventBuilder ( ) . build ( ) ; org . joda . time . DateTime tue19thOfApril = new org . joda . time . DateTime ( ) . withDate ( 2011 , 4 , 19 ) ; org . joda . time . DateTime tue10thOfMay = new org . joda . time . DateTime ( ) . withDate ( 2011 , 4 , 26 ) . plusWeeks ( 2 ) ; accountFees = new java . util . ArrayList < org . mifos . accounts . business . AccountFeesEntity > ( ) ; org . mifos . application . meeting . business . MeetingBO centerMeeting = new org . mifos . domain . builders . MeetingBuilder ( ) . customerMeeting ( ) . weekly ( ) . every ( 2 ) . occuringOnA ( WeekDay . MONDAY ) . startingFrom ( tue19thOfApril . minusDays ( 1 ) . toDate ( ) ) . build ( ) ; org . mifos . application . meeting . business . MeetingBO groupMeeting = new org . mifos . domain . builders . MeetingBuilder ( ) . customerMeeting ( ) . weekly ( ) . every ( 2 ) . occuringOnA ( WeekDay . MONDAY ) . startingFrom ( tue10thOfMay . minusDays ( 1 ) . toDate ( ) ) . build ( ) ; org . mifos . customers . center . business . CenterBO center = new org . mifos . domain . builders . CenterBuilder ( ) . active ( ) . withActivationDate ( tue19thOfApril ) . with ( centerMeeting ) . build ( ) ; org . mifos . customers . group . business . GroupBO group = new org . mifos . domain . builders . GroupBuilder ( ) . active ( ) . withParentCustomer ( center ) . withActivationDate ( tue10thOfMay ) . withMeeting ( groupMeeting ) . build ( ) ; org . mifos . customers . business . CustomerAccountBO centerAccount = org . mifos . customers . business . CustomerAccountBO . createNew ( center , accountFees , centerMeeting , applicableCalendarEvents ) ; org . mifos . customers . business . CustomerAccountBO groupAccount = org . mifos . customers . business . CustomerAccountBO . createNew ( group , accountFees , groupMeeting , applicableCalendarEvents ) ; java . util . List < org . mifos . accounts . business . AccountActionDateEntity > centerSchedules = new java . util . ArrayList < org . mifos . accounts . business . AccountActionDateEntity > ( centerAccount . getAccountActionDates ( ) ) ; java . util . List < org . mifos . accounts . business . AccountActionDateEntity > groupSchedules = new java . util . ArrayList < org . mifos . accounts . business . AccountActionDateEntity > ( groupAccount . getAccountActionDates ( ) ) ; org . joda . time . LocalDate thirdCenterDate = new org . joda . time . LocalDate ( centerSchedules . get ( 2 ) . getActionDate ( ) ) ; org . joda . time . LocalDate firstGroupDate = new org . joda . time . LocalDate ( groupSchedules . get ( 0 ) . getActionDate ( ) ) ; "<AssertPlaceHolder>" ; } getActionDate ( ) { return org . mifos . framework . util . helpers . DateUtils . currentDate ( ) ; }
|
org . junit . Assert . assertThat ( firstGroupDate , org . hamcrest . CoreMatchers . is ( thirdCenterDate ) )
|
testJarFromClassPathSupplier ( ) { final java . io . File file1 = temporaryFolder . newFile ( ) ; final java . io . File file2 = temporaryFolder . newFile ( ) ; final java . io . File directory = temporaryFolder . newFolder ( ) ; final java . lang . String classPath = org . apache . flink . container . entrypoint . ClassPathJobGraphRetrieverTest . javaClassPath ( "" , "" , "" , file1 . getAbsolutePath ( ) , "" , directory . getAbsolutePath ( ) , "" , file2 . getAbsolutePath ( ) , "" , "" ) ; java . lang . Iterable < java . io . File > jarFiles = org . apache . flink . container . entrypoint . ClassPathJobGraphRetrieverTest . setClassPathAndGetJarsOnClassPath ( classPath ) ; "<AssertPlaceHolder>" ; } contains ( java . util . List , int ) { for ( org . apache . flink . streaming . connectors . kafka . internals . KafkaTopicPartition ktp : partitions ) { if ( ( ktp . getPartition ( ) ) == partition ) { return true ; } } return false ; }
|
org . junit . Assert . assertThat ( jarFiles , org . hamcrest . Matchers . contains ( file1 , file2 ) )
|
testL1LanId ( ) { isisNeighbor . setL1LanId ( systemId ) ; result1 = isisNeighbor . l1LanId ( ) ; "<AssertPlaceHolder>" ; } is ( java . lang . Class ) { return true ; }
|
org . junit . Assert . assertThat ( result1 , org . hamcrest . CoreMatchers . is ( systemId ) )
|
testVenuesEdit ( ) { fi . foyt . foursquare . api . FoursquareApi foursquareApi = fi . foyt . foursquare . api . tests . TestUtils . getAuthenticatedFoursquareApi ( ) ; fi . foyt . foursquare . api . Result < java . lang . Object > result = foursquareApi . venuesEdit ( "4de88f43d22d09215a1f73e1" , "Apuvlineyksikk<sp>/<sp>Moision<sp>toimipiste" , "Moisiontie<sp>11<sp>b" , null , "Mikkeli" , "Etel-Savo" , "50520" , "0443516511" , "61.677701,27.272585" , "4bf58dd8d48988d104941735" , null , null , null ) ; "<AssertPlaceHolder>" ; } getMeta ( ) { return meta ; }
|
org . junit . Assert . assertEquals ( new java . lang . Integer ( 200 ) , result . getMeta ( ) . getCode ( ) )
|
testSetLineCap ( ) { gc . setLineCap ( SWT . CAP_ROUND ) ; "<AssertPlaceHolder>" ; } getLineCap ( ) { checkDisposed ( ) ; return delegate . getLineCap ( ) ; }
|
org . junit . Assert . assertEquals ( SWT . CAP_ROUND , gc . getLineCap ( ) )
|
testAdminRefreshQueuesWithLocalConfigurationProvider ( ) { rm = new org . apache . hadoop . yarn . server . resourcemanager . MockRM ( configuration ) ; rm . init ( configuration ) ; rm . start ( ) ; org . apache . hadoop . yarn . server . resourcemanager . scheduler . capacity . CapacityScheduler cs = ( ( org . apache . hadoop . yarn . server . resourcemanager . scheduler . capacity . CapacityScheduler ) ( rm . getRMContext ( ) . getScheduler ( ) ) ) ; int maxAppsBefore = cs . getConfiguration ( ) . getMaximumSystemApplications ( ) ; try { rm . adminService . refreshQueues ( org . apache . hadoop . yarn . server . api . protocolrecords . RefreshQueuesRequest . newInstance ( ) ) ; "<AssertPlaceHolder>" ; } catch ( java . lang . Exception ex ) { org . junit . Assert . fail ( "Using<sp>localConfigurationProvider.<sp>Should<sp>not<sp>get<sp>any<sp>exception." ) ; } } getConfiguration ( ) { return conf ; }
|
org . junit . Assert . assertEquals ( maxAppsBefore , cs . getConfiguration ( ) . getMaximumSystemApplications ( ) )
|
testPendingMessageQueueTooBig ( ) { final com . allanbank . mongodb . client . message . PendingMessageQueue queue = new com . allanbank . mongodb . client . message . PendingMessageQueue ( ( ( PendingMessageQueue . MAX_SIZE ) + 1 ) , com . allanbank . mongodb . LockType . MUTEX ) ; "<AssertPlaceHolder>" ; } capacity ( ) { return ( myQueue . length ) - 1 ; }
|
org . junit . Assert . assertEquals ( ( ( PendingMessageQueue . MAX_SIZE ) - 1 ) , queue . capacity ( ) )
|
testEmptyMapInitialization ( ) { org . apache . lucene . analysis . uima . ae . AEProvider aeProvider = new org . apache . lucene . analysis . uima . ae . OverridingParamsAEProvider ( "/uima/TestEntityAnnotatorAE.xml" , new java . util . HashMap < java . lang . String , java . lang . Object > ( ) ) ; org . apache . uima . analysis_engine . AnalysisEngine analysisEngine = aeProvider . getAE ( ) ; "<AssertPlaceHolder>" ; } getAE ( ) { synchronized ( this ) { if ( ( cachedDescription ) == null ) { org . apache . uima . util . XMLInputSource in = null ; boolean success = false ; try { in = getInputSource ( ) ; cachedDescription = org . apache . uima . UIMAFramework . getXMLParser ( ) . parseAnalysisEngineDescription ( in ) ; configureDescription ( cachedDescription ) ; success = true ; } catch ( java . lang . Exception e ) { throw new org . apache . uima . resource . ResourceInitializationException ( e ) ; } finally { if ( success ) { try { org . apache . lucene . util . IOUtils . close ( in . getInputStream ( ) ) ; } catch ( java . io . IOException e ) { throw new org . apache . uima . resource . ResourceInitializationException ( e ) ; } } else if ( in != null ) { org . apache . lucene . util . IOUtils . closeWhileHandlingException ( in . getInputStream ( ) ) ; } } } } return org . apache . uima . UIMAFramework . produceAnalysisEngine ( cachedDescription ) ; }
|
org . junit . Assert . assertNotNull ( analysisEngine )
|
testCreateLogicalRouterApiExceptionRollbackRouterAndSwitchPort ( ) { resource . configure ( "NiciraNvpResource" , parameters ) ; final com . cloud . network . nicira . LogicalRouter lrc = mock ( com . cloud . network . nicira . LogicalRouter . class ) ; final com . cloud . network . nicira . LogicalRouterPort lrp = mock ( com . cloud . network . nicira . LogicalRouterPort . class ) ; final com . cloud . network . nicira . LogicalSwitchPort lsp = mock ( com . cloud . network . nicira . LogicalSwitchPort . class ) ; when ( lrc . getUuid ( ) ) . thenReturn ( "ccccc" ) ; when ( lrp . getUuid ( ) ) . thenReturn ( "ddddd" ) . thenReturn ( "eeeee" ) ; when ( lsp . getUuid ( ) ) . thenReturn ( "fffff" ) ; when ( nvpApi . createLogicalRouter ( ( ( com . cloud . network . nicira . LogicalRouter ) ( any ( ) ) ) ) ) . thenReturn ( lrc ) ; when ( nvpApi . createLogicalRouterPort ( eq ( "ccccc" ) , ( ( com . cloud . network . nicira . LogicalRouterPort ) ( any ( ) ) ) ) ) . thenReturn ( lrp ) ; when ( nvpApi . createLogicalSwitchPort ( eq ( "lrouter" 0 ) , ( ( com . cloud . network . nicira . LogicalSwitchPort ) ( any ( ) ) ) ) ) . thenReturn ( lsp ) ; when ( nvpApi . createLogicalRouterNatRule ( ( ( java . lang . String ) ( any ( ) ) ) , ( ( com . cloud . network . nicira . NatRule ) ( any ( ) ) ) ) ) . thenThrow ( new com . cloud . network . nicira . NiciraNvpApiException ( ) ) ; final com . cloud . legacymodel . communication . command . CreateLogicalRouterCommand clrc = new com . cloud . legacymodel . communication . command . CreateLogicalRouterCommand ( "aaaaa" , 50 , "lrouter" 0 , "lrouter" , "publiccidr" , "nexthop" , "lrouter" 1 , "owner" ) ; final com . cloud . legacymodel . communication . answer . CreateLogicalRouterAnswer clra = ( ( com . cloud . legacymodel . communication . answer . CreateLogicalRouterAnswer ) ( resource . executeRequest ( clrc ) ) ) ; "<AssertPlaceHolder>" ; verify ( nvpApi , atLeast ( 1 ) ) . deleteLogicalRouter ( eq ( "ccccc" ) ) ; verify ( nvpApi , atLeast ( 1 ) ) . deleteLogicalSwitchPort ( eq ( "lrouter" 0 ) , eq ( "fffff" ) ) ; } getResult ( ) { if ( ! ( done ) ) { wait ( ) ; } return result ; }
|
org . junit . Assert . assertFalse ( clra . getResult ( ) )
|
testWrite ( ) { org . apache . poi . hslf . record . ExOleObjStg record = new org . apache . poi . hslf . record . ExOleObjStg ( org . apache . poi . hslf . record . TestExOleObjStg . data , 0 , org . apache . poi . hslf . record . TestExOleObjStg . data . length ) ; java . io . ByteArrayOutputStream baos = new java . io . ByteArrayOutputStream ( ) ; record . writeOut ( baos ) ; byte [ ] b = baos . toByteArray ( ) ; "<AssertPlaceHolder>" ; } toByteArray ( ) { byte [ ] result = new byte [ ( ( org . apache . poi . util . LittleEndianConsts . INT_SIZE ) * 2 ) + ( _value . length ) ] ; org . apache . poi . util . LittleEndianByteArrayOutputStream bos = new org . apache . poi . util . LittleEndianByteArrayOutputStream ( result , 0 ) ; try { bos . writeInt ( ( ( org . apache . poi . util . LittleEndianConsts . INT_SIZE ) + ( _value . length ) ) ) ; bos . writeInt ( _format ) ; bos . write ( _value ) ; return result ; } finally { org . apache . poi . util . IOUtils . closeQuietly ( bos ) ; } }
|
org . junit . Assert . assertArrayEquals ( org . apache . poi . hslf . record . TestExOleObjStg . data , b )
|
_3_A$ ( ) { java . lang . String _1 = "foo" ; java . lang . Integer _2 = 123 ; java . lang . Long _3 = 456L ; com . m3 . scalaflavor4j . Tuple3 < java . lang . String , java . lang . Integer , java . lang . Long > target = com . m3 . scalaflavor4j . Tuple3 . apply ( _1 , _2 , _3 ) ; java . lang . Long actual = target . _3 ( ) ; java . lang . Long expected = 456L ; "<AssertPlaceHolder>" ; } _3 ( ) { return _3 ; }
|
org . junit . Assert . assertThat ( actual , org . hamcrest . CoreMatchers . is ( org . hamcrest . CoreMatchers . equalTo ( expected ) ) )
|
testReadWriteDouble ( ) { org . nd4j . linalg . api . ndarray . INDArray write = org . nd4j . linalg . factory . Nd4j . linspace ( 1 , 4 , 4 ) ; java . io . ByteArrayOutputStream bos = new java . io . ByteArrayOutputStream ( ) ; java . io . DataOutputStream dos = new java . io . DataOutputStream ( bos ) ; org . nd4j . linalg . factory . Nd4j . write ( write , dos ) ; java . io . ByteArrayInputStream bis = new java . io . ByteArrayInputStream ( bos . toByteArray ( ) ) ; java . io . DataInputStream dis = new java . io . DataInputStream ( bis ) ; org . nd4j . linalg . api . ndarray . INDArray read = org . nd4j . linalg . factory . Nd4j . read ( dis ) ; "<AssertPlaceHolder>" ; } read ( org . nd4j . linalg . factory . InputStream ) { return org . nd4j . linalg . factory . Nd4j . read ( new org . nd4j . linalg . factory . DataInputStream ( reader ) ) ; }
|
org . junit . Assert . assertEquals ( write , read )
|
testVoerStapUitWelNotificatieberichtNodig ( ) { final nl . bzk . brp . bijhouding . business . stappen . resultaat . Resultaat resultaat = stap . voerStapUit ( bijhoudingsBericht , berichtContext ) ; verify ( marshallService ) . maakBericht ( any ( nl . bzk . brp . model . bijhouding . NotificeerBijhoudingsplanBericht . class ) ) ; "<AssertPlaceHolder>" ; } getMeldingen ( ) { return java . util . Collections . unmodifiableSet ( meldingen ) ; }
|
org . junit . Assert . assertTrue ( resultaat . getMeldingen ( ) . isEmpty ( ) )
|
testGetInterfaceNumber ( ) { org . eclipse . kura . net . admin . monitor . ModemMonitorServiceImpl svc = new org . eclipse . kura . net . admin . monitor . ModemMonitorServiceImpl ( ) ; java . util . List < org . eclipse . kura . net . NetConfig > netConfigs = new java . util . ArrayList ( ) ; org . eclipse . kura . net . NetConfig config = new org . eclipse . kura . net . NetConfigIP4 ( org . eclipse . kura . net . NetInterfaceStatus . netIPv4StatusEnabledLAN , true ) ; netConfigs . add ( config ) ; org . eclipse . kura . net . modem . ModemConfig nc = new org . eclipse . kura . net . modem . ModemConfig ( 1 , org . eclipse . kura . net . modem . ModemConfig . PdpType . PPP , "apn" , org . eclipse . kura . net . IPAddress . parseHostAddress ( "10.10.10.10" ) , 1 , 2 ) ; nc . setPppNumber ( 2 ) ; netConfigs . add ( nc ) ; int result = ( ( int ) ( org . eclipse . kura . core . testutil . TestUtil . invokePrivate ( svc , "getInterfaceNumber" , new java . lang . Class [ ] { java . util . List . class } , netConfigs ) ) ) ; "<AssertPlaceHolder>" ; } invokePrivate ( java . lang . Object , java . lang . String , org . eclipse . kura . core . testutil . Class [ ] , java . lang . Object [ ] ) { java . lang . reflect . Method method = org . eclipse . kura . core . testutil . TestUtil . getMethod ( svc , methodName , paramTypes ) ; method . setAccessible ( true ) ; try { java . lang . Object result = method . invoke ( svc , params ) ; return result ; } catch ( java . lang . IllegalAccessException e ) { org . eclipse . kura . core . testutil . TestUtil . logger . warn ( e . getMessage ( ) , e ) ; } catch ( java . lang . IllegalArgumentException e ) { org . eclipse . kura . core . testutil . TestUtil . logger . warn ( e . getMessage ( ) , e ) ; } catch ( java . lang . reflect . InvocationTargetException e ) { throw e . getCause ( ) ; } return null ; }
|
org . junit . Assert . assertEquals ( 2 , result )
|
checkPathToEmfUriReturnsUriIfFileDoesNotExist ( ) { java . nio . file . Path nonexistentPath = java . nio . file . Paths . get ( "/not/a/path" ) ; org . eclipse . emf . common . util . URI nonexistentUri = fileUtils . pathToEmfUri ( nonexistentPath ) ; "<AssertPlaceHolder>" ; } pathToEmfUri ( java . nio . file . Path ) { return org . eclipse . emf . common . util . URI . createFileURI ( file . toString ( ) ) ; }
|
org . junit . Assert . assertNotNull ( nonexistentUri )
|
testMoveAndRenameExistingToEntryWithReferralAncestor ( ) { try { MNNCtx . rename ( "cn=Alex" , "cn=Emmanuel,ou=Roles" ) ; } catch ( javax . naming . NamingException ne ) { "<AssertPlaceHolder>" ; } } rename ( org . apache . directory . api . ldap . model . message . ModifyDnRequest , org . apache . directory . server . core . api . changelog . LogChange ) { org . apache . directory . server . core . api . interceptor . context . RenameOperationContext renameContext = new org . apache . directory . server . core . api . interceptor . context . RenameOperationContext ( this , modifyDnRequest ) ; renameContext . setLogChange ( log ) ; org . apache . directory . server . core . api . OperationManager operationManager = directoryService . getOperationManager ( ) ; try { operationManager . rename ( renameContext ) ; } catch ( org . apache . directory . api . ldap . model . exception . LdapException e ) { modifyDnRequest . getResultResponse ( ) . addAllControls ( renameContext . getResponseControls ( ) ) ; throw e ; } modifyDnRequest . getResultResponse ( ) . addAllControls ( renameContext . getResponseControls ( ) ) ; }
|
org . junit . Assert . assertTrue ( true )
|
testWriteLines_3arg_nullSeparator ( ) { final java . lang . Object [ ] data = new java . lang . Object [ ] { "hello" , new java . lang . StringBuffer ( "world" ) , "" , "this<sp>is" , null , "some<sp>text" } ; final java . util . List < java . lang . Object > list = java . util . Arrays . asList ( data ) ; final java . io . File file = org . apache . commons . io . testtools . TestUtils . newFile ( getTestDirectory ( ) , "lines.txt" ) ; org . apache . commons . io . FileUtils . writeLines ( file , "US-ASCII" , list ) ; final java . lang . String expected = ( ( ( ( ( ( ( ( "hello" + ( org . apache . commons . io . IOUtils . LINE_SEPARATOR ) ) + "world" ) + ( org . apache . commons . io . IOUtils . LINE_SEPARATOR ) ) + ( org . apache . commons . io . IOUtils . LINE_SEPARATOR ) ) + "this<sp>is" ) + ( org . apache . commons . io . IOUtils . LINE_SEPARATOR ) ) + ( org . apache . commons . io . IOUtils . LINE_SEPARATOR ) ) + "some<sp>text" ) + ( org . apache . commons . io . IOUtils . LINE_SEPARATOR ) ; final java . lang . String actual = org . apache . commons . io . FileUtils . readFileToString ( file , "US-ASCII" ) ; "<AssertPlaceHolder>" ; } readFileToString ( java . io . File , java . nio . charset . Charset ) { try ( java . io . InputStream in = org . apache . commons . io . FileUtils . openInputStream ( file ) ) { return org . apache . commons . io . IOUtils . toString ( in , org . apache . commons . io . Charsets . toCharset ( encoding ) ) ; } }
|
org . junit . Assert . assertEquals ( expected , actual )
|
testDownloadToTempFile ( ) { java . lang . String fileHandleId = "123" ; org . sagebionetworks . repo . model . file . S3FileHandle s3Handle = new org . sagebionetworks . repo . model . file . S3FileHandle ( ) ; s3Handle . setId ( fileHandleId ) ; s3Handle . setKey ( "someKey" ) ; s3Handle . setBucketName ( "someBucket" ) ; java . io . File result = null ; try { result = bulkDownloadDao . downloadToTempFile ( s3Handle ) ; "<AssertPlaceHolder>" ; verify ( mockS3client ) . getObject ( any ( com . amazonaws . services . s3 . model . GetObjectRequest . class ) , any ( java . io . File . class ) ) ; } finally { if ( result != null ) { result . delete ( ) ; } } } downloadToTempFile ( org . sagebionetworks . repo . model . file . S3FileHandle ) { java . io . File tempFile = java . io . File . createTempFile ( ( "FileHandle" + ( fileHandle . getId ( ) ) ) , ".tmp" ) ; s3client . getObject ( new com . amazonaws . services . s3 . model . GetObjectRequest ( fileHandle . getBucketName ( ) , fileHandle . getKey ( ) ) , tempFile ) ; return tempFile ; }
|
org . junit . Assert . assertNotNull ( result )
|
test1 ( ) { java . util . Collection < net . lr . tasklist . model . Task > tasks = taskService . getTasks ( ) ; "<AssertPlaceHolder>" ; } getTasks ( ) { return taskService . getTasks ( ) ; }
|
org . junit . Assert . assertEquals ( 2 , tasks . size ( ) )
|
nietValideAttendering ( ) { final nl . bzk . brp . model . internbericht . ProtocolleringOpdracht protocolleringOpdracht = maakProtocolleringOpdracht ( SoortDienst . ATTENDERING , datumMaterieelSelectie , datumAanvangMaterielePeriode , datumEindeMaterielePeriode , null , datumTijdEindeFormelePeriode , null ) ; "<AssertPlaceHolder>" ; } isValide ( ) { boolean resultaat ; if ( ( levering ) == null ) { resultaat = false ; nl . bzk . brp . model . internbericht . ProtocolleringOpdracht . LOGGER . debug ( "Levering<sp>dient<sp>gevuld<sp>te<sp>zijn." ) ; } else if ( ( ( personen ) == null ) || ( personen . isEmpty ( ) ) ) { resultaat = false ; nl . bzk . brp . model . internbericht . ProtocolleringOpdracht . LOGGER . debug ( "Personen<sp>dient<sp>gevuld<sp>te<sp>zijn." ) ; } else if ( ( levering . getToegangLeveringsautorisatieId ( ) ) == null ) { resultaat = false ; nl . bzk . brp . model . internbericht . ProtocolleringOpdracht . LOGGER . debug ( "ToegangAbonnementId<sp>dient<sp>gevuld<sp>te<sp>zijn." ) ; } else if ( ( levering . getDienstId ( ) ) == null ) { resultaat = false ; nl . bzk . brp . model . internbericht . ProtocolleringOpdracht . LOGGER . debug ( "DienstId<sp>dient<sp>gevuld<sp>te<sp>zijn." ) ; } else if ( ( ( levering . getDatumTijdKlaarzettenLevering ( ) ) == null ) || ( levering . getDatumTijdKlaarzettenLevering ( ) . heeftGeenWaarde ( ) ) ) { resultaat = false ; nl . bzk . brp . model . internbericht . ProtocolleringOpdracht . LOGGER . debug ( "DatumTijdKlaarzettenLevering<sp>dient<sp>gevuld<sp>te<sp>zijn." ) ; } else if ( ( getSoortDienst ( ) ) == null ) { resultaat = false ; nl . bzk . brp . model . internbericht . ProtocolleringOpdracht . LOGGER . debug ( "Soort<sp>dienst<sp>gevuld<sp>te<sp>zijn." ) ; } else if ( ( nl . bzk . brp . model . internbericht . ProtocolleringOpdracht . SOORTDIENSTEN_MET_SOORT_SYNCHRONISATIE_VERPLICHT . contains ( getSoortDienst ( ) ) ) && ( ( ( levering . getSoortSynchronisatie ( ) ) == null ) || ( levering . getSoortSynchronisatie ( ) . heeftGeenWaarde ( ) ) ) ) { resultaat = false ; nl . bzk . brp . model . internbericht . ProtocolleringOpdracht . LOGGER . debug ( "ToegangAbonnementId<sp>dient<sp>gevuld<sp>te<sp>zijn." 0 , getSoortDienst ( ) ) ; } else { switch ( soortDienst ) { case ATTENDERING : case MUTATIELEVERING_OP_BASIS_VAN_DOELBINDING : resultaat = isValideAttenderingOfMutatieLeveringDoelbinding ( ) ; break ; case GEEF_DETAILS_PERSOON : case GEEF_DETAILS_PERSOON_BULK : resultaat = isValideGeefDetailsPersoon ( getHistorievorm ( ) ) ; break ; case MUTATIELEVERING_OP_BASIS_VAN_AFNEMERINDICATIE : case PLAATSEN_AFNEMERINDICATIE : case VERWIJDEREN_AFNEMERINDICATIE : resultaat = isValideAfnemerindicatie ( ) ; break ; case SYNCHRONISATIE_PERSOON : resultaat = isValideSynchronisatiePersoon ( ) ; break ; case GEEF_MEDEBEWONERS_VAN_PERSOON : resultaat = isValideGeefMedebewonersVanPersoon ( ) ; break ; default : final java . lang . String foutmelding = "Voor<sp>deze<sp>catalogusoptie<sp>is<sp>geen<sp>protocollering<sp>validatie<sp>ingesteld:<sp>" + ( soortDienst ) ; nl . bzk . brp . model . internbericht . ProtocolleringOpdracht . LOGGER . error ( foutmelding ) ; throw new java . lang . IllegalArgumentException ( foutmelding ) ; } if ( ! resultaat ) { nl . bzk . brp . model . internbericht . ProtocolleringOpdracht . LOGGER . 
debug ( ( "De<sp>protocollering<sp>is<sp>niet<sp>valide<sp>voor<sp>de<sp>catalogusoptie:<sp>{},<sp>" + ( ( "datum<sp>materieel<sp>selectie:<sp>{},<sp>datum<sp>aanvang<sp>materiele<sp>periode:<sp>{},<sp>" + "datum<sp>einde<sp>materiele<sp>periode:<sp>{},<sp>datum<sp>tijd<sp>aanv<sp>form<sp>periode:<sp>{},<sp>" ) + "ToegangAbonnementId<sp>dient<sp>gevuld<sp>te<sp>zijn." 1 ) ) , soortDienst , levering . getDatumMaterieelSelectie ( ) , levering . getDatumAanvangMaterielePeriodeResultaat ( ) , levering . getDatumEindeMaterielePeriodeResultaat ( ) , levering . getDatumTijdAanvangFormelePeriodeResultaat ( ) , levering . getDatumTijdEindeFormelePeriodeResultaat ( ) , historievorm ) ; } } return resultaat ; }
|
org . junit . Assert . assertFalse ( protocolleringOpdracht . isValide ( ) )
|
testResolveWildcardArtifactId ( ) { org . jboss . forge . addon . dependencies . DependencyQuery query = org . jboss . forge . addon . dependencies . builder . DependencyQueryBuilder . create ( org . jboss . forge . addon . dependencies . builder . CoordinateBuilder . create ( ) . setGroupId ( "org.jboss.forge" ) . setArtifactId ( "" ) . setClassifier ( "forge-addon" ) ) ; java . util . Set < org . jboss . forge . addon . dependencies . Dependency > coreAddons = resolver . resolveDependencies ( query ) ; "<AssertPlaceHolder>" ; } isEmpty ( ) { return getDelegate ( ) . isEmpty ( ) ; }
|
org . junit . Assert . assertFalse ( coreAddons . isEmpty ( ) )
|
testGetServiceMetrics ( ) { System . out . println ( ( ( getTestTraceHead ( "[CygnusHandler.getServiceMetrics]" ) ) + "<sp>-<sp>Not<sp>null<sp>metrics<sp>are<sp>retrieved" ) ) ; com . telefonica . iot . cygnus . handlers . CygnusHandlerTest . CygnusHandlerImpl ch = new com . telefonica . iot . cygnus . handlers . CygnusHandlerTest . CygnusHandlerImpl ( ) ; com . telefonica . iot . cygnus . metrics . CygnusMetrics metrics = ch . getServiceMetrics ( ) ; try { "<AssertPlaceHolder>" ; System . out . println ( ( ( getTestTraceHead ( "[CygnusMetrics.getServiceMetrics]" ) ) + "<sp>-<sp>OK<sp>-<sp>Not<sp>null<sp>metrics<sp>were<sp>retrieved" ) ) ; } catch ( java . lang . AssertionError e ) { System . out . println ( ( ( getTestTraceHead ( "[CygnusMetrics.getServiceMetrics]" ) ) + "<sp>-<sp>FAIL<sp>-<sp>Null<sp>metrics<sp>were<sp>retrieved" ) ) ; throw e ; } getServiceMetrics ( ) { return serviceMetrics ; }
|
org . junit . Assert . assertTrue ( ( metrics != null ) )
|
testFromExisting ( ) { org . gradoop . common . model . impl . id . GradoopId id1 = org . gradoop . common . model . impl . id . GradoopId . get ( ) ; org . gradoop . common . model . impl . id . GradoopId id2 = org . gradoop . common . model . impl . id . GradoopId . get ( ) ; org . gradoop . common . model . impl . id . GradoopId id3 = org . gradoop . common . model . impl . id . GradoopId . get ( ) ; org . gradoop . common . model . impl . id . GradoopIdSet ids = org . gradoop . common . model . impl . id . GradoopIdSet . fromExisting ( id1 , id2 , id3 ) ; "<AssertPlaceHolder>" ; } size ( ) { return ids . size ( ) ; }
|
org . junit . Assert . assertThat ( ids . size ( ) , org . hamcrest . core . Is . is ( 3 ) )
|
testPositiveApiKey ( ) { org . eclipse . microprofile . openapi . models . security . SecurityScheme . Type type = org . eclipse . microprofile . openapi . models . security . SecurityScheme . Type . APIKEY ; org . eclipse . microprofile . openapi . models . security . SecurityScheme . In in = org . eclipse . microprofile . openapi . models . security . SecurityScheme . In . HEADER ; com . ibm . ws . microprofile . openapi . impl . model . security . SecuritySchemeImpl positiveApiKey = new com . ibm . ws . microprofile . openapi . impl . model . security . SecuritySchemeImpl ( ) ; positiveApiKey . setName ( "apiKey" ) ; positiveApiKey . setType ( type ) ; positiveApiKey . setIn ( in ) ; com . ibm . ws . microprofile . openapi . test . utils . TestValidationHelper vh = new com . ibm . ws . microprofile . openapi . test . utils . TestValidationHelper ( ) ; com . ibm . ws . microprofile . openapi . impl . validation . SecuritySchemeValidator validator = com . ibm . ws . microprofile . openapi . impl . validation . SecuritySchemeValidator . getInstance ( ) ; validator . validate ( vh , context , key , positiveApiKey ) ; "<AssertPlaceHolder>" ; } getEventsSize ( ) { return result . getEvents ( ) . size ( ) ; }
|
org . junit . Assert . assertEquals ( 0 , vh . getEventsSize ( ) )
|
testFetchByPrimaryKeysWithNoPrimaryKeys ( ) { java . util . Set < java . io . Serializable > primaryKeys = new java . util . HashSet < java . io . Serializable > ( ) ; java . util . Map < java . io . Serializable , com . liferay . ratings . kernel . model . RatingsStats > ratingsStatses = _persistence . fetchByPrimaryKeys ( primaryKeys ) ; "<AssertPlaceHolder>" ; } isEmpty ( ) { return _portalCacheListeners . isEmpty ( ) ; }
|
org . junit . Assert . assertTrue ( ratingsStatses . isEmpty ( ) )
|
testFilterByMultiValuedAttributeNoMatch ( ) { io . katharsis . queryspec . QuerySpec spec = new io . katharsis . queryspec . QuerySpec ( io . katharsis . resource . mock . models . Task . class ) ; spec . addFilter ( new io . katharsis . queryspec . FilterSpec ( java . util . Arrays . asList ( "projects" , "id" ) , FilterOperator . EQ , 15L ) ) ; io . katharsis . resource . list . ResourceList < io . katharsis . resource . mock . models . Task > results = spec . apply ( tasks ) ; "<AssertPlaceHolder>" ; } size ( ) { return list . size ( ) ; }
|
org . junit . Assert . assertEquals ( 0 , results . size ( ) )
|
testNestedQuotes ( ) { java . lang . String [ ] data = new java . lang . String [ ] { "\"\"" , "test" } ; java . lang . String oracle = new java . lang . String ( "\"\"\"\"\"\",\"test\"\n" ) ; au . com . bytecode . opencsv . CSVWriter writer = null ; au . com . bytecode . opencsv . File tempFile = null ; au . com . bytecode . opencsv . FileWriter fwriter = null ; try { tempFile = au . com . bytecode . opencsv . File . createTempFile ( "csvWriterTest" , ".csv" ) ; tempFile . deleteOnExit ( ) ; fwriter = new au . com . bytecode . opencsv . FileWriter ( tempFile ) ; writer = new au . com . bytecode . opencsv . CSVWriter ( fwriter ) ; } catch ( au . com . bytecode . opencsv . IOException e ) { org . junit . Assert . fail ( ) ; } writer . writeNext ( data ) ; try { writer . close ( ) ; } catch ( au . com . bytecode . opencsv . IOException e ) { org . junit . Assert . fail ( ) ; } try { fwriter . flush ( ) ; org . junit . Assert . fail ( ) ; } catch ( au . com . bytecode . opencsv . IOException e ) { } au . com . bytecode . opencsv . FileReader in = null ; try { in = new au . com . bytecode . opencsv . FileReader ( tempFile ) ; } catch ( au . com . bytecode . opencsv . FileNotFoundException e ) { org . junit . Assert . fail ( ) ; } java . lang . StringBuilder fileContents = new java . lang . StringBuilder ( CSVWriter . INITIAL_STRING_SIZE ) ; try { int ch ; while ( ( ch = in . read ( ) ) != ( - 1 ) ) { fileContents . append ( ( ( char ) ( ch ) ) ) ; } in . close ( ) ; } catch ( au . com . bytecode . opencsv . IOException e ) { org . junit . Assert . fail ( ) ; } "<AssertPlaceHolder>" ; } equals ( java . lang . Object ) { if ( ( null == obj ) || ( ! ( obj instanceof integrationTest . issue3402853 . MockUserBean ) ) ) { return false ; } if ( ( this ) == obj ) { return true ; } return ( ( integrationTest . issue3402853 . MockUserBean ) ( obj ) ) . toString ( ) . equalsIgnoreCase ( this . toString ( ) ) ; }
|
org . junit . Assert . assertTrue ( oracle . equals ( fileContents . toString ( ) ) )
|
testAverageInTree ( ) { System . out . println ( "test:testAverageInTree" ) ; long returnValue = 6 ; org . glassfish . flashlight . datatree . TreeNode server = setupSimpleTree ( ) ; org . glassfish . flashlight . statistics . Average average = org . glassfish . flashlight . statistics . factory . AverageFactory . createAverage ( ) ; for ( int i = 0 ; i < 3 ; i ++ ) average . addDataPoint ( ( ( i + 1 ) * 3 ) ) ; org . glassfish . flashlight . datatree . TreeNode grandson = server . getNode ( "wto.wtoson.wtograndson" ) ; grandson . addChild ( ( ( org . glassfish . flashlight . datatree . TreeNode ) ( average ) ) ) ; org . glassfish . flashlight . datatree . TreeNode averageNode = server . getNode ( "wto.wtoson.wtograndson.average" ) ; "<AssertPlaceHolder>" ; } getValue ( ) { return value ; }
|
org . junit . Assert . assertEquals ( returnValue , averageNode . getValue ( ) )
|
nullAttributeValue ( ) { software . amazon . awssdk . services . dynamodb . document . Attribute a = new software . amazon . awssdk . services . dynamodb . document . Attribute ( "null<sp>attribute<sp>value<sp>is<sp>fine" , null ) ; "<AssertPlaceHolder>" ; } hashCode ( ) { return id ( ) . hashCode ( ) ; }
|
org . junit . Assert . assertTrue ( ( ( a . hashCode ( ) ) != 0 ) )
|
testReplicaMainFailsBeforeReplicaCalls ( ) { org . apache . hadoop . hbase . client . TestAsyncProcess . MyAsyncProcessWithReplicas ap = createReplicaAp ( 1000 , 0 , 0 , 0 ) ; ap . addFailures ( org . apache . hadoop . hbase . client . TestAsyncProcess . hri1 , org . apache . hadoop . hbase . client . TestAsyncProcess . hri2 ) ; java . util . List < org . apache . hadoop . hbase . client . Get > rows = org . apache . hadoop . hbase . client . TestAsyncProcess . makeTimelineGets ( org . apache . hadoop . hbase . client . TestAsyncProcess . DUMMY_BYTES_1 , org . apache . hadoop . hbase . client . TestAsyncProcess . DUMMY_BYTES_2 ) ; org . apache . hadoop . hbase . client . AsyncProcessTask task = org . apache . hadoop . hbase . client . AsyncProcessTask . newBuilder ( ) . setPool ( ap . service ) . setRpcTimeout ( RPC_TIMEOUT ) . setOperationTimeout ( OPERATION_TIMEOUT ) . setTableName ( org . apache . hadoop . hbase . client . TestAsyncProcess . DUMMY_TABLE ) . setRowAccess ( rows ) . setResults ( new java . lang . Object [ 2 ] ) . setSubmittedRows ( SubmittedRows . ALL ) . build ( ) ; org . apache . hadoop . hbase . client . AsyncRequestFuture ars = ap . submit ( task ) ; verifyReplicaResult ( ars , org . apache . hadoop . hbase . client . TestAsyncProcess . RR . FAILED , org . apache . hadoop . hbase . client . TestAsyncProcess . RR . FAILED ) ; "<AssertPlaceHolder>" ; } getReplicaCallCount ( ) { return replicaCalls . get ( ) ; }
|
org . junit . Assert . assertEquals ( 0 , ap . getReplicaCallCount ( ) )
|
testGetFile ( ) { com . liferay . petra . io . ByteArrayFileInputStream byteArrayFileInputStream = new com . liferay . petra . io . ByteArrayFileInputStream ( _testFile , 512 ) ; "<AssertPlaceHolder>" ; byteArrayFileInputStream . close ( ) ; } getFile ( ) { return _file ; }
|
org . junit . Assert . assertSame ( _testFile , byteArrayFileInputStream . getFile ( ) )
|
whenSaveOrUpdateTransient_thenSavedToDatabaseOnCommit ( ) { com . baeldung . persistence . model . Person person = new com . baeldung . persistence . model . Person ( ) ; person . setName ( "John" ) ; session . saveOrUpdate ( person ) ; session . getTransaction ( ) . commit ( ) ; session . close ( ) ; session = com . baeldung . persistence . save . SaveMethodsIntegrationTest . sessionFactory . openSession ( ) ; session . beginTransaction ( ) ; "<AssertPlaceHolder>" ; } get ( java . lang . String , java . lang . String ) { return java . lang . String . format ( "login<sp>=<sp>%s,<sp>query<sp>=<sp>%s" , login , query ) ; }
|
org . junit . Assert . assertNotNull ( session . get ( com . baeldung . persistence . model . Person . class , person . getId ( ) ) )
|
createDynamicModule ( ) { com . google . inject . Injector injector = com . google . inject . Guice . createInjector ( de . devsurf . injection . guice . scanner . StartupModule . create ( de . devsurf . injection . guice . scanner . asm . ASMClasspathScanner . class , de . devsurf . injection . guice . scanner . PackageFilter . create ( de . devsurf . injection . guice . scanner . asm . tests . autobind . duplicate . DuplicateAutobindTests . class ) ) ) ; "<AssertPlaceHolder>" ; } create ( java . lang . Class ) { return new de . devsurf . injection . guice . scanner . PackageFilter ( clazz . getPackage ( ) . getName ( ) , true ) ; }
|
org . junit . Assert . assertNotNull ( injector )
|
testGetParametersWithDefaultEntity ( ) { org . lnu . is . domain . order . type . OrderType entity = new org . lnu . is . domain . order . type . OrderType ( ) ; java . util . Map < java . lang . String , java . lang . Object > expected = new java . util . HashMap < java . lang . String , java . lang . Object > ( ) ; expected . put ( "status" , RowStatus . ACTIVE ) ; expected . put ( "userGroups" , groups ) ; java . util . Map < java . lang . String , java . lang . Object > actual = unit . getParameters ( entity ) ; "<AssertPlaceHolder>" ; } getParameters ( org . springframework . web . context . request . NativeWebRequest ) { java . util . Map < java . lang . String , java . lang . Object > resultMap = new java . util . HashMap < java . lang . String , java . lang . Object > ( ) ; java . util . Map < java . lang . String , java . lang . String > pathVariables = ( ( java . util . Map < java . lang . String , java . lang . String > ) ( webRequest . getAttribute ( HandlerMapping . URI_TEMPLATE_VARIABLES_ATTRIBUTE , RequestAttributes . SCOPE_REQUEST ) ) ) ; java . util . Map < java . lang . String , java . lang . Object > requestParams = getRequestParameterMap ( webRequest ) ; for ( Map . Entry < java . lang . String , java . lang . Object > entry : requestParams . entrySet ( ) ) { resultMap . put ( entry . getKey ( ) , entry . getValue ( ) ) ; } resultMap . putAll ( pathVariables ) ; return resultMap ; }
|
org . junit . Assert . assertEquals ( expected , actual )
|
testComponentDescriptorInjection ( ) { org . xwiki . component . embed . EmbeddableComponentManager ecm = new org . xwiki . component . embed . EmbeddableComponentManager ( ) ; org . xwiki . component . descriptor . DefaultComponentDescriptor < org . xwiki . component . embed . EmbeddableComponentManagerTest . Role > d = new org . xwiki . component . descriptor . DefaultComponentDescriptor ( ) ; d . setRoleType ( org . xwiki . component . embed . EmbeddableComponentManagerTest . Role . class ) ; d . setImplementation ( org . xwiki . component . embed . EmbeddableComponentManagerTest . ComponentDescriptorRoleImpl . class ) ; org . xwiki . component . descriptor . DefaultComponentDependency dependencyDescriptor = new org . xwiki . component . descriptor . DefaultComponentDependency ( ) ; dependencyDescriptor . setRoleType ( new org . xwiki . component . util . DefaultParameterizedType ( null , org . xwiki . component . descriptor . ComponentDescriptor . class , org . xwiki . component . embed . EmbeddableComponentManagerTest . ComponentDescriptorRoleImpl . class ) ) ; dependencyDescriptor . setName ( "descriptor" ) ; d . addComponentDependency ( dependencyDescriptor ) ; ecm . registerComponent ( d ) ; org . xwiki . component . embed . EmbeddableComponentManagerTest . ComponentDescriptorRoleImpl impl = ecm . getInstance ( org . xwiki . component . embed . EmbeddableComponentManagerTest . Role . class ) ; "<AssertPlaceHolder>" ; } getComponentDescriptor ( ) { return this . descriptor ; }
|
org . junit . Assert . assertNotNull ( impl . getComponentDescriptor ( ) )
|
testExecuteFunctionArg1SuccessCase ( ) { java . lang . Integer result = managedTransaction . executeFunction ( this :: callHelperWithReturn , testParam1 ) ; "<AssertPlaceHolder>" ; verifyInteractionWithTransactionManagerSuccessCase ( testParam1 ) ; } executeFunction ( ManagedTransactionFunction . Arg1 , T1 ) { return executeTransactionBlockInternal ( ( ) -> fn . apply ( t ) ) ; }
|
org . junit . Assert . assertEquals ( java . lang . Integer . valueOf ( 1 ) , result )
|
testSimpleIterable ( ) { final java . util . List < java . lang . String > expected = java . util . Arrays . asList ( "3" , null , "2" , "1" ) ; java . lang . Iterable < java . lang . String > source = java . util . Arrays . asList ( "1" , "2" , null , "3" ) ; java . util . List < java . lang . String > actual = new java . util . ArrayList ( ) ; xapi . collect . X_Collect . reverse ( source , actual :: add ) ; "<AssertPlaceHolder>" ; } reverse ( java . lang . Iterable , java . util . function . Consumer ) { if ( ( items instanceof xapi . fu . itr . ReverseIterable ) && ( ! ( items instanceof xapi . annotation . gc . NotReusable ) ) ) { final java . lang . Iterable < V > iterable = ( ( xapi . fu . itr . ReverseIterable < V > ) ( items ) ) . getIterable ( ) ; if ( ( iterable != null ) && ( ! ( iterable instanceof xapi . annotation . gc . NotReusable ) ) ) { iterable . forEach ( callback ) ; return ; } } xapi . collect . X_Collect . reverse ( items . iterator ( ) , callback ) ; }
|
org . junit . Assert . assertEquals ( expected , actual )
|
testAllCharactersUsed ( ) { char [ ] domain = "abcdefABCDEF" . toCharArray ( ) ; java . util . Set < java . lang . Character > chars = new java . util . HashSet < java . lang . Character > ( ) ; for ( char chr : domain ) { chars . add ( chr ) ; } com . carrotsearch . randomizedtesting . generators . CodepointSetGenerator gen = new com . carrotsearch . randomizedtesting . generators . CodepointSetGenerator ( new java . lang . String ( domain ) ) ; java . util . Random r = new java . util . Random ( randomLong ( ) ) ; for ( int i = 0 ; ( i < 1000000 ) && ( ! ( chars . isEmpty ( ) ) ) ; i ++ ) { for ( char ch : gen . ofCodeUnitsLength ( r , 100 , 100 ) . toCharArray ( ) ) { chars . remove ( ch ) ; } } "<AssertPlaceHolder>" ; } isEmpty ( ) { return ( size ( ) ) == 0 ; }
|
org . junit . Assert . assertTrue ( chars . isEmpty ( ) )
|
getCache_There ( ) { java . lang . String name = this . toString ( ) ; javax . cache . CacheManager cacheManager = getCacheManager ( ) ; cacheManager . createCache ( name , new javax . cache . configuration . MutableConfiguration ( ) ) ; javax . cache . Cache cache = cacheManager . getCache ( name ) ; "<AssertPlaceHolder>" ; } getCacheManager ( ) { javax . cache . spi . CachingProvider provider = javax . cache . Caching . getCachingProvider ( ) ; java . net . URI uri = provider . getDefaultURI ( ) ; return javax . cache . Caching . getCachingProvider ( ) . getCacheManager ( uri , provider . getDefaultClassLoader ( ) ) ; }
|
org . junit . Assert . assertSame ( cache , cacheManager . getCache ( name ) )
|
discardLogicWhitespaceAfterArgsRetain ( ) { java . lang . String html = rocker . DiscardLogicWhitespaceAfterArgsRetain . template ( "Hello" ) . render ( ) . toString ( ) ; java . lang . String expected = "Hello!" ; "<AssertPlaceHolder>" ; } toString ( ) { byte [ ] bytes = toByteArray ( ) ; return new java . lang . String ( bytes , this . charset ) ; }
|
org . junit . Assert . assertEquals ( expected , html )
|
threadsWaitingHighwatermark ( ) { org . lightview . business . pool . entity . PoolStatistics poolStats = this . cut . getPoolStats ( "lightfish" , "ConfigurationStore" ) ; "<AssertPlaceHolder>" ; } threadsWaitingHighwatermarkProperty ( ) { final javax . json . JsonObject threadsStatistics = getThreadsStatistics ( ) ; if ( threadsStatistics == null ) { return NOT_AVAILABLE ; } int value = threadsStatistics . getInt ( "highwatermark" ) ; currentThreadsWaiting . set ( value ) ; return this . currentThreadsWaiting ; }
|
org . junit . Assert . assertNotNull ( poolStats . threadsWaitingHighwatermarkProperty ( ) )
|
helper_specifiedName ( ) { pipeline = ( ( com . alibaba . citrus . service . pipeline . impl . PipelineImpl ) ( factory . getBean ( "handleException_helperName" ) ) ) ; java . lang . Exception e = new java . lang . Exception ( ) ; com . alibaba . citrus . webx . util . ErrorHandlerHelper . getInstance ( newRequest ) . init ( "app1" , e , null ) ; pipeline . newInvocation ( ) . invoke ( ) ; com . alibaba . citrus . webx . util . ErrorHandlerHelper helper = ( ( com . alibaba . citrus . webx . util . ErrorHandlerHelper ) ( rundata . getContext ( ) . get ( "error1" ) ) ) ; "<AssertPlaceHolder>" ; } getException ( ) { java . lang . IllegalStateException e1 = new java . lang . IllegalStateException ( ) ; java . lang . IllegalArgumentException e2 = new java . lang . IllegalArgumentException ( ) ; java . io . IOException e3 = new java . io . IOException ( ) ; e1 . initCause ( e3 ) ; e2 . initCause ( e1 ) ; e3 . initCause ( e2 ) ; return e3 ; }
|
org . junit . Assert . assertEquals ( e , helper . getException ( ) )
|
testNN_AtmCorrGetNumOutputNode ( ) { int numNodesInput = 1 ; int numNodesOutput = - 1 ; int numOutputNum = org . esa . s3tbx . fub . wew . util . NN_CHL . compute ( in , numNodesInput , out , numNodesOutput , width , mask , 0 , a ) ; "<AssertPlaceHolder>" ; } compute ( float [ ] [ ] , int , float [ ] [ ] , int , int , int [ ] , int , float [ ] ) { final int [ ] rcheck ; final int nodes_input = 18 ; final int nodes_output = 1 ; final int nodes_input_bias = 1 ; final int nodes_input_pca = 1 ; final int nodes_hidden = 100 ; final int nodes_hidden_bias = 1 ; final double nodes_hidden_temperature = 1.0 ; final double t_input = nodes_hidden_temperature / ( ( double ) ( nodes_input ) ) ; final double t_hidden = nodes_hidden_temperature / ( ( double ) ( nodes_hidden ) ) ; final double [ ] vt ; final double [ ] vt1 ; if ( getNumNodesInput <= 0 ) { return nodes_input ; } if ( getNumNodesOutput <= 0 ) { return nodes_output ; } if ( getNumNodesInput != nodes_input ) { return - 1 ; } if ( getNumNodesOutput != nodes_output ) { return - 2 ; } vt = new double [ nodes_input + nodes_input_bias ] ; vt1 = new double [ nodes_hidden + nodes_hidden_bias ] ; rcheck = new int [ width ] ; for ( int x = 0 ; x < width ; x ++ ) { rcheck [ x ] = 0 ; if ( ( a [ x ] ) < 0.0F ) { rcheck [ x ] = 1 ; } a [ x ] = 1.0F ; } for ( int x = 0 ; x < width ; x ++ ) { if ( ( mask [ x ] ) == 0 ) { if ( ( rcheck [ x ] ) != 0 ) { for ( int i = 0 ; ( i < nodes_input ) && ( ( a [ x ] ) > 0.0F ) ; i ++ ) { if ( ( ( in [ i ] [ x ] ) < ( ( float ) ( NN_General . NODES_INPUT_SCALE_LIMITS [ i ] [ 0 ] ) ) ) || ( ( in [ i ] [ x ] ) > ( ( float ) ( NN_General . NODES_INPUT_SCALE_LIMITS [ i ] [ 1 ] ) ) ) ) { a [ x ] -= 3.0F ; } } if ( ( a [ x ] ) < 0.0F ) { mask [ x ] |= errmask ; } } for ( int i = 0 ; i < nodes_input ; i ++ ) { if ( ( NN_General . NODES_INPUT_SCALE_FLAG [ i ] ) == ( - 1 ) ) { in [ i ] [ x ] = ( ( float ) ( java . lang . Math . log ( ( ( double ) ( in [ i ] [ x ] ) ) ) ) ) ; } if ( ( NN_General . NODES_INPUT_SCALE_FLAG [ i ] ) == ( - 2 ) ) { in [ i ] [ x ] = ( ( float ) ( java . lang . Math . exp ( ( ( double ) ( in [ i ] [ x ] ) ) ) ) ) ; } } for ( int i = 0 ; i < nodes_input ; i ++ ) { in [ i ] [ x ] = ( ( float ) ( NN_General . NODES_INPUT_SCALE_OFF [ i ] ) ) + ( ( ( in [ i ] [ x ] ) - ( ( float ) ( org . esa . s3tbx . fub . wew . util . NN_CHL . nodes_input_scale_run46 [ i ] [ 0 ] ) ) ) / ( ( float ) ( org . esa . s3tbx . fub . wew . util . NN_CHL . nodes_input_scale_run46 [ i ] [ 1 ] ) ) ) ; } if ( nodes_input_pca != 0 ) { for ( int i = 0 ; i < nodes_input ; i ++ ) { vt [ i ] = ( ( double ) ( in [ i ] [ x ] ) ) ; if ( ( NN_General . NODES_INPUT_SCALE_FLAG [ i ] ) == 1 ) { vt [ i ] = 0.0 ; for ( int j = 0 ; j < nodes_input ; j ++ ) { if ( ( NN_General . NODES_INPUT_SCALE_FLAG [ j ] ) == 1 ) { vt [ i ] += ( ( double ) ( in [ j ] [ x ] ) ) * ( org . esa . s3tbx . fub . wew . util . NN_CHL . nodes_input_pca_evec_run46 [ j ] [ i ] ) ; } } } } for ( int i = 0 ; i < nodes_input ; i ++ ) { in [ i ] [ x ] = ( ( float ) ( vt [ i ] ) ) ; } } for ( int i = 0 ; i < nodes_input ; i ++ ) { vt [ i ] = ( ( double ) ( in [ i ] [ x ] ) ) ; } for ( int i = nodes_input ; i < ( nodes_input + nodes_input_bias ) ; i ++ ) { vt [ i ] = 1.0 ; } for ( int i = 0 ; i < nodes_hidden ; i ++ ) { vt1 [ i ] = 0.0 ; for ( int j = 0 ; j < ( nodes_input + nodes_input_bias ) ; j ++ ) { vt1 [ i ] += ( vt [ j ] ) * ( org . esa . s3tbx . fub . wew
|
org . junit . Assert . assertEquals ( 1 , numOutputNum )
|
testGetAsJsonString ( ) { java . lang . String json = "{\"level\":1.0,\"l2\":{\"level\":2.0,\"l3\":{\"level\":3.0}}}" ; java . lang . String conv = com . adobe . acs . commons . json . JsonObjectUtil . getAsJsonString ( json , 3 ) ; "<AssertPlaceHolder>" ; } getAsJsonString ( T , int ) { com . google . gson . Gson gson = new com . google . gson . Gson ( ) ; com . google . gson . JsonObject object = com . adobe . acs . commons . json . JsonObjectUtil . toJsonObject ( source , depth ) ; return gson . toJson ( object ) ; }
|
org . junit . Assert . assertEquals ( json , conv )
|
deletingProcessDeletesActorMappings ( ) { final org . bonitasoft . engine . service . TenantServiceAccessor tenantAccessor = getTenantAccessor ( ) ; final org . bonitasoft . engine . transaction . UserTransactionService transactionService = tenantAccessor . getUserTransactionService ( ) ; final java . lang . String userTaskName = "actNaturally" ; final org . bonitasoft . engine . bpm . process . ProcessDefinition definition = deployAndEnableProcessWithOneHumanTask ( "deletingProcessDeletesActorMappings" , "CandidateForOscarReward" , userTaskName ) ; final org . bonitasoft . engine . bpm . process . ProcessInstance processInstanceId = getProcessAPI ( ) . startProcess ( definition . getId ( ) ) ; waitForUserTask ( processInstanceId , userTaskName ) ; disableAndDeleteProcess ( definition ) ; setSessionInfo ( getSession ( ) ) ; final java . util . List < org . bonitasoft . engine . actor . mapping . model . SActorMember > actorMembers = transactionService . executeInTransaction ( new java . util . concurrent . Callable < java . util . List < org . bonitasoft . engine . actor . mapping . model . SActorMember > > ( ) { @ org . bonitasoft . engine . test . Override public java . util . List < org . bonitasoft . engine . actor . mapping . model . SActorMember > call ( ) throws org . bonitasoft . engine . test . Exception { return getTenantAccessor ( ) . getActorMappingService ( ) . getActorMembersOfUser ( john . getId ( ) , 0 , 1 ) ; } } ) ; "<AssertPlaceHolder>" ; } getId ( ) { return id ; }
|
org . junit . Assert . assertEquals ( 0 , actorMembers . size ( ) )
|
compilationTest ( ) { org . springframework . context . ApplicationContext context = new org . springframework . context . support . ClassPathXmlApplicationContext ( "META-INF/applicationContext.xml" ) ; org . drools . mas . core . DroolsAgent agent = ( ( org . drools . mas . core . DroolsAgent ) ( context . getBean ( "agent" ) ) ) ; "<AssertPlaceHolder>" ; org . drools . mas . helpers . DialogueHelper helper = new org . drools . mas . helpers . DialogueHelper ( "http://${agent.endpoint.ip}:${agent.endpoint.port}/${agent.name}/services/AsyncAgentService?wsdl" ) ; helper . invokeInform ( "me" , "you" , "Hello<sp>World!" , null ) ; java . lang . Thread . sleep ( 3000 ) ; agent . dispose ( ) ; }
|
org . junit . Assert . assertNotNull ( agent )
|
normalizesAbsolutePathWithMoreDoubleDotsThanNamesTest ( ) { com . beijunyi . parallelgit . filesystem . GitPath path = gfs . getPath ( "/a/../.." ) ; com . beijunyi . parallelgit . filesystem . GitPath result = path . normalize ( ) ; "<AssertPlaceHolder>" ; } toString ( ) { if ( ( stringValue ) == null ) stringValue = decode ( com . beijunyi . parallelgit . filesystem . CHARSET , path ) ; return stringValue ; }
|
org . junit . Assert . assertEquals ( "/" , result . toString ( ) )
|
deveObterNumeroControleFCIComoFoiSetado ( ) { final com . fincatto . documentofiscal . nfe310 . classes . nota . NFNotaInfoItemProduto produto = new com . fincatto . documentofiscal . nfe310 . classes . nota . NFNotaInfoItemProduto ( ) ; final java . lang . String numeroControleFCI = "B01F70AF-10BF-4B1F-848C-65FF57F616FE" ; produto . setNumeroControleFCI ( numeroControleFCI ) ; "<AssertPlaceHolder>" ; } getNumeroControleFCI ( ) { return this . numeroControleFCI ; }
|
org . junit . Assert . assertEquals ( numeroControleFCI , produto . getNumeroControleFCI ( ) )
|
testFetchByPrimaryKeyMissing ( ) { long pk = com . liferay . portal . kernel . test . util . RandomTestUtil . nextLong ( ) ; com . liferay . message . boards . model . MBStatsUser missingMBStatsUser = _persistence . fetchByPrimaryKey ( pk ) ; "<AssertPlaceHolder>" ; } fetchByPrimaryKey ( long ) { return com . liferay . adaptive . media . image . service . persistence . AMImageEntryUtil . getPersistence ( ) . fetchByPrimaryKey ( amImageEntryId ) ; }
|
org . junit . Assert . assertNull ( missingMBStatsUser )
|
test_invalid_handle_exceed_maxlength ( ) { org . restfulwhois . rdap . common . dto . AutnumDto autnum = generateAutnumDto ( ) ; java . lang . String stringExceedOneMoreChar = createStringWithLength ( ( ( org . restfulwhois . rdap . common . util . UpdateValidateUtil . MAX_LENGTH_HANDLE ) + 1 ) ) ; autnum . setHandle ( stringExceedOneMoreChar ) ; "<AssertPlaceHolder>" ; java . lang . String content = org . restfulwhois . rdap . JsonHelper . serialize ( autnum ) ; mockMvc . perform ( post ( org . restfulwhois . rdap . controller . AutnumCreateControllerTest . URI_AUTNUM_U ) . contentType ( org . springframework . http . MediaType . parseMediaType ( rdapJson ) ) . content ( content ) ) . andExpect ( status ( ) . isBadRequest ( ) ) . andExpect ( content ( ) . contentType ( rdapJson ) ) . andExpect ( jsonPath ( "$.errorCode" ) . value ( 400 ) ) . andExpect ( jsonPath ( "$.subErrorCode" ) . value ( 4003 ) ) . andExpect ( jsonPath ( "$.description" ) . value ( org . hamcrest . CoreMatchers . hasItems ( java . lang . String . format ( ServiceErrorCode . ERROR_4003 . getMessage ( ) , "handle" , ( ( org . restfulwhois . rdap . common . util . UpdateValidateUtil . MAX_LENGTH_HANDLE ) + "" ) ) ) ) ) ; } getHandle ( ) { return handle ; }
|
org . junit . Assert . assertTrue ( ( ( autnum . getHandle ( ) . length ( ) ) > ( org . restfulwhois . rdap . common . util . UpdateValidateUtil . MAX_LENGTH_HANDLE ) ) )
|
testOperationRef ( ) { org . xml . sax . InputSource xml = new org . xml . sax . InputSource ( getClass ( ) . getResourceAsStream ( "Calculator.composite" ) ) ; javax . xml . xpath . XPathExpression exp = org . apache . tuscany . sca . policy . xml . PolicyXPathFunctionResolverTestCase . xpath . compile ( org . apache . tuscany . sca . policy . xml . PolicyXPathFunction . normalize ( "//sca:composite/sca:component/sca:reference[sca:OperationRef('AddService/add')]" , "sca" ) ) ; java . lang . Object result = exp . evaluate ( xml , XPathConstants . NODESET ) ; "<AssertPlaceHolder>" ; org . w3c . dom . NodeList nodes = ( ( org . w3c . dom . NodeList ) ( result ) ) ; } normalize ( java . lang . String , java . lang . String ) { attachTo = attachTo . trim ( ) ; java . util . regex . Matcher matcher = org . apache . tuscany . sca . policy . xml . PolicyXPathFunction . FUNCTION . matcher ( attachTo ) ; boolean result = matcher . find ( ) ; if ( result ) { java . lang . StringBuffer sb = new java . lang . StringBuffer ( ) ; do { java . lang . String function = matcher . group ( 1 ) ; java . lang . String args = matcher . group ( 2 ) ; if ( ( ( matcher . start ( ) ) == 0 ) || ( ( attachTo . charAt ( ( ( matcher . start ( ) ) - 1 ) ) ) != ':' ) ) { function = ( scaPrefix + ":" ) + function ; } java . lang . String replacement = null ; if ( ( args . trim ( ) . length ( ) ) > 0 ) { replacement = ( ( ( function + "(" ) + args ) + "," ) + "self::node())" ; } else { replacement = function + "(self::node())" ; } matcher . appendReplacement ( sb , replacement ) ; result = matcher . find ( ) ; } while ( result ) ; matcher . appendTail ( sb ) ; return sb . toString ( ) ; } return attachTo ; }
|
org . junit . Assert . assertTrue ( ( result instanceof org . w3c . dom . NodeList ) )
|
testDynamicQueryByPrimaryKeyMissing ( ) { com . liferay . portal . kernel . dao . orm . DynamicQuery dynamicQuery = com . liferay . portal . kernel . dao . orm . DynamicQueryFactoryUtil . forClass ( com . liferay . portal . kernel . model . Group . class , _dynamicQueryClassLoader ) ; dynamicQuery . add ( com . liferay . portal . kernel . dao . orm . RestrictionsFactoryUtil . eq ( "groupId" , com . liferay . portal . kernel . test . util . RandomTestUtil . nextLong ( ) ) ) ; java . util . List < com . liferay . portal . kernel . model . Group > result = _persistence . findWithDynamicQuery ( dynamicQuery ) ; "<AssertPlaceHolder>" ; } size ( ) { if ( ( _workflowTaskAssignees ) != null ) { return _workflowTaskAssignees . size ( ) ; } return _kaleoTaskAssignmentInstanceLocalService . getKaleoTaskAssignmentInstancesCount ( _kaleoTaskInstanceToken . getKaleoTaskInstanceTokenId ( ) ) ; }
|
org . junit . Assert . assertEquals ( 0 , result . size ( ) )
|
testCharArrayModification ( ) { org . apache . ignite . internal . binary . mutabletest . GridBinaryTestClasses . TestObjectAllTypes obj = new org . apache . ignite . internal . binary . mutabletest . GridBinaryTestClasses . TestObjectAllTypes ( ) ; obj . cArr = new char [ ] { 'a' , 'a' , 'a' } ; org . apache . ignite . internal . binary . builder . BinaryObjectBuilderImpl mutObj = wrap ( obj ) ; char [ ] arr = mutObj . getField ( "cArr" ) ; arr [ 0 ] = 'b' ; org . apache . ignite . internal . binary . mutabletest . GridBinaryTestClasses . TestObjectAllTypes res = mutObj . build ( ) . deserialize ( ) ; "<AssertPlaceHolder>" ; } deserialize ( ) { return ( ( T ) ( deserializeValue ( ) ) ) ; }
|
org . junit . Assert . assertArrayEquals ( new char [ ] { 'b' , 'a' , 'a' } , res . cArr )
|
readToWriteRoundTrip ( ) { java . io . InputStream inputStream = new java . io . ByteArrayInputStream ( getStructureFormatWorkflowBundle ( ) . getBytes ( "utf-8" ) ) ; org . apache . taverna . scufl2 . api . container . WorkflowBundle readBundle = bundleIO . readBundle ( inputStream , org . apache . taverna . scufl2 . api . io . TEXT_VND_TAVERNA_SCUFL2_STRUCTURE ) ; org . apache . commons . io . output . ByteArrayOutputStream output = new org . apache . commons . io . output . ByteArrayOutputStream ( ) ; bundleIO . writeBundle ( readBundle , output , org . apache . taverna . scufl2 . api . io . TEXT_VND_TAVERNA_SCUFL2_STRUCTURE ) ; java . lang . String bundleTxt = new java . lang . String ( output . toByteArray ( ) , org . apache . taverna . scufl2 . api . io . TestWorkflowBundleIO . UTF_8 ) ; java . lang . String getStructureFormatWorkflowBundle = getStructureFormatWorkflowBundle ( ) ; bundleTxt = bundleTxt . replaceAll ( "\r" , "" ) . replaceAll ( "\n" , "" ) ; getStructureFormatWorkflowBundle = getStructureFormatWorkflowBundle . replaceAll ( "\r" , "" ) . replaceAll ( "\n" , "" ) ; "<AssertPlaceHolder>" ; } getStructureFormatWorkflowBundle ( ) { java . io . InputStream helloWorldStream = getClass ( ) . getResourceAsStream ( "HelloWorld.txt" ) ; return org . apache . commons . io . IOUtils . toString ( helloWorldStream ) ; }
|
org . junit . Assert . assertEquals ( getStructureFormatWorkflowBundle , bundleTxt )
|
testWildcardArrayList ( ) { javax . ws . rs . client . Invocation . Builder request = org . jboss . resteasy . test . resource . param . MultiplePathSegmentTest . client . target ( generateURL ( "/a/b/c/arraylist/3" ) ) . request ( ) ; javax . ws . rs . core . Response response = request . get ( ) ; "<AssertPlaceHolder>" ; response . close ( ) ; } getStatus ( ) { return status ; }
|
org . junit . Assert . assertEquals ( 200 , response . getStatus ( ) )
|
avoidAnnoyingErrorMessageWhenRunningTestsInAnt ( ) { "<AssertPlaceHolder>" ; }
|
org . junit . Assert . assertTrue ( true )
|
testDrivingHistory ( ) { github . priyatam . springrest . domain . DrivingHistory h = github . priyatam . springrest . MockDataHelper . createDrivingHistory ( ) ; github . priyatam . springrest . domain . DomainTest . mapper . writeValue ( github . priyatam . springrest . domain . DomainTest . file ( "drivingHistory.json" ) , h ) ; github . priyatam . springrest . domain . DrivingHistory result = github . priyatam . springrest . domain . DomainTest . mapper . readValue ( github . priyatam . springrest . domain . DomainTest . file ( "drivingHistory.json" ) , github . priyatam . springrest . domain . DrivingHistory . class ) ; "<AssertPlaceHolder>" ; } file ( java . lang . String ) { return new java . io . File ( ( ( github . priyatam . springrest . domain . DomainTest . jsonRoot ) + name ) ) ; }
|
org . junit . Assert . assertNotNull ( result )
|
testNewStackIsEmpty ( ) { com . github . davidmoten . util . ImmutableStack < java . lang . Object > s = com . github . davidmoten . util . ImmutableStack . empty ( ) ; "<AssertPlaceHolder>" ; } isEmpty ( ) { return ! ( head . isPresent ( ) ) ; }
|
org . junit . Assert . assertTrue ( s . isEmpty ( ) )
|
isGeauthoriseerdVoorDeDienstAfnemderindicatieVerwijderen ( ) { final nl . bzk . brp . model . algemeen . stamgegeven . autaut . Leveringsautorisatie la = nl . bzk . brp . model . algemeen . stamgegeven . autaut . TestLeveringsautorisatieBuilder . metDienst ( SoortDienst . VERWIJDEREN_AFNEMERINDICATIE ) ; final nl . bzk . brp . model . algemeen . stamgegeven . autaut . ToegangLeveringsautorisatie toegangLeveringsautorisatie = nl . bzk . brp . model . algemeen . stamgegeven . autaut . TestToegangLeveringautorisatieBuilder . maker ( ) . metLeveringsautorisatie ( la ) . maak ( ) ; final nl . bzk . brp . business . regels . context . AutorisatieRegelContext regelContext = new nl . bzk . brp . business . regels . context . AutorisatieRegelContext ( toegangLeveringsautorisatie , la . geefDiensten ( ) . iterator ( ) . next ( ) , persoon , nl . bzk . brp . model . algemeen . stamgegeven . kern . SoortAdministratieveHandeling . VERWIJDERING_AFNEMERINDICATIE ) ; final boolean resultaat = regel . valideer ( regelContext ) ; "<AssertPlaceHolder>" ; } valideer ( nl . bzk . brp . business . regels . context . BerichtRegelContext ) { final java . util . List < nl . bzk . brp . model . basis . BerichtIdentificeerbaar > objectenDieDeRegelOvertreden = new java . util . ArrayList ( ) ; final nl . bzk . brp . model . bericht . ber . BerichtZoekcriteriaPersoonGroepBericht zoekcriteria = regelContext . getBericht ( ) . getZoekcriteriaPersoon ( ) ; if ( ! ( preciesEenKeerNotNull ( zoekcriteria . getBurgerservicenummer ( ) , zoekcriteria . getAdministratienummer ( ) , zoekcriteria . getObjectSleutel ( ) ) ) ) { objectenDieDeRegelOvertreden . add ( zoekcriteria ) ; } return objectenDieDeRegelOvertreden ; }
|
org . junit . Assert . assertTrue ( resultaat )
|
deveObterDigestMethodComoFoiSetado ( ) { final com . fincatto . documentofiscal . nfe310 . classes . nota . assinatura . NFReference reference = new com . fincatto . documentofiscal . nfe310 . classes . nota . assinatura . NFReference ( ) ; final com . fincatto . documentofiscal . nfe310 . classes . nota . assinatura . NFDigestMethod digestMethod = new com . fincatto . documentofiscal . nfe310 . classes . nota . assinatura . NFDigestMethod ( ) ; digestMethod . setAlgorithm ( "digest<sp>method" ) ; reference . setDigestMethod ( digestMethod ) ; "<AssertPlaceHolder>" ; } getDigestMethod ( ) { return this . digestMethod ; }
|
org . junit . Assert . assertEquals ( digestMethod , reference . getDigestMethod ( ) )
|
testAck ( ) { conn . connect ( defUser , defPass ) ; subscribe ( conn , "sub1" , Stomp . Headers . Subscribe . AckModeValues . CLIENT ) ; sendJmsMessage ( getName ( ) ) ; org . apache . activemq . artemis . tests . integration . stomp . util . ClientStompFrame frame = conn . receiveFrame ( ) ; java . lang . String messageID = frame . getHeader ( Stomp . Headers . Message . MESSAGE_ID ) ; ack ( conn , "sub1" , messageID , null ) ; unsubscribe ( conn , "sub1" ) ; conn . disconnect ( ) ; javax . jms . MessageConsumer consumer = session . createConsumer ( queue ) ; javax . jms . Message message = consumer . receive ( 1000 ) ; "<AssertPlaceHolder>" ; } receive ( long ) { session . lock ( ) ; try { if ( ActiveMQRALogger . LOGGER . isTraceEnabled ( ) ) { ActiveMQRALogger . LOGGER . trace ( ( ( ( "receive<sp>" + ( this ) ) + "<sp>timeout=" ) + timeout ) ) ; } checkState ( ) ; javax . jms . Message message = consumer . receive ( timeout ) ; if ( ActiveMQRALogger . LOGGER . isTraceEnabled ( ) ) { ActiveMQRALogger . LOGGER . trace ( ( ( ( "received<sp>" + ( this ) ) + "<sp>result=" ) + message ) ) ; } if ( message == null ) { return null ; } else { return wrapMessage ( message ) ; } } finally { session . unlock ( ) ; } }
|
org . junit . Assert . assertNull ( message )
|
testObjectFactoryFixer ( ) { final java . io . File in = new java . io . File ( this . getClass ( ) . getResource ( "ObjectFactory.java" ) . getFile ( ) ) ; final java . io . File out = java . io . File . createTempFile ( "ObjectFactory" , null ) ; out . deleteOnExit ( ) ; final io . github . zlika . reproducible . JaxbObjectFactoryFixer fixer = new io . github . zlika . reproducible . JaxbObjectFactoryFixer ( java . nio . charset . StandardCharsets . UTF_8 ) ; fixer . strip ( in , out ) ; final byte [ ] expected = java . nio . file . Files . readAllBytes ( new java . io . File ( this . getClass ( ) . getResource ( "ObjectFactory-fixed.java" ) . getFile ( ) ) . toPath ( ) ) ; final byte [ ] actual = java . nio . file . Files . readAllBytes ( out . toPath ( ) ) ; "<AssertPlaceHolder>" ; out . delete ( ) ; } strip ( java . io . File , java . io . File ) { try ( final java . io . BufferedWriter writer = new java . io . BufferedWriter ( new java . io . OutputStreamWriter ( new java . io . FileOutputStream ( out ) , java . nio . charset . StandardCharsets . UTF_8 ) ) ) { final java . util . List < java . lang . String > lines = java . nio . file . Files . readAllLines ( in . toPath ( ) , StandardCharsets . UTF_8 ) ; for ( int i = 0 ; i < ( lines . size ( ) ) ; i ++ ) { if ( i != ( lineNumber ) ) { try { writer . write ( lines . get ( i ) ) ; writer . write ( "\r\n" ) ; } catch ( java . io . IOException e ) { } } } } }
|
org . junit . Assert . assertArrayEquals ( expected , actual )
|
testPurgeLogs ( ) { java . lang . String name = "distrlog-purge-logs" ; java . net . URI uri = createDLMURI ( ( "/" + name ) ) ; populateData ( new java . util . HashMap < java . lang . Long , org . apache . distributedlog . DLSN > ( ) , org . apache . distributedlog . TestTruncate . conf , name , 10 , 10 , false ) ; org . apache . distributedlog . api . DistributedLogManager distributedLogManager = createNewDLM ( org . apache . distributedlog . TestTruncate . conf , name ) ; java . util . List < org . apache . distributedlog . LogSegmentMetadata > segments = distributedLogManager . getLogSegments ( ) ; org . apache . distributedlog . TestTruncate . LOG . info ( "Segments<sp>before<sp>modifying<sp>completion<sp>time<sp>:<sp>{}" , segments ) ; org . apache . distributedlog . ZooKeeperClient zkc = org . apache . distributedlog . TestZooKeeperClientBuilder . newBuilder ( org . apache . distributedlog . TestTruncate . conf ) . uri ( uri ) . build ( ) ; long newTimeMs = ( java . lang . System . currentTimeMillis ( ) ) - ( ( ( 60 * 60 ) * 1000 ) * 2 ) ; for ( int i = 0 ; i < 5 ; i ++ ) { org . apache . distributedlog . LogSegmentMetadata segment = segments . get ( i ) ; org . apache . distributedlog . TestTruncate . updateCompletionTime ( zkc , segment , ( newTimeMs + i ) ) ; } zkc . close ( ) ; segments = distributedLogManager . getLogSegments ( ) ; org . apache . distributedlog . TestTruncate . LOG . info ( "Segments<sp>after<sp>modifying<sp>completion<sp>time<sp>:<sp>{}" , segments ) ; org . apache . distributedlog . DistributedLogConfiguration confLocal = new org . apache . distributedlog . DistributedLogConfiguration ( ) ; confLocal . loadConf ( org . apache . distributedlog . TestTruncate . conf ) ; confLocal . setRetentionPeriodHours ( 1 ) ; confLocal . setExplicitTruncationByApplication ( false ) ; org . apache . distributedlog . api . DistributedLogManager dlm = createNewDLM ( confLocal , name ) ; org . apache . distributedlog . api . AsyncLogWriter writer = dlm . startAsyncLogSegmentNonPartitioned ( ) ; long txid = 1 + ( 10 * 10 ) ; for ( int j = 1 ; j <= 10 ; j ++ ) { org . apache . distributedlog . util . Utils . ioResult ( writer . write ( org . apache . distributedlog . DLMTestUtil . getLogRecordInstance ( ( txid ++ ) ) ) ) ; } org . apache . distributedlog . DLSN lastDLSN = org . apache . distributedlog . util . Utils . ioResult ( dlm . getLastDLSNAsync ( ) ) ; org . apache . distributedlog . TestTruncate . LOG . info ( "Get<sp>last<sp>dlsn<sp>of<sp>stream<sp>{}<sp>:<sp>{}" , name , lastDLSN ) ; "<AssertPlaceHolder>" ; org . apache . distributedlog . util . Utils . close ( writer ) ; dlm . close ( ) ; distributedLogManager . close ( ) ; } getLogSegments ( ) { return org . apache . distributedlog . util . Utils . ioResult ( getLogSegmentsAsync ( ) ) ; }
|
org . junit . Assert . assertEquals ( 6 , distributedLogManager . getLogSegments ( ) . size ( ) )
|
testSubmissionUnauthorized ( ) { eval1 . setStatus ( EvaluationStatus . OPEN ) ; eval1 = entityServletHelper . createEvaluation ( eval1 , adminUserId ) ; evaluationsToDelete . add ( eval1 . getId ( ) ) ; org . sagebionetworks . repo . model . UserInfo ownerInfo = userManager . getUserInfo ( adminUserId ) ; java . lang . String nodeId = createNode ( "An<sp>entity" , ownerInfo ) ; "<AssertPlaceHolder>" ; nodesToDelete . add ( nodeId ) ; org . sagebionetworks . repo . model . Node node = nodeManager . get ( ownerInfo , nodeId ) ; sub1 . setEvaluationId ( eval1 . getId ( ) ) ; sub1 . setEntityId ( nodeId ) ; sub1 = entityServletHelper . createSubmission ( sub1 , testUserId , node . getETag ( ) ) ; } createNode ( java . lang . String , org . sagebionetworks . repo . model . UserInfo ) { org . sagebionetworks . repo . model . Node toCreate = new org . sagebionetworks . repo . model . Node ( ) ; toCreate . setName ( name ) ; java . lang . String ownerId = userInfo . getId ( ) . toString ( ) ; toCreate . setCreatedByPrincipalId ( java . lang . Long . parseLong ( ownerId ) ) ; toCreate . setModifiedByPrincipalId ( java . lang . Long . parseLong ( ownerId ) ) ; toCreate . setCreatedOn ( new java . util . Date ( java . lang . System . currentTimeMillis ( ) ) ) ; toCreate . setModifiedOn ( toCreate . getCreatedOn ( ) ) ; toCreate . setNodeType ( EntityType . project ) ; toCreate . setVersionComment ( "This<sp>is<sp>the<sp>first<sp>version<sp>of<sp>the<sp>first<sp>node<sp>ever!" ) ; toCreate . setVersionLabel ( "1" ) ; java . lang . String id = nodeManager . createNode ( toCreate , userInfo ) . getId ( ) ; nodesToDelete . add ( org . sagebionetworks . repo . model . jdo . KeyFactory . stringToKey ( id ) . toString ( ) ) ; return id ; }
|
org . junit . Assert . assertNotNull ( nodeId )
|
validate_twoGenerators_noErrors ( ) { java . util . SortedSet < liquibase . sqlgenerator . SqlGenerator > generators = new java . util . TreeSet < liquibase . sqlgenerator . SqlGenerator > ( new liquibase . sqlgenerator . SqlGeneratorComparator ( ) ) ; generators . add ( new liquibase . sqlgenerator . MockSqlGenerator ( 2 , "B1" , "B2" ) ) ; generators . add ( new liquibase . sqlgenerator . MockSqlGenerator ( 1 , "A1" , "A2" ) ) ; liquibase . sqlgenerator . SqlGeneratorChain chain = new liquibase . sqlgenerator . SqlGeneratorChain ( generators ) ; liquibase . exception . ValidationErrors validationErrors = chain . validate ( new liquibase . statement . core . MockSqlStatement ( ) , new liquibase . database . core . MockDatabase ( ) ) ; "<AssertPlaceHolder>" ; } hasErrors ( ) { return ! ( errorMessages . isEmpty ( ) ) ; }
|
org . junit . Assert . assertFalse ( validationErrors . hasErrors ( ) )
|
testFilePath ( ) { org . owasp . dependencycheck . xml . suppression . SuppressionRule instance = new org . owasp . dependencycheck . xml . suppression . SuppressionRule ( ) ; org . owasp . dependencycheck . xml . suppression . PropertyType expResult = new org . owasp . dependencycheck . xml . suppression . PropertyType ( ) ; expResult . setValue ( "test" ) ; instance . setFilePath ( expResult ) ; org . owasp . dependencycheck . xml . suppression . PropertyType result = instance . getFilePath ( ) ; "<AssertPlaceHolder>" ; } getFilePath ( ) { return filePath ; }
|
org . junit . Assert . assertEquals ( expResult , result )
|
createApplicationActiveTraceChartGroupTest ( ) { long time = 1495418083250L ; java . lang . String id = "app_1_1" 0 ; com . navercorp . pinpoint . web . vo . Range range = new com . navercorp . pinpoint . web . vo . Range ( ( time - 240000 ) , time ) ; com . navercorp . pinpoint . web . util . TimeWindow timeWindow = new com . navercorp . pinpoint . web . util . TimeWindow ( range ) ; java . util . List < com . navercorp . pinpoint . web . vo . stat . AggreJoinActiveTraceBo > aggreJoinActiveTraceBoList = new java . util . ArrayList ( 5 ) ; com . navercorp . pinpoint . web . vo . stat . AggreJoinActiveTraceBo aggreJoinActiveTraceBo1 = new com . navercorp . pinpoint . web . vo . stat . AggreJoinActiveTraceBo ( id , 1 , ( ( short ) ( 2 ) ) , 150 , 10 , "app_1_1" , 230 , "app_1_2" , time ) ; com . navercorp . pinpoint . web . vo . stat . AggreJoinActiveTraceBo aggreJoinActiveTraceBo2 = new com . navercorp . pinpoint . web . vo . stat . AggreJoinActiveTraceBo ( id , 1 , ( ( short ) ( 2 ) ) , 110 , 22 , "app_2_1" , 330 , "app_2_2" , ( time - 60000 ) ) ; com . navercorp . pinpoint . web . vo . stat . AggreJoinActiveTraceBo aggreJoinActiveTraceBo3 = new com . navercorp . pinpoint . web . vo . stat . AggreJoinActiveTraceBo ( id , 1 , ( ( short ) ( 2 ) ) , 120 , 24 , "app_3_1" , 540 , "app_3_2" , ( time - 120000 ) ) ; com . navercorp . pinpoint . web . vo . stat . AggreJoinActiveTraceBo aggreJoinActiveTraceBo4 = new com . navercorp . pinpoint . web . vo . stat . AggreJoinActiveTraceBo ( id , 1 , ( ( short ) ( 2 ) ) , 130 , 25 , "app_4_1" , 560 , "app_4_2" , ( time - 180000 ) ) ; com . navercorp . pinpoint . web . vo . stat . AggreJoinActiveTraceBo aggreJoinActiveTraceBo5 = new com . navercorp . pinpoint . web . vo . stat . AggreJoinActiveTraceBo ( id , 1 , ( ( short ) ( 2 ) ) , 140 , 12 , "app_5_1" , 260 , "app_5_2" , ( time - 240000 ) ) ; aggreJoinActiveTraceBoList . add ( aggreJoinActiveTraceBo1 ) ; aggreJoinActiveTraceBoList . add ( aggreJoinActiveTraceBo2 ) ; aggreJoinActiveTraceBoList . add ( aggreJoinActiveTraceBo3 ) ; aggreJoinActiveTraceBoList . add ( aggreJoinActiveTraceBo4 ) ; aggreJoinActiveTraceBoList . add ( aggreJoinActiveTraceBo5 ) ; com . navercorp . pinpoint . web . vo . stat . chart . StatChartGroup applicationActiveTraceChartGroup = new com . navercorp . pinpoint . web . vo . stat . chart . application . ApplicationActiveTraceChart . ApplicationActiveTraceChartGroup ( timeWindow , aggreJoinActiveTraceBoList ) ; java . util . Map < com . navercorp . pinpoint . web . vo . stat . chart . StatChartGroup . ChartType , com . navercorp . pinpoint . web . vo . chart . Chart < ? extends com . navercorp . pinpoint . web . vo . chart . Point > > charts = applicationActiveTraceChartGroup . getCharts ( ) ; com . navercorp . pinpoint . web . vo . chart . Chart < ? extends com . navercorp . pinpoint . web . vo . chart . Point > activeTraceChart = charts . get ( ApplicationActiveTraceChart . ApplicationActiveTraceChartGroup . ActiveTraceChartType . ACTIVE_TRACE_COUNT ) ; java . util . List < ? extends com . navercorp . pinpoint . web . vo . chart . Point > activeTracePointList = activeTraceChart . getPoints ( ) ; "<AssertPlaceHolder>" ; int index = activeTracePointList . size ( ) ; for ( com . navercorp . pinpoint . web . vo . chart . Point point : activeTracePointList ) { testActiveTraceCount ( ( ( com . navercorp . pinpoint . web . vo . stat . chart . application . ActiveTracePoint ) ( point ) ) , aggreJoinActiveTraceBoList . get ( ( -- index ) ) ) ; } } size ( ) { return this . queue . size ( ) ; }
|
org . junit . Assert . assertEquals ( 5 , activeTracePointList . size ( ) )
|
decorate_none ( ) { net . ripe . db . whois . scheduler . task . export . DecorationStrategy subject = new net . ripe . db . whois . scheduler . task . export . DecorationStrategy . None ( ) ; final net . ripe . db . whois . common . rpsl . RpslObject decorated = subject . decorate ( object ) ; "<AssertPlaceHolder>" ; }
|
org . junit . Assert . assertThat ( decorated , org . hamcrest . Matchers . is ( object ) )
|
testNonExistingSymbol ( ) { mockFindAllocatorOrDatacentre ( null ) ; "<AssertPlaceHolder>" ; org . powermock . api . easymock . PowerMock . verifyAll ( ) ; } isValidExistingSymbol ( ) { return isValid ( this , "existingSymbol" ) ; }
|
org . junit . Assert . assertFalse ( isValidExistingSymbol ( ) )
|
testAddFiles ( ) { model . Group group = new model . Group ( name , delimiter , columns , primary , "\\d+" ) ; group . addFile ( "src/main/resources/testfiles/test_input_metadata.txt" , true ) ; group . addFile ( "src/main/resources/testfiles/test_input.txt" , true ) ; "<AssertPlaceHolder>" ; } size ( ) { return chunkedData . size ( ) ; }
|
org . junit . Assert . assertEquals ( 2 , group . size ( ) )
|
testGetCurrentSize ( ) { final int DCS = WatchDataSourceImpl . DEFAULT_COLLECTION_SIZE ; org . rioproject . impl . watch . WatchDataSourceImpl impl = new org . rioproject . impl . watch . WatchDataSourceImpl ( ) ; impl . setID ( "watch" ) ; impl . setConfiguration ( EmptyConfiguration . INSTANCE ) ; for ( int j = 0 ; j < DCS ; j ++ ) { impl . addCalculable ( new org . rioproject . impl . watch . Calculable ( ) ) ; } org . rioproject . impl . watch . DataSourceMonitor mon = new org . rioproject . impl . watch . DataSourceMonitor ( impl ) ; mon . waitFor ( DCS ) ; int expected = java . lang . Math . min ( DCS , DCS ) ; "<AssertPlaceHolder>" ; impl . close ( ) ; } getCurrentSize ( ) { log . append ( "getCurrentSize()" ) ; return super . getCurrentSize ( ) ; }
|
org . junit . Assert . assertEquals ( expected , impl . getCurrentSize ( ) )
|
shouldFailCheckingRequiredBatchForGenealogyShiftBatchRequired ( ) { given ( order . getBelongsToField ( OrderFields . TECHNOLOGY ) ) . willReturn ( technology ) ; given ( order . getHasManyField ( "genealogies" ) . isEmpty ( ) ) . willReturn ( false ) ; given ( order . getHasManyField ( "genealogies" ) . iterator ( ) ) . willReturn ( iterator ) ; given ( iterator . hasNext ( ) ) . willReturn ( true , false ) ; given ( iterator . next ( ) ) . willReturn ( genealogy ) ; given ( technology . getBooleanField ( "batchRequired" ) ) . willReturn ( true ) ; given ( technology . getBooleanField ( "shiftFeatureRequired" ) ) . willReturn ( true ) ; given ( genealogy . getHasManyField ( "shiftFeatures" ) . isEmpty ( ) ) . willReturn ( true ) ; boolean result = orderService . checkRequiredBatch ( order ) ; "<AssertPlaceHolder>" ; } checkRequiredBatch ( com . qcadoo . model . api . Entity ) { com . qcadoo . model . api . Entity technology = order . getBelongsToField ( OrderFields . TECHNOLOGY ) ; if ( technology != null ) { if ( order . getHasManyField ( "genealogies" ) . isEmpty ( ) ) { if ( technology . getBooleanField ( "batchRequired" ) ) { return false ; } if ( technology . getBooleanField ( "shiftFeatureRequired" ) ) { return false ; } if ( technology . getBooleanField ( "postFeatureRequired" ) ) { return false ; } if ( technology . getBooleanField ( "otherFeatureRequired" ) ) { return false ; } for ( com . qcadoo . model . api . Entity operationComponent : technology . getTreeField ( TechnologyFields . OPERATION_COMPONENTS ) ) { for ( com . qcadoo . model . api . Entity operationProductComponent : operationComponent . getHasManyField ( TechnologyOperationComponentFields . OPERATION_PRODUCT_IN_COMPONENTS ) ) { if ( operationProductComponent . getBooleanField ( "batchRequired" ) ) { return false ; } } } } for ( com . qcadoo . model . api . Entity genealogy : order . getHasManyField ( "genealogies" ) ) { if ( ( technology . getBooleanField ( "batchRequired" ) ) && ( ( genealogy . getField ( "batch" ) ) == null ) ) { return false ; } if ( technology . getBooleanField ( "shiftFeatureRequired" ) ) { com . qcadoo . mes . orders . List < com . qcadoo . model . api . Entity > entityList = genealogy . getHasManyField ( "shiftFeatures" ) ; if ( entityList . isEmpty ( ) ) { return false ; } } if ( technology . getBooleanField ( "postFeatureRequired" ) ) { com . qcadoo . mes . orders . List < com . qcadoo . model . api . Entity > entityList = genealogy . getHasManyField ( "postFeatures" ) ; if ( entityList . isEmpty ( ) ) { return false ; } } if ( technology . getBooleanField ( "otherFeatureRequired" ) ) { com . qcadoo . mes . orders . List < com . qcadoo . model . api . Entity > entityList = genealogy . getHasManyField ( "otherFeatures" ) ; if ( entityList . isEmpty ( ) ) { return false ; } } for ( com . qcadoo . model . api . Entity genealogyProductIn : genealogy . getHasManyField ( "productInComponents" ) ) { if ( genealogyProductIn . getBelongsToField ( "shiftFeatures" 0 ) . getBooleanField ( "batchRequired" ) ) { com . qcadoo . mes . orders . List < com . qcadoo . model . api . Entity > entityList = genealogyProductIn . getHasManyField ( "batch" ) ; if ( entityList . isEmpty ( ) ) { return false ; } } } } } return true ; }
|
org . junit . Assert . assertFalse ( result )
|
testExpire ( ) { io . netty . channel . EventLoopGroup group = new io . netty . channel . DefaultEventLoopGroup ( 1 ) ; try { io . netty . channel . EventLoop loop = group . next ( ) ; final io . netty . resolver . dns . DefaultDnsCnameCache cache = new io . netty . resolver . dns . DefaultDnsCnameCache ( ) ; cache . cache ( "netty.io" , "mapping.netty.io" , 1 , loop ) ; java . lang . Throwable error = loop . schedule ( new java . util . concurrent . Callable < java . lang . Throwable > ( ) { @ io . netty . resolver . dns . Override public java . lang . Throwable call ( ) { try { "<AssertPlaceHolder>" ; return null ; } catch ( java . lang . Throwable cause ) { return cause ; } } } , 1 , TimeUnit . SECONDS ) . get ( ) ; if ( error != null ) { throw error ; } } finally { group . shutdownGracefully ( ) ; } } get ( java . lang . CharSequence ) { return get0 ( name ) ; }
|
org . junit . Assert . assertNull ( cache . get ( "netty.io" ) )
|
clientCalculatesCrc32FromDecompressedData_WhenCrc32IsValid ( ) { stubFor ( post ( urlEqualTo ( "/" ) ) . willReturn ( aResponse ( ) . withStatus ( 200 ) . withHeader ( "Content-Encoding" , "gzip" ) . withHeader ( "x-amz-crc32" , software . amazon . awssdk . protocol . tests . crc32 . AwsJsonCrc32ChecksumTests . JSON_BODY_Crc32_CHECKSUM ) . withBodyFile ( software . amazon . awssdk . protocol . tests . crc32 . AwsJsonCrc32ChecksumTests . JSON_BODY_GZIP ) ) ) ; software . amazon . awssdk . services . protocoljsonrpc . ProtocolJsonRpcClient jsonRpc = software . amazon . awssdk . services . protocoljsonrpc . ProtocolJsonRpcClient . builder ( ) . credentialsProvider ( software . amazon . awssdk . protocol . tests . crc32 . AwsJsonCrc32ChecksumTests . FAKE_CREDENTIALS_PROVIDER ) . region ( Region . US_EAST_1 ) . endpointOverride ( java . net . URI . create ( ( "http://localhost:" + ( mockServer . port ( ) ) ) ) ) . build ( ) ; software . amazon . awssdk . services . protocoljsonrpc . model . AllTypesResponse result = jsonRpc . allTypes ( software . amazon . awssdk . services . protocoljsonrpc . model . AllTypesRequest . builder ( ) . build ( ) ) ; "<AssertPlaceHolder>" ; } stringMember ( ) { return stringMember ; }
|
org . junit . Assert . assertEquals ( "foo" , result . stringMember ( ) )
|
testCreateDisplayNameFristNull ( ) { org . sagebionetworks . repo . model . auth . NewUser user = new org . sagebionetworks . repo . model . auth . NewUser ( ) ; user . setFirstName ( null ) ; user . setLastName ( "Last" ) ; java . lang . String result = org . sagebionetworks . repo . manager . principal . NewUserUtils . createDisplayName ( user ) ; "<AssertPlaceHolder>" ; } createDisplayName ( org . sagebionetworks . repo . model . auth . NewUser ) { java . lang . StringBuilder builder = new java . lang . StringBuilder ( ) ; if ( ( newUser . getFirstName ( ) ) != null ) { builder . append ( newUser . getFirstName ( ) . trim ( ) ) ; } if ( ( newUser . getLastName ( ) ) != null ) { if ( ( newUser . getFirstName ( ) ) != null ) { builder . append ( "<sp>" ) ; } builder . append ( newUser . getLastName ( ) ) ; } return builder . toString ( ) ; }
|
org . junit . Assert . assertEquals ( "Last" , result )
|
generateNewId_Service_CopyToLong ( ) { java . lang . String baseId = org . oscm . test . BaseAdmUmTest . TOO_LONG_ID ; sd . setServiceId ( baseId ) ; gen = new org . oscm . ui . generator . IdGenerator ( org . oscm . ui . generator . IdGeneratorTest . COPY_OF_PREFIX , sd , org . oscm . ui . generator . IdGeneratorTest . excludeService ( baseId , services ) ) ; java . lang . String expected = ( ( org . oscm . ui . generator . IdGeneratorTest . COPY_OF_PREFIX ) + baseId ) . substring ( 0 , ADMValidator . LENGTH_ID ) ; java . lang . String genId = gen . generateNewId ( ) ; "<AssertPlaceHolder>" ; } generateNewId ( ) { java . lang . String temp = ( ( prefix ) + ( baseId ) ) . trim ( ) ; if ( ( temp . length ( ) ) > ( org . oscm . validator . ADMValidator . LENGTH_ID ) ) { temp = temp . substring ( 0 , ADMValidator . LENGTH_ID ) ; } java . lang . String template = temp + "%s" ; if ( ( temp . length ( ) ) > ( ( org . oscm . validator . ADMValidator . LENGTH_ID ) - 7 ) ) { template = ( template . substring ( 0 , ( ( org . oscm . validator . ADMValidator . LENGTH_ID ) - 7 ) ) ) + "%s" ; } for ( int index = 2 ; ( index < ( Short . MAX_VALUE ) ) && ( excludedIds . contains ( temp ) ) ; index ++ ) { temp = java . lang . String . format ( template , ( ( "(" + index ) + ")" ) ) ; } return temp ; }
|
org . junit . Assert . assertEquals ( expected , genId )
|
shouldNotAllowMultipleRepositoriesWithSameRootDirectory ( ) { try ( org . jboss . forge . furnace . Furnace f = new org . jboss . forge . furnace . impl . FurnaceImpl ( ) ) { org . jboss . forge . furnace . repositories . AddonRepository repo1 = f . addRepository ( AddonRepositoryMode . IMMUTABLE , new java . io . File ( "target" ) ) ; org . jboss . forge . furnace . repositories . AddonRepository repo2 = f . addRepository ( new org . jboss . forge . furnace . FurnaceImplTest . TestAddonRepository ( new java . io . File ( "target" ) ) ) ; "<AssertPlaceHolder>" ; } } addRepository ( org . jboss . forge . furnace . repositories . AddonRepository ) { this . repositories . add ( repository ) ; }
|
org . junit . Assert . assertEquals ( repo1 , repo2 )
|
forumReplyOnlyShouldDeny ( ) { when ( userSession . getRoleManager ( ) ) . thenReturn ( roleManager ) ; when ( request . getParameter ( "forumId" ) ) . thenReturn ( "1" ) ; when ( userSession . isLogged ( ) ) . thenReturn ( true ) ; when ( roleManager . isForumAllowed ( 1 ) ) . thenReturn ( true ) ; when ( roleManager . isForumReadOnly ( 1 ) ) . thenReturn ( false ) ; when ( roleManager . isForumReplyOnly ( 1 ) ) . thenReturn ( true ) ; when ( repository . get ( 1 ) ) . thenReturn ( new net . jforum . entities . Forum ( ) ) ; net . jforum . security . CreateNewTopicRule rule = new net . jforum . security . CreateNewTopicRule ( repository , sessionManager ) ; "<AssertPlaceHolder>" ; } shouldProceed ( net . jforum . entities . UserSession , javax . servlet . http . HttpServletRequest ) { int userId = this . findUserId ( request ) ; boolean logged = userSession . isLogged ( ) ; if ( ! logged ) { return false ; } net . jforum . entities . User currentUser = userSession . getUser ( ) ; if ( ( currentUser . getId ( ) ) == userId ) { return true ; } net . jforum . entities . User user = userRepository . get ( userId ) ; return userSession . getRoleManager ( ) . getCanEditUser ( user , currentUser . getGroups ( ) ) ; }
|
org . junit . Assert . assertFalse ( rule . shouldProceed ( userSession , request ) )
|
moveCollection ( ) { final org . exist . storage . BrokerPool brokerPool = existEmbeddedServer . getBrokerPool ( ) ; final org . exist . storage . lock . LockTable lockTable = brokerPool . getLockManager ( ) . getLockTable ( ) ; lockTable . setTraceStackDepth ( org . exist . storage . NativeBrokerLockingTest . TRACE_STACK_DEPTH ) ; final org . exist . storage . NativeBrokerLockingTest . LockSymmetryListener lockSymmetryListener = new org . exist . storage . NativeBrokerLockingTest . LockSymmetryListener ( ) ; boolean registered = false ; try { try ( final org . exist . storage . DBBroker broker = brokerPool . get ( java . util . Optional . of ( brokerPool . getSecurityManager ( ) . getSystemSubject ( ) ) ) ; final org . exist . storage . txn . Txn transaction = brokerPool . getTransactionManager ( ) . beginTransaction ( ) ) { final org . exist . collections . Collection collectionA = broker . getCollection ( org . exist . storage . NativeBrokerLockingTest . COLLECTION_A ) ; final org . exist . collections . Collection collectionB = broker . getCollection ( org . exist . storage . NativeBrokerLockingTest . COLLECTION_B ) ; lockTable . registerListener ( lockSymmetryListener ) ; while ( ! ( lockSymmetryListener . isRegistered ( ) ) ) ; registered = true ; broker . moveCollection ( transaction , collectionA , collectionB , org . exist . xmldb . XmldbURI . create ( "colA" ) ) ; transaction . commit ( ) ; } } finally { if ( registered ) { lockTable . deregisterListener ( lockSymmetryListener ) ; } } while ( lockSymmetryListener . isRegistered ( ) ) { } "<AssertPlaceHolder>" ; } isSymmetrical ( ) { return ( ! ( error . get ( ) ) ) && ( events . empty ( ) ) ; }
|
org . junit . Assert . assertTrue ( lockSymmetryListener . isSymmetrical ( ) )
|
testStorageMemoryReplication3 ( ) { net . tomp2p . dht . StorageMemory storageMemoryReplication = new net . tomp2p . dht . StorageMemory ( ) ; net . tomp2p . peers . Number160 testLoc = net . tomp2p . peers . Number160 . createHash ( "test1" ) ; net . tomp2p . peers . Number160 testPer = net . tomp2p . peers . Number160 . createHash ( "test2" ) ; storageMemoryReplication . updateResponsibilities ( testLoc , testPer ) ; "<AssertPlaceHolder>" ; } findContentForResponsiblePeerID ( net . tomp2p . peers . Number160 ) { return responsibilityMapRev . get ( peerID ) ; }
|
org . junit . Assert . assertEquals ( testLoc , storageMemoryReplication . findContentForResponsiblePeerID ( testPer ) . iterator ( ) . next ( ) )
|
testValidateNoError ( ) { factory . setValidateConnection ( true ) ; org . springframework . data . redis . connection . RedisConnection conn2 = factory . getConnection ( ) ; "<AssertPlaceHolder>" ; } getNativeConnection ( ) { return delegate . getNativeConnection ( ) ; }
|
org . junit . Assert . assertSame ( connection . getNativeConnection ( ) , conn2 . getNativeConnection ( ) )
|
testJsonArrayGetObject ( ) { io . vertx . core . json . JsonObject expected = new io . vertx . core . json . JsonObject ( ) . put ( "foo" , "bar" ) ; io . vertx . codetrans . JsonTest . array = new io . vertx . core . json . JsonArray ( ) . add ( expected ) ; runAllExcept ( "json/JsArray" , "getObject" , io . vertx . codetrans . lang . scala . ScalaLang . class , ( ) -> { "<AssertPlaceHolder>" ; } ) ; } runAllExcept ( java . lang . String , java . lang . String , io . vertx . codetrans . Map , java . lang . Class , java . lang . Runnable ) { runAll ( path , method , globals , io . vertx . codetrans . Collections . singletonList ( except ) , after ) ; }
|
org . junit . Assert . assertEquals ( "bar" , io . vertx . codetrans . JsonTest . o )
|
factoryThrowsOnEnrollmentGroupManagerFail ( ) { new mockit . NonStrictExpectations ( ) { { com . microsoft . azure . sdk . iot . provisioning . service . auth . ProvisioningConnectionStringBuilder . createConnectionString ( tests . unit . com . microsoft . azure . sdk . iot . provisioning . service . ProvisioningServiceClientTest . PROVISIONING_CONNECTION_STRING ) ; result = mockedProvisioningConnectionString ; times = 1 ; com . microsoft . azure . sdk . iot . provisioning . service . contract . ContractApiHttp . createFromConnectionString ( mockedProvisioningConnectionString ) ; result = mockedContractApiHttp ; times = 1 ; mockit . Deencapsulation . invoke ( tests . unit . com . microsoft . azure . sdk . iot . provisioning . service . IndividualEnrollmentManager . class , "createFromContractApiHttp" , mockedContractApiHttp ) ; result = mockedIndividualEnrollmentManager ; times = 1 ; mockit . Deencapsulation . invoke ( tests . unit . com . microsoft . azure . sdk . iot . provisioning . service . EnrollmentGroupManager . class , "createFromContractApiHttp" , mockedContractApiHttp ) ; result = new java . lang . IllegalArgumentException ( ) ; times = 1 ; } } ; tests . unit . com . microsoft . azure . sdk . iot . provisioning . service . ProvisioningServiceClient provisioningServiceClient = tests . unit . com . microsoft . azure . sdk . iot . provisioning . service . ProvisioningServiceClient . createFromConnectionString ( tests . unit . com . microsoft . azure . sdk . iot . provisioning . service . ProvisioningServiceClientTest . PROVISIONING_CONNECTION_STRING ) ; "<AssertPlaceHolder>" ; } createFromConnectionString ( java . lang . String ) { return new com . microsoft . azure . sdk . iot . provisioning . service . ProvisioningServiceClient ( connectionString ) ; }
|
org . junit . Assert . assertNotNull ( provisioningServiceClient )
|
shouldReturnFalseIfPlayerDoesNotExist ( ) { java . lang . String playerName = "bobby" ; java . lang . String clearTextPass = "tables" ; given ( dataSource . getPassword ( playerName ) ) . willReturn ( null ) ; boolean result = passwordSecurity . comparePassword ( clearTextPass , playerName ) ; "<AssertPlaceHolder>" ; verify ( dataSource ) . getPassword ( playerName ) ; verify ( method , never ( ) ) . comparePassword ( anyString ( ) , any ( fr . xephi . authme . security . crypts . HashedPassword . class ) , anyString ( ) ) ; } comparePassword ( java . lang . String , java . lang . String ) { fr . xephi . authme . security . crypts . HashedPassword auth = dataSource . getPassword ( playerName ) ; return ( auth != null ) && ( comparePassword ( password , auth , playerName ) ) ; }
|
org . junit . Assert . assertThat ( result , org . hamcrest . Matchers . equalTo ( false ) )
|
testBindingsAreBoundOnlyWhenEvaluation ( ) { com . hortonworks . streamline . streams . runtime . script . engine . GroovyScriptEngine groovyScriptEngine = new com . hortonworks . streamline . streams . runtime . script . engine . GroovyScriptEngine ( ) ; java . lang . String groovyExpression = "temperature<sp>><sp>10<sp>&&<sp>humidity<sp><<sp>30" ; com . hortonworks . streamline . streams . runtime . script . GroovyScript < java . lang . Boolean > groovyScript = new com . hortonworks . streamline . streams . runtime . script . GroovyScript < java . lang . Boolean > ( groovyExpression , groovyScriptEngine ) ; java . util . HashMap < java . lang . String , java . lang . Object > fieldsAndValue = new java . util . HashMap ( ) ; fieldsAndValue . put ( "temperature" , 20 ) ; fieldsAndValue . put ( "humidity" , 10 ) ; try { "<AssertPlaceHolder>" ; } catch ( javax . script . ScriptException e ) { e . printStackTrace ( ) ; org . junit . Assert . fail ( "It<sp>shouldn't<sp>throw<sp>ScriptException" ) ; } fieldsAndValue . clear ( ) ; fieldsAndValue . put ( "no_related_field" , 3 ) ; try { groovyScript . evaluate ( com . hortonworks . streamline . streams . common . StreamlineEventImpl . builder ( ) . fieldsAndValues ( fieldsAndValue ) . dataSourceId ( "1" ) . build ( ) ) ; org . junit . Assert . fail ( "It<sp>should<sp>not<sp>evaluate<sp>correctly" ) ; } catch ( javax . script . ScriptException e ) { } } evaluate ( java . lang . Integer ) { return i > 0 ; }
|
org . junit . Assert . assertTrue ( groovyScript . evaluate ( com . hortonworks . streamline . streams . common . StreamlineEventImpl . builder ( ) . fieldsAndValues ( fieldsAndValue ) . dataSourceId ( "1" ) . build ( ) ) )
|
testWStringArrayTypeNodeLengthArg ( ) { java . lang . String [ ] array = new java . lang . String [ ] { "one" , "two" } ; java . lang . String expected = "" ; for ( java . lang . String s : array ) { expected += "<string>" ; expected += s ; expected += "</string>" ; } java . lang . String actual = new org . apache . nifi . processors . evtx . parser . bxml . value . WStringArrayTypeNode ( testBinaryReaderBuilder . putWString ( java . lang . String . join ( "
|
org . junit . Assert . assertEquals ( expected , actual )
|
testFlushOnBatchTimeout ( ) { org . apache . metron . common . system . FakeClock clock = new org . apache . metron . common . system . FakeClock ( ) ; bulkMessageWriterBolt = bulkMessageWriterBolt . withBatchTimeoutDivisor ( 3 ) ; bulkMessageWriterBolt . setCuratorFramework ( client ) ; bulkMessageWriterBolt . setZKCache ( cache ) ; bulkMessageWriterBolt . getConfigurations ( ) . updateSensorIndexingConfig ( sensorType , new java . io . FileInputStream ( sampleSensorIndexingConfigPath ) ) ; { bulkMessageWriterBolt . declareOutputFields ( declarer ) ; verify ( declarer , times ( 1 ) ) . declareStream ( eq ( "error" ) , argThat ( new org . apache . metron . writer . bolt . FieldsMatcher ( "message" ) ) ) ; } { java . util . Map stormConf = new java . util . HashMap ( ) ; when ( bulkMessageWriter . getName ( ) ) . thenReturn ( "elasticsearch" ) ; bulkMessageWriterBolt . prepare ( stormConf , topologyContext , outputCollector , clock ) ; verify ( bulkMessageWriter , times ( 1 ) ) . init ( eq ( stormConf ) , any ( org . apache . storm . task . TopologyContext . class ) , any ( org . apache . metron . common . configuration . writer . WriterConfiguration . class ) ) ; } { int batchTimeout = bulkMessageWriterBolt . getMaxBatchTimeout ( ) ; "<AssertPlaceHolder>" ; for ( int i = 0 ; i < 4 ; i ++ ) { bulkMessageWriterBolt . execute ( tupleList . get ( i ) ) ; verify ( bulkMessageWriter , times ( 0 ) ) . write ( eq ( sensorType ) , any ( org . apache . metron . common . configuration . writer . WriterConfiguration . class ) , any ( java . util . List . class ) ) ; } clock . elapseSeconds ( 5 ) ; org . apache . metron . common . writer . BulkWriterResponse response = new org . apache . metron . common . writer . BulkWriterResponse ( ) ; response . addAllSuccesses ( messageIdList ) ; when ( bulkMessageWriter . write ( eq ( sensorType ) , any ( org . apache . metron . common . configuration . writer . WriterConfiguration . class ) , eq ( messageList ) ) ) . thenReturn ( response ) ; bulkMessageWriterBolt . execute ( tupleList . get ( 4 ) ) ; verify ( bulkMessageWriter , times ( 1 ) ) . write ( eq ( sensorType ) , any ( org . apache . metron . common . configuration . writer . WriterConfiguration . class ) , eq ( messageList ) ) ; tupleList . forEach ( ( tuple ) -> verify ( org . apache . metron . writer . bolt . outputCollector , times ( 1 ) ) . ack ( tuple ) ) ; } verifyNoMoreInteractions ( outputCollector ) ; } getMaxBatchTimeout ( ) { if ( ! ( initialized ) ) { this . init ( ) ; } return maxBatchTimeoutAllowedSecs ; }
|
org . junit . Assert . assertEquals ( 4 , batchTimeout )
|
testImmutability ( ) { java . util . Date d = new java . util . Date ( 20L ) ; org . jfree . data . time . FixedMillisecond fm = new org . jfree . data . time . FixedMillisecond ( d ) ; d . setTime ( 22L ) ; "<AssertPlaceHolder>" ; } getFirstMillisecond ( ) { return this . firstMillisecond ; }
|
org . junit . Assert . assertEquals ( 20L , fm . getFirstMillisecond ( ) )
|
testSetAny ( ) { customProperty . setAny ( element ) ; "<AssertPlaceHolder>" ; } getAny ( ) { return any ; }
|
org . junit . Assert . assertEquals ( element , customProperty . getAny ( ) )
|
testAggregateChecksums ( ) { java . util . Map < java . lang . String , java . lang . String > checksums = new java . util . LinkedHashMap < java . lang . String , java . lang . String > ( ) ; checksums . put ( "jcr:content/foo" , "1234" ) ; checksums . put ( "jcr:content/bar" , "5678,9012" ) ; java . lang . String expected = org . apache . commons . codec . digest . DigestUtils . sha1Hex ( "jcr:content/foo=1234jcr:content/bar=5678,9012" ) ; java . lang . String actual = checksumGenerator . aggregateChecksums ( checksums ) ; "<AssertPlaceHolder>" ; } aggregateChecksums ( java . util . Map ) { if ( checksums . isEmpty ( ) ) { return null ; } java . lang . StringBuilder data = new java . lang . StringBuilder ( ) ; for ( Map . Entry < java . lang . String , java . lang . String > entry : checksums . entrySet ( ) ) { data . append ( ( ( ( entry . getKey ( ) ) + "=" ) + ( entry . getValue ( ) ) ) ) ; } return org . apache . commons . codec . digest . DigestUtils . sha1Hex ( data . toString ( ) ) ; }
|
org . junit . Assert . assertEquals ( expected , actual )
|