input stringlengths 28 18.7k | output stringlengths 39 1.69k |
|---|---|
testIsClusterPreviewCommandAvailableForNoAssignments ( ) { when ( context . isFocusOnClusterBuild ( ) ) . thenReturn ( true ) ; org . springframework . test . util . ReflectionTestUtils . setField ( clusterCommands , "hostGroups" , singletonMap ( "group1" , emptyList ( ) ) ) ; boolean result = clusterCommands . isClusterPreviewCommandAvailable ( ) ; "<AssertPlaceHolder>" ; } isClusterPreviewCommandAvailable ( ) { return ( context . isFocusOnClusterBuild ( ) ) && ( isHostAssigned ( ) ) ; } | org . junit . Assert . assertFalse ( result ) |
testSimpleFlag ( ) { org . evosuite . Properties . TARGET_CLASS = com . examples . with . different . packagename . FlagExample1 . class . getCanonicalName ( ) ; java . lang . Class < ? > originalClass = org . evosuite . instrumentation . testability . TestTestabilityTransformation . defaultClassloader . loadClass ( com . examples . with . different . packagename . FlagExample1 . class . getCanonicalName ( ) ) ; java . lang . Class < ? > instrumentedClass = org . evosuite . instrumentation . testability . TestTestabilityTransformation . instrumentingClassloader . loadClass ( com . examples . with . different . packagename . FlagExample1 . class . getCanonicalName ( ) ) ; java . lang . Object originalInstance = originalClass . newInstance ( ) ; java . lang . Object instrumentedInstance = instrumentedClass . newInstance ( ) ; java . lang . reflect . Method originalMethod = originalClass . getMethod ( "testMe" , new java . lang . Class < ? > [ ] { int . class } ) ; java . lang . reflect . Method instrumentedMethod = instrumentedClass . getMethod ( "testMe" , new java . lang . Class < ? > [ ] { int . class } ) ; boolean originalResult = ( ( java . lang . Boolean ) ( originalMethod . invoke ( originalInstance , 0 ) ) ) ; boolean instrumentedResult = ( ( java . lang . Integer ) ( instrumentedMethod . invoke ( instrumentedInstance , 0 ) ) ) > 0 ; "<AssertPlaceHolder>" ; } getMethod ( java . lang . Class , java . lang . String ) { java . util . Set < java . lang . reflect . Method > methods = org . evosuite . setup . TestClusterUtils . getMethods ( clazz ) ; for ( java . lang . reflect . Method m : methods ) { if ( m . getName ( ) . equals ( name ) ) return m ; } org . junit . Assert . fail ( ( "No<sp>such<sp>method:<sp>" + name ) ) ; return null ; } | org . junit . Assert . assertEquals ( originalResult , instrumentedResult ) |
checkBatchMode_when_onCascadeSet ( ) { io . ebean . TxScope scope = new io . ebean . TxScope ( ) ; scope . setBatchSize ( 100 ) ; scope . setBatchOnCascade ( PersistBatch . ALL ) ; scope . checkBatchMode ( ) ; "<AssertPlaceHolder>" ; } getBatch ( ) { return batch ; } | org . junit . Assert . assertNull ( scope . getBatch ( ) ) |
testSuperclassDoesntSaveOverSubclass ( ) { com . github . droidfu . cachefu . ModelCache modelCache = new com . github . droidfu . cachefu . ModelCache ( 2 , 1 , 1 ) ; java . lang . String id = "123" ; com . github . droidfu . cachefu . SuperclassCachedObjectTest . SubclassTestObject subclassTestObject = new com . github . droidfu . cachefu . SuperclassCachedObjectTest . SubclassTestObject ( id ) ; subclassTestObject . setTestString ( "this<sp>is<sp>a<sp>test" ) ; subclassTestObject . setExtraString ( "yet<sp>another<sp>test" ) ; subclassTestObject . save ( modelCache ) ; com . github . droidfu . cachefu . SuperclassCachedObjectTest . TestObject testObject = new com . github . droidfu . cachefu . SuperclassCachedObjectTest . TestObject ( id ) ; testObject . setTestString ( "quite<sp>a<sp>few<sp>really" ) ; testObject . save ( modelCache ) ; subclassTestObject . reload ( modelCache ) ; "<AssertPlaceHolder>" ; } getTestString ( ) { return testString ; } | org . junit . Assert . assertEquals ( "this<sp>is<sp>a<sp>test" , subclassTestObject . getTestString ( ) ) |
returnsDefaultValueWhenListOfPathSegmentsIsEmpty ( ) { when ( parameter . getDefaultValue ( ) ) . thenReturn ( "default<sp>value" ) ; when ( typeProducer . createValue ( eq ( "foo" ) , emptyMultivaluedMap ( ) , eq ( "default<sp>value" ) ) ) . thenReturn ( "default<sp>value" ) ; when ( applicationContext . getUriInfo ( ) . getPathSegments ( true ) ) . thenReturn ( newArrayList ( ) ) ; java . lang . Object resolvedMatrixParameter = matrixParameterResolver . resolve ( parameter , applicationContext ) ; "<AssertPlaceHolder>" ; } resolve ( org . everrest . core . impl . method . org . everrest . core . Parameter , org . everrest . core . ApplicationContext ) { java . lang . String param = matrixParam . value ( ) ; org . everrest . core . method . TypeProducer typeProducer = typeProducerFactory . createTypeProducer ( parameter . getParameterClass ( ) , parameter . getGenericType ( ) ) ; java . util . List < javax . ws . rs . core . PathSegment > pathSegments = context . getUriInfo ( ) . getPathSegments ( ( ! ( parameter . isEncoded ( ) ) ) ) ; javax . ws . rs . core . PathSegment pathSegment = com . google . common . collect . Iterables . getLast ( pathSegments , null ) ; return typeProducer . createValue ( param , ( pathSegment == null ? new javax . ws . rs . core . MultivaluedHashMap ( ) : pathSegment . getMatrixParameters ( ) ) , parameter . getDefaultValue ( ) ) ; } | org . junit . Assert . assertEquals ( "default<sp>value" , resolvedMatrixParameter ) |
Memcached ( ) { java . lang . String cellName = "cellCacheTestCell" ; java . util . Map < java . lang . String , java . lang . Object > cell = new java . util . HashMap < java . lang . String , java . lang . Object > ( ) ; java . lang . reflect . Constructor < com . fujitsu . dc . core . utils . MemcachedClient > c = com . fujitsu . dc . core . utils . MemcachedClient . class . getDeclaredConstructor ( ) ; c . setAccessible ( true ) ; com . fujitsu . dc . core . utils . MemcachedClient mockMemcachedClient = spy ( c . newInstance ( ) ) ; doReturn ( cell ) . when ( mockMemcachedClient ) . get ( anyString ( ) , any ( java . lang . Class . class ) ) ; org . powermock . api . mockito . PowerMockito . spy ( com . fujitsu . dc . core . model . impl . es . cache . CellCache . class ) ; org . powermock . api . mockito . PowerMockito . when ( com . fujitsu . dc . core . model . impl . es . cache . CellCache . class , "getMcdClient" ) . thenReturn ( mockMemcachedClient ) ; org . powermock . api . mockito . PowerMockito . spy ( com . fujitsu . dc . core . DcCoreConfig . class ) ; org . powermock . api . mockito . PowerMockito . when ( com . fujitsu . dc . core . DcCoreConfig . class , "isCellCacheEnabled" ) . thenReturn ( true ) ; java . util . Map < java . lang . String , java . lang . Object > cache = com . fujitsu . dc . core . model . impl . es . cache . CellCache . get ( cellName ) ; "<AssertPlaceHolder>" ; } get ( java . lang . String ) { if ( ! ( com . fujitsu . dc . core . DcCoreConfig . isCellCacheEnabled ( ) ) ) { return null ; } try { @ com . fujitsu . dc . core . model . impl . es . cache . SuppressWarnings ( "unchecked" ) java . util . Map < java . lang . String , java . lang . Object > cache = com . fujitsu . dc . core . model . impl . es . cache . CellCache . getMcdClient ( ) . get ( com . fujitsu . dc . core . model . impl . es . cache . CellCache . cacheKey ( cellName ) , java . util . Map . class ) ; return cache ; } catch ( com . fujitsu . dc . core . 
utils . MemcachedClient . MemcachedClientException e ) { com . fujitsu . dc . core . model . impl . es . cache . CellCache . log . info ( "Failed<sp>to<sp>get<sp>CellCache." ) ; return null ; } } | org . junit . Assert . assertEquals ( cell , cache ) |
testReportDelay ( ) { final com . net2plan . interfaces . networkDesign . IReport report = new com . net2plan . examples . ocnbook . reports . Report_delay ( ) ; java . util . Map < java . lang . String , java . util . List < java . lang . String > > testingParameters = new java . util . HashMap ( ) ; java . util . List < java . util . Map < java . lang . String , java . lang . String > > testsParam = com . net2plan . utils . InputParameter . getCartesianProductOfParameters ( testingParameters ) ; if ( testsParam . isEmpty ( ) ) testsParam = java . util . Arrays . asList ( com . net2plan . utils . InputParameter . getDefaultParameters ( report . getParameters ( ) ) ) ; for ( java . util . Map < java . lang . String , java . lang . String > params : testsParam ) { java . util . Map < java . lang . String , java . lang . String > paramsUsedToCall = com . net2plan . utils . InputParameter . getDefaultParameters ( report . getParameters ( ) ) ; paramsUsedToCall . putAll ( params ) ; java . lang . String result = report . executeReport ( np , paramsUsedToCall , com . google . common . collect . ImmutableMap . of ( "precisionFactor" , "0.0001" ) ) ; "<AssertPlaceHolder>" ; } } of ( T , java . lang . String ) { return new com . net2plan . gui . utils . StringLabeller < T > ( object , label , true ) ; } | org . junit . Assert . assertTrue ( ( ( result . length ( ) ) > 100 ) ) |
testGetNumberFormat ( ) { "<AssertPlaceHolder>" ; } getNumberFormat ( ) { return numberFormat ; } | org . junit . Assert . assertNotNull ( context . getNumberFormat ( ) ) |
testKieSession ( ) { org . kie . api . runtime . KieSession ksession = ( ( org . kie . api . runtime . KieSession ) ( org . kie . spring . tests . KieSpringExpressionTest . context . getBean ( "ksession2" ) ) ) ; "<AssertPlaceHolder>" ; } | org . junit . Assert . assertNotNull ( ksession ) |
testUserDetails ( ) { final com . codeabovelab . dm . common . security . ExtendedUserDetailsImpl . Builder b = new com . codeabovelab . dm . common . security . ExtendedUserDetailsImpl . Builder ( ) ; b . setUsername ( "one" ) ; b . setPassword ( "secret" ) ; b . setTitle ( "test<sp>user" ) ; b . setEmail ( "e@e.e" ) ; b . setEnabled ( true ) ; b . setAccountNonLocked ( true ) ; b . setAccountNonExpired ( true ) ; b . setCredentialsNonExpired ( true ) ; b . setTenant ( "34l" ) ; b . setAuthorities ( java . util . Arrays . asList ( new com . codeabovelab . dm . common . security . GrantedAuthorityImpl ( "ga1" , "3l" ) , new com . codeabovelab . dm . common . security . GrantedAuthorityImpl ( "ga2" , "6l" ) ) ) ; com . codeabovelab . dm . common . security . ExtendedUserDetailsImpl expected = b . build ( ) ; java . lang . String res = mapper . writeValueAsString ( expected ) ; com . codeabovelab . dm . common . security . ExtendedUserDetailsImpl actual = mapper . readValue ( res , com . codeabovelab . dm . common . security . ExtendedUserDetailsImpl . class ) ; "<AssertPlaceHolder>" ; } build ( ) { return new com . codeabovelab . dm . mail . dto . MailHeadImpl ( this ) ; } | org . junit . Assert . assertEquals ( expected , actual ) |
testBuildWithComplicated ( ) { java . lang . String query = "SELECT<sp>FROM<sp>SMTH<sp>%s" ; java . lang . String condition = "condition<sp>" ; java . lang . Object parameter = new java . lang . Object ( ) ; java . lang . String condition1 = "condition1<sp>" ; java . lang . Object nullParameter = null ; java . lang . String condition2 = "condition2<sp>" ; java . lang . Object parameter2 = "fdsfds" ; java . lang . String condition3 = "condition3<sp>" ; java . lang . Object parameter3 = "qwrvcvds" ; java . lang . String expected = "SELECT<sp>FROM<sp>SMTH<sp>WHERE<sp>(<sp>condition<sp>)<sp>AND<sp>condition2<sp>AND<sp>condition3<sp>" ; java . lang . String actual = org . lnu . is . dao . builder . BaseQueryBuilder . getInstance ( query ) . where ( ) . openBracket ( ) . addOrCondition ( condition , parameter ) . addOrCondition ( condition1 , nullParameter ) . closeBracket ( ) . addAndCondition ( condition2 , parameter2 ) . addAndCondition ( condition3 , parameter3 ) . build ( ) ; "<AssertPlaceHolder>" ; } build ( ) { return getFinalQuery ( ) ; } | org . junit . Assert . assertEquals ( expected , actual ) |
testReproducibleIds ( ) { java . lang . String id1 = idGenerator . createId ( "x" ) ; java . lang . String id2 = new org . eclipse . swt . internal . widgets . IdGenerator ( ) . createId ( "x" ) ; "<AssertPlaceHolder>" ; } createId ( java . lang . Object ) { if ( object instanceof org . eclipse . swt . widgets . Display ) { return "w1" ; } return ( getPrefix ( object ) ) + ( sequence . incrementAndGet ( ) ) ; } | org . junit . Assert . assertEquals ( id1 , id2 ) |
testSeekHeadSize ( ) { org . jcodec . containers . mkv . SeekHeadFactory a = new org . jcodec . containers . mkv . SeekHeadFactory ( ) ; a . add ( org . jcodec . containers . mkv . SeekHeadFactoryTest . createFakeElement ( Info . id , ( ( 255 - 4 ) - 2 ) ) ) ; a . add ( org . jcodec . containers . mkv . SeekHeadFactoryTest . createFakeElement ( Tracks . id , ( ( 65285 - 4 ) - 2 ) ) ) ; a . add ( org . jcodec . containers . mkv . SeekHeadFactoryTest . createFakeElement ( Cues . id , ( ( 16711679 - 4 ) - 3 ) ) ) ; int computeSize = a . computeSeekHeadSize ( ) ; System . out . println ( ( "SeekHeadSize:<sp>" + computeSize ) ) ; "<AssertPlaceHolder>" ; } estimateSize ( ) { int sizeEstimate = ( ( ( tokens . size ( ) ) << 3 ) + ( streams . estimateSize ( ) ) ) + 128 ; for ( java . lang . Integer stream : analyzers . keySet ( ) ) { sizeEstimate += analyzers . get ( stream ) . estimateSize ( ) ; } return sizeEstimate ; } | org . junit . Assert . assertEquals ( a . estimateSize ( ) , computeSize ) |
testGetEdmReferentialConstraintRole ( ) { try { com . sap . core . odata . processor . core . jpa . model . JPAEdmReferentialConstraintRoleTest . objJPAEdmReferentialConstraintRole . getBuilder ( ) . build ( ) ; } catch ( com . sap . core . odata . processor . api . jpa . exception . ODataJPAModelException e ) { org . junit . Assert . fail ( ( ( ( com . sap . core . odata . processor . core . jpa . common . ODataJPATestConstants . EXCEPTION_MSG_PART_1 ) + ( e . getMessage ( ) ) ) + ( com . sap . core . odata . processor . core . jpa . common . ODataJPATestConstants . EXCEPTION_MSG_PART_2 ) ) ) ; } catch ( com . sap . core . odata . processor . api . jpa . exception . ODataJPARuntimeException e ) { org . junit . Assert . fail ( ( ( ( com . sap . core . odata . processor . core . jpa . common . ODataJPATestConstants . EXCEPTION_MSG_PART_1 ) + ( e . getMessage ( ) ) ) + ( com . sap . core . odata . processor . core . jpa . common . ODataJPATestConstants . EXCEPTION_MSG_PART_2 ) ) ) ; } "<AssertPlaceHolder>" ; } getEdmReferentialConstraintRole ( ) { return currentRole ; } | org . junit . Assert . assertNotNull ( com . sap . core . odata . processor . core . jpa . model . JPAEdmReferentialConstraintRoleTest . objJPAEdmReferentialConstraintRole . getEdmReferentialConstraintRole ( ) ) |
testBuildModelView ( ) { org . eurekastreams . server . search . modelview . PersonModelView modelView = new org . eurekastreams . server . search . factories . PersonModelViewFactory ( ) . buildModelView ( ) ; "<AssertPlaceHolder>" ; } buildModelView ( ) { return new org . eurekastreams . commons . hibernate . ModelViewResultTransformerTest . MyModelView ( ) ; } | org . junit . Assert . assertNotNull ( modelView ) |
shouldReadEmptyJsonResponse ( ) { final com . fasterxml . jackson . databind . ObjectMapper mapper = new com . fasterxml . jackson . databind . ObjectMapper ( ) ; final org . apache . http . message . BasicHttpResponse httpResponse = buildResponse ( ) ; httpResponse . setEntity ( new org . apache . http . entity . StringEntity ( "" ) ) ; final org . talend . dataprep . command . DefaultsTest . Response value = org . talend . dataprep . command . Defaults . convertResponse ( mapper , org . talend . dataprep . command . DefaultsTest . Response . class ) . apply ( buildRequest ( ) , httpResponse ) ; "<AssertPlaceHolder>" ; } buildRequest ( ) { return new org . apache . http . client . methods . HttpGet ( "http://fake_url" ) ; } | org . junit . Assert . assertNull ( value ) |
testSendRecordsCorruptTimestamp ( ) { final java . lang . Long timestamp = - 3L ; createWorkerTask ( ) ; java . util . List < org . apache . kafka . connect . source . SourceRecord > records = java . util . Collections . singletonList ( new org . apache . kafka . connect . source . SourceRecord ( org . apache . kafka . connect . runtime . WorkerSourceTaskTest . PARTITION , org . apache . kafka . connect . runtime . WorkerSourceTaskTest . OFFSET , "topic" , null , org . apache . kafka . connect . runtime . WorkerSourceTaskTest . KEY_SCHEMA , org . apache . kafka . connect . runtime . WorkerSourceTaskTest . KEY , org . apache . kafka . connect . runtime . WorkerSourceTaskTest . RECORD_SCHEMA , org . apache . kafka . connect . runtime . WorkerSourceTaskTest . RECORD , timestamp ) ) ; org . easymock . Capture < org . apache . kafka . clients . producer . ProducerRecord < byte [ ] , byte [ ] > > sent = expectSendRecordAnyTimes ( ) ; org . powermock . api . easymock . PowerMock . replayAll ( ) ; org . powermock . reflect . Whitebox . setInternalState ( workerTask , "toSend" , records ) ; org . powermock . reflect . Whitebox . invokeMethod ( workerTask , "sendRecords" ) ; "<AssertPlaceHolder>" ; org . powermock . api . easymock . PowerMock . verifyAll ( ) ; } getValue ( ) { return ( ~ ( crc ) ) & 4294967295L ; } | org . junit . Assert . assertEquals ( null , sent . getValue ( ) . timestamp ( ) ) |
edgeCasesCase004 ( ) { processedUsages = usageRollupProcessor . processRecords ( allUsageRecordsInOrder , hourToRollup , lbsActiveDuringHour ) ; "<AssertPlaceHolder>" ; org . openstack . atlas . service . domain . entities . Usage actualUsage = processedUsages . get ( 0 ) ; org . openstack . atlas . usagerefactor . junit . AssertUsage . hasValues ( null , 1234 , 1234 , 50L , 0L , 100L , 0L , 0.667 , 0.0 , "2013-04-10<sp>20:23:59" , "2013-04-10<sp>20:25:00" , 3 , 1 , 0 , org . openstack . atlas . usagerefactor . CREATE_LOADBALANCER . name ( ) , 0 , true , null , actualUsage ) ; actualUsage = processedUsages . get ( 1 ) ; org . openstack . atlas . usagerefactor . junit . AssertUsage . hasValues ( null , 1234 , 1234 , 50L , 50L , 100L , 100L , 1.0 , 1.0 , "2013-04-10<sp>20:25:00" , "2013-04-10<sp>21:00:00" , 1 , 1 , 5 , org . openstack . atlas . usagerefactor . SSL_MIXED_ON . name ( ) , 0 , true , null , actualUsage ) ; } size ( ) { return size ; } | org . junit . Assert . assertEquals ( 2 , processedUsages . size ( ) ) |
bepalingVoorVolledigBericht ( ) { final nl . bzk . brp . service . maakbericht . algemeen . MaakBerichtParameters maakBerichtParameters = new nl . bzk . brp . service . maakbericht . algemeen . MaakBerichtParameters ( ) ; final nl . bzk . brp . service . maakbericht . algemeen . Berichtgegevens berichtgegevens = new nl . bzk . brp . service . maakbericht . algemeen . Berichtgegevens ( maakBerichtParameters , null , new nl . bzk . brp . service . maakbericht . algemeen . MaakBerichtPersoonInformatie ( nl . bzk . algemeenbrp . dal . domein . brp . enums . SoortSynchronisatie . VOLLEDIG_BERICHT ) , null , new nl . bzk . brp . service . maakbericht . bepaling . StatischePersoongegevens ( ) ) ; leegBepalerService . execute ( berichtgegevens ) ; "<AssertPlaceHolder>" ; } isLeegBericht ( ) { return leegBericht ; } | org . junit . Assert . assertTrue ( berichtgegevens . isLeegBericht ( ) ) |
testGlobStatusFilterWithEmptyPathResults ( ) { org . apache . hadoop . fs . Path [ ] testDirs = new org . apache . hadoop . fs . Path [ ] { getTestRootPath ( org . apache . hadoop . fs . FileContextMainOperationsBaseTest . fc , org . apache . hadoop . fs . FileContextMainOperationsBaseTest . TEST_DIR_AAA ) , getTestRootPath ( org . apache . hadoop . fs . FileContextMainOperationsBaseTest . fc , org . apache . hadoop . fs . FileContextMainOperationsBaseTest . TEST_DIR_AXA ) , getTestRootPath ( org . apache . hadoop . fs . FileContextMainOperationsBaseTest . fc , org . apache . hadoop . fs . FileContextMainOperationsBaseTest . TEST_DIR_AXX ) , getTestRootPath ( org . apache . hadoop . fs . FileContextMainOperationsBaseTest . fc , org . apache . hadoop . fs . FileContextMainOperationsBaseTest . TEST_DIR_AXX ) } ; if ( ( exists ( org . apache . hadoop . fs . FileContextMainOperationsBaseTest . fc , testDirs [ 0 ] ) ) == false ) { for ( org . apache . hadoop . fs . Path path : testDirs ) { org . apache . hadoop . fs . FileContextMainOperationsBaseTest . fc . mkdir ( path , org . apache . hadoop . fs . permission . FsPermission . getDefault ( ) , true ) ; } } org . apache . hadoop . fs . FileStatus [ ] filteredPaths = org . apache . hadoop . fs . FileContextMainOperationsBaseTest . fc . util ( ) . globStatus ( getTestRootPath ( org . apache . hadoop . fs . FileContextMainOperationsBaseTest . fc , "test/hadoop/?" ) , org . apache . hadoop . fs . FileContextMainOperationsBaseTest . DEFAULT_FILTER ) ; "<AssertPlaceHolder>" ; } getTestRootPath ( org . apache . hadoop . fs . FileContext , java . lang . String ) { return fc . makeQualified ( new org . apache . hadoop . fs . Path ( org . apache . hadoop . fs . FileContextTestHelper . TEST_ROOT_DIR , pathString ) ) ; } | org . junit . Assert . assertEquals ( 0 , filteredPaths . length ) |
testFailTaskDataSource ( ) { context . register ( org . springframework . cloud . task . jdbchdfs . common . support . JdbcHdfsDataSourceConfiguration . class ) ; org . springframework . cloud . task . jdbchdfs . common . support . JdbcHdfsDataSourceConfiguration config = context . getBean ( org . springframework . cloud . task . jdbchdfs . common . support . JdbcHdfsDataSourceConfiguration . class ) ; javax . sql . DataSource dataSource = config . taskDataSource ( ) ; "<AssertPlaceHolder>" ; connection = dataSource . getConnection ( ) ; } taskDataSource ( ) { return getDefaultDataSource ( ) ; } | org . junit . Assert . assertNotNull ( dataSource ) |
testGetInt ( ) { org . gradoop . common . model . impl . properties . PropertyValue p = org . gradoop . common . model . impl . properties . PropertyValue . create ( org . gradoop . common . model . impl . properties . INT_VAL_2 ) ; "<AssertPlaceHolder>" ; } getInt ( ) { return get ( java . lang . Integer . class ) ; } | org . junit . Assert . assertEquals ( org . gradoop . common . model . impl . properties . INT_VAL_2 , p . getInt ( ) ) |
coercedEitherThrowsOnAccessingMissingRight ( ) { java . lang . reflect . Type type = com . facebook . buck . rules . coercer . TypeCoercerTest . TestFields . class . getField ( "eitherStringSetOrStringToStringMap" ) . getGenericType ( ) ; com . facebook . buck . rules . coercer . TypeCoercer < ? > coercer = typeCoercerFactory . typeCoercerForType ( type ) ; java . util . Set < java . lang . String > inputSet = com . google . common . collect . ImmutableSet . of ( "a" , "b" , "x" ) ; com . facebook . buck . util . types . Either < ? , ? > either = ( ( com . facebook . buck . util . types . Either < ? , ? > ) ( coercer . coerce ( cellRoots , filesystem , java . nio . file . Paths . get ( "" ) , EmptyTargetConfiguration . INSTANCE , inputSet ) ) ) ; "<AssertPlaceHolder>" ; exception . expect ( com . facebook . buck . rules . coercer . RuntimeException . class ) ; either . getRight ( ) ; } getLeft ( ) { return leftColumn ; } | org . junit . Assert . assertEquals ( inputSet , either . getLeft ( ) ) |
testFloat32Array ( ) { com . eclipsesource . v8 . V8Value result = ( ( com . eclipsesource . v8 . V8Value ) ( v8 . executeScript ( "var<sp>floats<sp>=<sp>new<sp>Float32Array();<sp>floats" ) ) ) ; "<AssertPlaceHolder>" ; result . close ( ) ; } executeScript ( java . lang . String ) { return executeScript ( script , null , 0 ) ; } | org . junit . Assert . assertNotNull ( result ) |
testAddBooleanReplaceValue ( ) { v8 . add ( "foo" , true ) ; v8 . add ( "foo" , false ) ; boolean result = v8 . executeBooleanScript ( "foo" ) ; "<AssertPlaceHolder>" ; } executeBooleanScript ( java . lang . String ) { return executeBooleanScript ( script , null , 0 ) ; } | org . junit . Assert . assertFalse ( result ) |
testConditionDag ( ) { java . util . Set < io . cdap . cdap . etl . proto . Connection > connections = com . google . common . collect . ImmutableSet . of ( new io . cdap . cdap . etl . proto . Connection ( "csv" 0 , "csv" ) , new io . cdap . cdap . etl . proto . Connection ( "csv" , "c1" ) , new io . cdap . cdap . etl . proto . Connection ( "c1" , "t1" ) , new io . cdap . cdap . etl . proto . Connection ( "t1" , "agg1" ) , new io . cdap . cdap . etl . proto . Connection ( "agg1" , "agg2" ) , new io . cdap . cdap . etl . proto . Connection ( "agg2" , "sink1" ) , new io . cdap . cdap . etl . proto . Connection ( "c1" , "c2" ) , new io . cdap . cdap . etl . proto . Connection ( "c2" , "csv" 1 ) , new io . cdap . cdap . etl . proto . Connection ( "c2" , "c3" ) , new io . cdap . cdap . etl . proto . Connection ( "c3" , "sink3" ) ) ; java . util . Set < java . lang . String > conditions = new java . util . HashSet ( java . util . Arrays . asList ( "c1" , "c2" , "c3" ) ) ; java . util . Set < java . lang . String > reduceNodes = new java . util . HashSet ( java . util . Arrays . asList ( "agg1" , "agg2" ) ) ; java . util . Set < java . lang . String > isolationNodes = new java . util . HashSet ( ) ; java . util . Set < java . lang . String > multiPortNodes = new java . util . HashSet ( ) ; java . util . Set < io . cdap . cdap . etl . planner . Dag > actual = io . cdap . cdap . etl . planner . PipelinePlanner . split ( connections , conditions , reduceNodes , isolationNodes , io . cdap . cdap . etl . planner . ConnectorDagTest . EMPTY_ACTIONS , multiPortNodes , io . cdap . cdap . etl . planner . ConnectorDagTest . EMPTY_CONNECTORS ) ; io . cdap . cdap . etl . planner . Dag dag1 = new io . cdap . cdap . etl . planner . Dag ( com . google . common . collect . ImmutableSet . of ( new io . cdap . cdap . etl . proto . Connection ( "csv" 0 , "csv" ) , new io . cdap . cdap . etl . proto . Connection ( "csv" , "c1" ) ) ) ; io . cdap . cdap . etl . planner . Dag dag2 = new io . 
cdap . cdap . etl . planner . Dag ( com . google . common . collect . ImmutableSet . of ( new io . cdap . cdap . etl . proto . Connection ( "c1" , "t1" ) , new io . cdap . cdap . etl . proto . Connection ( "t1" , "agg1" ) , new io . cdap . cdap . etl . proto . Connection ( "agg1" , "agg2.connector" ) ) ) ; io . cdap . cdap . etl . planner . Dag dag3 = new io . cdap . cdap . etl . planner . Dag ( com . google . common . collect . ImmutableSet . of ( new io . cdap . cdap . etl . proto . Connection ( "agg2.connector" , "agg2" ) , new io . cdap . cdap . etl . proto . Connection ( "agg2" , "sink1" ) ) ) ; io . cdap . cdap . etl . planner . Dag dag4 = new io . cdap . cdap . etl . planner . Dag ( com . google . common . collect . ImmutableSet . of ( new io . cdap . cdap . etl . proto . Connection ( "c1" , "c2" ) ) ) ; io . cdap . cdap . etl . planner . Dag dag5 = new io . cdap . cdap . etl . planner . Dag ( com . google . common . collect . ImmutableSet . of ( new io . cdap . cdap . etl . proto . Connection ( "c2" , "csv" 1 ) ) ) ; io . cdap . cdap . etl . planner . Dag dag6 = new io . cdap . cdap . etl . planner . Dag ( com . google . common . collect . ImmutableSet . of ( new io . cdap . cdap . etl . proto . Connection ( "c2" , "c3" ) ) ) ; io . cdap . cdap . etl . planner . Dag dag7 = new io . cdap . cdap . etl . planner . Dag ( com . google . common . collect . ImmutableSet . of ( new io . cdap . cdap . etl . proto . Connection ( "c3" , "sink3" ) ) ) ; java . util . Set < io . cdap . cdap . etl . planner . Dag > expected = com . google . common . collect . ImmutableSet . of ( dag1 , dag2 , dag3 , dag4 , dag5 , dag6 , dag7 ) ; "<AssertPlaceHolder>" ; } of ( A , B ) { return new io . cdap . cdap . common . utils . ImmutablePair < > ( first , second ) ; } | org . junit . Assert . assertEquals ( actual , expected ) |
testBasicComponentScanning ( ) { org . springframework . ide . eclipse . beans . core . internal . model . BeansConfig config = new org . springframework . ide . eclipse . beans . core . internal . model . BeansConfig ( beansProject , "basic-component-scanning.xml" , IBeansConfig . Type . MANUAL ) ; org . springframework . ide . eclipse . beans . core . model . IBean bean = org . springframework . ide . eclipse . beans . core . internal . model . BeansModelUtils . getBean ( "simpleScannedBean" , config ) ; "<AssertPlaceHolder>" ; } getElementName ( ) { return elementName ; } | org . junit . Assert . assertEquals ( "simpleScannedBean" , bean . getElementName ( ) ) |
testSubAnd ( ) { net . sf . ntru . polynomial . IntegerPolynomial i1 = new net . sf . ntru . polynomial . IntegerPolynomial ( new int [ ] { 1368 , 2047 , 672 , 871 , 1662 , 1352 , 1099 , 1608 } ) ; net . sf . ntru . polynomial . IntegerPolynomial i2 = new net . sf . ntru . polynomial . IntegerPolynomial ( new int [ ] { 1729 , 1924 , 806 , 179 , 1530 , 1381 , 1695 , 60 } ) ; net . sf . ntru . polynomial . LongPolynomial2 a = new net . sf . ntru . polynomial . LongPolynomial2 ( i1 ) ; net . sf . ntru . polynomial . LongPolynomial2 b = new net . sf . ntru . polynomial . LongPolynomial2 ( i2 ) ; a . subAnd ( b , 2047 ) ; i1 . sub ( i2 ) ; i1 . modPositive ( 2048 ) ; "<AssertPlaceHolder>" ; } toIntegerPolynomial ( ) { int [ ] coeffs = new int [ N ] ; for ( int i : ones ) coeffs [ i ] = 1 ; for ( int i : negOnes ) coeffs [ i ] = - 1 ; return new net . sf . ntru . polynomial . IntegerPolynomial ( coeffs ) ; } | org . junit . Assert . assertArrayEquals ( a . toIntegerPolynomial ( ) . coeffs , i1 . coeffs ) |
testRemap ( ) { org . apache . accumulo . core . data . TableId tableId = org . apache . accumulo . core . data . TableId . of ( "3" ) ; java . util . SortedMap < org . apache . accumulo . core . dataImpl . KeyExtent , org . apache . accumulo . core . clientImpl . bulk . Bulk . Files > mapping = generateMapping ( tableId ) ; java . util . SortedMap < org . apache . accumulo . core . dataImpl . KeyExtent , org . apache . accumulo . core . clientImpl . bulk . Bulk . Files > newNameMapping = new java . util . TreeMap ( ) ; java . util . Map < java . lang . String , java . lang . String > nameMap = new java . util . HashMap ( ) ; mapping . forEach ( ( extent , files ) -> { org . apache . accumulo . core . clientImpl . bulk . Bulk . Files newFiles = new org . apache . accumulo . core . clientImpl . bulk . Bulk . Files ( ) ; files . forEach ( ( fi ) -> { newFiles . add ( new org . apache . accumulo . core . clientImpl . bulk . Bulk . FileInfo ( ( "N" + fi . name ) , fi . estSize , fi . estEntries ) ) ; nameMap . put ( fi . name , ( "N" + fi . name ) ) ; } ) ; newNameMapping . put ( extent , newFiles ) ; } ) ; java . io . ByteArrayOutputStream mappingBaos = new java . io . ByteArrayOutputStream ( ) ; java . io . ByteArrayOutputStream nameBaos = new java . io . ByteArrayOutputStream ( ) ; org . apache . accumulo . core . clientImpl . bulk . BulkSerialize . writeRenameMap ( nameMap , "/some/dir" , ( p ) -> nameBaos ) ; org . apache . accumulo . core . clientImpl . bulk . BulkSerialize . writeLoadMapping ( mapping , "/some/dir" , ( p ) -> mappingBaos ) ; org . apache . accumulo . core . clientImpl . bulk . BulkSerialize . Input input = ( p ) -> { if ( p . getName ( ) . equals ( Constants . BULK_LOAD_MAPPING ) ) { return new java . io . ByteArrayInputStream ( mappingBaos . toByteArray ( ) ) ; } else if ( p . getName ( ) . equals ( Constants . BULK_RENAME_FILE ) ) { return new java . io . ByteArrayInputStream ( nameBaos . toByteArray ( ) ) ; } else { throw new java . lang . 
IllegalArgumentException ( ( "bad<sp>path<sp>" + p ) ) ; } } ; try ( org . apache . accumulo . core . clientImpl . bulk . LoadMappingIterator lmi = org . apache . accumulo . core . clientImpl . bulk . BulkSerialize . getUpdatedLoadMapping ( "/some/dir" , tableId , input ) ) { java . util . SortedMap < org . apache . accumulo . core . dataImpl . KeyExtent , org . apache . accumulo . core . clientImpl . bulk . Bulk . Files > actual = new java . util . TreeMap ( ) ; lmi . forEachRemaining ( ( e ) -> actual . put ( e . getKey ( ) , e . getValue ( ) ) ) ; "<AssertPlaceHolder>" ; } } getValue ( ) { return value ; } | org . junit . Assert . assertEquals ( newNameMapping , actual ) |
testAbstractDatalinkEdit ( ) { org . apache . taverna . workflowmodel . impl . AbstractDatalinkEdit edit = new org . apache . taverna . workflowmodel . impl . AbstractDatalinkEdit ( datalink ) { @ org . apache . taverna . workflowmodel . impl . Override protected void doEditAction ( org . apache . taverna . workflowmodel . impl . DatalinkImpl datalink ) throws org . apache . taverna . workflowmodel . EditException { } } ; "<AssertPlaceHolder>" ; } getSubject ( ) { return configurable ; } | org . junit . Assert . assertEquals ( datalink , edit . getSubject ( ) ) |
testAddsTransitionListenerReturnsUIConfiguration ( ) { com . eclipsesource . tabris . ui . UIConfiguration configuration = new com . eclipsesource . tabris . ui . UIConfiguration ( ) ; com . eclipsesource . tabris . ui . TransitionListener listener = mock ( com . eclipsesource . tabris . ui . TransitionListener . class ) ; com . eclipsesource . tabris . ui . UIConfiguration actualConfiguration = configuration . addTransitionListener ( listener ) ; "<AssertPlaceHolder>" ; } addTransitionListener ( com . eclipsesource . tabris . ui . TransitionListener ) { transitionListeners . add ( listener ) ; } | org . junit . Assert . assertSame ( configuration , actualConfiguration ) |
testTruncateWhenItIsNotTruncated ( ) { final java . lang . String actualTruncate = dmnCard . truncate ( "12345" , 5 ) ; final java . lang . String expectedTruncate = "12345" ; "<AssertPlaceHolder>" ; } truncate ( java . lang . String , int ) { if ( ( value . length ( ) ) > limit ) { return "..." + ( value . substring ( ( ( value . length ( ) ) - limit ) ) ) ; } return value ; } | org . junit . Assert . assertEquals ( expectedTruncate , actualTruncate ) |
testCRLPathValidationWithFakeCerts ( ) { java . security . Security . addProvider ( new org . bouncycastle . jce . provider . BouncyCastleProvider ( ) ) ; org . apache . synapse . transport . utils . sslcert . Utils utils = new org . apache . synapse . transport . utils . sslcert . Utils ( ) ; java . security . cert . X509Certificate [ ] fakeCertificates = utils . getFakeCertificateChain ( ) ; java . lang . Throwable throwable = null ; try { crlPathValidation ( fakeCertificates ) ; } catch ( org . apache . synapse . transport . utils . sslcert . CertificateVerificationException e ) { throwable = e ; } "<AssertPlaceHolder>" ; } | org . junit . Assert . assertNotNull ( throwable ) |
emptyFrameAtHeadIsWritten ( ) { initState ( io . netty . handler . codec . http2 . UniformStreamByteDistributorTest . STREAM_A , 10 , true ) ; initState ( io . netty . handler . codec . http2 . UniformStreamByteDistributorTest . STREAM_B , 0 , true ) ; initState ( io . netty . handler . codec . http2 . UniformStreamByteDistributorTest . STREAM_C , 0 , true ) ; initState ( io . netty . handler . codec . http2 . UniformStreamByteDistributorTest . STREAM_D , 10 , true ) ; "<AssertPlaceHolder>" ; verifyWrite ( io . netty . handler . codec . http2 . UniformStreamByteDistributorTest . STREAM_A , 10 ) ; verifyWrite ( io . netty . handler . codec . http2 . UniformStreamByteDistributorTest . STREAM_B , 0 ) ; verifyWrite ( io . netty . handler . codec . http2 . UniformStreamByteDistributorTest . STREAM_C , 0 ) ; verifyNoMoreInteractions ( writer ) ; } write ( java . lang . Object ) { return write ( message , io . netty . channel . group . ChannelMatchers . all ( ) ) ; } | org . junit . Assert . assertTrue ( write ( 10 ) ) |
testRegexSerialization ( ) { io . symcpe . wraith . conditions . Condition regexCondition = new io . symcpe . wraith . conditions . relational . JavaRegexCondition ( "host" , ".*check_rtsock_rc.*" ) ; io . symcpe . wraith . rules . SimpleRule rule = new io . symcpe . wraith . rules . SimpleRule ( ( ( short ) ( 1 ) ) , "test" , true , regexCondition , new io . symcpe . wraith . actions . Action [ ] { new io . symcpe . wraith . actions . alerts . AlertAction ( ( ( short ) ( 0 ) ) , "test" , "email" , "test" ) } ) ; java . lang . String jsonRule = io . symcpe . wraith . rules . RuleSerializer . serializeRuleToJSONString ( rule , false ) ; rule = io . symcpe . wraith . rules . RuleSerializer . deserializeJSONStringToRule ( jsonRule ) ; io . symcpe . wraith . Event event = new io . symcpe . wraith . TestFactory ( ) . buildEvent ( ) ; event . getHeaders ( ) . put ( "host" , "MIB2D_RTSLIB_READ_FAILURE:<sp>check_rtsock_rc:<sp>failed<sp>in<sp>reading<sp>mac_db:<sp>0<sp>(Invalid<sp>argument)" ) ; "<AssertPlaceHolder>" ; } getCondition ( ) { return condition ; } | org . junit . Assert . assertTrue ( rule . getCondition ( ) . matches ( event ) ) |
extractIndexerName ( ) { org . searchisko . api . rest . IndexerRestService tested = getTested ( ) ; java . util . Map < java . lang . String , java . lang . Object > ic = new java . util . HashMap ( ) ; try { tested . extractIndexerName ( ic , "type" ) ; org . junit . Assert . fail ( "ObjectNotFoundException<sp>expected" ) ; } catch ( javax . ejb . ObjectNotFoundException e ) { } ic . put ( ProviderService . NAME , "myName" ) ; "<AssertPlaceHolder>" ; } extractIndexerName ( java . util . Map , java . lang . String ) { java . lang . String indexerName = org . searchisko . api . util . SearchUtils . trimToNull ( ( ( java . lang . String ) ( ic . get ( ProviderService . NAME ) ) ) ) ; if ( indexerName == null ) { throw new javax . ejb . ObjectNotFoundException ( ( "Indexer<sp>name<sp>is<sp>not<sp>configured<sp>correctly<sp>for<sp>content<sp>type<sp>" + contentType ) ) ; } return indexerName ; } | org . junit . Assert . assertEquals ( "myName" , tested . extractIndexerName ( ic , "type" ) ) |
testToFullyQualifiedPathClass ( ) { final java . lang . String expected = "org/apache/commons/lang3/Test.properties" ; final java . lang . String actual = org . apache . commons . lang3 . ClassPathUtils . toFullyQualifiedPath ( org . apache . commons . lang3 . ClassPathUtils . class , "Test.properties" ) ; "<AssertPlaceHolder>" ; } toFullyQualifiedPath ( java . lang . Class , java . lang . String ) { org . apache . commons . lang3 . Validate . notNull ( context , "Parameter<sp>'%s'<sp>must<sp>not<sp>be<sp>null!" , "context" ) ; org . apache . commons . lang3 . Validate . notNull ( resourceName , "Parameter<sp>'%s'<sp>must<sp>not<sp>be<sp>null!" , "resourceName" ) ; return org . apache . commons . lang3 . ClassPathUtils . toFullyQualifiedPath ( context . getPackage ( ) , resourceName ) ; } | org . junit . Assert . assertEquals ( expected , actual ) |
testGetOutputStreamWithDirs ( ) { try { out = locationManager . getOutputStream ( "test.properties" , "test" ) ; "<AssertPlaceHolder>" ; } catch ( org . hisp . dhis . external . location . LocationManagerException ex ) { org . hisp . dhis . external . location . LocationManagerTest . log . debug ( "External<sp>directory<sp>not<sp>set" ) ; } } getOutputStream ( java . lang . String , java . lang . String [ ] ) { java . io . File file = getFileForWriting ( fileName , directories ) ; try { java . io . OutputStream out = new java . io . BufferedOutputStream ( new java . io . FileOutputStream ( file ) ) ; return out ; } catch ( java . io . FileNotFoundException ex ) { throw new org . hisp . dhis . external . location . LocationManagerException ( "Could<sp>not<sp>find<sp>file" , ex ) ; } } | org . junit . Assert . assertNotNull ( out ) |
singleletters ( ) { java . util . LinkedList < lv . semti . morphology . Testi . Word > tokens ; tokens = lv . semti . morphology . Testi . Splitting . tokenize ( lv . semti . morphology . Testi . TokenizeTest . loctjs , "bv<sp>q<sp>i" , false ) ; "<AssertPlaceHolder>" ; } size ( ) { return attributes . entrySet ( ) . size ( ) ; } | org . junit . Assert . assertEquals ( 3 , tokens . size ( ) ) |
testBuildWithPaametersWithOrderBy ( ) { java . lang . String abbrName = "AN" ; java . lang . String name = "fdsfds" ; org . lnu . is . domain . employee . status . EmployeeStatus context = new org . lnu . is . domain . employee . status . EmployeeStatus ( ) ; context . setAbbrName ( abbrName ) ; context . setName ( name ) ; org . lnu . is . pagination . OrderBy orderBy1 = new org . lnu . is . pagination . OrderBy ( "abbrName" , org . lnu . is . pagination . OrderByType . ASC ) ; org . lnu . is . pagination . OrderBy orderBy2 = new org . lnu . is . pagination . OrderBy ( "name" , org . lnu . is . pagination . OrderByType . DESC ) ; java . util . List < org . lnu . is . pagination . OrderBy > orders = java . util . Arrays . asList ( orderBy1 , orderBy2 ) ; java . lang . String expectedQuery = "SELECT<sp>e<sp>FROM<sp>EmployeeStatus<sp>e<sp>WHERE<sp>(<sp>e.name<sp>LIKE<sp>CONCAT('%',:name,'%')<sp>AND<sp>e.abbrName<sp>LIKE<sp>CONCAT('%',:abbrName,'%')<sp>)<sp>AND<sp>e.status=:status<sp>AND<sp>e.crtUserGroup<sp>IN<sp>(:userGroups)<sp>ORDER<sp>BY<sp>e.abbrName<sp>ASC,<sp>e.name<sp>DESC" ; org . lnu . is . pagination . MultiplePagedSearch < org . lnu . is . domain . employee . status . EmployeeStatus > pagedSearch = new org . lnu . is . pagination . MultiplePagedSearch ( ) ; pagedSearch . setEntity ( context ) ; pagedSearch . setOrders ( orders ) ; java . lang . String actualQuery = unit . build ( pagedSearch ) ; "<AssertPlaceHolder>" ; } setOrders ( java . util . List ) { this . orders = orders ; } | org . junit . Assert . assertEquals ( expectedQuery , actualQuery ) |
testValidateSubscriptionWithExternalPriceMdel_nullSubscription ( ) { org . oscm . internal . vo . VOSubscriptionDetails subscription = null ; org . oscm . internal . vo . VOSubscriptionDetails result = bean . validateSubscription ( subscription ) ; "<AssertPlaceHolder>" ; } validateSubscription ( org . oscm . internal . vo . VOService ) { org . oscm . domobjects . PlatformUser currentUser = dm . getCurrentUser ( ) ; org . oscm . domobjects . Product product = dm . getReference ( org . oscm . domobjects . Product . class , service . getKey ( ) ) ; org . oscm . domobjects . Subscription subscription = validateSubscription ( service , currentUser , product ) ; org . oscm . internal . vo . VOSubscriptionDetails voSubscriptionDetails = subscriptionService . getSubscriptionDetailsWithoutOwnerCheck ( subscription . getKey ( ) ) ; return voSubscriptionDetails ; } | org . junit . Assert . assertTrue ( ( result == null ) ) |
forThreadLocalRandom ( ) { net . moznion . random . string . RandomStringGenerator generator = new net . moznion . random . string . RandomStringGenerator ( java . util . concurrent . ThreadLocalRandom . current ( ) ) ; java . lang . String randomString = generator . generateByRegex ( "\\w+\\d*\\W\\D{0,3}a\\{0,3}.\\s\\S[0-9][a-zA-Z]X" ) ; java . util . regex . Pattern patternToProve = java . util . regex . Pattern . compile ( ( "^[a-zA-Z0-9_]+[0-9]*[~`!@$%^&*()\\-+={}\\[\\]|\\\\:;\"\'.<>?/#,]" + ( "[a-zA-Z0-9~`!@$%^&*()\\-_+={}\\[\\]|\\\\:;\"\'.<>?/#,]{0,3}" + "a\\{0,3}.[<sp>\t].[0-9][a-zA-Z]X$" ) ) ) ; "<AssertPlaceHolder>" ; } generateByRegex ( java . lang . String ) { java . lang . String expanded = new net . moznion . random . string . RegexNormalizer ( numOfUpperLimit , random ) . normalizeQuantifiers ( regex ) ; final java . lang . String [ ] regexCharacters = expanded . split ( "w" 0 ) ; final int length = regexCharacters . length ; java . lang . StringBuilder sb = new java . lang . StringBuilder ( ) ; for ( int i = 0 ; i < length ; i ++ ) { java . lang . String character = regexCharacters [ i ] ; net . moznion . random . string . RandomLetterPicker picker = null ; java . lang . String candidateCharacter = null ; switch ( character ) { case "\\" : try { character = regexCharacters [ ( ++ i ) ] ; } catch ( java . lang . ArrayIndexOutOfBoundsException e ) { throw new java . lang . RuntimeException ( "w" 3 ) ; } switch ( character ) { case "w" : picker = pickers . getWord ( ) ; break ; case "d" : picker = pickers . getDigit ( ) ; break ; case "W" : picker = pickers . getNotWord ( ) ; break ; case "D" : picker = pickers . getNotDigit ( ) ; break ; case "w" 1 : picker = pickers . getSpace ( ) ; break ; case "w" 2 : picker = pickers . getAny ( ) ; break ; default : candidateCharacter = character ; } break ; case "[" : try { net . moznion . random . string . UserDefinedLetterPickerScanner . ScannedUserDefinedPicker scannedUserDefinedPicker = net . moznion . 
random . string . UserDefinedLetterPickerScanner . scan ( regexCharacters , i ) ; java . lang . String key = scannedUserDefinedPicker . getKey ( ) ; i = scannedUserDefinedPicker . getCursor ( ) ; if ( ( userDefinedPickers . get ( key ) ) == null ) { net . moznion . random . string . RandomLetterPicker userDefinedPicker = net . moznion . random . string . RandomLetterPicker . constructByCharacterRange ( scannedUserDefinedPicker . getBounds ( ) ) ; userDefinedPickers . put ( key , userDefinedPicker ) ; } picker = userDefinedPickers . get ( key ) ; } catch ( java . lang . ArrayIndexOutOfBoundsException e ) { throw new java . lang . RuntimeException ( "Occurs<sp>parsing<sp>error" ) ; } break ; case "." : picker = pickers . getAny ( ) ; break ; default : candidateCharacter = character ; } int repetitionNum = 1 ; if ( ( i + 1 ) < length ) { java . lang . String nextCharacter = regexCharacters [ ( i + 1 ) ] ; if ( nextCharacter . equals ( "{" ) ) { int j = i + 1 ; java . lang . StringBuilder sbForQuantifier = new java . lang . StringBuilder ( ) ; try { while ( ! ( ( nextCharacter = regexCharacters [ ( ++ j ) ] ) . equals ( "}" ) ) ) { sbForQuantifier . append ( nextCharacter ) ; } try { repetitionNum = java . lang . Integer . parseInt ( sbForQuantifier . toString ( ) , 10 ) ; i = j ; } catch ( java . lang . RuntimeException e ) { } } catch ( java . lang . ArrayIndexOutOfBoundsException e ) { } } } if ( picker != null ) { for ( int j = 0 ; j < repetitionNum ; j ++ ) { sb . append ( picker . pickRandomLetter ( ) ) ; } } else if ( candidateCharacter != null ) { for ( int j = 0 ; j < repetitionNum ; j ++ ) { sb . append ( candidateCharacter ) ; } } else { throw new java . lang . RuntimeException ( "Occurs<sp>parsing<sp>error" ) ; } } return sb . toString ( ) ; } | org . junit . Assert . assertTrue ( patternToProve . matcher ( randomString ) . find ( ) ) |
testModifyAddSameSpecificArea ( ) { org . apache . directory . api . ldap . model . entry . Entry caArea = new org . apache . directory . api . ldap . model . entry . DefaultEntry ( "ou=caArea,<sp>ou=system" , "ObjectClass:<sp>top" , "ObjectClass:<sp>organizationalUnit" , "ou:<sp>caArea" , "administrativeRole:<sp>collectiveAttributeSpecificArea" ) ; org . apache . directory . server . core . admin . AdministrativePointServiceIT . connection . add ( caArea ) ; org . apache . directory . api . ldap . model . entry . Modification modification = new org . apache . directory . api . ldap . model . entry . DefaultModification ( org . apache . directory . api . ldap . model . entry . ModificationOperation . ADD_ATTRIBUTE , new org . apache . directory . api . ldap . model . entry . DefaultAttribute ( "administrativeRole" , "collectiveAttributeSpecificArea" ) ) ; try { org . apache . directory . server . core . admin . AdministrativePointServiceIT . connection . modify ( "ou=caArea,<sp>ou=system" , modification ) ; org . junit . Assert . fail ( ) ; } catch ( org . apache . directory . api . ldap . model . exception . LdapAttributeInUseException lnsae ) { "<AssertPlaceHolder>" ; } } modify ( org . apache . directory . api . ldap . model . message . ModifyRequest , org . apache . directory . server . core . api . changelog . LogChange ) { org . apache . directory . server . core . api . interceptor . context . ModifyOperationContext modifyContext = new org . apache . directory . server . core . api . interceptor . context . ModifyOperationContext ( this , modifyRequest ) ; modifyContext . setLogChange ( log ) ; org . apache . directory . server . core . api . OperationManager operationManager = directoryService . getOperationManager ( ) ; try { operationManager . modify ( modifyContext ) ; } catch ( org . apache . directory . api . ldap . model . exception . LdapException e ) { modifyRequest . getResultResponse ( ) . addAllControls ( modifyContext . 
getResponseControls ( ) ) ; throw e ; } modifyRequest . getResultResponse ( ) . addAllControls ( modifyContext . getResponseControls ( ) ) ; } | org . junit . Assert . assertTrue ( true ) |
testVar ( ) { org . nd4j . linalg . api . ndarray . INDArray data = org . nd4j . linalg . factory . Nd4j . create ( new double [ ] { 4.0 , 4.0 , 4.0 , 4.0 , 8.0 , 8.0 , 8.0 , 8.0 , 4.0 , 4.0 , 4.0 , 4.0 , 8.0 , 8.0 , 8.0 , 8.0 , 4.0 , 4.0 , 4.0 , 4.0 , 8.0 , 8.0 , 8.0 , 8.0 , 4.0 , 4.0 , 4.0 , 4.0 , 8.0 , 8.0 , 8.0 , 8 , 2.0 , 2.0 , 2.0 , 2.0 , 4.0 , 4.0 , 4.0 , 4.0 , 2.0 , 2.0 , 2.0 , 2.0 , 4.0 , 4.0 , 4.0 , 4.0 , 2.0 , 2.0 , 2.0 , 2.0 , 4.0 , 4.0 , 4.0 , 4.0 , 2.0 , 2.0 , 2.0 , 2.0 , 4.0 , 4.0 , 4.0 , 4.0 } , new long [ ] { 2 , 2 , 4 , 4 } ) ; org . nd4j . linalg . api . ndarray . INDArray actualResult = data . var ( false , 0 ) ; org . nd4j . linalg . api . ndarray . INDArray expectedResult = org . nd4j . linalg . factory . Nd4j . create ( new double [ ] { 1.0 , 1.0 , 1.0 , 1.0 , 4.0 , 4.0 , 4.0 , 4.0 , 1.0 , 1.0 , 1.0 , 1.0 , 4.0 , 4.0 , 4.0 , 4.0 , 1.0 , 1.0 , 1.0 , 1.0 , 4.0 , 4.0 , 4.0 , 4.0 , 1.0 , 1.0 , 1.0 , 1.0 , 4.0 , 4.0 , 4.0 , 4.0 } , new long [ ] { 2 , 4 , 4 } ) ; "<AssertPlaceHolder>" ; } getFailureMessage ( ) { return ( ( "Failed<sp>with<sp>backend<sp>" + ( backend . getClass ( ) . getName ( ) ) ) + "<sp>and<sp>ordering<sp>" ) + ( ordering ( ) ) ; } | org . junit . Assert . assertEquals ( getFailureMessage ( ) , expectedResult , actualResult ) |
testSetGetMetadataAggregatorName ( ) { final java . lang . String aggregatorName = "Willi" ; org . esa . beam . binning . operator . BinningConfigTest . config . setMetadataAggregatorName ( aggregatorName ) ; "<AssertPlaceHolder>" ; } getMetadataAggregatorName ( ) { return metadataAggregatorName ; } | org . junit . Assert . assertEquals ( aggregatorName , org . esa . beam . binning . operator . BinningConfigTest . config . getMetadataAggregatorName ( ) ) |
exludedByArrayTest ( ) { java . lang . Object instanceOne = org . jsonschema2pojo . integration . ExcludedFromEqualsAndHashCodeIT . clazz . newInstance ( ) ; java . lang . Object instanceTwo = org . jsonschema2pojo . integration . ExcludedFromEqualsAndHashCodeIT . clazz . newInstance ( ) ; org . jsonschema2pojo . integration . ExcludedFromEqualsAndHashCodeIT . setProperty ( instanceOne , "excludedByProperty" , "one" ) ; org . jsonschema2pojo . integration . ExcludedFromEqualsAndHashCodeIT . setProperty ( instanceOne , "excludedByArray" , "two" ) ; org . jsonschema2pojo . integration . ExcludedFromEqualsAndHashCodeIT . setProperty ( instanceOne , "notExcluded" , "three" ) ; org . jsonschema2pojo . integration . ExcludedFromEqualsAndHashCodeIT . setProperty ( instanceOne , "notExcludedByProperty" , "four" ) ; org . jsonschema2pojo . integration . ExcludedFromEqualsAndHashCodeIT . setProperty ( instanceTwo , "excludedByProperty" , "one" ) ; org . jsonschema2pojo . integration . ExcludedFromEqualsAndHashCodeIT . setProperty ( instanceTwo , "excludedByArray" , "differentValue" ) ; org . jsonschema2pojo . integration . ExcludedFromEqualsAndHashCodeIT . setProperty ( instanceTwo , "notExcluded" , "three" ) ; org . jsonschema2pojo . integration . ExcludedFromEqualsAndHashCodeIT . setProperty ( instanceTwo , "notExcludedByProperty" , "four" ) ; "<AssertPlaceHolder>" ; } equals ( java . lang . Object ) { return opMixin . equals ( o ) ; } | org . junit . Assert . assertThat ( instanceOne . equals ( instanceTwo ) , org . hamcrest . core . Is . is ( true ) ) |
whenAddUserDuplicateWithNameThatTheyDontAdd ( ) { ru . szhernovoy . model . User vasja = new ru . szhernovoy . model . User ( "Vasja" , 25 ) ; ru . szhernovoy . model . User kolja = new ru . szhernovoy . model . User ( "Vasja" , 27 ) ; ru . szhernovoy . control . Validate [ ] valid = new ru . szhernovoy . control . Validate [ 2 ] ; valid [ 0 ] = new ru . szhernovoy . control . ValidateAge ( ) ; valid [ 1 ] = new ru . szhernovoy . control . ValidateName ( ) ; ru . szhernovoy . model . UserStorage storage = new ru . szhernovoy . model . UserStorage ( 2 ) ; storage . addUser ( valid , vasja ) ; boolean result = storage . addUser ( valid , kolja ) ; boolean control = false ; "<AssertPlaceHolder>" ; } addUser ( ru . szhernovoy . control . Validate [ ] , ru . szhernovoy . model . User ) { boolean result = checkParam ( valid , user ) ; if ( result ) { user . setId ( this . generateId ( ) ) ; users [ ( ( position ) ++ ) ] = user ; } return result ; } | org . junit . Assert . assertThat ( control , org . hamcrest . core . Is . is ( result ) ) |
testGetSecurityManager ( ) { org . apache . shiro . mgt . SecurityManager securityManager = createMock ( org . apache . shiro . mgt . SecurityManager . class ) ; org . apache . shiro . guice . GuiceEnvironment underTest = new org . apache . shiro . guice . GuiceEnvironment ( securityManager ) ; "<AssertPlaceHolder>" ; } getSecurityManager ( ) { return securityManager ; } | org . junit . Assert . assertSame ( securityManager , underTest . getSecurityManager ( ) ) |
getPrivateProxyIPa ( ) { java . lang . String proxyIP = "10.254.28.1" ; java . lang . String resultProxyIP = org . ebayopensource . turmeric . runtime . spf . impl . transport . http . HTTPServerUtils . getFirstPublicProxyIP ( proxyIP ) ; "<AssertPlaceHolder>" ; } getFirstPublicProxyIP ( java . lang . String ) { if ( ( xForwardedForHeader == null ) || ( ( xForwardedForHeader . length ( ) ) == 0 ) ) { if ( org . ebayopensource . turmeric . runtime . spf . impl . transport . http . HTTPServerUtils . isDebugOn ( ) ) { org . ebayopensource . turmeric . runtime . spf . impl . transport . http . HTTPServerUtils . log ( "getFirstPublicProxyIP()<sp>-<sp>x-forwarded-for<sp>header<sp>is<sp>empty!" ) ; } return "" ; } final java . util . StringTokenizer st = new java . util . StringTokenizer ( xForwardedForHeader , "," ) ; java . lang . String token = "" ; while ( st . hasMoreTokens ( ) ) { token = st . nextToken ( ) . trim ( ) ; if ( org . ebayopensource . turmeric . runtime . spf . impl . transport . http . HTTPServerUtils . isPublicIpAddress ( token ) ) { if ( org . ebayopensource . turmeric . runtime . spf . impl . transport . http . HTTPServerUtils . isDebugOn ( ) ) { org . ebayopensource . turmeric . runtime . spf . impl . transport . http . HTTPServerUtils . log ( ( "The<sp>first<sp>*public*<sp>proxy<sp>IP<sp>(x-forwarded-for)<sp>-<sp>" + token ) ) ; } return token ; } } if ( org . ebayopensource . turmeric . runtime . spf . impl . transport . http . HTTPServerUtils . isDebugOn ( ) ) { org . ebayopensource . turmeric . runtime . spf . impl . transport . http . HTTPServerUtils . log ( "getFirstPublicProxyIP()<sp>-<sp>no<sp>*public*<sp>proxy<sp>IP<sp>is<sp>found<sp>(x-forwarded-for)!" ) ; } return "" ; } | org . junit . Assert . assertEquals ( "" , resultProxyIP ) |
testGetRuntimeMXBean ( ) { java . lang . management . RuntimeMXBean runtimeMxBean = java . lang . management . ManagementFactory . getRuntimeMXBean ( ) ; "<AssertPlaceHolder>" ; System . out . println ( ( "runtimeMxBean<sp>=<sp>" + runtimeMxBean ) ) ; System . out . println ( ( "runtimeMxBean.getName()<sp>=<sp>" + ( runtimeMxBean . getName ( ) ) ) ) ; } | org . junit . Assert . assertNotNull ( runtimeMxBean ) |
updateUser ( ) { com . xkcoding . orm . beetlsql . entity . User user = userService . getUser ( 2L ) ; user . setName ( "beetlSql<sp>" ) ; com . xkcoding . orm . beetlsql . entity . User update = userService . updateUser ( user ) ; "<AssertPlaceHolder>" ; log . debug ( "update=<sp>{}" , update ) ; } getName ( ) { return java . lang . management . ManagementFactory . getRuntimeMXBean ( ) . getVmName ( ) ; } | org . junit . Assert . assertEquals ( "beetlSql<sp>" , update . getName ( ) ) |
testStartFollowedByUnit ( ) { java . lang . String outcome = opennlp . tools . namefind . BilouNameFinderSequenceValidatorTest . UNIT_A ; java . lang . String [ ] inputSequence = new java . lang . String [ ] { "TypeA" , "AnyType" , "something" } ; java . lang . String [ ] outcomesSequence = new java . lang . String [ ] { opennlp . tools . namefind . BilouNameFinderSequenceValidatorTest . START_A } ; "<AssertPlaceHolder>" ; } validSequence ( int , java . lang . String [ ] , java . lang . String [ ] , java . lang . String ) { if ( outcome . endsWith ( BioCodec . CONTINUE ) ) { int li = ( outcomesSequence . length ) - 1 ; if ( li == ( - 1 ) ) { return false ; } else if ( outcomesSequence [ li ] . endsWith ( BioCodec . OTHER ) ) { return false ; } else if ( ( outcomesSequence [ li ] . endsWith ( BioCodec . CONTINUE ) ) || ( outcomesSequence [ li ] . endsWith ( BioCodec . START ) ) ) { java . lang . String previousNameType = opennlp . tools . namefind . NameFinderME . extractNameType ( outcomesSequence [ li ] ) ; java . lang . String nameType = opennlp . tools . namefind . NameFinderME . extractNameType ( outcome ) ; if ( ( previousNameType != null ) || ( nameType != null ) ) { if ( nameType != null ) { if ( nameType . equals ( previousNameType ) ) { return true ; } } return false ; } } } return true ; } | org . junit . Assert . assertFalse ( opennlp . tools . namefind . BilouNameFinderSequenceValidatorTest . validator . validSequence ( 1 , inputSequence , outcomesSequence , outcome ) ) |
reversesTypeRingWalk ( ) { com . jnape . palatable . lambda . lens . Iso < java . util . List < java . lang . String > , java . util . Set < java . lang . Integer > , java . lang . String , java . lang . Integer > iso = com . jnape . palatable . lambda . lens . Iso . iso ( ( xs ) -> xs . get ( 0 ) , Collections :: singleton ) ; "<AssertPlaceHolder>" ; } under ( com . jnape . palatable . lambda . optics . Optic , java . util . function . Function , B ) { return com . jnape . palatable . lambda . optics . functions . Under . under ( optic , fn ) . apply ( b ) ; } | org . junit . Assert . assertEquals ( "1" , com . jnape . palatable . lambda . lens . functions . Under . under ( iso , ( set ) -> singletonList ( set . iterator ( ) . next ( ) . toString ( ) ) , 1 ) ) |
returns_files_by_partition ( ) { java . util . Map < java . lang . Integer , java . util . List < org . apache . hadoop . fs . Path > > filesByPartition = org . apache . metron . pcap . utils . FileFilterUtil . getFilesByPartition ( filesIn ) ; java . util . Map < java . lang . Integer , java . util . List < org . apache . hadoop . fs . Path > > expectedFilesPartitioned = new java . util . HashMap ( ) { { put ( 0 , toList ( "/apath/pcap_pcap5_1495135377055375000_0_pcap-9-1495134910" ) ) ; put ( 1 , toList ( "/apath/pcap_pcap5_1495135372168719000_1_pcap-9-1495134910" ) ) ; put ( 2 , toList ( "/apath/pcap_pcap5_1495135372055519000_2_pcap-9-1495134910" ) ) ; put ( 3 , toList ( "/apath/pcap_pcap5_1495135512123943000_3_pcap-9-1495134910" ) ) ; put ( 4 , toList ( "/apath/pcap_pcap5_1495135512102506000_4_pcap-9-1495134910" ) ) ; } } ; "<AssertPlaceHolder>" ; } toList ( java . lang . String [ ] ) { return java . util . Arrays . asList ( items ) . stream ( ) . map ( ( i ) -> new org . apache . hadoop . fs . Path ( i ) ) . collect ( java . util . stream . Collectors . toList ( ) ) ; } | org . junit . Assert . assertThat ( filesByPartition , org . hamcrest . CoreMatchers . equalTo ( expectedFilesPartitioned ) ) |
testGetLongWithStringWithDefaultWithInvalidProperty ( ) { final edu . illinois . library . cantaloupe . config . Configuration instance = getInstance ( ) ; instance . setProperty ( "test1" , "cats" ) ; "<AssertPlaceHolder>" ; } getLong ( java . lang . String , long ) { try { return getLong ( key ) ; } catch ( java . util . NoSuchElementException | java . lang . NumberFormatException e ) { return defaultValue ; } } | org . junit . Assert . assertEquals ( 5 , instance . getLong ( "test1" , 5 ) ) |
testCreateReader ( ) { org . talend . components . marklogic . tmarklogicinput . MarkLogicInputProperties inputProperties = new org . talend . components . marklogic . tmarklogicinput . MarkLogicInputProperties ( "inputProps" ) ; inputProperties . init ( ) ; source . initialize ( null , inputProperties ) ; org . talend . components . marklogic . runtime . input . MarkLogicCriteriaReader reader = source . createReader ( null ) ; "<AssertPlaceHolder>" ; } createReader ( org . talend . components . api . container . RuntimeContainer ) { return new $ . $ ( ) ; { componentName = $missing$ ; } Reader ( this ) ; } | org . junit . Assert . assertNotNull ( reader ) |
testAfterStop ( ) { client . setSleepBeforeReturn ( true ) ; try { client . putEntities ( org . apache . hadoop . yarn . client . api . impl . TestTimelineClientV2Impl . generateEntity ( "1" ) ) ; for ( int i = 2 ; i < 20 ; i ++ ) { client . putEntitiesAsync ( org . apache . hadoop . yarn . client . api . impl . TestTimelineClientV2Impl . generateEntity ( ( "" + i ) ) ) ; } client . stop ( ) ; try { client . putEntitiesAsync ( org . apache . hadoop . yarn . client . api . impl . TestTimelineClientV2Impl . generateEntity ( "50" ) ) ; org . junit . Assert . fail ( "Exception<sp>expected" ) ; } catch ( org . apache . hadoop . yarn . exceptions . YarnException e ) { } } catch ( org . apache . hadoop . yarn . exceptions . YarnException e ) { org . junit . Assert . fail ( "No<sp>exception<sp>expected" ) ; } for ( int i = 0 ; i < 5 ; i ++ ) { org . apache . hadoop . yarn . api . records . timelineservice . TimelineEntities publishedEntities = client . publishedEntities . get ( ( ( client . publishedEntities . size ( ) ) - 1 ) ) ; org . apache . hadoop . yarn . api . records . timelineservice . TimelineEntity timelineEntity = publishedEntities . getEntities ( ) . get ( ( ( publishedEntities . getEntities ( ) . size ( ) ) - 1 ) ) ; if ( ! ( timelineEntity . getId ( ) . equals ( "19" ) ) ) { java . lang . Thread . sleep ( ( 2 * ( org . apache . hadoop . yarn . client . api . impl . TestTimelineClientV2Impl . TIME_TO_SLEEP ) ) ) ; } } printReceivedEntities ( ) ; org . apache . hadoop . yarn . api . records . timelineservice . TimelineEntities publishedEntities = client . publishedEntities . get ( ( ( client . publishedEntities . size ( ) ) - 1 ) ) ; org . apache . hadoop . yarn . api . records . timelineservice . TimelineEntity timelineEntity = publishedEntities . getEntities ( ) . get ( ( ( publishedEntities . getEntities ( ) . size ( ) ) - 1 ) ) ; "<AssertPlaceHolder>" ; } getId ( ) { return id ; } | org . junit . Assert . assertEquals ( "" , "19" , timelineEntity . 
getId ( ) ) |
shouldLogInDescendingOrder ( ) { com . couchbase . client . core . tracing . ThresholdLogReporterTest . TestReporter reporter = null ; try { reporter = new com . couchbase . client . core . tracing . ThresholdLogReporterTest . TestReporter ( new com . couchbase . client . core . tracing . ThresholdLogReporter . Builder ( ) . kvThreshold ( 1 , TimeUnit . MILLISECONDS ) . logInterval ( 1 , TimeUnit . SECONDS ) ) ; java . util . List < java . lang . Long > allDurations = new java . util . ArrayList < java . lang . Long > ( ) ; int numRequests = 100 ; for ( int i = 0 ; i < numRequests ; i ++ ) { com . couchbase . client . core . message . CouchbaseRequest request = mock ( com . couchbase . client . core . message . CouchbaseRequest . class ) ; when ( request . operationId ( ) ) . thenReturn ( ( "0x" + i ) ) ; com . couchbase . client . core . tracing . ThresholdLogSpan span = mock ( com . couchbase . client . core . tracing . ThresholdLogSpan . class ) ; when ( span . compareTo ( any ( com . couchbase . client . core . tracing . ThresholdLogSpan . class ) ) ) . thenCallRealMethod ( ) ; when ( span . tag ( "peer.service" ) ) . thenReturn ( "kv" ) ; when ( span . operationName ( ) ) . thenReturn ( "get" ) ; long duration = TimeUnit . SECONDS . toMicros ( new java . util . Random ( ) . nextInt ( 10 ) ) ; when ( span . durationMicros ( ) ) . thenReturn ( duration ) ; allDurations . add ( duration ) ; when ( span . request ( ) ) . thenReturn ( request ) ; reporter . report ( span ) ; } reporter . waitUntilOverThreshold ( 1 ) ; java . util . List < java . lang . Long > totalDurations = new java . util . ArrayList < java . lang . Long > ( ) ; for ( java . util . List < java . util . Map < java . lang . String , java . lang . Object > > allLogEntries : reporter . overThreshold ( ) ) { for ( java . util . Map < java . lang . String , java . lang . Object > logEntry : allLogEntries ) { java . util . List < java . util . Map < java . lang . String , java . lang . 
Object > > topEntries = ( ( java . util . List < java . util . Map < java . lang . String , java . lang . Object > > ) ( logEntry . get ( "top" ) ) ) ; for ( java . util . Map < java . lang . String , java . lang . Object > entry : topEntries ) { totalDurations . add ( ( ( java . lang . Long ) ( entry . get ( "total_us" ) ) ) ) ; } } } java . util . Collections . sort ( allDurations , java . util . Collections . < java . lang . Long > reverseOrder ( ) ) ; java . util . List < java . lang . Long > sortedDescending = allDurations . subList ( 0 , 10 ) ; "<AssertPlaceHolder>" ; } finally { if ( reporter != null ) { reporter . shutdown ( ) ; } } } add ( com . couchbase . client . core . message . observe . ObserveViaMutationToken$ObserveItem ) { return new com . couchbase . client . core . message . observe . ObserveViaMutationToken . ObserveItem ( ( ( this . replicated ) + ( other . replicated ) ) , ( ( this . persisted ) + ( other . persisted ) ) , ( ( this . persistedMaster ) || ( other . persistedMaster ) ) ) ; } | org . junit . Assert . assertEquals ( sortedDescending , totalDurations ) |
testToString ( ) { System . out . println ( "toString" ) ; for ( org . netbeans . modules . search . matcher . BufferedCharSequenceTest . TypeOfStream stype : org . netbeans . modules . search . matcher . BufferedCharSequenceTest . TypeOfStream . values ( ) ) { java . io . InputStream stream = getInputStream ( stype , org . netbeans . modules . search . matcher . BufferedCharSequenceTest . TypeOfContent . BYTE_10 , cs_UTF_8 ) ; org . netbeans . modules . search . matcher . BufferedCharSequence instance = new org . netbeans . modules . search . matcher . BufferedCharSequence ( stream , cs_UTF_8 . newDecoder ( ) , 10 ) ; instance . setMaxBufferSize ( 5 ) ; java . lang . String expResult = org . netbeans . modules . search . matcher . BufferedCharSequenceTest . TypeOfContent . BYTE_10 . getContent ( ) ; java . lang . String result = instance . toString ( ) ; "<AssertPlaceHolder>" ; } } toString ( ) { return dir . getPath ( ) ; } | org . junit . Assert . assertEquals ( expResult , result ) |
createWithException ( ) { org . syncope . client . to . PasswordPolicyTO policy = new org . syncope . client . to . PasswordPolicyTO ( true ) ; policy . setSpecification ( new org . syncope . types . PasswordPolicySpec ( ) ) ; policy . setDescription ( "global<sp>password<sp>policy" ) ; java . lang . Throwable t = null ; try { restTemplate . postForObject ( ( ( BASE_URL ) + "policy/password/create" ) , policy , org . syncope . client . to . PasswordPolicyTO . class ) ; org . junit . Assert . fail ( ) ; } catch ( org . syncope . client . validation . SyncopeClientCompositeErrorException sccee ) { t = sccee . getException ( SyncopeClientExceptionType . InvalidPasswordPolicy ) ; } "<AssertPlaceHolder>" ; } getException ( org . syncope . types . SyncopeClientExceptionType ) { boolean found = false ; org . syncope . client . validation . SyncopeClientException syncopeClientException = null ; for ( java . util . Iterator < org . syncope . client . validation . SyncopeClientException > itor = exceptions . iterator ( ) ; ( itor . hasNext ( ) ) && ( ! found ) ; ) { syncopeClientException = itor . next ( ) ; if ( syncopeClientException . getType ( ) . equals ( exceptionType ) ) { found = true ; } } return found ? syncopeClientException : null ; } | org . junit . Assert . assertNotNull ( t ) |
testReachableViaLdcLoad ( ) { com . spotify . missinglink . datamodel . DeclaredClass remote = com . spotify . missinglink . Simple . newClass ( "other/Unknown" ) . build ( ) ; com . spotify . missinglink . datamodel . DeclaredClass root = com . spotify . missinglink . Simple . newClass ( "my/Root" ) . loadedClasses ( com . google . common . collect . ImmutableSet . of ( com . spotify . missinglink . datamodel . TypeDescriptors . fromClassName ( "other/Unknown" ) ) ) . build ( ) ; com . google . common . collect . ImmutableSet < com . spotify . missinglink . datamodel . DeclaredClass > myClasses = com . google . common . collect . ImmutableSet . of ( root ) ; com . google . common . collect . ImmutableMap < com . spotify . missinglink . datamodel . ClassTypeDescriptor , com . spotify . missinglink . datamodel . DeclaredClass > world = com . spotify . missinglink . Simple . classMap ( root , remote ) ; com . google . common . collect . ImmutableSet < com . spotify . missinglink . datamodel . TypeDescriptor > reachable = com . spotify . missinglink . ConflictChecker . reachableFrom ( myClasses , world ) ; "<AssertPlaceHolder>" ; } reachableFrom ( com . google . common . collect . ImmutableCollection , java . util . Map ) { java . util . Queue < com . spotify . missinglink . datamodel . DeclaredClass > toCheck = new java . util . LinkedList ( values ) ; java . util . Set < com . spotify . missinglink . datamodel . ClassTypeDescriptor > reachable = com . google . common . collect . Sets . newHashSet ( ) ; while ( ! ( toCheck . isEmpty ( ) ) ) { com . spotify . missinglink . datamodel . DeclaredClass current = toCheck . remove ( ) ; if ( ! ( reachable . add ( current . className ( ) ) ) ) { continue ; } toCheck . addAll ( current . parents ( ) . stream ( ) . map ( knownClasses :: get ) . filter ( ( declaredClass ) -> declaredClass != null ) . collect ( toList ( ) ) ) ; toCheck . addAll ( current . loadedClasses ( ) . stream ( ) . map ( knownClasses :: get ) . 
filter ( ( declaredClass ) -> declaredClass != null ) . collect ( toList ( ) ) ) ; toCheck . addAll ( current . methods ( ) . values ( ) . stream ( ) . flatMap ( ( declaredMethod ) -> declaredMethod . methodCalls ( ) . stream ( ) ) . map ( CalledMethod :: owner ) . filter ( ( typeDescriptor ) -> ! ( reachable . contains ( typeDescriptor ) ) ) . map ( knownClasses :: get ) . filter ( ( declaredClass ) -> declaredClass != null ) . collect ( toList ( ) ) ) ; toCheck . addAll ( current . methods ( ) . values ( ) . stream ( ) . flatMap ( ( declaredMethod ) -> declaredMethod . fieldAccesses ( ) . stream ( ) ) . map ( AccessedField :: owner ) . filter ( ( typeDescriptor ) -> ! ( reachable . contains ( typeDescriptor ) ) ) . map ( knownClasses :: get ) . filter ( ( declaredClass ) -> declaredClass != null ) . collect ( toList ( ) ) ) ; } return com . google . common . collect . ImmutableSet . copyOf ( reachable ) ; } | org . junit . Assert . assertEquals ( com . google . common . collect . ImmutableSet . of ( root . className ( ) , remote . className ( ) ) , reachable ) |
startProcess ( ) { org . activiti . engine . RepositoryService repositoryService = activitiRule . getRepositoryService ( ) ; repositoryService . createDeployment ( ) . addInputStream ( "MultiInstanceBug.bpmn20.xml" , new java . io . FileInputStream ( filename ) ) . deploy ( ) ; org . activiti . engine . RuntimeService runtimeService = activitiRule . getRuntimeService ( ) ; java . util . Map < java . lang . String , java . lang . Object > variableMap = new java . util . HashMap < java . lang . String , java . lang . Object > ( ) ; variableMap . put ( "users" , java . util . Arrays . asList ( new java . lang . Object [ ] { "user1" , "user2" , "user3" } ) ) ; org . activiti . engine . runtime . ProcessInstance processInstance = runtimeService . startProcessInstanceByKey ( "MultiInstanceBug" , variableMap ) ; "<AssertPlaceHolder>" ; System . out . println ( ( ( ( "id<sp>" + ( processInstance . getId ( ) ) ) + "<sp>" ) + ( processInstance . getProcessDefinitionId ( ) ) ) ) ; org . activiti . engine . TaskService taskService = activitiRule . getTaskService ( ) ; java . util . List < org . activiti . engine . task . Task > list = taskService . createTaskQuery ( ) . list ( ) ; taskService . complete ( list . get ( 0 ) . getId ( ) ) ; taskService . complete ( list . get ( 1 ) . getId ( ) ) ; taskService . complete ( list . get ( 2 ) . getId ( ) ) ; } | org . junit . Assert . assertNotNull ( processInstance . getId ( ) ) |
testDoesNotSendAMessageToTheMessageBusIfAlreadyEnabled ( ) { java . util . Map < java . lang . String , java . lang . String > properties = new java . util . HashMap ( ) ; properties . put ( "max-height" , "100" ) ; properties . put ( "max-width" , "100" ) ; _amImageConfigurationHelper . addAMImageConfigurationEntry ( com . liferay . portal . kernel . test . util . TestPropsValues . getCompanyId ( ) , "one" , "onedesc" , "1" , properties ) ; java . util . List < com . liferay . portal . kernel . messaging . Message > messages = collectConfigurationMessages ( ( ) -> _amImageConfigurationHelper . enableAMImageConfigurationEntry ( com . liferay . portal . kernel . test . util . TestPropsValues . getCompanyId ( ) , "1" ) ) ; "<AssertPlaceHolder>" ; } toString ( ) { com . liferay . petra . string . StringBundler sb = new com . liferay . petra . string . StringBundler ( 23 ) ; sb . append ( ",<sp>width=" 1 ) ; sb . append ( uuid ) ; sb . append ( ",<sp>width=" 0 ) ; sb . append ( amImageEntryId ) ; sb . append ( ",<sp>groupId=" ) ; sb . append ( groupId ) ; sb . append ( ",<sp>companyId=" ) ; sb . append ( companyId ) ; sb . append ( ",<sp>createDate=" ) ; sb . append ( createDate ) ; sb . append ( ",<sp>configurationUuid=" ) ; sb . append ( configurationUuid ) ; sb . append ( ",<sp>fileVersionId=" ) ; sb . append ( fileVersionId ) ; sb . append ( ",<sp>mimeType=" ) ; sb . append ( mimeType ) ; sb . append ( ",<sp>height=" ) ; sb . append ( height ) ; sb . append ( ",<sp>width=" ) ; sb . append ( width ) ; sb . append ( ",<sp>size=" ) ; sb . append ( size ) ; sb . append ( "}" ) ; return sb . toString ( ) ; } | org . junit . Assert . assertEquals ( messages . toString ( ) , 0 , messages . size ( ) ) |
testFlowLogging ( ) { com . gargoylesoftware . htmlunit . html . HtmlPage page = webClient . getPage ( webUrl ) ; com . gargoylesoftware . htmlunit . html . HtmlSubmitInput enterFlow = ( ( com . gargoylesoftware . htmlunit . html . HtmlSubmitInput ) ( page . getElementById ( "enterFlow" ) ) ) ; page = enterFlow . click ( ) ; com . gargoylesoftware . htmlunit . html . HtmlElement e = ( ( com . gargoylesoftware . htmlunit . html . HtmlElement ) ( page . getElementById ( "initMessage" ) ) ) ; long flowInitTime = java . lang . Long . valueOf ( e . asText ( ) ) ; com . gargoylesoftware . htmlunit . html . HtmlSubmitInput next = ( ( com . gargoylesoftware . htmlunit . html . HtmlSubmitInput ) ( page . getElementById ( "a" ) ) ) ; page = next . click ( ) ; com . gargoylesoftware . htmlunit . html . HtmlSubmitInput returnButton = ( ( com . gargoylesoftware . htmlunit . html . HtmlSubmitInput ) ( page . getElementById ( "return" ) ) ) ; page = returnButton . click ( ) ; e = ( ( com . gargoylesoftware . htmlunit . html . HtmlElement ) ( page . getElementById ( "destroyMessage" ) ) ) ; long flowDestroyTime = java . lang . Long . valueOf ( e . asText ( ) ) ; "<AssertPlaceHolder>" ; } click ( ) { clicked = true ; } | org . junit . Assert . assertTrue ( ( flowInitTime < flowDestroyTime ) ) |
testBatchSaveAsListFromArray ( ) { java . util . List < com . amazonaws . mobileconnectors . dynamodbv2 . dynamodbmapper . NumberSetAttributeClass > objs = new java . util . ArrayList < com . amazonaws . mobileconnectors . dynamodbv2 . dynamodbmapper . NumberSetAttributeClass > ( ) ; for ( int i = 0 ; i < 40 ; i ++ ) { com . amazonaws . mobileconnectors . dynamodbv2 . dynamodbmapper . NumberSetAttributeClass obj = getUniqueNumericObject ( ) ; objs . add ( obj ) ; } com . amazonaws . mobileconnectors . dynamodbv2 . dynamodbmapper . DynamoDBMapper mapper = new com . amazonaws . mobileconnectors . dynamodbv2 . dynamodbmapper . DynamoDBMapper ( dynamo ) ; com . amazonaws . mobileconnectors . dynamodbv2 . dynamodbmapper . NumberSetAttributeClass [ ] objsArray = objs . toArray ( new com . amazonaws . mobileconnectors . dynamodbv2 . dynamodbmapper . NumberSetAttributeClass [ objs . size ( ) ] ) ; mapper . batchSave ( java . util . Arrays . asList ( objsArray ) ) ; for ( com . amazonaws . mobileconnectors . dynamodbv2 . dynamodbmapper . NumberSetAttributeClass obj : objs ) { com . amazonaws . mobileconnectors . dynamodbv2 . dynamodbmapper . NumberSetAttributeClass loaded = mapper . load ( com . amazonaws . mobileconnectors . dynamodbv2 . dynamodbmapper . NumberSetAttributeClass . class , obj . getKey ( ) ) ; "<AssertPlaceHolder>" ; } } getKey ( ) { return key ; } | org . junit . Assert . assertEquals ( obj , loaded ) |
keyRecovery ( ) { com . matthewmitchell . peercoinj . core . ECKey key = new com . matthewmitchell . peercoinj . core . ECKey ( ) ; java . lang . String message = "Hello<sp>World!" ; com . matthewmitchell . peercoinj . core . Sha256Hash hash = com . matthewmitchell . peercoinj . core . Sha256Hash . create ( message . getBytes ( ) ) ; com . matthewmitchell . peercoinj . core . ECKey . ECDSASignature sig = key . sign ( hash ) ; key = com . matthewmitchell . peercoinj . core . ECKey . fromPublicOnly ( key . getPubKeyPoint ( ) ) ; boolean found = false ; for ( int i = 0 ; i < 4 ; i ++ ) { com . matthewmitchell . peercoinj . core . ECKey key2 = com . matthewmitchell . peercoinj . core . ECKey . recoverFromSignature ( i , sig , hash , true ) ; checkNotNull ( key2 ) ; if ( key . equals ( key2 ) ) { found = true ; break ; } } "<AssertPlaceHolder>" ; } equals ( java . lang . Object ) { if ( ( this ) == ob ) return true ; if ( ! ( ob instanceof com . subgraph . orchid . data . IPv4Address ) ) return false ; com . subgraph . orchid . data . IPv4Address other = ( ( com . subgraph . orchid . data . IPv4Address ) ( ob ) ) ; return ( other . addressData ) == ( addressData ) ; } | org . junit . Assert . assertTrue ( found ) |
shouldDelete ( ) { importDao . delete ( net . rrm . ehour . domain . User . class ) ; net . rrm . ehour . domain . User user = importDao . find ( 3 , net . rrm . ehour . domain . User . class ) ; "<AssertPlaceHolder>" ; } find ( java . lang . Class , java . lang . String ) { java . lang . String propertyName = path . substring ( ( ( path . lastIndexOf ( "/" ) ) + 1 ) ) ; java . io . File file = new java . io . File ( ( ( translationsDir ) + propertyName ) ) ; if ( file . exists ( ) ) { return new org . apache . wicket . util . resource . FileResourceStream ( file ) ; } else { return null ; } } | org . junit . Assert . assertNull ( user ) |
of_Period0 ( ) { org . javamoney . calc . common . FutureValueGrowingAnnuity val = org . javamoney . calc . common . FutureValueGrowingAnnuity . of ( org . javamoney . calc . common . Rate . of ( 0.07 ) , org . javamoney . calc . common . Rate . of ( 0.08 ) , 0 ) ; "<AssertPlaceHolder>" ; } of ( java . math . BigDecimal ) { return new org . javamoney . calc . common . Rate ( rate , null ) ; } | org . junit . Assert . assertNotNull ( val ) |
getZipkinData_WhenZipkinDataHasBeenBuilt_ShouldReturnBuiltData ( ) { java . lang . String spanName = "Span<sp>Name" ; when ( zipkinDataBuilder . spanName ( spanName ) ) . thenReturn ( zipkinDataBuilder ) ; when ( zipkinDataBuilder . build ( ) ) . thenReturn ( zipkinData ) ; victim = new com . betfair . cougar . modules . zipkin . impl . ZipkinRequestUUIDImpl ( cougarUuid , zipkinDataBuilder ) ; victim . buildZipkinData ( spanName ) ; "<AssertPlaceHolder>" ; } getZipkinData ( ) { if ( ( zipkinData ) == null ) { if ( isZipkinTracingEnabled ( ) ) { throw new java . lang . IllegalStateException ( "Zipkin<sp>Data<sp>is<sp>still<sp>incomplete" ) ; } else { throw new java . lang . IllegalStateException ( "Zipkin<sp>tracing<sp>is<sp>not<sp>enabled<sp>for<sp>this<sp>request" ) ; } } else { return zipkinData ; } } | org . junit . Assert . assertEquals ( victim . getZipkinData ( ) , zipkinData ) |
testOrderByWithLimit ( ) { java . util . List < java . lang . String > expected = rawInput . subList ( 0 , java . lang . Math . min ( limit , rawInput . size ( ) ) ) ; java . util . List < java . lang . String > inputs = com . google . common . collect . Lists . newArrayList ( rawInput ) ; java . util . Collections . shuffle ( inputs , new java . util . Random ( 2 ) ) ; org . apache . druid . java . util . common . guava . Sequence < java . lang . String > result = new org . apache . druid . query . groupby . orderby . TopNSequence < java . lang . String > ( org . apache . druid . java . util . common . guava . Sequences . simple ( inputs ) , ordering , limit ) ; "<AssertPlaceHolder>" ; } toList ( ) { synchronized ( lock ) { java . util . List < org . apache . druid . indexing . overlord . autoscaling . ScalingStats . ScalingEvent > retVal = com . google . common . collect . Lists . newArrayList ( recentEvents ) ; java . util . Collections . sort ( retVal , org . apache . druid . indexing . overlord . autoscaling . ScalingStats . COMPARATOR ) ; return retVal ; } } | org . junit . Assert . assertEquals ( expected , result . toList ( ) ) |
accept ( ) { java . lang . String targetFileName = java . lang . String . format ( "adsWriteFailure_%s.log.%d" , "000" , 1234567890123L ) ; "<AssertPlaceHolder>" ; } checkAcceptResult ( java . lang . String ) { java . io . File file = new java . io . File ( targetFileName ) ; file . deleteOnExit ( ) ; try { file . createNewFile ( ) ; com . fujitsu . dc . common . ads . AdsWriteFailureLogFilter filter = new com . fujitsu . dc . common . ads . AdsWriteFailureLogFilter ( "000" ) ; return filter . accept ( new java . io . File ( "." ) , targetFileName ) ; } finally { file . delete ( ) ; } } | org . junit . Assert . assertTrue ( checkAcceptResult ( targetFileName ) ) |
shouldApplyKeyOnVertexProperty ( ) { final org . apache . tinkerpop . gremlin . structure . VertexProperty e = mock ( org . apache . tinkerpop . gremlin . structure . VertexProperty . class ) ; when ( e . key ( ) ) . thenReturn ( "keyName" ) ; "<AssertPlaceHolder>" ; } apply ( org . apache . tinkerpop . gremlin . process . traversal . Traversal$Admin ) { final org . apache . tinkerpop . gremlin . structure . Graph graph = traversal . getGraph ( ) . orElse ( org . apache . tinkerpop . gremlin . structure . util . empty . EmptyGraph . instance ( ) ) ; for ( final org . apache . tinkerpop . gremlin . process . computer . traversal . step . map . TraversalVertexProgramStep step : org . apache . tinkerpop . gremlin . process . traversal . util . TraversalHelper . getStepsOfClass ( org . apache . tinkerpop . gremlin . process . computer . traversal . step . map . TraversalVertexProgramStep . class , traversal ) ) { final org . apache . tinkerpop . gremlin . process . traversal . Traversal . Admin < ? , ? > computerTraversal = step . generateProgram ( graph , org . apache . tinkerpop . gremlin . process . computer . util . EmptyMemory . instance ( ) ) . getTraversal ( ) . get ( ) . clone ( ) ; if ( ! ( computerTraversal . isLocked ( ) ) ) computerTraversal . applyStrategies ( ) ; boolean doesMessagePass = org . apache . tinkerpop . gremlin . process . traversal . util . TraversalHelper . hasStepOfAssignableClassRecursively ( Scope . global , org . apache . tinkerpop . gremlin . spark . process . computer . traversal . strategy . optimization . SparkSingleIterationStrategy . MULTI_ITERATION_CLASSES , computerTraversal ) ; if ( ! doesMessagePass ) { for ( final org . apache . tinkerpop . gremlin . process . traversal . step . map . VertexStep vertexStep : org . apache . tinkerpop . gremlin . process . traversal . util . TraversalHelper . getStepsOfAssignableClassRecursively ( Scope . global , org . apache . tinkerpop . gremlin . process . traversal . step . map . 
VertexStep . class , computerTraversal ) ) { if ( ( vertexStep . returnsVertex ( ) ) || ( ! ( vertexStep . getDirection ( ) . equals ( Direction . OUT ) ) ) ) { doesMessagePass = true ; break ; } } } if ( ( ( ! doesMessagePass ) && ( ! ( org . apache . tinkerpop . gremlin . process . computer . traversal . strategy . optimization . MessagePassingReductionStrategy . endsWithElement ( computerTraversal . getEndStep ( ) ) ) ) ) && ( ! ( ( computerTraversal . getTraverserRequirements ( ) . contains ( TraverserRequirement . LABELED_PATH ) ) || ( computerTraversal . getTraverserRequirements ( ) . contains ( TraverserRequirement . PATH ) ) ) ) ) { step . setComputer ( step . getComputer ( ) . configure ( Constants . GREMLIN_SPARK_SKIP_PARTITIONER , true ) . configure ( Constants . GREMLIN_SPARK_SKIP_GRAPH_CACHE , true ) ) ; } } } | org . junit . Assert . assertEquals ( "keyName" , T . key . apply ( e ) ) |
testBlankPropertyName ( ) { java . lang . Object result = cn . jimmyshi . beanquery . DefaultNullValuePropertyValueGetter . getProperty ( new cn . jimmyshi . beanquery . example . Book ( ) , "<sp>" ) ; "<AssertPlaceHolder>" ; } getProperty ( java . lang . Object , java . lang . String ) { if ( ( null == from ) || ( org . apache . commons . lang3 . StringUtils . isBlank ( propertyName ) ) ) { org . slf4j . LoggerFactory . getLogger ( cn . jimmyshi . beanquery . DefaultNullValuePropertyValueGetter . class ) . info ( "Object<sp>is<sp>null<sp>or<sp>the<sp>property<sp>[{}]<sp>is<sp>blank,<sp>returning<sp>null" , propertyName ) ; return null ; } try { return org . apache . commons . beanutils . PropertyUtils . getProperty ( from , propertyName ) ; } catch ( java . lang . Exception e ) { org . slf4j . LoggerFactory . getLogger ( cn . jimmyshi . beanquery . DefaultNullValuePropertyValueGetter . class ) . info ( "Exception<sp>[{}]<sp>when<sp>fetching<sp>property<sp>[{}]<sp>from<sp>object<sp>[{}],<sp>returning<sp>null<sp>as<sp>the<sp>value." , e . toString ( ) , propertyName , from ) ; return null ; } } | org . junit . Assert . assertNull ( result ) |
entry_hashCodeForNullKeyAndValue ( ) { org . eclipse . collections . api . map . MutableMap < java . lang . Integer , java . lang . String > map = this . newMapWithKeyValue ( null , null ) ; Map . Entry < java . lang . Integer , java . lang . String > entry = org . eclipse . collections . impl . utility . Iterate . getFirst ( map . entrySet ( ) ) ; "<AssertPlaceHolder>" ; } hashCode ( ) { org . eclipse . collections . impl . Counter counter = new org . eclipse . collections . impl . Counter ( ) ; this . forEachWithOccurrences ( ( each , count ) -> counter . add ( ( ( each == null ? 0 : each . hashCode ( ) ) ^ count ) ) ) ; return counter . getCount ( ) ; } | org . junit . Assert . assertEquals ( 0 , entry . hashCode ( ) ) |
givenNewThreadScheduler_whenCalled_shouldExecuteTheHook ( ) { io . reactivex . plugins . RxJavaPlugins . setInitNewThreadSchedulerHandler ( ( scheduler ) -> { initHookCalled = true ; return scheduler . call ( ) ; } ) ; io . reactivex . plugins . RxJavaPlugins . setNewThreadSchedulerHandler ( ( scheduler ) -> { hookCalled = true ; return scheduler ; } ) ; io . reactivex . Observable . range ( 1 , 15 ) . map ( ( v ) -> v * 2 ) . subscribeOn ( io . reactivex . schedulers . Schedulers . newThread ( ) ) . test ( ) ; "<AssertPlaceHolder>" ; } test ( ) { throw new java . lang . Exception ( "error!" ) ; } | org . junit . Assert . assertTrue ( ( ( hookCalled ) && ( initHookCalled ) ) ) |
getMetaData ( ) { "<AssertPlaceHolder>" ; } getMetaData ( ) { return this . metaData ; } | org . junit . Assert . assertSame ( this . metaData , this . session . getMetaData ( ) ) |
testNoMetadataRepo ( ) { "<AssertPlaceHolder>" ; } getMetadataRepositories ( ) { return java . util . Collections . unmodifiableList ( metadataRepos ) ; } | org . junit . Assert . assertEquals ( 0 , subject . getMetadataRepositories ( ) . size ( ) ) |
testKeyedCEPOperatorNFAUpdateWithRocksDB ( ) { java . lang . String rocksDbPath = tempFolder . newFolder ( ) . getAbsolutePath ( ) ; org . apache . flink . contrib . streaming . state . RocksDBStateBackend rocksDBStateBackend = new org . apache . flink . contrib . streaming . state . RocksDBStateBackend ( new org . apache . flink . runtime . state . memory . MemoryStateBackend ( ) ) ; rocksDBStateBackend . setDbStoragePath ( rocksDbPath ) ; org . apache . flink . cep . operator . CepOperator < org . apache . flink . cep . Event , java . lang . Integer , java . util . Map < java . lang . String , java . util . List < org . apache . flink . cep . Event > > > operator = org . apache . flink . cep . operator . CepOperatorTestUtilities . getKeyedCepOpearator ( true , new org . apache . flink . cep . operator . CEPOperatorTest . SimpleNFAFactory ( ) ) ; org . apache . flink . streaming . util . OneInputStreamOperatorTestHarness < org . apache . flink . cep . Event , java . util . Map < java . lang . String , java . util . List < org . apache . flink . cep . Event > > > harness = org . apache . flink . cep . operator . CepOperatorTestUtilities . getCepTestHarness ( operator ) ; try { harness . setStateBackend ( rocksDBStateBackend ) ; harness . open ( ) ; org . apache . flink . cep . Event startEvent = new org . apache . flink . cep . Event ( 42 , "c" , 1.0 ) ; org . apache . flink . cep . SubEvent middleEvent = new org . apache . flink . cep . SubEvent ( 42 , "a" , 1.0 , 10.0 ) ; org . apache . flink . cep . Event endEvent = new org . apache . flink . cep . Event ( 42 , "b" , 1.0 ) ; harness . processElement ( new org . apache . flink . streaming . runtime . streamrecord . StreamRecord ( startEvent , 1L ) ) ; org . apache . flink . runtime . checkpoint . OperatorSubtaskState snapshot = harness . snapshot ( 0L , 0L ) ; harness . close ( ) ; operator = org . apache . flink . cep . operator . CepOperatorTestUtilities . getKeyedCepOpearator ( true , new org . apache . 
flink . cep . operator . CEPOperatorTest . SimpleNFAFactory ( ) ) ; harness = org . apache . flink . cep . operator . CepOperatorTestUtilities . getCepTestHarness ( operator ) ; rocksDBStateBackend = new org . apache . flink . contrib . streaming . state . RocksDBStateBackend ( new org . apache . flink . runtime . state . memory . MemoryStateBackend ( ) ) ; rocksDBStateBackend . setDbStoragePath ( rocksDbPath ) ; harness . setStateBackend ( rocksDBStateBackend ) ; harness . setup ( ) ; harness . initializeState ( snapshot ) ; harness . open ( ) ; harness . processElement ( new org . apache . flink . streaming . runtime . streamrecord . StreamRecord ( new org . apache . flink . cep . Event ( 42 , "d" , 1.0 ) , 4L ) ) ; org . apache . flink . runtime . checkpoint . OperatorSubtaskState snapshot2 = harness . snapshot ( 0L , 0L ) ; harness . close ( ) ; operator = org . apache . flink . cep . operator . CepOperatorTestUtilities . getKeyedCepOpearator ( true , new org . apache . flink . cep . operator . CEPOperatorTest . SimpleNFAFactory ( ) ) ; harness = org . apache . flink . cep . operator . CepOperatorTestUtilities . getCepTestHarness ( operator ) ; rocksDBStateBackend = new org . apache . flink . contrib . streaming . state . RocksDBStateBackend ( new org . apache . flink . runtime . state . memory . MemoryStateBackend ( ) ) ; rocksDBStateBackend . setDbStoragePath ( rocksDbPath ) ; harness . setStateBackend ( rocksDBStateBackend ) ; harness . setup ( ) ; harness . initializeState ( snapshot2 ) ; harness . open ( ) ; harness . processElement ( new org . apache . flink . streaming . runtime . streamrecord . StreamRecord < org . apache . flink . cep . Event > ( middleEvent , 4L ) ) ; harness . processElement ( new org . apache . flink . streaming . runtime . streamrecord . StreamRecord ( endEvent , 4L ) ) ; java . util . Queue < java . lang . Object > result = harness . getOutput ( ) ; "<AssertPlaceHolder>" ; verifyPattern ( result . 
poll ( ) , startEvent , middleEvent , endEvent ) ; } finally { harness . close ( ) ; } } size ( ) { return routes . size ( ) ; } | org . junit . Assert . assertEquals ( 1 , result . size ( ) ) |
testCompleted ( ) { System . out . println ( "completed" ) ; com . sonymobile . tools . gerrit . gerritevents . dto . events . PatchsetCreated event = com . sonyericsson . hudson . plugins . gerrit . trigger . mock . Setup . createPatchsetCreated ( ) ; com . sonyericsson . hudson . plugins . gerrit . trigger . gerritnotifier . model . BuildMemory instance = new com . sonyericsson . hudson . plugins . gerrit . trigger . gerritnotifier . model . BuildMemory ( ) ; instance . completed ( event , build ) ; "<AssertPlaceHolder>" ; } isAllBuildsCompleted ( com . sonymobile . tools . gerrit . gerritevents . dto . events . GerritTriggeredEvent ) { com . sonyericsson . hudson . plugins . gerrit . trigger . gerritnotifier . model . BuildMemory . MemoryImprint pb = memory . get ( event ) ; if ( pb != null ) { return pb . isAllBuildsCompleted ( ) ; } else { return false ; } } | org . junit . Assert . assertTrue ( instance . isAllBuildsCompleted ( event ) ) |
testSetCalendarDate ( ) { int [ ] result = calendarDate1 . getCalendarDate ( ) ; int [ ] caldate = new int [ ] { 1995 , 5 , 5 } ; ( ( org . geotoolkit . temporal . object . DefaultCalendarDate ) ( calendarDate1 ) ) . setCalendarDate ( caldate ) ; "<AssertPlaceHolder>" ; } getCalendarDate ( ) { return calendarDate ; } | org . junit . Assert . assertFalse ( calendarDate1 . getCalendarDate ( ) . equals ( result ) ) |
testPLFM_1754UpdateNull ( ) { org . sagebionetworks . repo . model . FileEntity file = new org . sagebionetworks . repo . model . FileEntity ( ) ; file . setParentId ( project . getId ( ) ) ; file . setDataFileHandleId ( fileHandle1 . getId ( ) ) ; file = entityService . createEntity ( adminUserId , file , null , mockRequest ) ; "<AssertPlaceHolder>" ; file . setDataFileHandleId ( null ) ; file = entityService . updateEntity ( adminUserId , file , false , null , mockRequest ) ; } createEntity ( javax . servlet . http . HttpServlet , T extends org . sagebionetworks . repo . model . Entity , java . lang . Long , java . util . Map ) { org . springframework . mock . web . MockHttpServletRequest request = org . sagebionetworks . repo . web . controller . ServletTestHelperUtils . initRequest ( HTTPMODE . POST , UrlHelpers . ENTITY , userId , entity ) ; org . sagebionetworks . repo . web . controller . ServletTestHelperUtils . addExtraParams ( request , extraParams ) ; org . springframework . mock . web . MockHttpServletResponse response = org . sagebionetworks . repo . web . controller . ServletTestHelperUtils . dispatchRequest ( dispatchServlet , request , HttpStatus . CREATED ) ; return ( ( T ) ( org . sagebionetworks . repo . web . controller . ServletTestHelper . objectMapper . readValue ( response . getContentAsString ( ) , entity . getClass ( ) ) ) ) ; } | org . junit . Assert . assertNotNull ( file ) |
testGetOrientation_RTL ( ) { org . eclipse . swt . widgets . Menu menu = new org . eclipse . swt . widgets . Menu ( shell , ( ( org . eclipse . swt . SWT . POP_UP ) | ( org . eclipse . swt . SWT . RIGHT_TO_LEFT ) ) ) ; "<AssertPlaceHolder>" ; } getOrientation ( ) { if ( ( ( getCurSide ( ) ) == ( org . eclipse . swt . SWT . LEFT ) ) || ( ( getCurSide ( ) ) == ( org . eclipse . swt . SWT . RIGHT ) ) ) return org . eclipse . swt . SWT . VERTICAL ; return org . eclipse . swt . SWT . HORIZONTAL ; } | org . junit . Assert . assertEquals ( SWT . RIGHT_TO_LEFT , menu . getOrientation ( ) ) |
whenCheckedMultiplyTwoIntegerValues_shouldMultiplyThemAndReturnTheResultIfNotOverflow ( ) { int result = com . google . common . math . IntMath . checkedMultiply ( 1 , 2 ) ; "<AssertPlaceHolder>" ; } | org . junit . Assert . assertEquals ( 2 , result ) |
testNonStrictJsonParsing ( ) { java . lang . String raw = "{<sp>foo:<sp>'bar'<sp>}" ; com . jayway . jsonpath . DocumentContext dc = com . intuit . karate . JsonUtils . toJsonDoc ( raw ) ; com . intuit . karate . JsonUtilsTest . logger . debug ( "parsed<sp>json:<sp>{}" , dc . jsonString ( ) ) ; java . lang . String value = dc . read ( "$.foo" ) ; "<AssertPlaceHolder>" ; } read ( java . lang . String ) { com . intuit . karate . ScriptValue sv = com . intuit . karate . FileUtils . readFile ( fileName , context ) ; return sv . isJsonLike ( ) ? sv . getAfterConvertingFromJsonOrXmlIfNeeded ( ) : sv . getValue ( ) ; } | org . junit . Assert . assertEquals ( "bar" , value ) |
testConjunctionOfTwoNR ( ) { java . lang . String queryString = "FOO<sp>!~<sp>BAR<sp>&&<sp>BAR<sp>!~<sp>FOO" ; java . lang . String expectedQuery = "!(FOO<sp>=~<sp>BAR)<sp>&&<sp>!(BAR<sp>=~<sp>FOO)" ; org . apache . commons . jexl2 . parser . ASTJexlScript script = datawave . query . jexl . JexlASTHelper . parseJexlQuery ( queryString ) ; org . apache . commons . jexl2 . parser . ASTJexlScript negatedScript = datawave . query . jexl . visitors . RewriteNegationsVisitor . rewrite ( script ) ; java . lang . String negatedQuery = datawave . query . jexl . visitors . JexlStringBuildingVisitor . buildQuery ( negatedScript ) ; "<AssertPlaceHolder>" ; } buildQuery ( org . apache . commons . jexl2 . parser . JexlNode ) { datawave . query . jexl . visitors . JexlStringBuildingVisitor visitor = new datawave . query . jexl . visitors . JexlStringBuildingVisitor ( ) ; java . lang . String s = null ; try { java . lang . StringBuilder sb = ( ( java . lang . StringBuilder ) ( script . jjtAccept ( visitor , new java . lang . StringBuilder ( ) ) ) ) ; s = sb . toString ( ) ; try { datawave . query . jexl . JexlASTHelper . parseJexlQuery ( s ) ; } catch ( org . apache . commons . jexl2 . parser . ParseException e ) { datawave . query . jexl . visitors . JexlStringBuildingVisitor . log . error ( "Could<sp>not<sp>parse<sp>JEXL<sp>AST<sp>after<sp>performing<sp>transformations<sp>to<sp>run<sp>the<sp>query" , e ) ; for ( java . lang . String line : datawave . query . jexl . visitors . PrintingVisitor . formattedQueryStringList ( script ) ) { datawave . query . jexl . visitors . JexlStringBuildingVisitor . log . error ( line ) ; } datawave . query . jexl . visitors . JexlStringBuildingVisitor . log . error ( "" ) ; datawave . webservice . query . exception . QueryException qe = new datawave . webservice . query . exception . QueryException ( datawave . webservice . query . exception . DatawaveErrorCode . QUERY_EXECUTION_ERROR , e ) ; throw new datawave . query . exceptions . DatawaveFatalQueryException ( qe ) ; } } catch ( java . lang . StackOverflowError e ) { throw e ; } return s ; } | org . junit . Assert . assertEquals ( expectedQuery , negatedQuery )
testRefreshNodes ( ) { org . apache . hadoop . yarn . client . TestResourceManagerAdministrationProtocolPBClientImpl . resourceManager . getClientRMService ( ) ; org . apache . hadoop . yarn . server . api . protocolrecords . RefreshNodesRequest request = org . apache . hadoop . yarn . server . api . protocolrecords . RefreshNodesRequest . newInstance ( DecommissionType . NORMAL ) ; org . apache . hadoop . yarn . server . api . protocolrecords . RefreshNodesResponse response = org . apache . hadoop . yarn . client . TestResourceManagerAdministrationProtocolPBClientImpl . client . refreshNodes ( request ) ; "<AssertPlaceHolder>" ; } refreshNodes ( boolean ) { org . apache . hadoop . yarn . server . api . ResourceManagerAdministrationProtocol adminProtocol = createAdminProtocol ( ) ; org . apache . hadoop . yarn . server . api . protocolrecords . RefreshNodesRequest request = org . apache . hadoop . yarn . server . api . protocolrecords . RefreshNodesRequest . newInstance ( ( graceful ? org . apache . hadoop . yarn . api . records . DecommissionType . GRACEFUL : org . apache . hadoop . yarn . api . records . DecommissionType . NORMAL ) ) ; adminProtocol . refreshNodes ( request ) ; return 0 ; } | org . junit . Assert . assertNotNull ( response ) |
testGetC ( ) { org . openscience . cdk . interfaces . ICrystal crystal = ( ( org . openscience . cdk . interfaces . ICrystal ) ( newChemObject ( ) ) ) ; crystal . setC ( new javax . vecmath . Vector3d ( 1.0 , 2.0 , 3.0 ) ) ; javax . vecmath . Vector3d a = crystal . getC ( ) ; "<AssertPlaceHolder>" ; } getC ( ) { return cAxis ; } | org . junit . Assert . assertNotNull ( a ) |
testSubtract ( ) { java . util . List < java . lang . Integer > minuend = java . util . Arrays . asList ( new java . lang . Integer [ ] { 2 , 2 , 2 } ) ; java . util . List < java . lang . Integer > subtrahend = java . util . Arrays . asList ( new java . lang . Integer [ ] { 0 , 1 , 2 } ) ; java . util . List < java . lang . Integer > difference = java . util . Arrays . asList ( new java . lang . Integer [ ] { 2 , 1 , 0 } ) ; java . util . List < java . lang . Integer > result = org . numenta . nupic . util . ArrayUtils . subtract ( subtrahend , minuend ) ; "<AssertPlaceHolder>" ; } subtract ( int [ ] , int [ ] ) { int [ ] retVal = new int [ minuend . length ] ; for ( int i = 0 ; i < ( minuend . length ) ; i ++ ) { retVal [ i ] = ( minuend [ i ] ) - ( subtrahend [ i ] ) ; } return retVal ; } | org . junit . Assert . assertEquals ( difference , result ) |
test_convert_withSubclassedConverterInstance ( ) { com . github . dozermapper . core . Mapper mapper = com . github . dozermapper . core . DozerBeanMapperBuilder . create ( ) . withMappingFiles ( "mappings/customConverterMapperAware.xml" ) . withCustomConverter ( new com . github . dozermapper . core . functional_tests . CustomConverterMapperAwareTest . Converter ( ) { @ com . github . dozermapper . core . functional_tests . Override public java . util . Map convertTo ( java . util . List source , java . util . Map destination ) { return new java . util . HashMap ( ) { { put ( "foo" , "bar" ) ; } } ; } } ) . build ( ) ; java . util . HashMap result = mapper . map ( new java . util . ArrayList < java . lang . String > ( ) , java . util . HashMap . class ) ; "<AssertPlaceHolder>" ; } get ( KeyType ) { if ( key == null ) { throw new java . lang . IllegalArgumentException ( "Key<sp>cannot<sp>be<sp>null" ) ; } com . github . dozermapper . core . cache . CacheEntry < KeyType , ValueType > result = cacheMap . get ( key ) ; if ( result == null ) { return null ; } else { return result . getValue ( ) ; } } | org . junit . Assert . assertEquals ( "bar" , result . get ( "foo" ) ) |
testSerialization ( ) { org . jfree . chart . renderer . xy . DeviationRenderer r1 = new org . jfree . chart . renderer . xy . DeviationRenderer ( ) ; org . jfree . chart . renderer . xy . DeviationRenderer r2 = ( ( org . jfree . chart . renderer . xy . DeviationRenderer ) ( org . jfree . chart . TestUtilities . serialised ( r1 ) ) ) ; "<AssertPlaceHolder>" ; } serialised ( java . lang . Object ) { java . lang . Object result = null ; java . io . ByteArrayOutputStream buffer = new java . io . ByteArrayOutputStream ( ) ; java . io . ObjectOutput out ; try { out = new java . io . ObjectOutputStream ( buffer ) ; out . writeObject ( original ) ; out . close ( ) ; java . io . ObjectInput in = new java . io . ObjectInputStream ( new java . io . ByteArrayInputStream ( buffer . toByteArray ( ) ) ) ; result = in . readObject ( ) ; in . close ( ) ; } catch ( java . io . IOException e ) { throw new java . lang . RuntimeException ( e ) ; } catch ( java . lang . ClassNotFoundException e ) { throw new java . lang . RuntimeException ( e ) ; } return result ; } | org . junit . Assert . assertEquals ( r1 , r2 ) |
testSerialization ( ) { final org . apache . logging . log4j . core . impl . JdkMapAdapterStringMap original = new org . apache . logging . log4j . core . impl . JdkMapAdapterStringMap ( ) ; original . putValue ( "a" , "avalue" ) ; original . putValue ( "B" , "Bvalue" ) ; original . putValue ( "3" , "3value" ) ; final byte [ ] binary = serialize ( original ) ; final org . apache . logging . log4j . core . impl . JdkMapAdapterStringMap copy = deserialize ( binary ) ; "<AssertPlaceHolder>" ; } deserialize ( byte [ ] ) { final java . io . ByteArrayInputStream inArr = new java . io . ByteArrayInputStream ( binary ) ; try ( final java . io . ObjectInputStream in = new org . apache . logging . log4j . util . FilteredObjectInputStream ( inArr ) ) { final org . apache . logging . log4j . util . SortedArrayStringMap result = ( ( org . apache . logging . log4j . util . SortedArrayStringMap ) ( in . readObject ( ) ) ) ; return result ; } } | org . junit . Assert . assertEquals ( original , copy ) |
shouldNotFailWhenRemovingAndAddingLabelsInOneBatch ( ) { java . lang . String jsonString = new org . neo4j . server . rest . PrettyJSON ( ) . array ( ) . object ( ) . key ( "chef" 3 ) . value ( "POST" ) . key ( "to" ) . value ( "node" ) . key ( "chef" 1 ) . value ( 0 ) . key ( "chef" 2 ) . object ( ) . key ( "key" ) . value ( "name" ) . key ( "value" ) . value ( "Alice" ) . endObject ( ) . endObject ( ) . object ( ) . key ( "chef" 3 ) . value ( "POST" ) . key ( "to" ) . value ( "chef" 0 ) . key ( "chef" 1 ) . value ( 1 ) . key ( "chef" 2 ) . array ( ) . value ( "expert" ) . value ( "chef" 4 ) . endArray ( ) . endObject ( ) . object ( ) . key ( "chef" 3 ) . value ( "chef" 5 ) . key ( "to" ) . value ( "chef" 0 ) . key ( "chef" 1 ) . value ( 2 ) . key ( "chef" 2 ) . array ( ) . value ( "novice" ) . value ( "chef" ) . endArray ( ) . endObject ( ) . endArray ( ) . toString ( ) ; org . neo4j . server . rest . JaxRsResponse response = org . neo4j . server . rest . RestRequest . req ( ) . post ( batchUri ( ) , jsonString ) ; "<AssertPlaceHolder>" ; } getStatus ( ) { return status ; } | org . junit . Assert . assertEquals ( 200 , response . getStatus ( ) ) |
testManyConnectionsBlocking ( ) { pool . setMaxTotal ( 10 ) ; pool . setBlockWhenExhausted ( true ) ; java . util . concurrent . CountDownLatch counter = new java . util . concurrent . CountDownLatch ( 10000 ) ; for ( int i = 0 ; i < 100 ; i ++ ) { org . apache . directory . shared . client . api . LdapConnectionPoolTest . ConnectionThread thread = new org . apache . directory . shared . client . api . LdapConnectionPoolTest . ConnectionThread ( counter ) ; thread . start ( ) ; } boolean result = counter . await ( 100 , TimeUnit . SECONDS ) ; "<AssertPlaceHolder>" ; } start ( ) { } | org . junit . Assert . assertTrue ( result ) |
testGetTitle_1 ( ) { org . jinstagram . entity . oembed . OembedInformation fixture = new org . jinstagram . entity . oembed . OembedInformation ( ) ; fixture . setVersion ( "" ) ; fixture . setTitle ( "" ) ; fixture . setProviderUrl ( "" ) ; fixture . setUrl ( "" ) ; fixture . setAuthorName ( "" ) ; fixture . setHeight ( "" ) ; fixture . setMediaId ( "" ) ; fixture . setProviderName ( "" ) ; fixture . setType ( "" ) ; fixture . setWidth ( "" ) ; fixture . setAuthorUrl ( "" ) ; java . lang . String result = fixture . getTitle ( ) ; "<AssertPlaceHolder>" ; } getTitle ( ) { return title ; } | org . junit . Assert . assertEquals ( "" , result ) |
testNormalizedStringColumnSizeTooBig ( ) { original . setName ( "name" ) ; original . setColumnType ( ColumnType . STRING ) ; original . setEnumValues ( null ) ; original . setDefaultValue ( "123" ) ; original . setMaximumSize ( ( ( org . sagebionetworks . table . cluster . utils . ColumnConstants . MAX_ALLOWED_STRING_SIZE ) + 1 ) ) ; try { org . sagebionetworks . repo . model . dbo . persistence . table . ColumnModelUtils . createNormalizedClone ( original , org . sagebionetworks . StackConfigurationSingleton . singleton ( ) . getTableMaxEnumValues ( ) ) ; org . junit . Assert . fail ( "Should<sp>have<sp>failed<sp>as<sp>the<sp>size<sp>is<sp>too<sp>large" ) ; } catch ( java . lang . IllegalArgumentException e ) { "<AssertPlaceHolder>" ; } } getMessage ( ) { return message ; } | org . junit . Assert . assertTrue ( e . getMessage ( ) . contains ( ColumnConstants . MAX_ALLOWED_STRING_SIZE . toString ( ) ) ) |
testSourcePatternNoMatch ( ) { com . box . l10n . mojito . cli . filefinder . file . POFileType potFileType = new com . box . l10n . mojito . cli . filefinder . file . POFileType ( ) ; com . box . l10n . mojito . cli . filefinder . FilePattern sourceFilePattern = potFileType . getSourceFilePattern ( ) ; java . util . regex . Matcher matcher = sourceFilePattern . getPattern ( ) . matcher ( "/parent/fr_FR/LC_MESSAGES/messages.po" ) ; "<AssertPlaceHolder>" ; } getPattern ( ) { return pattern ; } | org . junit . Assert . assertFalse ( matcher . matches ( ) ) |
totalValueShouldBeEqualToSumOfAllFundsValues ( ) { expect ( fundA . getRegisterX ( ) ) . andStubReturn ( regAX ) ; expect ( fundA . getRegisterY ( ) ) . andStubReturn ( regAY ) ; expect ( fundB . getRegisterX ( ) ) . andStubReturn ( regBX ) ; expect ( fundB . getRegisterY ( ) ) . andStubReturn ( regBY ) ; expect ( regAX . getNbOfUnits ( ) ) . andStubReturn ( NB_OF_UNITS_AX ) ; expect ( regAY . getNbOfUnits ( ) ) . andStubReturn ( NB_OF_UNITS_AY ) ; expect ( regBX . getNbOfUnits ( ) ) . andStubReturn ( NB_OF_UNITS_BX ) ; expect ( regBY . getNbOfUnits ( ) ) . andStubReturn ( NB_OF_UNITS_BY ) ; expect ( fundA . getCurrentValue ( ) ) . andStubReturn ( currentValueA ) ; expect ( fundB . getCurrentValue ( ) ) . andStubReturn ( currentValueB ) ; expect ( currentValueA . getValue ( ) ) . andStubReturn ( FUND_A_VALUE ) ; expect ( currentValueB . getValue ( ) ) . andStubReturn ( FUND_B_VALUE ) ; client . addFund ( fundA ) ; client . addFund ( fundB ) ; replay ( fundA , fundB , regAX , regAY , regBX , regBY , currentValueA , currentValueB ) ; "<AssertPlaceHolder>" ; } getValueOfAllFunds ( ) { java . math . BigDecimal value = java . math . BigDecimal . ZERO ; for ( com . practicalunittesting . chp10 . proc_vs_oo . oo . IFund f : funds ) { value = value . add ( f . getValue ( ) ) ; } return value ; } | org . junit . Assert . assertEquals ( java . math . BigDecimal . valueOf ( ( ( ( 5 + 1 ) * 3 ) + ( ( 4 + 1 ) * 2 ) ) ) , client . getValueOfAllFunds ( ) ) |
gettingAResourceByANonAuthenticatedUser ( ) { try { com . sun . jersey . multipart . FormDataMultiPart form = new com . sun . jersey . multipart . FormDataMultiPart ( ) ; form . field ( "fileName" , "/Shared/marketing/my_test_document.txt" ) ; form . field ( "This<sp>test<sp>is<sp>trying<sp>to<sp>simulate<sp>the<sp>update<sp>of<sp>a<sp>content" 0 , "This<sp>test<sp>is<sp>trying<sp>to<sp>simulate<sp>the<sp>update<sp>of<sp>a<sp>content" 1 ) ; form . field ( "fileTitle" , "Upload<sp>test" ) ; form . field ( "fileDescription" , "This<sp>test<sp>is<sp>trying<sp>to<sp>simulate<sp>the<sp>update<sp>of<sp>a<sp>content" ) ; java . lang . String content = "This<sp>is<sp>a<sp>binary<sp>content" ; com . sun . jersey . multipart . FormDataBodyPart fdp = new com . sun . jersey . multipart . FormDataBodyPart ( "content" , new java . io . ByteArrayInputStream ( content . getBytes ( Charsets . UTF_8 ) ) , javax . ws . rs . core . MediaType . APPLICATION_OCTET_STREAM_TYPE ) ; form . bodyPart ( fdp ) ; resource ( ) . path ( ( ( aResourceURI ( ) ) + ( org . silverpeas . util . URLUtils . encodePathSegment ( "test.pdf" ) ) ) ) . type ( javax . ws . rs . core . MediaType . MULTIPART_FORM_DATA ) . post ( form ) ; org . junit . Assert . fail ( "A<sp>non<sp>authenticated<sp>user<sp>shouldn't<sp>access<sp>the<sp>resource" ) ; } catch ( com . sun . jersey . api . client . UniformInterfaceException ex ) { int receivedStatus = ex . getResponse ( ) . getStatus ( ) ; int unauthorized = Status . UNAUTHORIZED . getStatusCode ( ) ; "<AssertPlaceHolder>" ; } } is ( T ) { return java . util . Objects . equals ( this . value , value ) ; } | org . junit . Assert . assertThat ( receivedStatus , org . hamcrest . Matchers . is ( unauthorized ) ) |
testLimitedQueue_EmptyQueue ( ) { codeine . utils . LimitedQueue < java . lang . String > q = new codeine . utils . LimitedQueue < java . lang . String > ( 1 ) ; "<AssertPlaceHolder>" ; } isEmpty ( ) { return ( original . size ( ) ) == 0 ; } | org . junit . Assert . assertTrue ( q . isEmpty ( ) ) |
testSerialization ( ) { org . jfree . data . xy . VectorSeries s1 = new org . jfree . data . xy . VectorSeries ( "Series" ) ; s1 . add ( 1.0 , 1.1 , 1.2 , 1.3 ) ; org . jfree . data . xy . VectorSeriesCollection c1 = new org . jfree . data . xy . VectorSeriesCollection ( ) ; c1 . addSeries ( s1 ) ; org . jfree . data . xy . VectorSeriesCollection c2 = ( ( org . jfree . data . xy . VectorSeriesCollection ) ( org . jfree . chart . TestUtils . serialised ( c1 ) ) ) ; "<AssertPlaceHolder>" ; } serialised ( java . lang . Object ) { java . lang . Object result = null ; java . io . ByteArrayOutputStream buffer = new java . io . ByteArrayOutputStream ( ) ; java . io . ObjectOutput out ; try { out = new java . io . ObjectOutputStream ( buffer ) ; out . writeObject ( original ) ; out . close ( ) ; java . io . ObjectInput in = new java . io . ObjectInputStream ( new java . io . ByteArrayInputStream ( buffer . toByteArray ( ) ) ) ; result = in . readObject ( ) ; in . close ( ) ; } catch ( java . io . IOException e ) { throw new java . lang . RuntimeException ( e ) ; } catch ( java . lang . ClassNotFoundException e ) { throw new java . lang . RuntimeException ( e ) ; } return result ; } | org . junit . Assert . assertEquals ( c1 , c2 ) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.