Columns: input (string lengths 28 to 18.7k) and output (string lengths 39 to 1.69k).
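Each row below pairs a tokenized JUnit test method whose assertion has been masked with the quoted token "<AssertPlaceHolder>" (the input column, with "<sp>" standing in for spaces inside string literals and what appears to be the focal method appended after the test body) with the assert statement that fills the mask (the output column). The following is a minimal sketch, assuming exactly that encoding, of how a pair could be spliced back together and detokenized; the class and method names here are illustrative only and are not part of the dataset.

// Minimal sketch: splice an output assertion back into its input test body.
// Assumptions: the placeholder appears as the quoted token "<AssertPlaceHolder>"
// and "<sp>" encodes a literal space inside string constants.
public final class AssertionPairSplicer {

    static String splice(String input, String output) {
        // Replace the quoted placeholder with the assertion expression.
        String merged = input.replace("\"<AssertPlaceHolder>\"", output);
        // Undo the space marker used inside string literals.
        return merged.replace("<sp>", " ");
    }

    public static void main(String[] args) {
        // Sample pair taken from the rows below (the "increment" counter test).
        String input = "increment ( ) { counter . reset ( ) ; counter . increment ( ) ; \"<AssertPlaceHolder>\" ; }";
        String output = "org . junit . Assert . assertEquals ( counter . getValue ( ) , 1 )";
        System.out.println(splice(input, output));
    }
}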
peekTo_should_stop_at_end_of_input ( ) { org . sonar . sslr . channel . CodeReader reader = new org . sonar . sslr . channel . CodeReader ( "foo" ) ; java . lang . StringBuilder result = new java . lang . StringBuilder ( ) ; reader . peekTo ( ( i ) -> false , result ) ; "<AssertPlaceHolder>" ; } toString ( ) { return ( ( getType ( ) ) + ":<sp>" ) + ( getValue ( ) ) ; }
org . junit . Assert . assertEquals ( "foo" , result . toString ( ) )
shouldPersist ( ) { net . rrm . ehour . persistence . user . dao . UserDepartment org = net . rrm . ehour . persistence . user . dao . UserDepartmentObjectMother . createUserDepartment ( ) ; net . rrm . ehour . persistence . user . dao . User user = net . rrm . ehour . persistence . user . dao . UserObjectMother . createUser ( ) ; user . setUserId ( 5 ) ; net . rrm . ehour . persistence . user . dao . ProjectAssignment assignment = new net . rrm . ehour . persistence . user . dao . ProjectAssignment ( ) ; assignment . setUser ( user ) ; assignment . setAssignmentId ( 1 ) ; assignment . setProject ( new net . rrm . ehour . persistence . user . dao . Project ( 1 ) ) ; assignment . setAssignmentType ( new net . rrm . ehour . persistence . user . dao . ProjectAssignmentType ( net . rrm . ehour . util . EhourConstants . ASSIGNMENT_DATE ) ) ; java . util . Set < net . rrm . ehour . persistence . user . dao . ProjectAssignment > assignments = com . google . common . collect . Sets . newHashSet ( assignment ) ; user . setUserDepartment ( org ) ; user . setProjectAssignments ( assignments ) ; userDao . persist ( user ) ; "<AssertPlaceHolder>" ; } getUserId ( ) { return this . userId ; }
org . junit . Assert . assertNotNull ( user . getUserId ( ) )
testSameModeActivationSizes ( ) { int inH = 3 ; int inW = 4 ; int inDepth = 3 ; int minibatch = 5 ; int sH = 2 ; int sW = 2 ; int kH = 3 ; int kW = 3 ; org . deeplearning4j . convolution . org . deeplearning4j . nn [ ] l = new org . deeplearning4j . nn . conf . layers . Layer [ 2 ] ; l [ 0 ] = new org . deeplearning4j . nn . conf . layers . ConvolutionLayer . Builder ( ) . nOut ( 4 ) . kernelSize ( kH , kW ) . stride ( sH , sW ) . build ( ) ; l [ 1 ] = new org . deeplearning4j . nn . conf . layers . SubsamplingLayer . Builder ( ) . kernelSize ( kH , kW ) . stride ( sH , sW ) . build ( ) ; for ( int i = 0 ; i < ( l . length ) ; i ++ ) { org . deeplearning4j . nn . conf . MultiLayerConfiguration conf = new org . deeplearning4j . nn . conf . NeuralNetConfiguration . Builder ( ) . convolutionMode ( ConvolutionMode . Same ) . list ( ) . layer ( 0 , l [ i ] ) . layer ( 1 , new org . deeplearning4j . nn . conf . layers . OutputLayer . Builder ( ) . nOut ( 3 ) . build ( ) ) . setInputType ( org . deeplearning4j . nn . conf . inputs . InputType . convolutional ( inH , inW , inDepth ) ) . build ( ) ; org . deeplearning4j . nn . multilayer . MultiLayerNetwork net = new org . deeplearning4j . nn . multilayer . MultiLayerNetwork ( conf ) ; net . init ( ) ; org . nd4j . linalg . api . ndarray . INDArray inData = org . nd4j . linalg . factory . Nd4j . create ( minibatch , inDepth , inH , inW ) ; java . util . List < org . nd4j . linalg . api . ndarray . INDArray > activations = net . feedForward ( inData ) ; org . nd4j . linalg . api . ndarray . INDArray actL0 = activations . get ( 1 ) ; int outH = ( ( int ) ( java . lang . Math . ceil ( ( inH / ( ( double ) ( sH ) ) ) ) ) ) ; int outW = ( ( int ) ( java . lang . Math . ceil ( ( inW / ( ( double ) ( sW ) ) ) ) ) ) ; System . out . println ( java . util . Arrays . toString ( actL0 . shape ( ) ) ) ; "<AssertPlaceHolder>" ; } } shape ( ) { return sameDiff . shape ( this ) ; }
org . junit . Assert . assertArrayEquals ( new long [ ] { minibatch , i == 0 ? 4 : inDepth , outH , outW } , actL0 . shape ( ) )
testFixed64 ( ) { java . util . Random random = org . apache . trevni . TestUtil . createRandom ( ) ; org . apache . trevni . OutputBuffer out = new org . apache . trevni . OutputBuffer ( ) ; for ( int i = 0 ; i < ( org . apache . trevni . TestIOBuffers . COUNT ) ; i ++ ) out . writeFixed64 ( random . nextLong ( ) ) ; org . apache . trevni . InputBuffer in = new org . apache . trevni . InputBuffer ( new org . apache . trevni . InputBytes ( out . toByteArray ( ) ) ) ; random = org . apache . trevni . TestUtil . createRandom ( ) ; for ( int i = 0 ; i < ( org . apache . trevni . TestIOBuffers . COUNT ) ; i ++ ) "<AssertPlaceHolder>" ; } readFixed64 ( ) { return ( ( readFixed32 ( ) ) & 4294967295L ) | ( ( ( long ) ( readFixed32 ( ) ) ) << 32 ) ; }
org . junit . Assert . assertEquals ( random . nextLong ( ) , in . readFixed64 ( ) )
testNavigateToNonExistingStep ( ) { java . net . URI resource = getClass ( ) . getResource ( "/corrupted/flow_navigate_to_non_existing_step.sl" ) . toURI ( ) ; io . cloudslang . lang . compiler . modeller . result . ExecutableModellingResult result = compiler . preCompileSource ( io . cloudslang . lang . compiler . SlangSource . fromFile ( resource ) ) ; "<AssertPlaceHolder>" ; exception . expect ( io . cloudslang . lang . compiler . RuntimeException . class ) ; exception . expectMessage ( ( "Failed<sp>to<sp>compile<sp>step:<sp>Step1.<sp>" + ( "The<sp>step/result<sp>name:<sp>non_existing_step<sp>of<sp>navigation:<sp>" + "SUCCESS<sp>-><sp>non_existing_step<sp>is<sp>missing" ) ) ) ; throw result . getErrors ( ) . get ( 0 ) ; } getErrors ( ) { return errors ; }
org . junit . Assert . assertTrue ( ( ( result . getErrors ( ) . size ( ) ) > 0 ) )
shouldStartAllMockServices ( ) { int totalMockService = ( stubMockAsWarServlet . project . getRestMockServiceCount ( ) ) + ( stubMockAsWarServlet . project . getMockServiceCount ( ) ) ; stubMockAsWarServlet . service ( reqeust , response ) ; com . eviware . soapui . model . mock . MockRunner [ ] mockRunners = ( ( com . eviware . soapui . mockaswar . MockAsWarServlet . MockServletSoapUICore ) ( stubMockAsWarServlet . getMockServletCore ( ) ) ) . getMockRunners ( ) ; "<AssertPlaceHolder>" ; } getMockRunners ( ) { return mockRunners . toArray ( new com . eviware . soapui . model . mock . MockRunner [ mockRunners . size ( ) ] ) ; }
org . junit . Assert . assertThat ( mockRunners . length , org . hamcrest . core . Is . is ( totalMockService ) )
testCannotTurnTrueAfterTimeout ( ) { long start = java . lang . System . currentTimeMillis ( ) ; final int [ ] time = new int [ 1 ] ; com . thoughtworks . selenium . condition . JUnitConditionRunner conditionRunner1 = new com . thoughtworks . selenium . condition . JUnitConditionRunner ( null , 0 , 100 , 500 ) ; try { conditionRunner1 . waitFor ( new com . thoughtworks . selenium . condition . Condition ( ) { @ com . thoughtworks . selenium . condition . Override public boolean isTrue ( com . thoughtworks . selenium . condition . ConditionRunner . Context runner ) { return ( ( time [ 0 ] ) ++ ) == 52 ; } } ) ; org . junit . Assert . fail ( "the<sp>condition<sp>should<sp>have<sp>failed" ) ; } catch ( java . lang . AssertionError expected ) { long l = ( java . lang . System . currentTimeMillis ( ) ) - start ; "<AssertPlaceHolder>" ; } } fail ( java . lang . String ) { throw new java . lang . AssertionError ( message ) ; }
org . junit . Assert . assertTrue ( ( l >= 500 ) )
emptyHMapReusesInstance ( ) { "<AssertPlaceHolder>" ; } emptyHMap ( ) { return com . jnape . palatable . lambda . adt . hmap . HMap . EMPTY ; }
org . junit . Assert . assertSame ( com . jnape . palatable . lambda . adt . hmap . HMap . emptyHMap ( ) , com . jnape . palatable . lambda . adt . hmap . HMap . emptyHMap ( ) )
test_tracking_settings_google_analytics_get ( ) { com . sendgrid . SendGrid sg = new com . sendgrid . SendGrid ( "SENDGRID_API_KEY" , true ) ; sg . setHost ( "localhost:4010" ) ; sg . addRequestHeader ( "X-Mock" , "200" ) ; com . sendgrid . Request request = new com . sendgrid . Request ( ) ; request . setMethod ( Method . GET ) ; request . setEndpoint ( "tracking_settings/google_analytics" ) ; com . sendgrid . Response response = sg . api ( request ) ; "<AssertPlaceHolder>" ; } api ( com . sendgrid . Request ) { com . sendgrid . Request req = new com . sendgrid . Request ( ) ; req . setMethod ( request . getMethod ( ) ) ; req . setBaseUri ( this . host ) ; req . setEndpoint ( ( ( ( "/" + ( version ) ) + "/" ) + ( request . getEndpoint ( ) ) ) ) ; req . setBody ( request . getBody ( ) ) ; for ( Map . Entry < java . lang . String , java . lang . String > header : this . requestHeaders . entrySet ( ) ) { req . addHeader ( header . getKey ( ) , header . getValue ( ) ) ; } for ( Map . Entry < java . lang . String , java . lang . String > queryParam : request . getQueryParams ( ) . entrySet ( ) ) { req . addQueryParam ( queryParam . getKey ( ) , queryParam . getValue ( ) ) ; } return makeCall ( req ) ; }
org . junit . Assert . assertEquals ( 200 , response . getStatusCode ( ) )
testRemoveFromDoubleEmptyArray ( ) { double [ ] array = new double [ ] { } ; array = com . liferay . portal . kernel . util . ArrayUtil . remove ( array , 3.0 ) ; "<AssertPlaceHolder>" ; } isEmpty ( boolean [ ] ) { return com . liferay . portal . kernel . util . ArrayUtil . isEmpty ( array ) ; }
org . junit . Assert . assertTrue ( com . liferay . portal . kernel . util . ArrayUtil . isEmpty ( array ) )
testEvaluateIEvent1 ( ) { com . huawei . streaming . expression . MethodExpression methodExpression = new com . huawei . streaming . expression . MethodExpression ( new java . lang . String ( "abcdefg" ) , "substring" , new com . huawei . streaming . expression . IExpression [ ] { new com . huawei . streaming . expression . ConstExpression ( 0 ) , new com . huawei . streaming . expression . ConstExpression ( 1 ) } ) ; java . lang . String actual = ( ( java . lang . String ) ( methodExpression . evaluate ( new com . huawei . streaming . event . TupleEvent ( ) ) ) ) ; java . lang . String expected = "a" ; "<AssertPlaceHolder>" ; } evaluate ( com . huawei . streaming . event . IEvent [ ] ) { if ( ( null == eventsPerStream ) || ( 0 == ( eventsPerStream . length ) ) ) { com . huawei . streaming . expression . ArithmeticExpression . LOG . error ( "Streams<sp>events<sp>are<sp>null." ) ; throw new com . huawei . streaming . exception . StreamingRuntimeException ( "Streams<sp>events<sp>are<sp>null." ) ; } com . huawei . streaming . expression . IExpression leftExpr = getLeftExpr ( ) ; com . huawei . streaming . expression . IExpression rightExpr = getRightExpr ( ) ; java . lang . Object lo = leftExpr . evaluate ( eventsPerStream ) ; java . lang . Object ro = rightExpr . evaluate ( eventsPerStream ) ; return compute ( lo , ro ) ; }
org . junit . Assert . assertEquals ( expected , actual )
withTwoNodes ( ) { treegraph . BinaryTreeNode root = new treegraph . BinaryTreeNode ( 1 ) ; root . left = new treegraph . BinaryTreeNode ( 2 ) ; "<AssertPlaceHolder>" ; } isBalanced ( treegraph . BinaryTreeNode ) { return ( isBalancedWithHeight ( root ) ) != ( - 1 ) ; }
org . junit . Assert . assertTrue ( s . isBalanced ( root ) )
testCounterDoesNotFailOperationsWhenNoMetricsContainerPresent ( ) { org . apache . beam . sdk . metrics . MetricsEnvironment . setCurrentContainer ( null ) ; "<AssertPlaceHolder>" ; java . util . HashMap < java . lang . String , java . lang . String > labels = new java . util . HashMap < java . lang . String , java . lang . String > ( ) ; java . lang . String urn = MonitoringInfoConstants . Urns . ELEMENT_COUNT ; org . apache . beam . runners . core . metrics . MonitoringInfoMetricName name = org . apache . beam . runners . core . metrics . MonitoringInfoMetricName . named ( urn , labels ) ; org . apache . beam . sdk . metrics . Counter counter = org . apache . beam . runners . core . metrics . LabeledMetrics . counter ( name ) ; counter . inc ( ) ; counter . inc ( 5L ) ; counter . dec ( ) ; counter . dec ( 5L ) ; } getCurrentContainer ( ) { org . apache . beam . sdk . metrics . MetricsContainer container = org . apache . beam . sdk . metrics . MetricsEnvironment . CONTAINER_FOR_THREAD . get ( ) ; if ( ( container == null ) && ( org . apache . beam . sdk . metrics . MetricsEnvironment . REPORTED_MISSING_CONTAINER . compareAndSet ( false , true ) ) ) { if ( org . apache . beam . sdk . metrics . MetricsEnvironment . METRICS_SUPPORTED . get ( ) ) { org . apache . beam . sdk . metrics . MetricsEnvironment . LOG . error ( ( "Unable<sp>to<sp>update<sp>metrics<sp>on<sp>the<sp>current<sp>thread.<sp>" + "Most<sp>likely<sp>caused<sp>by<sp>using<sp>metrics<sp>outside<sp>the<sp>managed<sp>work-execution<sp>thread." ) ) ; } else { org . apache . beam . sdk . metrics . MetricsEnvironment . LOG . warn ( "Reporting<sp>metrics<sp>are<sp>not<sp>supported<sp>in<sp>the<sp>current<sp>execution<sp>environment." ) ; } } return container ; }
org . junit . Assert . assertNull ( org . apache . beam . sdk . metrics . MetricsEnvironment . getCurrentContainer ( ) )
testFindDocumentsByCrisisID ( ) { try { qa . qcri . aidr . dbmanager . ejb . remote . facade . imp . TestDocumentResourceFacadeImp . documentDTO . setIsEvaluationSet ( true ) ; qa . qcri . aidr . dbmanager . ejb . remote . facade . imp . TestDocumentResourceFacadeImp . entityManager . getTransaction ( ) . begin ( ) ; qa . qcri . aidr . dbmanager . ejb . remote . facade . imp . TestDocumentResourceFacadeImp . documentDTO = qa . qcri . aidr . dbmanager . ejb . remote . facade . imp . TestDocumentResourceFacadeImp . documentResourceFacadeImp . addDocument ( qa . qcri . aidr . dbmanager . ejb . remote . facade . imp . TestDocumentResourceFacadeImp . documentDTO ) ; qa . qcri . aidr . dbmanager . ejb . remote . facade . imp . TestDocumentResourceFacadeImp . entityManager . getTransaction ( ) . commit ( ) ; java . lang . Long crisisId = qa . qcri . aidr . dbmanager . ejb . remote . facade . imp . TestDocumentResourceFacadeImp . documentDTO . getCrisisDTO ( ) . getCrisisID ( ) ; java . util . List < qa . qcri . aidr . dbmanager . dto . DocumentDTO > result = qa . qcri . aidr . dbmanager . ejb . remote . facade . imp . TestDocumentResourceFacadeImp . documentResourceFacadeImp . findDocumentsByCrisisID ( crisisId ) ; "<AssertPlaceHolder>" ; } catch ( qa . qcri . aidr . common . exception . PropertyNotSetException ex ) { qa . qcri . aidr . dbmanager . ejb . remote . facade . imp . TestDocumentResourceFacadeImp . logger . error ( ( "PropertyNotSetException<sp>while<sp>finding<sp>document<sp>by<sp>crisisId<sp>" + ( ex . getMessage ( ) ) ) ) ; org . junit . Assert . fail ( "testFindDocumentsByCrisisID<sp>failed" ) ; } } getCrisisDTO ( ) { return this . crisisDTO ; }
org . junit . Assert . assertEquals ( crisisId , result . get ( 0 ) . getCrisisDTO ( ) . getCrisisID ( ) )
testRecoverWith ( ) { int value = com . annimon . stream . Exceptional . of ( com . annimon . stream . ExceptionalTest . ioExceptionSupplier ) . recoverWith ( new com . annimon . stream . function . Function < java . lang . Throwable , com . annimon . stream . Exceptional < java . lang . Integer > > ( ) { @ com . annimon . stream . Override public com . annimon . stream . Exceptional < java . lang . Integer > apply ( java . lang . Throwable throwable ) { return com . annimon . stream . Exceptional . of ( com . annimon . stream . ExceptionalTest . tenSupplier ) ; } } ) . get ( ) ; "<AssertPlaceHolder>" ; } get ( ) { return new java . lang . StringBuilder ( ) ; }
org . junit . Assert . assertEquals ( 10 , value )
modifySubscription_RemoveOwner ( ) { createdSubscription = createSubscription ( ) ; createdSubscription = org . oscm . ws . SubscriptionServiceWSTest . subscrServiceForCustomer . subscribeToService ( createdSubscription , org . oscm . ws . SubscriptionServiceWSTest . freeService , org . oscm . ws . SubscriptionServiceWSTest . usageLicences , null , null , new java . util . ArrayList < org . oscm . vo . VOUda > ( ) ) ; createdSubscription . setOwnerId ( null ) ; org . oscm . vo . VOSubscription modifiedSubscription = org . oscm . ws . SubscriptionServiceWSTest . subscrServiceForCustomer . modifySubscription ( createdSubscription , null , null ) ; "<AssertPlaceHolder>" ; } getOwnerId ( ) { return ownerId ; }
org . junit . Assert . assertNull ( modifiedSubscription . getOwnerId ( ) )
test_fixed_stream1 ( ) { java . math . BigDecimal testValue = new java . math . BigDecimal ( "471.1" ) ; org . omg . CORBA . Any any = setup . getClientOrb ( ) . create_any ( ) ; any . type ( setup . getClientOrb ( ) . create_fixed_tc ( ( ( short ) ( 4 ) ) , ( ( short ) ( 1 ) ) ) ) ; any . create_output_stream ( ) . write_fixed ( testValue ) ; "<AssertPlaceHolder>" ; } extract_fixed ( ) { checkExtract ( TCKind . _tk_fixed , "Cannot<sp>extract<sp>fixed" ) ; checkNull ( ) ; if ( ( value ) instanceof java . math . BigDecimal ) { return ( ( java . math . BigDecimal ) ( value ) ) ; } else if ( ( value ) instanceof org . omg . CORBA . FixedHolder ) { return ( ( org . omg . CORBA . FixedHolder ) ( value ) ) . value ; } else if ( ( value ) instanceof org . jacorb . orb . CDROutputStream ) { final org . jacorb . orb . CDRInputStream inputStream = ( ( org . jacorb . orb . CDRInputStream ) ( create_input_stream ( ) ) ) ; try { return inputStream . read_fixed ( typeCode . fixed_digits ( ) , typeCode . fixed_scale ( ) ) ; } catch ( org . omg . CORBA . TypeCodePackage . BadKind e ) { throw new org . omg . CORBA . INTERNAL ( "should<sp>not<sp>happen" ) ; } finally { inputStream . close ( ) ; } } else { throw new org . omg . CORBA . INTERNAL ( ( "Encountered<sp>unexpected<sp>type<sp>of<sp>value:<sp>" + ( value . getClass ( ) ) ) ) ; } }
org . junit . Assert . assertEquals ( testValue , any . extract_fixed ( ) )
testRegex2 ( ) { java . lang . String tags = "tag1=value1,tag2=value2,tag3=value3" ; java . lang . StringBuffer pattern = new java . lang . StringBuffer ( ) ; pattern . append ( "(^|.*,)" ) ; pattern . append ( "tag2" ) ; pattern . append ( "=" ) ; pattern . append ( "value\\d" ) ; pattern . append ( "(,.*|$)" ) ; java . util . regex . Pattern p = java . util . regex . Pattern . compile ( pattern . toString ( ) ) ; "<AssertPlaceHolder>" ; } toString ( ) { done ( ) ; java . lang . StringBuilder buf = new java . lang . StringBuilder ( ) ; buf . append ( "metric:<sp>" ) . append ( metric ) ; buf . append ( "<sp>tags:<sp>" ) . append ( tags ) ; buf . append ( "<sp>timestamp:<sp>" ) . append ( timestamp ) ; buf . append ( "<sp>count:<sp>" ) . append ( count ( ) ) ; buf . append ( "<sp>min:<sp>" ) . append ( min ( ) ) ; buf . append ( "<sp>max:<sp>" ) . append ( max ( ) ) ; buf . append ( "<sp>sum:<sp>" ) . append ( sum ( ) ) ; buf . append ( "<sp>50p:<sp>" ) . append ( getPercentile ( 50 ) ) ; buf . append ( "<sp>75p:<sp>" ) . append ( getPercentile ( 75 ) ) ; buf . append ( "<sp>99p:<sp>" ) . append ( getPercentile ( 99 ) ) ; return buf . toString ( ) ; }
org . junit . Assert . assertTrue ( p . matcher ( tags ) . matches ( ) )
testGetTitle ( ) { final java . lang . String errorKey = org . drools . workbench . screens . guided . dtable . client . resources . i18n . GuidedDecisionTableErraiConstants . SummaryPage_NewColumn ; final java . lang . String errorMessage = "Title" ; when ( translationService . format ( errorKey ) ) . thenReturn ( errorMessage ) ; final java . lang . String title = page . getTitle ( ) ; "<AssertPlaceHolder>" ; } getTitle ( ) { return translate ( GuidedDecisionTableErraiConstants . AttributeColumnPage_AddNewAttribute ) ; }
org . junit . Assert . assertEquals ( errorMessage , title )
testCheckTimeoutCommittingTimedOut ( ) { final java . lang . Exception [ ] checkTimeoutException = new com . sun . sgs . test . impl . service . transaction . Exception [ ] { null } ; com . sun . sgs . test . util . DummyTransactionParticipant participant = new com . sun . sgs . test . util . DummyTransactionParticipant ( ) { public void commit ( com . sun . sgs . service . Transaction txn ) { try { txn . checkTimeout ( ) ; } catch ( java . lang . RuntimeException e ) { checkTimeoutException [ 0 ] = e ; throw e ; } } } ; txn . join ( participant ) ; java . lang . Thread . sleep ( com . sun . sgs . test . impl . service . transaction . TestTransactionCoordinatorImpl . TIMED_OUT ) ; handle . commit ( ) ; "<AssertPlaceHolder>" ; } commit ( ) { isCommitted = true ; checkFlush ( ) ; }
org . junit . Assert . assertNull ( checkTimeoutException [ 0 ] )
testSetIpv4RouterIds ( ) { java . util . List list = new java . util . ArrayList ( ) ; list . add ( org . onlab . packet . Ip4Address . valueOf ( "1.1.1.1" ) ) ; ospfDeviceTed . setIpv4RouterIds ( list ) ; "<AssertPlaceHolder>" ; } ipv4RouterIds ( ) { return ipv4RouterIds ; }
org . junit . Assert . assertThat ( ospfDeviceTed . ipv4RouterIds ( ) . size ( ) , org . hamcrest . CoreMatchers . is ( 1 ) )
testRecursion ( ) { org . mapdb . elsa . AbstractMap . SimpleEntry b = new org . mapdb . elsa . AbstractMap . SimpleEntry ( "abcd" , null ) ; b . setValue ( b . getKey ( ) ) ; org . mapdb . elsa . AbstractMap . SimpleEntry bx = org . mapdb . elsa . ElsaSerializerBaseTest . clonePojo ( b ) ; "<AssertPlaceHolder>" ; assert ( bx . getKey ( ) ) == ( bx . getValue ( ) ) ; } clonePojo ( E ) { return ( ( E ) ( org . mapdb . elsa . ElsaSerializerBaseTest . clonePojo ( value , new org . mapdb . elsa . ElsaSerializerPojo ( ) ) ) ) ; }
org . junit . Assert . assertEquals ( bx , b )
testRidbagsUpdateDeltaAddWithCopy ( ) { com . orientechnologies . orient . core . db . ODatabaseSession db = null ; com . orientechnologies . orient . core . db . OrientDB odb = null ; try { odb = new com . orientechnologies . orient . core . db . OrientDB ( "memory:" , com . orientechnologies . orient . core . db . OrientDBConfig . defaultConfig ( ) ) ; odb . createIfNotExists ( com . orientechnologies . orient . core . record . impl . ODocumentTest . dbName , ODatabaseType . MEMORY ) ; db = odb . open ( com . orientechnologies . orient . core . record . impl . ODocumentTest . dbName , com . orientechnologies . orient . core . record . impl . ODocumentTest . defaultDbAdminCredentials , com . orientechnologies . orient . core . record . impl . ODocumentTest . defaultDbAdminCredentials ) ; com . orientechnologies . orient . core . metadata . schema . OClass claz = db . createClassIfNotExist ( "TestClass" ) ; com . orientechnologies . orient . core . record . impl . ODocument doc = new com . orientechnologies . orient . core . record . impl . ODocument ( claz ) ; java . lang . String fieldName = "testField" ; com . orientechnologies . orient . core . record . impl . ODocument first = new com . orientechnologies . orient . core . record . impl . ODocument ( claz ) ; first = db . save ( first ) ; com . orientechnologies . orient . core . record . impl . ODocument second = new com . orientechnologies . orient . core . record . impl . ODocument ( claz ) ; second = db . save ( second ) ; com . orientechnologies . orient . core . db . record . ridbag . ORidBag ridBag = new com . orientechnologies . orient . core . db . record . ridbag . ORidBag ( ) ; ridBag . add ( first ) ; ridBag . add ( second ) ; doc . field ( fieldName , ridBag , OType . LINKBAG ) ; doc = db . save ( doc ) ; com . orientechnologies . orient . core . record . impl . ODocument originalDoc = new com . orientechnologies . orient . core . record . impl . ODocument ( claz ) ; com . orientechnologies . orient . core . db . record . ridbag . ORidBag ridBagCopy = new com . orientechnologies . orient . core . db . record . ridbag . ORidBag ( ) ; ridBagCopy . add ( first ) ; ridBagCopy . add ( second ) ; originalDoc . field ( fieldName , ridBagCopy , OType . LINKBAG ) ; com . orientechnologies . orient . core . record . impl . ODocument third = new com . orientechnologies . orient . core . record . impl . ODocument ( claz ) ; third = db . save ( third ) ; ridBag = new com . orientechnologies . orient . core . db . record . ridbag . ORidBag ( ) ; ridBag . add ( first ) ; ridBag . add ( second ) ; ridBag . add ( third ) ; doc . field ( fieldName , ridBag , OType . LINKBAG ) ; com . orientechnologies . orient . core . delta . ODocumentDelta dc = doc . getDeltaFromOriginal ( ) ; dc = dc . field ( "u" ) . getValue ( ) ; originalDoc . mergeUpdateDelta ( dc ) ; com . orientechnologies . orient . core . db . record . ridbag . ORidBag mergedRidbag = originalDoc . field ( fieldName ) ; "<AssertPlaceHolder>" ; } finally { if ( db != null ) db . close ( ) ; if ( odb != null ) { odb . drop ( com . orientechnologies . orient . core . record . impl . ODocumentTest . dbName ) ; odb . close ( ) ; } } } field ( java . lang . Object ) { if ( ! ( fields . contains ( iField ) ) ) fields . add ( iField ) ; return this ; }
org . junit . Assert . assertEquals ( ridBag , mergedRidbag )
testNoOutputFolderCreation ( ) { org . eclipse . xtext . generator . OutputConfigurationProvider outputConfigurationProvider = new org . eclipse . xtext . generator . OutputConfigurationProvider ( ) { @ org . eclipse . xtext . builder . impl . Override public java . util . Set < org . eclipse . xtext . generator . OutputConfiguration > getOutputConfigurations ( ) { final java . util . Set < org . eclipse . xtext . generator . OutputConfiguration > result = super . getOutputConfigurations ( ) ; org . eclipse . xtext . generator . OutputConfiguration configuration = result . iterator ( ) . next ( ) ; configuration . setCreateOutputDirectory ( false ) ; return result ; } } ; org . eclipse . xtext . builder . preferences . BuilderPreferenceAccess . Initializer initializer = new org . eclipse . xtext . builder . preferences . BuilderPreferenceAccess . Initializer ( ) ; initializer . setOutputConfigurationProvider ( outputConfigurationProvider ) ; initializer . initialize ( preferenceStoreAccess ) ; org . eclipse . jdt . core . IJavaProject project = createJavaProject ( "foo" ) ; addNature ( project . getProject ( ) , XtextProjectHelper . NATURE_ID ) ; org . eclipse . core . resources . IFolder folder = project . getProject ( ) . getFolder ( "src" ) ; org . eclipse . core . resources . IFile file = folder . getFile ( ( "Foo" + ( F_EXT ) ) ) ; file . create ( new org . eclipse . xtext . util . StringInputStream ( "object<sp>Foo" ) , true , monitor ( ) ) ; build ( ) ; final org . eclipse . core . resources . IFile generatedFile = project . getProject ( ) . getFile ( "./src-gen/Foo.txt" ) ; "<AssertPlaceHolder>" ; } exists ( ) { return true ; }
org . junit . Assert . assertFalse ( generatedFile . exists ( ) )
increment ( ) { counter . reset ( ) ; counter . increment ( ) ; "<AssertPlaceHolder>" ; } getValue ( ) { return value ; }
org . junit . Assert . assertEquals ( counter . getValue ( ) , 1 )
getEntityDifferentUserNoRoster ( ) { org . sakaiproject . entitybroker . entityprovider . search . Search search = new org . sakaiproject . entitybroker . entityprovider . search . Search ( ) ; search . addRestriction ( new org . sakaiproject . entitybroker . entityprovider . search . Restriction ( org . sakaiproject . entitybroker . entityprovider . capabilities . CollectionResolvable . SEARCH_USER_REFERENCE , "otherUserId" ) ) ; org . sakaiproject . user . api . User user = mock ( org . sakaiproject . user . api . User . class ) ; when ( user . getId ( ) ) . thenReturn ( "otherUserId" ) ; when ( developerHelperService . getCurrentUserId ( ) ) . thenReturn ( "currentUserId" ) ; when ( userEntityProvider . findAndCheckUserId ( "otherUserId" , null ) ) . thenReturn ( "otherUserId" ) ; org . sakaiproject . site . api . Site site = mock ( org . sakaiproject . site . api . Site . class ) ; when ( site . getId ( ) ) . thenReturn ( "siteId" ) ; when ( siteService . getUserSites ( false , "otherUserId" ) ) . thenReturn ( org . sakaiproject . entitybroker . providers . Collections . singletonList ( site ) ) ; when ( siteService . allowViewRoster ( "siteId" ) ) . thenReturn ( false ) ; org . sakaiproject . entitybroker . providers . List < org . sakaiproject . entitybroker . entityprovider . extension . EntityData > entities = provider . getEntities ( null , search ) ; "<AssertPlaceHolder>" ; } size ( ) { return m_wrapper . size ( ) ; }
org . junit . Assert . assertEquals ( 0 , entities . size ( ) )
testQueryableDistinct ( ) { final java . lang . String [ ] expected = new java . lang . String [ ] { "aa" , "bb" , "-cc" , "dd" , "-ee" } ; final java . util . List < java . lang . String > actual = me . josephzhu . java8inaction . test . queryable . Queryable . of ( data ) . distinct ( ) . toList ( ) ; "<AssertPlaceHolder>" ; } toList ( ) { me . josephzhu . java8inaction . test . queryable . List < T > res = new me . josephzhu . java8inaction . test . queryable . ArrayList ( ) ; forEach ( ( e ) -> res . add ( e ) ) ; return res ; }
org . junit . Assert . assertArrayEquals ( expected , actual . toArray ( ) )
testUnknownCompressionCodec ( ) { com . streamsets . pipeline . stage . destination . hdfs . HdfsDTarget dTarget = new com . streamsets . pipeline . stage . destination . hdfs . BaseHdfsTargetIT . ForTestHdfsTarget ( ) ; configure ( dTarget ) ; dTarget . hdfsTargetConfigBean . compression = CompressionMode . OTHER ; dTarget . hdfsTargetConfigBean . otherCompression = "foo" ; com . streamsets . pipeline . stage . destination . hdfs . HdfsTarget target = ( ( com . streamsets . pipeline . stage . destination . hdfs . HdfsTarget ) ( dTarget . createTarget ( ) ) ) ; com . streamsets . pipeline . api . Target . Context context = com . streamsets . pipeline . sdk . ContextInfoCreator . createTargetContext ( com . streamsets . pipeline . stage . destination . hdfs . HdfsDTarget . class , "n" , false , OnRecordError . TO_ERROR , null ) ; "<AssertPlaceHolder>" ; } init ( com . streamsets . pipeline . stage . origin . eventhubs . Info , com . streamsets . pipeline . stage . origin . eventhubs . Context ) { java . util . List < com . streamsets . pipeline . stage . origin . eventhubs . ConfigIssue > issues = new java . util . ArrayList ( ) ; this . context = context ; consumerConfigBean . dataFormatConfig . stringBuilderPoolSize = getNumberOfThreads ( ) ; consumerConfigBean . dataFormatConfig . init ( context , consumerConfigBean . dataFormat , Groups . DATA_FORMAT . name ( ) , "dataFormatConfig" , DataFormatConstants . MAX_OVERRUN_LIMIT , issues ) ; parserFactory = consumerConfigBean . dataFormatConfig . getParserFactory ( ) ; errorQueue = new java . util . concurrent . ArrayBlockingQueue ( 100 ) ; errorList = new java . util . ArrayList ( 100 ) ; if ( ( issues . size ( ) ) == 0 ) { try { com . microsoft . azure . eventhubs . EventHubClient ehClient = eventHubCommon . createEventHubClient ( "event-hub-consumer-pool-%d" ) ; com . microsoft . azure . eventhubs . EventHubRuntimeInformation ehInfo = ehClient . getRuntimeInformation ( ) . get ( ) ; ehClient . close ( ) . get ( ) ; } catch ( java . lang . Exception e ) { issues . add ( context . createConfigIssue ( Groups . EVENT_HUB . toString ( ) , EventHubCommon . CONF_NAME_SPACE , Errors . EVENT_HUB_02 , e . getMessage ( ) ) ) ; } } return issues ; }
org . junit . Assert . assertEquals ( 1 , target . init ( null , context ) . size ( ) )
testConcurrentLeadershipOperationsBlockingSuspend ( ) { final java . util . concurrent . CompletableFuture < org . apache . flink . runtime . messages . Acknowledge > suspendedFuture = new java . util . concurrent . CompletableFuture ( ) ; org . apache . flink . runtime . jobmaster . factories . TestingJobMasterServiceFactory jobMasterServiceFactory = new org . apache . flink . runtime . jobmaster . factories . TestingJobMasterServiceFactory ( ( ) -> new org . apache . flink . runtime . jobmaster . TestingJobMasterService ( "localhost" , ( e ) -> suspendedFuture ) ) ; org . apache . flink . runtime . jobmaster . JobManagerRunner jobManagerRunner = createJobManagerRunner ( jobMasterServiceFactory ) ; jobManagerRunner . start ( ) ; leaderElectionService . isLeader ( java . util . UUID . randomUUID ( ) ) . get ( ) ; leaderElectionService . notLeader ( ) ; final java . util . concurrent . CompletableFuture < java . util . UUID > leaderFuture = leaderElectionService . isLeader ( java . util . UUID . randomUUID ( ) ) ; "<AssertPlaceHolder>" ; try { leaderFuture . get ( 1L , TimeUnit . MILLISECONDS ) ; org . junit . Assert . fail ( "Granted<sp>leadership<sp>even<sp>though<sp>the<sp>JobMaster<sp>has<sp>not<sp>been<sp>suspended." ) ; } catch ( java . util . concurrent . TimeoutException expected ) { } suspendedFuture . complete ( org . apache . flink . runtime . messages . Acknowledge . get ( ) ) ; leaderFuture . get ( ) ; } isDone ( ) { return delegate . isDone ( ) ; }
org . junit . Assert . assertThat ( leaderFuture . isDone ( ) , org . hamcrest . Matchers . is ( false ) )
testFindSSSR_IAtomContainer ( ) { org . openscience . cdk . interfaces . IAtomContainer molecule = org . openscience . cdk . templates . TestMoleculeFactory . makeAlphaPinene ( ) ; org . openscience . cdk . ringsearch . SSSRFinder sssrFinder = new org . openscience . cdk . ringsearch . SSSRFinder ( molecule ) ; org . openscience . cdk . interfaces . IRingSet ringSet = sssrFinder . findSSSR ( ) ; "<AssertPlaceHolder>" ; } getAtomContainerCount ( ) { return this . atomContainerCount ; }
org . junit . Assert . assertEquals ( 2 , ringSet . getAtomContainerCount ( ) )
hasElapsedIfCurrentClockReadingCompletelyExceedsTargetRange ( ) { expect ( mockClock . getGranularity ( ) ) . andReturn ( 1L ) . anyTimes ( ) ; expect ( mockClock . getNanoTime ( ) ) . andReturn ( new org . fishwife . jrugged . interval . DiscreteInterval ( 4L , 5L ) ) ; expect ( mockClock . getNanoTime ( ) ) . andReturn ( new org . fishwife . jrugged . interval . DiscreteInterval ( 2000L , 2015L ) ) . anyTimes ( ) ; replay ( mockClock ) ; impl . set ( 1000L , 100L ) ; impl . start ( ) ; "<AssertPlaceHolder>" ; verify ( mockClock ) ; } hasElapsed ( ) { if ( ( startTime ) == null ) return false ; return ( clock . getNanoTime ( ) . getMin ( ) ) >= ( targetEndTime . getMin ( ) ) ; }
org . junit . Assert . assertTrue ( impl . hasElapsed ( ) )
registerFirst ( ) { org . apache . apex . malhar . lib . state . spillable . SequentialSpillableIdentifierGenerator gen = new org . apache . apex . malhar . lib . state . spillable . SequentialSpillableIdentifierGenerator ( ) ; gen . register ( org . apache . apex . malhar . lib . util . TestUtils . getByte ( 0 ) ) ; byte [ ] id = gen . next ( ) ; "<AssertPlaceHolder>" ; } getByte ( int ) { com . google . common . base . Preconditions . checkArgument ( ( val <= ( Byte . MAX_VALUE ) ) ) ; return new byte [ ] { ( ( byte ) ( val ) ) } ; }
org . junit . Assert . assertArrayEquals ( org . apache . apex . malhar . lib . util . TestUtils . getByte ( 1 ) , id )
testEmpty ( ) { java . io . InputStream is = new java . io . ByteArrayInputStream ( new byte [ 0 ] ) ; byte [ ] bytes = io . grpc . internal . IoUtils . toByteArray ( is ) ; "<AssertPlaceHolder>" ; } toByteArray ( java . io . InputStream ) { java . io . ByteArrayOutputStream out = new java . io . ByteArrayOutputStream ( ) ; io . grpc . internal . IoUtils . copy ( in , out ) ; return out . toByteArray ( ) ; }
org . junit . Assert . assertEquals ( 0 , bytes . length )
test ( ) { com . examples . with . different . packagename . testcarver . DifficultDependencyClass dependency = new com . examples . with . different . packagename . testcarver . DifficultDependencyClass ( ) ; dependency . inc ( ) ; dependency . inc ( ) ; dependency . inc ( ) ; dependency . inc ( ) ; dependency . inc ( ) ; dependency . inc ( ) ; dependency . inc ( ) ; dependency . inc ( ) ; dependency . inc ( ) ; dependency . inc ( ) ; com . examples . with . different . packagename . testcarver . DifficultClassWithoutCarving foo = new com . examples . with . different . packagename . testcarver . DifficultClassWithoutCarving ( ) ; boolean result = foo . testMe ( dependency ) ; "<AssertPlaceHolder>" ; } testMe ( java . util . List ) { if ( ( list . size ( ) ) == 2 ) return true ; else return false ; }
org . junit . Assert . assertTrue ( result )
testLocalMinimumThreshold ( ) { ops . run ( net . imagej . ops . Ops . Threshold . Minimum . class , out , in , new net . imglib2 . algorithm . neighborhood . RectangleShape ( 1 , false ) , new net . imglib2 . outofbounds . OutOfBoundsMirrorFactory < net . imglib2 . type . numeric . integer . ByteType , net . imglib2 . img . Img < net . imglib2 . type . numeric . integer . ByteType > > ( net . imglib2 . outofbounds . OutOfBoundsMirrorFactory . Boundary . SINGLE ) ) ; "<AssertPlaceHolder>" ; } firstElement ( ) { return cursor ( ) . next ( ) ; }
org . junit . Assert . assertEquals ( true , out . firstElement ( ) . get ( ) )
testReadB1GreatValue ( ) { _ios . writeByte ( 245 ) ; _ios . seek ( 0 ) ; final org . esa . s1tbx . io . binary . BinaryFileReader ceosReader = new org . esa . s1tbx . io . binary . BinaryFileReader ( _ios ) ; "<AssertPlaceHolder>" ; } readB1 ( ) { final long streamPosition = stream . getStreamPosition ( ) ; try { return ( stream . readByte ( ) ) & 255 ; } catch ( java . io . IOException e ) { final java . lang . String message = java . lang . String . format ( org . esa . s1tbx . io . binary . BinaryFileReader . EM_READING_X_TYPE , "B1" ) ; throw new org . esa . s1tbx . io . binary . IllegalBinaryFormatException ( message , streamPosition , e ) ; } }
org . junit . Assert . assertEquals ( 245 , ceosReader . readB1 ( ) )
testBigNumberToStringToBigNumberWithShortLimitValues ( ) { org . pentaho . di . core . row . ValueMetaInterface numValueMeta = new org . pentaho . di . core . row . ValueMeta ( "i" , ValueMetaInterface . TYPE_BIGNUMBER ) ; numValueMeta . setDecimalSymbol ( "." ) ; numValueMeta . setGroupingSymbol ( "," ) ; java . lang . String [ ] strings = new java . lang . String [ ] { "-32768" , "32767" } ; java . math . BigDecimal [ ] values = new java . math . BigDecimal [ ] { new java . math . BigDecimal ( "-32768" ) , new java . math . BigDecimal ( "32767" ) } ; for ( int i = 0 ; i < ( values . length ) ; i ++ ) { org . pentaho . di . core . row . ValueMetaInterface strValueMeta = new org . pentaho . di . core . row . ValueMeta ( "str" , ValueMetaInterface . TYPE_STRING ) ; strValueMeta . setConversionMetadata ( numValueMeta ) ; java . math . BigDecimal stringToBigNumber = ( ( java . math . BigDecimal ) ( strValueMeta . convertDataUsingConversionMetaData ( strings [ i ] ) ) ) ; "<AssertPlaceHolder>" ; } } convertDataUsingConversionMetaData ( java . lang . Object ) { if ( ( conversionMetadata ) == null ) { throw new org . pentaho . di . core . exception . KettleValueException ( ( "API<sp>coding<sp>error:<sp>please<sp>specify<sp>the<sp>conversion<sp>metadata<sp>before<sp>attempting<sp>to<sp>convert<sp>value<sp>" + ( name ) ) ) ; } return super . convertDataUsingConversionMetaData ( data2 ) ; }
org . junit . Assert . assertEquals ( values [ i ] , stringToBigNumber )
deveObterIdComoFoiSetado ( ) { final com . fincatto . documentofiscal . nfe400 . classes . evento . manifestacaodestinatario . NFInfoEventoManifestacaoDestinatario infoEventoManifestacaoDestinatario = new com . fincatto . documentofiscal . nfe400 . classes . evento . manifestacaodestinatario . NFInfoEventoManifestacaoDestinatario ( ) ; final java . lang . String id = "hluU2zKt4QK5bEktOiGfpZw64535p2A4Z5m5egLQbMpjnCH48c1aw6" ; infoEventoManifestacaoDestinatario . setId ( id ) ; "<AssertPlaceHolder>" ; } getId ( ) { return this . id ; }
org . junit . Assert . assertEquals ( id , infoEventoManifestacaoDestinatario . getId ( ) )
testTopicListHash ( ) { final org . kaaproject . kaa . client . persistence . KaaClientState clientState = org . mockito . Mockito . mock ( org . kaaproject . kaa . client . persistence . KaaClientState . class ) ; org . mockito . Mockito . doAnswer ( new org . mockito . stubbing . Answer < java . lang . Void > ( ) { @ org . kaaproject . kaa . client . channel . Override public org . kaaproject . kaa . client . channel . Void answer ( org . mockito . invocation . InvocationOnMock invocation ) throws java . lang . Throwable { java . lang . Object [ ] args = invocation . getArguments ( ) ; org . mockito . Mockito . when ( clientState . getTopicListHash ( ) ) . thenReturn ( ( ( java . lang . Integer ) ( args [ 0 ] ) ) ) ; return null ; } } ) . when ( clientState ) . setTopicListHash ( org . mockito . Mockito . anyInt ( ) ) ; org . kaaproject . kaa . client . notification . NotificationProcessor notificationProcessor = org . mockito . Mockito . mock ( org . kaaproject . kaa . client . notification . NotificationProcessor . class ) ; org . kaaproject . kaa . common . endpoint . gen . NotificationSyncResponse response = new org . kaaproject . kaa . common . endpoint . gen . NotificationSyncResponse ( ) ; response . setResponseStatus ( SyncResponseStatus . DELTA ) ; java . util . List < org . kaaproject . kaa . common . endpoint . gen . Topic > topicList = new java . util . ArrayList ( ) ; topicList . add ( new org . kaaproject . kaa . common . endpoint . gen . Topic ( 2L , null , org . kaaproject . kaa . common . endpoint . gen . SubscriptionType . MANDATORY_SUBSCRIPTION ) ) ; topicList . add ( new org . kaaproject . kaa . common . endpoint . gen . Topic ( 1L , null , org . kaaproject . kaa . common . endpoint . gen . SubscriptionType . OPTIONAL_SUBSCRIPTION ) ) ; response . setAvailableTopics ( topicList ) ; org . kaaproject . kaa . client . channel . KaaChannelManager channelManagerMock = org . mockito . Mockito . mock ( org . kaaproject . kaa . client . channel . KaaChannelManager . class ) ; org . kaaproject . kaa . client . channel . NotificationTransport transport = new org . kaaproject . kaa . client . channel . impl . transports . DefaultNotificationTransport ( ) ; transport . setChannelManager ( channelManagerMock ) ; transport . setNotificationProcessor ( notificationProcessor ) ; transport . setClientState ( clientState ) ; transport . onNotificationResponse ( response ) ; org . kaaproject . kaa . common . endpoint . gen . NotificationSyncRequest request = transport . createNotificationRequest ( ) ; "<AssertPlaceHolder>" ; } calculateTopicListHash ( java . util . List ) { if ( topics == null ) { return org . kaaproject . kaa . client . notification . TopicListHashCalculator . NULL_LIST_HASH ; } int result = org . kaaproject . kaa . client . notification . TopicListHashCalculator . EMPTRY_LIST_HASH ; if ( ! ( topics . isEmpty ( ) ) ) { java . util . List < org . kaaproject . kaa . common . endpoint . gen . Topic > newTopics = new java . util . LinkedList ( topics ) ; java . util . Collections . sort ( newTopics , new java . util . Comparator < org . kaaproject . kaa . common . endpoint . gen . Topic > ( ) { @ org . kaaproject . kaa . client . notification . Override public int compare ( org . kaaproject . kaa . common . endpoint . gen . Topic o1 , org . kaaproject . kaa . common . endpoint . gen . Topic o2 ) { return ( o1 . getId ( ) ) < ( o2 . getId ( ) ) ? - 1 : ( o1 . getId ( ) ) > ( o2 . getId ( ) ) ? 1 : 0 ; } } ) ; for ( org . kaaproject . kaa . common . endpoint . gen . 
Topic topic : newTopics ) { long topicId = topic . getId ( ) ; int elementHash = ( ( int ) ( topicId ^ ( topicId > > > 32 ) ) ) ; result = ( 31 * result ) + elementHash ; } } return result ; }
org . junit . Assert . assertEquals ( org . kaaproject . kaa . client . notification . TopicListHashCalculator . calculateTopicListHash ( topicList ) , request . getTopicListHash ( ) )
get_first_non_null_java ( ) { java . lang . String first = null ; java . lang . String second = "Stomp<sp>the<sp>Hawks,<sp>Badgers!" ; java . lang . String firstNullObject = null ; if ( first == null ) { if ( second != null ) { firstNullObject = second ; } } "<AssertPlaceHolder>" ; }
org . junit . Assert . assertEquals ( second , firstNullObject )
getApplicationContainerStatistics ( ) { javax . json . JsonObject stats = cut . getApplicationsContainerStatistics ( ) ; "<AssertPlaceHolder>" ; System . out . println ( ( "--<sp>getApplicationContainerStatistics--<sp>" + stats ) ) ; } getApplicationsContainerStatistics ( ) { javax . json . JsonObjectBuilder builder = javax . json . Json . createObjectBuilder ( ) ; javax . json . JsonObject applications = collector . fetchApplications ( ) ; java . util . Set < Map . Entry < java . lang . String , javax . json . JsonValue > > applicationsSet = applications . entrySet ( ) ; for ( Map . Entry < java . lang . String , javax . json . JsonValue > applicationEntry : applicationsSet ) { final java . lang . String applicationName = applicationEntry . getKey ( ) ; javax . json . JsonObject stats = collector . fetchApplicationStatistics ( applicationName ) ; builder . add ( applicationName , stats ) ; } return builder . build ( ) ; }
org . junit . Assert . assertNotNull ( stats )
testOneDelimiterByteStuffed ( ) { byte [ ] decodedPayload = new byte [ ] { 1 , 2 , HdlcTranslateConstants . FRAME_DELIMITER_BYTE , 3 , 4 } ; byte [ ] expectedEncodedPayload = new byte [ ] { HdlcTranslateConstants . FRAME_DELIMITER_BYTE , 1 , 2 , HdlcTranslateConstants . ESCAPE_BYTE , ( HdlcTranslateConstants . FRAME_DELIMITER_BYTE ) ^ 32 , 3 , 4 , HdlcTranslateConstants . FRAME_DELIMITER_BYTE } ; byte [ ] actualEncodedPayload = encode ( decodedPayload ) ; "<AssertPlaceHolder>" ; } encode ( byte [ ] ) { org . jboss . netty . handler . codec . embedder . EncoderEmbedder < org . jboss . netty . buffer . ChannelBuffer > embedder = new org . jboss . netty . handler . codec . embedder . EncoderEmbedder < org . jboss . netty . buffer . ChannelBuffer > ( new de . uniluebeck . itm . nettyprotocols . tinyos . HdlcTranslateEncoder ( ) ) ; embedder . offer ( org . jboss . netty . buffer . ChannelBuffers . wrappedBuffer ( payloadBytes ) ) ; org . jboss . netty . buffer . ChannelBuffer encodedBuffer = embedder . poll ( ) ; byte [ ] encodedBytes = new byte [ encodedBuffer . readableBytes ( ) ] ; encodedBuffer . readBytes ( encodedBytes ) ; return encodedBytes ; }
org . junit . Assert . assertArrayEquals ( expectedEncodedPayload , actualEncodedPayload )
testGetSiteInfo ( ) { net . sourceforge . jwbf . mediawiki . actions . meta . Siteinfo siteinfo = testee . getSiteinfo ( ) ; "<AssertPlaceHolder>" ; } getMainpage ( ) { return mainpage ; }
org . junit . Assert . assertEquals ( "" , siteinfo . getMainpage ( ) )
testGetParametersWithDefaultEntityAndDisabledSecurity ( ) { unit . setSecurity ( false ) ; org . lnu . is . domain . education . form . type . EducationFormType entity = new org . lnu . is . domain . education . form . type . EducationFormType ( ) ; java . util . Map < java . lang . String , java . lang . Object > expected = new java . util . HashMap < java . lang . String , java . lang . Object > ( ) ; expected . put ( "status" , RowStatus . ACTIVE ) ; java . util . Map < java . lang . String , java . lang . Object > actual = unit . getParameters ( entity ) ; "<AssertPlaceHolder>" ; } getParameters ( org . springframework . web . context . request . NativeWebRequest ) { java . util . Map < java . lang . String , java . lang . Object > resultMap = new java . util . HashMap < java . lang . String , java . lang . Object > ( ) ; java . util . Map < java . lang . String , java . lang . String > pathVariables = ( ( java . util . Map < java . lang . String , java . lang . String > ) ( webRequest . getAttribute ( HandlerMapping . URI_TEMPLATE_VARIABLES_ATTRIBUTE , RequestAttributes . SCOPE_REQUEST ) ) ) ; java . util . Map < java . lang . String , java . lang . Object > requestParams = getRequestParameterMap ( webRequest ) ; for ( Map . Entry < java . lang . String , java . lang . Object > entry : requestParams . entrySet ( ) ) { resultMap . put ( entry . getKey ( ) , entry . getValue ( ) ) ; } resultMap . putAll ( pathVariables ) ; return resultMap ; }
org . junit . Assert . assertEquals ( expected , actual )
entityEquality_commited_transaction ( ) { javax . persistence . EntityManager em = org . meri . jpa . simplest . LoadEntityTest . factory . createEntityManager ( ) ; em . getTransaction ( ) . begin ( ) ; org . meri . jpa . simplest . entities . Person person1 = em . find ( org . meri . jpa . simplest . entities . Person . class , org . meri . jpa . simplest . LoadEntityTest . SIMON_SLASH_ID ) ; em . getTransaction ( ) . commit ( ) ; em . getTransaction ( ) . begin ( ) ; org . meri . jpa . simplest . entities . Person person2 = em . find ( org . meri . jpa . simplest . entities . Person . class , org . meri . jpa . simplest . LoadEntityTest . SIMON_SLASH_ID ) ; em . getTransaction ( ) . rollback ( ) ; em . close ( ) ; "<AssertPlaceHolder>" ; } close ( ) { }
org . junit . Assert . assertEquals ( person1 , person2 )
testInvokeVarargs07 ( ) { javax . el . BeanELResolver resolver = new javax . el . BeanELResolver ( ) ; javax . el . ELContext context = new javax . el . StandardELContext ( javax . el . ELManager . getExpressionFactory ( ) ) ; java . lang . Object result = resolver . invoke ( context , new javax . el . TesterBean ( javax . el . TestBeanELResolver . BEAN_NAME ) , "getNameVarargs" , new java . lang . Class < ? > [ ] { null } , null ) ; "<AssertPlaceHolder>" ; } invoke ( javax . el . ELContext , java . lang . Object , java . lang . Object , javax . el . Class [ ] , java . lang . Object [ ] ) { context . setPropertyResolved ( false ) ; int sz = this . size ; for ( int i = 0 ; i < sz ; i ++ ) { java . lang . Object obj = this . resolvers [ i ] . invoke ( context , base , method , paramTypes , params ) ; if ( context . isPropertyResolved ( ) ) { return obj ; } } return null ; }
org . junit . Assert . assertEquals ( javax . el . TestBeanELResolver . BEAN_NAME , result )
componentProcessesValueOfValuesFieldOnSubmit ( ) { final org . apache . tapestry5 . dom . Document document = this . renderPage ( se . unbound . tapestry . tagselect . components . TagSelectTest . PAGE_WITH_STRING_TAGS ) ; final org . apache . tapestry5 . dom . Element form = document . getElementById ( "form" ) ; final java . util . Map < java . lang . String , java . lang . String > params = new java . util . HashMap < java . lang . String , java . lang . String > ( ) ; params . put ( "tags-values" , "tag123;tag456" ) ; final org . apache . tapestry5 . dom . Document document2 = this . submitForm ( form , params ) ; final org . apache . tapestry5 . dom . Element element = document2 . getElementById ( "tags-values" ) ; "<AssertPlaceHolder>" ; } getAttribute ( java . lang . String ) { return null ; }
org . junit . Assert . assertEquals ( "value" , "tag123;tag456" , element . getAttribute ( "value" ) )
testSubList ( ) { java . util . List < org . spout . api . inventory . ItemStack > subList = inventory . subList ( 5 , 15 ) ; for ( int i = 0 ; i < ( subList . size ( ) ) ; i ++ ) { "<AssertPlaceHolder>" ; } } get ( int ) { return getContents ( ) [ i ] ; }
org . junit . Assert . assertEquals ( inventory . get ( ( i + 5 ) ) , subList . get ( i ) )
get_response_interceptors ( ) { java . util . List < com . amazon . ask . request . interceptor . GenericResponseInterceptor < com . amazon . ask . sdk . TestHandlerInput , com . amazon . ask . sdk . TestHandlerOutput > > responseInterceptors = java . util . Collections . singletonList ( mock ( com . amazon . ask . request . interceptor . GenericResponseInterceptor . class ) ) ; com . amazon . ask . builder . GenericSkillConfiguration config = com . amazon . ask . builder . TestSkillConfiguration . builder ( ) . withResponseInterceptors ( responseInterceptors ) . build ( ) ; "<AssertPlaceHolder>" ; } getResponseInterceptors ( ) { return responseInterceptors ; }
org . junit . Assert . assertEquals ( responseInterceptors , config . getResponseInterceptors ( ) )
testCacheStrategy ( ) { org . apache . druid . query . CacheStrategy < org . apache . druid . query . Result < org . apache . druid . query . timeboundary . TimeBoundaryResultValue > , java . lang . Object , org . apache . druid . query . timeboundary . TimeBoundaryQuery > strategy = new org . apache . druid . query . timeboundary . TimeBoundaryQueryQueryToolChest ( ) . getCacheStrategy ( new org . apache . druid . query . timeboundary . TimeBoundaryQuery ( new org . apache . druid . query . TableDataSource ( "dummy" ) , new org . apache . druid . query . spec . MultipleIntervalSegmentSpec ( com . google . common . collect . ImmutableList . of ( org . apache . druid . java . util . common . Intervals . of ( "2015-01-01/2015-01-02" ) ) ) , null , null , null ) ) ; final org . apache . druid . query . Result < org . apache . druid . query . timeboundary . TimeBoundaryResultValue > result = new org . apache . druid . query . Result ( org . apache . druid . java . util . common . DateTimes . utc ( 123L ) , new org . apache . druid . query . timeboundary . TimeBoundaryResultValue ( com . google . common . collect . ImmutableMap . of ( TimeBoundaryQuery . MIN_TIME , DateTimes . EPOCH . toString ( ) , TimeBoundaryQuery . MAX_TIME , org . apache . druid . java . util . common . DateTimes . of ( "2015-01-01" ) . toString ( ) ) ) ) ; java . lang . Object preparedValue = strategy . prepareForSegmentLevelCache ( ) . apply ( result ) ; com . fasterxml . jackson . databind . ObjectMapper objectMapper = new org . apache . druid . jackson . DefaultObjectMapper ( ) ; java . lang . Object fromCacheValue = objectMapper . readValue ( objectMapper . writeValueAsBytes ( preparedValue ) , strategy . getCacheObjectClazz ( ) ) ; org . apache . druid . query . Result < org . apache . druid . query . timeboundary . TimeBoundaryResultValue > fromCacheResult = strategy . pullFromSegmentLevelCache ( ) . apply ( fromCacheValue ) ; "<AssertPlaceHolder>" ; } apply ( java . lang . String ) { final org . joda . time . Interval interval = org . apache . druid . java . util . common . Intervals . of ( input ) ; try { return org . apache . druid . timeline . DataSegment . builder ( ) . dataSource ( "test_kill_task" ) . interval ( interval ) . loadSpec ( com . google . common . collect . ImmutableMap . of ( "type" , "local" , "path" , ( ( ( ( ( ( ( ( tmpSegmentDir . getCanonicalPath ( ) ) + "/druid/localStorage/wikipedia/" ) + ( interval . getStart ( ) ) ) + "-" ) + ( interval . getEnd ( ) ) ) + "/" ) + "2011-04-6T16:52:46.119-05:00" ) + "/0/index.zip" ) ) ) . version ( "2011-04-6T16:52:46.119-05:00" ) . dimensions ( com . google . common . collect . ImmutableList . of ( ) ) . metrics ( com . google . common . collect . ImmutableList . of ( ) ) . shardSpec ( org . apache . druid . timeline . partition . NoneShardSpec . instance ( ) ) . binaryVersion ( 9 ) . size ( 0 ) . build ( ) ; } catch ( java . io . IOException e ) { throw new org . apache . druid . java . util . common . ISE ( e , "Error<sp>creating<sp>segments" ) ; } }
org . junit . Assert . assertEquals ( result , fromCacheResult )
testHasObjectsReturnsTrueWhenNotEmpty ( ) { final org . bonitasoft . engine . archive . ArchiveInsertRecord record1 = new org . bonitasoft . engine . archive . ArchiveInsertRecord ( mock ( org . bonitasoft . engine . persistence . ArchivedPersistentObject . class ) ) ; final org . bonitasoft . engine . archive . ArchiveInsertRecord record2 = new org . bonitasoft . engine . archive . ArchiveInsertRecord ( mock ( org . bonitasoft . engine . persistence . ArchivedPersistentObject . class ) ) ; final org . bonitasoft . engine . archive . ArchiveInsertRecord record3 = new org . bonitasoft . engine . archive . ArchiveInsertRecord ( mock ( org . bonitasoft . engine . persistence . ArchivedPersistentObject . class ) ) ; final org . bonitasoft . engine . archive . ArchiveInsertRecord [ ] records = new org . bonitasoft . engine . archive . ArchiveInsertRecord [ ] { record1 , record2 , record3 } ; final org . bonitasoft . engine . archive . impl . BatchArchiveCallable callable = new org . bonitasoft . engine . archive . impl . BatchArchiveCallable ( null , records ) ; "<AssertPlaceHolder>" ; } hasObjects ( ) { return ( ( archivedObjects ) != null ) && ( ! ( archivedObjects . isEmpty ( ) ) ) ; }
org . junit . Assert . assertTrue ( callable . hasObjects ( ) )
testLoad2 ( ) { java . util . Properties properties = com . liferay . portal . kernel . util . PropertiesUtil . load ( new com . liferay . portal . kernel . io . unsync . UnsyncStringReader ( com . liferay . portal . kernel . util . PropertiesUtilTest . _PROPERTIES_STRING ) ) ; for ( java . lang . String [ ] property : com . liferay . portal . kernel . util . PropertiesUtilTest . _PROPERTIES_ARRAY ) { "<AssertPlaceHolder>" ; } } get ( java . lang . String ) { return com . liferay . journal . content . search . web . configuration . JournalContentSearchWebConfigurationUtil . _configuration . get ( key ) ; }
org . junit . Assert . assertEquals ( property [ 1 ] , properties . get ( property [ 0 ] ) )
test2 ( ) { org . trie4j . tail . index . TailIndexBuilder tib = new org . trie4j . tail . index . ArrayTailIndexBuilder ( ) ; org . trie4j . tail . index . TailIndex ti = tib . build ( ) ; "<AssertPlaceHolder>" ; } size ( ) { return trie . size ( ) ; }
org . junit . Assert . assertEquals ( 0 , ti . size ( ) )
testSingleTestTEBASEwithNoConfigFileInATest ( ) { try { java . io . File tebase = new java . io . File ( classp , "tebase-no-config-in-test" ) ; com . occamlab . te . web . ConfigFileCreator configFileCreator = new com . occamlab . te . web . ConfigFileCreator ( ) ; configFileCreator . create ( tebase ) ; java . io . File configFile = new java . io . File ( ( ( tebase + ( java . io . File . separator ) ) + "config.xml" ) ) ; org . w3c . dom . Document config = builder . parse ( configFile ) ; org . w3c . dom . NodeList orgs = com . occamlab . te . web . XMLUtils . getAllNodes ( config , "/config/scripts/organization" ) ; "<AssertPlaceHolder>" ; configFile . delete ( ) ; } catch ( java . lang . Exception e ) { org . junit . Assert . fail ( ) ; e . printStackTrace ( ) ; } } getLength ( ) { if ( ( content ) == null ) { connect ( ) ; } return content . length ; }
org . junit . Assert . assertEquals ( 0 , orgs . getLength ( ) )
testSize ( ) { org . apache . tajo . datum . Datum d = org . apache . tajo . datum . DatumFactory . createText ( "12345" ) ; "<AssertPlaceHolder>" ; } size ( ) { return taskRequestQueue . size ( ) ; }
org . junit . Assert . assertEquals ( 5 , d . size ( ) )
givenWildcardPattern_andAgentIsEmpty_whenGetMatchSpecificity_thenReturnsZero ( ) { com . brandwatch . robots . matching . Matcher < java . lang . String > matcher = com . brandwatch . robots . matching . MatcherUtilsImplTest . newPatternMatcher ( "*" ) ; java . lang . String agent = "" ; com . google . common . base . Optional < java . lang . Double > specificity = utilities . getMatchSpecificity ( matcher , agent ) ; "<AssertPlaceHolder>" ; } get ( ) { return robots . build ( ) ; }
org . junit . Assert . assertThat ( specificity . get ( ) , org . hamcrest . Matchers . equalTo ( 0.0 ) )
testRecordVTReadWrite ( ) { java . lang . String ns = "testRecordVTWrite" ; org . lilyproject . repository . api . FieldType fieldType1 = org . lilyproject . repository . impl . test . ValueTypeTest . typeManager . createFieldType ( org . lilyproject . repository . impl . test . ValueTypeTest . typeManager . newFieldType ( org . lilyproject . repository . impl . test . ValueTypeTest . typeManager . getValueType ( "STRING" ) , new org . lilyproject . repository . api . QName ( ns , "field1" ) , Scope . NON_VERSIONED ) ) ; org . lilyproject . repository . api . RecordType rt1 = org . lilyproject . repository . impl . test . ValueTypeTest . typeManager . recordTypeBuilder ( ) . name ( new org . lilyproject . repository . api . QName ( ns , "rt1" ) ) . field ( fieldType1 . getId ( ) , false ) . create ( ) ; org . lilyproject . repository . api . ValueType recordVT = org . lilyproject . repository . impl . test . ValueTypeTest . typeManager . getValueType ( "RECORD" ) ; org . lilyproject . repository . api . RecordType rt2 = org . lilyproject . repository . impl . test . ValueTypeTest . typeManager . recordTypeBuilder ( ) . name ( new org . lilyproject . repository . api . QName ( ns , "rt2" ) ) . supertype ( ) . id ( rt1 . getId ( ) ) . add ( ) . create ( ) ; org . lilyproject . repository . api . Record createdRecord = org . lilyproject . repository . impl . test . ValueTypeTest . repository . recordBuilder ( ) . recordType ( new org . lilyproject . repository . api . QName ( ns , "rt2" ) ) . field ( new org . lilyproject . repository . api . QName ( ns , "field1" ) , "def" ) . create ( ) ; org . lilyproject . bytes . api . DataOutput dataOutput = new org . lilyproject . bytes . impl . DataOutputImpl ( ) ; recordVT . write ( createdRecord , dataOutput , new org . lilyproject . repository . api . IdentityRecordStack ( ) ) ; org . lilyproject . bytes . api . DataInput dataInput = new org . lilyproject . bytes . impl . DataInputImpl ( dataOutput . toByteArray ( ) ) ; org . lilyproject . repository . api . Record readRecord = recordVT . read ( dataInput ) ; "<AssertPlaceHolder>" ; } getFields ( ) { return java . util . Collections . unmodifiableList ( fields ) ; }
org . junit . Assert . assertEquals ( createdRecord . getFields ( ) , readRecord . getFields ( ) )
getResultTest ( ) { final java . lang . String testResult = org . threadly . util . StringUtils . makeRandomString ( 5 ) ; org . threadly . concurrent . PriorityScheduler scheduler = new org . threadly . concurrent . StrictPriorityScheduler ( 1 ) ; try { scheduler . schedule ( new java . lang . Runnable ( ) { @ org . threadly . concurrent . future . Override public void run ( ) { slf . setResult ( testResult ) ; } } , org . threadly . concurrent . future . DELAY_TIME ) ; "<AssertPlaceHolder>" ; } finally { scheduler . shutdownNow ( ) ; } } get ( ) { executeIfNotStarted ( ) ; return super . get ( ) ; }
org . junit . Assert . assertTrue ( ( ( slf . get ( ) ) == testResult ) )
removeIndexGroup ( ) { addIndexGroup ( ) ; final org . locationtech . geowave . core . store . cli . config . RemoveIndexCommand commandRemove = new org . locationtech . geowave . core . store . cli . config . RemoveIndexCommand ( ) ; commandRemove . setEntryName ( "abc" ) ; commandRemove . prepare ( operationParams ) ; commandRemove . execute ( operationParams ) ; final org . locationtech . geowave . core . store . cli . config . RemoveIndexGroupCommand command = new org . locationtech . geowave . core . store . cli . config . RemoveIndexGroupCommand ( ) ; command . setEntryName ( "ig1" ) ; command . prepare ( operationParams ) ; command . execute ( operationParams ) ; final java . util . Properties props = org . locationtech . geowave . core . cli . operations . config . options . ConfigOptions . loadProperties ( configFile ) ; "<AssertPlaceHolder>" ; } size ( ) { return getCluster ( ) . size ( ) ; }
org . junit . Assert . assertEquals ( 1 , props . size ( ) )
testCheckColumnEnforceVersions ( ) { org . apache . hadoop . hbase . regionserver . querymatcher . ScanWildcardColumnTracker tracker = new org . apache . hadoop . hbase . regionserver . querymatcher . ScanWildcardColumnTracker ( 0 , org . apache . hadoop . hbase . regionserver . querymatcher . TestScanWildcardColumnTracker . VERSIONS , Long . MIN_VALUE , org . apache . hadoop . hbase . CellComparatorImpl . COMPARATOR ) ; java . util . List < byte [ ] > qualifiers = new java . util . ArrayList ( 4 ) ; qualifiers . add ( org . apache . hadoop . hbase . util . Bytes . toBytes ( "qualifier1" ) ) ; qualifiers . add ( org . apache . hadoop . hbase . util . Bytes . toBytes ( "qualifier1" ) ) ; qualifiers . add ( org . apache . hadoop . hbase . util . Bytes . toBytes ( "qualifier1" ) ) ; qualifiers . add ( org . apache . hadoop . hbase . util . Bytes . toBytes ( "qualifier2" ) ) ; java . util . List < org . apache . hadoop . hbase . regionserver . querymatcher . ScanQueryMatcher . MatchCode > expected = new java . util . ArrayList ( 4 ) ; expected . add ( ScanQueryMatcher . MatchCode . INCLUDE ) ; expected . add ( ScanQueryMatcher . MatchCode . INCLUDE ) ; expected . add ( ScanQueryMatcher . MatchCode . SEEK_NEXT_COL ) ; expected . add ( ScanQueryMatcher . MatchCode . INCLUDE ) ; java . util . List < org . apache . hadoop . hbase . regionserver . querymatcher . ScanQueryMatcher . MatchCode > actual = new java . util . ArrayList ( qualifiers . size ( ) ) ; long timestamp = 0 ; for ( byte [ ] qualifier : qualifiers ) { org . apache . hadoop . hbase . regionserver . querymatcher . ScanQueryMatcher . MatchCode mc = org . apache . hadoop . hbase . regionserver . querymatcher . ScanQueryMatcher . checkColumn ( tracker , qualifier , 0 , qualifier . length , ( ++ timestamp ) , KeyValue . Type . Put . getCode ( ) , false ) ; actual . add ( mc ) ; } for ( int i = 0 ; i < ( expected . size ( ) ) ; i ++ ) { "<AssertPlaceHolder>" ; } } get ( java . util . List ) { byte [ ] [ ] rows = new byte [ gets . size ( ) ] [ ] ; int maxVersions = 1 ; int count = 0 ; for ( org . apache . hadoop . hbase . client . Get g : gets ) { if ( count == 0 ) { maxVersions = g . getMaxVersions ( ) ; } else if ( ( g . getMaxVersions ( ) ) != maxVersions ) { org . apache . hadoop . hbase . rest . client . RemoteHTable . LOG . warn ( ( ( "MaxVersions<sp>on<sp>Gets<sp>do<sp>not<sp>match,<sp>using<sp>the<sp>first<sp>in<sp>the<sp>list<sp>(" + maxVersions ) + ")" ) ) ; } if ( ( g . getFilter ( ) ) != null ) { org . apache . hadoop . hbase . rest . client . RemoteHTable . LOG . warn ( "filters<sp>not<sp>supported<sp>on<sp>gets" ) ; } rows [ count ] = g . getRow ( ) ; count ++ ; } java . lang . String spec = buildMultiRowSpec ( rows , maxVersions ) ; return getResults ( spec ) ; }
org . junit . Assert . assertEquals ( expected . get ( i ) , actual . get ( i ) )
testRelatieDatumAanvangActueel ( ) { final java . util . Set < nl . bzk . brp . domain . algemeen . ZoekCriterium > zoekCriteria = new java . util . HashSet ( ) ; nl . bzk . brp . domain . algemeen . ZoekCriterium zoekCriteria1 = new nl . bzk . brp . domain . algemeen . ZoekCriterium ( getAttribuutElement ( Element . HUWELIJK_DATUMAANVANG ) , nl . bzk . algemeenbrp . dal . domein . brp . enums . Zoekoptie . EXACT , 20040101 ) ; zoekCriteria . add ( zoekCriteria1 ) ; nl . bzk . brp . delivery . dataaccess . bevraging . SqlStamementZoekPersoon sql = new nl . bzk . brp . delivery . dataaccess . bevraging . SqlBepaler ( zoekCriteria , 10 , false , null , false ) . maakSql ( ) ; final java . util . List < java . lang . Long > ids = zoekPersoonRepository . zoekPersonen ( sql , postgres ) ; "<AssertPlaceHolder>" ; } size ( ) { return elementen . size ( ) ; }
org . junit . Assert . assertEquals ( 2 , ids . size ( ) )
deveObterDestinatarioComoFoiSetado ( ) { final com . fincatto . documentofiscal . nfe310 . classes . nota . NFNotaInfo notaInfo = new com . fincatto . documentofiscal . nfe310 . classes . nota . NFNotaInfo ( ) ; final com . fincatto . documentofiscal . nfe310 . classes . nota . NFNotaInfoDestinatario destinatario = com . fincatto . documentofiscal . nfe310 . FabricaDeObjetosFake . getNFNotaInfoDestinatario ( ) ; notaInfo . setDestinatario ( destinatario ) ; "<AssertPlaceHolder>" ; } getDestinatario ( ) { return this . destinatario ; }
org . junit . Assert . assertEquals ( destinatario , notaInfo . getDestinatario ( ) )
invalidInput ( ) { "<AssertPlaceHolder>" ; } trace ( java . lang . String ) { if ( ¢ == null ) return com . bugquery . stacktrace . Extract . notFound ; java . lang . String $ = "" ; for ( final com . bugquery . stacktrace . Matcher m = com . bugquery . stacktrace . Extract . tracePattern . matcher ( ¢ ) ; m . find ( ) ; ) $ += m . group ( 0 ) ; return ( $ . length ( ) ) > 0 ? $ : com . bugquery . stacktrace . Extract . notFound ; }
org . junit . Assert . assertEquals ( Extract . notFound , com . bugquery . stacktrace . Extract . trace ( null ) )
testApplyTransforms_simple_hosshoi_kar ( ) { java . util . List < java . lang . Integer > glyphsAfterGsub = java . util . Arrays . asList ( 56 , 102 , 91 ) ; java . util . List < java . lang . Integer > result = gsubWorkerForBengali . applyTransforms ( getGlyphIds ( "" ) ) ; "<AssertPlaceHolder>" ; } getGlyphIds ( java . lang . String ) { java . util . List < java . lang . Integer > originalGlyphIds = new java . util . ArrayList ( ) ; for ( char unicodeChar : word . toCharArray ( ) ) { int glyphId = cmapLookup . getGlyphId ( unicodeChar ) ; org . junit . Assert . assertTrue ( ( glyphId > 0 ) ) ; originalGlyphIds . add ( glyphId ) ; } return originalGlyphIds ; }
org . junit . Assert . assertEquals ( glyphsAfterGsub , result )
testUnoundUsers3 ( ) { int previous = transactionService . runOnAnotherReadOnlyTransaction ( new org . libreplan . business . common . IOnTransaction < java . lang . Integer > ( ) { @ org . libreplan . business . test . users . daos . Override public org . libreplan . business . test . users . daos . Integer execute ( ) { return userDAO . getUnboundUsers ( null ) . size ( ) ; } } ) ; org . libreplan . business . users . entities . User user = createUser ( getUniqueName ( ) ) ; final org . libreplan . business . resources . entities . Worker worker = givenStoredWorkerRelatedTo ( user ) ; user . setWorker ( worker ) ; int size = transactionService . runOnAnotherReadOnlyTransaction ( new org . libreplan . business . common . IOnTransaction < java . lang . Integer > ( ) { @ org . libreplan . business . test . users . daos . Override public org . libreplan . business . test . users . daos . Integer execute ( ) { return userDAO . getUnboundUsers ( worker ) . size ( ) ; } } ) ; "<AssertPlaceHolder>" ; } getUnboundUsers ( org . libreplan . business . resources . entities . Worker ) { java . util . List < org . libreplan . business . users . entities . User > result = new java . util . ArrayList ( ) ; boolean condition ; for ( org . libreplan . business . users . entities . User user : getUsersOrderByLoginName ( ) ) { condition = ( ( user . getWorker ( ) ) == null ) || ( ( ( worker != null ) && ( ! ( worker . isNewObject ( ) ) ) ) && ( worker . getId ( ) . equals ( user . getWorker ( ) . getId ( ) ) ) ) ; if ( condition ) { result . add ( user ) ; } } return result ; }
org . junit . Assert . assertEquals ( ( previous + 1 ) , size )
testGetFirstReleaseNull ( ) { ch . puzzle . itc . mobiliar . business . releasing . entity . ReleaseEntity rel = resourceGroup . getFirstRelease ( ) ; "<AssertPlaceHolder>" ; } getFirstRelease ( ) { ch . puzzle . itc . mobiliar . business . releasing . entity . ReleaseEntity firstRelease = null ; if ( ( resources ) != null ) { for ( ch . puzzle . itc . mobiliar . business . resourcegroup . entity . ResourceEntity resource : resources ) { if ( ( firstRelease == null ) || ( ( firstRelease . compareTo ( resource . getRelease ( ) ) ) > 0 ) ) { firstRelease = resource . getRelease ( ) ; } } } return firstRelease ; }
org . junit . Assert . assertEquals ( null , rel )
getFieldValue ( ) { class MyClass { public com . vaadin . util . Integer getField ( ) { return 1 ; } public void setField ( java . lang . Integer i ) { } } class MySubClass extends MyClass { public java . lang . String field = "Hello" ; } MySubClass myInstance = new MySubClass ( ) ; java . lang . reflect . Field memberField ; java . lang . Object fieldValue = false ; try { memberField = myInstance . getClass ( ) . getField ( "field" ) ; fieldValue = com . vaadin . util . ReflectTools . getJavaFieldValue ( myInstance , memberField , java . lang . String . class ) ; } catch ( java . lang . Exception e ) { } "<AssertPlaceHolder>" ; } getJavaFieldValue ( java . lang . Object , java . lang . reflect . Field , java . lang . Class ) { java . beans . PropertyDescriptor pd ; try { pd = new java . beans . PropertyDescriptor ( field . getName ( ) , object . getClass ( ) ) ; if ( propertyType . isAssignableFrom ( pd . getPropertyType ( ) ) ) { java . lang . reflect . Method getter = pd . getReadMethod ( ) ; if ( getter != null ) { return getter . invoke ( object , ( ( java . lang . Object [ ] ) ( null ) ) ) ; } } } catch ( java . beans . IntrospectionException e1 ) { } if ( ! ( propertyType . isAssignableFrom ( field . getType ( ) ) ) ) { throw new java . lang . IllegalArgumentException ( ) ; } if ( ! ( field . isAccessible ( ) ) ) { field . setAccessible ( true ) ; } return field . get ( object ) ; }
org . junit . Assert . assertTrue ( ( fieldValue instanceof java . lang . String ) )
asMulti_array ( ) { ezvcard . io . json . JCardValue value = new ezvcard . io . json . JCardValue ( new ezvcard . io . json . JsonValue ( java . util . Arrays . asList ( new ezvcard . io . json . JsonValue ( "value1" ) , new ezvcard . io . json . JsonValue ( false ) ) ) ) ; "<AssertPlaceHolder>" ; } asMulti ( ) { if ( values . isEmpty ( ) ) { return java . util . Collections . emptyList ( ) ; } java . util . List < java . lang . String > multi = new java . util . ArrayList < java . lang . String > ( values . size ( ) ) ; for ( ezvcard . io . json . JsonValue value : values ) { if ( value . isNull ( ) ) { multi . add ( "" ) ; continue ; } java . lang . Object obj = value . getValue ( ) ; if ( obj != null ) { multi . add ( obj . toString ( ) ) ; continue ; } } return multi ; }
org . junit . Assert . assertEquals ( java . util . Arrays . asList ( ) , value . asMulti ( ) )
verifyCreatesAndRemovesNode ( ) { sut . put ( com . spotify . helios . servicescommon . coordination . ZooKeeperUpdatingPersistentDirectoryTest . FOO_NODE , com . spotify . helios . servicescommon . coordination . ZooKeeperUpdatingPersistentDirectoryTest . BAR1_DATA ) ; final byte [ ] remote = awaitNode ( com . spotify . helios . servicescommon . coordination . ZooKeeperUpdatingPersistentDirectoryTest . FOO_PATH ) ; "<AssertPlaceHolder>" ; sut . remove ( com . spotify . helios . servicescommon . coordination . ZooKeeperUpdatingPersistentDirectoryTest . FOO_NODE ) ; awaitNoNode ( com . spotify . helios . servicescommon . coordination . ZooKeeperUpdatingPersistentDirectoryTest . FOO_PATH ) ; } awaitNode ( java . lang . String ) { return com . spotify . helios . Polling . await ( 30 , com . spotify . helios . servicescommon . coordination . SECONDS , new java . util . concurrent . Callable < byte [ ] > ( ) { @ com . spotify . helios . servicescommon . coordination . Override public byte [ ] call ( ) throws com . spotify . helios . servicescommon . coordination . Exception { try { return zk . curatorWithSuperAuth ( ) . getData ( ) . forPath ( path ) ; } catch ( org . apache . zookeeper . KeeperException e ) { return null ; } } } ) ; }
org . junit . Assert . assertArrayEquals ( com . spotify . helios . servicescommon . coordination . ZooKeeperUpdatingPersistentDirectoryTest . BAR1_DATA , remote )
testTrainC_ClassificationDataSet_ExecutorService ( ) { System . out . println ( "trainC" ) ; for ( boolean useAverageModel : new boolean [ ] { true , false } ) for ( int burnin : new int [ ] { 0 , 50 , 100 , 250 } ) { jsat . classifiers . linear . kernelized . OSKL instance = new jsat . classifiers . linear . kernelized . OSKL ( new jsat . distributions . kernels . RBFKernel ( 0.5 ) , 1.5 ) ; instance . setBurnIn ( burnin ) ; instance . setUseAverageModel ( useAverageModel ) ; jsat . classifiers . linear . kernelized . ClassificationDataSet train = jsat . FixedProblems . getInnerOuterCircle ( 200 , jsat . utils . random . RandomUtil . getRandom ( ) ) ; jsat . classifiers . linear . kernelized . ClassificationDataSet test = jsat . FixedProblems . getInnerOuterCircle ( 100 , jsat . utils . random . RandomUtil . getRandom ( ) ) ; jsat . classifiers . linear . kernelized . ClassificationModelEvaluation cme = new jsat . classifiers . linear . kernelized . ClassificationModelEvaluation ( instance , train , true ) ; cme . evaluateTestSet ( test ) ; "<AssertPlaceHolder>" ; } } getErrorRate ( ) { return 1.0 - ( ( getCorrectWeights ( ) ) / ( sumOfWeights ) ) ; }
org . junit . Assert . assertEquals ( 0 , cme . getErrorRate ( ) , 0.0 )
testGPX10 ( ) { final javax . ws . rs . core . Response response = com . graphhopper . matching . http . MapMatchingResourceTest . app . client ( ) . target ( "http://localhost:8080/match" ) . request ( ) . buildPost ( javax . ws . rs . client . Entity . xml ( getClass ( ) . getResourceAsStream ( "gpxv1_0.gpx" ) ) ) . invoke ( ) ; "<AssertPlaceHolder>" ; }
org . junit . Assert . assertEquals ( 200 , response . getStatus ( ) )
testGetLargeJobExecutorPoolSize ( ) { int expResult = 200 ; int result = instance . getLargeJobExecutorPoolSize ( ) ; "<AssertPlaceHolder>" ; } getLargeJobExecutorPoolSize ( ) { return largejobPoolSize ; }
org . junit . Assert . assertEquals ( expResult , result )
testDeleteMseaMdById ( ) { org . onosproject . yang . gen . v1 . mseacfm . rev20160229 . mseacfm . mefcfm . MaintenanceDomain md = new org . onosproject . yang . gen . v1 . mseacfm . rev20160229 . mseacfm . mefcfm . DefaultMaintenanceDomain ( ) ; org . onosproject . yang . gen . v1 . mseacfm . rev20160229 . mseacfm . mefcfm . maintenancedomain . mdnameandtypecombo . NameDomainName mdName = new org . onosproject . yang . gen . v1 . mseacfm . rev20160229 . mseacfm . mefcfm . maintenancedomain . mdnameandtypecombo . DefaultNameDomainName ( ) ; mdName . nameDomainName ( org . onosproject . yang . gen . v1 . mseacfm . rev20160229 . mseacfm . mefcfm . maintenancedomain . mdnameandtypecombo . namedomainname . NameDomainNameUnion . fromString ( "www.opennetworking.org" ) ) ; md . mdNameAndTypeCombo ( mdName ) ; md . id ( ( ( short ) ( 10 ) ) ) ; org . onosproject . yang . gen . v1 . mseacfm . rev20160229 . mseacfm . MefCfm mefCfm = new org . onosproject . yang . gen . v1 . mseacfm . rev20160229 . mseacfm . DefaultMefCfm ( ) ; mefCfm . addToMaintenanceDomain ( md ) ; org . onosproject . yang . gen . v1 . mseacfm . rev20160229 . MseaCfmOpParam mseaCfm = new org . onosproject . yang . gen . v1 . mseacfm . rev20160229 . MseaCfmOpParam ( ) ; mseaCfm . mefCfm ( mefCfm ) ; try { boolean deleted = mseaCfmService . deleteMseaMd ( mseaCfm , session , DatastoreId . RUNNING ) ; "<AssertPlaceHolder>" ; } catch ( org . onosproject . netconf . NetconfException e ) { e . printStackTrace ( ) ; org . junit . Assert . fail ( ) ; } catch ( org . onosproject . incubator . net . l2monitoring . cfm . service . CfmConfigException e ) { e . printStackTrace ( ) ; org . junit . Assert . fail ( ) ; } } deleteMseaMd ( org . onosproject . yang . gen . v1 . mseacfm . rev20160229 . MseaCfmOpParam , org . onosproject . netconf . NetconfSession , org . onosproject . netconf . DatastoreId ) { if ( ( mseaCfm . mefCfm ( ) ) == null ) { throw new org . onosproject . incubator . net . l2monitoring . cfm . service . CfmConfigException ( "mefCfm<sp>object<sp>must<sp>be<sp>present<sp>before<sp>Meps<sp>can<sp>be<sp>added" ) ; } org . onosproject . yang . model . ModelObjectData mseCfmMepList = org . onosproject . yang . model . DefaultModelObjectData . builder ( ) . addModelObject ( ( ( org . onosproject . yang . model . ModelObject ) ( mseaCfm . mefCfm ( ) ) ) ) . build ( ) ; java . util . ArrayList < org . onosproject . yang . runtime . AnnotatedNodeInfo > anis = new java . util . ArrayList ( ) ; for ( org . onosproject . yang . gen . v1 . mseacfm . rev20160229 . mseacfm . mefcfm . MaintenanceDomain md : mseaCfm . mefCfm ( ) . maintenanceDomain ( ) ) { if ( ( md . id ( ) ) == 0 ) { throw new org . onosproject . incubator . net . l2monitoring . cfm . service . CfmConfigException ( "An<sp>MD<sp>numeric<sp>ID<sp>must<sp>be<sp>given" ) ; } org . onosproject . yang . model . ResourceId . Builder ridBuilder = org . onosproject . yang . model . ResourceId . builder ( ) . addBranchPointSchema ( "/" , null ) . addBranchPointSchema ( org . onosproject . drivers . microsemi . yang . impl . MseaCfmManager . MEF_CFM , org . onosproject . drivers . microsemi . yang . impl . MseaCfmManager . MSEA_CFM_NS ) . addBranchPointSchema ( org . onosproject . drivers . microsemi . yang . impl . MseaCfmManager . MAINTENANCE_DOMAIN , org . onosproject . drivers . microsemi . yang . impl . MseaCfmManager . MSEA_CFM_NS ) . addKeyLeaf ( org . onosproject . drivers . microsemi . yang . impl . MseaCfmManager . ID , org . onosproject . drivers . microsemi . yang . impl . MseaCfmManager . MSEA_CFM_NS , md . id ( ) ) ; org . onosproject . yang . runtime . AnnotatedNodeInfo ani = org . onosproject . yang . runtime . DefaultAnnotatedNodeInfo . builder ( ) . resourceId ( ridBuilder . build ( ) ) . addAnnotation ( new org . onosproject . yang . runtime . DefaultAnnotation ( NC_OPERATION , OP_DELETE ) ) . build ( ) ; anis . add ( ani ) ; } return setNetconfObject ( mseCfmMepList , session , targetDs , anis ) ; }
org . junit . Assert . assertTrue ( deleted )
getBestTest ( ) { int n = 3 ; org . openscience . cdk . group . AbstractDiscretePartitionRefinerTest . Graph g = new org . openscience . cdk . group . AbstractDiscretePartitionRefinerTest . Graph ( n ) ; g . connectionTable = new int [ ] [ ] { new int [ ] { 0 , 1 , 0 } , new int [ ] { 1 , 0 , 1 } , new int [ ] { 0 , 1 , 0 } } ; org . openscience . cdk . group . PermutationGroup group = new org . openscience . cdk . group . PermutationGroup ( n ) ; org . openscience . cdk . group . AbstractDiscretePartitionRefinerTest . MockRefiner refiner = new org . openscience . cdk . group . AbstractDiscretePartitionRefinerTest . MockRefiner ( g ) ; setup ( refiner , group , g ) ; refiner . refine ( org . openscience . cdk . group . Partition . unit ( n ) ) ; org . openscience . cdk . group . Permutation best = refiner . getBest ( ) ; org . openscience . cdk . group . Permutation expected = new org . openscience . cdk . group . Permutation ( 1 , 0 , 2 ) ; "<AssertPlaceHolder>" ; } unit ( int ) { org . openscience . cdk . group . Partition unit = new org . openscience . cdk . group . Partition ( ) ; unit . cells . add ( new java . util . TreeSet < java . lang . Integer > ( ) ) ; for ( int i = 0 ; i < size ; i ++ ) { unit . cells . get ( 0 ) . add ( i ) ; } return unit ; }
org . junit . Assert . assertEquals ( expected , best )
testGetVisibleDataTypeListItems ( ) { final org . kie . workbench . common . dmn . client . editors . types . listview . DataTypeList dataTypeList = mock ( org . kie . workbench . common . dmn . client . editors . types . listview . DataTypeList . class ) ; final org . kie . workbench . common . dmn . client . editors . types . listview . DataTypeListItem item1 = mock ( org . kie . workbench . common . dmn . client . editors . types . listview . DataTypeListItem . class ) ; final org . kie . workbench . common . dmn . client . editors . types . listview . DataTypeListItem item2 = mock ( org . kie . workbench . common . dmn . client . editors . types . listview . DataTypeListItem . class ) ; final org . kie . workbench . common . dmn . client . editors . types . listview . DataTypeListItem item3 = mock ( org . kie . workbench . common . dmn . client . editors . types . listview . DataTypeListItem . class ) ; final org . kie . workbench . common . dmn . client . editors . types . common . DataType dataType1 = fakeDataType ( "123" ) ; final org . kie . workbench . common . dmn . client . editors . types . common . DataType dataType2 = fakeDataType ( "456" ) ; final org . kie . workbench . common . dmn . client . editors . types . common . DataType dataType3 = fakeDataType ( "789" ) ; final elemental2 . dom . Element dataTypeRow1 = fakeDataTypeRow ( "123" ) ; final elemental2 . dom . Element dataTypeRow2 = fakeDataTypeRow ( "456" ) ; final elemental2 . dom . Element dataTypeRow3 = fakeDataTypeRow ( "789" ) ; final java . util . List < org . kie . workbench . common . dmn . client . editors . types . listview . DataTypeListItem > items = asList ( item1 , item2 , item3 ) ; final org . uberfire . client . views . pfly . selectpicker . JQueryList < elemental2 . dom . Element > jQueryList = mock ( org . uberfire . client . views . pfly . selectpicker . JQueryList . class ) ; when ( item1 . getDataType ( ) ) . thenReturn ( dataType1 ) ; when ( item2 . getDataType ( ) ) . thenReturn ( dataType2 ) ; when ( item3 . getDataType ( ) ) . thenReturn ( dataType3 ) ; when ( dataTypeList . getItems ( ) ) . thenReturn ( items ) ; when ( presenter . getDataTypeList ( ) ) . thenReturn ( dataTypeList ) ; jQueryList . length = 3 ; when ( jQueryList . get ( 0 ) ) . thenReturn ( dataTypeRow1 ) ; when ( jQueryList . get ( 1 ) ) . thenReturn ( dataTypeRow2 ) ; when ( jQueryList . get ( 2 ) ) . thenReturn ( dataTypeRow3 ) ; doReturn ( jQueryList ) . when ( view ) . filterVisible ( ) ; final java . util . List < org . kie . workbench . common . dmn . client . editors . types . listview . DataTypeListItem > actual = view . getVisibleDataTypeListItems ( ) ; final java . util . List < org . kie . workbench . common . dmn . client . editors . types . listview . DataTypeListItem > expected = asList ( item1 , item2 , item3 ) ; "<AssertPlaceHolder>" ; } getVisibleDataTypeListItems ( ) { return getVisibleDataTypeRows ( ) . stream ( ) . map ( this :: getUUID ) . map ( this :: getDataTypeListItem ) . filter ( Optional :: isPresent ) . map ( Optional :: get ) . collect ( java . util . stream . Collectors . toList ( ) ) ; }
org . junit . Assert . assertEquals ( expected , actual )
dontAllowRegistrationAfterNextCallTest ( ) { org . apache . apex . malhar . lib . state . spillable . SequentialSpillableIdentifierGenerator gen = new org . apache . apex . malhar . lib . state . spillable . SequentialSpillableIdentifierGenerator ( ) ; gen . next ( ) ; boolean exception = false ; try { gen . register ( org . apache . apex . malhar . lib . util . TestUtils . getByte ( 1 ) ) ; } catch ( java . lang . Exception e ) { exception = true ; } "<AssertPlaceHolder>" ; } getByte ( int ) { com . google . common . base . Preconditions . checkArgument ( ( val <= ( Byte . MAX_VALUE ) ) ) ; return new byte [ ] { ( ( byte ) ( val ) ) } ; }
org . junit . Assert . assertTrue ( exception )
testNoDefaultMetadataId ( ) { org . locationtech . geogig . model . Node ln = org . locationtech . geogig . model . RevObjectFactory . defaultInstance ( ) . createNode ( NodeRef . ROOT , RevTree . EMPTY_TREE_ID , ObjectId . NULL , TYPE . TREE , new org . locationtech . jts . geom . Envelope ( 0 , 0 , 0 , 0 ) , null ) ; org . locationtech . geogig . model . Node rn = org . locationtech . geogig . model . RevObjectFactory . defaultInstance ( ) . createNode ( NodeRef . ROOT , org . locationtech . geogig . model . impl . RevObjectTestSupport . hashString ( "rnd" ) , ObjectId . NULL , TYPE . TREE , new org . locationtech . jts . geom . Envelope ( 0 , 1 , 0 , 1 ) , null ) ; org . locationtech . geogig . model . ObjectId metadataId = org . locationtech . geogig . model . ObjectId . NULL ; org . locationtech . geogig . model . NodeRef left = new org . locationtech . geogig . model . NodeRef ( ln , null , metadataId ) ; org . locationtech . geogig . model . NodeRef right = new org . locationtech . geogig . model . NodeRef ( rn , null , metadataId ) ; org . locationtech . geogig . model . DiffEntry entry = new org . locationtech . geogig . model . DiffEntry ( left , right ) ; serializer . write ( out , entry ) ; byte [ ] array = stream . toByteArray ( ) ; org . locationtech . geogig . model . DiffEntry read = serializer . read ( new java . io . DataInputStream ( new java . io . ByteArrayInputStream ( array ) ) ) ; "<AssertPlaceHolder>" ; } read ( java . io . DataInput ) { final java . lang . String name = in . readUTF ( ) ; org . locationtech . geogig . model . FieldType type = org . locationtech . geogig . model . FieldType . valueOf ( in . readUnsignedByte ( ) ) ; final java . lang . Object val = DataStreamValueSerializerV2 . INSTANCE . decode ( type , in ) ; return new org . locationtech . geogig . model . internal . NodeId ( name , val ) ; }
org . junit . Assert . assertEquals ( entry , read )
testSelectSimple2 ( ) { com . iciql . test . models . Product p = new com . iciql . test . models . Product ( ) ; java . util . List < java . lang . String > productNames = db . from ( p ) . orderBy ( p . productId ) . select ( p . productName ) ; java . util . List < com . iciql . test . models . Product > products = com . iciql . test . models . Product . getList ( ) ; for ( int i = 0 ; i < ( products . size ( ) ) ; i ++ ) { "<AssertPlaceHolder>" ; } } get ( com . iciql . Token ) { if ( expr instanceof com . iciql . bytecode . Not ) { return ( ( com . iciql . bytecode . Not ) ( expr ) ) . expr ; } else if ( expr instanceof com . iciql . bytecode . Operation ) { return ( ( com . iciql . bytecode . Operation ) ( expr ) ) . reverse ( ) ; } return new com . iciql . bytecode . Not ( expr ) ; }
org . junit . Assert . assertEquals ( products . get ( i ) . productName , productNames . get ( i ) )
testNoViableAltAvoidance ( ) { java . lang . String expecting = "line<sp>1:1<sp>mismatched<sp>input<sp>\'.\'<sp>expecting<sp>\'!\'\n" ; java . lang . String result = stderrDuringParse ; "<AssertPlaceHolder>" ; }
org . junit . Assert . assertEquals ( expecting , result )
deveObterNumeroSequencialEventoComoFoiSetado ( ) { final com . fincatto . documentofiscal . nfe310 . classes . evento . NFInfoEventoRetorno eventoRetorno = new com . fincatto . documentofiscal . nfe310 . classes . evento . NFInfoEventoRetorno ( ) ; final int numeroSequencialEvento = 1 ; eventoRetorno . setNumeroSequencialEvento ( numeroSequencialEvento ) ; "<AssertPlaceHolder>" ; } getNumeroSequencialEvento ( ) { return this . numeroSequencialEvento ; }
org . junit . Assert . assertEquals ( numeroSequencialEvento , eventoRetorno . getNumeroSequencialEvento ( ) , 0 )
testShouldNotReturnItFromAllPortlets ( ) { com . liferay . portal . kernel . service . PortletPreferencesLocalServiceUtil . addPortletPreferences ( com . liferay . portal . kernel . test . util . TestPropsValues . getCompanyId ( ) , PortletKeys . PREFS_OWNER_ID_DEFAULT , PortletKeys . PREFS_OWNER_TYPE_LAYOUT , layout . getPlid ( ) , _testNonembeddedPortlet . getPortletId ( ) , _testNonembeddedPortlet , null ) ; java . util . List < com . liferay . portal . kernel . model . Portlet > allPortlets = com . liferay . portal . osgi . web . portlet . container . embedded . test . EmbeddedPortletWhenEmbeddingNonembeddablePortletInLayoutTest . _layoutTypePortlet . getAllPortlets ( ) ; "<AssertPlaceHolder>" ; } toString ( ) { com . liferay . petra . string . StringBundler sb = new com . liferay . petra . string . StringBundler ( 23 ) ; sb . append ( "{uuid=" ) ; sb . append ( uuid ) ; sb . append ( ",<sp>amImageEntryId=" ) ; sb . append ( amImageEntryId ) ; sb . append ( ",<sp>groupId=" ) ; sb . append ( groupId ) ; sb . append ( ",<sp>companyId=" ) ; sb . append ( companyId ) ; sb . append ( ",<sp>createDate=" ) ; sb . append ( createDate ) ; sb . append ( ",<sp>configurationUuid=" ) ; sb . append ( configurationUuid ) ; sb . append ( ",<sp>fileVersionId=" ) ; sb . append ( fileVersionId ) ; sb . append ( ",<sp>mimeType=" ) ; sb . append ( mimeType ) ; sb . append ( ",<sp>height=" ) ; sb . append ( height ) ; sb . append ( ",<sp>width=" ) ; sb . append ( width ) ; sb . append ( ",<sp>size=" ) ; sb . append ( size ) ; sb . append ( "}" ) ; return sb . toString ( ) ; }
org . junit . Assert . assertFalse ( allPortlets . toString ( ) , allPortlets . contains ( _testNonembeddedPortlet ) )
testGetElements ( ) { final com . allanbank . mongodb . bson . Element subElement = new com . allanbank . mongodb . bson . element . BooleanElement ( "1" , false ) ; final com . allanbank . mongodb . bson . element . DocumentElement element = new com . allanbank . mongodb . bson . element . DocumentElement ( "foo" , subElement ) ; "<AssertPlaceHolder>" ; } getElements ( ) { return myElements ; }
org . junit . Assert . assertEquals ( java . util . Collections . singletonList ( subElement ) , element . getElements ( ) )
onDetachFocusLostSentToParent ( ) { jetbrains . jetpad . cell . TextCell v = new jetbrains . jetpad . cell . TextCell ( ) ; v . focusable ( ) . set ( true ) ; container . root . children ( ) . add ( v ) ; v . focus ( ) ; final jetbrains . jetpad . base . Value < java . lang . Boolean > focusLostCalled = new jetbrains . jetpad . base . Value ( false ) ; container . root . addTrait ( new jetbrains . jetpad . cell . trait . CellTrait ( ) { @ jetbrains . jetpad . cell . Override public void onFocusLost ( jetbrains . jetpad . cell . Cell cell , jetbrains . jetpad . cell . event . FocusEvent event ) { super . onFocusLost ( cell , event ) ; focusLostCalled . set ( true ) ; } } ) ; v . removeFromParent ( ) ; "<AssertPlaceHolder>" ; } get ( ) { return myValue . get ( ) ; }
org . junit . Assert . assertTrue ( focusLostCalled . get ( ) )
testExistsWithValue ( ) { org . hl7 . fhir . dstu3 . utils . Patient patient = new org . hl7 . fhir . dstu3 . utils . Patient ( ) ; patient . setDeceased ( new org . hl7 . fhir . dstu3 . utils . BooleanType ( false ) ) ; java . util . List < org . hl7 . fhir . dstu3 . utils . Base > eval = org . hl7 . fhir . dstu3 . utils . FhirPathEngineTest . ourEngine . evaluate ( patient , "Patient.deceased.exists()" ) ; org . hl7 . fhir . dstu3 . utils . FhirPathEngineTest . ourLog . info ( eval . toString ( ) ) ; "<AssertPlaceHolder>" ; } get ( int ) { if ( ( myOrderedTags ) == null ) { myOrderedTags = new java . util . ArrayList < ca . uhn . fhir . model . api . Tag > ( ) ; for ( ca . uhn . fhir . model . api . Tag next : myTagSet ) { myOrderedTags . add ( next ) ; } } return myOrderedTags . get ( theIndex ) ; }
org . junit . Assert . assertTrue ( ( ( org . hl7 . fhir . dstu3 . utils . BooleanType ) ( eval . get ( 0 ) ) ) . getValue ( ) )
testDefaults ( ) { int count = testDefaults ( 1000 , 1000 ) ; "<AssertPlaceHolder>" ; } is ( int ) { return ( get ( ) ) == err ; }
org . junit . Assert . assertThat ( count , org . hamcrest . CoreMatchers . is ( 1000 ) )
testIssue41 ( ) { for ( int x = 0 ; x < 64 ; x ++ ) { int [ ] a = new int [ ] { 2 , 3 , 4 , 5 } ; int [ ] b = new int [ 90 ] ; int [ ] c = new int [ a . length ] ; me . lemire . integercompression . SkippableIntegratedIntegerCODEC codec = new me . lemire . integercompression . SkippableIntegratedComposition ( new me . lemire . integercompression . IntegratedBinaryPacking ( ) , new me . lemire . integercompression . IntegratedVariableByte ( ) ) ; me . lemire . integercompression . IntWrapper aOffset = new me . lemire . integercompression . IntWrapper ( 0 ) ; me . lemire . integercompression . IntWrapper bOffset = new me . lemire . integercompression . IntWrapper ( x ) ; me . lemire . integercompression . IntWrapper initValue = new me . lemire . integercompression . IntWrapper ( 0 ) ; codec . headlessCompress ( a , aOffset , a . length , b , bOffset , initValue ) ; int len = ( bOffset . get ( ) ) - x ; bOffset . set ( x ) ; me . lemire . integercompression . IntWrapper cOffset = new me . lemire . integercompression . IntWrapper ( 0 ) ; initValue = new me . lemire . integercompression . IntWrapper ( 0 ) ; codec . headlessUncompress ( b , bOffset , len , c , cOffset , a . length , initValue ) ; "<AssertPlaceHolder>" ; } } headlessUncompress ( int [ ] , me . lemire . integercompression . IntWrapper , int , int [ ] , me . lemire . integercompression . IntWrapper , int , me . lemire . integercompression . IntWrapper ) { if ( inlength == 0 ) return ; int init = inpos . get ( ) ; F1 . headlessUncompress ( in , inpos , inlength , out , outpos , num , initvalue ) ; if ( ( inpos . get ( ) ) == init ) { inpos . increment ( ) ; } inlength -= ( inpos . get ( ) ) - init ; num -= outpos . get ( ) ; F2 . headlessUncompress ( in , inpos , inlength , out , outpos , num , initvalue ) ; }
org . junit . Assert . assertArrayEquals ( a , c )
testCreateFromBuffer ( ) { io . vertx . core . json . JsonArray excepted = new io . vertx . core . json . JsonArray ( ) ; excepted . add ( "foobar" ) ; excepted . add ( 123 ) ; io . vertx . core . buffer . Buffer buf = io . vertx . core . buffer . Buffer . buffer ( excepted . encode ( ) ) ; "<AssertPlaceHolder>" ; } encode ( ) { return io . vertx . core . json . Json . encode ( map ) ; }
org . junit . Assert . assertEquals ( excepted , new io . vertx . core . json . JsonArray ( buf ) )
testCopyLongArrayIntIntArrayImgOfT ( ) { final long [ ] input = new long [ ] { 0 , 1 , 2 , 3 , 4 , 5 , 6 , 7 , 8 } ; final int [ ] offsets = new int [ ] { 0 , 0 , 8 } ; final int [ ] [ ] strides = new int [ ] [ ] { new int [ ] { 1 , 3 } , new int [ ] { 3 , 1 } , new int [ ] { - 1 , - 3 } } ; final long [ ] [ ] [ ] expected = new long [ ] [ ] [ ] { new long [ ] [ ] { new long [ ] { 0 , 1 , 2 } , new long [ ] { 3 , 4 , 5 } , new long [ ] { 6 , 7 , 8 } } , new long [ ] [ ] { new long [ ] { 0 , 3 , 6 } , new long [ ] { 1 , 4 , 7 } , new long [ ] { 2 , 5 , 8 } } , new long [ ] [ ] { new long [ ] { 8 , 7 , 6 } , new long [ ] { 5 , 4 , 3 } , new long [ ] { 2 , 1 , 0 } } } ; for ( int i = 0 ; i < ( offsets . length ) ; i ++ ) { final net . imglib2 . img . Img < net . imglib2 . type . numeric . integer . LongType > img = new net . imglib2 . img . array . ArrayImgFactory ( new net . imglib2 . type . numeric . integer . LongType ( ) ) . create ( 3 , 3 ) ; net . imglib2 . util . ImgUtil . copy ( input , offsets [ i ] , strides [ i ] , img ) ; final net . imglib2 . RandomAccess < net . imglib2 . type . numeric . integer . LongType > ra = img . randomAccess ( ) ; final long [ ] location = new long [ 2 ] ; for ( int x = 0 ; x < 3 ; x ++ ) { location [ 0 ] = x ; for ( int y = 0 ; y < 3 ; y ++ ) { location [ 1 ] = y ; ra . setPosition ( location ) ; "<AssertPlaceHolder>" ; } } } } get ( ) { t . set ( this . source . get ( ) ) ; t . mul ( this . source . getDoublePosition ( ( ( this . source . numDimensions ( ) ) - 1 ) ) ) ; return t ; }
org . junit . Assert . assertEquals ( expected [ i ] [ y ] [ x ] , ra . get ( ) . get ( ) , 0 )
isInitialized_false ( ) { final org . apache . accumulo . core . client . Connector connector = getClusterInstance ( ) . getConnector ( ) ; final org . apache . rya . api . instance . RyaDetailsRepository repo = new org . apache . rya . accumulo . instance . AccumuloRyaInstanceDetailsRepository ( connector , getRyaInstanceName ( ) ) ; "<AssertPlaceHolder>" ; } isInitialized ( ) { return initialized ; }
org . junit . Assert . assertFalse ( repo . isInitialized ( ) )
serialize_bindingsSubsetOfVarOrder ( ) { final org . eclipse . rdf4j . query . impl . MapBindingSet originalBindingSet = new org . eclipse . rdf4j . query . impl . MapBindingSet ( ) ; originalBindingSet . addBinding ( "x" , org . apache . rya . indexing . pcj . storage . accumulo . AccumuloPcjSerializerTest . VF . createIRI ( "http://a" ) ) ; originalBindingSet . addBinding ( "y" , org . apache . rya . indexing . pcj . storage . accumulo . AccumuloPcjSerializerTest . VF . createIRI ( "http://b" ) ) ; final org . apache . rya . indexing . pcj . storage . accumulo . VariableOrder varOrder = new org . apache . rya . indexing . pcj . storage . accumulo . VariableOrder ( "x" , "a" , "y" , "b" ) ; org . apache . rya . indexing . pcj . storage . accumulo . BindingSetConverter < byte [ ] > converter = new org . apache . rya . indexing . pcj . storage . accumulo . AccumuloPcjSerializer ( ) ; byte [ ] serialized = converter . convert ( originalBindingSet , varOrder ) ; org . eclipse . rdf4j . query . BindingSet deserialized = converter . convert ( serialized , varOrder ) ; "<AssertPlaceHolder>" ; } convert ( java . lang . String , org . apache . rya . indexing . pcj . storage . accumulo . VariableOrder ) { requireNonNull ( bindingSetString ) ; requireNonNull ( varOrder ) ; if ( ( bindingSetString . isEmpty ( ) ) && ( varOrder . toString ( ) . isEmpty ( ) ) ) { return new org . eclipse . rdf4j . query . impl . MapBindingSet ( ) ; } final java . lang . String [ ] bindingStrings = bindingSetString . split ( org . apache . rya . indexing . pcj . storage . accumulo . BindingSetStringConverter . BINDING_DELIM ) ; final java . lang . String [ ] varOrderArr = varOrder . toArray ( ) ; checkArgument ( ( ( varOrderArr . length ) == ( bindingStrings . length ) ) , "The<sp>number<sp>of<sp>Bindings<sp>must<sp>match<sp>the<sp>length<sp>of<sp>the<sp>VariableOrder." ) ; final org . eclipse . rdf4j . query . algebra . evaluation . QueryBindingSet bindingSet = new org . eclipse . rdf4j . query . algebra . evaluation . QueryBindingSet ( ) ; for ( int i = 0 ; i < ( bindingStrings . length ) ; i ++ ) { final java . lang . String bindingString = bindingStrings [ i ] ; if ( ! ( org . apache . rya . indexing . pcj . storage . accumulo . BindingSetStringConverter . NULL_VALUE_STRING . equals ( bindingString ) ) ) { final java . lang . String name = varOrderArr [ i ] ; final org . eclipse . rdf4j . model . Value value = org . apache . rya . indexing . pcj . storage . accumulo . BindingSetStringConverter . toValue ( bindingStrings [ i ] ) ; bindingSet . addBinding ( name , value ) ; } } return bindingSet ; }
org . junit . Assert . assertEquals ( originalBindingSet , deserialized )
testTransformValues ( ) { gnu . trove . THashMap < java . lang . String , java . lang . String > tHashMapWithRealDelegate = new gnu . trove . THashMap ( ) ; tHashMapWithRealDelegate . put ( gnu . trove . THashMapTest . HELLO , gnu . trove . THashMapTest . WORLD ) ; tHashMapWithRealDelegate . transformValues ( ( value ) -> "New" ) ; "<AssertPlaceHolder>" ; } get ( java . lang . String ) { return metadata . find ( key ) ; }
org . junit . Assert . assertEquals ( "New" , tHashMapWithRealDelegate . get ( gnu . trove . THashMapTest . HELLO ) )
testJsonSerializer ( ) { com . github . ldriscoll . ektorplucene . designdocument . LuceneDefaults defaults = new com . github . ldriscoll . ektorplucene . designdocument . LuceneDefaults ( ) ; defaults . setStore ( "yes" ) ; defaults . setIndex ( "not_analyzed" ) ; com . github . ldriscoll . ektorplucene . designdocument . LuceneIndex index = new com . github . ldriscoll . ektorplucene . designdocument . LuceneIndex ( ) ; index . setIndex ( "function()<sp>{<sp>return<sp>new<sp>Document();<sp>}" ) ; index . setAnalyzer ( "en" ) ; index . setDefaults ( defaults ) ; com . fasterxml . jackson . databind . ObjectMapper mapper = new com . fasterxml . jackson . databind . ObjectMapper ( ) ; java . lang . String json = mapper . writeValueAsString ( index ) ; com . github . ldriscoll . ektorplucene . designdocument . LuceneIndex convertedIndex = mapper . readValue ( json , com . github . ldriscoll . ektorplucene . designdocument . LuceneIndex . class ) ; "<AssertPlaceHolder>" ; } setDefaults ( com . github . ldriscoll . ektorplucene . designdocument . LuceneDefaults ) { this . defaults = defaults ; }
org . junit . Assert . assertEquals ( index , convertedIndex )
testRecoveryWillIgnoreMinReplication ( ) { tearDown ( ) ; final int blockSize = 4096 ; final int numReplicas = 3 ; final java . lang . String filename = "/testIgnoreMinReplication" ; final org . apache . hadoop . fs . Path filePath = new org . apache . hadoop . fs . Path ( filename ) ; org . apache . hadoop . conf . Configuration configuration = new org . apache . hadoop . hdfs . HdfsConfiguration ( ) ; configuration . setInt ( org . apache . hadoop . hdfs . DFSConfigKeys . DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY , 2000 ) ; configuration . setInt ( org . apache . hadoop . hdfs . DFSConfigKeys . DFS_NAMENODE_REPLICATION_MIN_KEY , 2 ) ; configuration . setLong ( org . apache . hadoop . hdfs . DFSConfigKeys . DFS_BLOCK_SIZE_KEY , blockSize ) ; org . apache . hadoop . hdfs . MiniDFSCluster cluster = null ; try { cluster = new org . apache . hadoop . hdfs . MiniDFSCluster . Builder ( configuration ) . numDataNodes ( 5 ) . build ( ) ; cluster . waitActive ( ) ; final org . apache . hadoop . hdfs . DistributedFileSystem dfs = cluster . getFileSystem ( ) ; final org . apache . hadoop . hdfs . server . namenode . FSNamesystem fsn = cluster . getNamesystem ( ) ; org . apache . hadoop . fs . FSDataOutputStream out = dfs . create ( filePath , ( ( short ) ( numReplicas ) ) ) ; out . write ( org . apache . hadoop . hdfs . AppendTestUtil . randomBytes ( 0 , blockSize ) ) ; out . hsync ( ) ; org . apache . hadoop . hdfs . DFSClient dfsClient = new org . apache . hadoop . hdfs . DFSClient ( new java . net . InetSocketAddress ( "localhost" , cluster . getNameNodePort ( ) ) , configuration ) ; org . apache . hadoop . hdfs . protocol . LocatedBlock blk = dfsClient . getNamenode ( ) . getBlockLocations ( filename , 0 , blockSize ) . getLastLocatedBlock ( ) ; java . util . List < org . apache . hadoop . hdfs . protocol . DatanodeInfo > dataNodes = java . util . Arrays . asList ( blk . getLocations ( ) ) ; "<AssertPlaceHolder>" ; for ( org . apache . hadoop . hdfs . protocol . DatanodeInfo dataNode : dataNodes . subList ( 0 , ( numReplicas - 1 ) ) ) { cluster . stopDataNode ( dataNode . getName ( ) ) ; } org . apache . hadoop . test . GenericTestUtils . waitFor ( new com . google . common . base . Supplier < java . lang . Boolean > ( ) { @ org . apache . hadoop . hdfs . server . datanode . Override public org . apache . hadoop . hdfs . server . datanode . Boolean get ( ) { return ( fsn . getNumDeadDataNodes ( ) ) == 2 ; } } , 300 , 300000 ) ; cluster . setLeasePeriod ( 100L , 100L ) ; org . apache . hadoop . test . GenericTestUtils . waitFor ( new com . google . common . base . Supplier < java . lang . Boolean > ( ) { @ org . apache . hadoop . hdfs . server . datanode . Override public org . apache . hadoop . hdfs . server . datanode . Boolean get ( ) { try { return dfs . isFileClosed ( filePath ) ; } catch ( java . io . IOException e ) { } return false ; } } , 300 , 300000 ) ; org . apache . hadoop . hdfs . DFSTestUtil . waitForReplication ( cluster , org . apache . hadoop . hdfs . DFSTestUtil . getFirstBlock ( dfs , filePath ) , 1 , numReplicas , 0 ) ; } finally { if ( cluster != null ) { cluster . shutdown ( ) ; } } } size ( ) { return loggers . size ( ) ; }
org . junit . Assert . assertEquals ( dataNodes . size ( ) , numReplicas )
testShouldCleanUpProcessorsOnUpdateAndCheckIn ( ) { java . util . concurrent . atomic . AtomicBoolean cleanUp = registerCleanUpDLProcessor ( ) ; com . liferay . portal . kernel . repository . model . FileEntry fileEntry = com . liferay . document . library . kernel . service . DLAppServiceUtil . addFileEntry ( _serviceContext . getScopeGroupId ( ) , DLFolderConstants . DEFAULT_PARENT_FOLDER_ID , ( ( com . liferay . portal . kernel . util . StringUtil . randomString ( ) ) + ".pdf" ) , ContentTypes . APPLICATION_PDF , com . liferay . portal . kernel . util . StringUtil . randomString ( ) , com . liferay . portal . kernel . util . StringUtil . randomString ( ) , com . liferay . portal . kernel . util . StringUtil . randomString ( ) , com . liferay . portal . kernel . util . FileUtil . getBytes ( getClass ( ) , "dependencies/test.pdf" ) , _serviceContext ) ; byte [ ] bytes = com . liferay . portal . kernel . util . FileUtil . getBytes ( getClass ( ) , "dependencies/test.pdf" ) ; java . io . InputStream inputStream = new java . io . ByteArrayInputStream ( bytes ) ; com . liferay . document . library . kernel . service . DLAppServiceUtil . updateFileEntryAndCheckIn ( fileEntry . getFileEntryId ( ) , ( ( com . liferay . portal . kernel . util . StringUtil . randomString ( ) ) + ".pdf" ) , ContentTypes . APPLICATION_PDF , com . liferay . portal . kernel . util . StringUtil . randomString ( ) , com . liferay . portal . kernel . util . StringUtil . randomString ( ) , com . liferay . portal . kernel . util . StringUtil . randomString ( ) , DLVersionNumberIncrease . MAJOR , inputStream , bytes . length , _serviceContext ) ; "<AssertPlaceHolder>" ; } get ( ) { return _byteBuffer . get ( ) ; }
org . junit . Assert . assertTrue ( cleanUp . get ( ) )
shouldReturnColumnLabelFromQueryResultColumNames ( ) { for ( int i = 0 ; i != ( columnNames . length ) ; ++ i ) { "<AssertPlaceHolder>" ; } } getColumnLabel ( int ) { return provider . getStringValue ( adjustColumn ( index ) , ResultsMetadataConstants . COLUMN_LABEL ) ; }
org . junit . Assert . assertThat ( metadata . getColumnLabel ( ( i + 1 ) ) , org . hamcrest . core . Is . is ( columnNames [ i ] ) )
testNoSystemVariableSet ( ) { java . lang . System . clearProperty ( "nhinc.properties.dir" ) ; gov . hhs . fha . nhinc . properties . PropertyAccessorFileUtilities fileUtilities = new gov . hhs . fha . nhinc . properties . PropertyAccessorFileUtilities ( ) ; "<AssertPlaceHolder>" ; } getPropertyFileLocation ( ) { return propertyFileDirAbsolutePath ; }
org . junit . Assert . assertEquals ( "" , fileUtilities . getPropertyFileLocation ( ) )
getModifiersTest ( ) { for ( Map . Entry < java . lang . reflect . Field , jdk . vm . ci . meta . ResolvedJavaField > e : fields . entrySet ( ) ) { int expected = e . getKey ( ) . getModifiers ( ) ; int actual = e . getValue ( ) . getModifiers ( ) ; "<AssertPlaceHolder>" ; } }
org . junit . Assert . assertEquals ( expected , actual )
setAndGetAccount ( ) { fi . helsinki . cs . tmc . cli . backend . Account account = new fi . helsinki . cs . tmc . cli . backend . Account ( ) ; settings . setAccount ( null , account ) ; "<AssertPlaceHolder>" ; } getAccount ( ) { verifySearchIsCalled ( ) ; return account ; }
org . junit . Assert . assertEquals ( account , settings . getAccount ( ) )
testGetSubResourceProperties ( ) { org . apache . ambari . server . api . resources . ResourceDefinition resourceDefinition = new org . apache . ambari . server . api . resources . StackResourceDefinition ( ) ; java . util . Map < org . apache . ambari . server . controller . spi . Resource . Type , java . lang . String > mapIds = new java . util . HashMap ( ) ; mapIds . put ( Resource . Type . Stack , "HDP" ) ; org . apache . ambari . server . api . query . QueryImpl instance = new org . apache . ambari . server . api . query . QueryImplTest . TestQuery ( mapIds , resourceDefinition ) ; org . apache . ambari . server . controller . spi . Predicate predicate = new org . apache . ambari . server . controller . utilities . PredicateBuilder ( ) . property ( "Stacks/stack_name" ) . equals ( "HDP" ) . and ( ) . property ( "versions/stackServices/StackServices/service_name" ) . equals ( "HBASE" ) . and ( ) . property ( "versions/operatingSystems/OperatingSystems/os_type" ) . equals ( "centos5" ) . toPredicate ( ) ; org . apache . ambari . server . api . query . ProcessingPredicateVisitor visitor = new org . apache . ambari . server . api . query . ProcessingPredicateVisitor ( instance ) ; org . apache . ambari . server . controller . utilities . PredicateHelper . visit ( predicate , visitor ) ; java . util . Set < java . lang . String > properties = visitor . getSubResourceProperties ( ) ; java . util . Set < java . lang . String > expected = new java . util . HashSet ( ) ; expected . add ( "versions/stackServices/StackServices/service_name" ) ; expected . add ( "versions/operatingSystems/OperatingSystems/os_type" ) ; "<AssertPlaceHolder>" ; } add ( org . apache . ambari . server . controller . metrics . MetricsAveragePerSecondDownsampling$Accumulo ) { long ts = accumulo . ts ; if ( ts < 9999999999L ) { ts = ts * 1000 ; } if ( isWithinTemporalQueryRange ( ts , temporalInfo ) ) { return super . add ( accumulo ) ; } return false ; }
org . junit . Assert . assertEquals ( expected , properties )
shouldConsumeInCaseInsensitiveMannerWithExpectedValuesWhenMatchingNonExactCase ( ) { makeCaseInsensitive ( ) ; tokens . consume ( "SELECT" ) ; tokens . consume ( "ALL" ) ; tokens . consume ( "COLUMNS" ) ; tokens . consume ( "FROM" ) ; tokens . consume ( "THIS" ) ; tokens . consume ( "TABLE" ) ; "<AssertPlaceHolder>" ; } hasNext ( ) { return ( index ) < ( size ) ; }
org . junit . Assert . assertThat ( tokens . hasNext ( ) , org . hamcrest . core . Is . is ( false ) )