| input (stringlengths 28–18.7k) | output (stringlengths 39–1.69k) |
|---|---|
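Read together, the rows appear to pair an "input" test method (plus its focal method) containing the `"<AssertPlaceHolder>"` token with an "output" assertion that fills that slot. The following is a minimal, self-contained sketch of that pairing using plain JUnit 4 only; it is loosely modeled on the `testGetMappingName` row below, with the mocking dropped, and the class and helper names are illustrative assumptions rather than part of the dataset.

```java
import org.junit.Assert;
import org.junit.Test;

public class AssertPlaceholderExampleTest {

    // Stand-in for the focal method that follows the test body in an "input" cell
    // (the real row delegates to a mocked collaborator; this sketch simplifies that).
    private static String getMappingName(String delegateName) {
        return delegateName;
    }

    @Test
    public void testGetMappingName() {
        String name = "name";
        String mappingName = getMappingName(name);
        // In the dataset, this position is the "<AssertPlaceHolder>" token;
        // the paired "output" cell supplies the assertion that goes here.
        Assert.assertEquals(name, mappingName);
    }
}
```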
testExtractDayOfYearDate ( ) { java . lang . String sqlText = ( "select<sp>d,<sp>EXTRACT(DAYOFYEAR<sp>FROM<sp>d)<sp>as<sp>\"DAYOFYEAR\"<sp>from<sp>" + ( com . splicemachine . derby . utils . SpliceDateFunctionsIT . tableWatcherI ) ) + "<sp>order<sp>by<sp>d" ; try ( com . splicemachine . derby . utils . ResultSet rs = methodWatcher . executeQuery ( sqlText ) ) { java . lang . String expected = "D<sp>|<sp>DAYOFYEAR<sp>|\n" + ( ( ( ( ( ( "------------------------\n" + "2009-01-02<sp>|<sp>2<sp>|\n" ) + "2009-01-02<sp>|<sp>2<sp>|\n" 0 ) + "2009-09-02<sp>|<sp>245<sp>|\n" ) + "2012-12-31<sp>|<sp>366<sp>|\n" ) + "2012-12-31<sp>|<sp>366<sp>|\n" ) + "2013-12-31<sp>|<sp>365<sp>|" ) ; "<AssertPlaceHolder>" ; } } toStringUnsorted ( com . splicemachine . homeless . ResultSet ) { return com . splicemachine . homeless . TestUtils . FormattedResult . ResultFactory . convert ( "" , rs , false ) . toString ( ) . trim ( ) ; }
|
org . junit . Assert . assertEquals ( ( ( "\n" + sqlText ) + "\n" ) , expected , TestUtils . FormattedResult . ResultFactory . toStringUnsorted ( rs ) )
|
testSelectClasses ( ) { org . qualipso . factory . jabuti . test . JabutiServiceTest . logger . info ( "testSelectClassesAndGetRequiredElements()" ) ; try { java . lang . String user = "SA" ; org . qualipso . factory . jabuti . test . MyStringArray classes = new org . qualipso . factory . jabuti . test . MyStringArray ( new java . lang . String [ ] { "*" } ) ; org . qualipso . factory . jabuti . test . JabutiServiceTest . logger . info ( ( "projectId<sp>=<sp>" + ( org . qualipso . factory . jabuti . test . JabutiServiceTest . projectId ) ) ) ; java . lang . String message = port . selectClassesToInstrument ( user , org . qualipso . factory . jabuti . test . JabutiServiceTest . projectId , classes ) ; "<AssertPlaceHolder>" ; } catch ( java . lang . Exception e ) { e . printStackTrace ( ) ; org . junit . Assert . fail ( e . getMessage ( ) ) ; } } selectClassesToInstrument ( java . lang . String , java . lang . String , java . lang . String [ ] ) { init ( ) ; org . qualipso . factory . jabuti . ws . VerifingData verifingdata = new org . qualipso . factory . jabuti . ws . VerifingData ( ) ; if ( verifingdata . existProject ( projectId , props ) ) { org . qualipso . factory . jabuti . ws . WsProject control = new org . qualipso . factory . jabuti . ws . WsProject ( props ) ; control . selectClassesToInstrument ( projectId , classes ) ; return "The<sp>classes<sp>were<sp>instrumented." ; } else throw new org . qualipso . factory . jabuti . InvalidProjectIdFault ( "The<sp>project<sp>does<sp>not<sp>exist." ) ; }
|
org . junit . Assert . assertEquals ( message , "The<sp>classes<sp>were<sp>instrumented." )
|
testParserLexerCombo ( ) { org . antlr . v4 . misc . Graph < java . lang . String > g = new org . antlr . v4 . misc . Graph < java . lang . String > ( ) ; g . addEdge ( "JavaLexer.tokens" , "JavaLexer.g4" ) ; g . addEdge ( "JavaParser.g4" , "JavaLexer.tokens" ) ; g . addEdge ( "Def.g4" , "JavaLexer.tokens" ) ; g . addEdge ( "Ref.g4" , "JavaLexer.tokens" ) ; java . lang . String expecting = "[JavaLexer.g4,<sp>JavaLexer.tokens,<sp>JavaParser.g4,<sp>Def.g4,<sp>Ref.g4]" ; java . util . List < java . lang . String > nodes = g . sort ( ) ; java . lang . String result = nodes . toString ( ) ; "<AssertPlaceHolder>" ; } toString ( ) { return toString ( false ) ; }
|
org . junit . Assert . assertEquals ( expecting , result )
|
testRevoke ( ) { java . lang . String cn = java . util . UUID . randomUUID ( ) . toString ( ) ; org . bouncycastle . pkcs . PKCS10CertificationRequest csr = generateCSR ( cn ) ; org . apache . hadoop . yarn . server . resourcemanager . security . RMAppSecurityActions actor = org . apache . hadoop . yarn . server . resourcemanager . security . RMAppSecurityActionsFactory . getInstance ( ) . getActor ( conf ) ; actor . sign ( csr ) ; int response = actor . revoke ( ( ( ( ( cn + "__" ) + ( org . apache . hadoop . yarn . server . resourcemanager . security . TestHopsworksRMAppSecurityActions . O ) ) + "__" ) + ( org . apache . hadoop . yarn . server . resourcemanager . security . TestHopsworksRMAppSecurityActions . OU ) ) ) ; "<AssertPlaceHolder>" ; } revoke ( java . lang . String ) { if ( ! ( x509Configured ) ) { x509NotConfigured ( "revoke" ) ; } org . apache . http . client . methods . CloseableHttpResponse response = null ; try { java . lang . String queryParams = buildQueryParams ( new org . apache . http . message . BasicNameValuePair ( org . apache . hadoop . yarn . server . resourcemanager . security . HopsworksRMAppSecurityActions . REVOKE_CERT_ID_PARAM , certificateIdentifier ) ) ; java . net . URL revokeUrl = buildUrl ( revokePath , queryParams ) ; response = delete ( revokeUrl . toURI ( ) , ( "Hopsworks<sp>CA<sp>could<sp>not<sp>revoke<sp>certificate<sp>" + certificateIdentifier ) ) ; return response . getStatusLine ( ) . getStatusCode ( ) ; } finally { if ( response != null ) { response . close ( ) ; } } }
|
org . junit . Assert . assertEquals ( 200 , response )
|
testFalse ( ) { this . verifier . withCreateActualResults ( false ) . verify ( TableTestUtils . TABLE_NAME , TableTestUtils . ACTUAL ) ; this . verifier . succeeded ( this . description . get ( ) ) ; "<AssertPlaceHolder>" ; } getActualFile ( ) { java . io . File dir = this . directoryStrategy . getActualDirectory ( this . description . getTestClass ( ) ) ; java . lang . String filename = this . fileStrategy . getActualFilename ( this . description . getTestClass ( ) , this . description . getMethodName ( ) ) ; return new java . io . File ( dir , filename ) ; }
|
org . junit . Assert . assertFalse ( this . verifier . getActualFile ( ) . exists ( ) )
|
testGetMappingName ( ) { java . lang . String name = "name" ; when ( delegate . getMappingName ( ) ) . thenReturn ( name ) ; "<AssertPlaceHolder>" ; } getMappingName ( ) { return delegate . getMappingName ( ) ; }
|
org . junit . Assert . assertEquals ( name , mapping . getMappingName ( ) )
|
testEqualsNull ( ) { org . apache . cayenne . ObjectId o = org . apache . cayenne . ObjectId . of ( "T" , "ARTIST_ID" , 42 ) ; "<AssertPlaceHolder>" ; } of ( java . lang . String , java . lang . String , java . lang . Object ) { if ( value instanceof java . lang . Number ) { return new org . apache . cayenne . ObjectIdNumber ( entityName , keyName , ( ( java . lang . Number ) ( value ) ) ) ; } return new org . apache . cayenne . ObjectIdSingle ( entityName , keyName , value ) ; }
|
org . junit . Assert . assertNotNull ( o )
|
testSerializesAndDeserializes ( ) { org . calrissian . mango . domain . Attribute attribute = new org . calrissian . mango . domain . Attribute ( "key" , "val" , com . google . common . collect . ImmutableMap . of ( "metaKey" , "metaVal" ) ) ; byte [ ] serialized = serialize ( new org . calrissian . accumulorecipes . commons . hadoop . AttributeWritable ( attribute ) ) ; org . calrissian . mango . domain . Attribute actual = asWritable ( serialized , org . calrissian . accumulorecipes . commons . hadoop . AttributeWritable . class ) . get ( ) ; "<AssertPlaceHolder>" ; } get ( ) { return attribute ; }
|
org . junit . Assert . assertEquals ( attribute , actual )
|
createBillingDataForOrganization_EmptyList ( ) { org . oscm . billingservice . business . model . billingresult . BillingDetailsType doc = createBillingDataForOrganization ( 1 , 1 , 10 , 20 , 0 , new org . oscm . domobjects . BillingResult ( ) , true , new java . util . ArrayList < org . oscm . domobjects . SubscriptionHistory > ( ) , "EUR" ) ; "<AssertPlaceHolder>" ; verifyZeroInteractions ( bb . bdr , dm ) ; } createBillingDataForOrganization ( long , long , long , long , long , org . oscm . domobjects . BillingResult , boolean , java . util . List , java . lang . String ) { org . oscm . billingservice . service . model . BillingInput . Builder billingInput = new org . oscm . billingservice . service . model . BillingInput . Builder ( ) ; billingInput . setOrganizationKey ( organizationKey ) ; billingInput . setSubscriptionKey ( subscriptionKey ) ; billingInput . setBillingPeriodStart ( startOfPeriod ) ; billingInput . setBillingPeriodEnd ( endOfPeriod ) ; billingInput . setCutOffDate ( cutOffDate ) ; billingInput . setStoreBillingResult ( storeResultXML ) ; billingInput . setSubscriptionHistoryEntries ( subHistEntries ) ; billingInput . setCurrencyIsoCode ( currency ) ; return bb . createBillingDataForOrganization ( billingInput . build ( ) , billingResult ) ; }
|
org . junit . Assert . assertNull ( doc )
|
checkMessageMethod ( ) { java . lang . String str = "test" ; info . smart_tools . smartactors . feature_loading_system . interfaces . ifilesystem_tracker . exception . FilesystemTrackerStartupException exception = new info . smart_tools . smartactors . feature_loading_system . interfaces . ifilesystem_tracker . exception . FilesystemTrackerStartupException ( str ) ; "<AssertPlaceHolder>" ; throw exception ; } getMessage ( ) { return message ; }
|
org . junit . Assert . assertEquals ( exception . getMessage ( ) , str )
|
testGroupByError ( ) { java . lang . String sql = "select<sp>count(clickcount_ag)<sp>as<sp>\"clickcount_ag\",<sp>testDim<sp>from<sp>tabletest<sp>group<sp>by<sp>testDim<sp>order<sp>by<sp>count(clickcount_ag)<sp>limit<sp>100" ; com . ebay . pulsar . analytics . query . sql . SQLTranslator sqlTranslator = org . mockito . Mockito . mock ( com . ebay . pulsar . analytics . query . sql . SQLTranslator . class , Mockito . CALLS_REAL_METHODS ) ; com . ebay . pulsar . analytics . query . sql . SQLTranslator . QueryDescription queryDesc = sqlTranslator . parse ( sql ) ; com . foundationdb . sql . parser . SelectNode selectNode = queryDesc . getSelectNode ( ) ; com . foundationdb . sql . parser . CursorNode cursorNode = queryDesc . getCursorNode ( ) ; java . util . Map < java . lang . String , java . lang . String > columnsMap = new java . util . HashMap < java . lang . String , java . lang . String > ( ) ; try { sqlTranslator . groupByCheck ( selectNode , cursorNode , columnsMap ) ; org . junit . Assert . fail ( "expected<sp>SqlTranslationException" ) ; } catch ( com . ebay . pulsar . analytics . exception . SqlTranslationException ex ) { "<AssertPlaceHolder>" ; } } groupByCheck ( com . foundationdb . sql . parser . SelectNode , com . foundationdb . sql . parser . CursorNode , java . util . Map ) { com . foundationdb . sql . parser . OrderByList orderByList = cursorNode . getOrderByList ( ) ; com . foundationdb . sql . parser . GroupByList groupByList = selectNode . getGroupByList ( ) ; if ( ( groupByList == null ) || ( ( groupByList . size ( ) ) == 0 ) ) { if ( ( orderByList != null ) && ( ( orderByList . size ( ) ) > 0 ) ) { throw new com . ebay . pulsar . analytics . exception . SqlTranslationException ( ( ( ExceptionErrorCode . SQL_PARSING_ERROR . getErrorMessage ( ) ) + "'Order<sp>By<sp>can<sp>only<sp>be<sp>used<sp>with<sp>Group<sp>By.'<sp>" ) ) ; } if ( ( columnsMap . size ( ) ) > 0 ) { throw new com . ebay . pulsar . analytics . exception . SqlTranslationException ( ( ( ( ExceptionErrorCode . INVALID_AGGREGATE . getErrorMessage ( ) ) + "Group<sp>By<sp>columns<sp>required<sp>for<sp>dimensions:<sp>" ) + ( columnsMap . values ( ) ) ) ) ; } } else { java . util . Set < java . lang . String > groupByColSet = com . google . common . collect . Sets . newHashSet ( ) ; int listSize = groupByList . size ( ) ; for ( int i = 0 ; i < listSize ; i ++ ) { com . foundationdb . sql . parser . GroupByColumn groupByCol = ( ( com . foundationdb . sql . parser . GroupByColumn ) ( groupByList . get ( i ) ) ) ; com . foundationdb . sql . parser . ValueNode vNode = groupByCol . getColumnExpression ( ) ; if ( vNode instanceof com . foundationdb . sql . parser . ColumnReference ) { com . foundationdb . sql . parser . ColumnReference column = ( ( com . foundationdb . sql . parser . ColumnReference ) ( vNode ) ) ; java . lang . String colName = column . getColumnName ( ) ; if ( ! ( columnsMap . containsKey ( colName ) ) ) { throw new com . ebay . pulsar . analytics . exception . SqlTranslationException ( ( ( ( ExceptionErrorCode . INVALID_AGGREGATE . getErrorMessage ( ) ) + "Group<sp>By<sp>column<sp>not<sp>found<sp>in<sp>Selection:<sp>" ) + colName ) ) ; } groupByColSet . add ( colName ) ; } else { throw new com . ebay . pulsar . analytics . exception . SqlTranslationException ( ( ( ExceptionErrorCode . INVALID_AGGREGATE . getErrorMessage ( ) ) + "No<sp>column<sp>refernce<sp>found<sp>for<sp>Group<sp>By." ) ) ; } } for ( Map . Entry < java . lang . String , java . lang . String > entry : columnsMap . 
entrySet ( ) ) { java . lang . String key = entry . getKey ( ) ; if ( ! ( groupByColSet . contains ( key ) ) ) { throw new com . ebay . pulsar . analytics . exception . SqlTranslationException ( ( ( ( ExceptionErrorCode . INVALID_AGGREGATE . getErrorMessage ( ) ) + "Group<sp>By<sp>column<sp>required<sp>for<sp>dimension<sp>in<sp>Selection:<sp>" ) + key ) ) ; } } } }
|
org . junit . Assert . assertTrue ( true )
|
testGeenMeldingOmdatKindZelfdeNaamHeeftAlsBroerEnZus ( ) { final nl . bzk . brp . model . hisvolledig . impl . kern . PersoonHisVolledigImpl ouder1 = new nl . bzk . brp . model . hisvolledig . impl . kern . PersoonHisVolledigImpl ( new nl . bzk . brp . model . algemeen . stamgegeven . kern . SoortPersoonAttribuut ( nl . bzk . brp . model . algemeen . stamgegeven . kern . SoortPersoon . INGESCHREVENE ) ) ; final nl . bzk . brp . model . hisvolledig . impl . kern . PersoonHisVolledigImpl ouder2 = new nl . bzk . brp . model . hisvolledig . impl . kern . PersoonHisVolledigImpl ( new nl . bzk . brp . model . algemeen . stamgegeven . kern . SoortPersoonAttribuut ( nl . bzk . brp . model . algemeen . stamgegeven . kern . SoortPersoon . INGESCHREVENE ) ) ; final nl . bzk . brp . model . hisvolledig . impl . kern . PersoonHisVolledigImpl kind1 = maakKind ( nl . bzk . brp . bijhouding . business . regels . impl . bijhouding . afstamming . acties . afstamming . BRBY0107Test . GESLACHTSNAAM , 1 , true , false ) ; final nl . bzk . brp . model . hisvolledig . impl . kern . PersoonHisVolledigImpl kind2 = maakKind ( nl . bzk . brp . bijhouding . business . regels . impl . bijhouding . afstamming . acties . afstamming . BRBY0107Test . GESLACHTSNAAM , 2 , true , false ) ; final nl . bzk . brp . model . hisvolledig . impl . kern . PersoonHisVolledigImpl kind3 = maakKind ( nl . bzk . brp . bijhouding . business . regels . impl . bijhouding . afstamming . acties . afstamming . BRBY0107Test . GESLACHTSNAAM , 3 , true , false ) ; nl . bzk . brp . util . RelatieTestUtil . bouwFamilieRechtelijkeBetrekking ( ouder1 , ouder2 , kind1 , actieModel ) ; nl . bzk . brp . util . RelatieTestUtil . bouwFamilieRechtelijkeBetrekking ( ouder1 , ouder2 , kind2 , actieModel ) ; nl . bzk . brp . util . RelatieTestUtil . bouwFamilieRechtelijkeBetrekking ( ouder1 , ouder2 , kind3 , actieModel ) ; final nl . bzk . brp . model . hisvolledig . impl . kern . FamilierechtelijkeBetrekkingHisVolledigImpl familie = nl . bzk . brp . util . RelatieTestUtil . haalFamilieRechtelijkeBetrekkingUitPersoonBetrokkenhedenWaarPersoonKindInIs ( kind1 ) ; final nl . bzk . brp . model . hisvolledig . momentview . kern . FamilierechtelijkeBetrekkingView familieView = new nl . bzk . brp . model . hisvolledig . momentview . kern . FamilierechtelijkeBetrekkingView ( familie , nl . bzk . brp . model . algemeen . attribuuttype . kern . DatumTijdAttribuut . nu ( ) , nl . bzk . brp . model . algemeen . attribuuttype . kern . DatumAttribuut . vandaag ( ) ) ; final java . util . List < nl . bzk . brp . model . basis . BerichtEntiteit > resultaat = brby0107 . voerRegelUit ( familieView , maakFamilierechtelijkeBetrekkingBericht ( ) ) ; "<AssertPlaceHolder>" ; } size ( ) { return elementen . size ( ) ; }
|
org . junit . Assert . assertEquals ( 0 , resultaat . size ( ) )
|
testPlainTextToDeveloper ( ) { java . util . List < java . lang . String > errors = messageManager . processMessage ( org . sagebionetworks . repo . manager . MessageManagerImplSESTest . MESSAGE_ID_PLAIN_TEXT , mockProgressCallback ) ; "<AssertPlaceHolder>" ; } toString ( ) { double percent = ( ( ( double ) ( currentIndex ) ) / ( ( double ) ( totalCount ) ) ) * 100.0 ; return java . lang . String . format ( "%1$-30s<sp>%2$10d/%3$-10d<sp>%4$8.2f<sp>%%" , message , currentIndex , totalCount , percent ) ; }
|
org . junit . Assert . assertEquals ( errors . toString ( ) , 0 , errors . size ( ) )
|
existingDocumentNonTerminalFromUIDeprecatedCheckEscaping ( ) { org . xwiki . model . reference . DocumentReference documentReference = new org . xwiki . model . reference . DocumentReference ( "xwiki" , java . util . Arrays . asList ( "Main" ) , "WebHome" ) ; com . xpn . xwiki . doc . XWikiDocument document = mock ( com . xpn . xwiki . doc . XWikiDocument . class ) ; when ( document . getDocumentReference ( ) ) . thenReturn ( documentReference ) ; when ( document . isNew ( ) ) . thenReturn ( false ) ; context . setDoc ( document ) ; when ( mockRequest . getParameter ( "space" ) ) . thenReturn ( "X.Y" ) ; when ( mockRequest . getParameter ( "tocreate" ) ) . thenReturn ( "space" ) ; java . lang . String result = action . render ( context ) ; "<AssertPlaceHolder>" ; verify ( mockURLFactory ) . createURL ( "X\\.Y" , "WebHome" , "edit" , "template=&parent=Main.WebHome&title=X.Y" , null , "xwiki" , context ) ; } render ( com . xpn . xwiki . XWikiContext ) { com . xpn . xwiki . web . XWikiRequest request = context . getRequest ( ) ; java . lang . String path = request . getRequestURI ( ) ; java . lang . String filename = com . xpn . xwiki . util . Util . decodeURI ( path . substring ( ( ( path . lastIndexOf ( "/" ) ) + 1 ) ) , context ) ; try { ( ( com . xpn . xwiki . plugin . charts . ChartingPluginApi ) ( context . getWiki ( ) . getPluginApi ( "charting" , context ) ) ) . outputFile ( filename , context ) ; } catch ( java . io . IOException e ) { throw new com . xpn . xwiki . XWikiException ( com . xpn . xwiki . XWikiException . MODULE_XWIKI_APP , com . xpn . xwiki . XWikiException . ERROR_XWIKI_APP_SEND_RESPONSE_EXCEPTION , "Exception<sp>while<sp>sending<sp>response" , e ) ; } return null ; }
|
org . junit . Assert . assertNull ( result )
|
registered_device_with_rpk_to_server_with_x509cert ( ) { helper . createServerWithX509Cert ( ) ; helper . server . start ( ) ; boolean useServerCertifcatePublicKey = true ; helper . createRPKClient ( useServerCertifcatePublicKey ) ; helper . client . start ( ) ; helper . getSecurityStore ( ) . add ( org . eclipse . leshan . server . security . SecurityInfo . newRawPublicKeyInfo ( helper . getCurrentEndpoint ( ) , helper . clientPublicKey ) ) ; helper . client . start ( ) ; helper . waitForRegistrationAtServerSide ( 1 ) ; "<AssertPlaceHolder>" ; } getCurrentRegistration ( ) { return server . getRegistrationService ( ) . getByEndpoint ( currentEndpointIdentifier . get ( ) ) ; }
|
org . junit . Assert . assertNotNull ( helper . getCurrentRegistration ( ) )
|
testParseNegativeY ( ) { java . lang . String source = ( "{1" + ( getDecimalCharacter ( ) ) ) + "2323}" ; org . apache . commons . math3 . geometry . euclidean . oned . Vector1D expected = new org . apache . commons . math3 . geometry . euclidean . oned . Vector1D ( 1.2323 ) ; org . apache . commons . math3 . geometry . euclidean . oned . Vector1D actual = vector1DFormat . parse ( source ) ; "<AssertPlaceHolder>" ; } parse ( com . google . javascript . jscomp . AbstractCompiler ) { try { com . google . javascript . jscomp . JsAst . logger_ . fine ( ( "Parsing:<sp>" + ( sourceFile . getName ( ) ) ) ) ; com . google . javascript . jscomp . parsing . ParserRunner . ParseResult result = com . google . javascript . jscomp . parsing . ParserRunner . parse ( sourceFile , sourceFile . getCode ( ) , compiler . getParserConfig ( ) , compiler . getDefaultErrorReporter ( ) , com . google . javascript . jscomp . JsAst . logger_ ) ; root = result . ast ; compiler . setOldParseTree ( sourceFile . getName ( ) , result . oldAst ) ; } catch ( java . io . IOException e ) { compiler . report ( com . google . javascript . jscomp . JSError . make ( AbstractCompiler . READ_ERROR , sourceFile . getName ( ) ) ) ; } if ( ( ( root ) == null ) || ( compiler . hasHaltingErrors ( ) ) ) { root = com . google . javascript . rhino . IR . script ( ) ; } else { compiler . prepareAst ( root ) ; } root . setStaticSourceFile ( sourceFile ) ; }
|
org . junit . Assert . assertEquals ( expected , actual )
|
testRootBean ( ) { final org . apache . commons . beanutils2 . bugs . RootBean bean = new org . apache . commons . beanutils2 . bugs . FirstChildBean ( ) ; final java . lang . Class < ? > propertyType = org . apache . commons . beanutils2 . PropertyUtils . getPropertyType ( bean , "file[0]" ) ; "<AssertPlaceHolder>" ; } getName ( ) { return this . name ; }
|
org . junit . Assert . assertEquals ( java . lang . String . class . getName ( ) , propertyType . getName ( ) )
|
testMinusOneParallelRulesBuildThresholdConfiguration ( ) { try { java . lang . System . getProperties ( ) . put ( ParallelRulesBuildThresholdOption . PROPERTY_NAME , "-1" ) ; org . drools . compiler . builder . impl . KnowledgeBuilderConfigurationImpl kbConfigImpl = new org . drools . compiler . builder . impl . KnowledgeBuilderConfigurationImpl ( ) ; "<AssertPlaceHolder>" ; } finally { java . lang . System . getProperties ( ) . remove ( ParallelRulesBuildThresholdOption . PROPERTY_NAME ) ; } } getParallelRulesBuildThreshold ( ) { return parallelRulesBuildThreshold . getParallelRulesBuildThreshold ( ) ; }
|
org . junit . Assert . assertThat ( kbConfigImpl . getParallelRulesBuildThreshold ( ) , org . hamcrest . CoreMatchers . is ( ( - 1 ) ) )
|
appendStringWithSeparatorAndStartAndEnd ( ) { org . eclipse . collections . api . RichIterable < java . lang . Object > collection = this . newWith ( 1 , 2 , 3 ) ; java . lang . Appendable builder = new java . lang . StringBuilder ( ) ; collection . appendString ( builder , "[" , ",<sp>" , "]" ) ; "<AssertPlaceHolder>" ; } toString ( ) { return new java . lang . String ( this . key ) ; }
|
org . junit . Assert . assertEquals ( collection . toString ( ) , builder . toString ( ) )
|
isValidAnonymous ( ) { boolean ok = container . isValid ( "anonymous" , "anonymous" ) ; "<AssertPlaceHolder>" ; } isValid ( java . lang . String , java . lang . String ) { if ( ( "anonymous" . equals ( principal ) ) && ( "anonymous" . equals ( ticket ) ) ) return true ; org . apache . zeppelin . ticket . TicketContainer . Entry entry = sessions . get ( principal ) ; return ( entry != null ) && ( entry . ticket . equals ( ticket ) ) ; }
|
org . junit . Assert . assertTrue ( ok )
|
testGetId ( ) { net . bluemix . todo . model . CloudantRow row = new net . bluemix . todo . model . CloudantRow ( ) ; row . setId ( "123" ) ; "<AssertPlaceHolder>" ; } getId ( ) { return id ; }
|
org . junit . Assert . assertEquals ( "123" , row . getId ( ) )
|
return_expression_name_when_use_constructor_with_message ( ) { final org . bonitasoft . engine . expression . exception . SExpressionEvaluationException sExpressionEvaluationException = new org . bonitasoft . engine . expression . exception . SExpressionEvaluationException ( message , expressionName ) ; final java . lang . String result = sExpressionEvaluationException . getExpressionName ( ) ; "<AssertPlaceHolder>" ; } getExpressionName ( ) { return expressionName ; }
|
org . junit . Assert . assertEquals ( expressionName , result )
|
testGlobal ( ) { "<AssertPlaceHolder>" ; } global ( ) { synchronized ( ch . cyberduck . core . threading . BackgroundActionRegistry . lock ) { if ( null == ( ch . cyberduck . core . threading . BackgroundActionRegistry . global ) ) { ch . cyberduck . core . threading . BackgroundActionRegistry . global = new ch . cyberduck . core . threading . BackgroundActionRegistry ( ) ; } return ch . cyberduck . core . threading . BackgroundActionRegistry . global ; } }
|
org . junit . Assert . assertSame ( ch . cyberduck . core . threading . BackgroundActionRegistry . global ( ) , ch . cyberduck . core . threading . BackgroundActionRegistry . global ( ) )
|
testDatanodeRollingUpgradeWithFinalize ( ) { try { startCluster ( ) ; org . apache . hadoop . fs . Path testFile1 = new org . apache . hadoop . fs . Path ( ( ( "/" + ( org . apache . hadoop . test . GenericTestUtils . getMethodName ( ) ) ) + ".01.dat" ) ) ; org . apache . hadoop . fs . Path testFile2 = new org . apache . hadoop . fs . Path ( ( ( "/" + ( org . apache . hadoop . test . GenericTestUtils . getMethodName ( ) ) ) + ".02.dat" ) ) ; org . apache . hadoop . hdfs . DFSTestUtil . createFile ( fs , testFile1 , org . apache . hadoop . hdfs . server . datanode . TestDataNodeRollingUpgrade . FILE_SIZE , org . apache . hadoop . hdfs . server . datanode . TestDataNodeRollingUpgrade . REPL_FACTOR , org . apache . hadoop . hdfs . server . datanode . TestDataNodeRollingUpgrade . SEED ) ; org . apache . hadoop . hdfs . DFSTestUtil . createFile ( fs , testFile2 , org . apache . hadoop . hdfs . server . datanode . TestDataNodeRollingUpgrade . FILE_SIZE , org . apache . hadoop . hdfs . server . datanode . TestDataNodeRollingUpgrade . REPL_FACTOR , org . apache . hadoop . hdfs . server . datanode . TestDataNodeRollingUpgrade . SEED ) ; startRollingUpgrade ( ) ; java . io . File blockFile = getBlockForFile ( testFile2 , true ) ; java . io . File trashFile = getTrashFileForBlock ( blockFile , false ) ; deleteAndEnsureInTrash ( testFile2 , blockFile , trashFile ) ; finalizeRollingUpgrade ( ) ; "<AssertPlaceHolder>" ; assert ! ( fs . exists ( testFile2 ) ) ; assert fs . exists ( testFile1 ) ; } finally { shutdownCluster ( ) ; } } isTrashRootPresent ( ) { org . apache . hadoop . hdfs . server . datanode . BlockPoolSliceStorage bps = dn0 . getStorage ( ) . getBPStorage ( blockPoolId ) ; return bps . trashEnabled ( ) ; }
|
org . junit . Assert . assertFalse ( isTrashRootPresent ( ) )
|
testSub ( ) { com . pack . one . Calculator calculator = new com . pack . one . Calculator ( ) ; "<AssertPlaceHolder>" ; } sub ( float , float ) { return a - b ; }
|
org . junit . Assert . assertEquals ( 10 , calculator . sub ( 15 , 5 ) , ( - 1 ) )
|
testGreaterThanOrEquals2 ( ) { com . liferay . dynamic . data . mapping . expression . internal . DDMExpressionImpl < java . lang . Boolean > ddmExpression = createDDMExpression ( "1<sp>>=<sp>2" ) ; "<AssertPlaceHolder>" ; } evaluate ( ) { java . util . stream . Stream < java . lang . reflect . Method > methodStream = _parentTestMethods . stream ( ) ; org . junit . Assume . assumeTrue ( methodStream . noneMatch ( ( m ) -> java . util . Objects . equals ( m . getName ( ) , description . getMethodName ( ) ) ) ) ; statement . evaluate ( ) ; }
|
org . junit . Assert . assertFalse ( ddmExpression . evaluate ( ) )
|
getLineFeedSeparatedList_whenConstructedWithALineFeedSeparatedListOfLocations_shouldReturnLineFeedSeparatedString ( ) { org . jkiss . dbeaver . ext . greenplum . model . GreenplumExternalTableUriLocationsHandler handler = new org . jkiss . dbeaver . ext . greenplum . model . GreenplumExternalTableUriLocationsHandler ( "location1\nlocation2" , '\n' ) ; "<AssertPlaceHolder>" ; } getLineFeedSeparatedList ( ) { return org . jkiss . utils . CommonUtils . joinStrings ( "\n" , this . uriLocations ) ; }
|
org . junit . Assert . assertEquals ( "location1\nlocation2" , handler . getLineFeedSeparatedList ( ) )
|
testLeeg ( ) { final nl . bzk . brp . util . hisvolledig . kern . PersoonHisVolledigImplBuilder builder = new nl . bzk . brp . util . hisvolledig . kern . PersoonHisVolledigImplBuilder ( nl . bzk . brp . model . algemeen . stamgegeven . kern . SoortPersoon . INGESCHREVENE ) ; final nl . bzk . brp . model . hisvolledig . predikaatview . kern . PersoonHisVolledigView persoonHisVolledig = new nl . bzk . brp . model . hisvolledig . predikaatview . kern . PersoonHisVolledigView ( builder . build ( ) , null ) ; final nl . bzk . migratiebrp . conversie . model . brp . BrpStapel < nl . bzk . migratiebrp . conversie . model . brp . groep . BrpBehandeldAlsNederlanderIndicatieInhoud > brpBijhoudingsaard = mapper . map ( persoonHisVolledig , new nl . bzk . brp . levering . lo3 . mapper . OnderzoekMapper ( persoonHisVolledig ) , new nl . bzk . brp . levering . lo3 . mapper . TestActieHisVolledigLocator ( ) ) ; "<AssertPlaceHolder>" ; } map ( B , nl . bzk . brp . levering . lo3 . mapper . OnderzoekMapper , nl . bzk . brp . levering . lo3 . mapper . ActieHisVolledigLocator ) { final java . util . List < nl . bzk . migratiebrp . conversie . model . brp . BrpGroep < G > > groepen = new java . util . ArrayList ( ) ; final java . lang . Iterable < H > histories = getHistorieIterable ( volledig ) ; if ( histories != null ) { for ( final H historie : histories ) { if ( historie != null ) { if ( ! ( behoortTotStatischePersoonlijst ( historie ) ) ) { continue ; } final nl . bzk . migratiebrp . conversie . model . brp . BrpGroep < G > groep = mapGroep ( historie , onderzoekMapper , actieHisVolledigLocator ) ; if ( groep != null ) { groepen . add ( groep ) ; } } } } if ( groepen . isEmpty ( ) ) { return null ; } else { return new nl . bzk . migratiebrp . conversie . model . brp . BrpStapel < G > ( groepen ) ; } }
|
org . junit . Assert . assertNull ( brpBijhoudingsaard )
|
testDoCollectStream ( ) { final java . util . Collection < eu . toolchain . concurrent . Stage < java . lang . Object > > futures = com . google . common . collect . ImmutableList . of ( f1 , f2 ) ; doReturn ( completable ) . when ( underTest ) . completable ( ) ; doNothing ( ) . when ( underTest ) . bindSignals ( completable , futures ) ; "<AssertPlaceHolder>" ; verify ( underTest ) . completable ( ) ; verify ( underTest ) . bindSignals ( completable , futures ) ; verify ( f1 ) . handle ( any ( eu . toolchain . concurrent . CollectHelper . class ) ) ; verify ( f2 ) . handle ( any ( eu . toolchain . concurrent . CollectHelper . class ) ) ; } doStreamCollect ( java . util . Collection , java . util . function . Consumer , java . util . function . Supplier ) { final eu . toolchain . concurrent . Completable < U > target = completable ( ) ; final eu . toolchain . concurrent . StreamCollectHelper < ? super T , ? extends U > done = new eu . toolchain . concurrent . StreamCollectHelper ( caller , stages . size ( ) , consumer , supplier , target ) ; for ( final eu . toolchain . concurrent . Stage < ? extends T > q : stages ) { q . handle ( done ) ; } bindSignals ( target , stages ) ; return target ; }
|
org . junit . Assert . assertEquals ( completable , underTest . doStreamCollect ( futures , consumer , supplier ) )
|
testConstructor ( ) { org . openhealthtools . mdht . uml . cda . cdt . operations . ReasonForVisitSectionOperations obj = new org . openhealthtools . mdht . uml . cda . cdt . operations . ReasonForVisitSectionOperations ( ) ; "<AssertPlaceHolder>" ; }
|
org . junit . Assert . assertTrue ( true )
|
testEmptyServiceList ( ) { final org . osgi . framework . BundleContext context = mock ( org . osgi . framework . BundleContext . class ) ; org . osgi . framework . ServiceReference [ ] refs = new org . osgi . framework . ServiceReference [ ] { } ; when ( context . getServiceReferences ( isA ( java . lang . String . class ) , isNull ( java . lang . String . class ) ) ) . thenReturn ( refs ) ; ddf . mime . MimeTypeToTransformerMapper matcher = new ddf . mime . mapper . MimeTypeToTransformerMapperImpl ( ) { @ ddf . mime . mapper . Override protected org . osgi . framework . BundleContext getContext ( ) { return context ; } } ; java . util . List < java . lang . Object > matches = matcher . findMatches ( java . lang . Object . class , new javax . activation . MimeType ( javax . ws . rs . core . MediaType . APPLICATION_ATOM_XML ) ) ; "<AssertPlaceHolder>" ; } isEmpty ( ) { return map . isEmpty ( ) ; }
|
org . junit . Assert . assertThat ( matches . isEmpty ( ) , org . hamcrest . Matchers . is ( true ) )
|
testFirstWithNullInput ( ) { com . mitchellbosecke . pebble . PebbleEngine pebble = new com . mitchellbosecke . pebble . PebbleEngine . Builder ( ) . loader ( new com . mitchellbosecke . pebble . loader . StringLoader ( ) ) . strictVariables ( false ) . build ( ) ; com . mitchellbosecke . pebble . template . PebbleTemplate template = pebble . getTemplate ( "{{<sp>names<sp>|<sp>first<sp>}}" ) ; java . util . Map < java . lang . String , java . lang . Object > context = new java . util . HashMap ( ) ; context . put ( "names" , null ) ; java . io . Writer writer = new java . io . StringWriter ( ) ; template . evaluate ( writer , context ) ; "<AssertPlaceHolder>" ; } toString ( ) { return sb . toString ( ) ; }
|
org . junit . Assert . assertEquals ( "" , writer . toString ( ) )
|
shouldNotBeDiagnosticMode ( ) { java . io . File file = com . oracle . bedrock . runtime . console . FileWriterApplicationConsoleTest . temporaryFolder . newFile ( ) ; com . oracle . bedrock . runtime . console . FileWriterApplicationConsole console = new com . oracle . bedrock . runtime . console . FileWriterApplicationConsole ( new java . io . FileWriter ( file ) , false ) ; "<AssertPlaceHolder>" ; } isDiagnosticsEnabled ( ) { return false ; }
|
org . junit . Assert . assertThat ( console . isDiagnosticsEnabled ( ) , org . hamcrest . CoreMatchers . is ( false ) )
|
testLongReversed ( ) { for ( int n = 1 ; n < 1000 ; n ++ ) { final int N = n ; io . reactivex . Flowable < java . lang . Integer > a = io . reactivex . Flowable . range ( 1 , n ) . map ( new io . reactivex . functions . Function < java . lang . Integer , java . lang . Integer > ( ) { @ com . github . davidmoten . rx2 . internal . flowable . Override public com . github . davidmoten . rx2 . internal . flowable . Integer apply ( java . lang . Integer x ) { return ( N + 1 ) - x ; } } ) ; io . reactivex . Flowable < java . lang . Integer > b = io . reactivex . Flowable . range ( 1 , n ) ; boolean equals = io . reactivex . Flowable . sequenceEqual ( com . github . davidmoten . rx2 . internal . flowable . FlowableMatchTest . matchThem ( a , b ) . sorted ( ) , io . reactivex . Flowable . range ( 1 , n ) ) . blockingGet ( ) ; "<AssertPlaceHolder>" ; } } matchThem ( io . reactivex . Flowable , io . reactivex . Flowable ) { return a . compose ( com . github . davidmoten . rx2 . flowable . Transformers . matchWith ( b , com . github . davidmoten . rx2 . Functions . identity ( ) , com . github . davidmoten . rx2 . Functions . identity ( ) , com . github . davidmoten . rx2 . internal . flowable . FlowableMatchTest . COMBINER ) ) ; }
|
org . junit . Assert . assertTrue ( equals )
|
testFindByPrimaryKeyExisting ( ) { com . liferay . portal . workflow . kaleo . model . KaleoDefinition newKaleoDefinition = addKaleoDefinition ( ) ; com . liferay . portal . workflow . kaleo . model . KaleoDefinition existingKaleoDefinition = _persistence . findByPrimaryKey ( newKaleoDefinition . getPrimaryKey ( ) ) ; "<AssertPlaceHolder>" ; } getPrimaryKey ( ) { return _amImageEntryId ; }
|
org . junit . Assert . assertEquals ( existingKaleoDefinition , newKaleoDefinition )
|
instantiation ( ) { int sum = com . m3 . scalaflavor4j . Seq . apply ( "aaa" , "bbbb" , "ccc" ) . foldRight ( 0 , new com . m3 . scalaflavor4j . FoldRightF2 < java . lang . String , java . lang . Integer > ( ) { public com . m3 . scalaflavor4j . Integer apply ( java . lang . String str , java . lang . Integer sum ) { return sum + ( str . length ( ) ) ; } } ) ; "<AssertPlaceHolder>" ; } apply ( T1 , T2 , T3 ) { return new com . m3 . scalaflavor4j . Tuple3 < T1 , T2 , T3 > ( _1 , _2 , _3 ) ; }
|
org . junit . Assert . assertThat ( sum , org . hamcrest . CoreMatchers . is ( org . hamcrest . CoreMatchers . equalTo ( 10 ) ) )
|
testCurrentSchemaSerializerForLazilyRegisteredStateSerializerProvider ( ) { org . apache . flink . runtime . testutils . statemigration . TestType . V1TestTypeSerializer serializer = new org . apache . flink . runtime . testutils . statemigration . TestType . V1TestTypeSerializer ( ) ; org . apache . flink . runtime . state . StateSerializerProvider < org . apache . flink . runtime . testutils . statemigration . TestType > testProvider = org . apache . flink . runtime . state . StateSerializerProvider . fromPreviousSerializerSnapshot ( serializer . snapshotConfiguration ( ) ) ; "<AssertPlaceHolder>" ; } currentSchemaSerializer ( ) { if ( ( registeredSerializer ) != null ) { org . apache . flink . runtime . state . StateSerializerProvider . checkState ( ( ! ( isRegisteredWithIncompatibleSerializer ) ) , ( "Unable<sp>to<sp>provide<sp>a<sp>serializer<sp>with<sp>the<sp>current<sp>schema,<sp>because<sp>the<sp>restored<sp>state<sp>was<sp>" + "registered<sp>with<sp>a<sp>new<sp>serializer<sp>that<sp>has<sp>incompatible<sp>schema." ) ) ; return registeredSerializer ; } return previousSchemaSerializer ( ) ; }
|
org . junit . Assert . assertTrue ( ( ( testProvider . currentSchemaSerializer ( ) ) instanceof org . apache . flink . runtime . testutils . statemigration . TestType . V1TestTypeSerializer ) )
|
testRemove ( ) { gatewayResource . remove ( ResourceType . EXPERIMENT , testExpID ) ; "<AssertPlaceHolder>" ; } isExists ( org . apache . airavata . registry . core . experiment . catalog . ResourceType , java . lang . Object ) { javax . persistence . EntityManager em = null ; try { switch ( type ) { case GATEWAY_WORKER : em = org . apache . airavata . registry . core . experiment . catalog . ExpCatResourceUtils . getEntityManager ( ) ; org . apache . airavata . registry . core . experiment . catalog . resources . GatewayWorkerPK gatewayWorkerPK = new org . apache . airavata . registry . core . experiment . catalog . resources . GatewayWorkerPK ( ) ; gatewayWorkerPK . setGatewayId ( gatewayId ) ; gatewayWorkerPK . setUserName ( name . toString ( ) ) ; org . apache . airavata . registry . core . experiment . catalog . resources . GatewayWorker existingWorker = em . find ( org . apache . airavata . registry . core . experiment . catalog . resources . GatewayWorker . class , gatewayWorkerPK ) ; em . close ( ) ; return existingWorker != null ; case USER : em = org . apache . airavata . registry . core . experiment . catalog . ExpCatResourceUtils . getEntityManager ( ) ; org . apache . airavata . registry . core . experiment . catalog . resources . UserPK userPK = new org . apache . airavata . registry . core . experiment . catalog . resources . UserPK ( ) ; userPK . setGatewayId ( getGatewayId ( ) ) ; userPK . setUserName ( name . toString ( ) ) ; org . apache . airavata . registry . core . experiment . catalog . resources . Users existingUser = em . find ( org . apache . airavata . registry . core . experiment . catalog . resources . Users . class , userPK ) ; em . close ( ) ; return existingUser != null ; case EXPERIMENT : em = org . apache . airavata . registry . core . experiment . catalog . ExpCatResourceUtils . getEntityManager ( ) ; org . apache . airavata . registry . core . experiment . catalog . resources . Experiment existingExp = em . find ( org . apache . airavata . registry . core . experiment . catalog . resources . Experiment . class , name . toString ( ) ) ; em . close ( ) ; return existingExp != null ; default : org . apache . airavata . registry . core . experiment . catalog . resources . GatewayResource . logger . error ( "Unsupported<sp>resource<sp>type<sp>for<sp>gateway<sp>resource." , new java . lang . IllegalArgumentException ( ) ) ; throw new java . lang . IllegalArgumentException ( "Unsupported<sp>resource<sp>type<sp>for<sp>gateway<sp>resource." ) ; } } catch ( java . lang . Exception e ) { org . apache . airavata . registry . core . experiment . catalog . resources . GatewayResource . logger . error ( e . getMessage ( ) , e ) ; throw new org . apache . airavata . registry . cpi . RegistryException ( e ) ; } finally { if ( ( em != null ) && ( em . isOpen ( ) ) ) { if ( em . getTransaction ( ) . isActive ( ) ) { em . getTransaction ( ) . rollback ( ) ; } em . close ( ) ; } } }
|
org . junit . Assert . assertFalse ( gatewayResource . isExists ( ResourceType . EXPERIMENT , testExpID ) )
|
forLoop ( ) { com . gs . collections . api . set . sorted . ImmutableSortedSet < java . lang . Integer > set = this . classUnderTest ( ) ; for ( java . lang . Integer each : set ) { "<AssertPlaceHolder>" ; } } classUnderTest ( ) { return SortedBags . immutable . empty ( ) ; }
|
org . junit . Assert . assertNotNull ( each )
|
testIncorrectPublicKey ( ) { org . kaaproject . kaa . server . bootstrap . service . security . BootstrapFileKeyStoreService ks = new org . kaaproject . kaa . server . bootstrap . service . security . BootstrapFileKeyStoreService ( ) ; "<AssertPlaceHolder>" ; ks . setPublicKeyLocation ( org . kaaproject . kaa . server . bootstrap . service . security . FileKeyStoreServiceTest . PUBLIC_KEY_LOCATION ) ; java . io . File pub = new java . io . File ( org . kaaproject . kaa . server . bootstrap . service . security . FileKeyStoreServiceTest . publicKeyPath ) ; if ( pub . exists ( ) ) { pub . delete ( ) ; } java . io . FileWriter pubfw ; pubfw = new java . io . FileWriter ( pub ) ; pubfw . write ( "ascasdca42314*&^*$@^#5$^&sdcasdc" ) ; pubfw . close ( ) ; ks . loadKeys ( ) ; }
|
org . junit . Assert . assertNotNull ( ks )
|
testWriteReadRoundtrip ( ) { final org . apache . flink . runtime . state . KeyGroupRange keyRange = new org . apache . flink . runtime . state . KeyGroupRange ( 0 , 2 ) ; org . apache . flink . runtime . state . KeyedStateCheckpointOutputStream stream = org . apache . flink . runtime . state . KeyedStateCheckpointOutputStreamTest . createStream ( keyRange ) ; org . apache . flink . runtime . state . KeyGroupsStateHandle fullHandle = writeAllTestKeyGroups ( stream , keyRange ) ; "<AssertPlaceHolder>" ; org . apache . flink . runtime . state . KeyedStateCheckpointOutputStreamTest . verifyRead ( fullHandle , keyRange ) ; } writeAllTestKeyGroups ( org . apache . flink . runtime . state . OperatorStateCheckpointOutputStream , int ) { org . apache . flink . core . memory . DataOutputView dov = new org . apache . flink . core . memory . DataOutputViewStreamWrapper ( stream ) ; for ( int i = 0 ; i < numPartitions ; ++ i ) { org . junit . Assert . assertEquals ( i , stream . getNumberOfPartitions ( ) ) ; stream . startNewPartition ( ) ; dov . writeInt ( i ) ; } return stream . closeAndGetHandle ( ) ; }
|
org . junit . Assert . assertNotNull ( fullHandle )
|
testGetFilesInExistingDir ( ) { java . lang . String file = this . getClass ( ) . getClassLoader ( ) . getResource ( "." ) . getFile ( ) ; org . sonar . plugins . tsql . rules . files . BaseReportsProvider cut = new org . sonar . plugins . tsql . rules . files . BaseReportsProvider ( ".xml" ) ; java . io . File [ ] files = cut . get ( file ) ; "<AssertPlaceHolder>" ; } get ( java . lang . String ) { final java . lang . String cgPath = this . settings . getString ( Constants . CG_APP_PATH ) ; if ( ( org . apache . commons . lang . StringUtils . isEmpty ( cgPath ) ) || ( ! ( new java . io . File ( cgPath ) . exists ( ) ) ) ) { org . sonar . plugins . tsql . rules . files . CodeGuardExecutingReportsProvider . LOGGER . info ( java . lang . String . format ( "SQL<sp>Code<sp>guard<sp>path<sp>is<sp>empty,<sp>trying<sp>to<sp>search<sp>directories<sp>instead" ) ) ; return getCGANalysisFiles ( baseDir ) ; } final java . io . File sourceDir = new java . io . File ( baseDir ) ; final java . io . File tempResultsFile = folder . newFile ( "temp" , "results.xml" ) ; final java . lang . String [ ] args = new java . lang . String [ ] { cgPath , "-source" , sourceDir . getAbsolutePath ( ) , "temp" 0 , tempResultsFile . getAbsolutePath ( ) , "/include:all" } ; try { org . sonar . plugins . tsql . rules . files . CodeGuardExecutingReportsProvider . LOGGER . debug ( java . lang . String . format ( "Running<sp>command:<sp>%s" , java . lang . String . join ( "<sp>" , args ) ) ) ; final java . lang . Process process = new java . lang . ProcessBuilder ( args ) . start ( ) ; final int result = process . waitFor ( ) ; if ( ( result != 0 ) || ( ( new java . io . File ( tempResultsFile . getAbsolutePath ( ) ) . length ( ) ) == 0 ) ) { org . sonar . plugins . tsql . rules . files . CodeGuardExecutingReportsProvider . LOGGER . warn ( "SQL<sp>Code<sp>Guard<sp>finished<sp>with<sp>errors:<sp>{}.<sp>Output<sp>was:<sp>{}" , org . sonar . plugins . tsql . rules . files . CodeGuardExecutingReportsProvider . read ( process . getErrorStream ( ) ) , org . sonar . plugins . tsql . rules . files . CodeGuardExecutingReportsProvider . read ( process . getInputStream ( ) ) ) ; org . sonar . plugins . tsql . rules . files . CodeGuardExecutingReportsProvider . read ( process . getErrorStream ( ) ) ; return getCGANalysisFiles ( baseDir ) ; } org . sonar . plugins . tsql . rules . files . CodeGuardExecutingReportsProvider . LOGGER . debug ( "Running<sp>command<sp>finished" ) ; } catch ( final java . lang . Throwable e ) { org . sonar . plugins . tsql . rules . files . CodeGuardExecutingReportsProvider . LOGGER . warn ( "Error<sp>executing<sp>SQL<sp>code<sp>guard<sp>tool,<sp>trying<sp>to<sp>search<sp>directories<sp>instead" , e ) ; return getCGANalysisFiles ( baseDir ) ; } return new java . io . File [ ] { tempResultsFile } ; }
|
org . junit . Assert . assertEquals ( 5 , files . length )
|
testString ( ) { java . lang . String [ ] vals = new java . lang . String [ ] { "foo" , "bazz" 0 , "bazz" } ; int [ ] expectedLengths = new int [ ] { 5 , 8 , 6 } ; for ( org . apache . hadoop . hbase . util . Order ord : new org . apache . hadoop . hbase . util . Order [ ] { Order . ASCENDING , Order . DESCENDING } ) { byte [ ] [ ] encoded = new byte [ vals . length ] [ ] ; org . apache . hadoop . hbase . util . PositionedByteRange pbr = new org . apache . hadoop . hbase . util . SimplePositionedMutableByteRange ( ) ; for ( int i = 0 ; i < ( vals . length ) ; i ++ ) { encoded [ i ] = new byte [ expectedLengths [ i ] ] ; org . apache . hadoop . hbase . util . OrderedBytes . encodeString ( pbr . set ( encoded [ i ] ) , vals [ i ] , ord ) ; } java . util . Arrays . sort ( encoded , Bytes . BYTES_COMPARATOR ) ; java . lang . String [ ] sortedVals = java . util . Arrays . copyOf ( vals , vals . length ) ; if ( ord == ( Order . ASCENDING ) ) java . util . Arrays . sort ( sortedVals ) ; else java . util . Arrays . sort ( sortedVals , java . util . Collections . reverseOrder ( ) ) ; for ( int i = 0 ; i < ( sortedVals . length ) ; i ++ ) { pbr . set ( encoded [ i ] ) ; java . lang . String decoded = org . apache . hadoop . hbase . util . OrderedBytes . decodeString ( pbr ) ; "<AssertPlaceHolder>" ; } } } decodeString ( org . apache . hadoop . hbase . util . PositionedByteRange ) { final byte header = src . get ( ) ; if ( ( header == ( org . apache . hadoop . hbase . util . OrderedBytes . NULL ) ) || ( header == ( org . apache . hadoop . hbase . util . Order . DESCENDING . apply ( org . apache . hadoop . hbase . util . OrderedBytes . NULL ) ) ) ) return null ; assert ( header == ( org . apache . hadoop . hbase . util . OrderedBytes . TEXT ) ) || ( header == ( org . apache . hadoop . hbase . util . Order . DESCENDING . apply ( org . apache . hadoop . hbase . util . OrderedBytes . TEXT ) ) ) ; org . apache . hadoop . hbase . util . Order ord = ( header == ( org . apache . hadoop . hbase . util . OrderedBytes . TEXT ) ) ? org . apache . hadoop . hbase . util . Order . ASCENDING : org . apache . hadoop . hbase . util . Order . DESCENDING ; byte [ ] a = src . getBytes ( ) ; final int offset = src . getOffset ( ) ; final int start = src . getPosition ( ) ; final byte terminator = ord . apply ( org . apache . hadoop . hbase . util . OrderedBytes . TERM ) ; int rawStartPos = offset + start ; int rawTermPos = rawStartPos ; for ( ; ( a [ rawTermPos ] ) != terminator ; rawTermPos ++ ) ; src . setPosition ( ( ( rawTermPos - offset ) + 1 ) ) ; if ( ( org . apache . hadoop . hbase . util . Order . DESCENDING ) == ord ) { byte [ ] copy = new byte [ rawTermPos - rawStartPos ] ; java . lang . System . arraycopy ( a , rawStartPos , copy , 0 , copy . length ) ; ord . apply ( copy ) ; return new java . lang . String ( copy , org . apache . hadoop . hbase . util . OrderedBytes . UTF8 ) ; } else { return new java . lang . String ( a , rawStartPos , ( rawTermPos - rawStartPos ) , org . apache . hadoop . hbase . util . OrderedBytes . UTF8 ) ; } }
|
org . junit . Assert . assertEquals ( java . lang . String . format ( "bazz" 1 , sortedVals [ i ] , decoded , ord ) , sortedVals [ i ] , decoded )
|
validate_missingSampleValue_returnFalse ( ) { meterValue . setSampledValue ( null ) ; boolean isValid = meterValue . validate ( ) ; "<AssertPlaceHolder>" ; } validate ( ) { return true ; }
|
org . junit . Assert . assertThat ( isValid , org . hamcrest . CoreMatchers . is ( false ) )
|
testEquals ( ) { org . jfree . data . time . Day day1 = new org . jfree . data . time . Day ( 29 , org . jfree . date . MonthConstants . MARCH , 2002 ) ; org . jfree . data . time . Hour hour1 = new org . jfree . data . time . Hour ( 15 , day1 ) ; org . jfree . data . time . Minute minute1 = new org . jfree . data . time . Minute ( 15 , hour1 ) ; org . jfree . data . time . Day day2 = new org . jfree . data . time . Day ( 29 , org . jfree . date . MonthConstants . MARCH , 2002 ) ; org . jfree . data . time . Hour hour2 = new org . jfree . data . time . Hour ( 15 , day2 ) ; org . jfree . data . time . Minute minute2 = new org . jfree . data . time . Minute ( 15 , hour2 ) ; "<AssertPlaceHolder>" ; } equals ( java . lang . Object ) { if ( ! ( o instanceof com . mysql . fabric . Server ) ) { return false ; } com . mysql . fabric . Server s = ( ( com . mysql . fabric . Server ) ( o ) ) ; return s . getUuid ( ) . equals ( getUuid ( ) ) ; }
|
org . junit . Assert . assertTrue ( minute1 . equals ( minute2 ) )
|
testDynamicQueryByProjectionMissing ( ) { com . liferay . portal . kernel . dao . orm . DynamicQuery dynamicQuery = com . liferay . portal . kernel . dao . orm . DynamicQueryFactoryUtil . forClass ( com . liferay . portal . workflow . kaleo . model . KaleoLog . class , _dynamicQueryClassLoader ) ; dynamicQuery . setProjection ( com . liferay . portal . kernel . dao . orm . ProjectionFactoryUtil . property ( "kaleoLogId" ) ) ; dynamicQuery . add ( com . liferay . portal . kernel . dao . orm . RestrictionsFactoryUtil . in ( "kaleoLogId" , new java . lang . Object [ ] { com . liferay . portal . kernel . test . util . RandomTestUtil . nextLong ( ) } ) ) ; java . util . List < java . lang . Object > result = _persistence . findWithDynamicQuery ( dynamicQuery ) ; "<AssertPlaceHolder>" ; } size ( ) { if ( ( _workflowTaskAssignees ) != null ) { return _workflowTaskAssignees . size ( ) ; } return _kaleoTaskAssignmentInstanceLocalService . getKaleoTaskAssignmentInstancesCount ( _kaleoTaskInstanceToken . getKaleoTaskInstanceTokenId ( ) ) ; }
|
org . junit . Assert . assertEquals ( 0 , result . size ( ) )
|
toSortedArray ( ) { super . toSortedArray ( ) ; com . gs . collections . api . set . primitive . ImmutableByteSet set = this . newWith ( ( ( byte ) ( 0 ) ) , ( ( byte ) ( 1 ) ) , ( ( byte ) ( 31 ) ) ) ; "<AssertPlaceHolder>" ; } toSortedArray ( ) { int [ ] array = this . toArray ( ) ; java . util . Arrays . sort ( array ) ; return array ; }
|
org . junit . Assert . assertArrayEquals ( new byte [ ] { ( ( byte ) ( 0 ) ) , ( ( byte ) ( 1 ) ) , ( ( byte ) ( 31 ) ) } , set . toSortedArray ( ) )
|
testSortingWithOptions2 ( ) { io . anserini . eval . ResultDoc d1 = new io . anserini . eval . ResultDoc ( "1" , 1.0001 , true , false ) ; io . anserini . eval . ResultDoc d2 = new io . anserini . eval . ResultDoc ( "010" , 1.0001 , true , false ) ; io . anserini . eval . ResultDoc d3 = new io . anserini . eval . ResultDoc ( "1000" , 1.0001 , true , false ) ; io . anserini . eval . ResultDoc d4 = new io . anserini . eval . ResultDoc ( "00100" , 1.0001 , true , false ) ; java . util . List < io . anserini . eval . ResultDoc > l = new java . util . ArrayList ( ) ; l . add ( d1 ) ; l . add ( d2 ) ; l . add ( d3 ) ; l . add ( d4 ) ; java . util . Collections . sort ( l ) ; java . lang . String [ ] sorted = new java . lang . String [ 4 ] ; for ( int i = 0 ; i < ( l . size ( ) ) ; i ++ ) { sorted [ i ] = l . get ( i ) . getDocid ( ) ; } java . lang . String [ ] expected = new java . lang . String [ ] { "1000" , "00100" , "010" , "1" } ; "<AssertPlaceHolder>" ; } getDocid ( ) { return getHeaderMetadataItem ( "WARC-TREC-ID" ) ; }
|
org . junit . Assert . assertArrayEquals ( expected , sorted )
|
setFrontRemovedTest ( ) { org . threadly . concurrent . collections . ConcurrentArrayList . DataSet < java . lang . Integer > result = org . threadly . concurrent . collections . ConcurrentArrayListDataSetTest . removedFromFront . set ( 9 , 100 ) . set ( 5 , 0 ) . set ( 0 , 10 ) ; java . lang . Integer [ ] expectedResult = new java . lang . Integer [ ] { 10 , 2 , 3 , 4 , 5 , 0 , 7 , 8 , 9 , 100 } ; "<AssertPlaceHolder>" ; } makeDataSet ( java . lang . Object [ ] , int , int ) { return new org . threadly . concurrent . collections . ConcurrentArrayList . DataSet ( dataArray , startPosition , endPosition , 0 , 0 ) ; }
|
org . junit . Assert . assertTrue ( result . equals ( org . threadly . concurrent . collections . ConcurrentArrayListDataSetTest . makeDataSet ( expectedResult , 0 , expectedResult . length ) ) )
|
testTenantDeployer ( ) { org . alfresco . repo . content . ContentStore contentStore = new org . alfresco . repo . tenant . MultiTAdminServiceImplTest . ConcreteTenantDeployer ( ) ; org . alfresco . repo . tenant . TenantDeployer tenantDeployer = tenantAdmin . tenantDeployer ( contentStore ) ; "<AssertPlaceHolder>" ; } tenantDeployer ( org . alfresco . repo . content . ContentStore ) { if ( contentStore instanceof org . alfresco . repo . tenant . TenantDeployer ) { return ( ( org . alfresco . repo . tenant . TenantDeployer ) ( contentStore ) ) ; } else if ( contentStore instanceof org . alfresco . repo . content . ContentStoreCaps ) { org . alfresco . repo . content . ContentStoreCaps capabilities = ( ( org . alfresco . repo . content . ContentStoreCaps ) ( contentStore ) ) ; return ( ( org . alfresco . repo . tenant . TenantDeployer ) ( capabilities . getTenantDeployer ( ) ) ) ; } return null ; }
|
org . junit . Assert . assertNotNull ( tenantDeployer )
|
testFromJson ( ) { com . fasterxml . jackson . databind . node . ObjectNode node = null ; com . redhat . lightblue . query . ForEachExpression expResult = null ; com . redhat . lightblue . query . ForEachExpression result = com . redhat . lightblue . query . ForEachExpression . fromJson ( node ) ; "<AssertPlaceHolder>" ; } fromJson ( com . fasterxml . jackson . databind . node . ObjectNode ) { if ( ( node . size ( ) ) == 1 ) { com . fasterxml . jackson . databind . JsonNode argNode = node . get ( "$foreach" ) ; if ( argNode instanceof com . fasterxml . jackson . databind . node . ObjectNode ) { com . fasterxml . jackson . databind . node . ObjectNode objArg = ( ( com . fasterxml . jackson . databind . node . ObjectNode ) ( argNode ) ) ; if ( ( objArg . size ( ) ) == 2 ) { com . fasterxml . jackson . databind . JsonNode updateNode = null ; com . fasterxml . jackson . databind . JsonNode queryNode = null ; com . redhat . lightblue . util . Path field = null ; for ( java . util . Iterator < Map . Entry < java . lang . String , com . fasterxml . jackson . databind . JsonNode > > itr = objArg . fields ( ) ; itr . hasNext ( ) ; ) { Map . Entry < java . lang . String , com . fasterxml . jackson . databind . JsonNode > entry = itr . next ( ) ; if ( "$update" . equals ( entry . getKey ( ) ) ) { updateNode = entry . getValue ( ) ; } else { field = new com . redhat . lightblue . util . Path ( entry . getKey ( ) ) ; queryNode = entry . getValue ( ) ; } } if ( ( ( queryNode != null ) && ( updateNode != null ) ) && ( field != null ) ) { return new com . redhat . lightblue . query . ForEachExpression ( field , com . redhat . lightblue . query . UpdateQueryExpression . fromJson ( queryNode ) , com . redhat . lightblue . query . ForEachUpdateExpression . fromJson ( updateNode ) ) ; } } } } throw com . redhat . lightblue . util . Error . get ( QueryConstants . ERR_INVALID_ARRAY_UPDATE_EXPRESSION , node . toString ( ) ) ; }
|
org . junit . Assert . assertEquals ( expResult , result )
|
testGetIndicesVector ( ) { org . nd4j . linalg . api . ndarray . INDArray line = org . nd4j . linalg . factory . Nd4j . linspace ( 1 , 4 , 4 , DataType . DOUBLE ) . reshape ( 1 , ( - 1 ) ) ; org . nd4j . linalg . api . ndarray . INDArray test = org . nd4j . linalg . factory . Nd4j . create ( new double [ ] { 2 , 3 } ) ; org . nd4j . linalg . api . ndarray . INDArray result = line . get ( point ( 0 ) , interval ( 1 , 3 ) ) ; "<AssertPlaceHolder>" ; } interval ( int , int ) { return org . nd4j . linalg . indexing . NDArrayIndex . interval ( begin , 1 , end , false ) ; }
|
org . junit . Assert . assertEquals ( test , result )
|
should_know_the_parent_of_one_level_is_empty ( ) { net . imadz . common . DottedPath path = net . imadz . common . DottedPath . parse ( "a" ) ; "<AssertPlaceHolder>" ; } getParent ( ) { return this . parent ; }
|
org . junit . Assert . assertNull ( path . getParent ( ) )
|
testFetchByPrimaryKeysWithMultiplePrimaryKeysWhereNoPrimaryKeysExist ( ) { long pk1 = com . liferay . portal . kernel . test . util . RandomTestUtil . nextLong ( ) ; long pk2 = com . liferay . portal . kernel . test . util . RandomTestUtil . nextLong ( ) ; java . util . Set < java . io . Serializable > primaryKeys = new java . util . HashSet < java . io . Serializable > ( ) ; primaryKeys . add ( pk1 ) ; primaryKeys . add ( pk2 ) ; java . util . Map < java . io . Serializable , com . liferay . dynamic . data . mapping . model . DDMDataProviderInstanceLink > ddmDataProviderInstanceLinks = _persistence . fetchByPrimaryKeys ( primaryKeys ) ; "<AssertPlaceHolder>" ; } isEmpty ( ) { return _portalCacheListeners . isEmpty ( ) ; }
|
org . junit . Assert . assertTrue ( ddmDataProviderInstanceLinks . isEmpty ( ) )
|
testDelete ( ) { routeDao . delete ( proto1 . getId ( ) ) ; "<AssertPlaceHolder>" ; } getAllRoutes ( ) { com . ctrip . xpipe . redis . console . model . DcIdNameMapper mapper = new com . ctrip . xpipe . redis . console . model . DcIdNameMapper . DefaultMapper ( dcService ) ; java . util . List < com . ctrip . xpipe . redis . console . model . RouteModel > clone = com . google . common . collect . Lists . transform ( routeDao . getAllRoutes ( ) , new com . google . common . base . Function < com . ctrip . xpipe . redis . console . model . RouteTbl , com . ctrip . xpipe . redis . console . model . RouteModel > ( ) { @ com . ctrip . xpipe . redis . console . service . impl . Override public com . ctrip . xpipe . redis . console . model . RouteModel apply ( com . ctrip . xpipe . redis . console . model . RouteTbl input ) { return com . ctrip . xpipe . redis . console . model . RouteModel . fromRouteTbl ( input , mapper ) ; } } ) ; return com . google . common . collect . Lists . newArrayList ( clone ) ; }
|
org . junit . Assert . assertEquals ( 1 , routeDao . getAllRoutes ( ) . size ( ) )
|
testNullQuery ( ) { try { connectorQueryBuilder . buildQuery ( client , null ) ; } catch ( java . lang . Exception exception ) { "<AssertPlaceHolder>" ; } } buildQuery ( org . elasticsearch . client . Client , com . stratio . connector . commons . engine . query . ProjectParsed ) { if ( null == elasticClient ) { throw new com . stratio . crossdata . common . exceptions . ExecutionException ( "Query<sp>builder<sp>received<sp>an<sp>empty<sp>client<sp>to<sp>execute<sp>the<sp>query." ) ; } if ( null == queryData ) { throw new com . stratio . crossdata . common . exceptions . ExecutionException ( "Query<sp>builder<sp>received<sp>an<sp>empty<sp>select<sp>clause<sp>to<sp>be<sp>processed." ) ; } createRequestBuilder ( elasticClient ) ; createProjection ( queryData . getProject ( ) ) ; createFilter ( queryData ) ; if ( isAggregation ( queryData ) ) { createNestedTermAggregation ( queryData , queryData . getGroupBy ( ) . getIds ( ) ) ; } else if ( isDistinctAggregation ( queryData ) ) { createNestedTermAggregation ( queryData , queryData . getSelect ( ) . getColumnMap ( ) . keySet ( ) ) ; } else { createSelect ( queryData . getSelect ( ) ) ; createSort ( queryData . getOrderBy ( ) ) ; createLimit ( queryData . getLimit ( ) ) ; } logQuery ( ) ; return requestBuilder ; }
|
org . junit . Assert . assertTrue ( ( exception instanceof com . stratio . crossdata . common . exceptions . ExecutionException ) )
|
testWithOnlyWiki ( ) { org . xwiki . model . reference . EntityReference reference = new org . xwiki . model . reference . RegexEntityReference ( java . util . regex . Pattern . compile ( org . xwiki . model . reference . RegexEntityReferenceTest . REFERENCETOMATCH . getWikiReference ( ) . getName ( ) , Pattern . LITERAL ) , org . xwiki . model . EntityType . WIKI ) ; "<AssertPlaceHolder>" ; } equals ( java . lang . Object ) { if ( obj == ( this ) ) { return true ; } if ( ! ( obj instanceof org . xwiki . model . reference . EntityReference ) ) { return false ; } org . xwiki . model . reference . EntityReference ref = ( ( org . xwiki . model . reference . EntityReference ) ( obj ) ) ; return ( ( ( name . equals ( ref . name ) ) && ( type . equals ( ref . type ) ) ) && ( ( parent ) == null ? ( ref . parent ) == null : parent . equals ( ref . parent ) ) ) && ( ( parameters ) == null ? ( ref . parameters ) == null : parameters . equals ( ref . parameters ) ) ; }
|
org . junit . Assert . assertTrue ( reference . equals ( org . xwiki . model . reference . RegexEntityReferenceTest . REFERENCETOMATCH ) )
|
checkTime15 ( ) { com . alibaba . fastjson . parser . JSONScanner objectUnderTest = ( ( com . alibaba . fastjson . parser . JSONScanner ) ( com . diffblue . deeptestutils . Reflector . getInstance ( "com.alibaba.fastjson.parser.JSONScanner" ) ) ) ; objectUnderTest . hasSpecial = false ; objectUnderTest . token = 0 ; objectUnderTest . locale = null ; objectUnderTest . np = 0 ; objectUnderTest . features = 0 ; com . diffblue . deeptestutils . Reflector . setField ( objectUnderTest , "text" , "" ) ; objectUnderTest . calendar = null ; objectUnderTest . matchStat = 0 ; objectUnderTest . bp = 0 ; com . diffblue . deeptestutils . Reflector . setField ( objectUnderTest , "len" , 0 ) ; objectUnderTest . stringDefaultValue = "" ; objectUnderTest . pos = 0 ; objectUnderTest . sp = 0 ; objectUnderTest . sbuf = null ; objectUnderTest . ch = '
|
org . junit . Assert . assertEquals ( false , retval )
|
testNotifiesListenerOnSelectionEventWithCorrectDisplay ( ) { org . eclipse . swt . widgets . Display display = new org . eclipse . swt . widgets . Display ( ) ; com . eclipsesource . tabris . widgets . ClientDialog dialog = new com . eclipsesource . tabris . widgets . ClientDialog ( ) ; org . eclipse . swt . widgets . Listener listener = mock ( org . eclipse . swt . widgets . Listener . class ) ; org . eclipse . rap . json . JsonObject properties = new org . eclipse . rap . json . JsonObject ( ) ; properties . add ( "buttonType" , "buttonOk" ) ; dialog . setButton ( ButtonType . OK , "bar" , listener ) ; environment . dispatchNotify ( "Selection" , properties ) ; org . mockito . ArgumentCaptor < org . eclipse . swt . widgets . Event > captor = org . mockito . ArgumentCaptor . forClass ( org . eclipse . swt . widgets . Event . class ) ; verify ( listener ) . handleEvent ( captor . capture ( ) ) ; "<AssertPlaceHolder>" ; } getValue ( ) { return value ; }
|
org . junit . Assert . assertSame ( display , captor . getValue ( ) . display )
|
testDeleteSiteNavigationMenuItemsBySiteNavigationMenuId ( ) { com . liferay . site . navigation . util . SiteNavigationMenuTestUtil . addSiteNavigationMenuItem ( _siteNavigationMenu ) ; com . liferay . site . navigation . util . SiteNavigationMenuTestUtil . addSiteNavigationMenuItem ( _siteNavigationMenu ) ; int originalSiteNavigationMenuItemsCount = com . liferay . site . navigation . service . SiteNavigationMenuItemLocalServiceUtil . getSiteNavigationMenuItemsCount ( _siteNavigationMenu . getSiteNavigationMenuId ( ) ) ; com . liferay . site . navigation . service . SiteNavigationMenuItemLocalServiceUtil . deleteSiteNavigationMenuItems ( _siteNavigationMenu . getSiteNavigationMenuId ( ) ) ; int actualSiteNavigationMenuItemsCount = com . liferay . site . navigation . service . SiteNavigationMenuItemLocalServiceUtil . getSiteNavigationMenuItemsCount ( _siteNavigationMenu . getSiteNavigationMenuId ( ) ) ; "<AssertPlaceHolder>" ; } getSiteNavigationMenuId ( ) { return _siteNavigationMenuId ; }
|
org . junit . Assert . assertEquals ( ( originalSiteNavigationMenuItemsCount - 2 ) , actualSiteNavigationMenuItemsCount )
|
spiTest ( ) { org . xeustechnologies . jcl . JarClassLoader jcl = new org . xeustechnologies . jcl . JarClassLoader ( ) ; jcl . add ( "./target/test-classes/lucene-core-5.3.1.jar" ) ; java . lang . Class codecClass = jcl . loadClass ( "org.apache.lucene.codecs.Codec" ) ; java . util . ServiceLoader serviceLoader = java . util . ServiceLoader . load ( codecClass , jcl ) ; java . util . Iterator itr = serviceLoader . iterator ( ) ; "<AssertPlaceHolder>" ; } load ( java . lang . String , boolean ) { java . lang . Class result = null ; byte [ ] classBytes ; result = classes . get ( className ) ; if ( result != null ) { if ( logger . isTraceEnabled ( ) ) logger . trace ( ( "Returning<sp>local<sp>loaded<sp>class<sp>" + className ) ) ; return result ; } classBytes = loadClassBytes ( className ) ; if ( classBytes == null ) { return null ; } result = defineClass ( className , classBytes , 0 , classBytes . length ) ; if ( result == null ) { return null ; } if ( resolveIt ) resolveClass ( result ) ; classes . put ( className , result ) ; if ( logger . isTraceEnabled ( ) ) logger . trace ( ( "Return<sp>newly<sp>loaded<sp>class<sp>" + className ) ) ; return result ; }
|
org . junit . Assert . assertTrue ( itr . hasNext ( ) )
|
bulkContainsAddedStringItem ( ) { org . appenders . log4j2 . elasticsearch . BatchOperations < io . searchbox . core . Bulk > bulkOperations = org . appenders . log4j2 . elasticsearch . jest . JestHttpObjectFactoryTest . createTestObjectFactoryBuilder ( ) . build ( ) . createBatchOperations ( ) ; org . appenders . log4j2 . elasticsearch . BatchBuilder < io . searchbox . core . Bulk > batchBuilder = bulkOperations . createBatchBuilder ( ) ; java . lang . String testPayload = "{<sp>\"testfield\":<sp>\"testvalue\"<sp>}" ; io . searchbox . core . Index item = ( ( io . searchbox . core . Index ) ( bulkOperations . createBatchItem ( "testIndex" , testPayload ) ) ) ; batchBuilder . add ( item ) ; io . searchbox . core . Bulk bulk = batchBuilder . build ( ) ; io . searchbox . core . JestBatchIntrospector introspector = new io . searchbox . core . JestBatchIntrospector ( ) ; "<AssertPlaceHolder>" ; } items ( io . searchbox . core . Bulk ) { return introspected . bulkableActions . stream ( ) . map ( ( item ) -> itemIntrospector ( ) . getPayload ( ( ( AbstractDocumentTargetedAction < io . searchbox . core . DocumentResult > ) ( item ) ) ) ) . collect ( java . util . stream . Collectors . toList ( ) ) ; }
|
org . junit . Assert . assertEquals ( testPayload , introspector . items ( bulk ) . get ( 0 ) )
|
testFetchByPrimaryKeyMissing ( ) { long pk = com . liferay . portal . kernel . test . util . RandomTestUtil . nextLong ( ) ; com . liferay . portal . workflow . kaleo . model . KaleoTimer missingKaleoTimer = _persistence . fetchByPrimaryKey ( pk ) ; "<AssertPlaceHolder>" ; } fetchByPrimaryKey ( long ) { return com . liferay . adaptive . media . image . service . persistence . AMImageEntryUtil . getPersistence ( ) . fetchByPrimaryKey ( amImageEntryId ) ; }
|
org . junit . Assert . assertNull ( missingKaleoTimer )
|
testDoubleUnionBody ( ) { java . util . List < com . github . anno4j . model . Annotation > annotations = queryService . addCriteria ( "oa:hasBody/(ex:subBody<sp>|<sp>ex:subBody2)[is-a<sp>ex:unionBody<sp>|<sp>is-a<sp>ex:unionBody2]" ) . execute ( ) ; "<AssertPlaceHolder>" ; } size ( ) { try { if ( ( _size ) < 0 ) { synchronized ( this ) { if ( ( _size ) < 0 ) { int index = findSize ( ) ; _size = index ; } } } return _size ; } catch ( org . openrdf . repository . RepositoryException e ) { throw new org . openrdf . repository . object . exceptions . ObjectStoreException ( e ) ; } }
|
org . junit . Assert . assertEquals ( 2 , annotations . size ( ) )
|
testHashCodeNull ( ) { org . bff . javampd . art . MPDArtwork artwork = new org . bff . javampd . art . MPDArtwork ( "name" , null ) ; "<AssertPlaceHolder>" ; } hashCode ( ) { int result = super . hashCode ( ) ; result = ( 31 * result ) + ( artistName . hashCode ( ) ) ; result = ( 31 * result ) + ( date . hashCode ( ) ) ; result = ( 31 * result ) + ( genre . hashCode ( ) ) ; return result ; }
|
org . junit . Assert . assertEquals ( 0 , artwork . hashCode ( ) )
|
find_A$Function1_Nil ( ) { com . m3 . scalaflavor4j . Seq < java . lang . String > seq = com . m3 . scalaflavor4j . IndexedSeq . apply ( ) ; com . m3 . scalaflavor4j . F1 < java . lang . String , java . lang . Boolean > p = new com . m3 . scalaflavor4j . F1 < java . lang . String , java . lang . Boolean > ( ) { public com . m3 . scalaflavor4j . Boolean apply ( java . lang . String v1 ) { return v1 . equals ( "c" ) ; } } ; com . m3 . scalaflavor4j . Option < java . lang . String > actual = seq . find ( p ) ; "<AssertPlaceHolder>" ; } isDefined ( ) { return false ; }
|
org . junit . Assert . assertThat ( actual . isDefined ( ) , org . hamcrest . CoreMatchers . is ( false ) )
|
noLazyLoadingOneToOne ( ) { javax . persistence . EntityManager em = factory . createEntityManager ( ) ; org . meri . jpa . relationships . entities . Person simon = em . find ( org . meri . jpa . relationships . entities . Person . class , org . meri . jpa . relationships . SIMON_SLASH_ID ) ; em . close ( ) ; "<AssertPlaceHolder>" ; } getFacebookAccount ( ) { return facebookAccount ; }
|
org . junit . Assert . assertNotNull ( simon . getFacebookAccount ( ) )
|
testTokenParamWhichIsTooLong ( ) { java . lang . String longStr1 = org . apache . commons . lang3 . RandomStringUtils . randomAlphanumeric ( ( ( ca . uhn . fhir . jpa . model . entity . ResourceIndexedSearchParamString . MAX_LENGTH ) + 100 ) ) ; java . lang . String longStr2 = org . apache . commons . lang3 . RandomStringUtils . randomAlphanumeric ( ( ( ca . uhn . fhir . jpa . model . entity . ResourceIndexedSearchParamString . MAX_LENGTH ) + 100 ) ) ; ca . uhn . fhir . jpa . dao . dstu3 . Organization org = new ca . uhn . fhir . jpa . dao . dstu3 . Organization ( ) ; org . getNameElement ( ) . setValue ( "testTokenParamWhichIsTooLong" ) ; org . addType ( ) . addCoding ( ) . setSystem ( longStr1 ) . setCode ( longStr2 ) ; java . lang . String subStr1 = longStr1 . substring ( 0 , ResourceIndexedSearchParamString . MAX_LENGTH ) ; java . lang . String subStr2 = longStr2 . substring ( 0 , ResourceIndexedSearchParamString . MAX_LENGTH ) ; ca . uhn . fhir . jpa . dao . dstu3 . Set < java . lang . Long > val = myOrganizationDao . searchForIds ( new ca . uhn . fhir . jpa . searchparam . SearchParameterMap ( "type" , new ca . uhn . fhir . jpa . dao . dstu3 . TokenParam ( subStr1 , subStr2 ) ) ) ; int initial = val . size ( ) ; myOrganizationDao . create ( org , mySrd ) ; val = myOrganizationDao . searchForIds ( new ca . uhn . fhir . jpa . searchparam . SearchParameterMap ( "type" , new ca . uhn . fhir . jpa . dao . dstu3 . TokenParam ( subStr1 , subStr2 ) ) ) ; "<AssertPlaceHolder>" ; try { myOrganizationDao . searchForIds ( new ca . uhn . fhir . jpa . searchparam . SearchParameterMap ( "type" , new ca . uhn . fhir . jpa . dao . dstu3 . TokenParam ( longStr1 , subStr2 ) ) ) ; org . junit . Assert . fail ( ) ; } catch ( ca . uhn . fhir . jpa . dao . dstu3 . InvalidRequestException e ) { } try { myOrganizationDao . searchForIds ( new ca . uhn . fhir . jpa . searchparam . SearchParameterMap ( "type" , new ca . uhn . fhir . jpa . dao . dstu3 . TokenParam ( subStr1 , longStr2 ) ) ) ; org . junit . Assert . fail ( ) ; } catch ( ca . uhn . fhir . jpa . dao . dstu3 . InvalidRequestException e ) { } } size ( ) { return myTagSet . size ( ) ; }
|
org . junit . Assert . assertEquals ( ( initial + 1 ) , val . size ( ) )
|
testGetUserProfileAttribute ( ) { try { "<AssertPlaceHolder>" ; com . agiletec . aps . system . services . user . UserDetails user = new org . entando . entando . web . utils . OAuth2TestUtils . UserBuilder ( "jack_bauer" , "Authorization" 1 ) . grantedToRoleAdmin ( ) . build ( ) ; java . lang . String accessToken = mockOAuthInterceptor ( user ) ; this . executeProfileTypePost ( "2_POST_valid.json" , accessToken , status ( ) . isOk ( ) ) ; org . springframework . test . web . servlet . ResultActions result1 = mockMvc . perform ( get ( "Authorization" 3 , new java . lang . Object [ ] { "XXX" , "TextAttribute" } ) . header ( "Authorization" , ( "Authorization" 4 + accessToken ) ) ) ; result1 . andExpect ( status ( ) . isNotFound ( ) ) ; result1 . andExpect ( jsonPath ( "$.payload" , org . hamcrest . Matchers . hasSize ( 0 ) ) ) ; result1 . andExpect ( jsonPath ( "$.errors" , org . hamcrest . Matchers . hasSize ( 1 ) ) ) ; result1 . andExpect ( jsonPath ( "$.metaData.size()" , org . hamcrest . CoreMatchers . is ( 0 ) ) ) ; org . springframework . test . web . servlet . ResultActions result2 = mockMvc . perform ( get ( "Authorization" 3 , new java . lang . Object [ ] { "Authorization" 2 , "Authorization" 6 } ) . header ( "Authorization" , ( "Authorization" 4 + accessToken ) ) ) ; result2 . andExpect ( status ( ) . isNotFound ( ) ) ; result2 . andExpect ( jsonPath ( "$.payload" , org . hamcrest . Matchers . hasSize ( 0 ) ) ) ; result2 . andExpect ( jsonPath ( "$.errors" , org . hamcrest . Matchers . hasSize ( 1 ) ) ) ; result2 . andExpect ( jsonPath ( "$.metaData.size()" , org . hamcrest . CoreMatchers . is ( 0 ) ) ) ; org . springframework . test . web . servlet . ResultActions result3 = mockMvc . perform ( get ( "Authorization" 3 , new java . lang . Object [ ] { "Authorization" 2 , "TextAttribute" } ) . header ( "Authorization" , ( "Authorization" 4 + accessToken ) ) ) ; result3 . andExpect ( status ( ) . isOk ( ) ) ; result3 . andExpect ( jsonPath ( "$.payload.code" , org . hamcrest . CoreMatchers . is ( "TextAttribute" ) ) ) ; result3 . andExpect ( jsonPath ( "Authorization" 5 , org . hamcrest . CoreMatchers . is ( "Text" ) ) ) ; result3 . andExpect ( jsonPath ( "$.errors" , org . hamcrest . Matchers . hasSize ( 0 ) ) ) ; result3 . andExpect ( jsonPath ( "$.metaData.size()" , org . hamcrest . CoreMatchers . is ( 1 ) ) ) ; result3 . andExpect ( jsonPath ( "Authorization" 0 , org . hamcrest . CoreMatchers . is ( "Authorization" 2 ) ) ) ; } finally { if ( null != ( this . userProfileManager . getEntityPrototype ( "Authorization" 2 ) ) ) { ( ( com . agiletec . aps . system . common . entity . IEntityTypesConfigurer ) ( this . userProfileManager ) ) . removeEntityPrototype ( "Authorization" 2 ) ; } } } getEntityPrototype ( java . lang . String ) { com . agiletec . aps . system . common . entity . model . IApsEntity prototype = null ; try { com . agiletec . aps . system . common . entity . model . IApsEntity mainPrototype = this . getEntityTypeFactory ( ) . extractEntityType ( typeCode , this . getEntityClass ( ) , this . getConfigItemName ( ) , this . getEntityTypeDom ( ) , super . getName ( ) , this . getEntityDom ( ) ) ; if ( null == mainPrototype ) { return null ; } prototype = mainPrototype . getEntityPrototype ( ) ; } catch ( java . lang . Exception e ) { com . agiletec . aps . system . common . entity . ApsEntityManager . logger . error ( "Error<sp>while<sp>extracting<sp>entity<sp>type<sp>{}" , typeCode , e ) ; throw new java . lang . RuntimeException ( ( "Error<sp>while<sp>extracting<sp>entity<sp>type<sp>" + typeCode ) , e ) ; } return prototype ; }
|
org . junit . Assert . assertNull ( this . userProfileManager . getEntityPrototype ( "Authorization" 2 ) )
|
testMaxObjectLength ( ) { io . netty . channel . embedded . EmbeddedChannel ch = new io . netty . channel . embedded . EmbeddedChannel ( new io . netty . handler . codec . json . JsonObjectDecoder ( 6 ) ) ; try { ch . writeInbound ( io . netty . buffer . Unpooled . copiedBuffer ( "[2,4,5]" , CharsetUtil . UTF_8 ) ) ; } finally { "<AssertPlaceHolder>" ; } org . junit . Assert . fail ( ) ; } finish ( ) { org . junit . Assert . assertFalse ( channel . finish ( ) ) ; }
|
org . junit . Assert . assertFalse ( ch . finish ( ) )
|
testDescribeProcessParsing ( ) { if ( org . geotools . data . wps . ParsingTest . DISABLE ) { return ; } java . lang . Object object ; java . io . BufferedReader in = null ; try { org . geotools . xsd . Configuration config = new org . geotools . wps . WPSConfiguration ( ) ; java . net . URL url ; if ( org . geotools . data . wps . ParsingTest . ONLINE ) { url = new java . net . URL ( "http://schemas.opengis.net/wps/1.0.0/examples/40_wpsDescribeProcess_response.xml" ) ; } else { url = org . geotools . TestData . url ( this , "referenceProcessDescriptions.xml" ) ; } org . geotools . xsd . Parser parser = new org . geotools . xsd . Parser ( config ) ; in = new java . io . BufferedReader ( new java . io . InputStreamReader ( url . openStream ( ) ) ) ; object = parser . parse ( in ) ; net . opengis . wps10 . ProcessDescriptionsType processDesc = ( ( net . opengis . wps10 . ProcessDescriptionsType ) ( object ) ) ; "<AssertPlaceHolder>" ; } finally { in . close ( ) ; } } parse ( java . lang . String ) { if ( ( text == null ) || ( ( ( text = text . trim ( ) ) . length ( ) ) == 0 ) ) { return null ; } final java . text . ParsePosition position = new java . text . ParsePosition ( 0 ) ; final java . lang . Object value = parseObject ( text , position ) ; int index = position . getIndex ( ) ; final int error = position . getErrorIndex ( ) ; if ( ( error >= 0 ) && ( error < index ) ) { index = error ; } if ( index < ( text . length ( ) ) ) { doLogWarning ( org . geotools . util . logging . LoggedFormat . formatUnparsable ( text , 0 , index , getWarningLocale ( ) , level ) ) ; } else if ( ( value != null ) && ( ! ( type . isInstance ( value ) ) ) ) { doLogWarning ( org . geotools . metadata . i18n . Errors . getResources ( getWarningLocale ( ) ) . getLogRecord ( level , ErrorKeys . ILLEGAL_CLASS_ .2 , value . getClass ( ) , type ) ) ; return null ; } return type . cast ( value ) ; }
|
org . junit . Assert . assertNotNull ( processDesc )
|
testSerialization ( ) { org . jfree . chart . axis . DateAxis a1 = new org . jfree . chart . axis . DateAxis ( "Test<sp>Axis" ) ; org . jfree . chart . axis . DateAxis a2 = ( ( org . jfree . chart . axis . DateAxis ) ( org . jfree . chart . TestUtils . serialised ( a1 ) ) ) ; "<AssertPlaceHolder>" ; } serialised ( java . lang . Object ) { java . lang . Object result = null ; java . io . ByteArrayOutputStream buffer = new java . io . ByteArrayOutputStream ( ) ; java . io . ObjectOutput out ; try { out = new java . io . ObjectOutputStream ( buffer ) ; out . writeObject ( original ) ; out . close ( ) ; java . io . ObjectInput in = new java . io . ObjectInputStream ( new java . io . ByteArrayInputStream ( buffer . toByteArray ( ) ) ) ; result = in . readObject ( ) ; in . close ( ) ; } catch ( java . io . IOException e ) { throw new java . lang . RuntimeException ( e ) ; } catch ( java . lang . ClassNotFoundException e ) { throw new java . lang . RuntimeException ( e ) ; } return result ; }
|
org . junit . Assert . assertEquals ( a1 , a2 )
|
test_CaptureEnPassantFromHFile ( ) { classUnderTest = addWhitePawn ( GenericPosition . h5 ) ; addBlackPawn ( GenericPosition . g7 ) ; pm = new eubos . board . pieces . PositionManager ( new eubos . board . Board ( pl ) , eubos . board . pieces . Piece . Colour . white ) ; pm . performMove ( new eubos . board . pieces . GenericMove ( GenericPosition . g7 , GenericPosition . g5 ) ) ; ml = classUnderTest . generateMoves ( pm . getTheBoard ( ) ) ; expectedMove = new eubos . board . pieces . GenericMove ( GenericPosition . h5 , GenericPosition . g6 ) ; "<AssertPlaceHolder>" ; } getTheBoard ( ) { return theBoard ; }
|
org . junit . Assert . assertTrue ( ml . contains ( expectedMove ) )
|
testHorizontalRandomly ( ) { final int TRIALS = 100000 ; for ( int i = 0 ; i < TRIALS ; i ++ ) { int len = ( org . geogebra . commands . ConvexHullTest . rand . nextInt ( 30 ) ) + 1 ; java . util . List < org . geogebra . common . kernel . discrete . geom . Point2D > points = new java . util . ArrayList ( ) ; if ( org . geogebra . commands . ConvexHullTest . rand . nextBoolean ( ) ) { double y = org . geogebra . commands . ConvexHullTest . rand . nextGaussian ( ) ; for ( int j = 0 ; j < len ; j ++ ) points . add ( new org . geogebra . common . kernel . discrete . geom . Point2D ( org . geogebra . commands . ConvexHullTest . rand . nextGaussian ( ) , y ) ) ; } else { int y = ( org . geogebra . commands . ConvexHullTest . rand . nextInt ( 20 ) ) - 10 ; for ( int j = 0 ; j < len ; j ++ ) points . add ( new org . geogebra . common . kernel . discrete . geom . Point2D ( org . geogebra . commands . ConvexHullTest . rand . nextInt ( 30 ) , y ) ) ; } java . util . List < org . geogebra . common . kernel . discrete . geom . Point2D > actual = org . geogebra . common . kernel . discrete . geom . algorithms . ConvexHull . makeHull ( points ) ; java . util . List < org . geogebra . common . kernel . discrete . geom . Point2D > expected = new java . util . ArrayList ( ) ; expected . add ( java . util . Collections . min ( points ) ) ; if ( ! ( java . util . Collections . max ( points ) . equals ( expected . get ( 0 ) ) ) ) expected . add ( java . util . Collections . max ( points ) ) ; "<AssertPlaceHolder>" ; } } max ( double [ ] ) { double max = data [ 0 ] ; for ( int i = 0 ; i < ( data . length ) ; i ++ ) { if ( ( data [ i ] ) > max ) { max = data [ i ] ; } } return max ; }
|
org . junit . Assert . assertEquals ( expected , actual )
|
testSettingValueFulfillsPromise ( ) { final de . markusrother . concurrent . Promise < java . lang . String > promise = new de . markusrother . concurrent . Promise ( ) ; promise . fulfill ( "foobar" ) ; "<AssertPlaceHolder>" ; } isFulfilled ( ) { return isFulfilled ; }
|
org . junit . Assert . assertTrue ( promise . isFulfilled ( ) )
|
testCharsetLookupNullInput ( ) { java . nio . charset . Charset c1 = org . apache . james . mime4j . util . CharsetUtil . lookup ( null ) ; "<AssertPlaceHolder>" ; } lookup ( java . lang . String ) { if ( name == null ) { return null ; } try { return java . nio . charset . Charset . forName ( name ) ; } catch ( java . nio . charset . IllegalCharsetNameException ex ) { return null ; } catch ( java . nio . charset . UnsupportedCharsetException ex ) { return null ; } }
|
org . junit . Assert . assertNull ( c1 )
|
testSwitchPopupToAnotherPresenter2 ( com . gwtplatform . mvp . client . PresenterWidgetTest$PresenterWidgetA , com . gwtplatform . mvp . client . PresenterWidgetTest$PresenterWidgetB , com . gwtplatform . mvp . client . PresenterWidgetTest$PresenterWidgetPopupC ) { presenterWidgetA . internalReveal ( ) ; presenterWidgetB . internalReveal ( ) ; presenterWidgetA . addToPopupSlot ( popupContentC ) ; presenterWidgetB . addToPopupSlot ( popupContentC ) ; presenterWidgetB . internalHide ( ) ; presenterWidgetA . addToPopupSlot ( popupContentC ) ; "<AssertPlaceHolder>" ; } isVisible ( ) { return visible ; }
|
org . junit . Assert . assertTrue ( popupContentC . isVisible ( ) )
|
multiFieldTest ( ) { if ( org . apache . hyracks . storage . am . bloomfilter . LOGGER . isInfoEnabled ( ) ) { org . apache . hyracks . storage . am . bloomfilter . LOGGER . info ( "TESTING<sp>BLOOM<sp>FILTER" ) ; } org . apache . hyracks . storage . common . buffercache . IBufferCache bufferCache = harness . getBufferCache ( ) ; int numElements = 10000 ; int [ ] keyFields = new int [ ] { 2 , 4 , 1 } ; org . apache . hyracks . storage . am . bloomfilter . impls . BloomFilter bf = new org . apache . hyracks . storage . am . bloomfilter . impls . BloomFilter ( bufferCache , harness . getFileReference ( ) , keyFields ) ; double acceptanleFalsePositiveRate = 0.1 ; int maxBucketsPerElement = org . apache . hyracks . storage . am . bloomfilter . impls . BloomCalculations . maxBucketsPerElement ( numElements ) ; org . apache . hyracks . storage . am . bloomfilter . impls . BloomFilterSpecification bloomFilterSpec = org . apache . hyracks . storage . am . bloomfilter . impls . BloomCalculations . computeBloomSpec ( maxBucketsPerElement , acceptanleFalsePositiveRate ) ; bf . create ( ) ; bf . activate ( ) ; org . apache . hyracks . storage . common . IIndexBulkLoader builder = bf . createBuilder ( numElements , bloomFilterSpec . getNumHashes ( ) , bloomFilterSpec . getNumBucketsPerElements ( ) ) ; int fieldCount = 5 ; org . apache . hyracks . api . dataflow . value . ISerializerDeserializer [ ] fieldSerdes = new org . apache . hyracks . api . dataflow . value . ISerializerDeserializer [ ] { new org . apache . hyracks . dataflow . common . data . marshalling . UTF8StringSerializerDeserializer ( ) , new org . apache . hyracks . dataflow . common . data . marshalling . UTF8StringSerializerDeserializer ( ) , org . apache . hyracks . dataflow . common . data . marshalling . IntegerSerializerDeserializer . INSTANCE , new org . apache . hyracks . dataflow . common . data . marshalling . UTF8StringSerializerDeserializer ( ) , new org . apache . hyracks . dataflow . common . data . marshalling . UTF8StringSerializerDeserializer ( ) } ; org . apache . hyracks . dataflow . common . comm . io . ArrayTupleBuilder tupleBuilder = new org . apache . hyracks . dataflow . common . comm . io . ArrayTupleBuilder ( fieldCount ) ; org . apache . hyracks . dataflow . common . comm . io . ArrayTupleReference tuple = new org . apache . hyracks . dataflow . common . comm . io . ArrayTupleReference ( ) ; int maxLength = 20 ; java . util . ArrayList < java . lang . String > s1 = new java . util . ArrayList ( ) ; java . util . ArrayList < java . lang . String > s2 = new java . util . ArrayList ( ) ; java . util . ArrayList < java . lang . String > s3 = new java . util . ArrayList ( ) ; java . util . ArrayList < java . lang . String > s4 = new java . util . ArrayList ( ) ; for ( int i = 0 ; i < numElements ; ++ i ) { s1 . add ( randomString ( ( ( rnd . nextInt ( ) ) % maxLength ) , rnd ) ) ; s2 . add ( randomString ( ( ( rnd . nextInt ( ) ) % maxLength ) , rnd ) ) ; s3 . add ( randomString ( ( ( rnd . nextInt ( ) ) % maxLength ) , rnd ) ) ; s4 . add ( randomString ( ( ( rnd . nextInt ( ) ) % maxLength ) , rnd ) ) ; } for ( int i = 0 ; i < numElements ; ++ i ) { org . apache . hyracks . dataflow . common . utils . TupleUtils . createTuple ( tupleBuilder , tuple , fieldSerdes , s1 . get ( i ) , s2 . get ( i ) , i , s3 . get ( i ) , s4 . get ( i ) ) ; builder . add ( tuple ) ; } builder . end ( ) ; bf . pinAllPages ( ) ; long [ ] hashes = org . apache . hyracks . storage . am . bloomfilter . impls . BloomFilter . createHashArray ( ) ; for ( int i = 0 ; i < numElements ; ++ i ) { org . apache . hyracks . dataflow . common . utils . TupleUtils . createTuple ( tupleBuilder , tuple , fieldSerdes , s1 . get ( i ) , s2 . get ( i ) , i , s3 . get ( i ) , s4 . get ( i ) ) ; "<AssertPlaceHolder>" ; } bf . unpinAllPages ( ) ; bf . deactivate ( ) ; bf . destroy ( ) ; } contains ( org . apache . hyracks . dataflow . common . data . accessors . ITupleReference , long [ ] ) { if ( ( numPages ) == 0 ) { return false ; } org . apache . hyracks . storage . am . bloomfilter . impls . MurmurHash128Bit . hash3_x64_128 ( tuple , keyFields , org . apache . hyracks . storage . am . bloomfilter . impls . BloomFilter . SEED , hashes ) ; if ( ( version ) == ( org . apache . hyracks . storage . am . bloomfilter . impls . BloomFilter . BLOCKED_BLOOM_FILTER_VERSION ) ) { return blockContains ( hashes ) ; } else { return legacyContains ( hashes ) ; } }
|
org . junit . Assert . assertTrue ( bf . contains ( tuple , hashes ) )
|
dockerShell ( ) { java . lang . Boolean result = dJenkins . call ( new com . github . kostyasha . it . tests . FreestyleTest . DockerShellCallable ( com . github . kostyasha . it . tests . FreestyleTest . slaveJnlpImage ) ) ; "<AssertPlaceHolder>" ; } call ( com . github . kostyasha . it . other . BCallable ) { return caller ( cli , callable ) ; }
|
org . junit . Assert . assertThat ( result , org . hamcrest . Matchers . is ( true ) )
|
testUnionWithNoDuplicates ( ) { double [ ] leftTimes = new double [ ] { 1.0 , 2.0 , 3.0 } ; double [ ] rightTimes = new double [ ] { 0.5 , 1.5 } ; net . finmath . time . TimeDiscretization union = new net . finmath . time . TimeDiscretizationFromArray ( leftTimes ) . union ( new net . finmath . time . TimeDiscretizationFromArray ( rightTimes ) ) ; "<AssertPlaceHolder>" ; } getAsDoubleArray ( ) { return timeDiscretization . clone ( ) ; }
|
org . junit . Assert . assertThat ( union . getAsDoubleArray ( ) , org . hamcrest . CoreMatchers . is ( org . hamcrest . CoreMatchers . equalTo ( new double [ ] { 0.5 , 1.0 , 1.5 , 2.0 , 3.0 } ) ) )
|
testDoRequestGet ( ) { addPageRequest ( "GET" , "http://test.com/index.html?query=ab+cd&page=1" , "Test<sp>Page" ) ; com . gistlabs . mechanize . Resource page = agent ( ) . doRequest ( "http://test.com/index.html" ) . add ( "query" , "ab<sp>cd" ) . add ( "page" , "1" ) . get ( ) ; "<AssertPlaceHolder>" ; } asString ( ) { return htmlElements . toString ( ) ; }
|
org . junit . Assert . assertEquals ( "Test<sp>Page" , page . asString ( ) )
|
testTapParserException1 ( ) { exception = new org . tap4j . parser . ParserException ( ) ; "<AssertPlaceHolder>" ; }
|
org . junit . Assert . assertNotNull ( exception )
|
testModeRegistration ( ) { for ( net . tridentsdk . ui . chat . ClientChatMode mode : net . tridentsdk . ui . chat . ClientChatMode . values ( ) ) { "<AssertPlaceHolder>" ; } } of ( int ) { for ( net . tridentsdk . ui . chat . ClientChatMode chatMode : net . tridentsdk . ui . chat . ClientChatMode . values ( ) ) { if ( ( chatMode . getData ( ) ) == data ) { return chatMode ; } } throw new java . lang . IllegalArgumentException ( ( "no<sp>client<sp>chat<sp>mode<sp>with<sp>id=" + data ) ) ; }
|
org . junit . Assert . assertEquals ( mode , net . tridentsdk . ui . chat . ClientChatMode . of ( mode . getData ( ) ) )
|
readDataConfigTest ( ) { doReturn ( immediateFluentFuture ( java . util . Optional . of ( org . opendaylight . restconf . nb . rfc8040 . rests . utils . ReadDataTransactionUtilTest . DATA . data3 ) ) ) . when ( read ) . read ( LogicalDatastoreType . CONFIGURATION , org . opendaylight . restconf . nb . rfc8040 . rests . utils . ReadDataTransactionUtilTest . DATA . path ) ; doReturn ( org . opendaylight . restconf . nb . rfc8040 . rests . utils . ReadDataTransactionUtilTest . DATA . path ) . when ( context ) . getInstanceIdentifier ( ) ; final java . lang . String valueOfContent = RestconfDataServiceConstant . ReadData . CONFIG ; final org . opendaylight . yangtools . yang . data . api . schema . NormalizedNode < ? , ? > normalizedNode = org . opendaylight . restconf . nb . rfc8040 . rests . utils . ReadDataTransactionUtil . readData ( valueOfContent , wrapper , schemaContext ) ; "<AssertPlaceHolder>" ; } readData ( java . lang . String , org . opendaylight . restconf . nb . rfc8040 . rests . transactions . TransactionVarsWrapper , org . opendaylight . yangtools . yang . model . api . SchemaContext ) { return org . opendaylight . restconf . nb . rfc8040 . rests . utils . ReadDataTransactionUtil . readData ( valueOfContent , transactionNode , null , schemaContext ) ; }
|
org . junit . Assert . assertEquals ( org . opendaylight . restconf . nb . rfc8040 . rests . utils . ReadDataTransactionUtilTest . DATA . data3 , normalizedNode )
|
testGetImportSession ( ) { setup ( org . pentaho . platform . plugin . services . importer . RepositoryFileImportFileHandlerTest . MIMENAME , org . pentaho . platform . plugin . services . importer . RepositoryFileImportFileHandlerTest . MIME_EXTENSION , "" , "" , false ) ; "<AssertPlaceHolder>" ; } setup ( java . lang . String , java . lang . String , java . lang . String , java . lang . String , boolean ) { java . util . List < java . lang . String > extensions = java . util . Arrays . asList ( extension ) ; org . pentaho . platform . api . mimetype . IMimeType mimeType = new org . pentaho . platform . core . mimetype . MimeType ( mimeTypeName , extensions ) ; mimeType . setConverter ( mock ( org . pentaho . platform . api . repository2 . unified . Converter . class ) ) ; java . util . List < org . pentaho . platform . api . mimetype . IMimeType > mimeTypeList = java . util . Arrays . asList ( mimeType ) ; org . pentaho . platform . plugin . services . importer . NameBaseMimeResolver mimeResolver = new org . pentaho . platform . plugin . services . importer . NameBaseMimeResolver ( ) ; mimeResolver . addMimeType ( mimeType ) ; SolutionFileImportHelper . testMimeResolver = mimeResolver ; userProvider = new org . pentaho . platform . plugin . services . importer . RepositoryFileImportFileHandlerTest . UserProvider ( ) ; mockRepository = new org . pentaho . test . platform . repository2 . unified . MockUnifiedRepository ( userProvider ) ; fileHandler = new org . pentaho . platform . plugin . services . importer . RepositoryFileImportFileHandler ( mimeTypeList ) ; fileHandler . setRepository ( mockRepository ) ; fileHandler . setDefaultAclHandler ( new org . pentaho . platform . plugin . services . importer . RepositoryFileImportFileHandlerTest . DefaultAclHandler ( ) ) ; fileHandler . setKnownExtensions ( java . util . Arrays . asList ( "prpt" ) ) ; org . pentaho . platform . plugin . services . importer . IPlatformImporter mockPlatformImporter = mock ( org . pentaho . platform . plugin . services . importer . IPlatformImporter . class ) ; when ( mockPlatformImporter . getRepositoryImportLogger ( ) ) . thenReturn ( new org . pentaho . platform . plugin . services . importexport . Log4JRepositoryImportLogger ( ) ) ; org . pentaho . platform . plugin . services . importexport . ImportSession . iPlatformImporter = mockPlatformImporter ; importSession = fileHandler . getImportSession ( ) ; importSession . initialize ( ) ; mockBundle = mock ( org . pentaho . platform . plugin . services . importer . RepositoryFileImportBundle . class ) ; when ( mockBundle . getPath ( ) ) . thenReturn ( path ) ; when ( mockBundle . getName ( ) ) . thenReturn ( fileName ) ; when ( mockBundle . getMimeType ( ) ) . thenReturn ( mimeTypeName ) ; when ( mockBundle . isFolder ( ) ) . thenReturn ( folder ) ; }
|
org . junit . Assert . assertNotNull ( importSession )
|
testRenameOrderingComparator ( ) { java . util . List < java . lang . String > fieldPaths = java . util . Arrays . asList ( "/elementPrim" , "/elementList[0]/elementMap/elementList[1]/elementPrim" 1 , "/elementList[0]/elementMap/elementList[1]/elementPrim" 0 , "/elementList[0]/elementMap/elementList[1]/elementPrim" 5 , "/elementList2[0]" , "/elementList2[1]" , "/elementList[0]/elementMap/elementList[1]/elementPrim" 4 , "/elementList[2]/elementMap/elementList[0]/elementPrim" , "/elementList[0]/elementMap/elementList[1]/elementPrim" 2 , "/elementList[0]/elementMap/elementList[1]/elementPrim" , "/elementList[1]/elementMap/elementList[1]/elementPrim" , "/elementList[0]/elementMap/elementList[0]/elementPrim" , "/elementList[2]/elementMap/elementList[2]/elementPrim" , "/elementList[0]/elementMap/elementList[2]/elementPrim" , "/elementList[2]/elementMap/elementList[1]/elementPrim" , "/elementList[0]/elementMap/elementList[1]/elementPrim" 3 ) ; java . util . Collections . sort ( fieldPaths , new com . streamsets . pipeline . stage . processor . fieldrenamer . FieldRenamerProcessor . FieldRenamerPathComparator ( ) ) ; java . util . List < java . lang . String > expectedOrder = java . util . Arrays . asList ( "/elementPrim" , "/elementList[0]/elementMap/elementList[1]/elementPrim" 1 , "/elementList[0]/elementMap/elementList[1]/elementPrim" 0 , "/elementList[0]/elementMap/elementList[1]/elementPrim" 5 , "/elementList[0]/elementMap/elementList[1]/elementPrim" 4 , "/elementList2[1]" , "/elementList2[0]" , "/elementList[2]/elementMap/elementList[2]/elementPrim" , "/elementList[2]/elementMap/elementList[1]/elementPrim" , "/elementList[2]/elementMap/elementList[0]/elementPrim" , "/elementList[0]/elementMap/elementList[1]/elementPrim" 2 , "/elementList[1]/elementMap/elementList[1]/elementPrim" , "/elementList[0]/elementMap/elementList[1]/elementPrim" 3 , "/elementList[0]/elementMap/elementList[2]/elementPrim" , "/elementList[0]/elementMap/elementList[1]/elementPrim" , "/elementList[0]/elementMap/elementList[0]/elementPrim" ) ; for ( int i = 0 ; i < ( expectedOrder . size ( ) ) ; i ++ ) { "<AssertPlaceHolder>" ; } } get ( java . lang . String ) { return null ; }
|
org . junit . Assert . assertEquals ( expectedOrder . get ( i ) , fieldPaths . get ( i ) )
|
testCholesky ( ) { mikera . matrixx . AMatrix m = mikera . matrixx . Matrixx . create ( mikera . vectorz . Vector . of ( 4 , 12 , ( - 16 ) ) , mikera . vectorz . Vector . of ( 12 , 37 , ( - 43 ) ) , mikera . vectorz . Vector . of ( ( - 16 ) , ( - 43 ) , 98 ) ) ; mikera . matrixx . AMatrix L = mikera . matrixx . decompose . Cholesky . decompose ( m ) . getL ( ) ; "<AssertPlaceHolder>" ; } of ( double , double , double ) { return new mikera . vectorz . Vector3 ( x , y , z ) ; }
|
org . junit . Assert . assertEquals ( mikera . matrixx . Matrixx . create ( mikera . vectorz . Vector . of ( 2 , 0 , 0 ) , mikera . vectorz . Vector . of ( 6 , 1 , 0 ) , mikera . vectorz . Vector . of ( ( - 8 ) , 5 , 3 ) ) , L )
|
hashingStrategy ( ) { com . gs . collections . impl . map . strategy . mutable . UnifiedMapWithHashingStrategy < java . lang . Integer , java . lang . Integer > map = com . gs . collections . impl . map . strategy . mutable . UnifiedMapWithHashingStrategy . newWithKeysValues ( com . gs . collections . impl . map . strategy . mutable . UnifiedMapWithHashingStrategyTest . INTEGER_HASHING_STRATEGY , 1 , 1 , 2 , 2 ) ; "<AssertPlaceHolder>" ; } hashingStrategy ( ) { com . gs . collections . impl . map . strategy . mutable . UnifiedMapWithHashingStrategy < java . lang . Integer , java . lang . Integer > map = com . gs . collections . impl . map . strategy . mutable . UnifiedMapWithHashingStrategy . newWithKeysValues ( com . gs . collections . impl . map . strategy . mutable . UnifiedMapWithHashingStrategyTest . INTEGER_HASHING_STRATEGY , 1 , 1 , 2 , 2 ) ; org . junit . Assert . assertSame ( com . gs . collections . impl . map . strategy . mutable . UnifiedMapWithHashingStrategyTest . INTEGER_HASHING_STRATEGY , map . hashingStrategy ( ) ) ; }
|
org . junit . Assert . assertSame ( com . gs . collections . impl . map . strategy . mutable . UnifiedMapWithHashingStrategyTest . INTEGER_HASHING_STRATEGY , map . hashingStrategy ( ) )
|
testOneHot1 ( ) { org . nd4j . autodiff . opvalidation . List < java . lang . String > failed = new org . nd4j . autodiff . opvalidation . ArrayList ( ) ; for ( int i = - 1 ; i <= 0 ; i ++ ) { org . nd4j . linalg . api . ndarray . INDArray indicesArr = org . nd4j . linalg . factory . Nd4j . create ( new double [ ] { 0 , 1 , 2 } ) ; int depth = 3 ; org . nd4j . autodiff . samediff . SameDiff sd = org . nd4j . autodiff . samediff . SameDiff . create ( ) ; org . nd4j . autodiff . samediff . SDVariable indices = sd . var ( indicesArr ) ; org . nd4j . autodiff . samediff . SDVariable oneHot = sd . oneHot ( indices , depth , i , 1.0 , 0.0 , DataType . DOUBLE ) ; org . nd4j . linalg . api . ndarray . INDArray exp = org . nd4j . linalg . factory . Nd4j . eye ( 3 ) . castTo ( DataType . DOUBLE ) ; java . lang . String msg = "Axis:<sp>" + i ; log . info ( ( "Test<sp>case:<sp>" + msg ) ) ; java . lang . String err = org . nd4j . autodiff . validation . OpValidation . validate ( new org . nd4j . autodiff . validation . TestCase ( sd ) . testName ( msg ) . gradientCheck ( false ) . expected ( oneHot , exp ) ) ; if ( err != null ) { failed . add ( err ) ; } } "<AssertPlaceHolder>" ; } toString ( ) { java . lang . StringBuilder buff = new java . lang . StringBuilder ( "[" ) ; int sz = objects . size ( ) ; int i ; for ( i = 0 ; i < sz ; i ++ ) { java . lang . Object e = objects . get ( i ) ; buff . append ( e ) ; if ( i < ( sz - 1 ) ) buff . append ( "<sp>,<sp>" ) ; } buff . append ( "]" ) ; return buff . toString ( ) ; }
|
org . junit . Assert . assertEquals ( failed . toString ( ) , 0 , failed . size ( ) )
|
shouldReturnReturnWhenTimeIsBeforeTheLowerLimit ( ) { final org . neo4j . kernel . impl . transaction . log . pruning . EntryTimespanThreshold threshold = new org . neo4j . kernel . impl . transaction . log . pruning . EntryTimespanThreshold ( clock , java . util . concurrent . TimeUnit . MILLISECONDS , 100 ) ; when ( source . getFirstStartRecordTimestamp ( version ) ) . thenReturn ( 800L ) ; threshold . init ( ) ; final boolean result = threshold . reached ( file , version , source ) ; "<AssertPlaceHolder>" ; } reached ( java . io . File , long , org . neo4j . kernel . impl . transaction . log . LogFileInformation ) { try { long lastTx = source . getFirstEntryId ( ( version + 1 ) ) ; if ( lastTx == ( - 1 ) ) { throw new java . lang . IllegalStateException ( ( "The<sp>next<sp>version<sp>should<sp>always<sp>exist,<sp>since<sp>this<sp>is<sp>called<sp>after<sp>rotation<sp>and<sp>the<sp>" + "PruneStrategy<sp>never<sp>checks<sp>the<sp>current<sp>active<sp>log<sp>file" ) ) ; } long highest = source . getLastEntryId ( ) ; return ( highest - lastTx ) >= ( maxTransactionCount ) ; } catch ( java . io . IOException e ) { throw new java . lang . RuntimeException ( e ) ; } }
|
org . junit . Assert . assertTrue ( result )
|
givenZeroValue_whenSetMaxFileSizeBytes_thenValueIsStored ( ) { final int maxFileSizeBytes = 0 ; config . setMaxFileSizeBytes ( maxFileSizeBytes ) ; "<AssertPlaceHolder>" ; } getMaxFileSizeBytes ( ) { return maxFileSizeBytes ; }
|
org . junit . Assert . assertThat ( config . getMaxFileSizeBytes ( ) , org . hamcrest . CoreMatchers . equalTo ( maxFileSizeBytes ) )
|
listShouldNotContainIncorrectObjects ( ) { java . lang . Object incorrect = new java . lang . Object ( ) ; "<AssertPlaceHolder>" ; }
|
org . junit . Assert . assertThat ( list . contains ( incorrect ) , org . hamcrest . Matchers . is ( false ) )
|
testEarliestChild ( ) { org . dcache . services . info . base . StateValue testVal = newEphemeralStateValue ( ) ; "<AssertPlaceHolder>" ; } getEarliestChildExpiryDate ( ) { return ( _earliestChildExpiry ) != null ? new java . util . Date ( _earliestChildExpiry . getTime ( ) ) : null ; }
|
org . junit . Assert . assertNull ( testVal . getEarliestChildExpiryDate ( ) )
|
testUniqueLabels ( ) { java . util . List < iot . jcypher . query . result . JcError > errors ; iot . jcypher . domain . IDomainAccess da = iot . jcypher . domain . DomainAccessFactory . createDomainAccess ( test . domainmapping . DomainMappingTest . dbAccess , test . domainmapping . DomainMappingTest . domainName ) ; iot . jcypher . domain . IDomainAccess da1 ; boolean equals ; test . domainmapping . unique_label . Person person ; test . domainmapping . unique_label . Person person_1 ; test . domainmapping . unique_label . other . Person otherPerson ; test . domainmapping . unique_label . other . Person otherPerson_1 ; person = new test . domainmapping . unique_label . Person ( ) ; person . setFirstName ( "John" ) ; person . setLastName ( "Smith" ) ; otherPerson = new test . domainmapping . unique_label . other . Person ( ) ; otherPerson . setName ( "John<sp>Smith" ) ; java . util . List < java . lang . Object > domainObjects = new java . util . ArrayList < java . lang . Object > ( ) ; domainObjects . add ( person ) ; domainObjects . add ( otherPerson ) ; errors = test . domainmapping . DomainMappingTest . dbAccess . clearDatabase ( ) ; if ( ( errors . size ( ) ) > 0 ) { printErrors ( errors ) ; throw new iot . jcypher . query . result . JcResultException ( errors ) ; } errors = da . store ( domainObjects ) ; if ( ( errors . size ( ) ) > 0 ) { printErrors ( errors ) ; throw new iot . jcypher . query . result . JcResultException ( errors ) ; } iot . jcypher . domain . SyncInfo syncInfo_1 = da . getSyncInfo ( person ) ; iot . jcypher . domain . SyncInfo syncInfo_2 = da . getSyncInfo ( otherPerson ) ; da1 = iot . jcypher . domain . DomainAccessFactory . createDomainAccess ( test . domainmapping . DomainMappingTest . dbAccess , test . domainmapping . DomainMappingTest . domainName ) ; person_1 = da1 . loadById ( test . domainmapping . unique_label . Person . class , ( - 1 ) , syncInfo_1 . getId ( ) ) ; otherPerson_1 = da1 . loadById ( test . domainmapping . unique_label . other . Person . class , ( - 1 ) , syncInfo_2 . getId ( ) ) ; equals = ( ( person . getFirstName ( ) . equals ( person_1 . getFirstName ( ) ) ) && ( person . getLastName ( ) . equals ( person_1 . getLastName ( ) ) ) ) && ( otherPerson . getName ( ) . equals ( otherPerson_1 . getName ( ) ) ) ; "<AssertPlaceHolder>" ; return ; } getName ( ) { return name ; }
|
org . junit . Assert . assertTrue ( equals )
|
testConcurrentModificationOfProperties ( ) { org . securegraph . test . Vertex v = graph . prepareVertex ( "v1" , org . securegraph . test . GraphTestBase . VISIBILITY_EMPTY ) . setProperty ( "prop1" , "value1" , org . securegraph . test . GraphTestBase . VISIBILITY_A ) . setProperty ( "prop2" , "value2" , org . securegraph . test . GraphTestBase . VISIBILITY_A ) . save ( AUTHORIZATIONS_A_AND_B ) ; int i = 0 ; for ( org . securegraph . test . Property p : v . getProperties ( ) ) { "<AssertPlaceHolder>" ; if ( i == 0 ) { v . setProperty ( "prop3" , "value3" , org . securegraph . test . GraphTestBase . VISIBILITY_A , AUTHORIZATIONS_A_AND_B ) ; } i ++ ; } } toString ( ) { org . securegraph . Property prop = getP ( ) ; if ( prop == null ) { return null ; } return org . securegraph . cli . model . LazyProperty . toString ( prop , getToStringHeaderLine ( ) ) ; }
|
org . junit . Assert . assertNotNull ( p . toString ( ) )
|
link$top_AssociationEnd ( ) { java . lang . String top = java . lang . Integer . toString ( topMaxNum ) ; org . json . simple . JSONObject body = new org . json . simple . JSONObject ( ) ; try { body . put ( "__id" , toUserDataId ) ; createUserData ( body , HttpStatus . SC_CREATED , Setup . TEST_CELL1 , Setup . TEST_BOX1 , Setup . TEST_ODATA , "Sales" ) ; body . put ( "__id" , fromUserDataId ) ; createUserData ( body , HttpStatus . SC_CREATED , Setup . TEST_CELL1 , Setup . TEST_BOX1 , Setup . TEST_ODATA , "srcPath" 2 ) ; linkUserData ( "Sales" , toUserDataId , "srcPath" 2 , fromUserDataId ) ; com . fujitsu . dc . test . utils . TResponse res = com . fujitsu . dc . test . utils . Http . request ( "box/odatacol/list-link-with-query.txt" ) . with ( "cellPath" , Setup . TEST_CELL1 ) . with ( "srcPath" 3 , Setup . TEST_BOX1 ) . with ( "srcPath" 1 , Setup . TEST_ODATA ) . with ( "srcPath" , ( ( ( "srcPath" 2 + "srcPath" 4 ) + ( fromUserDataId ) ) + "srcPath" 5 ) ) . with ( "trgPath" , "Sales" ) . with ( "srcPath" 6 , ( "?\\$top=" + top ) ) . with ( "srcPath" 0 , com . fujitsu . dc . core . DcCoreConfig . getMasterToken ( ) ) . with ( "accept" , MediaType . APPLICATION_JSON ) . returns ( ) . statusCode ( HttpStatus . SC_OK ) . debug ( ) ; org . json . simple . JSONArray results = ( ( org . json . simple . JSONArray ) ( ( ( org . json . simple . JSONObject ) ( res . bodyAsJson ( ) . get ( "d" ) ) ) . get ( "results" ) ) ) ; "<AssertPlaceHolder>" ; } finally { deleteUserDataLinks ( "Sales" , toUserDataId , "srcPath" 2 , fromUserDataId ) ; deleteUserData ( Setup . TEST_CELL1 , Setup . TEST_BOX1 , Setup . TEST_ODATA , "srcPath" 2 , fromUserDataId , com . fujitsu . dc . core . DcCoreConfig . getMasterToken ( ) , HttpStatus . SC_NO_CONTENT ) ; deleteUserData ( Setup . TEST_CELL1 , Setup . TEST_BOX1 , Setup . TEST_ODATA , "Sales" , toUserDataId , com . fujitsu . dc . core . DcCoreConfig . getMasterToken ( ) , HttpStatus . SC_NO_CONTENT ) ; } } get ( java . lang . String ) { com . fujitsu . dc . test . jersey . DcRequest req = new com . fujitsu . dc . test . jersey . DcRequest ( url ) ; req . method = javax . ws . rs . HttpMethod . GET ; return req ; }
|
org . junit . Assert . assertEquals ( 1 , results . size ( ) )
|
testMultipleNestedProperty ( ) { java . lang . String a = "a" ; java . lang . String b = "b" ; java . lang . String nestedKey = "c.d" ; java . lang . String nestedProperty = java . lang . String . format ( "${%s}" , nestedKey ) ; int someValue = 1234 ; com . ctrip . framework . apollo . Config config = mock ( com . ctrip . framework . apollo . Config . class ) ; when ( config . getProperty ( eq ( a ) , anyString ( ) ) ) . thenReturn ( a ) ; when ( config . getProperty ( eq ( b ) , anyString ( ) ) ) . thenReturn ( b ) ; when ( config . getProperty ( eq ( java . lang . String . format ( "%s.%s" , a , b ) ) , anyString ( ) ) ) . thenReturn ( nestedProperty ) ; when ( config . getProperty ( eq ( nestedKey ) , anyString ( ) ) ) . thenReturn ( java . lang . String . valueOf ( someValue ) ) ; mockConfig ( ConfigConsts . NAMESPACE_APPLICATION , config ) ; org . springframework . context . annotation . AnnotationConfigApplicationContext context = new org . springframework . context . annotation . AnnotationConfigApplicationContext ( com . ctrip . framework . apollo . spring . JavaConfigPlaceholderTest . NestedPropertyConfig1 . class ) ; com . ctrip . framework . apollo . spring . JavaConfigPlaceholderTest . TestNestedPropertyBean bean = context . getBean ( com . ctrip . framework . apollo . spring . JavaConfigPlaceholderTest . TestNestedPropertyBean . class ) ; "<AssertPlaceHolder>" ; } getNestedProperty ( ) { return nestedProperty ; }
|
org . junit . Assert . assertEquals ( someValue , bean . getNestedProperty ( ) )
|
splitAtFractionPreservesOverallEventCount ( ) { long n = 55729L ; org . apache . beam . sdk . nexmark . sources . generator . GeneratorConfig initialConfig = makeConfig ( n ) ; long expected = ( initialConfig . getStopEventId ( ) ) - ( initialConfig . getStartEventId ( ) ) ; long actual = 0 ; org . apache . beam . sdk . nexmark . sources . generator . Generator initialGenerator = new org . apache . beam . sdk . nexmark . sources . generator . Generator ( initialConfig ) ; actual += consume ( 5000 , initialGenerator ) ; org . apache . beam . sdk . nexmark . sources . generator . GeneratorConfig remainConfig1 = initialGenerator . splitAtEventId ( 9000L ) ; org . apache . beam . sdk . nexmark . sources . generator . Generator remainGenerator1 = new org . apache . beam . sdk . nexmark . sources . generator . Generator ( remainConfig1 ) ; actual += consume ( 2000 , initialGenerator ) ; actual += consume ( 3000 , remainGenerator1 ) ; org . apache . beam . sdk . nexmark . sources . generator . GeneratorConfig remainConfig2 = remainGenerator1 . splitAtEventId ( 30000L ) ; org . apache . beam . sdk . nexmark . sources . generator . Generator remainGenerator2 = new org . apache . beam . sdk . nexmark . sources . generator . Generator ( remainConfig2 ) ; actual += consume ( initialGenerator ) ; actual += consume ( remainGenerator1 ) ; actual += consume ( remainGenerator2 ) ; "<AssertPlaceHolder>" ; } consume ( java . util . Iterator ) { long n = 0 ; while ( itr . hasNext ( ) ) { itr . next ( ) ; n ++ ; } return n ; }
|
org . junit . Assert . assertEquals ( expected , actual )
|
testColumns ( ) { "<AssertPlaceHolder>" ; } columns ( ) { return new org . richfaces . component . DataTableColumnsIterator ( this ) ; }
|
org . junit . Assert . assertTrue ( ( ( table . columns ( ) ) instanceof org . richfaces . component . DataTableColumnsIterator ) )
|
testContainsAll ( ) { listenedList . add ( "A" ) ; java . util . ArrayList < java . lang . String > listToAdd = new java . util . ArrayList < java . lang . String > ( ) ; listToAdd . add ( "B" ) ; listToAdd . add ( "C" ) ; listenedList . addAll ( listToAdd ) ; listenedList . add ( "D" ) ; "<AssertPlaceHolder>" ; } containsAll ( java . util . Collection ) { return this . list . containsAll ( c ) ; }
|
org . junit . Assert . assertTrue ( listenedList . containsAll ( listToAdd ) )
|