Columns: input (string, lengths 28 to 18.7k) | output (string, lengths 39 to 1.69k)
test ( ) { final cz . habarta . typescript . generator . Settings settings = cz . habarta . typescript . generator . TestUtils . settings ( ) ; settings . outputKind = TypeScriptOutputKind . module ; settings . outputFileType = TypeScriptFileType . implementationFile ; settings . mapClasses = ClassMapping . asClasses ; settings . extensions . add ( new cz . habarta . typescript . generator . ext . JsonDeserializationExtension ( ) ) ; final java . io . File actualFile = new java . io . File ( "target/JsonDeserializationTest-actual.ts" ) ; new cz . habarta . typescript . generator . TypeScriptGenerator ( settings ) . generateTypeScript ( cz . habarta . typescript . generator . Input . from ( cz . habarta . typescript . generator . JsonDeserializationTest . User . class ) , cz . habarta . typescript . generator . Output . to ( actualFile ) ) ; final cz . habarta . typescript . generator . List < java . lang . String > actualLines = java . nio . file . Files . readAllLines ( actualFile . toPath ( ) , StandardCharsets . UTF_8 ) ; final cz . habarta . typescript . generator . List < java . lang . String > expectedLines = cz . habarta . typescript . generator . util . Utils . readLines ( getClass ( ) . getResourceAsStream ( "JsonDeserializationTest-expected.ts" ) ) ; int contentLines = 0 ; int foundLines = 0 ; final cz . habarta . typescript . generator . List < java . lang . String > notFoundLines = new cz . habarta . typescript . generator . ArrayList ( ) ; for ( java . lang . String expectedLine : expectedLines ) { if ( ( ! ( expectedLine . isEmpty ( ) ) ) || ( ! ( expectedLine . trim ( ) . equals ( "}" ) ) ) ) { contentLines ++ ; if ( actualLines . contains ( expectedLine ) ) { foundLines ++ ; } else { notFoundLines . add ( expectedLine ) ; } } } System . out . println ( java . lang . String . format ( "Number<sp>of<sp>correctly<sp>generated<sp>content<sp>lines:<sp>%d/%d<sp>(%d%%)." , foundLines , contentLines , ( ( 100 * foundLines ) / contentLines ) ) ) ; System . out . println ( "Following<sp>lines<sp>were<sp>not<sp>generated:" ) ; for ( java . lang . String notFoundLine : notFoundLines ) { System . out . println ( notFoundLine ) ; } "<AssertPlaceHolder>" ; } equals ( java . lang . Object ) { if ( ( this ) == obj ) { return true ; } if ( obj == null ) { return false ; } if ( ( getClass ( ) ) != ( obj . getClass ( ) ) ) { return false ; } final cz . habarta . typescript . generator . OptionalTest . Person other = ( ( cz . habarta . typescript . generator . OptionalTest . Person ) ( obj ) ) ; if ( ! ( java . util . Objects . equals ( this . name , other . name ) ) ) { return false ; } if ( ! ( java . util . Objects . equals ( this . email , other . email ) ) ) { return false ; } return true ; }
org . junit . Assert . assertEquals ( 0 , notFoundLines . size ( ) )
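A de-tokenized sketch of the comparison loop this record's target assertion completes, assuming only JUnit 4 and java.util; the two hard-coded line lists stand in for the generated and expected TypeScript files, so the values are illustrative:

    List<String> actualLines = Arrays.asList("class User {", "}");
    List<String> expectedLines = Arrays.asList("class User {", "}");
    List<String> notFoundLines = new ArrayList<>();
    for (String expectedLine : expectedLines) {
        // collect every expected line that is missing from the generated output
        if (!actualLines.contains(expectedLine)) {
            notFoundLines.add(expectedLine);
        }
    }
    // the record's output assertion: no expected line was left ungenerated
    org.junit.Assert.assertEquals(0, notFoundLines.size());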
test_search_with_moduleNames_success ( ) { java . nio . file . Path path = createTempDir ( ) ; org . apache . solr . core . SolrResourceLoader loader = new org . apache . solr . core . SolrResourceLoader ( path ) ; org . apache . solr . core . NodeConfig config = new org . apache . solr . core . NodeConfig . NodeConfigBuilder ( "testnode" , loader ) . setConfigSetBaseDirectory ( java . nio . file . Paths . get ( org . ikasan . wiretap . service . SolrWiretapServiceTest . TEST_HOME ( ) ) . resolve ( "configsets" ) . toString ( ) ) . build ( ) ; try ( org . apache . solr . client . solrj . embedded . EmbeddedSolrServer server = new org . apache . solr . client . solrj . embedded . EmbeddedSolrServer ( config , "ikasan" ) ) { org . apache . solr . client . solrj . request . CoreAdminRequest . Create createRequest = new org . apache . solr . client . solrj . request . CoreAdminRequest . Create ( ) ; createRequest . setCoreName ( "ikasan" ) ; createRequest . setConfigSet ( "minimal" ) ; server . request ( createRequest ) ; java . util . HashMap < java . lang . String , java . lang . Object > fields = new java . util . HashMap ( ) ; fields . put ( "testnode" 1 , new java . lang . Integer ( 1 ) ) ; org . apache . solr . client . solrj . request . schema . SchemaRequest . AddField schemaRequest = new org . apache . solr . client . solrj . request . schema . SchemaRequest . AddField ( fields ) ; server . request ( schemaRequest ) ; org . ikasan . wiretap . dao . SolrWiretapDao solrCloudBase = new org . ikasan . wiretap . dao . SolrWiretapDao ( ) ; solrCloudBase . setSolrClient ( server ) ; solrCloudBase . setDaysToKeep ( 0 ) ; org . ikasan . wiretap . model . SolrWiretapEvent event = new org . ikasan . wiretap . model . SolrWiretapEvent ( 1L , "moduleName" , "flowName" , "componentName" , "eventId" , "relatedEventId" , java . lang . System . currentTimeMillis ( ) , "event" ) ; solrCloudBase . save ( event ) ; event = new org . ikasan . wiretap . model . SolrWiretapEvent ( 2L , "moduleName" , "flowName" , "componentName" , "eventId" , "relatedEventId" , java . lang . System . currentTimeMillis ( ) , "event" ) ; solrCloudBase . save ( event ) ; event = new org . ikasan . wiretap . model . SolrWiretapEvent ( 3L , "moduleName" , "flowName" , "componentName" , "eventId" , "relatedEventId" , java . lang . System . currentTimeMillis ( ) , "event" ) ; solrCloudBase . save ( event ) ; event = new org . ikasan . wiretap . model . SolrWiretapEvent ( 4L , "moduleName" , "flowName" , "componentName" , "eventId" , "relatedEventId" , java . lang . System . currentTimeMillis ( ) , "event" ) ; solrCloudBase . save ( event ) ; event = new org . ikasan . wiretap . model . SolrWiretapEvent ( 5L , "moduleName" , "flowName" , "componentName" , "eventId" , "relatedEventId" , java . lang . System . currentTimeMillis ( ) , "event" ) ; solrCloudBase . save ( event ) ; java . util . HashSet < java . lang . String > moduleNames = new java . util . HashSet < java . lang . String > ( ) ; moduleNames . add ( "moduleName" ) ; java . lang . String flowName = null ; org . ikasan . wiretap . service . SolrWiretapServiceImpl wiretapService = new org . ikasan . wiretap . service . SolrWiretapServiceImpl ( solrCloudBase , moduleService ) ; org . ikasan . spec . search . PagedSearchResult < org . ikasan . spec . wiretap . WiretapEvent > results = wiretapService . findWiretapEvents ( 0 , 10 , "testnode" 2 , true , moduleNames , flowName , null , null , null , new java . util . Date ( ( ( java . lang . System . 
currentTimeMillis ( ) ) - 100000000 ) ) , new java . util . Date ( ( ( java . lang . System . currentTimeMillis ( ) ) + 100000000 ) ) , "testnode" 0 ) ; "<AssertPlaceHolder>" ; server . close ( ) ; } } getResultSize ( ) { return resultSize ; }
org . junit . Assert . assertEquals ( "testnode" 3 , results . getResultSize ( ) , 10 )
testOne ( ) { org . streaminer . stream . cardinality . HyperLogLogPlus one = new org . streaminer . stream . cardinality . HyperLogLogPlus ( 8 , 25 ) ; one . offer ( "a" ) ; "<AssertPlaceHolder>" ; } cardinality ( ) { org . streaminer . stream . cardinality . HashMap < java . lang . Integer , java . lang . Integer > results = new org . streaminer . stream . cardinality . HashMap < java . lang . Integer , java . lang . Integer > ( ) ; for ( int i = 0 ; i < ( numMedians ) ; i ++ ) { int currentGuess = ( ( int ) ( ( buffers . get ( i ) . size ( ) ) * ( java . lang . Math . pow ( 2 , limits . get ( i ) ) ) ) ) ; if ( ! ( results . containsKey ( currentGuess ) ) ) { results . put ( currentGuess , 1 ) ; } else { int currentCount = results . get ( currentGuess ) ; results . put ( currentGuess , ( currentCount + 1 ) ) ; } } int finalEstimate = 0 ; int highestVote = 0 ; for ( Map . Entry < java . lang . Integer , java . lang . Integer > pair : results . entrySet ( ) ) { int possibleAnswer = pair . getValue ( ) ; if ( possibleAnswer > highestVote ) { highestVote = possibleAnswer ; finalEstimate = pair . getKey ( ) ; } } return finalEstimate ; }
org . junit . Assert . assertEquals ( 1 , one . cardinality ( ) )
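De-tokenized, the whole record is a three-line test; the sketch assumes the streaminer library (org.streaminer.stream.cardinality.HyperLogLogPlus) and JUnit 4 on the classpath:

    // a single distinct element is offered, so the estimator should report a cardinality of 1
    HyperLogLogPlus one = new HyperLogLogPlus(8, 25);
    one.offer("a");
    org.junit.Assert.assertEquals(1, one.cardinality());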
testToEdit ( ) { com . horstmann . violet . product . diagram . property . text . decorator . OneLineText oneLineString = new com . horstmann . violet . product . diagram . property . text . decorator . OneLineText ( "<b><<test>></b>" ) ; "<AssertPlaceHolder>" ; } toEdit ( ) { return getOneLineString ( ) . toEdit ( ) ; }
org . junit . Assert . assertEquals ( "<b><<test>></b>" , oneLineString . toEdit ( ) )
testConfigureVRRPIPv6WrongParams ( ) { org . opennaas . extensions . router . capability . vrrp . IVRRPCapability vrrpCapability = ( ( org . opennaas . extensions . router . capability . vrrp . IVRRPCapability ) ( routerResource . getCapability ( org . opennaas . itests . helpers . InitializerTestHelper . getCapabilityInformation ( TestsConstants . VRRP_CAPABILITY_TYPE ) ) ) ) ; vrrpCapability . configureVRRP ( ( ( org . opennaas . extensions . router . model . VRRPProtocolEndpoint ) ( org . opennaas . itests . router . helpers . ParamCreationHelper . newParamsVRRPGroupWithOneEndpointIPv6 ( "fecd:123:a1::4" , "f8:34::12" , "fe-1/0/1" , "fecd:123:a1::5/64" ) . getProtocolEndpoint ( ) . get ( 0 ) ) ) ) ; org . opennaas . extensions . queuemanager . IQueueManagerCapability queueCapability = ( ( org . opennaas . extensions . queuemanager . IQueueManagerCapability ) ( routerResource . getCapability ( org . opennaas . itests . helpers . InitializerTestHelper . getCapabilityInformation ( TestsConstants . QUEUE_CAPABILIY_TYPE ) ) ) ) ; org . opennaas . core . resources . queue . QueueResponse queueResponse = ( ( org . opennaas . core . resources . queue . QueueResponse ) ( queueCapability . execute ( ) ) ) ; "<AssertPlaceHolder>" ; stopResource ( ) ; } isOk ( ) { for ( org . opennaas . core . resources . action . ActionResponse action : responses ) { if ( ( action . getStatus ( ) ) == ( org . opennaas . core . resources . action . ActionResponse . STATUS . ERROR ) ) return false ; } return ( getConfirmResponse ( ) . getStatus ( ) . equals ( STATUS . OK ) ) && ( getRefreshResponse ( ) . getStatus ( ) . equals ( STATUS . OK ) ) ; }
org . junit . Assert . assertTrue ( queueResponse . isOk ( ) )
readOverFile ( ) { io . datakernel . bytebuf . ByteBuf byteBuf = await ( io . datakernel . csp . file . ChannelFileReader . readFile ( java . util . concurrent . Executors . newSingleThreadExecutor ( ) , java . nio . file . Paths . get ( "test_data/in.dat" ) ) . withOffset ( ( ( java . nio . file . Files . size ( java . nio . file . Paths . get ( "test_data/in.dat" ) ) ) + 100 ) ) . withBufferSize ( io . datakernel . util . MemSize . of ( 1 ) ) . toCollector ( io . datakernel . bytebuf . ByteBufQueue . collector ( ) ) ) ; "<AssertPlaceHolder>" ; } asString ( java . nio . charset . Charset ) { java . lang . String string = getString ( charset ) ; recycle ( ) ; return string ; }
org . junit . Assert . assertEquals ( "" , byteBuf . asString ( io . datakernel . csp . file . UTF_8 ) )
testReferencesS_V ( ) { com . squareup . javapoet . TypeVariableName s = com . squareup . javapoet . TypeVariableName . get ( "S" , java . lang . String . class ) ; com . squareup . javapoet . TypeVariableName v = com . squareup . javapoet . TypeVariableName . get ( "V" , s ) ; "<AssertPlaceHolder>" ; } references ( com . squareup . javapoet . TypeName , com . squareup . javapoet . TypeVariableName ) { if ( ! ( net . zerobuilder . compiler . generate . ZeroUtil . maybeTypevars ( type ) ) ) { return false ; } if ( ( type instanceof com . squareup . javapoet . TypeVariableName ) && ( ( ( com . squareup . javapoet . TypeVariableName ) ( type ) ) . bounds . isEmpty ( ) ) ) { return type . equals ( test ) ; } net . zerobuilder . compiler . generate . ZeroUtil . TypeWalk walk = new net . zerobuilder . compiler . generate . ZeroUtil . TypeWalk ( type ) ; while ( walk . hasNext ( ) ) { if ( walk . next ( ) . equals ( test ) ) { return true ; } } return false ; }
org . junit . Assert . assertTrue ( net . zerobuilder . compiler . generate . ZeroUtil . references ( v , s ) )
testUpdateRoute ( ) { route1 . setTag ( Route . TAG_CONSOLE ) ; service . updateRoute ( route1 ) ; com . ctrip . xpipe . redis . console . model . RouteModel route = null ; for ( com . ctrip . xpipe . redis . console . model . RouteModel mode : service . getAllRoutes ( ) ) { if ( ( mode . getId ( ) ) == ( route1 . getId ( ) ) ) { route = mode ; break ; } } "<AssertPlaceHolder>" ; } getTag ( ) { return tag ; }
org . junit . Assert . assertEquals ( Route . TAG_CONSOLE , route . getTag ( ) )
testUnixTimestampSeconds_deserialize ( ) { com . google . gson . Gson gson = new io . gsonfire . GsonFireBuilder ( ) . dateSerializationPolicy ( DateSerializationPolicy . unixTimeSeconds ) . createGson ( ) ; long timeNoMillis = ( java . lang . System . currentTimeMillis ( ) ) / 1000L ; com . google . gson . JsonElement element = new com . google . gson . JsonPrimitive ( timeNoMillis ) ; java . util . Date parsedDate = gson . fromJson ( element , java . util . Date . class ) ; "<AssertPlaceHolder>" ; } createGson ( ) { return createGsonBuilder ( ) . create ( ) ; }
org . junit . Assert . assertEquals ( ( timeNoMillis * 1000 ) , parsedDate . getTime ( ) )
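A de-tokenized sketch of this record, assuming gson-fire and Gson on the classpath. The assertion encodes a units conversion: the policy parses seconds, while Date.getTime() reports milliseconds:

    Gson gson = new GsonFireBuilder()
            .dateSerializationPolicy(DateSerializationPolicy.unixTimeSeconds)
            .createGson();
    long timeNoMillis = System.currentTimeMillis() / 1000L;   // seconds, no millisecond part
    Date parsedDate = gson.fromJson(new JsonPrimitive(timeNoMillis), Date.class);
    // getTime() is in milliseconds, so the parsed value is the seconds value scaled by 1000
    org.junit.Assert.assertEquals(timeNoMillis * 1000, parsedDate.getTime());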
non_recursive ( ) { parse ( "-r<sp>foo" ) ; "<AssertPlaceHolder>" ; } isReturningReferencedObjects ( ) { return ! ( ( ( ( ( queryParser . hasOption ( QueryFlag . NO_REFERENCED ) ) || ( isShortHand ( ) ) ) || ( isKeysOnly ( ) ) ) || ( isResource ( ) ) ) || ( isBriefAbuseContact ( ) ) ) ; }
org . junit . Assert . assertFalse ( subject . isReturningReferencedObjects ( ) )
testSerde ( ) { org . apache . druid . discovery . DruidService expected = new org . apache . druid . discovery . LookupNodeService ( "tier" ) ; com . fasterxml . jackson . databind . ObjectMapper mapper = org . apache . druid . segment . TestHelper . makeJsonMapper ( ) ; org . apache . druid . discovery . DruidService actual = mapper . readValue ( mapper . writeValueAsString ( expected ) , org . apache . druid . discovery . DruidService . class ) ; "<AssertPlaceHolder>" ; } makeJsonMapper ( ) { final com . fasterxml . jackson . databind . ObjectMapper mapper = new org . apache . druid . jackson . DefaultObjectMapper ( ) ; mapper . setInjectableValues ( new com . fasterxml . jackson . databind . InjectableValues . Std ( ) . addValue ( org . apache . druid . math . expr . ExprMacroTable . class . getName ( ) , TestExprMacroTable . INSTANCE ) . addValue ( com . fasterxml . jackson . databind . ObjectMapper . class . getName ( ) , mapper ) . addValue ( DataSegment . PruneLoadSpecHolder . class , DataSegment . PruneLoadSpecHolder . DEFAULT ) ) ; return mapper ; }
org . junit . Assert . assertEquals ( expected , actual )
testAtan2_2 ( ) { org . nd4j . linalg . api . ndarray . INDArray x = org . nd4j . linalg . factory . Nd4j . create ( 10 ) . assign ( 1.0 ) ; org . nd4j . linalg . api . ndarray . INDArray y = org . nd4j . linalg . factory . Nd4j . create ( 10 ) . assign ( 0.0 ) ; org . nd4j . linalg . api . ndarray . INDArray exp = org . nd4j . linalg . factory . Nd4j . create ( 10 ) . assign ( 0.0 ) ; org . nd4j . linalg . api . ndarray . INDArray z = org . nd4j . linalg . ops . transforms . Transforms . atan2 ( x , y ) ; "<AssertPlaceHolder>" ; } atan2 ( org . nd4j . linalg . api . ndarray . INDArray , org . nd4j . linalg . api . ndarray . INDArray ) { return org . nd4j . linalg . factory . Nd4j . getExecutioner ( ) . exec ( new org . nd4j . linalg . api . ops . impl . transforms . pairwise . arithmetic . OldAtan2Op ( x , y , x . ulike ( ) ) ) ; }
org . junit . Assert . assertEquals ( exp , z )
baseTestFails ( ) { io . grpc . testing . GrpcCleanupRule . Resource resource = mock ( io . grpc . testing . GrpcCleanupRule . Resource . class ) ; org . junit . runners . model . Statement statement = mock ( org . junit . runners . model . Statement . class ) ; doThrow ( new java . lang . Exception ( ) ) . when ( statement ) . evaluate ( ) ; io . grpc . testing . GrpcCleanupRule grpcCleanup = new io . grpc . testing . GrpcCleanupRule ( ) ; grpcCleanup . register ( resource ) ; boolean baseTestFailed = false ; try { grpcCleanup . apply ( statement , null ) . evaluate ( ) ; } catch ( java . lang . Exception e ) { baseTestFailed = true ; } "<AssertPlaceHolder>" ; verify ( resource ) . forceCleanUp ( ) ; verifyNoMoreInteractions ( resource ) ; verify ( resource , never ( ) ) . cleanUp ( ) ; verify ( resource , never ( ) ) . awaitReleased ( anyLong ( ) , any ( java . util . concurrent . TimeUnit . class ) ) ; } evaluate ( ) { channel = grpcServerRule . getChannel ( ) ; server = grpcServerRule . getServer ( ) ; }
org . junit . Assert . assertTrue ( baseTestFailed )
testSetBigDecimal ( ) { org . gradoop . common . model . impl . properties . PropertyValue p = new org . gradoop . common . model . impl . properties . PropertyValue ( ) ; p . setBigDecimal ( org . gradoop . common . model . impl . properties . BIG_DECIMAL_VAL_7 ) ; "<AssertPlaceHolder>" ; } getBigDecimal ( ) { return get ( java . math . BigDecimal . class ) ; }
org . junit . Assert . assertEquals ( org . gradoop . common . model . impl . properties . BIG_DECIMAL_VAL_7 , p . getBigDecimal ( ) )
testAttachment ( ) { final java . lang . String testContent = "This<sp>is<sp>a<sp>test" ; final com . xpn . xwiki . XWikiContext xc = ( ( com . xpn . xwiki . XWikiContext ) ( com . xpn . xwiki . web . Utils . getComponent ( org . xwiki . context . Execution . class ) . getContext ( ) . getProperty ( "xwikicontext" ) ) ) ; final org . xwiki . model . reference . DocumentReference ref = new org . xwiki . model . reference . DocumentReference ( "xwiki" , "XWiki" , "XWikiDocument" ) ; final com . xpn . xwiki . doc . XWikiDocument testDoc = new com . xpn . xwiki . doc . XWikiDocument ( ref ) ; testDoc . addAttachment ( "file.txt" , testContent . getBytes ( "UTF-8" ) , xc ) ; this . store . saveXWikiDoc ( testDoc , null ) ; final com . xpn . xwiki . doc . XWikiDocument testDoc2 = new com . xpn . xwiki . doc . XWikiDocument ( ref ) ; this . store . loadXWikiDoc ( testDoc2 , null ) ; final com . xpn . xwiki . doc . XWikiAttachment attach = testDoc2 . getAttachment ( "file.txt" ) ; byte [ ] content = attach . getContent ( xc ) ; "<AssertPlaceHolder>" ; } loadXWikiDoc ( com . xpn . xwiki . doc . XWikiDocument , com . xpn . xwiki . XWikiContext ) { final java . lang . String key = org . xwiki . store . legacy . internal . datanucleus . PersistableXWikiDocument . keyGen ( doc ) ; final java . util . List < org . xwiki . store . objects . PersistableObject > out = new java . util . ArrayList < org . xwiki . store . objects . PersistableObject > ( 1 ) ; final org . xwiki . store . StartableTransactionRunnable < javax . jdo . PersistenceManager > transaction = this . provider . get ( ) ; this . objStore . getLoadTransactionRunnable ( new java . util . ArrayList < java . lang . String > ( 1 ) { { add ( key ) ; } } , org . xwiki . store . legacy . internal . datanucleus . PersistableXWikiDocument . class . getName ( ) , out ) . runIn ( transaction ) ; try { transaction . start ( ) ; } catch ( org . xwiki . store . TransactionException e ) { throw new java . lang . RuntimeException ( ( "Failed<sp>to<sp>load<sp>document<sp>" + ( java . util . Arrays . asList ( key ) ) ) , e ) ; } return ( out . size ( ) ) == 0 ? doc : ( ( org . xwiki . store . legacy . internal . datanucleus . PersistableXWikiDocument ) ( out . get ( 0 ) ) ) . toXWikiDocument ( doc ) ; }
org . junit . Assert . assertTrue ( new java . lang . String ( content , "UTF-8" ) . equals ( testContent ) )
testName ( ) { org . apache . cayenne . map . DataMap map = new org . apache . cayenne . map . DataMap ( ) ; java . lang . String tstName = "tst_name" ; map . setName ( tstName ) ; "<AssertPlaceHolder>" ; } getName ( ) { beforePropertyRead ( "name" ) ; return this . name ; }
org . junit . Assert . assertEquals ( tstName , map . getName ( ) )
whatHappensWhenASuiteContainsItselfIndirectly ( ) { org . junit . runner . Result result = org . junit . runner . JUnitCore . runClasses ( org . junit . tests . running . classes . SuiteTest . Hydra . class ) ; "<AssertPlaceHolder>" ; } getFailureCount ( ) { return failures . size ( ) ; }
org . junit . Assert . assertEquals ( 2 , result . getFailureCount ( ) )
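De-tokenized, the record boils down to running the indirectly self-containing suite and counting failures; it assumes JUnit 4's own test sources (SuiteTest.Hydra) are available:

    Result result = JUnitCore.runClasses(SuiteTest.Hydra.class);
    // the suite that contains itself indirectly is expected to produce exactly two failures
    org.junit.Assert.assertEquals(2, result.getFailureCount());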
test07_01 ( ) { java . lang . String t1 = com . github . eulerlcs . regularexpression . Utils . readAllFromResouce ( "07.txt" ) ; System . out . println ( "--orignal<sp>text<sp>start--" ) ; System . out . print ( t1 ) ; System . out . println ( "--orignal<sp>text<sp>end<sp>--" ) ; java . lang . String ret1 = t1 . replaceAll ( "(\r\n)|\r" , "\n" ) ; System . out . println ( "--<sp>start--" ) ; System . out . print ( ret1 ) ; System . out . println ( "--orignal<sp>text<sp>end<sp>--" 0 ) ; java . lang . String ret2 = ret1 . replaceAll ( "(?m)^\\s*(.*?)\\s*$" , "$1" ) ; System . out . println ( "--trim<sp>start--" ) ; System . out . println ( ret2 ) ; System . out . println ( "--trim<sp>end<sp>--" ) ; "<AssertPlaceHolder>" ; } equals ( java . lang . Object ) { if ( ! ( obj instanceof org . v3 . runner . Description ) ) return false ; org . v3 . runner . Description d = ( ( org . v3 . runner . Description ) ( obj ) ) ; return ( getDisplayName ( ) . equals ( d . getDisplayName ( ) ) ) && ( getChildren ( ) . equals ( d . getChildren ( ) ) ) ; }
org . junit . Assert . assertFalse ( ret2 . equals ( t1 ) )
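The two replaceAll calls in this record are easier to follow on a small literal input; a self-contained sketch assuming only java.lang and JUnit 4:

    String original = "  a  \r\n  b  \r";
    // first pass normalizes CRLF and lone CR line endings to LF
    String normalized = original.replaceAll("(\r\n)|\r", "\n");
    // second pass trims leading/trailing whitespace on every line ((?m) turns on MULTILINE)
    String trimmed = normalized.replaceAll("(?m)^\\s*(.*?)\\s*$", "$1");
    // mirrors the record's check: the cleaned text no longer equals the original
    org.junit.Assert.assertFalse(trimmed.equals(original));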
testOauthWithNoFlows ( ) { org . eclipse . microprofile . openapi . models . security . SecurityScheme . Type type = org . eclipse . microprofile . openapi . models . security . SecurityScheme . Type . OAUTH2 ; com . ibm . ws . microprofile . openapi . impl . model . security . SecuritySchemeImpl noFlows = new com . ibm . ws . microprofile . openapi . impl . model . security . SecuritySchemeImpl ( ) ; noFlows . setType ( type ) ; com . ibm . ws . microprofile . openapi . test . utils . TestValidationHelper vh = new com . ibm . ws . microprofile . openapi . test . utils . TestValidationHelper ( ) ; com . ibm . ws . microprofile . openapi . impl . validation . SecuritySchemeValidator validator = com . ibm . ws . microprofile . openapi . impl . validation . SecuritySchemeValidator . getInstance ( ) ; validator . validate ( vh , context , key , noFlows ) ; "<AssertPlaceHolder>" ; } getEventsSize ( ) { return result . getEvents ( ) . size ( ) ; }
org . junit . Assert . assertEquals ( 1 , vh . getEventsSize ( ) )
test1 ( ) { com . creactiviti . piper . core . error . ErrorHandler errorHandler = new com . creactiviti . piper . core . error . ErrorHandler < com . creactiviti . piper . core . job . Job > ( ) { public void handle ( com . creactiviti . piper . core . job . Job j ) { "<AssertPlaceHolder>" ; } } ; com . creactiviti . piper . core . error . ErrorHandlerChain chain = new com . creactiviti . piper . core . error . ErrorHandlerChain ( java . util . Arrays . asList ( errorHandler ) ) ; chain . handle ( new com . creactiviti . piper . core . job . SimpleJob ( ) ) ; } handle ( com . creactiviti . piper . core . task . TaskExecution ) { org . junit . Assert . assertEquals ( com . creactiviti . piper . core . task . SimpleTaskExecution . class , jt . getClass ( ) ) ; }
org . junit . Assert . assertEquals ( com . creactiviti . piper . core . job . SimpleJob . class , j . getClass ( ) )
workingDirectoryShouldBeDirectory ( ) { builder . setDirectory ( "path" ) ; net . sourceforge . cruisecontrol . util . Commandline cmdLine = builder . buildCommandline ( ) ; "<AssertPlaceHolder>" ; } getWorkingDirectory ( ) { return workingDir ; }
org . junit . Assert . assertEquals ( "path" , cmdLine . getWorkingDirectory ( ) . getPath ( ) )
allTradesTest ( ) { com . precioustech . fxtrading . trade . TradeInfoService < java . lang . Long , java . lang . String , java . lang . Long > service = createService ( ) ; java . util . Collection < com . precioustech . fxtrading . trade . Trade < java . lang . Long , java . lang . String , java . lang . Long > > allTrades = service . getAllTrades ( ) ; "<AssertPlaceHolder>" ; } getAllTrades ( ) { lock . readLock ( ) . lock ( ) ; try { java . util . Collection < com . precioustech . fxtrading . trade . Trade < M , N , K > > trades = com . google . common . collect . Lists . newArrayList ( ) ; for ( K accId : this . tradesCache . keySet ( ) ) { trades . addAll ( getTradesForAccount ( accId ) ) ; } return trades ; } finally { lock . readLock ( ) . unlock ( ) ; } }
org . junit . Assert . assertEquals ( 7 , allTrades . size ( ) )
testConcurrentWrites ( ) { int errCode = org . apache . hadoop . hbase . wal . WALPerformanceEvaluation . innerMain ( new org . apache . hadoop . conf . Configuration ( org . apache . hadoop . hbase . wal . TestBoundedRegionGroupingStrategy . CONF ) , new java . lang . String [ ] { "-threads" , "3" , "-verify" , "-noclosefs" , "-iterations" , "3000" } ) ; "<AssertPlaceHolder>" ; }
org . junit . Assert . assertEquals ( 0 , errCode )
testClone ( ) { org . eclipse . collections . api . set . MutableSet < java . lang . String > set = this . newWith ( ) ; org . eclipse . collections . api . set . MutableSet < java . lang . String > clone = set . clone ( ) ; "<AssertPlaceHolder>" ; } clone ( ) { synchronized ( this . getLock ( ) ) { return org . eclipse . collections . impl . set . mutable . SynchronizedMutableSet . of ( this . getMutableSet ( ) . clone ( ) ) ; } }
org . junit . Assert . assertSame ( clone , set )
shouldReadInt ( ) { int i = 200 ; byte [ ] bytes = new byte [ ] { ( ( byte ) ( i ) ) } ; java . io . ByteArrayInputStream inputStream = new java . io . ByteArrayInputStream ( bytes ) ; int readInt = de . slackspace . openkeepass . util . ByteUtils . readInt ( inputStream ) ; "<AssertPlaceHolder>" ; } readInt ( java . io . InputStream ) { byte [ ] bytesToRead = new byte [ 4 ] ; int readBytes = inputStream . read ( bytesToRead ) ; if ( readBytes == ( - 1 ) ) { return - 1 ; } java . nio . ByteBuffer buffer = java . nio . ByteBuffer . wrap ( bytesToRead ) ; buffer . order ( ByteOrder . LITTLE_ENDIAN ) ; return buffer . getInt ( ) ; }
org . junit . Assert . assertEquals ( 200 , readInt )
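The expected value 200 follows from the little-endian decode inside readInt; a self-contained java.nio sketch of that step, assuming JUnit 4 (the single input byte 0xC8 lands in the low-order position and the three unread buffer bytes stay zero):

    byte[] bytesToRead = new byte[4];
    bytesToRead[0] = (byte) 200;              // 0xC8; the remaining bytes stay 0x00
    ByteBuffer buffer = ByteBuffer.wrap(bytesToRead);
    buffer.order(ByteOrder.LITTLE_ENDIAN);
    // 0x000000C8 == 200 when the first byte is treated as the least significant one
    org.junit.Assert.assertEquals(200, buffer.getInt());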
testGetDBEntries ( ) { try { java . util . List < com . mongodb . BasicDBObject > dbObjects = connectionSpout . getDBEntries ( ) ; "<AssertPlaceHolder>" ; } catch ( java . lang . Exception e ) { org . junit . Assert . fail ( "Did<sp>not<sp>catch<sp>unexpected<sp>exception." ) ; } } getDBEntries ( ) { try { java . util . Date oneHourAfterLastProcessed = null ; long lastProcessedEvent = com . streamreduce . storm . spouts . EventSpout . mongoClient . readLastProcessedEventDate ( "EventSpout" ) ; if ( lastProcessedEvent != ( - 1 ) ) { lastProcessedEventDate = new java . util . Date ( lastProcessedEvent ) ; } com . streamreduce . storm . spouts . EventSpout . logger . info ( "Determine<sp>last<sp>processed<sp>event<sp>date...." ) ; if ( ( lastProcessedEventDate ) != null ) { oneHourAfterLastProcessed = new java . util . Date ( ( ( lastProcessedEventDate . getTime ( ) ) + ( TimeUnit . HOURS . toMillis ( 1 ) ) ) ) ; com . streamreduce . storm . spouts . EventSpout . logger . info ( ( ( ( "Retrieving<sp>all<sp>events<sp>between<sp>" + ( lastProcessedEventDate ) ) + "<sp>and<sp>" ) + oneHourAfterLastProcessed ) ) ; } com . streamreduce . storm . spouts . EventSpout . logger . debug ( ( "Mongo<sp>geEvents<sp>Query<sp>Time,<sp>Begin:" + ( java . lang . System . currentTimeMillis ( ) ) ) ) ; java . util . List < com . mongodb . BasicDBObject > events = com . streamreduce . storm . spouts . EventSpout . mongoClient . getEvents ( lastProcessedEventDate , oneHourAfterLastProcessed ) ; com . streamreduce . storm . spouts . EventSpout . logger . debug ( ( "Mongo<sp>geEvents<sp>Query<sp>Time,<sp>End:" + ( java . lang . System . currentTimeMillis ( ) ) ) ) ; com . streamreduce . storm . spouts . EventSpout . logger . info ( ( "Number<sp>of<sp>events<sp>to<sp>be<sp>emitted:<sp>" + ( events . size ( ) ) ) ) ; persistLastProcessedEventDate ( oneHourAfterLastProcessed , events ) ; return events ; } catch ( java . lang . Exception e ) { com . streamreduce . storm . spouts . EventSpout . logger . error ( "0<sp>events<sp>emmitted<sp>due<sp>to<sp>failure<sp>in<sp>getDBEntries" , e ) ; return new java . util . ArrayList ( ) ; } }
org . junit . Assert . assertEquals ( 0 , dbObjects . size ( ) )
deveObterDataHoraRecebimentoComoFoiSetado ( ) { final com . fincatto . documentofiscal . nfe400 . classes . evento . cancelamento . NFRetornoInfoCancelamento retornoInfoCancelamento = new com . fincatto . documentofiscal . nfe400 . classes . evento . cancelamento . NFRetornoInfoCancelamento ( ) ; final java . time . LocalDateTime dataHoraRecebimento = java . time . LocalDateTime . from ( java . time . format . DateTimeFormatter . ofPattern ( "yyyy-MM-dd<sp>HH:mm:ss" ) . parse ( "2010-10-10<sp>10:10:10" ) ) ; retornoInfoCancelamento . setDatahoraRecebimento ( dataHoraRecebimento ) ; "<AssertPlaceHolder>" ; } getDatahoraRecebimento ( ) { return this . datahoraRecebimento ; }
org . junit . Assert . assertEquals ( dataHoraRecebimento , retornoInfoCancelamento . getDatahoraRecebimento ( ) )
testGetMaskNone ( ) { org . sagebionetworks . repo . model . table . QueryOptions options = new org . sagebionetworks . repo . model . table . QueryOptions ( ) ; long mask = options . getPartMask ( ) ; "<AssertPlaceHolder>" ; } getPartMask ( ) { long partMask = 0 ; if ( this . runQuery ) { partMask = partMask | ( org . sagebionetworks . repo . model . table . QueryOptions . BUNDLE_MASK_QUERY_RESULTS ) ; } if ( this . runCount ) { partMask = partMask | ( org . sagebionetworks . repo . model . table . QueryOptions . BUNDLE_MASK_QUERY_COUNT ) ; } if ( this . returnSelectColumns ) { partMask = partMask | ( org . sagebionetworks . repo . model . table . QueryOptions . BUNDLE_MASK_QUERY_SELECT_COLUMNS ) ; } if ( this . returnMaxRowsPerPage ) { partMask = partMask | ( org . sagebionetworks . repo . model . table . QueryOptions . BUNDLE_MASK_QUERY_MAX_ROWS_PER_PAGE ) ; } if ( this . returnColumnModels ) { partMask = partMask | ( org . sagebionetworks . repo . model . table . QueryOptions . BUNDLE_MASK_QUERY_COLUMN_MODELS ) ; } if ( this . returnFacets ) { partMask = partMask | ( org . sagebionetworks . repo . model . table . QueryOptions . BUNDLE_MASK_QUERY_FACETS ) ; } if ( this . runSumFileSizes ) { partMask = partMask | ( org . sagebionetworks . repo . model . table . QueryOptions . BUNDLE_MASK_SUM_FILE_SIZES ) ; } return partMask ; }
org . junit . Assert . assertEquals ( 0L , mask )
testFactoryWithWrongType ( ) { copyShapefiles ( "shapes/archsites.shp" ) ; java . io . File f = copyShapefiles ( "shapes/bugsites.shp" ) ; tempDir = f . getParentFile ( ) ; java . util . Map params = new java . util . HashMap ( ) ; params . put ( ShapefileDataStoreFactory . URLP . key , org . geotools . util . URLs . fileToUrl ( tempDir ) ) ; params . put ( ShapefileDataStoreFactory . FILE_TYPE . key , "abcdef..." ) ; org . geotools . data . DataStore store = org . geotools . data . DataStoreFinder . getDataStore ( params ) ; "<AssertPlaceHolder>" ; } getDataStore ( java . util . Map ) { java . util . Iterator < org . geotools . data . DataStoreFactorySpi > ps = org . geotools . data . DataStoreFinder . getAvailableDataStores ( ) ; org . geotools . data . DataAccess < ? extends org . opengis . feature . type . FeatureType , ? extends org . opengis . feature . Feature > dataStore ; dataStore = org . geotools . data . DataAccessFinder . getDataStore ( params , ps ) ; return ( ( org . geotools . data . DataStore ) ( dataStore ) ) ; }
org . junit . Assert . assertNull ( store )
getRequestMetadata_blocking_cached ( ) { com . google . auth . TestClock testClock = new com . google . auth . TestClock ( ) ; java . security . PrivateKey privateKey = com . google . auth . oauth2 . ServiceAccountCredentials . privateKeyFromPkcs8 ( com . google . auth . oauth2 . ServiceAccountJwtAccessCredentialsTest . SA_PRIVATE_KEY_PKCS8 ) ; com . google . auth . oauth2 . ServiceAccountJwtAccessCredentials credentials = com . google . auth . oauth2 . ServiceAccountJwtAccessCredentials . newBuilder ( ) . setClientId ( com . google . auth . oauth2 . ServiceAccountJwtAccessCredentialsTest . SA_CLIENT_ID ) . setClientEmail ( com . google . auth . oauth2 . ServiceAccountJwtAccessCredentialsTest . SA_CLIENT_EMAIL ) . setPrivateKey ( privateKey ) . setPrivateKeyId ( com . google . auth . oauth2 . ServiceAccountJwtAccessCredentialsTest . SA_PRIVATE_KEY_ID ) . build ( ) ; credentials . clock = testClock ; java . util . Map < java . lang . String , java . util . List < java . lang . String > > metadata1 = credentials . getRequestMetadata ( com . google . auth . oauth2 . ServiceAccountJwtAccessCredentialsTest . CALL_URI ) ; long lifeSpanMs = TimeUnit . SECONDS . toMillis ( 10 ) ; testClock . setCurrentTime ( lifeSpanMs ) ; java . util . Map < java . lang . String , java . util . List < java . lang . String > > metadata2 = credentials . getRequestMetadata ( com . google . auth . oauth2 . ServiceAccountJwtAccessCredentialsTest . CALL_URI ) ; "<AssertPlaceHolder>" ; } getRequestMetadata ( java . net . URI ) { if ( uri == null ) { if ( ( defaultAudience ) != null ) { uri = defaultAudience ; } else { throw new java . io . IOException ( ( "JwtAccess<sp>requires<sp>Audience<sp>uri<sp>to<sp>be<sp>passed<sp>in<sp>or<sp>the<sp>" + "defaultAudience<sp>to<sp>be<sp>specified" ) ) ; } } java . lang . String assertion = getJwtAccess ( uri ) ; java . lang . String authorizationHeader = ( com . google . auth . oauth2 . ServiceAccountJwtAccessCredentials . JWT_ACCESS_PREFIX ) + assertion ; java . util . List < java . lang . String > newAuthorizationHeaders = java . util . Collections . singletonList ( authorizationHeader ) ; return java . util . Collections . singletonMap ( AuthHttpConstants . AUTHORIZATION , newAuthorizationHeaders ) ; }
org . junit . Assert . assertEquals ( metadata1 , metadata2 )
waitUntilSpeechStart ( ) { java . util . List < java . lang . Class < ? extends edu . cmu . sphinx . decoder . scorer . SimpleAcousticScorer > > scorerClasses = new java . util . ArrayList < java . lang . Class < ? extends edu . cmu . sphinx . decoder . scorer . SimpleAcousticScorer > > ( ) ; scorerClasses . add ( edu . cmu . sphinx . decoder . scorer . SimpleAcousticScorer . class ) ; scorerClasses . add ( edu . cmu . sphinx . decoder . scorer . ThreadedAcousticScorer . class ) ; for ( java . lang . Class < ? extends edu . cmu . sphinx . decoder . scorer . SimpleAcousticScorer > scorerClass : scorerClasses ) { System . err . println ( ( "testing:<sp>" + ( scorerClass . getSimpleName ( ) ) ) ) ; edu . cmu . sphinx . frontend . databranch . DataBufferProcessor dummyFrontEnd = createDummyFrontEnd ( ) ; java . util . Map < java . lang . String , java . lang . Object > props = new java . util . HashMap < java . lang . String , java . lang . Object > ( ) ; props . put ( SimpleAcousticScorer . FEATURE_FRONTEND , dummyFrontEnd ) ; edu . cmu . sphinx . decoder . scorer . AcousticScorer scorer = edu . cmu . sphinx . util . props . ConfigurationManager . getInstance ( scorerClass , props ) ; int startBufferSize = dummyFrontEnd . getBufferSize ( ) ; scorer . allocate ( ) ; scorer . startRecognition ( ) ; java . util . List < edu . cmu . sphinx . decoder . scorer . Scoreable > dummyTokens = java . util . Arrays . asList ( testToken ) ; scorer . calculateScores ( dummyTokens ) ; "<AssertPlaceHolder>" ; scorer . stopRecognition ( ) ; scorer . deallocate ( ) ; } } getBufferSize ( ) { return featureBuffer . size ( ) ; }
org . junit . Assert . assertTrue ( ( ( dummyFrontEnd . getBufferSize ( ) ) < ( startBufferSize - 100 ) ) )
testFindById ( ) { org . apache . ambari . server . orm . entities . PrincipalEntity entity = new org . apache . ambari . server . orm . entities . PrincipalEntity ( ) ; expect ( entityManager . find ( org . apache . ambari . server . orm . entities . PrincipalEntity . class , 99L ) ) . andReturn ( entity ) ; replay ( entityManager ) ; org . apache . ambari . server . orm . dao . PrincipalDAO dao = new org . apache . ambari . server . orm . dao . PrincipalDAO ( ) ; dao . entityManagerProvider = entityManagerProvider ; "<AssertPlaceHolder>" ; } findById ( java . lang . Long ) { return entityManagerProvider . get ( ) . find ( org . apache . ambari . server . orm . entities . TopologyHostRequestEntity . class , id ) ; }
org . junit . Assert . assertEquals ( entity , dao . findById ( 99L ) )
testTell ( ) { long expectedTellPos = ( org . apache . avro . mapreduce . TestFsInput . FILE_CONTENTS . length ( ) ) / 2 ; fsInput . seek ( expectedTellPos ) ; long actualTellPos = fsInput . tell ( ) ; "<AssertPlaceHolder>" ; } tell ( ) { return ( ( offset ) - ( limit ) ) + ( pos ) ; }
org . junit . Assert . assertThat ( actualTellPos , org . hamcrest . Matchers . is ( org . hamcrest . Matchers . equalTo ( expectedTellPos ) ) )
testDynamicQueryByProjectionMissing ( ) { com . liferay . portal . kernel . dao . orm . DynamicQuery dynamicQuery = com . liferay . portal . kernel . dao . orm . DynamicQueryFactoryUtil . forClass ( com . liferay . portal . kernel . model . UserGroupRole . class , _dynamicQueryClassLoader ) ; dynamicQuery . setProjection ( com . liferay . portal . kernel . dao . orm . ProjectionFactoryUtil . property ( "id.userId" ) ) ; dynamicQuery . add ( com . liferay . portal . kernel . dao . orm . RestrictionsFactoryUtil . in ( "id.userId" , new java . lang . Object [ ] { com . liferay . portal . kernel . test . util . RandomTestUtil . nextLong ( ) } ) ) ; java . util . List < java . lang . Object > result = _persistence . findWithDynamicQuery ( dynamicQuery ) ; "<AssertPlaceHolder>" ; } size ( ) { if ( ( _workflowTaskAssignees ) != null ) { return _workflowTaskAssignees . size ( ) ; } return _kaleoTaskAssignmentInstanceLocalService . getKaleoTaskAssignmentInstancesCount ( _kaleoTaskInstanceToken . getKaleoTaskInstanceTokenId ( ) ) ; }
org . junit . Assert . assertEquals ( 0 , result . size ( ) )
testCompileArcsColn ( ) { fr . ign . cogit . geoxygene . api . spatial . coordgeom . IDirectPositionList list1 = new fr . ign . cogit . geoxygene . spatial . coordgeom . DirectPositionList ( ) ; fr . ign . cogit . geoxygene . api . spatial . coordgeom . IDirectPositionList list2 = new fr . ign . cogit . geoxygene . spatial . coordgeom . DirectPositionList ( ) ; fr . ign . cogit . geoxygene . api . spatial . coordgeom . IDirectPositionList list3 = new fr . ign . cogit . geoxygene . spatial . coordgeom . DirectPositionList ( ) ; fr . ign . cogit . geoxygene . api . spatial . coordgeom . IDirectPositionList list4 = new fr . ign . cogit . geoxygene . spatial . coordgeom . DirectPositionList ( ) ; list1 . add ( new fr . ign . cogit . geoxygene . spatial . coordgeom . DirectPosition ( 0.0 , 0.0 ) ) ; list4 . add ( new fr . ign . cogit . geoxygene . spatial . coordgeom . DirectPosition ( 0.0 , 0.0 ) ) ; list1 . add ( new fr . ign . cogit . geoxygene . spatial . coordgeom . DirectPosition ( 2.0 , 2.0 ) ) ; list4 . add ( new fr . ign . cogit . geoxygene . spatial . coordgeom . DirectPosition ( 2.0 , 2.0 ) ) ; list1 . add ( new fr . ign . cogit . geoxygene . spatial . coordgeom . DirectPosition ( 4.0 , 5.0 ) ) ; list4 . add ( new fr . ign . cogit . geoxygene . spatial . coordgeom . DirectPosition ( 4.0 , 5.0 ) ) ; list2 . add ( new fr . ign . cogit . geoxygene . spatial . coordgeom . DirectPosition ( 4.0 , 5.0 ) ) ; list2 . add ( new fr . ign . cogit . geoxygene . spatial . coordgeom . DirectPosition ( 8.0 , 7.0 ) ) ; list4 . add ( new fr . ign . cogit . geoxygene . spatial . coordgeom . DirectPosition ( 8.0 , 7.0 ) ) ; list2 . add ( new fr . ign . cogit . geoxygene . spatial . coordgeom . DirectPosition ( 10.0 , 10.0 ) ) ; list4 . add ( new fr . ign . cogit . geoxygene . spatial . coordgeom . DirectPosition ( 10.0 , 10.0 ) ) ; list3 . add ( new fr . ign . cogit . geoxygene . spatial . coordgeom . DirectPosition ( 10.0 , 10.0 ) ) ; list3 . add ( new fr . ign . cogit . geoxygene . spatial . coordgeom . DirectPosition ( 15.0 , 13.0 ) ) ; list4 . add ( new fr . ign . cogit . geoxygene . spatial . coordgeom . DirectPosition ( 15.0 , 13.0 ) ) ; list3 . add ( new fr . ign . cogit . geoxygene . spatial . coordgeom . DirectPosition ( 20.0 , 20.0 ) ) ; list4 . add ( new fr . ign . cogit . geoxygene . spatial . coordgeom . DirectPosition ( 20.0 , 20.0 ) ) ; fr . ign . cogit . geoxygene . api . spatial . coordgeom . ILineString line1 = new fr . ign . cogit . geoxygene . spatial . coordgeom . GM_LineString ( list1 ) ; fr . ign . cogit . geoxygene . api . spatial . coordgeom . ILineString line2 = new fr . ign . cogit . geoxygene . spatial . coordgeom . GM_LineString ( list2 ) ; fr . ign . cogit . geoxygene . api . spatial . coordgeom . ILineString line3 = new fr . ign . cogit . geoxygene . spatial . coordgeom . GM_LineString ( list3 ) ; fr . ign . cogit . geoxygene . api . spatial . coordgeom . ILineString finalLine = new fr . ign . cogit . geoxygene . spatial . coordgeom . GM_LineString ( list4 ) ; java . util . Set < fr . ign . cogit . geoxygene . api . spatial . coordgeom . ILineString > linesToCompile = new java . util . HashSet ( ) ; linesToCompile . add ( line3 ) ; linesToCompile . add ( line1 ) ; linesToCompile . add ( line2 ) ; fr . ign . cogit . geoxygene . api . spatial . coordgeom . ILineString compiled = fr . ign . cogit . geoxygene . contrib . geometrie . Operateurs . compileArcs ( linesToCompile , 0.1 ) ; "<AssertPlaceHolder>" ; } compileArcs ( java . util . List , double ) { fr . ign . cogit . 
geoxygene . contrib . geometrie . Operateurs . logger . debug ( "compile<sp>geometries" ) ; for ( fr . ign . cogit . geoxygene . api . spatial . coordgeom . ILineString l : geometries ) { fr . ign . cogit . geoxygene . contrib . geometrie . Operateurs . logger . debug ( ( "\t" + l ) ) ; } fr . ign . cogit . geoxygene . api . spatial . coordgeom . IDirectPositionList finalPoints = new fr . ign . cogit . geoxygene . spatial . coordgeom . DirectPositionList ( ) ; if ( geometries . isEmpty ( ) ) { fr . ign . cogit . geoxygene . contrib . geometrie . Operateurs . logger . error ( "ATTENTION.<sp>Erreur<sp><sp>la<sp>compilation<sp>de<sp>lignes<sp>:<sp>aucune<sp>ligne<sp>en<sp>entre" ) ; return null ; } fr . ign . cogit . geoxygene . api . spatial . coordgeom . ILineString currentLine = geometries . get ( 0 ) ; if ( ( geometries . size ( ) ) == 1 ) { return currentLine ; } fr . ign . cogit . geoxygene . api . spatial . coordgeom . ILineString nextLine = geometries . get ( 1 ) ; fr . ign . cogit . geoxygene . api . spatial .
org . junit . Assert . assertEquals ( finalLine , compiled )
testFixHrefs_invalid_prefix ( ) { java . lang . String packageHref = "123456789/" ; java . lang . String resourceHref = "1/2.html" ; nl . siegmann . epublib . domain . Resources resources = mock ( nl . siegmann . epublib . domain . Resources . class ) ; nl . siegmann . epublib . domain . Resource resource = mock ( nl . siegmann . epublib . domain . Resource . class ) ; when ( resources . getAll ( ) ) . thenReturn ( java . util . Arrays . asList ( resource ) ) ; when ( resource . getHref ( ) ) . thenReturn ( resourceHref ) ; nl . siegmann . epublib . epub . PackageDocumentReader . fixHrefs ( packageHref , resources ) ; "<AssertPlaceHolder>" ; } fixHrefs ( java . lang . String , nl . siegmann . epublib . domain . Resources ) { int lastSlashPos = packageHref . lastIndexOf ( '/' ) ; if ( lastSlashPos < 0 ) { return resourcesByHref ; } nl . siegmann . epublib . domain . Resources result = new nl . siegmann . epublib . domain . Resources ( ) ; for ( nl . siegmann . epublib . domain . Resource resource : resourcesByHref . getAll ( ) ) { if ( ( nl . siegmann . epublib . util . StringUtil . isNotBlank ( resource . getHref ( ) ) ) && ( ( resource . getHref ( ) . length ( ) ) > lastSlashPos ) ) { resource . setHref ( resource . getHref ( ) . substring ( ( lastSlashPos + 1 ) ) ) ; } result . add ( resource ) ; } return result ; }
org . junit . Assert . assertTrue ( true )
testExecuteInSequence ( ) { boolean b = cha . executeInSequence ( ) ; "<AssertPlaceHolder>" ; } executeInSequence ( ) { return true ; }
org . junit . Assert . assertFalse ( b )
testContainsMappeableArrayContainer_IncludeProperSubsetDifferentStart ( ) { org . roaringbitmap . buffer . MappeableContainer rc = new org . roaringbitmap . buffer . MappeableRunContainer ( ) . add ( 0 , 10 ) ; org . roaringbitmap . buffer . MappeableContainer subset = new org . roaringbitmap . buffer . MappeableArrayContainer ( ) . add ( 2 , 9 ) ; "<AssertPlaceHolder>" ; } contains ( int ) { throw new java . lang . UnsupportedOperationException ( "Not<sp>implemented<sp>in<sp>ImmutableConciseSet" ) ; }
org . junit . Assert . assertTrue ( rc . contains ( subset ) )
testBuildWithDisabledActiveStatusWithOrderBy ( ) { unit . setActive ( false ) ; org . lnu . is . domain . asset . status . AssetStatus context = new org . lnu . is . domain . asset . status . AssetStatus ( ) ; org . lnu . is . pagination . OrderBy orderBy1 = new org . lnu . is . pagination . OrderBy ( "name" , org . lnu . is . pagination . OrderByType . ASC ) ; java . util . List < org . lnu . is . pagination . OrderBy > orders = java . util . Arrays . asList ( orderBy1 ) ; java . lang . String expected = "SELECT<sp>e<sp>FROM<sp>AssetStatus<sp>e<sp>WHERE<sp>e.crtUserGroup<sp>IN<sp>(:userGroups)<sp>ORDER<sp>BY<sp>e.name<sp>ASC" ; org . lnu . is . pagination . MultiplePagedSearch < org . lnu . is . domain . asset . status . AssetStatus > pagedSearch = new org . lnu . is . pagination . MultiplePagedSearch ( ) ; pagedSearch . setEntity ( context ) ; pagedSearch . setOrders ( orders ) ; java . lang . String actualQuery = unit . build ( pagedSearch ) ; "<AssertPlaceHolder>" ; } setOrders ( java . util . List ) { this . orders = orders ; }
org . junit . Assert . assertEquals ( expected , actualQuery )
testSetPositionIntArray ( ) { final double [ ] initial = new double [ ] { 5.3 , 2.6 , 3.1 , - 852.1 } ; final int [ ] fynal = new int [ ] { 98 , - 16 , 44 , 0 } ; final net . imglib2 . RealPoint p1 = new net . imglib2 . RealPoint ( initial ) ; p1 . setPosition ( fynal ) ; for ( int j = 0 ; j < 4 ; j ++ ) { "<AssertPlaceHolder>" ; } } getDoublePosition ( int ) { return position . getDoublePosition ( d ) ; }
org . junit . Assert . assertEquals ( p1 . getDoublePosition ( j ) , fynal [ j ] , 0 )
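De-tokenized, this record checks that setPosition(int[]) overwrites every coordinate of the point; the sketch assumes the ImgLib2 library (net.imglib2.RealPoint) and JUnit 4:

    double[] initial = {5.3, 2.6, 3.1, -852.1};
    int[] fynal = {98, -16, 44, 0};
    RealPoint p1 = new RealPoint(initial);
    p1.setPosition(fynal);
    for (int j = 0; j < 4; j++) {
        // each double coordinate now equals the int it was set to, compared with zero tolerance
        org.junit.Assert.assertEquals(p1.getDoublePosition(j), fynal[j], 0);
    }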
periodosNaoColidemExclusive ( ) { java . util . Date inicioPeriodo1 = new org . joda . time . DateTime ( 2016 , 10 , 10 , 5 , 10 , 0 ) . toDate ( ) ; java . util . Date fimPeriodo1 = new org . joda . time . DateTime ( 2016 , 10 , 20 , 5 , 10 , 0 ) . toDate ( ) ; java . util . Date inicioPeriodo2 = new org . joda . time . DateTime ( 2016 , 10 , 20 , 5 , 10 , 0 ) . toDate ( ) ; java . util . Date fimPeriodo2 = new org . joda . time . DateTime ( 2016 , 10 , 25 , 5 , 10 , 0 ) . toDate ( ) ; boolean resultado1 = br . com . any . utils . DataUtils . periodosColidem ( inicioPeriodo1 , fimPeriodo1 , inicioPeriodo2 , fimPeriodo2 , false ) ; "<AssertPlaceHolder>" ; } periodosColidem ( java . util . Date , java . util . Date , java . util . Date , java . util . Date , boolean ) { org . joda . time . Interval intervalo1 = new org . joda . time . Interval ( new org . joda . time . DateTime ( inicioPeriodo1 ) , new org . joda . time . DateTime ( fimPeriodo1 ) ) ; org . joda . time . Interval intervalo2 = new org . joda . time . Interval ( new org . joda . time . DateTime ( inicioPeriodo2 ) , new org . joda . time . DateTime ( fimPeriodo2 ) ) ; if ( inclusive ) { return ( intervalo1 . overlaps ( intervalo2 ) ) || ( intervalo1 . abuts ( intervalo2 ) ) ; } return intervalo1 . overlaps ( intervalo2 ) ; }
org . junit . Assert . assertFalse ( resultado1 )
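The exclusive case in this record hinges on how Joda-Time treats a shared endpoint; a sketch assuming joda-time and JUnit 4, with simplified variable names:

    DateTime start1 = new DateTime(2016, 10, 10, 5, 10, 0);
    DateTime end1 = new DateTime(2016, 10, 20, 5, 10, 0);
    DateTime start2 = new DateTime(2016, 10, 20, 5, 10, 0);   // begins exactly where interval 1 ends
    DateTime end2 = new DateTime(2016, 10, 25, 5, 10, 0);
    Interval intervalo1 = new Interval(start1, end1);
    Interval intervalo2 = new Interval(start2, end2);
    // overlaps() excludes the shared boundary instant, so the non-inclusive check is false
    org.junit.Assert.assertFalse(intervalo1.overlaps(intervalo2));
    // abuts() is the extra check the inclusive branch adds to treat touching periods as a collision
    org.junit.Assert.assertTrue(intervalo1.abuts(intervalo2));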
newInputStreamUseEmptyInputStreamIfNotAlreadyWritten ( ) { java . io . InputStream in = virtualTempPath . newInputStream ( ) ; "<AssertPlaceHolder>" ; } newInputStream ( ) { return pool . getBlobStore ( ) . get ( transaction , binaryDocument . getBlobId ( ) ) ; }
org . junit . Assert . assertEquals ( java . io . ByteArrayInputStream . class , in . getClass ( ) )
shouldParseCorrectly ( ) { org . semanticweb . owlapi . model . OWLClass a = Class ( IRI ( ( ( org . semanticweb . owlapi . api . test . syntax . ManchesterOWLSyntaxParserTestCase . NS ) + "#" ) , "A" ) ) ; java . lang . String text1 = "\'GWAS<sp>study\'<sp>and<sp>has_publication_date<sp>some<sp>dateTime[<<sp>\"2009-01-01T00:00:00+00:00\"^^dateTime]" ; org . semanticweb . owlapi . model . OWLClassExpression expected = df . getOWLObjectIntersectionOf ( a , df . getOWLDataSomeValuesFrom ( p , df . getOWLDatatypeRestriction ( dateTime , OWLFacet . MAX_EXCLUSIVE , df . getOWLLiteral ( "2009-01-01T00:00:00+00:00" , dateTime ) ) ) ) ; org . semanticweb . owlapi . model . OWLOntology o = getOWLOntology ( ) ; o . add ( df . getOWLDeclarationAxiom ( a ) , df . getOWLDeclarationAxiom ( p ) , df . getOWLDeclarationAxiom ( dateTime ) , annotation ( a , "'GWAS<sp>study'" ) , annotation ( p , "has_publication_date" ) , annotation ( dateTime , "dateTime" ) ) ; org . semanticweb . owlapi . util . ShortFormProvider sfp = new org . semanticweb . owlapi . util . AnnotationValueShortFormProvider ( java . util . Arrays . asList ( df . getRDFSLabel ( ) ) , java . util . Collections . < org . semanticweb . owlapi . model . OWLAnnotationProperty , java . util . List < java . lang . String > > emptyMap ( ) , m ) ; org . semanticweb . owlapi . util . BidirectionalShortFormProvider shortFormProvider = new org . semanticweb . owlapi . util . BidirectionalShortFormProviderAdapter ( asList ( m . ontologies ( ) ) , sfp ) ; org . semanticweb . owlapi . util . mansyntax . ManchesterOWLSyntaxParser parser = org . semanticweb . owlapi . apibinding . OWLManager . createManchesterParser ( ) ; parser . setStringToParse ( text1 ) ; org . semanticweb . owlapi . expression . ShortFormEntityChecker owlEntityChecker = new org . semanticweb . owlapi . expression . ShortFormEntityChecker ( shortFormProvider ) ; parser . setOWLEntityChecker ( owlEntityChecker ) ; parser . setDefaultOntology ( o ) ; org . semanticweb . owlapi . model . OWLClassExpression dsvf = parser . parseClassExpression ( ) ; "<AssertPlaceHolder>" ; } parseClassExpression ( ) { org . semanticweb . owlapi . model . OWLClassExpression desc = parseUnion ( ) ; if ( ! ( org . semanticweb . owlapi . manchestersyntax . parser . ManchesterOWLSyntaxTokenizer . eof ( consumeToken ( ) ) ) ) { throw new org . semanticweb . owlapi . manchestersyntax . parser . ManchesterOWLSyntaxParserImpl . ExceptionBuilder ( ) . withKeyword ( org . semanticweb . owlapi . manchestersyntax . parser . ManchesterOWLSyntaxTokenizer . EOFTOKEN ) . build ( ) ; } return desc ; }
org . junit . Assert . assertEquals ( expected , dsvf )
whenAllocatingUntilSomeEndDateBeforeTheStartNothingIsDone ( ) { org . joda . time . LocalDate start = new org . joda . time . LocalDate ( 2006 , 10 , 5 ) ; givenTaskWithStartAndEnd ( org . libreplan . business . test . planner . entities . GenericResourceAllocationTest . toInterval ( start , org . joda . time . Period . days ( 4 ) ) ) ; givenGenericResourceAllocationForTask ( task ) ; givenWorkersWithoutLoadAndWithoutCalendar ( ) ; org . libreplan . business . workingday . ResourcesPerDay resourcesPerDay = org . libreplan . business . workingday . ResourcesPerDay . amount ( 1 ) ; genericResourceAllocation . forResources ( java . util . Collections . singletonList ( worker1 ) ) . resourcesPerDayUntil ( org . libreplan . business . test . planner . entities . GenericResourceAllocationTest . minusDays ( start , 1 ) ) . allocate ( resourcesPerDay ) ; "<AssertPlaceHolder>" ; } getOrderedAssignmentsFor ( org . libreplan . business . resources . entities . Resource ) { java . util . List < org . libreplan . business . planner . entities . GenericDayAssignment > assignments = getOrderedAssignmentsFor ( ) . get ( resource ) ; return assignments == null ? java . util . Collections . emptyList ( ) : java . util . Collections . unmodifiableList ( assignments ) ; }
org . junit . Assert . assertTrue ( genericResourceAllocation . getOrderedAssignmentsFor ( worker1 ) . isEmpty ( ) )
testMinFilter ( ) { ops . run ( net . imagej . ops . filter . min . MinFilterOp . class , out , in , shape , oobFactory ) ; byte min = Byte . MAX_VALUE ; net . imglib2 . algorithm . neighborhood . RectangleShape . NeighborhoodsIterableInterval < net . imglib2 . type . numeric . integer . ByteType > neighborhoods = shape . neighborhoods ( net . imglib2 . view . Views . interval ( net . imglib2 . view . Views . extendMirrorSingle ( in ) , in ) ) ; for ( net . imglib2 . type . numeric . integer . ByteType t : neighborhoods . firstElement ( ) ) { min = ( ( byte ) ( java . lang . Math . min ( t . getInteger ( ) , min ) ) ) ; } "<AssertPlaceHolder>" ; } firstElement ( ) { return cursor ( ) . next ( ) ; }
org . junit . Assert . assertEquals ( min , out . firstElement ( ) . get ( ) )
whenApplyDistinct_thenRemoveDuplicatesFromStream ( ) { java . util . List < java . lang . Integer > intList = java . util . Arrays . asList ( 2 , 5 , 3 , 2 , 4 , 3 ) ; java . util . List < java . lang . Integer > distinctIntList = intList . stream ( ) . distinct ( ) . collect ( java . util . stream . Collectors . toList ( ) ) ; "<AssertPlaceHolder>" ; } stream ( ) { return java . util . stream . Stream . of ( com . baeldung . java . enumiteration . DaysOfWeekEnum . values ( ) ) ; }
org . junit . Assert . assertEquals ( distinctIntList , java . util . Arrays . asList ( 2 , 5 , 3 , 4 ) )
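This record is already self-contained; de-tokenized and commented, assuming only java.util, java.util.stream, and JUnit 4:

    List<Integer> intList = Arrays.asList(2, 5, 3, 2, 4, 3);
    // distinct() keeps the first occurrence of each value and preserves encounter order
    List<Integer> distinctIntList = intList.stream().distinct().collect(Collectors.toList());
    org.junit.Assert.assertEquals(distinctIntList, Arrays.asList(2, 5, 3, 4));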
testUpdateEntity ( ) { final long dummyId = 0 ; org . hibernate . Session session = sf . openSession ( ) ; org . hibernate . Transaction tx = session . beginTransaction ( ) ; session . save ( new com . hazelcast . hibernate . entity . DummyEntity ( dummyId , null , 0 , null ) ) ; tx . commit ( ) ; tx = session . beginTransaction ( ) ; com . hazelcast . hibernate . entity . DummyEntity ent = session . get ( com . hazelcast . hibernate . entity . DummyEntity . class , dummyId ) ; ent . setName ( "updatedName" ) ; session . update ( ent ) ; tx . commit ( ) ; session . close ( ) ; session = sf2 . openSession ( ) ; com . hazelcast . hibernate . entity . DummyEntity entity = session . get ( com . hazelcast . hibernate . entity . DummyEntity . class , dummyId ) ; "<AssertPlaceHolder>" ; } getName ( ) { return name ; }
org . junit . Assert . assertEquals ( "updatedName" , entity . getName ( ) )
testIsNietValideNederlandsGemeenteCode2 ( ) { final nl . moderniseringgba . migratie . conversie . model . lo3 . element . Lo3GemeenteCode gemeenteCode = new nl . moderniseringgba . migratie . conversie . model . lo3 . element . Lo3GemeenteCode ( "123a" ) ; "<AssertPlaceHolder>" ; } isValideNederlandseGemeenteCode ( ) { boolean result = false ; if ( ( code . length ( ) ) == ( nl . moderniseringgba . migratie . conversie . model . lo3 . element . Lo3GemeenteCode . LENGTE_NEDERLANDSE_CODE ) ) { try { java . lang . Integer . parseInt ( code ) ; result = true ; } catch ( final java . lang . NumberFormatException nfe ) { result = false ; } } return result ; }
org . junit . Assert . assertFalse ( gemeenteCode . isValideNederlandseGemeenteCode ( ) )
test37createUserSearchQuery ( ) { javax . persistence . EntityManager entityManager = org . mockito . Mockito . mock ( javax . persistence . EntityManager . class ) ; java . lang . String queryString = "Select<sp>id,loginId,emailAddress,firstName,lastName,statusList,publicScreenName,status<sp>from<sp>XXPortalUser" ; javax . persistence . Query query = org . mockito . Mockito . mock ( javax . persistence . Query . class ) ; org . apache . ranger . common . SearchCriteria searchCriteria = new org . apache . ranger . common . SearchCriteria ( ) ; searchCriteria . setDistinct ( true ) ; searchCriteria . setGetChildren ( true ) ; searchCriteria . setGetCount ( true ) ; searchCriteria . setMaxRows ( 12 ) ; searchCriteria . setOwnerId ( org . apache . ranger . biz . TestUserMgr . userId ) ; searchCriteria . setStartIndex ( 1 ) ; searchCriteria . setSortBy ( "asc" ) ; org . apache . ranger . view . VXPortalUser vXPortalUser = userProfile ( ) ; java . util . List < java . lang . String > userRoleList = new java . util . ArrayList < java . lang . String > ( ) ; userRoleList . add ( "ROLE_SYS_ADMIN" 1 ) ; java . util . List < java . lang . Integer > statusList = new java . util . ArrayList < java . lang . Integer > ( ) ; statusList . add ( 1 ) ; searchCriteria . addParam ( "roleList" , userRoleList ) ; searchCriteria . addParam ( "userId" , vXPortalUser . getId ( ) ) ; searchCriteria . addParam ( "loginId" , vXPortalUser . getLoginId ( ) ) ; searchCriteria . addParam ( "ROLE_SYS_ADMIN" 3 , vXPortalUser . getEmailAddress ( ) ) ; searchCriteria . addParam ( "ROLE_SYS_ADMIN" 0 , vXPortalUser . getFirstName ( ) ) ; searchCriteria . addParam ( "lastName" , vXPortalUser . getLastName ( ) ) ; searchCriteria . addParam ( "statusList" , statusList ) ; searchCriteria . addParam ( "ROLE_SYS_ADMIN" 2 , vXPortalUser . getPublicScreenName ( ) ) ; searchCriteria . addParam ( "status" , vXPortalUser . getStatus ( ) ) ; searchCriteria . addParam ( "familyScreenName" , vXPortalUser . getPublicScreenName ( ) ) ; org . mockito . Mockito . when ( daoManager . getEntityManager ( ) ) . thenReturn ( entityManager ) ; org . mockito . Mockito . when ( entityManager . createQuery ( org . mockito . Mockito . anyString ( ) ) ) . thenReturn ( query ) ; javax . persistence . Query newQuery = userMgr . createUserSearchQuery ( query . toString ( ) , queryString , searchCriteria ) ; "<AssertPlaceHolder>" ; userRoleList . add ( "ROLE_SYS_ADMIN" ) ; statusList . add ( 0 ) ; searchCriteria . addParam ( "statusList" , statusList ) ; searchCriteria . addParam ( "roleList" , userRoleList ) ; newQuery = userMgr . createUserSearchQuery ( query . toString ( ) , queryString , searchCriteria ) ; } toString ( ) { return ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( "KylinProjectResponse<sp>[uuid=" + ( uuid ) ) + ",<sp>version=" ) + ( version ) ) + ",<sp>name=" ) + ( name ) ) + ",<sp>owner=" ) + ( owner ) ) + ",<sp>status=" ) + ( status ) ) + ",<sp>description=" ) + ( description ) ) + ",<sp>tables=" ) + ( tables ) ) + ",<sp>models=" ) + ( models ) ) + "]" ; }
org . junit . Assert . assertNotNull ( newQuery )
testErrorHelper ( ) { java . io . IOException e = new java . io . IOException ( ) ; try { rethrow ( e ) ; } catch ( java . lang . Exception ee ) { "<AssertPlaceHolder>" ; } } rethrow ( java . io . IOException ) { org . simpleflatmapper . util . ErrorHelper . rethrow ( e ) ; }
org . junit . Assert . assertSame ( e , ee )
testJAXBBeanRepresentationFastInfoset ( ) { initiateWebApplication ( com . sun . jersey . impl . entity . EntityTypesTest . JAXBBeanResourceFastInfoset . class ) ; com . sun . jersey . api . client . WebResource r = resource ( "/" ) ; com . sun . jersey . impl . entity . JAXBBean in = new com . sun . jersey . impl . entity . JAXBBean ( "CONTENT" ) ; com . sun . jersey . impl . entity . JAXBBean out = r . entity ( in , "application/fastinfoset" ) . post ( com . sun . jersey . impl . entity . JAXBBean . class ) ; "<AssertPlaceHolder>" ; } post ( java . lang . String ) { return content ; }
org . junit . Assert . assertEquals ( in . value , out . value )
valideGeenSoortSynchronisatieNietVerplichtBijGeenCategorieDienst ( ) { final nl . bzk . brp . model . internbericht . ProtocolleringOpdracht protocolleringOpdracht = maakProtocolleringOpdracht ( SoortDienst . MUTATIELEVERING_OP_BASIS_VAN_AFNEMERINDICATIE , datumMaterieelSelectie , datumAanvangMaterielePeriode , datumEindeMaterielePeriode , datumTijdAanvangFormelePeriode , datumTijdEindeFormelePeriode , null ) ; org . springframework . test . util . ReflectionTestUtils . setField ( protocolleringOpdracht . getLevering ( ) , "soortSynchronisatie" , null ) ; "<AssertPlaceHolder>" ; } isValide ( ) { boolean resultaat ; if ( ( levering ) == null ) { resultaat = false ; nl . bzk . brp . model . internbericht . ProtocolleringOpdracht . LOGGER . debug ( "Levering<sp>dient<sp>gevuld<sp>te<sp>zijn." ) ; } else if ( ( ( personen ) == null ) || ( personen . isEmpty ( ) ) ) { resultaat = false ; nl . bzk . brp . model . internbericht . ProtocolleringOpdracht . LOGGER . debug ( "Personen<sp>dient<sp>gevuld<sp>te<sp>zijn." ) ; } else if ( ( levering . getToegangLeveringsautorisatieId ( ) ) == null ) { resultaat = false ; nl . bzk . brp . model . internbericht . ProtocolleringOpdracht . LOGGER . debug ( "ToegangAbonnementId<sp>dient<sp>gevuld<sp>te<sp>zijn." ) ; } else if ( ( levering . getDienstId ( ) ) == null ) { resultaat = false ; nl . bzk . brp . model . internbericht . ProtocolleringOpdracht . LOGGER . debug ( "DienstId<sp>dient<sp>gevuld<sp>te<sp>zijn." ) ; } else if ( ( ( levering . getDatumTijdKlaarzettenLevering ( ) ) == null ) || ( levering . getDatumTijdKlaarzettenLevering ( ) . heeftGeenWaarde ( ) ) ) { resultaat = false ; nl . bzk . brp . model . internbericht . ProtocolleringOpdracht . LOGGER . debug ( "DatumTijdKlaarzettenLevering<sp>dient<sp>gevuld<sp>te<sp>zijn." ) ; } else if ( ( getSoortDienst ( ) ) == null ) { resultaat = false ; nl . bzk . brp . model . internbericht . ProtocolleringOpdracht . LOGGER . debug ( "Soort<sp>dienst<sp>gevuld<sp>te<sp>zijn." ) ; } else if ( ( nl . bzk . brp . model . internbericht . ProtocolleringOpdracht . SOORTDIENSTEN_MET_SOORT_SYNCHRONISATIE_VERPLICHT . contains ( getSoortDienst ( ) ) ) && ( ( ( levering . getSoortSynchronisatie ( ) ) == null ) || ( levering . getSoortSynchronisatie ( ) . heeftGeenWaarde ( ) ) ) ) { resultaat = false ; nl . bzk . brp . model . internbericht . ProtocolleringOpdracht . LOGGER . debug ( "ToegangAbonnementId<sp>dient<sp>gevuld<sp>te<sp>zijn." 0 , getSoortDienst ( ) ) ; } else { switch ( soortDienst ) { case ATTENDERING : case MUTATIELEVERING_OP_BASIS_VAN_DOELBINDING : resultaat = isValideAttenderingOfMutatieLeveringDoelbinding ( ) ; break ; case GEEF_DETAILS_PERSOON : case GEEF_DETAILS_PERSOON_BULK : resultaat = isValideGeefDetailsPersoon ( getHistorievorm ( ) ) ; break ; case MUTATIELEVERING_OP_BASIS_VAN_AFNEMERINDICATIE : case PLAATSEN_AFNEMERINDICATIE : case VERWIJDEREN_AFNEMERINDICATIE : resultaat = isValideAfnemerindicatie ( ) ; break ; case SYNCHRONISATIE_PERSOON : resultaat = isValideSynchronisatiePersoon ( ) ; break ; case GEEF_MEDEBEWONERS_VAN_PERSOON : resultaat = isValideGeefMedebewonersVanPersoon ( ) ; break ; default : final java . lang . String foutmelding = "Voor<sp>deze<sp>catalogusoptie<sp>is<sp>geen<sp>protocollering<sp>validatie<sp>ingesteld:<sp>" + ( soortDienst ) ; nl . bzk . brp . model . internbericht . ProtocolleringOpdracht . LOGGER . error ( foutmelding ) ; throw new java . lang . IllegalArgumentException ( foutmelding ) ; } if ( ! resultaat ) { nl . bzk . brp . model . internbericht . 
ProtocolleringOpdracht . LOGGER . debug ( ( "De<sp>protocollering<sp>is<sp>niet<sp>valide<sp>voor<sp>de<sp>catalogusoptie:<sp>{},<sp>" + ( ( "datum<sp>materieel<sp>selectie:<sp>{},<sp>datum<sp>aanvang<sp>materiele<sp>periode:<sp>{},<sp>" + "datum<sp>einde<sp>materiele<sp>periode:<sp>{},<sp>datum<sp>tijd<sp>aanv<sp>form<sp>periode:<sp>{},<sp>" ) + "ToegangAbonnementId<sp>dient<sp>gevuld<sp>te<sp>zijn." 1 ) ) , soortDienst , levering . getDatumMaterieelSelectie ( ) , levering . getDatumAanvangMaterielePeriodeResultaat ( ) , levering . getDatumEindeMaterielePeriodeResultaat ( ) , levering . getDatumTijdAanvangFormelePeriodeResultaat ( ) , levering . getDatumTijdEindeFormelePeriodeResultaat ( ) , historievorm ) ; } } return resultaat ; }
org . junit . Assert . assertFalse ( protocolleringOpdracht . isValide ( ) )
equalityOtherObject ( ) { canvas = new com . opera . core . systems . model . Canvas ( ) ; "<AssertPlaceHolder>" ; } equals ( java . lang . Object ) { if ( ( this ) == obj ) { return true ; } if ( ( obj == null ) || ( ( getClass ( ) ) != ( obj . getClass ( ) ) ) ) { return false ; } com . opera . core . systems . arguments . OperaArgument other = ( ( com . opera . core . systems . arguments . OperaArgument ) ( obj ) ) ; if ( ( argument ) == null ) { if ( ( other . argument ) != null ) { return false ; } } else if ( ! ( argument . equals ( other . argument ) ) ) { return false ; } return true ; }
org . junit . Assert . assertFalse ( canvas . equals ( "" ) )
values ( ) { com . gs . collections . api . list . MutableList < java . lang . String > result = Lists . mutable . of ( ) ; com . gs . collections . api . map . MutableMap < java . lang . Integer , java . lang . String > map = new com . gs . collections . impl . map . fixed . SingletonMap ( 1 , "One" ) ; for ( java . lang . String value : map . values ( ) ) { result . add ( value ) ; } "<AssertPlaceHolder>" ; } newListWith ( E [ ] ) { return new com . gs . collections . impl . list . mutable . FastList < E > ( elements ) ; }
org . junit . Assert . assertEquals ( com . gs . collections . impl . list . mutable . FastList . newListWith ( "One" ) , result )
testGetDeviceByIDEmptyNetwork ( ) { com . digi . xbee . api . RemoteXBeeDevice found = network . getDevice ( com . digi . xbee . api . XBeeNetworkGetTest . NODE_ID ) ; "<AssertPlaceHolder>" ; } getDevice ( com . digi . xbee . api . models . XBee16BitAddress ) { if ( ( localDevice . getXBeeProtocol ( ) ) == ( com . digi . xbee . api . models . XBeeProtocol . DIGI_MESH ) ) throw new com . digi . xbee . api . exceptions . OperationNotSupportedException ( "DigiMesh<sp>protocol<sp>does<sp>not<sp>support<sp>16-bit<sp>addressing." ) ; if ( ( localDevice . getXBeeProtocol ( ) ) == ( com . digi . xbee . api . models . XBeeProtocol . DIGI_POINT ) ) throw new com . digi . xbee . api . exceptions . OperationNotSupportedException ( "Point-to-Multipoint<sp>protocol<sp>does<sp>not<sp>support<sp>16-bit<sp>addressing." ) ; if ( address == null ) throw new java . lang . NullPointerException ( "16-bit<sp>address<sp>cannot<sp>be<sp>null." ) ; if ( address . equals ( XBee16BitAddress . UNKNOWN_ADDRESS ) ) throw new java . lang . IllegalArgumentException ( "16-bit<sp>address<sp>cannot<sp>be<sp>unknown." ) ; logger . debug ( "{}Getting<sp>device<sp>'{}'<sp>from<sp>network." , localDevice . toString ( ) , address ) ; com . digi . xbee . api . RemoteXBeeDevice devInNetwork = null ; java . util . Collection < com . digi . xbee . api . RemoteXBeeDevice > devices = remotesBy64BitAddr . values ( ) ; for ( com . digi . xbee . api . RemoteXBeeDevice d : devices ) { com . digi . xbee . api . models . XBee16BitAddress a = get16BitAddress ( d ) ; if ( ( a != null ) && ( a . equals ( address ) ) ) { devInNetwork = d ; break ; } } if ( devInNetwork == null ) devInNetwork = remotesBy16BitAddr . get ( address ) ; return devInNetwork ; }
org . junit . Assert . assertNull ( found )
shouldSetPresenceUnavailableOnRosterReception ( ) { session . receives ( ( "<iq<sp>type='set'><query<sp>xmlns='jabber:iq:roster'>" + "<item<sp>jid='friend@domain'<sp>name='MyFriend'<sp>/></query></iq>" ) ) ; final com . calclab . emite . im . roster . RosterItem item = roster . getItemByJID ( uri ( "friend@domain" ) ) ; "<AssertPlaceHolder>" ; } uri ( java . lang . String ) { if ( com . google . common . base . Strings . isNullOrEmpty ( uri ) ) return null ; if ( com . calclab . emite . core . XmppURI . cache . containsKey ( uri ) ) return com . calclab . emite . core . XmppURI . cache . get ( uri ) ; java . lang . String node = null ; java . lang . String domain = null ; java . lang . String resource = null ; final int atIndex = ( uri . indexOf ( '@' ) ) + 1 ; if ( atIndex > 0 ) { node = uri . substring ( 0 , ( atIndex - 1 ) ) ; if ( ( node . length ( ) ) == 0 ) return null ; } final int barIndex = uri . indexOf ( '/' , atIndex ) ; if ( atIndex == barIndex ) return null ; if ( barIndex > 0 ) { domain = uri . substring ( atIndex , barIndex ) ; resource = uri . substring ( ( barIndex + 1 ) ) ; } else { domain = uri . substring ( atIndex ) ; } if ( ( domain . length ( ) ) == 0 ) return null ; final com . calclab . emite . core . XmppURI result = com . calclab . emite . core . XmppURI . uri ( node , domain , resource ) ; com . calclab . emite . core . XmppURI . cache . put ( uri , result ) ; return result ; }
org . junit . Assert . assertNotNull ( item )
testParamWithMultipleBasesToken ( ) { ca . uhn . fhir . jpa . dao . dstu3 . SearchParameter sp = new ca . uhn . fhir . jpa . dao . dstu3 . SearchParameter ( ) ; sp . setUrl ( "http://clinicalcloud.solutions/fhir/SearchParameter/request-reason" ) ; sp . setName ( "reason" ) ; sp . setStatus ( Enumerations . PublicationStatus . ACTIVE ) ; sp . setCode ( "reason" ) ; sp . addBase ( "MedicationRequest" ) ; sp . addBase ( "ProcedureRequest" ) ; sp . setType ( Enumerations . SearchParamType . TOKEN ) ; sp . setExpression ( "MedicationRequest.reasonCode<sp>|<sp>ProcedureRequest.reasonCode" ) ; mySearchParameterDao . create ( sp ) ; mySearchParamRegistry . forceRefresh ( ) ; ca . uhn . fhir . jpa . dao . dstu3 . MedicationRequest mr = new ca . uhn . fhir . jpa . dao . dstu3 . MedicationRequest ( ) ; mr . addReasonCode ( ) . addCoding ( ) . setSystem ( "foo" ) . setCode ( "bar" ) ; java . lang . String mrId = myMedicationRequestDao . create ( mr ) . getId ( ) . toUnqualifiedVersionless ( ) . getValue ( ) ; ca . uhn . fhir . jpa . dao . dstu3 . ProcedureRequest pr = new ca . uhn . fhir . jpa . dao . dstu3 . ProcedureRequest ( ) ; pr . addReasonCode ( ) . addCoding ( ) . setSystem ( "foo" ) . setCode ( "bar" ) ; myProcedureRequestDao . create ( pr ) ; ca . uhn . fhir . jpa . searchparam . SearchParameterMap map = new ca . uhn . fhir . jpa . searchparam . SearchParameterMap ( ) ; map . setLoadSynchronous ( true ) ; map . add ( "reason" , new ca . uhn . fhir . jpa . dao . dstu3 . TokenParam ( "foo" , "bar" ) ) ; java . util . List < java . lang . String > results = toUnqualifiedVersionlessIdValues ( myMedicationRequestDao . search ( map ) ) ; "<AssertPlaceHolder>" ; } contains ( java . lang . Object ) { return data . contains ( o ) ; }
org . junit . Assert . assertThat ( results , contains ( mrId ) )
testGetLength ( ) { when ( mockMojiFile . length ( ) ) . thenReturn ( 333L ) ; fm . last . moji . local . LocalMojiFileAttributes attributes = new fm . last . moji . local . LocalMojiFileAttributes ( mockMojiFile ) ; "<AssertPlaceHolder>" ; } getLength ( ) { return length ; }
org . junit . Assert . assertThat ( attributes . getLength ( ) , org . hamcrest . CoreMatchers . is ( 333L ) )
testGetServerAddress ( ) { org . apache . hadoop . hbase . HServerAddress hsa1 = new org . apache . hadoop . hbase . HServerAddress ( "localhost" , 1234 ) ; org . apache . hadoop . hbase . HServerInfo hsi1 = new org . apache . hadoop . hbase . HServerInfo ( hsa1 , 1L , 5678 ) ; "<AssertPlaceHolder>" ; } getServerAddress ( ) { return new org . apache . hadoop . hbase . HServerAddress ( this . hostname , this . port ) ; }
org . junit . Assert . assertEquals ( hsi1 . getServerAddress ( ) , hsa1 )
testResolveOldAPIVersionFromSysProp ( ) { org . cloudifysource . shell . rest . APIVersionResolver fixture = new org . cloudifysource . shell . rest . APIVersionResolver ( ) ; final java . lang . String sysPropBefore = java . lang . System . getProperty ( CloudifyConstants . SYSTEM_PROPERTY_CLI_OLD_REST_API_VERSION ) ; java . lang . String value = "10.0.0" ; java . lang . System . setProperty ( CloudifyConstants . SYSTEM_PROPERTY_CLI_OLD_REST_API_VERSION , value ) ; java . lang . String result = fixture . resolveOldAPIVersion ( ) ; try { "<AssertPlaceHolder>" ; } finally { if ( sysPropBefore == null ) { java . lang . System . clearProperty ( CloudifyConstants . SYSTEM_PROPERTY_CLI_OLD_REST_API_VERSION ) ; } else { java . lang . System . setProperty ( CloudifyConstants . SYSTEM_PROPERTY_CLI_OLD_REST_API_VERSION , sysPropBefore ) ; } } } resolveOldAPIVersion ( ) { final java . lang . String sysprop = java . lang . System . getProperty ( CloudifyConstants . SYSTEM_PROPERTY_CLI_OLD_REST_API_VERSION ) ; if ( sysprop != null ) { return sysprop ; } final java . lang . String platformVersion = com . j_spaces . kernel . PlatformVersion . getVersionNumber ( ) ; return platformVersion ; }
org . junit . Assert . assertEquals ( value , result )
notEqualsObjectWithNull ( ) { "<AssertPlaceHolder>" ; }
org . junit . Assert . assertNotEquals ( new java . lang . Object ( ) , null )
testNoIndexExpression ( ) { org . apache . hadoop . hbase . index . client . IndexExpression exp = new org . apache . hadoop . hbase . index . client . NoIndexExpression ( ) ; org . apache . hadoop . hbase . client . Scan scan = new org . apache . hadoop . hbase . client . Scan ( ) ; scan . setAttribute ( Constants . INDEX_EXPRESSION , org . apache . hadoop . hbase . index . client . IndexUtils . toBytes ( exp ) ) ; byte [ ] value1 = org . apache . hadoop . hbase . util . Bytes . toBytes ( "asdf" ) ; scan . setFilter ( new org . apache . hadoop . hbase . filter . SingleColumnValueFilter ( org . apache . hadoop . hbase . index . coprocessor . regionserver . TestScanFilterEvaluatorForIndexInScan . FAMILY1 , org . apache . hadoop . hbase . index . coprocessor . regionserver . TestScanFilterEvaluatorForIndexInScan . QUALIFIER1 , org . apache . hadoop . hbase . filter . CompareFilter . CompareOp . EQUAL , value1 ) ) ; java . util . List < org . apache . hadoop . hbase . index . IndexSpecification > indices = new java . util . ArrayList < org . apache . hadoop . hbase . index . IndexSpecification > ( ) ; org . apache . hadoop . hbase . index . IndexSpecification is1 = new org . apache . hadoop . hbase . index . IndexSpecification ( "idx1" ) ; org . apache . hadoop . hbase . HColumnDescriptor colDesc = new org . apache . hadoop . hbase . HColumnDescriptor ( org . apache . hadoop . hbase . index . coprocessor . regionserver . TestScanFilterEvaluatorForIndexInScan . FAMILY1 ) ; is1 . addIndexColumn ( colDesc , org . apache . hadoop . hbase . index . coprocessor . regionserver . TestScanFilterEvaluatorForIndexInScan . COL1 , ValueType . String , 15 ) ; indices . add ( is1 ) ; org . apache . hadoop . hbase . index . coprocessor . regionserver . ScanFilterEvaluator evaluator = new org . apache . hadoop . hbase . index . coprocessor . regionserver . ScanFilterEvaluator ( ) ; org . apache . hadoop . hbase . regionserver . HRegion region = org . apache . hadoop . hbase . index . coprocessor . regionserver . TestScanFilterEvaluatorForIndexInScan . initHRegion ( org . apache . hadoop . hbase . index . coprocessor . regionserver . TestScanFilterEvaluatorForIndexInScan . tableName . getBytes ( ) , null , null , "testNoIndexExpression" , org . apache . hadoop . hbase . index . coprocessor . regionserver . TestScanFilterEvaluatorForIndexInScan . TEST_UTIL . getConfiguration ( ) , org . apache . hadoop . hbase . index . coprocessor . regionserver . TestScanFilterEvaluatorForIndexInScan . FAMILY1 ) ; org . apache . hadoop . hbase . index . coprocessor . regionserver . IndexRegionScanner scanner = evaluator . evaluate ( scan , indices , new byte [ 0 ] , region , org . apache . hadoop . hbase . index . coprocessor . regionserver . TestScanFilterEvaluatorForIndexInScan . tableName ) ; "<AssertPlaceHolder>" ; } evaluate ( org . apache . hadoop . hbase . client . Scan , java . util . List , byte [ ] , org . apache . hadoop . hbase . regionserver . HRegion , java . lang . String ) { org . apache . hadoop . hbase . filter . Filter filter = scan . getFilter ( ) ; byte [ ] indexExpBytes = scan . getAttribute ( Constants . INDEX_EXPRESSION ) ; if ( filter == null ) { if ( indexExpBytes != null ) { org . apache . hadoop . hbase . index . coprocessor . regionserver . ScanFilterEvaluator . LOG . warn ( ( "Passed<sp>an<sp>Index<sp>expression<sp>along<sp>with<sp>the<sp>Scan<sp>but<sp>without<sp>any<sp>filters<sp>on<sp>Scan!" + "<sp>The<sp>index<sp>wont<sp>be<sp>used" ) ) ; } return null ; } org . apache . hadoop . hbase . 
index . coprocessor . regionserver . FilterNode node = null ; org . apache . hadoop . hbase . index . coprocessor . regionserver . IndexRegionScanner indexRegionScanner = null ; if ( indexExpBytes != null ) { try { org . apache . hadoop . hbase . index . client . IndexExpression indexExpression = org . apache . hadoop . hbase . index . client . IndexUtils . toIndexExpression ( indexExpBytes ) ; if ( indexExpression instanceof org . apache . hadoop . hbase . index . client . NoIndexExpression ) { org . apache . hadoop . hbase . index . coprocessor . regionserver . ScanFilterEvaluator . LOG . info ( ( "NoIndexExpression<sp>is<sp>passed<sp>as<sp>the<sp>index<sp>to<sp>be<sp>used<sp>for<sp>this<sp>Scan." + "<sp>No<sp>possible<sp>index<sp>will<sp>be<sp>used." ) ) ; return null ; } java . util . Map < java . lang . String , org . apache . hadoop . hbase . index . IndexSpecification > nameVsIndex = new java . util . HashMap < java . lang . String , org . apache . hadoop . hbase . index . IndexSpecification > ( ) ; for ( org . apache . hadoop . hbase . index . IndexSpecification index : indices ) { nameVsIndex . put ( index . getName ( ) , index ) ; } node = convertIdxExpToFilterNode ( indexExpression , nameVsIndex , tableName ) ; } catch ( java . lang . Exception e ) { org . apache . hadoop . hbase . index . coprocessor . regionserver . ScanFilterEvaluator . LOG . error ( ( "There<sp>is<sp>an<sp>Exception<sp>in<sp>getting<sp>IndexExpression<sp>from<sp>Scan<sp>attribute!" + "<sp>The<sp>index<sp>won't<sp>be<sp>used" ) , e ) ; } } else { org . apache . hadoop . hbase . filter . Filter newFilter = doFiltersRestruct ( filter ) ; if ( newFilter != null ) { node = evalFilterForIndexSelection ( newFilter , indices ) ; } } if ( node != null ) { indexRegionScanner = createIndexScannerScheme ( node , regionStartKey , scan . getStartRow ( ) , scan . getStopRow ( ) , idxRegion , tableName ) ; if ( indexRegionScanner instanceof org . apache . hadoop .
org . junit . Assert . assertNull ( scanner )
testTearDownOperation ( ) { executeScriptOperation . tearDownOperation ( ) ; java . util . List < java . lang . String > tableNames = executeScriptOperation . getTableNames ( executeScriptOperation . getConnection ( ) ) ; "<AssertPlaceHolder>" ; executeScriptOperation . closeConnection ( ) ; } getConnection ( ) { if ( ( connection ) == null ) { openConnection ( ) ; } return connection ; }
org . junit . Assert . assertTrue ( tableNames . isEmpty ( ) )
doubleAddUniqueConstraint ( ) { org . apache . hadoop . hive . metastore . api . Table table = testTables [ 0 ] ; org . apache . hadoop . hive . metastore . api . UniqueConstraintsRequest rqst = new org . apache . hadoop . hive . metastore . api . UniqueConstraintsRequest ( table . getCatName ( ) , table . getDbName ( ) , table . getTableName ( ) ) ; java . util . List < org . apache . hadoop . hive . metastore . api . SQLUniqueConstraint > fetched = client . getUniqueConstraints ( rqst ) ; "<AssertPlaceHolder>" ; java . util . List < org . apache . hadoop . hive . metastore . api . SQLUniqueConstraint > uc = new org . apache . hadoop . hive . metastore . client . builder . SQLUniqueConstraintBuilder ( ) . onTable ( table ) . addColumn ( "col1" ) . build ( metaStore . getConf ( ) ) ; client . addUniqueConstraint ( uc ) ; try { uc = new org . apache . hadoop . hive . metastore . client . builder . SQLUniqueConstraintBuilder ( ) . onTable ( table ) . addColumn ( "col2" ) . build ( metaStore . getConf ( ) ) ; client . addUniqueConstraint ( uc ) ; org . junit . Assert . fail ( ) ; } catch ( org . apache . hadoop . hive . metastore . api . InvalidObjectException | org . apache . thrift . TApplicationException e ) { } } isEmpty ( ) { com . google . common . base . Preconditions . checkNotNull ( getPath ( ) ) ; try { org . apache . hadoop . fs . FileSystem fs = org . apache . hadoop . fs . FileSystem . get ( getPath ( ) . toUri ( ) , org . apache . hadoop . hive . ql . session . SessionState . getSessionConf ( ) ) ; return ( ! ( fs . exists ( getPath ( ) ) ) ) || ( ( fs . listStatus ( getPath ( ) , FileUtils . HIDDEN_FILES_PATH_FILTER ) . length ) == 0 ) ; } catch ( java . io . IOException e ) { throw new org . apache . hadoop . hive . ql . metadata . HiveException ( e ) ; } }
org . junit . Assert . assertTrue ( fetched . isEmpty ( ) )
testWithOnlyPage ( ) { org . xwiki . model . reference . EntityReference reference = new org . xwiki . model . reference . RegexEntityReference ( java . util . regex . Pattern . compile ( org . xwiki . model . reference . RegexEntityReferenceTest . REFERENCETOMATCH . getName ( ) , Pattern . LITERAL ) , org . xwiki . model . EntityType . DOCUMENT ) ; "<AssertPlaceHolder>" ; } equals ( java . lang . Object ) { if ( obj == ( this ) ) { return true ; } if ( ! ( obj instanceof org . xwiki . model . reference . EntityReference ) ) { return false ; } org . xwiki . model . reference . EntityReference ref = ( ( org . xwiki . model . reference . EntityReference ) ( obj ) ) ; return ( ( ( name . equals ( ref . name ) ) && ( type . equals ( ref . type ) ) ) && ( ( parent ) == null ? ( ref . parent ) == null : parent . equals ( ref . parent ) ) ) && ( ( parameters ) == null ? ( ref . parameters ) == null : parameters . equals ( ref . parameters ) ) ; }
org . junit . Assert . assertTrue ( reference . equals ( org . xwiki . model . reference . RegexEntityReferenceTest . REFERENCETOMATCH ) )
testSetters ( ) { com . j256 . ormlite . table . DatabaseTableConfig < com . j256 . ormlite . dao . Foo > config = com . j256 . ormlite . table . DatabaseTableConfig . fromClass ( databaseType , com . j256 . ormlite . dao . Foo . class ) ; com . j256 . ormlite . dao . BaseDaoImpl < com . j256 . ormlite . dao . Foo , java . lang . Integer > dao = new com . j256 . ormlite . dao . BaseDaoImpl < com . j256 . ormlite . dao . Foo , java . lang . Integer > ( com . j256 . ormlite . dao . Foo . class ) { } ; dao . setTableConfig ( config ) ; dao . setConnectionSource ( connectionSource ) ; "<AssertPlaceHolder>" ; } getTableConfig ( ) { return tableConfig ; }
org . junit . Assert . assertSame ( config , dao . getTableConfig ( ) )
testUnbalanceExpression ( ) { com . damnhandy . uri . template . impl . UriTemplateParser e = new com . damnhandy . uri . template . impl . UriTemplateParser ( ) ; java . util . List < com . damnhandy . uri . template . UriTemplateComponent > expressions = e . scan ( "http://example.com/{expr/thing/{other}" ) ; "<AssertPlaceHolder>" ; } scan ( java . lang . String ) { char [ ] template = templateString . toCharArray ( ) ; startTemplate ( ) ; int i ; for ( i = 0 ; i < ( template . length ) ; i ++ ) { char c = template [ i ] ; if ( c == ( com . damnhandy . uri . template . impl . UriTemplateParser . EXPR_START ) ) { if ( literalCaptureOn ) { endLiteral ( i ) ; } startExpression ( i ) ; } if ( c != ( com . damnhandy . uri . template . impl . UriTemplateParser . EXPR_START ) ) { startLiteral ( i ) ; } if ( ( expressionCaptureOn ) || ( literalCaptureOn ) ) { capture ( c ) ; } if ( c == ( com . damnhandy . uri . template . impl . UriTemplateParser . EXPR_END ) ) { endExpression ( i ) ; startLiteral ( i ) ; } } if ( literalCaptureOn ) { endLiteral ( i ) ; } endTemplate ( i ) ; return components ; }
org . junit . Assert . assertNotNull ( expressions )
testRemoveIntegerType ( ) { int [ ] inputArray1 = new int [ ] { 0 , 3 , 5 } ; int [ ] outputArray = org . openl . rules . helpers . RulesUtilsTest . instance . testIntegerTypeRemove ( inputArray1 , 1 ) ; "<AssertPlaceHolder>" ; }
org . junit . Assert . assertTrue ( java . util . Arrays . equals ( new int [ ] { 0 , 5 } , outputArray ) )
testSetupComponentWhenConstraintTypeIsExpression ( ) { final org . kie . workbench . common . dmn . api . definition . v1_1 . ConstraintType type = EXPRESSION ; final java . lang . String constraint = "expression" ; final java . lang . String constraintValueType = "string" ; doReturn ( constraint ) . when ( modal ) . getConstraintValue ( ) ; doReturn ( constraintValueType ) . when ( modal ) . getConstraintValueType ( ) ; when ( constraintExpression . getElement ( ) ) . thenReturn ( element ) ; modal . setupComponent ( type ) ; "<AssertPlaceHolder>" ; verify ( constraintExpression ) . setValue ( constraint ) ; verify ( constraintExpression ) . setConstraintValueType ( constraintValueType ) ; verify ( modal ) . enableOkButton ( ) ; verify ( element ) . setAttribute ( "class" , "kie-string" ) ; } getCurrentComponent ( ) { return currentComponent ; }
org . junit . Assert . assertEquals ( constraintExpression , modal . getCurrentComponent ( ) )
equalsContent_shouldIndicateUnequalWhenOnlyAddressSixDiffers ( ) { org . openmrs . PersonAddress rileyStreetAddress = new org . openmrs . PersonAddress ( ) ; org . openmrs . PersonAddress crownStreetAddress = new org . openmrs . PersonAddress ( ) ; crownStreetAddress . setAddress6 ( "crown<sp>street" ) ; rileyStreetAddress . setAddress6 ( "riley<sp>street" ) ; "<AssertPlaceHolder>" ; } equalsContent ( org . openmrs . PersonAttribute ) { boolean returnValue = true ; java . lang . String [ ] methods = new java . lang . String [ ] { "getAttributeType" , "getValue" , "getVoided" } ; java . lang . Class attributeClass = this . getClass ( ) ; for ( java . lang . String methodAttribute : methods ) { try { java . lang . reflect . Method method = attributeClass . getMethod ( methodAttribute ) ; java . lang . Object thisValue = method . invoke ( this ) ; java . lang . Object otherValue = method . invoke ( otherAttribute ) ; if ( otherValue != null ) { returnValue &= otherValue . equals ( thisValue ) ; } } catch ( java . lang . NoSuchMethodException e ) { org . openmrs . PersonAttribute . log . warn ( ( "No<sp>such<sp>method<sp>for<sp>comparison<sp>" + methodAttribute ) , e ) ; } catch ( java . lang . IllegalAccessException | java . lang . reflect . InvocationTargetException e ) { org . openmrs . PersonAttribute . log . error ( "Error<sp>while<sp>comparing<sp>attributes" , e ) ; } } return returnValue ; }
org . junit . Assert . assertThat ( crownStreetAddress . equalsContent ( rileyStreetAddress ) , org . hamcrest . CoreMatchers . is ( false ) )
shouldFindWorldsByName ( ) { for ( org . springframework . data . neo4j . examples . hellograph . World w : galaxy . makeSomeWorlds ( ) ) { "<AssertPlaceHolder>" ; } } findWorldNamed ( java . lang . String ) { return worldRepository . findByPropertyValue ( "name" , name ) ; }
org . junit . Assert . assertNotNull ( galaxy . findWorldNamed ( w . getName ( ) ) )
testIterator ( ) { int i = 0 ; for ( final java . lang . String value : record ) { "<AssertPlaceHolder>" ; i ++ ; } }
org . junit . Assert . assertEquals ( values [ i ] , value )
testBuildWithParameters ( ) { java . lang . String abbrName = "abbrNAme" ; java . lang . String name = "name" ; org . lnu . is . domain . timeperiod . TimePeriodType context = new org . lnu . is . domain . timeperiod . TimePeriodType ( ) ; context . setAbbrName ( abbrName ) ; context . setName ( name ) ; java . lang . String expected = "SELECT<sp>e<sp>FROM<sp>TimePeriodType<sp>e<sp>WHERE<sp>(<sp>e.name<sp>LIKE<sp>CONCAT('%',:name,'%')<sp>AND<sp>e.abbrName<sp>LIKE<sp>CONCAT('%',:abbrName,'%')<sp>)<sp>AND<sp>e.status=:status<sp>AND<sp>e.crtUserGroup<sp>IN<sp>(:userGroups)<sp>" ; org . lnu . is . pagination . MultiplePagedSearch < org . lnu . is . domain . timeperiod . TimePeriodType > pagedSearch = new org . lnu . is . pagination . MultiplePagedSearch ( ) ; pagedSearch . setEntity ( context ) ; java . lang . String actualQuery = unit . build ( pagedSearch ) ; "<AssertPlaceHolder>" ; } setEntity ( T ) { this . entity = entity ; }
org . junit . Assert . assertEquals ( expected , actualQuery )
testGradingPeriod ( ) { org . slc . sli . domain . Entity gp = helper . generateGradingPeriod ( ) ; "<AssertPlaceHolder>" ; } canAccess ( org . slc . sli . domain . Entity ) { return canAccess ( entity , true ) ; }
org . junit . Assert . assertTrue ( validator . canAccess ( gp ) )
testDeepSubBandInlineSubReportDetection ( ) { org . pentaho . reporting . engine . classic . core . SubReport srInner = new org . pentaho . reporting . engine . classic . core . SubReport ( ) ; org . pentaho . reporting . engine . classic . core . Band band = new org . pentaho . reporting . engine . classic . core . Band ( ) ; band . addElement ( srInner ) ; org . pentaho . reporting . engine . classic . core . SubReport sr = new org . pentaho . reporting . engine . classic . core . SubReport ( ) ; sr . getReportHeader ( ) . addElement ( band ) ; org . pentaho . reporting . engine . classic . core . MasterReport report = new org . pentaho . reporting . engine . classic . core . MasterReport ( ) ; report . getReportHeader ( ) . addSubReport ( sr ) ; org . pentaho . reporting . engine . classic . core . modules . output . fast . validator . ReportStructureValidator v = new org . pentaho . reporting . engine . classic . core . modules . output . fast . validator . ReportStructureValidator ( ) ; "<AssertPlaceHolder>" ; } isValidForFastProcessing ( org . pentaho . reporting . engine . classic . core . MasterReport ) { valid = true ; inspect ( report ) ; return valid ; }
org . junit . Assert . assertFalse ( v . isValidForFastProcessing ( report ) )
two_$parent_contains_none_returns_false_when_expression_values_in_array ( ) { com . redhat . lightblue . query . QueryExpression expr = com . redhat . lightblue . eval . EvalTestContext . queryExpressionFromJson ( "{'array':'field6.nf7.nnf1.$parent.$parent.nf5',<sp>'contains':'$none',<sp>'values':[5,null,15,25]}" ) ; com . redhat . lightblue . eval . QueryEvaluator eval = com . redhat . lightblue . eval . QueryEvaluator . getInstance ( expr , md ) ; com . redhat . lightblue . eval . QueryEvaluationContext context = eval . evaluate ( jsonDoc ) ; "<AssertPlaceHolder>" ; } getResult ( ) { return result ; }
org . junit . Assert . assertFalse ( context . getResult ( ) )
testDeleteDown ( ) { setStackStatus ( StatusEnum . DOWN , org . sagebionetworks . repo . web . StackStatusInterceptorTest . MSG_FORMAT ) ; "<AssertPlaceHolder>" ; servletTestHelper . deleteEntity ( dispatchServlet , org . sagebionetworks . repo . model . Project . class , sampleProject . getId ( ) , adminUserId ) ; sampleProject = null ; org . junit . Assert . fail ( ) ; } getCurrentStatus ( ) { return stackStatusDao . getFullCurrentStatus ( ) ; }
org . junit . Assert . assertEquals ( StatusEnum . DOWN , stackStatusDao . getCurrentStatus ( ) )
latest ( ) { int top = 100 ; com . iota . iri . model . Hash milestoneHash = HashFactory . TRANSACTION . create ( "ZBCDEFGHIJKLMNOPQRSTUVWXYZ9ABCDEFGHIJKLMNOPQRSTUVWXYZ9ABCDEFGHIJKLMNOPQRSTUV99999" ) ; com . iota . iri . controllers . MilestoneViewModel milestoneViewModel = new com . iota . iri . controllers . MilestoneViewModel ( top , milestoneHash ) ; milestoneViewModel . store ( com . iota . iri . controllers . MilestoneViewModelTest . tangle ) ; "<AssertPlaceHolder>" ; } latest ( com . iota . iri . storage . Tangle ) { com . iota . iri . utils . Pair < com . iota . iri . storage . Indexable , com . iota . iri . storage . Persistable > milestonePair = tangle . getLatest ( com . iota . iri . model . persistables . Milestone . class , com . iota . iri . model . IntegerIndex . class ) ; if ( ( milestonePair != null ) && ( ( milestonePair . hi ) != null ) ) { com . iota . iri . model . persistables . Milestone milestone = ( ( com . iota . iri . model . persistables . Milestone ) ( milestonePair . hi ) ) ; return new com . iota . iri . controllers . MilestoneViewModel ( milestone ) ; } return null ; }
org . junit . Assert . assertTrue ( ( top == ( com . iota . iri . controllers . MilestoneViewModel . latest ( com . iota . iri . controllers . MilestoneViewModelTest . tangle ) . index ( ) ) ) )
testGetStartCommand_featureDisabled ( ) { properties . setCloudProvider ( com . hazelcast . simulator . harakiri . PROVIDER_EC2 ) ; properties . set ( "HARAKIRI_MONITOR_ENABLED" , "false" ) ; java . lang . String command = com . hazelcast . simulator . harakiri . HarakiriMonitorUtils . getStartHarakiriMonitorCommandOrNull ( properties ) ; "<AssertPlaceHolder>" ; } getStartHarakiriMonitorCommandOrNull ( com . hazelcast . simulator . common . SimulatorProperties ) { if ( ! ( com . hazelcast . simulator . harakiri . HarakiriMonitorUtils . isHarakiriMonitorEnabled ( properties ) ) ) { if ( isEC2 ( properties ) ) { com . hazelcast . simulator . harakiri . HarakiriMonitorUtils . LOGGER . info ( "HarakiriMonitor<sp>is<sp>not<sp>enabled" ) ; } return null ; } java . lang . String waitSeconds = properties . get ( "HARAKIRI_MONITOR_WAIT_SECONDS" ) ; com . hazelcast . simulator . harakiri . HarakiriMonitorUtils . LOGGER . info ( java . lang . String . format ( "HarakiriMonitor<sp>is<sp>enabled<sp>and<sp>will<sp>kill<sp>inactive<sp>EC2<sp>instances<sp>after<sp>%s<sp>seconds" , waitSeconds ) ) ; return java . lang . String . format ( ( "nohup<sp>hazelcast-simulator-%s/bin/harakiri-monitor<sp>--cloudProvider<sp>%s<sp>--cloudIdentity<sp>%s<sp>--cloudCredential<sp>%s" + "<sp>--waitSeconds<sp>%s<sp>><sp>harakiri.out<sp>2><sp>harakiri.err<sp><<sp>/dev/null<sp>&" ) , getSimulatorVersion ( ) , properties . getCloudProvider ( ) , properties . getCloudIdentity ( ) , properties . getCloudCredential ( ) , waitSeconds ) ; }
org . junit . Assert . assertNull ( command )
writeManifest ( ) { java . nio . file . Path manifest = java . nio . file . Files . createTempFile ( "manifest" , ".txt" ) ; com . spotify . hype . gcs . ManifestUtil . write ( com . spotify . hype . gcs . ManifestUtilTest . EXAMPLE , manifest ) ; java . util . List < java . lang . String > expected = java . nio . file . Files . readAllLines ( load ( "/example-manifest.txt" ) ) ; java . util . List < java . lang . String > strings = java . nio . file . Files . readAllLines ( manifest ) ; "<AssertPlaceHolder>" ; } load ( java . lang . String ) { java . net . URL resource = com . spotify . hype . gcs . ManifestUtil . class . getResource ( resourceName ) ; return java . nio . file . Paths . get ( resource . toURI ( ) ) ; }
org . junit . Assert . assertThat ( strings , org . hamcrest . Matchers . is ( expected ) )
testGetSyntaxErrors_DoubleQuotes_Middle ( ) { a . setProperty ( "prop" , "Value<sp>''{0}''<sp>is<sp>expected." ) ; final org . oscm . build . ant . PropertiesSyntaxChecker checker = new org . oscm . build . ant . PropertiesSyntaxChecker ( a ) ; "<AssertPlaceHolder>" ; } getSyntaxSingleQuotesErrorKeys ( ) { java . lang . String singleQuoted = "\'\\{[0-9]+\\}\'" ; java . lang . String notQuote = "[^']{1}" ; java . lang . String begin = ( ( "^(" + singleQuoted ) + notQuote ) + ")" ; java . lang . String end = ( ( "(" + notQuote ) + singleQuoted ) + ")$" ; java . lang . String middle = ( notQuote + singleQuoted ) + notQuote ; java . lang . String exact = ( "^(" + singleQuoted ) + ")$" ; java . lang . String pattern = ( ( ( ( ( ( ( "(" + begin ) + ")|(" ) + middle ) + ")|(" ) + end ) + ")|(" ) + exact ) + ")" ; final java . util . regex . Pattern VAR_PATTERN_SYNTAX = java . util . regex . Pattern . compile ( pattern ) ; java . util . Set < java . util . Map . Entry < java . lang . Object , java . lang . Object > > s = a . entrySet ( ) ; java . util . Iterator < java . util . Map . Entry < java . lang . Object , java . lang . Object > > it = s . iterator ( ) ; final java . util . Set < java . lang . String > result = new java . util . HashSet < java . lang . String > ( ) ; while ( it . hasNext ( ) ) { final java . util . Map . Entry < java . lang . Object , java . lang . Object > propEntry = it . next ( ) ; java . lang . String text = propEntry . getValue ( ) . toString ( ) ; final java . util . regex . Matcher m = VAR_PATTERN_SYNTAX . matcher ( text ) ; if ( m . find ( ) ) { result . add ( propEntry . getKey ( ) . toString ( ) ) ; } } return result ; }
org . junit . Assert . assertEquals ( 0 , checker . getSyntaxSingleQuotesErrorKeys ( ) . size ( ) )
testDocumentationExample6 ( ) { net . objecthunter . exp4j . function . Function logb = new net . objecthunter . exp4j . function . Function ( "logb" , 2 ) { @ net . objecthunter . exp4j . Override public double apply ( double ... args ) { return ( java . lang . Math . Math . log ( args [ 0 ] ) ) / ( java . lang . Math . Math . log ( args [ 1 ] ) ) ; } } ; double result = new net . objecthunter . exp4j . ExpressionBuilder ( "logb(8,<sp>2)" ) . function ( logb ) . build ( ) . evaluate ( ) ; double expected = 3 ; "<AssertPlaceHolder>" ; } evaluate ( ) { final net . objecthunter . exp4j . ArrayStack output = new net . objecthunter . exp4j . ArrayStack ( ) ; for ( int i = 0 ; i < ( tokens . length ) ; i ++ ) { net . objecthunter . exp4j . tokenizer . Token t = tokens [ i ] ; else if ( ( t . getType ( ) ) == ( net . objecthunter . exp4j . tokenizer . Token . TOKEN_VARIABLE ) ) { final java . lang . String name = ( ( net . objecthunter . exp4j . tokenizer . VariableToken ) ( t ) ) . getName ( ) ; final java . lang . Double value = this . variables . get ( name ) ; if ( value == null ) { throw new java . lang . IllegalArgumentException ( ( ( "No<sp>value<sp>has<sp>been<sp>set<sp>for<sp>the<sp>setVariable<sp>'" + name ) + "'." ) ) ; } output . push ( value ) ; } else if ( ( t . getType ( ) ) == ( net . objecthunter . exp4j . tokenizer . Token . TOKEN_OPERATOR ) ) { net . objecthunter . exp4j . tokenizer . OperatorToken op = ( ( net . objecthunter . exp4j . tokenizer . OperatorToken ) ( t ) ) ; if ( ( output . size ( ) ) < ( op . getOperator ( ) . getNumOperands ( ) ) ) { throw new java . lang . IllegalArgumentException ( ( ( "Invalid<sp>number<sp>of<sp>operands<sp>available<sp>for<sp>'" + ( op . getOperator ( ) . getSymbol ( ) ) ) + "'<sp>operator" ) ) ; } if ( ( op . getOperator ( ) . getNumOperands ( ) ) == 2 ) { double rightArg = output . pop ( ) ; double leftArg = output . pop ( ) ; output . push ( op . getOperator ( ) . apply ( leftArg , rightArg ) ) ; } else if ( ( op . getOperator ( ) . getNumOperands ( ) ) == 1 ) { double arg = output . pop ( ) ; output . push ( op . getOperator ( ) . apply ( arg ) ) ; } } else if ( ( t . getType ( ) ) == ( net . objecthunter . exp4j . tokenizer . Token . TOKEN_FUNCTION ) ) { net . objecthunter . exp4j . tokenizer . FunctionToken func = ( ( net . objecthunter . exp4j . tokenizer . FunctionToken ) ( t ) ) ; final int numArguments = func . getFunction ( ) . getNumArguments ( ) ; if ( ( output . size ( ) ) < numArguments ) { throw new java . lang . IllegalArgumentException ( ( ( "Invalid<sp>number<sp>of<sp>arguments<sp>available<sp>for<sp>'" + ( func . getFunction ( ) . getName ( ) ) ) + "'<sp>function" ) ) ; } double [ ] args = new double [ numArguments ] ; for ( int j = numArguments - 1 ; j >= 0 ; j -- ) { args [ j ] = output . pop ( ) ; } output . push ( func . getFunction ( ) . apply ( args ) ) ; } } if ( ( output . size ( ) ) > 1 ) { throw new java . lang . IllegalArgumentException ( "Invalid<sp>number<sp>of<sp>items<sp>on<sp>the<sp>output<sp>queue.<sp>Might<sp>be<sp>caused<sp>by<sp>an<sp>invalid<sp>number<sp>of<sp>arguments<sp>for<sp>a<sp>function." ) ; } return output . pop ( ) ; }
org . junit . Assert . assertEquals ( expected , result , 0.0 )
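A compilable form of the custom logb function exercised in the testDocumentationExample6 row above, written as a minimal sketch against the net.objecthunter.exp4j Function and ExpressionBuilder types that the row itself references (the standalone class name and main method are illustrative, not part of the row):

import net.objecthunter.exp4j.ExpressionBuilder;
import net.objecthunter.exp4j.function.Function;

public class LogbExample {

    public static void main(String[] args) {
        // Custom two-argument logarithm: logb(x, base) = ln(x) / ln(base).
        Function logb = new Function("logb", 2) {
            @Override
            public double apply(double... values) {
                return Math.log(values[0]) / Math.log(values[1]);
            }
        };

        // Evaluate "logb(8, 2)" with the custom function registered; expected value is 3.0.
        double result = new ExpressionBuilder("logb(8, 2)")
                .function(logb)
                .build()
                .evaluate();

        System.out.println(result);
    }
}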
testService ( ) { try ( org . nuxeo . ecm . directory . Session session = directoryService . open ( OAuth2ClientService . OAUTH2CLIENT_DIRECTORY_NAME ) ) { java . util . Map < java . lang . String , java . io . Serializable > filter = new java . util . HashMap ( ) ; filter . put ( "clientId" , org . nuxeo . adobe . cc . NuxeoAdobeConnectorService . ADOBE_CC_CLIENT_ID ) ; "<AssertPlaceHolder>" ; } } query ( java . lang . String ) { return discService . query ( repositoryId , statement , Boolean . TRUE , null , null , null , null , null , null ) ; }
org . junit . Assert . assertEquals ( 1 , session . query ( filter ) . size ( ) )
testIntersectionOfTwoLists_TotalCount_Test2 ( ) { java . util . List < java . lang . Integer > listOne = new java . util . ArrayList < java . lang . Integer > ( ) ; listOne . add ( 1000 ) ; listOne . add ( 2000 ) ; listOne . add ( 3000 ) ; listOne . add ( 4000 ) ; listOne . add ( 5000 ) ; listOne . add ( 6000 ) ; java . util . List < java . lang . Integer > listTwo = new java . util . ArrayList < java . lang . Integer > ( ) ; listTwo . add ( 2000 ) ; listTwo . add ( 3000 ) ; listTwo . add ( 4000 ) ; listTwo . add ( 5000 ) ; listTwo . add ( 6000 ) ; listTwo . add ( 7000 ) ; java . util . List < java . lang . Integer > expected = new java . util . ArrayList < java . lang . Integer > ( ) ; expected . add ( 2000 ) ; expected . add ( 30000 ) ; expected . add ( 4000 ) ; expected . add ( 5000 ) ; expected . add ( 60000 ) ; java . util . Collection < java . lang . Integer > actual = info . michaelkohler . helpertools . collections . CollectionHelper . intersect ( listOne , listTwo ) ; "<AssertPlaceHolder>" ; } intersect ( java . util . Collection , java . util . Collection ) { if ( groupOne == null ) return groupTwo ; else if ( groupTwo == null ) return groupOne ; else { java . util . Collection < T > results = new java . util . HashSet < T > ( ) ; for ( T listOneItem : groupOne ) if ( groupTwo . contains ( listOneItem ) ) results . add ( listOneItem ) ; return results ; } }
org . junit . Assert . assertEquals ( expected . size ( ) , actual . size ( ) )
handleExactResult ( ) { java . lang . String longstr = org . apache . commons . lang . RandomStringUtils . randomAlphanumeric ( JobStatus . RESULT_COL_LENGTH ) ; status . setResult ( longstr ) ; "<AssertPlaceHolder>" ; } getResult ( ) { return result ; }
org . junit . Assert . assertEquals ( longstr . substring ( 0 , JobStatus . RESULT_COL_LENGTH ) , status . getResult ( ) )
getOptionsNotNull ( ) { "<AssertPlaceHolder>" ; } getOptions ( ) { org . apache . commons . cli . Options options = new org . apache . commons . cli . Options ( ) ; org . apache . commons . cli . Option option = org . apache . commons . cli . Option . builder ( ) . longOpt ( com . archimatetool . commandline . providers . SaveModelProvider . OPTION_SAVE_MODEL ) . hasArg ( ) . argName ( Messages . SaveModelProvider_1 ) . desc ( Messages . SaveModelProvider_2 ) . build ( ) ; options . addOption ( option ) ; return options ; }
org . junit . Assert . assertNotNull ( provider . getOptions ( ) )
testAuthenticateUserNullJaasEntryName ( ) { mockery . checking ( new org . jmock . Expectations ( ) { { allowing ( authnService ) . isAllowHashTableLoginWithIdOnly ( ) ; will ( returnValue ( true ) ) ; } } ) ; final javax . security . auth . Subject partialSubject = authnHelper . createPartialSubject ( com . ibm . ws . security . authentication . helper . AuthenticateUserHelperTest . userName , authnService , null ) ; mockery . checking ( new org . jmock . Expectations ( ) { { allowing ( authnService ) . isAllowHashTableLoginWithIdOnly ( ) ; will ( returnValue ( true ) ) ; allowing ( authnService ) . authenticate ( JaasLoginConfigConstants . SYSTEM_DEFAULT , partialSubject ) ; will ( returnValue ( authenticatedSubject ) ) ; } } ) ; "<AssertPlaceHolder>" ; } authenticateUser ( com . ibm . ws . security . authentication . AuthenticationService , java . lang . String , java . lang . String ) { return authenticateUser ( authenticationService , userName , jaasEntryName , null ) ; }
org . junit . Assert . assertNotNull ( authnHelper . authenticateUser ( authnService , com . ibm . ws . security . authentication . helper . AuthenticateUserHelperTest . userName , null ) )
shouldReturnSupportedViews ( ) { org . openmrs . obs . handler . MediaHandler handler = new org . openmrs . obs . handler . MediaHandler ( ) ; java . lang . String [ ] actualViews = handler . getSupportedViews ( ) ; java . lang . String [ ] expectedViews = new java . lang . String [ ] { ComplexObsHandler . RAW_VIEW } ; "<AssertPlaceHolder>" ; } getSupportedViews ( ) { return new java . lang . String [ 0 ] ; }
org . junit . Assert . assertArrayEquals ( actualViews , expectedViews )
testSkipIncluded ( ) { byte [ ] bytes = new byte [ 100 ] ; java . io . InputStream is = new com . amazonaws . util . LengthCheckInputStream ( new java . io . ByteArrayInputStream ( bytes ) , 100 , com . amazonaws . util . LengthCheckInputStream . INCLUDE_SKIPPED_BYTES ) ; "<AssertPlaceHolder>" ; com . amazonaws . util . StreamUtils . consumeInputStream ( is ) ; is . close ( ) ; } skip ( long ) { hasBeenAccessed = true ; return super . skip ( n ) ; }
org . junit . Assert . assertTrue ( ( 10 == ( is . skip ( 10 ) ) ) )
detachParent_detachLastChildOnDetachFirst_oneDetachEvent ( ) { com . vaadin . flow . internal . StateNodeTest . TestStateTree tree = new com . vaadin . flow . internal . StateNodeTest . TestStateTree ( ) ; com . vaadin . flow . internal . StateNode a = com . vaadin . flow . internal . StateNodeTest . createEmptyNode ( "a" ) ; com . vaadin . flow . internal . StateNode b = com . vaadin . flow . internal . StateNodeTest . createEmptyNode ( "b" ) ; com . vaadin . flow . internal . StateNode parent = com . vaadin . flow . internal . StateNodeTest . createParentNode ( "parent" ) ; addChild ( parent , a ) ; addChild ( parent , b ) ; addChild ( tree . getRootNode ( ) , parent ) ; java . util . concurrent . atomic . AtomicInteger detachEvents = new java . util . concurrent . atomic . AtomicInteger ( ) ; a . addDetachListener ( ( ) -> removeFromParent ( a ) ) ; b . addDetachListener ( ( ) -> detachEvents . incrementAndGet ( ) ) ; removeFromParent ( parent ) ; "<AssertPlaceHolder>" ; } get ( ) { return com . vaadin . flow . dom . impl . BasicTextElementStateProvider . INSTANCE ; }
org . junit . Assert . assertEquals ( 1 , detachEvents . get ( ) )
should_save_url_when_redirected_to_login ( ) { br . com . caelum . vraptor . test . requestflow . UserFlow navigation = createQuestionPage ( navigate ( ) ) ; br . com . caelum . vraptor . test . VRaptorTestResult navigationResult = navigation . followRedirect ( ) . execute ( ) ; navigationResult . wasStatus ( 200 ) . isValid ( ) ; org . jsoup . select . Elements redirectInput = getElementsByAttributeAndValue ( navigationResult , "name" , "redirectUrl" ) ; java . lang . String redirectUrl = redirectInput . first ( ) . attr ( "value" ) ; java . lang . String expectedUrl = rootPath ( navigationResult ) . concat ( "/perguntar" ) ; "<AssertPlaceHolder>" ; } rootPath ( br . com . caelum . vraptor . test . VRaptorTestResult ) { java . lang . String resultUrl = navigationResult . getRequest ( ) . getRequestURL ( ) . toString ( ) ; java . lang . String requestURI = navigationResult . getRequest ( ) . getRequestURI ( ) ; return resultUrl . replace ( requestURI , "" ) ; }
org . junit . Assert . assertThat ( redirectUrl , org . hamcrest . Matchers . equalTo ( expectedUrl ) )
doesNotUnzipExternalPaths ( ) { final java . lang . String resourceName = "dummy.xml" ; final java . lang . String resourceAlias = java . nio . file . Paths . get ( "folder" , ".." , ".." , resourceName ) . toString ( ) ; final java . io . File compressedFile = new java . io . File ( toDir , "test.zip" ) ; compress ( compressedFile , new org . mule . tck . ZipUtils . ZipResource [ ] { new org . mule . tck . ZipUtils . ZipResource ( resourceName , resourceName ) , new org . mule . tck . ZipUtils . ZipResource ( resourceName , resourceAlias ) } ) ; thrownException . expect ( org . mule . runtime . core . api . util . compression . InvalidZipFileException . class ) ; thrownException . expectMessage ( ( "External<sp>paths<sp>are<sp>not<sp>allowed:<sp>" + resourceAlias ) ) ; org . mule . runtime . core . api . util . FileUtils . unzip ( compressedFile , toDir ) ; "<AssertPlaceHolder>" ; } exists ( ) { return org . mule . runtime . module . deployment . internal . DefaultArchiveDeployer . allResourcesExist ( initialResourceFiles . keySet ( ) . toArray ( new java . io . File [ initialResourceFiles . size ( ) ] ) ) ; }
org . junit . Assert . assertThat ( new java . io . File ( toDir , resourceName ) . exists ( ) , org . hamcrest . Matchers . is ( false ) )
delete3 ( ) { this . save ( ) ; int i = jdbcDao . delete ( com . dexcoder . test . model . User . class , ( - 2L ) ) ; "<AssertPlaceHolder>" ; } delete ( java . lang . Class , java . io . Serializable ) { com . dexcoder . dal . build . Criteria criteria = com . dexcoder . dal . build . Criteria . delete ( clazz ) . mappingHandler ( getMappingHandler ( ) ) ; com . dexcoder . dal . BoundSql boundSql = criteria . where ( criteria . getPkField ( ) , new java . lang . Object [ ] { id } ) . build ( true ) ; return jdbcTemplate . update ( boundSql . getSql ( ) , boundSql . getParameters ( ) . toArray ( ) ) ; }
org . junit . Assert . assertEquals ( i , 1 )
shouldConvertLongToId ( ) { org . openkilda . model . Cookie cookie = new org . openkilda . model . Cookie ( ( ( long ) ( 291 ) ) ) ; org . openkilda . model . Cookie actualEntity = new org . openkilda . persistence . converters . CookieConverter ( ) . toEntityAttribute ( cookie . getValue ( ) ) ; "<AssertPlaceHolder>" ; } getValue ( ) { return value ; }
org . junit . Assert . assertEquals ( cookie , actualEntity )
testAuthIdProperty ( ) { jp . eisbahn . oauth2 . server . models . AccessToken target = new jp . eisbahn . oauth2 . server . models . AccessToken ( ) ; target . setAuthId ( "authId1" ) ; "<AssertPlaceHolder>" ; } getAuthId ( ) { return authId ; }
org . junit . Assert . assertEquals ( "authId1" , target . getAuthId ( ) )
classToINstanceMap_example ( ) { com . levelup . java . guava . ClassToInstanceMapExample . Person person = new com . levelup . java . guava . ClassToInstanceMapExample . Person ( "Jackson" ) ; com . levelup . java . guava . ClassToInstanceMapExample . Jobs jobs = new com . levelup . java . guava . ClassToInstanceMapExample . Jobs ( "IT<sp>person" ) ; com . levelup . java . guava . ClassToInstanceMapExample . Address address = new com . levelup . java . guava . ClassToInstanceMapExample . Address ( "505<sp>Williams<sp>Street" ) ; com . google . common . collect . ClassToInstanceMap < java . lang . Object > classToInstanceMap = com . google . common . collect . MutableClassToInstanceMap . create ( ) ; classToInstanceMap . put ( com . levelup . java . guava . ClassToInstanceMapExample . Person . class , person ) ; classToInstanceMap . put ( com . levelup . java . guava . ClassToInstanceMapExample . Jobs . class , jobs ) ; classToInstanceMap . put ( com . levelup . java . guava . ClassToInstanceMapExample . Address . class , address ) ; com . levelup . java . guava . ClassToInstanceMapExample . logger . info ( classToInstanceMap ) ; "<AssertPlaceHolder>" ; } getJobName ( ) { return jobName ; }
org . junit . Assert . assertEquals ( "IT<sp>person" , classToInstanceMap . getInstance ( com . levelup . java . guava . ClassToInstanceMapExample . Jobs . class ) . getJobName ( ) )
testCompareResults ( ) { org . gitlab4j . api . models . CompareResults compareResults = org . gitlab4j . api . JsonUtils . unmarshalResource ( org . gitlab4j . api . models . CompareResults . class , "compare-results.json" ) ; "<AssertPlaceHolder>" ; } compareJson ( T , java . lang . String ) { java . io . InputStreamReader reader = new java . io . InputStreamReader ( org . gitlab4j . api . TestGitLabApiBeans . class . getResourceAsStream ( filename ) ) ; return org . gitlab4j . api . JsonUtils . compareJson ( apiObject , reader ) ; }
org . junit . Assert . assertTrue ( org . gitlab4j . api . JsonUtils . compareJson ( compareResults , "compare-results.json" ) )
takesNoParametersShouldReturnFalseWhenMethodTakesOneParameter ( ) { final org . pitest . mutationtest . engine . gregor . MethodInfo testee = this . methodInfo . withMethodDescriptor ( org . pitest . mutationtest . engine . gregor . MethodInfoTest . ONE_PARAMETER ) ; "<AssertPlaceHolder>" ; } takesNoParameters ( ) { return this . methodDescriptor . startsWith ( "()" ) ; }
org . junit . Assert . assertThat ( testee . takesNoParameters ( ) , org . hamcrest . Matchers . is ( false ) )
testAliasPastTop2 ( ) { final org . apache . calcite . tools . RelBuilder builder = org . apache . calcite . tools . RelBuilder . create ( org . apache . calcite . test . RelBuilderTest . config ( ) . build ( ) ) ; org . apache . calcite . rel . RelNode root = builder . scan ( "EMP" ) . as ( "t1" ) . scan ( "EMP" ) . as ( "t2" ) . join ( JoinRelType . INNER , builder . equals ( builder . field ( 2 , "t1" , "EMPNO" ) , builder . field ( 2 , "t2" , "EMPNO" ) ) ) . scan ( "LogicalJoin(condition=[AND(=($7,<sp>$16),<sp><>($10,<sp>$18))],<sp>joinType=[inner])\n" 0 ) . as ( "t3" ) . join ( JoinRelType . INNER , builder . equals ( builder . field ( 2 , "t1" , "DEPTNO" ) , builder . field ( 2 , "t3" , "DEPTNO" ) ) , builder . not ( builder . equals ( builder . field ( 2 , "t2" , "JOB" ) , builder . field ( 2 , "t3" , "LOC" ) ) ) ) . build ( ) ; final java . lang . String expected = "" + ( ( ( ( "LogicalJoin(condition=[AND(=($7,<sp>$16),<sp><>($10,<sp>$18))],<sp>joinType=[inner])\n" + "LogicalJoin(condition=[AND(=($7,<sp>$16),<sp><>($10,<sp>$18))],<sp>joinType=[inner])\n" 2 ) + "LogicalJoin(condition=[AND(=($7,<sp>$16),<sp><>($10,<sp>$18))],<sp>joinType=[inner])\n" 1 ) + "LogicalJoin(condition=[AND(=($7,<sp>$16),<sp><>($10,<sp>$18))],<sp>joinType=[inner])\n" 1 ) + "LogicalJoin(condition=[AND(=($7,<sp>$16),<sp><>($10,<sp>$18))],<sp>joinType=[inner])\n" 3 ) ; "<AssertPlaceHolder>" ; } hasTree ( java . lang . String ) { return org . apache . calcite . test . Matchers . compose ( org . hamcrest . core . Is . is ( value ) , ( input ) -> { return org . apache . calcite . util . Util . toLinux ( org . apache . calcite . plan . RelOptUtil . toString ( input ) ) ; } ) ; }
org . junit . Assert . assertThat ( root , org . apache . calcite . test . Matchers . hasTree ( expected ) )
testReservedSpdySynStreamFrameBits ( ) { short type = 1 ; byte flags = 0 ; int length = 10 ; int streamId = ( ( io . netty . handler . codec . spdy . SpdyFrameDecoderTest . RANDOM . nextInt ( ) ) & 2147483647 ) | 1 ; int associatedToStreamId = ( io . netty . handler . codec . spdy . SpdyFrameDecoderTest . RANDOM . nextInt ( ) ) & 2147483647 ; byte priority = ( ( byte ) ( ( io . netty . handler . codec . spdy . SpdyFrameDecoderTest . RANDOM . nextInt ( ) ) & 7 ) ) ; io . netty . buffer . ByteBuf buf = io . netty . buffer . Unpooled . buffer ( ( ( io . netty . handler . codec . spdy . SpdyCodecUtil . SPDY_HEADER_SIZE ) + length ) ) ; io . netty . handler . codec . spdy . SpdyFrameDecoderTest . encodeControlFrameHeader ( buf , type , flags , length ) ; buf . writeInt ( ( streamId | - 2147483648 ) ) ; buf . writeInt ( ( associatedToStreamId | - 2147483648 ) ) ; buf . writeByte ( ( ( priority << 5 ) | 31 ) ) ; buf . writeByte ( 255 ) ; decoder . decode ( buf ) ; verify ( delegate ) . readSynStreamFrame ( streamId , associatedToStreamId , priority , false , false ) ; verify ( delegate ) . readHeaderBlockEnd ( ) ; "<AssertPlaceHolder>" ; buf . release ( ) ; } isReadable ( ) { return ( writerIndex ) > ( readerIndex ) ; }
org . junit . Assert . assertFalse ( buf . isReadable ( ) )
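Read as ordinary JUnit 4 source, each pair above appears to combine into a single test method: quoted strings use <sp> where a literal space would appear, and the assertion line takes the place of "<AssertPlaceHolder>". As one worked illustration, the testAuthIdProperty pair earlier in this list reassembles into the sketch below; the jp.eisbahn.oauth2.server.models.AccessToken bean and its setAuthId/getAuthId accessors are taken from that row, while the wrapping class name is illustrative.

import static org.junit.Assert.assertEquals;

import jp.eisbahn.oauth2.server.models.AccessToken;
import org.junit.Test;

public class AccessTokenAuthIdTest {

    // Reassembled from the testAuthIdProperty pair: the tokenized test body is restored
    // to normal Java and the paired assertion replaces <AssertPlaceHolder>.
    @Test
    public void testAuthIdProperty() {
        AccessToken target = new AccessToken();
        target.setAuthId("authId1");
        assertEquals("authId1", target.getAuthId());
    }
}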