| input (stringlengths 28–18.7k) | output (stringlengths 39–1.69k) |
|---|---|
testContentType ( ) { com . woonoz . proxy . servlet . UrlRewriter rewriter = org . easymock . EasyMock . createMock ( com . woonoz . proxy . servlet . UrlRewriter . class ) ; org . easymock . EasyMock . replay ( rewriter ) ; com . woonoz . proxy . servlet . AbstractHeadersHandler handler = new com . woonoz . proxy . servlet . HttpEntityEnclosingHeadersHandler ( rewriter ) ; java . lang . String headerValue = "text/xml" ; java . lang . String actualValue = handler . handleHeader ( "Content-Type" , headerValue ) ; "<AssertPlaceHolder>" ; org . easymock . EasyMock . verify ( rewriter ) ; } handleHeader ( java . lang . String , java . lang . String ) { com . woonoz . proxy . servlet . HeadersToSubstitute handler = headersToHandle . get ( toLower ( headerName ) ) ; if ( handler != null ) { return handler . handleValue ( headerValue , urlRewriter ) ; } else { return headerValue ; } }
|
org . junit . Assert . assertEquals ( headerValue , actualValue )
|
toJSON_TooltipWithPointFormatter_serializedAsFunction ( ) { com . vaadin . addon . charts . model . Tooltip tooltip = new com . vaadin . addon . charts . model . Tooltip ( true ) ; tooltip . setPointFormatter ( "this.y<sp>units" ) ; java . lang . String json = toJSON ( tooltip ) ; java . lang . String expected = "{\"enabled\":true,\"_fn_pointFormatter\":\"this.y<sp>units\"}" ; "<AssertPlaceHolder>" ; } toJSON ( com . vaadin . addon . charts . model . AbstractConfigurationObject ) { try { return com . vaadin . addon . charts . util . ChartSerialization . jsonWriter . writeValueAsString ( object ) ; } catch ( com . fasterxml . jackson . core . JsonProcessingException e ) { e . printStackTrace ( ) ; throw new java . lang . RuntimeException ( ( "Error<sp>while<sp>serializing<sp>" + ( object . getClass ( ) . getSimpleName ( ) ) ) , e ) ; } }
|
org . junit . Assert . assertEquals ( expected , json )
|
testAndTemporalMeets ( ) { final org . opengis . temporal . Period p = period ( "2016-01-01T00:00:00.000-0500" , "2106-01-02T00:00:00.000-0500" ) ; final org . opengis . filter . Filter f = ff . and ( ff . bbox ( "geom" , ( - 10 ) , ( - 10 ) , 10 , 10 , null ) , ff . meets ( ff . literal ( "someDate" ) , ff . literal ( p ) ) ) ; final org . locationtech . jts . geom . Envelope env = ( ( org . locationtech . jts . geom . Envelope ) ( f . accept ( visitor , null ) ) ) ; "<AssertPlaceHolder>" ; } accept ( org . opengis . filter . expression . ExpressionVisitor , java . lang . Object ) { return visitor . visit ( this , extraData ) ; }
|
org . junit . Assert . assertEquals ( new org . locationtech . jts . geom . Envelope ( ( - 10 ) , 10 , ( - 10 ) , 10 ) , env )
|
missingLastRevSeeker ( ) { "<AssertPlaceHolder>" ; } createMissingLastRevSeeker ( ) { final org . apache . jackrabbit . oak . plugins . document . DocumentStore store = org . apache . jackrabbit . oak . plugins . document . mongo . MongoDocumentNodeStoreBuilderBase . getDocumentStore ( ) ; if ( store instanceof org . apache . jackrabbit . oak . plugins . document . mongo . MongoDocumentStore ) { return new org . apache . jackrabbit . oak . plugins . document . mongo . MongoMissingLastRevSeeker ( ( ( org . apache . jackrabbit . oak . plugins . document . mongo . MongoDocumentStore ) ( store ) ) , org . apache . jackrabbit . oak . plugins . document . mongo . MongoDocumentNodeStoreBuilderBase . getClock ( ) ) ; } else { return super . createMissingLastRevSeeker ( ) ; } }
|
org . junit . Assert . assertTrue ( ( ( builder . createMissingLastRevSeeker ( ) ) instanceof org . apache . jackrabbit . oak . plugins . document . mongo . MongoMissingLastRevSeeker ) )
|
stream ( ) { "<AssertPlaceHolder>" ; } stream ( ) { org . junit . Assert . assertThat ( seq . stream ( ) . collect ( java . util . stream . Collectors . toList ( ) ) , org . hamcrest . Matchers . contains ( new java . lang . Integer ( 1 ) , 3 , 5 , 7 , 9 ) ) ; }
|
org . junit . Assert . assertThat ( seq . stream ( ) . collect ( java . util . stream . Collectors . toList ( ) ) , org . hamcrest . Matchers . contains ( new java . lang . Integer ( 1 ) , 3 , 5 , 7 , 9 ) )
|
testColor ( ) { net . kyori . text . Component component = TextParsers . LEGACY . parse ( ( ( org . bukkit . ChatColor . COLOR_CHAR ) + "afoo" ) ) ; net . kyori . text . TextComponent expected = net . kyori . text . TextComponent . of ( "foo" ) . color ( TextColor . GREEN ) ; "<AssertPlaceHolder>" ; } parse ( java . lang . String ) { try { source = ( "<span>" + source ) + "</span>" ; javax . xml . bind . JAXBContext context = javax . xml . bind . JAXBContext . newInstance ( com . supaham . commons . bukkit . text . xml . Element . class ) ; javax . xml . bind . Unmarshaller unmarshaller = context . createUnmarshaller ( ) ; com . supaham . commons . bukkit . text . xml . Element tag = ( ( com . supaham . commons . bukkit . text . xml . Element ) ( unmarshaller . unmarshal ( new java . io . StringReader ( source ) ) ) ) ; net . kyori . text . TextComponent . Builder builder = net . kyori . text . TextComponent . builder ( ) . content ( "" ) ; tag . apply ( builder ) ; tag . loop ( builder ) ; return builder . build ( ) ; } catch ( java . lang . Exception e ) { throw new java . lang . RuntimeException ( ( "Failed<sp>to<sp>parse:<sp>" + source ) , e ) ; } }
|
org . junit . Assert . assertEquals ( expected , component )
|
testInvalidNonSerializableDependentInjection ( ) { java . util . Collection < java . lang . String > beanXmls = new java . util . ArrayList < java . lang . String > ( ) ; java . util . Collection < java . lang . Class < ? > > beanClasses = new java . util . ArrayList < java . lang . Class < ? > > ( ) ; beanClasses . add ( org . apache . webbeans . test . injection . serialization . beans . ProducerWithNonSerializableParameterBean . class ) ; beanClasses . add ( org . apache . webbeans . test . injection . serialization . beans . SerializableBean . class ) ; beanClasses . add ( org . apache . webbeans . test . injection . serialization . beans . NonSerializableDependentBean . class ) ; try { startContainer ( beanClasses , beanXmls ) ; org . apache . webbeans . test . injection . serialization . beans . SerializableBean sb = getInstance ( org . apache . webbeans . test . injection . serialization . beans . SerializableBean . class ) ; "<AssertPlaceHolder>" ; } finally { shutDownContainer ( ) ; } } getInstance ( javax . enterprise . inject . spi . Bean ) { return ( ( T ) ( manager . getReference ( bean , null , manager . createCreationalContext ( bean ) ) ) ) ; }
|
org . junit . Assert . assertNotNull ( sb )
|
removeEmptyCollection ( ) { final org . exist . storage . BrokerPool brokerPool = existEmbeddedServer . getBrokerPool ( ) ; final org . exist . storage . lock . LockTable lockTable = brokerPool . getLockManager ( ) . getLockTable ( ) ; lockTable . setTraceStackDepth ( org . exist . storage . NativeBrokerLockingTest . TRACE_STACK_DEPTH ) ; final org . exist . storage . NativeBrokerLockingTest . LockSymmetryListener lockSymmetryListener = new org . exist . storage . NativeBrokerLockingTest . LockSymmetryListener ( ) ; boolean registered = false ; try { try ( final org . exist . storage . DBBroker broker = brokerPool . get ( java . util . Optional . of ( brokerPool . getSecurityManager ( ) . getSystemSubject ( ) ) ) ; final org . exist . storage . txn . Txn transaction = brokerPool . getTransactionManager ( ) . beginTransaction ( ) ) { final org . exist . collections . Collection collectionA = broker . getCollection ( org . exist . storage . NativeBrokerLockingTest . COLLECTION_A ) ; lockTable . registerListener ( lockSymmetryListener ) ; while ( ! ( lockSymmetryListener . isRegistered ( ) ) ) ; registered = true ; broker . removeCollection ( transaction , collectionA ) ; transaction . commit ( ) ; } } finally { if ( registered ) { lockTable . deregisterListener ( lockSymmetryListener ) ; } } while ( lockSymmetryListener . isRegistered ( ) ) { } "<AssertPlaceHolder>" ; } isSymmetrical ( ) { return ( ! ( error . get ( ) ) ) && ( events . empty ( ) ) ; }
|
org . junit . Assert . assertTrue ( lockSymmetryListener . isSymmetrical ( ) )
|
forEach ( ) { super . forEach ( ) ; java . lang . String [ ] sum = new java . lang . String [ 1 ] ; sum [ 0 ] = "" ; this . classUnderTest ( ) . forEach ( ( each ) -> sum [ 0 ] += each ) ; java . lang . StringBuilder expectedString = new java . lang . StringBuilder ( ) ; for ( int i = 0 ; i < ( this . classUnderTest ( ) . size ( ) ) ; i ++ ) { expectedString . append ( ( ( i & 1 ) == 0 ) ) ; } "<AssertPlaceHolder>" ; } toString ( ) { return this . appendable . toString ( ) ; }
|
org . junit . Assert . assertEquals ( expectedString . toString ( ) , sum [ 0 ] )
|
testCacheBlockNextBlockMetadataMissing ( ) { long maxSize = 100000 ; long blockSize = calculateBlockSize ( maxSize , 10 ) ; int size = 100 ; int length = ( org . apache . hadoop . hbase . HConstants . HFILEBLOCK_HEADER_SIZE ) + size ; byte [ ] byteArr = new byte [ length ] ; java . nio . ByteBuffer buf = java . nio . ByteBuffer . wrap ( byteArr , 0 , size ) ; org . apache . hadoop . hbase . io . hfile . HFileContext meta = new org . apache . hadoop . hbase . io . hfile . HFileContextBuilder ( ) . build ( ) ; org . apache . hadoop . hbase . io . hfile . HFileBlock blockWithNextBlockMetadata = new org . apache . hadoop . hbase . io . hfile . HFileBlock ( BlockType . DATA , size , size , ( - 1 ) , buf , HFileBlock . FILL_HEADER , ( - 1 ) , 52 , ( - 1 ) , meta ) ; org . apache . hadoop . hbase . io . hfile . HFileBlock blockWithoutNextBlockMetadata = new org . apache . hadoop . hbase . io . hfile . HFileBlock ( BlockType . DATA , size , size , ( - 1 ) , buf , HFileBlock . FILL_HEADER , ( - 1 ) , ( - 1 ) , ( - 1 ) , meta ) ; org . apache . hadoop . hbase . io . hfile . LruBlockCache cache = new org . apache . hadoop . hbase . io . hfile . LruBlockCache ( maxSize , blockSize , false , ( ( int ) ( java . lang . Math . ceil ( ( ( 1.2 * maxSize ) / blockSize ) ) ) ) , LruBlockCache . DEFAULT_LOAD_FACTOR , LruBlockCache . DEFAULT_CONCURRENCY_LEVEL , 0.66F , 0.99F , 0.33F , 0.33F , 0.34F , 1.2F , false , 1024 ) ; org . apache . hadoop . hbase . io . hfile . BlockCacheKey key = new org . apache . hadoop . hbase . io . hfile . BlockCacheKey ( "key1" , 0 ) ; java . nio . ByteBuffer actualBuffer = java . nio . ByteBuffer . allocate ( length ) ; java . nio . ByteBuffer block1Buffer = java . nio . ByteBuffer . allocate ( length ) ; java . nio . ByteBuffer block2Buffer = java . nio . ByteBuffer . allocate ( length ) ; blockWithNextBlockMetadata . serialize ( block1Buffer , true ) ; blockWithoutNextBlockMetadata . serialize ( block2Buffer , true ) ; org . apache . hadoop . hbase . io . hfile . CacheTestUtils . getBlockAndAssertEquals ( cache , key , blockWithNextBlockMetadata , actualBuffer , block1Buffer ) ; org . apache . hadoop . hbase . io . hfile . CacheTestUtils . getBlockAndAssertEquals ( cache , key , blockWithoutNextBlockMetadata , actualBuffer , block1Buffer ) ; cache . clearCache ( ) ; "<AssertPlaceHolder>" ; org . apache . hadoop . hbase . io . hfile . CacheTestUtils . getBlockAndAssertEquals ( cache , key , blockWithoutNextBlockMetadata , actualBuffer , block2Buffer ) ; org . apache . hadoop . hbase . io . hfile . CacheTestUtils . getBlockAndAssertEquals ( cache , key , blockWithNextBlockMetadata , actualBuffer , block1Buffer ) ; } getBlock ( org . apache . hadoop . hbase . io . hfile . BlockCacheKey , boolean , boolean , boolean ) { org . apache . hadoop . hbase . io . hfile . HFileBlock result = null ; try ( org . apache . htrace . core . TraceScope traceScope = org . apache . hadoop . hbase . trace . TraceUtil . createTrace ( "MemcachedBlockCache.getBlock" ) ) { result = client . get ( cacheKey . toString ( ) , tc ) ; } catch ( java . lang . Exception e ) { if ( org . apache . hadoop . hbase . io . hfile . MemcachedBlockCache . LOG . isDebugEnabled ( ) ) { org . apache . hadoop . hbase . io . hfile . MemcachedBlockCache . LOG . debug ( ( ( "Exception<sp>pulling<sp>from<sp>memcached<sp>[<sp>" + ( cacheKey . toString ( ) ) ) + "<sp>].<sp>Treating<sp>as<sp>a<sp>miss." ) , e ) ; } result = null ; } finally { if ( updateCacheMetrics ) { if ( result == null ) { cacheStats . miss ( caching , cacheKey . isPrimary ( ) , cacheKey . getBlockType ( ) ) ; } else { cacheStats . hit ( caching , cacheKey . isPrimary ( ) , cacheKey . getBlockType ( ) ) ; } } } return result ; }
|
org . junit . Assert . assertNull ( cache . getBlock ( key , false , false , false ) )
|
getHoldabilityShouldReturnCorrectHoldability ( ) { java . sql . ResultSet resultSet = org . neo4j . jdbc . bolt . BoltNeo4jResultSet . newInstance ( false , null , null , Neo4jResultSet . TYPE_FORWARD_ONLY , Neo4jResultSet . CONCUR_READ_ONLY , Neo4jResultSet . CLOSE_CURSORS_AT_COMMIT ) ; "<AssertPlaceHolder>" ; } getHoldability ( ) { checkClosed ( ) ; return this . holdability ; }
|
org . junit . Assert . assertEquals ( Neo4jResultSet . CLOSE_CURSORS_AT_COMMIT , resultSet . getHoldability ( ) )
|
getProvidersByRole_shouldReturnEmptySetForNullRole ( ) { org . openmrs . Encounter encounter = new org . openmrs . Encounter ( ) ; org . openmrs . EncounterRole role = new org . openmrs . EncounterRole ( ) ; org . openmrs . Provider provider = new org . openmrs . Provider ( ) ; encounter . addProvider ( role , provider ) ; java . util . Set < org . openmrs . Provider > providers = encounter . getProvidersByRole ( null ) ; "<AssertPlaceHolder>" ; } size ( ) { return getMemberships ( ) . stream ( ) . filter ( ( m ) -> ! ( m . getVoided ( ) ) ) . collect ( java . util . stream . Collectors . toList ( ) ) . size ( ) ; }
|
org . junit . Assert . assertEquals ( 0 , providers . size ( ) )
|
testDynamicQueryByPrimaryKeyMissing ( ) { com . liferay . portal . kernel . dao . orm . DynamicQuery dynamicQuery = com . liferay . portal . kernel . dao . orm . DynamicQueryFactoryUtil . forClass ( com . liferay . portal . workflow . kaleo . model . KaleoNotification . class , _dynamicQueryClassLoader ) ; dynamicQuery . add ( com . liferay . portal . kernel . dao . orm . RestrictionsFactoryUtil . eq ( "kaleoNotificationId" , com . liferay . portal . kernel . test . util . RandomTestUtil . nextLong ( ) ) ) ; java . util . List < com . liferay . portal . workflow . kaleo . model . KaleoNotification > result = _persistence . findWithDynamicQuery ( dynamicQuery ) ; "<AssertPlaceHolder>" ; } size ( ) { if ( ( _workflowTaskAssignees ) != null ) { return _workflowTaskAssignees . size ( ) ; } return _kaleoTaskAssignmentInstanceLocalService . getKaleoTaskAssignmentInstancesCount ( _kaleoTaskInstanceToken . getKaleoTaskInstanceTokenId ( ) ) ; }
|
org . junit . Assert . assertEquals ( 0 , result . size ( ) )
|
testSimple ( ) { org . apache . felix . gogo . runtime . threadio . ThreadIOImpl tio = new org . apache . felix . gogo . runtime . threadio . ThreadIOImpl ( ) ; tio . start ( ) ; System . out . println ( "Hello<sp>World" ) ; java . io . ByteArrayOutputStream out = new java . io . ByteArrayOutputStream ( ) ; java . io . ByteArrayOutputStream err = new java . io . ByteArrayOutputStream ( ) ; tio . setStreams ( System . in , new java . io . PrintStream ( out ) , new java . io . PrintStream ( err ) ) ; try { System . out . println ( "Simple<sp>Normal<sp>Message" ) ; System . err . println ( "Simple<sp>Error<sp>Message" ) ; } finally { tio . close ( ) ; } tio . stop ( ) ; java . lang . String normal = out . toString ( ) . trim ( ) ; "<AssertPlaceHolder>" ; System . out . println ( "Goodbye<sp>World" ) ; } trim ( ) { int l = org . apache . felix . resolver . util . OpenHashMap . arraySize ( size , f ) ; if ( l >= ( n ) ) { return true ; } else { try { rehash ( l ) ; return true ; } catch ( java . lang . OutOfMemoryError cantDoIt ) { return false ; } } }
|
org . junit . Assert . assertEquals ( "Simple<sp>Normal<sp>Message" , normal )
|
runTestReflection6 ( ) { soot . jimple . infoflow . results . InfoflowResults res = analyzeAPKFile ( "Reflection/Reflection6.apk" , null , enableReflectionCallback ) ; "<AssertPlaceHolder>" ; } size ( ) { return set . size ( ) ; }
|
org . junit . Assert . assertEquals ( 1 , res . size ( ) )
|
testGetProductInventories ( ) { com . iluwatar . inventory . microservice . InventoryController inventoryController = new com . iluwatar . inventory . microservice . InventoryController ( ) ; int numberOfInventories = inventoryController . getProductInventories ( ) ; "<AssertPlaceHolder>" ; } getProductInventories ( ) { java . lang . String response = "0" ; try ( org . apache . http . impl . client . CloseableHttpClient httpClient = org . apache . http . impl . client . HttpClients . createDefault ( ) ) { org . apache . http . client . methods . HttpGet httpGet = new org . apache . http . client . methods . HttpGet ( "http://localhost:51516/inventories" ) ; try ( org . apache . http . client . methods . CloseableHttpResponse httpResponse = httpClient . execute ( httpGet ) ) { response = org . apache . http . util . EntityUtils . toString ( httpResponse . getEntity ( ) ) ; } } catch ( java . io . IOException e ) { com . iluwatar . aggregator . microservices . ProductInventoryClientImpl . LOGGER . error ( "Exception<sp>caught." , e ) ; } return java . lang . Integer . parseInt ( response ) ; }
|
org . junit . Assert . assertEquals ( 5 , numberOfInventories )
|
shouldSaveAndMergeDataWithTranslatedUris ( ) { ioUtils . copy ( "jsonExisting" , jsonFile ) ; java . util . SortedMap < java . lang . String , jscover . report . FileData > map = new java . util . TreeMap ( ) ; map . put ( "/exclude/js/code.js" , fileData ) ; java . util . SortedMap < java . lang . String , jscover . report . FileData > mapTranslated = new java . util . TreeMap ( ) ; mapTranslated . put ( "/js/code.js" , fileData ) ; java . util . SortedMap < java . lang . String , jscover . report . FileData > mapExisting = new java . util . TreeMap ( ) ; mapExisting . put ( "/existing" , fileData ) ; java . util . SortedMap < java . lang . String , jscover . report . FileData > mapMerged = new java . util . TreeMap ( ) ; mapMerged . put ( "/merged" , fileData ) ; given ( uriFileTranslator . mutates ( ) ) . willReturn ( true ) ; given ( jsonDataMerger . jsonToMap ( "jsonSubmitted" ) ) . willReturn ( map ) ; given ( uriFileTranslator . convertUriToFile ( "/exclude/js/code.js" ) ) . willReturn ( "/js/code.js" ) ; given ( jsonDataMerger . jsonToMap ( "jsonExisting" ) ) . willReturn ( mapExisting ) ; given ( jsonDataMerger . mergeJSONCoverageMaps ( argThat ( isMapWithKey ( "/js/code.js" ) ) , argThat ( isMapWithKey ( "/existing" ) ) ) ) . willReturn ( mapMerged ) ; given ( jsonDataMerger . toJSON ( argThat ( isMapWithKey ( "/merged" ) ) ) ) . willReturn ( "jsonMerged" ) ; jsonDataSaver . saveJSONData ( destDir , "jsonSubmitted" , null , uriFileTranslator ) ; java . lang . String json = ioUtils . loadFromFileSystem ( jsonFile ) ; "<AssertPlaceHolder>" ; } loadFromFileSystem ( jscover . util . File ) { jscover . util . InputStream is = null ; try { is = new jscover . util . FileInputStream ( dataFile ) ; return toString ( is ) ; } catch ( java . lang . Throwable e ) { throw new java . lang . RuntimeException ( java . lang . String . format ( "Problem<sp>loading<sp>file:<sp>'%s'" , dataFile ) , e ) ; } finally { closeQuietly ( is ) ; } }
|
org . junit . Assert . assertThat ( json , org . hamcrest . CoreMatchers . equalTo ( "jsonMerged" ) )
|
testCheckHdfsNameNotEmpty ( ) { java . lang . String testHost = "test" ; namedCluster . setHdfsHost ( ( ( "<sp>" + testHost ) + "<sp>" ) ) ; "<AssertPlaceHolder>" ; } isHdfsHostEmpty ( org . pentaho . di . core . variables . VariableSpace ) { java . lang . String hostNameParsed = getHostNameParsed ( variableSpace ) ; return ( hostNameParsed == null ) || ( hostNameParsed . trim ( ) . isEmpty ( ) ) ; }
|
org . junit . Assert . assertEquals ( false , namedCluster . isHdfsHostEmpty ( null ) )
|
testDiscoveryError ( ) { org . powermock . reflect . Whitebox . invokeMethod ( nodeDiscovery , com . digi . xbee . api . IDiscoveryListenerTest . NOTIFY_DISCOVERY_ERROR , listeners , com . digi . xbee . api . IDiscoveryListenerTest . ERROR ) ; org . mockito . Mockito . verify ( discoverListener , org . mockito . Mockito . times ( 1 ) ) . discoveryError ( com . digi . xbee . api . IDiscoveryListenerTest . ERROR ) ; "<AssertPlaceHolder>" ; } getError ( ) { return error ; }
|
org . junit . Assert . assertEquals ( com . digi . xbee . api . IDiscoveryListenerTest . ERROR , discoverListener . getError ( ) )
|
testRemove ( ) { com . liferay . portal . tools . service . builder . test . model . LVEntryLocalizationVersion newLVEntryLocalizationVersion = addLVEntryLocalizationVersion ( ) ; _persistence . remove ( newLVEntryLocalizationVersion ) ; com . liferay . portal . tools . service . builder . test . model . LVEntryLocalizationVersion existingLVEntryLocalizationVersion = _persistence . fetchByPrimaryKey ( newLVEntryLocalizationVersion . getPrimaryKey ( ) ) ; "<AssertPlaceHolder>" ; } getPrimaryKey ( ) { return _amImageEntryId ; }
|
org . junit . Assert . assertNull ( existingLVEntryLocalizationVersion )
|
testMatches ( ) { when ( left . matches ( typeDescription , classLoader , module , net . bytebuddy . agent . builder . AgentBuilderRawMatcherDisjunctionTest . Foo . class , protectionDomain ) ) . thenReturn ( true ) ; when ( right . matches ( typeDescription , classLoader , module , net . bytebuddy . agent . builder . AgentBuilderRawMatcherDisjunctionTest . Foo . class , protectionDomain ) ) . thenReturn ( true ) ; net . bytebuddy . agent . builder . AgentBuilder . RawMatcher rawMatcher = new net . bytebuddy . agent . builder . AgentBuilder . RawMatcher . Disjunction ( left , right ) ; "<AssertPlaceHolder>" ; verify ( left ) . matches ( typeDescription , classLoader , module , net . bytebuddy . agent . builder . AgentBuilderRawMatcherDisjunctionTest . Foo . class , protectionDomain ) ; verifyNoMoreInteractions ( left ) ; verifyZeroInteractions ( right ) ; } is ( java . lang . annotation . Annotation ) { return is ( AnnotationDescription . ForLoadedAnnotation . of ( annotation ) ) ; }
|
org . junit . Assert . assertThat ( rawMatcher . matches ( typeDescription , classLoader , module , net . bytebuddy . agent . builder . AgentBuilderRawMatcherDisjunctionTest . Foo . class , protectionDomain ) , org . hamcrest . CoreMatchers . is ( true ) )
|
testCreate_5_reproc ( ) { java . io . File testFile = new java . io . File ( getClass ( ) . getResource ( "test_5_reproc_MTL.txt" ) . getFile ( ) ) ; org . esa . s3tbx . dataio . landsat . geotiff . LandsatMetadata landsatMetadata = org . esa . s3tbx . dataio . landsat . geotiff . LandsatMetadataFactory . create ( testFile ) ; "<AssertPlaceHolder>" ; } create ( java . io . File ) { org . esa . s3tbx . dataio . landsat . geotiff . LandsatLegacyMetadata landsatMetadata = new org . esa . s3tbx . dataio . landsat . geotiff . LandsatLegacyMetadata ( new java . io . FileReader ( mtlFile ) ) ; if ( landsatMetadata . isLegacyFormat ( ) ) { if ( ( landsatMetadata . isLandsatTM ( ) ) || ( landsatMetadata . isLandsatETM_Plus ( ) ) ) { return landsatMetadata ; } else { throw new org . esa . snap . core . dataio . ProductIOException ( "Product<sp>is<sp>of<sp>a<sp>legacy<sp>landsat<sp>format,<sp>not<sp>a<sp>legacy<sp>Landsat5<sp>or<sp>Landsat7<sp>ETM+<sp>product." ) ; } } else { java . io . BufferedReader reader = null ; try { java . io . FileReader fileReader = new java . io . FileReader ( mtlFile ) ; reader = new java . io . BufferedReader ( fileReader ) ; java . lang . String line = reader . readLine ( ) ; while ( line != null ) { if ( line . contains ( "SPACECRAFT_ID" ) ) { if ( line . contains ( "LANDSAT_8" ) ) { return new org . esa . s3tbx . dataio . landsat . geotiff . Landsat8Metadata ( new java . io . FileReader ( mtlFile ) ) ; } else { return new org . esa . s3tbx . dataio . landsat . geotiff . LandsatReprocessedMetadata ( new java . io . FileReader ( mtlFile ) ) ; } } line = reader . readLine ( ) ; } } finally { if ( reader != null ) { reader . close ( ) ; } } throw new java . lang . IllegalStateException ( ( ( "File<sp>'" + mtlFile ) + "'<sp>does<sp>not<sp>contain<sp>spacecraft<sp>information.<sp>(Field<sp>'SPACECRAFT_ID'<sp>missing)" ) ) ; } }
|
org . junit . Assert . assertTrue ( ( landsatMetadata instanceof org . esa . s3tbx . dataio . landsat . geotiff . LandsatReprocessedMetadata ) )
|
testNegateAndDeMorgan ( ) { org . jbasics . math . BigRational test = org . jbasics . math . BigRational . valueOf ( "1/2" ) ; "<AssertPlaceHolder>" ; } negate ( ) { return new org . jbasics . math . BigRational ( this . numerator . negate ( ) , this . denomintar ) ; }
|
org . junit . Assert . assertEquals ( test , test . negate ( ) . negate ( ) )
|
testSerde ( ) { int coordinatorEpoch = 79 ; org . apache . kafka . common . record . EndTransactionMarker marker = new org . apache . kafka . common . record . EndTransactionMarker ( ControlRecordType . COMMIT , coordinatorEpoch ) ; java . nio . ByteBuffer buffer = marker . serializeValue ( ) ; org . apache . kafka . common . record . EndTransactionMarker deserialized = org . apache . kafka . common . record . EndTransactionMarker . deserializeValue ( ControlRecordType . COMMIT , buffer ) ; "<AssertPlaceHolder>" ; } coordinatorEpoch ( ) { return coordinatorEpoch ; }
|
org . junit . Assert . assertEquals ( coordinatorEpoch , deserialized . coordinatorEpoch ( ) )
|
archivetrue ( ) { java . lang . String collectionName = "archive" ; com . fujitsu . dc . test . unit . cell . LogTest . TestLogResource logResource = new com . fujitsu . dc . test . unit . cell . LogTest . TestLogResource ( ) ; boolean res = logResource . isValidLogCollection ( collectionName ) ; "<AssertPlaceHolder>" ; } isValidLogCollection ( java . lang . String ) { return ( com . fujitsu . dc . core . rs . cell . LogResource . CURRENT_COLLECTION . equals ( collectionName ) ) || ( com . fujitsu . dc . core . rs . cell . LogResource . ARCHIVE_COLLECTION . equals ( collectionName ) ) ; }
|
org . junit . Assert . assertTrue ( res )
|
testTriggerScanDoneFalse ( ) { com . sonyericsson . hudson . plugins . gerrit . trigger . hudsontrigger . actions . manual . TriggerMonitor monitor = new com . sonyericsson . hudson . plugins . gerrit . trigger . hudsontrigger . actions . manual . TriggerMonitor ( ) ; com . sonyericsson . hudson . plugins . gerrit . trigger . events . ManualPatchsetCreated patch = com . sonyericsson . hudson . plugins . gerrit . trigger . mock . Setup . createManualPatchsetCreated ( ) ; monitor . add ( patch ) ; monitor . triggerScanStarting ( patch ) ; com . sonyericsson . hudson . plugins . gerrit . trigger . hudsontrigger . actions . manual . TriggerMonitor . EventState state = monitor . getEvents ( ) . get ( 0 ) ; "<AssertPlaceHolder>" ; } isTriggerScanDone ( ) { return triggerScanDone ; }
|
org . junit . Assert . assertFalse ( state . isTriggerScanDone ( ) )
|
testGetRowImmediate ( ) { rowSet . putRow ( new org . pentaho . di . core . row . RowMeta ( ) , row ) ; "<AssertPlaceHolder>" ; } getRowImmediate ( ) { throw new java . lang . UnsupportedOperationException ( ) ; }
|
org . junit . Assert . assertSame ( row , rowSet . getRowImmediate ( ) )
|
shouldFireEventsWhenTestRunFinishes ( ) { eventSupport . addTestQueueListener ( new org . infinitest . testrunner . TestQueueAdapter ( ) { @ org . infinitest . testrunner . Override public void testRunComplete ( ) { ( events ) ++ ; } } ) ; eventSupport . fireTestRunComplete ( ) ; "<AssertPlaceHolder>" ; } fireTestRunComplete ( ) { for ( org . infinitest . testrunner . TestQueueListener each : testQueueListenerList ) { each . testRunComplete ( ) ; } }
|
org . junit . Assert . assertEquals ( 1 , events )
|
customKindName ( ) { final java . lang . String kind = "CustomKind" ; repository = new org . togglz . appengine . repository . DatastoreStateRepository ( kind , datastoreService ) ; "<AssertPlaceHolder>" ; } kind ( ) { return this . kind ; }
|
org . junit . Assert . assertEquals ( kind , repository . kind ( ) )
|
portIsNotInCircuitTest ( ) { org . opennaas . extensions . genericnetwork . model . circuit . Circuit circuit = org . opennaas . extensions . genericnetwork . model . helpers . GenericNetworkModelUtils . generateSampleCircuit ( ) ; java . lang . String portId = "port" ; boolean isPortInCircuit = org . opennaas . extensions . genericnetwork . model . helpers . GenericNetworkModelHelper . isPortInCircuit ( portId , circuit ) ; "<AssertPlaceHolder>" ; } isPortInCircuit ( java . lang . String , org . opennaas . extensions . genericnetwork . model . circuit . Circuit ) { org . opennaas . extensions . genericnetwork . model . circuit . Route route = circuit . getRoute ( ) ; for ( org . opennaas . extensions . genericnetwork . model . circuit . NetworkConnection netConnection : route . getNetworkConnections ( ) ) { if ( ( netConnection . getSource ( ) . getId ( ) . equals ( portId ) ) || ( netConnection . getDestination ( ) . getId ( ) . equals ( portId ) ) ) return true ; } return false ; }
|
org . junit . Assert . assertFalse ( isPortInCircuit )
|
inboundValues ( ) { org . mule . runtime . api . message . Message message = event . getMessage ( ) ; event = org . mule . runtime . core . api . event . CoreEvent . builder ( event ) . message ( org . mule . runtime . core . internal . message . InternalMessage . builder ( message ) . addInboundProperty ( "foo" , "abc" ) . addInboundProperty ( "bar" , "xyz" ) . build ( ) ) . build ( ) ; java . util . Collection < javax . activation . DataHandler > values = ( ( java . util . Collection < javax . activation . DataHandler > ) ( evaluate ( "message.inboundProperties.values()" , event ) ) ) ; "<AssertPlaceHolder>" ; values . contains ( "abc" ) ; values . contains ( "xyz" ) ; } size ( ) { return delegate . size ( ) ; }
|
org . junit . Assert . assertEquals ( 2 , values . size ( ) )
|
testAddRequiredProductsWithNullValue ( ) { org . candlepin . dto . manifest . v1 . ContentDTO dto = new org . candlepin . dto . manifest . v1 . ContentDTO ( ) ; dto . setRequiredProductIds ( java . util . Arrays . asList ( "1" , "2" ) ) ; "<AssertPlaceHolder>" ; dto . addRequiredProductId ( null ) ; } getRequiredProductIds ( ) { return ( this . requiredProductIds ) != null ? new org . candlepin . util . SetView ( this . requiredProductIds ) : null ; }
|
org . junit . Assert . assertEquals ( new java . util . HashSet ( java . util . Arrays . asList ( "1" , "2" ) ) , dto . getRequiredProductIds ( ) )
|
testUsageInRange ( ) { fModule . schedule ( ) ; fModule . waitForCompletion ( ) ; expected . clear ( ) ; expected . put ( 0 , 0L ) ; expected . put ( 1 , 2L ) ; expected . put ( ( - 1 ) , 2L ) ; resultMap = fModule . getContextSwitchesRange ( 4L , 13L ) ; "<AssertPlaceHolder>" ; } getContextSwitchesRange ( long , long ) { @ org . eclipse . jdt . annotation . Nullable final org . eclipse . tracecompass . statesystem . core . ITmfStateSystem stateSystem = getStateSystem ( ) ; org . eclipse . tracecompass . tmf . core . trace . ITmfTrace trace = getTrace ( ) ; if ( ( trace == null ) || ( stateSystem == null ) ) { return java . util . Collections . < java . lang . Integer , java . lang . Long > emptyMap ( ) ; } long start = java . lang . Math . max ( startParam , stateSystem . getStartTime ( ) ) ; long end = java . lang . Math . min ( endParam , stateSystem . getCurrentEndTime ( ) ) ; org . eclipse . tracecompass . statesystem . core . ITmfStateSystem contextSwitchStateSystem = org . eclipse . tracecompass . tmf . core . statesystem . TmfStateSystemAnalysisModule . getStateSystem ( trace , org . eclipse . tracecompass . analysis . os . linux . core . contextswitch . KernelContextSwitchAnalysis . ID ) ; if ( contextSwitchStateSystem == null ) { return java . util . Collections . < java . lang . Integer , java . lang . Long > emptyMap ( ) ; } org . eclipse . tracecompass . internal . analysis . os . linux . core . Activator . getDefault ( ) . logError ( "Error<sp>getting<sp>CPU<sp>context<sp>switches<sp>in<sp>a<sp>time<sp>range" , e ) ; } return map ; }
|
org . junit . Assert . assertEquals ( expected , resultMap )
|
SimpleSelectTest ( ) { java . io . File schemaFile = new java . io . File ( "src/test/resources/noAggQuery/simpleSelectQuery.sql" ) ; java . lang . String sql = com . google . common . io . Files . toString ( schemaFile , Charsets . UTF_8 ) ; org . verdictdb . VerdictContext verdictContext = new org . verdictdb . VerdictContext ( org . verdictdb . VerdictContextNoAggQueryTest . dbmsConnection ) ; org . verdictdb . VerdictSingleResult result = verdictContext . sql ( sql ) ; java . sql . ResultSet rs = org . verdictdb . VerdictContextNoAggQueryTest . stmt . executeQuery ( sql ) ; while ( result . next ( ) ) { rs . next ( ) ; "<AssertPlaceHolder>" ; } } getInt ( java . lang . String ) { if ( colNameIdx . containsKey ( standardizedLabel ( columnLabel ) ) ) { return queryResult . getInt ( standardizedLabel ( columnLabel . toLowerCase ( ) ) ) ; } else throw new java . sql . SQLException ( "ColumnLabel<sp>does<sp>not<sp>exist." ) ; }
|
org . junit . Assert . assertEquals ( rs . getInt ( 1 ) , result . getInt ( 0 ) )
|
testIsJDK5_1 ( ) { boolean result = org . audit4j . core . util . EnvUtil . isJDK5 ( ) ; "<AssertPlaceHolder>" ; } isJDK5 ( ) { return org . audit4j . core . util . EnvUtil . isJDK_N_OrHigher ( 5 ) ; }
|
org . junit . Assert . assertEquals ( true , result )
|
testLoad ( ) { javax . jms . Queue dest = session . createQueue ( "TEST" ) ; for ( int i = 0 ; i < ( org . apache . activemq . transport . nio . NIOSSLLoadTest . PRODUCER_COUNT ) ; i ++ ) { org . apache . activemq . util . ProducerThread producer = new org . apache . activemq . util . ProducerThread ( session , dest ) ; producer . setMessageCount ( org . apache . activemq . transport . nio . NIOSSLLoadTest . MESSAGE_COUNT ) ; producer . start ( ) ; } for ( int i = 0 ; i < ( org . apache . activemq . transport . nio . NIOSSLLoadTest . CONSUMER_COUNT ) ; i ++ ) { org . apache . activemq . util . ConsumerThread consumer = new org . apache . activemq . util . ConsumerThread ( session , dest ) ; consumer . setMessageCount ( org . apache . activemq . transport . nio . NIOSSLLoadTest . MESSAGE_COUNT ) ; consumer . start ( ) ; consumers [ i ] = consumer ; } org . apache . activemq . util . Wait . waitFor ( new org . apache . activemq . util . Wait . Condition ( ) { @ org . apache . activemq . transport . nio . Override public boolean isSatisified ( ) throws java . lang . Exception { return ( getReceived ( ) ) == ( ( org . apache . activemq . transport . nio . NIOSSLLoadTest . PRODUCER_COUNT ) * ( org . apache . activemq . transport . nio . NIOSSLLoadTest . MESSAGE_COUNT ) ) ; } } , 60000 ) ; "<AssertPlaceHolder>" ; } getReceived ( ) { return received ; }
|
org . junit . Assert . assertEquals ( ( ( org . apache . activemq . transport . nio . NIOSSLLoadTest . PRODUCER_COUNT ) * ( org . apache . activemq . transport . nio . NIOSSLLoadTest . MESSAGE_COUNT ) ) , getReceived ( ) )
|
insertProteinMatches ( ) { deleteAll ( ) ; dao . insertProteinMatches ( createGene3dMatches ( ) ) ; "<AssertPlaceHolder>" ; } retrieveAll ( ) { java . lang . String queryString = java . lang . String . format ( "select<sp>o<sp>from<sp>%s<sp>o" , unqualifiedModelClassName ) ; javax . persistence . Query query = this . entityManager . createQuery ( queryString ) ; @ uk . ac . ebi . interpro . scan . genericjpadao . SuppressWarnings ( "unchecked" ) java . util . List < T > results = query . getResultList ( ) ; return results ; }
|
org . junit . Assert . assertEquals ( 1 , dao . retrieveAll ( ) . size ( ) )
|
testFindUncoveredHttpMethods12 ( ) { org . apache . tomcat . util . descriptor . web . SecurityConstraint [ ] result = org . apache . tomcat . util . descriptor . web . SecurityConstraint . findUncoveredHttpMethods ( new org . apache . tomcat . util . descriptor . web . SecurityConstraint [ ] { org . apache . tomcat . util . descriptor . web . TestSecurityConstraint . GET_OMIT , org . apache . tomcat . util . descriptor . web . TestSecurityConstraint . POST_OMIT } , true , org . apache . tomcat . util . descriptor . web . TestSecurityConstraint . DUMMY_LOG ) ; "<AssertPlaceHolder>" ; }
|
org . junit . Assert . assertEquals ( 0 , result . length )
|
testFragmentToFragmentInternal ( ) { com . streamsets . pipeline . lib . http . NopHttpRequestFragmenter fragmenter = new com . streamsets . pipeline . lib . http . NopHttpRequestFragmenter ( ) ; fragmenter = org . mockito . Mockito . spy ( fragmenter ) ; java . util . List < byte [ ] > list = new java . util . ArrayList ( ) ; org . mockito . Mockito . doReturn ( list ) . when ( fragmenter ) . fragmentInternal ( org . mockito . Mockito . any ( java . io . InputStream . class ) , org . mockito . Mockito . anyInt ( ) , org . mockito . Mockito . anyInt ( ) ) ; java . io . InputStream is = org . mockito . Mockito . mock ( java . io . InputStream . class ) ; fragmenter . init ( null ) ; "<AssertPlaceHolder>" ; org . mockito . Mockito . verify ( fragmenter , org . mockito . Mockito . times ( 1 ) ) . fragmentInternal ( org . mockito . Mockito . eq ( is ) , org . mockito . Mockito . eq ( 1000 ) , org . mockito . Mockito . eq ( 2000 ) ) ; fragmenter . destroy ( ) ; } fragment ( java . io . InputStream , int , int ) { int fragmentSizeB = fragmentSizeKB * 1000 ; int maxSizeB = maxSizeKB * 1000 ; return fragmentInternal ( is , fragmentSizeB , maxSizeB ) ; }
|
org . junit . Assert . assertEquals ( list , fragmenter . fragment ( is , 1 , 2 ) )
|
copyInputStreamNullEncoding ( ) { java . lang . String probe = "A<sp>string<sp>⍅ï" ; java . io . StringWriter writer = new org . apache . maven . shared . utils . io . IOUtilTest . DontCloseStringWriter ( ) ; org . apache . maven . shared . utils . io . IOUtil . copy ( new java . io . ByteArrayInputStream ( probe . getBytes ( ) ) , writer , null ) ; "<AssertPlaceHolder>" ; } toString ( ) { return ( ( ( "ReportSet{id='" + ( getId ( ) ) ) + "',<sp>reports=" ) + ( reports ) ) + "}" ; }
|
org . junit . Assert . assertThat ( writer . toString ( ) . getBytes ( ) , org . hamcrest . CoreMatchers . is ( probe . getBytes ( ) ) )
|
shouldHandleRequest ( ) { io . gravitee . gateway . api . Request request = mock ( io . gravitee . gateway . api . Request . class ) ; boolean handle = authenticationHandler . canHandle ( request ) ; "<AssertPlaceHolder>" ; } canHandle ( io . gravitee . gateway . repository . plugins . Plugin ) { return ( plugin . type ( ) ) == ( PluginType . REPOSITORY ) ; }
|
org . junit . Assert . assertTrue ( handle )
|
testRefereesClass ( ) { System . out . println ( "referees-class" ) ; java . lang . String query = "select<sp>referees(heap.findClass(\"java.io.File\"))" ; long [ ] refereesTest = new long [ ] { 1684106928 , 1684106888 , 1684106848 , 1684106408 } ; final java . util . List < java . lang . Long > referees = new java . util . ArrayList < java . lang . Long > ( ) ; instance . executeQuery ( query , new org . netbeans . modules . profiler . oql . engine . api . OQLEngine . ObjectVisitor ( ) { public boolean visit ( java . lang . Object o ) { referees . add ( ( ( org . netbeans . lib . profiler . heap . Instance ) ( o ) ) . getInstanceId ( ) ) ; return false ; } } ) ; "<AssertPlaceHolder>" ; for ( long referee : refereesTest ) { if ( ! ( referees . contains ( referee ) ) ) org . junit . Assert . fail ( ) ; } } size ( ) { return listItems . size ( ) ; }
|
org . junit . Assert . assertEquals ( refereesTest . length , referees . size ( ) )
|
ex17 ( ) { java . util . Collections . shuffle ( org . meri . matasano . set3 . Set3Test . EX_17_INPUTS ) ; for ( java . lang . String cookie : org . meri . matasano . set3 . Set3Test . EX_17_INPUTS ) { org . meri . matasano . utils . webtools . SessionManager sessionManager = new org . meri . matasano . utils . webtools . SessionManager ( ) ; byte [ ] cookieCiphertext = sessionManager . encrypt ( cookie ) ; java . lang . String cookiePlaintext = set . decryptLeakingPkcsN7Padding ( cookieCiphertext , sessionManager ) ; "<AssertPlaceHolder>" ; } } decryptLeakingPkcsN7Padding ( byte [ ] , org . meri . matasano . utils . webtools . SessionManager ) { org . meri . matasano . utils . cryptoanalysis . LeakingPkcsN7PaddingInCBCAnalyzer analyzer = new org . meri . matasano . utils . cryptoanalysis . LeakingPkcsN7PaddingInCBCAnalyzer ( new org . meri . matasano . utils . oracle . PaddingValidatingOracle ( ) { public boolean validatePadding ( byte [ ] ciphertext ) { return sessionManager . validateSessionCookieEncryption ( ciphertext ) ; } public int getBlockLength ( ) { return sessionManager . getBlockLength ( ) ; } } ) ; byte [ ] result = analyzer . decryptLeakingPkcsN7Padding ( cookieCiphertext ) ; return org . apache . shiro . codec . CodecSupport . toString ( result ) ; }
|
org . junit . Assert . assertEquals ( cookie , cookiePlaintext )
|
testSetContentAuthorReferenceSetsMetadataDirtyFlag ( ) { this . document . setMetaDataDirty ( false ) ; org . xwiki . model . reference . DocumentReference contentAuthor = new org . xwiki . model . reference . DocumentReference ( "Wiki" , "XWiki" , "ContentAuthor" ) ; this . document . setContentAuthorReference ( contentAuthor ) ; "<AssertPlaceHolder>" ; } isMetaDataDirty ( ) { return this . isMetaDataDirty ; }
|
org . junit . Assert . assertEquals ( true , this . document . isMetaDataDirty ( ) )
|
connectionMadeSQLFails ( ) { final org . zapodot . junit . db . internal . SQLInitializationPlugin sqlInitializationPlugin = new org . zapodot . junit . db . internal . SQLInitializationPlugin ( "INSERT<sp>INTO<sp>MyTables<sp>values(1,<sp>'User');" ) ; "<AssertPlaceHolder>" ; when ( connection . createStatement ( ) ) . thenReturn ( statement ) ; when ( statement . execute ( anyString ( ) ) ) . thenThrow ( new java . sql . SQLException ( "Reason" ) ) ; sqlInitializationPlugin . connectionMade ( "name" , connection ) ; }
|
org . junit . Assert . assertNotNull ( sqlInitializationPlugin )
|
testFunc ( ) { java . lang . String orig = "foo(\"bar\",<sp>42)" ; org . antlr . runtime . tree . CommonTree tree = com . cloudera . flume . conf . FlumeBuilder . parseArg ( orig ) ; java . lang . String out = com . cloudera . flume . conf . FlumeSpecGen . genArg ( tree ) ; "<AssertPlaceHolder>" ; } genArg ( org . antlr . runtime . tree . CommonTree ) { com . cloudera . flume . conf . FlumeBuilder . ASTNODE type = com . cloudera . flume . conf . FlumeBuilder . ASTNODE . valueOf ( t . getText ( ) ) ; switch ( type ) { case HEX : case DEC : case BOOL : case OCT : case STRING : case FLOAT : return t . getChild ( 0 ) . getText ( ) ; case KWARG : return ( ( t . getChild ( 0 ) . getText ( ) ) + "=" ) + ( com . cloudera . flume . conf . FlumeSpecGen . genArg ( ( ( org . antlr . runtime . tree . CommonTree ) ( t . getChild ( 1 ) ) ) ) ) ; case FUNC : return com . cloudera . flume . conf . FlumeSpecGen . genFunc ( t ) ; default : throw new com . cloudera . flume . conf . FlumeSpecException ( ( "Not<sp>a<sp>node<sp>of<sp>literal<sp>type:<sp>" + ( t . toStringTree ( ) ) ) ) ; } }
|
org . junit . Assert . assertEquals ( orig , out )
|
shouldNotLoadNullValues ( ) { org . jsr107 . tck . integration . NullValueCacheLoader < java . lang . String , java . lang . String > cacheLoader = new org . jsr107 . tck . integration . NullValueCacheLoader ( ) ; cacheLoaderServer . setCacheLoader ( cacheLoader ) ; java . util . HashSet < java . lang . String > keys = new java . util . HashSet < java . lang . String > ( ) ; keys . add ( "gudday" ) ; keys . add ( "hello" ) ; keys . add ( "howdy" ) ; keys . add ( "bonjour" ) ; java . util . Map < java . lang . String , java . lang . String > map = cache . getAll ( keys ) ; "<AssertPlaceHolder>" ; } setCacheLoader ( javax . cache . integration . CacheLoader ) { this . cacheLoader = cacheLoader ; }
|
org . junit . Assert . assertThat ( map . size ( ) , org . hamcrest . CoreMatchers . is ( 0 ) )
|
testGeboorteDatumKindOpAanvAdreshoudingMoeder ( ) { kind . getGeboorte ( ) . setDatumGeboorte ( new nl . bzk . brp . model . attribuuttype . Datum ( 20120501 ) ) ; java . util . List < nl . bzk . brp . model . validatie . Melding > melding = brpuc00120 . executeer ( null , nieuweSituatie , null ) ; "<AssertPlaceHolder>" ; } size ( ) { return elementen . size ( ) ; }
|
org . junit . Assert . assertTrue ( ( ( melding . size ( ) ) == 0 ) )
|
addTokenTwiceTest ( ) { final cc . blynk . server . internal . token . ResetPassToken user = new cc . blynk . server . internal . token . ResetPassToken ( "test.gmail.com" , cc . blynk . utils . AppNameUtil . BLYNK ) ; final java . lang . String token = "123" ; final cc . blynk . server . internal . token . TokensPool tokensPool = new cc . blynk . server . internal . token . TokensPool ( "" ) ; tokensPool . addToken ( token , user ) ; tokensPool . addToken ( token , user ) ; "<AssertPlaceHolder>" ; } size ( ) { return size ; }
|
org . junit . Assert . assertEquals ( 1 , tokensPool . size ( ) )
|
testNoteIssueEvent ( ) { org . gitlab4j . api . webhook . Event noteEvent = makeFakeApiCall ( org . gitlab4j . api . webhook . NoteEvent . class , "note-issue-event" ) ; "<AssertPlaceHolder>" ; } compareJson ( T , java . lang . String ) { java . io . InputStreamReader reader = new java . io . InputStreamReader ( org . gitlab4j . api . TestGitLabApiBeans . class . getResourceAsStream ( filename ) ) ; return org . gitlab4j . api . JsonUtils . compareJson ( apiObject , reader ) ; }
|
org . junit . Assert . assertTrue ( compareJson ( noteEvent , "note-issue-event" ) )
|
testOrderCircularDependencyException ( ) { java . util . List < com . liferay . portal . osgi . web . servlet . context . helper . definition . WebXMLDefinition > webXMLDefinitions = new java . util . ArrayList ( ) ; webXMLDefinitions . add ( loadWebXMLDefinition ( "dependencies/custom-web-fragment-circular-1.xml" ) ) ; webXMLDefinitions . add ( loadWebXMLDefinition ( "dependencies/custom-web-fragment-circular-2.xml" ) ) ; com . liferay . portal . osgi . web . servlet . context . helper . definition . WebXMLDefinition webXMLDefinition = loadWebXMLDefinition ( "dependencies/custom-web.xml" ) ; boolean threwOrderCircularDependencyException = false ; try { com . liferay . portal . osgi . web . servlet . context . helper . internal . order . OrderUtil . getOrderedWebXMLDefinitions ( webXMLDefinitions , webXMLDefinition . getAbsoluteOrderingNames ( ) ) ; } catch ( java . lang . Exception e ) { if ( e instanceof com . liferay . portal . osgi . web . servlet . context . helper . internal . order . OrderCircularDependencyException ) { threwOrderCircularDependencyException = true ; } } "<AssertPlaceHolder>" ; } getAbsoluteOrderingNames ( ) { return _absoluteOrderingNames ; }
|
org . junit . Assert . assertTrue ( threwOrderCircularDependencyException )
|
testListOpenFilesNN1DownNN2Up ( ) { setUpHaCluster ( false ) ; cluster . getDfsCluster ( ) . shutdownNameNode ( 0 ) ; cluster . getDfsCluster ( ) . transitionToActive ( 1 ) ; int exitCode = admin . run ( new java . lang . String [ ] { "-listOpenFiles" } ) ; "<AssertPlaceHolder>" ; } toString ( ) { final java . lang . StringBuilder sb = new java . lang . StringBuilder ( "InterruptEscalator{" ) ; sb . append ( "<sp>signalAlreadyReceived=" ) . append ( signalAlreadyReceived . get ( ) ) ; org . apache . hadoop . service . launcher . ServiceLauncher owner = ownerRef . get ( ) ; if ( owner != null ) { sb . append ( ",<sp>owner=<sp>" ) . append ( owner . toString ( ) ) ; } sb . append ( ",<sp>shutdownTimeMillis=" ) . append ( shutdownTimeMillis ) . append ( ",<sp>forcedShutdownTimedOut=" ) . append ( forcedShutdownTimedOut ) . append ( '}' ) ; return sb . toString ( ) ; }
|
org . junit . Assert . assertEquals ( err . toString ( ) . trim ( ) , 0 , exitCode )
|
testAddDuplicateAddressesToApplication2 ( ) { com . rayo . server . storage . model . Application application = buildApplication ( ) ; store . storeApplication ( application ) ; java . util . List < java . lang . String > addresses = new java . util . ArrayList < java . lang . String > ( ) ; addresses . add ( "+348005551212" ) ; addresses . add ( "+348005551212" ) ; store . storeAddresses ( addresses , application . getBareJid ( ) ) ; "<AssertPlaceHolder>" ; } getAddressesForApplication ( java . lang . String ) { java . util . concurrent . locks . Lock applicationLock = applicationsLock . readLock ( ) ; applicationLock . lock ( ) ; try { java . util . List < java . lang . String > addresses = appToAddressesMap . get ( appId ) ; if ( addresses != null ) { return new java . util . ArrayList < java . lang . String > ( addresses ) ; } else { return java . util . Collections . EMPTY_LIST ; } } finally { applicationLock . unlock ( ) ; } }
|
org . junit . Assert . assertEquals ( store . getAddressesForApplication ( application . getBareJid ( ) ) . size ( ) , 1 )
|
unrecognisedAtomTypes ( ) { org . openscience . cdk . io . Mol2Reader mol2Reader = null ; try { mol2Reader = new org . openscience . cdk . io . Mol2Reader ( getClass ( ) . getResourceAsStream ( "CLMW1.mol2" ) ) ; org . openscience . cdk . interfaces . IAtomContainer container = mol2Reader . read ( new org . openscience . cdk . silent . AtomContainer ( ) ) ; for ( org . openscience . cdk . interfaces . IAtom atom : container . atoms ( ) ) "<AssertPlaceHolder>" ; } finally { if ( mol2Reader != null ) mol2Reader . close ( ) ; } } getAtomicNumber ( ) { return this . atomicNumber ; }
|
org . junit . Assert . assertNotNull ( atom . getAtomicNumber ( ) )
|
readConnectorState ( ) { byte [ ] value = new byte [ 0 ] ; org . apache . kafka . connect . util . KafkaBasedLog < java . lang . String , byte [ ] > kafkaBasedLog = mock ( org . apache . kafka . connect . util . KafkaBasedLog . class ) ; org . apache . kafka . connect . storage . Converter converter = mock ( org . apache . kafka . connect . storage . Converter . class ) ; org . apache . kafka . connect . storage . KafkaStatusBackingStore store = new org . apache . kafka . connect . storage . KafkaStatusBackingStore ( new org . apache . kafka . common . utils . MockTime ( ) , converter , org . apache . kafka . connect . storage . KafkaStatusBackingStoreTest . STATUS_TOPIC , kafkaBasedLog ) ; java . util . Map < java . lang . String , java . lang . Object > statusMap = new java . util . HashMap ( ) ; statusMap . put ( "worker_id" , org . apache . kafka . connect . storage . KafkaStatusBackingStoreTest . WORKER_ID ) ; statusMap . put ( "state" , "RUNNING" ) ; statusMap . put ( "generation" , 0L ) ; expect ( converter . toConnectData ( org . apache . kafka . connect . storage . KafkaStatusBackingStoreTest . STATUS_TOPIC , value ) ) . andReturn ( new org . apache . kafka . connect . data . SchemaAndValue ( null , statusMap ) ) ; replayAll ( ) ; store . read ( org . apache . kafka . connect . storage . KafkaStatusBackingStoreTest . consumerRecord ( 0 , "status-connector-conn" , value ) ) ; org . apache . kafka . connect . runtime . ConnectorStatus status = new org . apache . kafka . connect . runtime . ConnectorStatus ( org . apache . kafka . connect . storage . KafkaStatusBackingStoreTest . CONNECTOR , ConnectorStatus . State . RUNNING , org . apache . kafka . connect . storage . KafkaStatusBackingStoreTest . WORKER_ID , 0 ) ; "<AssertPlaceHolder>" ; verifyAll ( ) ; } get ( org . apache . kafka . connect . data . Field ) { java . lang . Object val = values [ field . index ( ) ] ; if ( ( val == null ) && ( ( field . schema ( ) . defaultValue ( ) ) != null ) ) { val = field . schema ( ) . defaultValue ( ) ; } return val ; }
|
org . junit . Assert . assertEquals ( status , store . get ( org . apache . kafka . connect . storage . KafkaStatusBackingStoreTest . CONNECTOR ) )
|
whenDeleteUserFromStorageFromUncorrectIDThatTheyDontDelete ( ) { ru . szhernovoy . model . User [ ] users = new ru . szhernovoy . model . User [ 2 ] ; users [ 0 ] = new ru . szhernovoy . model . User ( "Vasja" , 25 ) ; users [ 1 ] = new ru . szhernovoy . model . User ( "Kolja" , 26 ) ; ru . szhernovoy . model . User vasja = new ru . szhernovoy . model . User ( "Vasja" , 25 ) ; ru . szhernovoy . model . UserStorage storage = new ru . szhernovoy . model . UserStorage ( users ) ; ru . szhernovoy . control . Validate [ ] validId = new ru . szhernovoy . control . Validate [ 1 ] ; validId [ 0 ] = new ru . szhernovoy . control . ValidateId ( ) ; vasja . setId ( "1212121212" ) ; boolean result = storage . deleteUser ( validId , vasja ) ; boolean control = false ; "<AssertPlaceHolder>" ; } deleteUser ( ru . szhernovoy . control . Validate [ ] , ru . szhernovoy . model . User ) { boolean result = checkParam ( valid , user ) ; System . out . println ( result ) ; if ( result ) { for ( int index = 0 ; index < ( this . users . length ) ; index ++ ) { if ( this . users [ index ] . getId ( ) . equals ( user . getId ( ) ) ) { this . users [ index ] = null ; break ; } } } return result ; }
|
org . junit . Assert . assertThat ( control , org . hamcrest . core . Is . is ( result ) )
|
GetAllProfiles ( ) { net . billforward . model . Profile [ ] profiles = net . billforward . model . Profile . getAll ( ) ; "<AssertPlaceHolder>" ; System . out . println ( profiles [ 0 ] . toString ( ) ) ; } getAll ( ) { return net . billforward . model . Profile . getAll ( net . billforward . model . Profile . ResourcePath ( ) ) ; }
|
org . junit . Assert . assertNotNull ( profiles )
|
managedPoolMustNotRecordObjectLifetimeLatencyBeforeFirstDeallocation ( ) { config . setMetricsRecorder ( new stormpot . LastSampleMetricsRecorder ( ) ) ; stormpot . ManagedPool managedPool = assumeManagedPool ( ) ; stormpot . GenericPoolable obj = pool . claim ( stormpot . PoolTest . longTimeout ) ; try { "<AssertPlaceHolder>" ; } finally { obj . release ( ) ; } } getObjectLifetimePercentile ( double ) { if ( ( metricsRecorder ) == null ) { return Double . NaN ; } return metricsRecorder . getObjectLifetimePercentile ( percentile ) ; }
|
org . junit . Assert . assertThat ( managedPool . getObjectLifetimePercentile ( 0.0 ) , is ( Double . NaN ) )
|
getGroupsCount ( ) { System . out . println ( ( ( cz . metacentrum . perun . core . entry . GroupsManagerEntryIntegrationTest . CLASS_NAME ) + "getGroupsCount" ) ) ; vo = setUpVo ( ) ; setUpGroup ( vo ) ; int count = groupsManager . getGroupsCount ( sess ) ; "<AssertPlaceHolder>" ; } getGroupsCount ( cz . metacentrum . perun . core . api . PerunSession ) { return getGroupsManagerImpl ( ) . getGroupsCount ( sess ) ; }
|
org . junit . Assert . assertTrue ( ( count > 0 ) )
|
uploadMemoryMappedTest ( ) { final java . lang . String sas = ( ( java . lang . System . getenv ( "JAVA_SDK_TEST_SAS" ) ) == null ) ? "" : java . lang . System . getenv ( "JAVA_SDK_TEST_SAS" ) ; reactor . core . publisher . Flux < byte [ ] > md5s = reactor . core . publisher . Flux . range ( 0 , com . azure . common . implementation . RestProxyStressTests . NUM_FILES ) . map ( ( integer ) -> { final java . nio . file . Path filePath = com . azure . common . implementation . RestProxyStressTests . TEMP_FOLDER_PATH . resolve ( ( ( "100m-" + integer ) + "-md5.dat" ) ) ; try { return java . nio . file . Files . readAllBytes ( filePath ) ; } catch ( ioe ) { throw reactor . core . Exceptions . propagate ( com . azure . common . implementation . ioe ) ; } } ) ; java . time . Instant uploadStart = java . time . Instant . now ( ) ; reactor . core . publisher . Flux . range ( 0 , com . azure . common . implementation . RestProxyStressTests . NUM_FILES ) . zipWith ( md5s , ( id , md5 ) -> { java . nio . channels . FileChannel fileStream = null ; try { fileStream = java . nio . channels . FileChannel . open ( com . azure . common . implementation . RestProxyStressTests . TEMP_FOLDER_PATH . resolve ( ( ( "100m-" + id ) + ".dat" ) ) , StandardOpenOption . READ ) ; } catch ( ioe ) { reactor . core . Exceptions . propagate ( com . azure . common . implementation . ioe ) ; } io . netty . buffer . ByteBuf mappedByteBufFile = null ; Flux < io . netty . buffer . ByteBuf > stream = null ; try { java . nio . MappedByteBuffer mappedByteBufferFile = fileStream . map ( FileChannel . MapMode . READ_ONLY , 0 , fileStream . size ( ) ) ; mappedByteBufFile = io . netty . buffer . Unpooled . wrappedBuffer ( mappedByteBufferFile ) ; stream = com . azure . common . implementation . util . FluxUtil . split ( mappedByteBufFile , com . azure . common . implementation . RestProxyStressTests . CHUNK_SIZE ) ; } catch ( ioe ) { mappedByteBufFile . release ( ) ; reactor . core . Exceptions . propagate ( com . azure . common . implementation . ioe ) ; } return com . azure . common . implementation . RestProxyStressTests . service . upload100MB ( java . lang . String . valueOf ( id ) , sas , "BlockBlob" , stream , com . azure . common . implementation . RestProxyStressTests . FILE_SIZE ) . map ( ( response ) -> { java . lang . String base64MD5 = response . headers ( ) . value ( "Content-MD5" ) ; byte [ ] receivedMD5 = java . util . Base64 . getDecoder ( ) . decode ( base64MD5 ) ; "<AssertPlaceHolder>" ; return response ; } ) ; } ) . flatMapDelayError ( ( m ) -> m , 15 , 1 ) . blockLast ( ) ; long durationMilliseconds = java . time . Duration . between ( uploadStart , java . time . Instant . now ( ) ) . toMillis ( ) ; org . slf4j . LoggerFactory . getLogger ( getClass ( ) ) . info ( ( ( "Upload<sp>took<sp>" + durationMilliseconds ) + "<sp>milliseconds." ) ) ; } decode ( byte [ ] ) { return encoded == null ? null : java . util . Base64 . getDecoder ( ) . decode ( encoded ) ; }
|
org . junit . Assert . assertArrayEquals ( md5 , receivedMD5 )
|
test_UrlBuilder_AlreadyHasQuery_WithMultipleQueries ( ) { java . lang . String baseURL = "http://www.fakex.com/course?type=mobile" ; java . util . Map < java . lang . String , java . lang . String > queryParams = new java . util . HashMap ( ) ; queryParams . put ( "search_query" , "mobile<sp>linux" ) ; queryParams . put ( "subject" , "Architecture" ) ; java . lang . String expected = "http://www.fakex.com/course?type=mobile&subject=Architecture&search_query=mobile%20linux" ; java . lang . String output = org . edx . mobile . util . UrlUtil . buildUrlWithQueryParams ( logger , baseURL , queryParams ) ; "<AssertPlaceHolder>" ; } buildUrlWithQueryParams ( org . edx . mobile . logger . Logger , java . lang . String , java . util . Map ) { final android . net . Uri . Builder uriBuilder = android . net . Uri . parse ( baseUrl ) . buildUpon ( ) ; for ( Map . Entry < java . lang . String , java . lang . String > entry : queryParams . entrySet ( ) ) { uriBuilder . appendQueryParameter ( entry . getKey ( ) , entry . getValue ( ) ) ; } final java . lang . String finalUrl = uriBuilder . build ( ) . toString ( ) ; logger . debug ( ( "URL:<sp>" + finalUrl ) ) ; return finalUrl ; }
|
org . junit . Assert . assertEquals ( expected , output )
|
testGetFloatPosition ( ) { final double [ ] initial = new double [ ] { 5.3 , 2.6 , 3.1 , - 852.1 } ; final net . imglib2 . RealPoint p1 = new net . imglib2 . RealPoint ( initial ) ; for ( int j = 0 ; j < 4 ; j ++ ) { "<AssertPlaceHolder>" ; } } getFloatPosition ( int ) { return position . getFloatPosition ( d ) ; }
|
org . junit . Assert . assertEquals ( p1 . getFloatPosition ( j ) , ( ( float ) ( initial [ j ] ) ) , 0 )
|
testGetParametersWithDefaultEntityAndDisabledSecurity ( ) { unit . setSecurity ( false ) ; org . lnu . is . domain . asset . status . AssetStatus entity = new org . lnu . is . domain . asset . status . AssetStatus ( ) ; java . util . Map < java . lang . String , java . lang . Object > expected = new java . util . HashMap < java . lang . String , java . lang . Object > ( ) ; expected . put ( "status" , RowStatus . ACTIVE ) ; java . util . Map < java . lang . String , java . lang . Object > actual = unit . getParameters ( entity ) ; "<AssertPlaceHolder>" ; } getParameters ( org . springframework . web . context . request . NativeWebRequest ) { java . util . Map < java . lang . String , java . lang . Object > resultMap = new java . util . HashMap < java . lang . String , java . lang . Object > ( ) ; java . util . Map < java . lang . String , java . lang . String > pathVariables = ( ( java . util . Map < java . lang . String , java . lang . String > ) ( webRequest . getAttribute ( HandlerMapping . URI_TEMPLATE_VARIABLES_ATTRIBUTE , RequestAttributes . SCOPE_REQUEST ) ) ) ; java . util . Map < java . lang . String , java . lang . Object > requestParams = getRequestParameterMap ( webRequest ) ; for ( Map . Entry < java . lang . String , java . lang . Object > entry : requestParams . entrySet ( ) ) { resultMap . put ( entry . getKey ( ) , entry . getValue ( ) ) ; } resultMap . putAll ( pathVariables ) ; return resultMap ; }
|
org . junit . Assert . assertEquals ( expected , actual )
|
test ( ) { com . arjuna . ats . arjuna . common . Uid u = new com . arjuna . ats . arjuna . common . Uid ( "hello" , true ) ; "<AssertPlaceHolder>" ; try { u = com . arjuna . ats . internal . arjuna . common . UidHelper . unpackFrom ( null ) ; org . junit . Assert . fail ( ) ; } catch ( final java . lang . IllegalArgumentException ex ) { } try { com . arjuna . ats . internal . arjuna . common . UidHelper . packInto ( null , new com . arjuna . ats . arjuna . state . OutputObjectState ( ) ) ; org . junit . Assert . fail ( ) ; } catch ( final java . lang . IllegalArgumentException ex ) { } try { com . arjuna . ats . internal . arjuna . common . UidHelper . packInto ( u , new com . arjuna . ats . arjuna . state . OutputObjectState ( ) ) ; org . junit . Assert . fail ( ) ; } catch ( final java . lang . IllegalArgumentException ex ) { } } valid ( ) { return ( _context ) != null ; }
|
org . junit . Assert . assertEquals ( u . valid ( ) , false )
|
testLargeRequestCancellations ( ) { com . devexperts . rmi . test . RMIFunctionalityTest . log . info ( "<sp>----<sp>testLargeRequestCancellations<sp>----<sp>" ) ; com . devexperts . rmi . test . NTU . exportServices ( server . getServer ( ) , new com . devexperts . rmi . test . RMIServiceImplementation ( new com . devexperts . rmi . test . RMIFunctionalityTest . LargeRequestProcessorImpl2 ( ) , com . devexperts . rmi . test . RMIFunctionalityTest . LargeRequestProcessor . class ) , channelLogic ) ; com . devexperts . rmi . test . NTU . connect ( server , ( "shaped[throughput=1000]+:" + ( com . devexperts . rmi . test . NTU . port ( 39 ) ) ) ) ; com . devexperts . rmi . test . NTU . connect ( client , ( ( ( "shaped[throughput=1000]+" + ( NTU . LOCAL_HOST ) ) + ":" ) + ( com . devexperts . rmi . test . NTU . port ( 39 ) ) ) ) ; setSingleThreadExecutorForLargeMethods ( ) ; channelLogic . initPorts ( ) ; com . devexperts . rmi . test . RMIOperation < java . lang . Integer > processOp = com . devexperts . rmi . test . RMIOperation . valueOf ( com . devexperts . rmi . test . RMIFunctionalityTest . LargeRequestProcessor . class , com . devexperts . rmi . test . RMIFunctionalityTest . LargeRequestProcessor . class . getMethod ( "process" , byte [ ] . class ) ) ; byte [ ] largeData = new byte [ com . devexperts . rmi . test . RMIFunctionalityTest . LARGE_SIZE ] ; com . devexperts . rmi . test . ArrayList < com . devexperts . rmi . test . RMIRequest < java . lang . Integer > > requests = new com . devexperts . rmi . test . ArrayList ( ) ; com . devexperts . rmi . test . Random rnd = new com . devexperts . rmi . test . Random ( 6409837516922350791L ) ; for ( int i = 0 ; i < ( ( com . devexperts . rmi . test . RMIFunctionalityTest . MAX_CONCURRENT_MESSAGES ) / 2 ) ; i ++ ) { rnd . nextBytes ( largeData ) ; requests . add ( channelLogic . clientPort . createRequest ( processOp , ( ( java . lang . Object ) ( largeData ) ) ) ) ; } byte [ ] smallData = new byte [ com . devexperts . rmi . test . RMIFunctionalityTest . SMALL_SIZE ] ; rnd . nextBytes ( smallData ) ; com . devexperts . rmi . test . RMIRequest < java . lang . Integer > smallRequest = channelLogic . clientPort . createRequest ( processOp , ( ( java . lang . Object ) ( smallData ) ) ) ; smallRequest . send ( ) ; "<AssertPlaceHolder>" ; com . devexperts . rmi . test . RMIFunctionalityTest . log . info ( "---------------------------------" ) ; long currentTime = java . lang . System . currentTimeMillis ( ) ; for ( com . devexperts . rmi . test . RMIRequest < java . lang . Integer > request : requests ) { request . send ( ) ; while ( ( request . getState ( ) ) != ( RMIRequestState . SENT ) ) { java . lang . Thread . sleep ( 10 ) ; if ( ( java . lang . System . currentTimeMillis ( ) ) > ( 10000 + currentTime ) ) org . junit . Assert . fail ( "TIMEOUT!" ) ; } request . cancelWithConfirmation ( ) ; } for ( com . devexperts . rmi . test . RMIRequest < java . lang . Integer > request : requests ) try { request . getBlocking ( ) ; org . junit . Assert . fail ( ) ; } catch ( com . devexperts . rmi . test . RMIException e ) { if ( ( ( e . getType ( ) ) != ( RMIExceptionType . CANCELLED_BEFORE_EXECUTION ) ) && ( ( e . getType ( ) ) != ( RMIExceptionType . CANCELLED_DURING_EXECUTION ) ) ) { System . err . println ( "===<sp>Unexpected<sp>exception<sp>===" ) ; e . printStackTrace ( System . err ) ; org . junit . Assert . fail ( ( "TYPE<sp>=<sp>" + ( e . getType ( ) ) ) ) ; } } } hashCode ( int ) { if ( cipher == 0 ) return 0 ; long penta = com . devexperts . qd . kit . PentaCodec . decodeCipher ( cipher ) ; int plen = 0 ; while ( ( penta > > > plen ) != 0 ) plen += 5 ; int hash = 0 ; while ( plen > 0 ) { plen -= 5 ; int code = ( ( int ) ( penta > > > plen ) ) & 31 ; if ( ( code >= 30 ) && ( plen > 0 ) ) { plen -= 5 ; code = ( ( int ) ( penta > > > plen ) ) & 1023 ; } hash = ( 31 * hash ) + ( com . devexperts . qd . kit . PentaCodec . CHAR [ code ] ) ; } return hash ; }
|
org . junit . Assert . assertEquals ( com . devexperts . rmi . test . Arrays . hashCode ( smallData ) , ( ( int ) ( smallRequest . getBlocking ( ) ) ) )
|
testAdd ( ) { org . apache . flink . api . common . accumulators . DoubleMinimum min = new org . apache . flink . api . common . accumulators . DoubleMinimum ( ) ; min . add ( 1234.5768 ) ; min . add ( 9876.5432 ) ; min . add ( ( - 987.6543 ) ) ; min . add ( ( - 123.4567 ) ) ; "<AssertPlaceHolder>" ; } getLocalValue ( ) { return null ; }
|
org . junit . Assert . assertEquals ( ( - 987.6543 ) , min . getLocalValue ( ) , 0.0 )
|
postRawBody ( ) { java . lang . String sourceString = "\'\"@こんにちは-test-123" ; byte [ ] sentBytes = sourceString . getBytes ( StandardCharsets . UTF_8 ) ; io . joshworks . restclient . http . HttpResponse < java . lang . String > response = client . post ( ( ( io . joshworks . restclient . http . RestClientTest . BASE_URL ) + "/echoBinary" ) ) . header ( "Content-type" , "text/plain" ) . body ( sentBytes ) . asString ( ) ; "<AssertPlaceHolder>" ; } body ( ) { return parseBody ( responseClass ) ; }
|
org . junit . Assert . assertEquals ( sourceString , response . body ( ) )
|
testSortingWithOptions1 ( ) { io . anserini . eval . ResultDoc d1 = new io . anserini . eval . ResultDoc ( "1" , 1.0001 , true , true ) ; io . anserini . eval . ResultDoc d2 = new io . anserini . eval . ResultDoc ( "010" , 1.0001 , true , true ) ; io . anserini . eval . ResultDoc d3 = new io . anserini . eval . ResultDoc ( "1000" , 1.0001 , true , true ) ; io . anserini . eval . ResultDoc d4 = new io . anserini . eval . ResultDoc ( "00100" , 1.0001 , true , true ) ; java . util . List < io . anserini . eval . ResultDoc > l = new java . util . ArrayList ( ) ; l . add ( d1 ) ; l . add ( d2 ) ; l . add ( d3 ) ; l . add ( d4 ) ; java . util . Collections . sort ( l ) ; java . lang . String [ ] sorted = new java . lang . String [ 4 ] ; for ( int i = 0 ; i < ( l . size ( ) ) ; i ++ ) { sorted [ i ] = l . get ( i ) . getDocid ( ) ; } java . lang . String [ ] expected = new java . lang . String [ ] { "1" , "010" , "00100" , "1000" } ; "<AssertPlaceHolder>" ; } getDocid ( ) { return getHeaderMetadataItem ( "WARC-TREC-ID" ) ; }
|
org . junit . Assert . assertArrayEquals ( expected , sorted )
|
provides_monotonically_increasing_times_for_single_marker ( ) { timing . mark ( "mark1" ) ; long tlast = 0 ; for ( int i = 0 ; i < 10 ; i ++ ) { tlast = timing . getElapsed ( "mark1" ) ; java . lang . Thread . sleep ( 10 ) ; "<AssertPlaceHolder>" ; } } getElapsed ( java . lang . String ) { if ( startTimes . containsKey ( name ) ) { return ( java . lang . System . nanoTime ( ) ) - ( startTimes . get ( name ) ) ; } else { return 0 ; } }
|
org . junit . Assert . assertThat ( ( ( timing . getElapsed ( "mark1" ) ) > tlast ) , org . hamcrest . CoreMatchers . equalTo ( true ) )
|
serializeAndDeserialize ( ) { final org . eclipse . rdf4j . model . ValueFactory vf = org . eclipse . rdf4j . model . impl . SimpleValueFactory . getInstance ( ) ; final org . eclipse . rdf4j . model . Statement statement = vf . createStatement ( vf . createIRI ( "urn:person1" ) , vf . createIRI ( "urn:hasName" ) , vf . createLiteral ( "alice" ) , vf . createIRI ( "urn:testContext" ) ) ; final org . apache . rya . api . model . VisibilityStatement original = new org . apache . rya . api . model . VisibilityStatement ( statement , "a|b|c" ) ; try ( final org . apache . kafka . common . serialization . Serde < org . apache . rya . api . model . VisibilityStatement > serde = new org . apache . rya . streams . kafka . serialization . VisibilityStatementSerde ( ) ) { final byte [ ] bytes = serde . serializer ( ) . serialize ( "topic" , original ) ; final org . apache . rya . api . model . VisibilityStatement deserialized = serde . deserializer ( ) . deserialize ( "topic" , bytes ) ; "<AssertPlaceHolder>" ; } } deserialize ( org . apache . rya . api . RdfCloudTripleStoreConstants . TABLE_LAYOUT , org . apache . rya . api . resolver . triple . TripleRow ) { try { assert ( tripleRow != null ) && ( table_layout != null ) ; final byte [ ] row = tripleRow . getRow ( ) ; final int firstIndex = com . google . common . primitives . Bytes . indexOf ( row , org . apache . rya . api . RdfCloudTripleStoreConstants . DELIM_BYTE ) ; final int secondIndex = com . google . common . primitives . Bytes . lastIndexOf ( row , org . apache . rya . api . RdfCloudTripleStoreConstants . DELIM_BYTE ) ; final int typeIndex = com . google . common . primitives . Bytes . indexOf ( row , org . apache . rya . api . RdfCloudTripleStoreConstants . TYPE_DELIM_BYTE ) ; final byte [ ] first = java . util . Arrays . copyOf ( row , firstIndex ) ; final byte [ ] second = java . util . Arrays . copyOfRange ( row , ( firstIndex + 1 ) , secondIndex ) ; final byte [ ] third = java . util . Arrays . copyOfRange ( row , ( secondIndex + 1 ) , typeIndex ) ; final byte [ ] type = java . util . Arrays . copyOfRange ( row , typeIndex , row . length ) ; final byte [ ] columnFamily = tripleRow . getColumnFamily ( ) ; final boolean contextExists = ( columnFamily != null ) && ( ( columnFamily . length ) > 0 ) ; final org . apache . rya . api . domain . RyaIRI context = ( contextExists ) ? new org . apache . rya . api . domain . RyaIRI ( new java . lang . String ( columnFamily , java . nio . charset . StandardCharsets . UTF_8 ) ) : null ; final byte [ ] columnQualifier = tripleRow . getColumnQualifier ( ) ; final java . lang . String qualifier = ( ( columnQualifier != null ) && ( ( columnQualifier . length ) > 0 ) ) ? new java . lang . String ( columnQualifier , java . nio . charset . StandardCharsets . UTF_8 ) : null ; final java . lang . Long timestamp = tripleRow . getTimestamp ( ) ; final byte [ ] columnVisibility = tripleRow . getColumnVisibility ( ) ; final byte [ ] value = tripleRow . getValue ( ) ; switch ( table_layout ) { case SPO : { final byte [ ] obj = com . google . common . primitives . Bytes . concat ( third , type ) ; return new org . apache . rya . api . domain . RyaStatement ( new org . apache . rya . api . domain . RyaIRI ( new java . lang . String ( first , java . nio . charset . StandardCharsets . UTF_8 ) ) , new org . apache . rya . api . domain . RyaIRI ( new java . lang . String ( second , java . nio . charset . StandardCharsets . UTF_8 ) ) , org . apache . rya . api . resolver . RyaContext . getInstance ( ) . deserialize ( obj ) , context , qualifier , columnVisibility , value , timestamp ) ; } case PO : { final byte [ ] obj = com . google . common . primitives . Bytes . concat ( second , type ) ; return new org . apache . rya . api . domain . RyaStatement ( new org . apache . rya . api . domain . RyaIRI ( new java . lang . String ( third , java . nio . charset . StandardCharsets . UTF_8 ) ) , new org . apache . rya . api . domain . RyaIRI ( new java . lang . String ( first , java . nio . charset . StandardCharsets . UTF_8 ) ) , org . apache . rya . api . resolver . RyaContext . getInstance ( ) . deserialize ( obj ) , context , qualifier , columnVisibility , value , timestamp ) ; } case OSP : { final byte [ ] obj = com . google . common . primitives . Bytes . concat ( first , type ) ; return new org . apache . rya . api . domain . RyaStatement ( new org . apache . rya . api . domain . RyaIRI ( new java . lang . String ( second , java . nio . charset . StandardCharsets . UTF_8 ) ) , new org . apache .
|
org . junit . Assert . assertEquals ( original , deserialized )
|
testAllAny ( ) { org . nd4j . linalg . api . ndarray . INDArray allZeros = org . nd4j . linalg . factory . Nd4j . zeros ( DataType . FLOAT , 3 , 4 ) ; org . nd4j . linalg . api . ndarray . INDArray allOnes = org . nd4j . linalg . factory . Nd4j . ones ( DataType . FLOAT , 3 , 4 ) ; org . nd4j . linalg . api . ndarray . INDArray mixed = org . nd4j . linalg . factory . Nd4j . zeros ( DataType . FLOAT , 3 , 4 ) ; mixed . getRow ( 1 ) . assign ( 1.0 ) ; org . nd4j . linalg . api . ndarray . INDArray [ ] in = new org . nd4j . linalg . api . ndarray . INDArray [ ] { allZeros , allOnes , mixed } ; boolean [ ] expAll = new boolean [ ] { false , true , false } ; boolean [ ] expAny = new boolean [ ] { false , true , true } ; for ( int i = 0 ; i < 3 ; i ++ ) { org . nd4j . autodiff . samediff . SameDiff sd = org . nd4j . autodiff . samediff . SameDiff . create ( ) ; org . nd4j . autodiff . samediff . SDVariable s = sd . var ( "in" , in [ i ] ) ; org . nd4j . autodiff . samediff . SDVariable all = sd . f ( ) . all ( s ) ; org . nd4j . autodiff . samediff . SDVariable any = sd . f ( ) . any ( s ) ; java . lang . String err = org . nd4j . autodiff . validation . OpValidation . validate ( new org . nd4j . autodiff . validation . TestCase ( sd ) . gradientCheck ( false ) . expected ( all , org . nd4j . linalg . factory . Nd4j . create ( new boolean [ ] { expAll [ i ] } ) ) . expected ( any , org . nd4j . linalg . factory . Nd4j . create ( new boolean [ ] { expAny [ i ] } ) ) ) ; "<AssertPlaceHolder>" ; } } create ( org . nd4j . linalg . api . shape . LongShapeDescriptor ) { return org . nd4j . linalg . factory . Nd4j . create ( descriptor , true ) ; }
|
org . junit . Assert . assertNull ( err )
|
testShortPath ( ) { byte [ ] data = new byte [ 26 ] ; java . nio . ByteBuffer buffer = java . nio . ByteBuffer . wrap ( data ) ; buffer . putShort ( ( ( short ) ( AdobePathSegment . CLOSED_SUBPATH_LENGTH_RECORD ) ) ) ; buffer . putShort ( ( ( short ) ( 1 ) ) ) ; java . awt . geom . Path2D path = new com . twelvemonkeys . imageio . path . AdobePathBuilder ( data ) . path ( ) ; "<AssertPlaceHolder>" ; } path ( ) { java . util . List < java . util . List < com . twelvemonkeys . imageio . path . AdobePathSegment > > subPaths = new java . util . ArrayList < java . util . List < com . twelvemonkeys . imageio . path . AdobePathSegment > > ( ) ; java . util . List < com . twelvemonkeys . imageio . path . AdobePathSegment > currentPath = null ; int currentPathLength = 0 ; com . twelvemonkeys . imageio . path . AdobePathSegment segment ; while ( ( segment = nextSegment ( ) ) != null ) { if ( com . twelvemonkeys . imageio . path . AdobePathBuilder . DEBUG ) { System . out . println ( segment ) ; } if ( ( ( segment . selector ) == ( AdobePathSegment . OPEN_SUBPATH_LENGTH_RECORD ) ) || ( ( segment . selector ) == ( AdobePathSegment . CLOSED_SUBPATH_LENGTH_RECORD ) ) ) { if ( currentPath != null ) { if ( currentPathLength != ( currentPath . size ( ) ) ) { throw new javax . imageio . IIOException ( java . lang . String . format ( "Bad<sp>path,<sp>expected<sp>%d<sp>segments,<sp>found<sp>only<sp>%d" , currentPathLength , currentPath . size ( ) ) ) ; } subPaths . add ( currentPath ) ; } currentPath = new java . util . ArrayList < com . twelvemonkeys . imageio . path . AdobePathSegment > ( segment . length ) ; currentPathLength = segment . length ; } else if ( ( ( ( ( segment . selector ) == ( AdobePathSegment . OPEN_SUBPATH_BEZIER_LINKED ) ) || ( ( segment . selector ) == ( AdobePathSegment . OPEN_SUBPATH_BEZIER_UNLINKED ) ) ) || ( ( segment . selector ) == ( AdobePathSegment . CLOSED_SUBPATH_BEZIER_LINKED ) ) ) || ( ( segment . selector ) == ( AdobePathSegment . CLOSED_SUBPATH_BEZIER_UNLINKED ) ) ) { if ( currentPath == null ) { throw new javax . imageio . IIOException ( "Bad<sp>path,<sp>missing<sp>subpath<sp>length<sp>record" ) ; } if ( ( currentPath . size ( ) ) >= currentPathLength ) { throw new javax . imageio . IIOException ( java . lang . String . format ( "Bad<sp>path,<sp>expected<sp>%d<sp>segments,<sp>found%d" , currentPathLength , currentPath . size ( ) ) ) ; } currentPath . add ( segment ) ; } } if ( currentPath != null ) { if ( currentPathLength != ( currentPath . size ( ) ) ) { throw new javax . imageio . IIOException ( java . lang . String . format ( "Bad<sp>path,<sp>expected<sp>%d<sp>segments,<sp>found<sp>only<sp>%d" , currentPathLength , currentPath . size ( ) ) ) ; } subPaths . add ( currentPath ) ; } return pathToShape ( subPaths ) ; }
|
org . junit . Assert . assertNotNull ( path )
|
multiThreadedSyncCreateReadTest ( ) { java . util . Collection < java . lang . Class < ? > > ac = new java . util . ArrayList < java . lang . Class < ? > > ( ) ; ac . add ( org . apache . olingo . odata2 . annotation . processor . core . datasource . AnnotationsInMemoryDsTest . SimpleEntity . class ) ; final org . apache . olingo . odata2 . annotation . processor . core . datasource . AnnotationInMemoryDs localDs = new org . apache . olingo . odata2 . annotation . processor . core . datasource . AnnotationInMemoryDs ( org . apache . olingo . odata2 . annotation . processor . core . datasource . AnnotationsInMemoryDsTest . SimpleEntity . class . getPackage ( ) . getName ( ) , true ) ; final org . apache . olingo . odata2 . annotation . processor . core . edm . AnnotationEdmProvider localProvider = new org . apache . olingo . odata2 . annotation . processor . core . edm . AnnotationEdmProvider ( ac ) ; final org . apache . olingo . odata2 . api . edm . EdmEntitySet edmEntitySet = createMockedEdmEntitySet ( localProvider , "SimpleEntitySet" ) ; final java . util . concurrent . CountDownLatch latch ; java . util . List < java . lang . Thread > threads = new java . util . ArrayList < java . lang . Thread > ( ) ; int max = 500 ; latch = new java . util . concurrent . CountDownLatch ( max ) ; for ( int i = 0 ; i < max ; i ++ ) { java . lang . Runnable run = new java . lang . Runnable ( ) { @ org . apache . olingo . odata2 . annotation . processor . core . datasource . Override public void run ( ) { org . apache . olingo . odata2 . annotation . processor . core . datasource . AnnotationsInMemoryDsTest . SimpleEntity se = new org . apache . olingo . odata2 . annotation . processor . core . datasource . AnnotationsInMemoryDsTest . SimpleEntity ( ) ; se . id = java . lang . Integer . valueOf ( java . lang . String . valueOf ( java . lang . System . currentTimeMillis ( ) ) . substring ( 8 ) ) ; se . name = "Name:<sp>" + ( java . lang . System . currentTimeMillis ( ) ) ; try { localDs . createData ( edmEntitySet , se ) ; } catch ( java . lang . Exception ex ) { throw new java . lang . RuntimeException ( ex ) ; } finally { latch . countDown ( ) ; } } } ; threads . add ( new java . lang . Thread ( run ) ) ; } for ( java . lang . Thread thread : threads ) { thread . start ( ) ; } latch . await ( 60 , TimeUnit . SECONDS ) ; org . apache . olingo . odata2 . annotation . processor . core . datasource . DataStore < org . apache . olingo . odata2 . annotation . processor . core . datasource . AnnotationsInMemoryDsTest . SimpleEntity > ds = localDs . getDataStore ( org . apache . olingo . odata2 . annotation . processor . core . datasource . AnnotationsInMemoryDsTest . SimpleEntity . class ) ; java . util . Collection < org . apache . olingo . odata2 . annotation . processor . core . datasource . AnnotationsInMemoryDsTest . SimpleEntity > buildings = ds . read ( ) ; "<AssertPlaceHolder>" ; } read ( ) { return java . util . Collections . unmodifiableCollection ( dataStore . values ( ) ) ; }
|
org . junit . Assert . assertEquals ( max , buildings . size ( ) )
|
absolutePathResolveSiblingWithAbsolutePathTest ( ) { com . beijunyi . parallelgit . filesystem . GitPath path = gfs . getPath ( "/a/b/c" ) ; com . beijunyi . parallelgit . filesystem . GitPath result = path . resolveSibling ( "/a/d" ) ; "<AssertPlaceHolder>" ; } toString ( ) { if ( ( stringValue ) == null ) stringValue = decode ( com . beijunyi . parallelgit . filesystem . CHARSET , path ) ; return stringValue ; }
|
org . junit . Assert . assertEquals ( "/a/d" , result . toString ( ) )
|
testServiceLookup ( ) { org . nuxeo . ecm . platform . scanimporter . service . ScannedFileMapperService sfms = org . nuxeo . runtime . api . Framework . getService ( org . nuxeo . ecm . platform . scanimporter . service . ScannedFileMapperService . class ) ; "<AssertPlaceHolder>" ; } getService ( java . lang . Class ) { org . nuxeo . runtime . api . ServiceProvider provider = org . nuxeo . runtime . api . DefaultServiceProvider . getProvider ( ) ; if ( provider != null ) { return provider . getService ( serviceClass ) ; } org . nuxeo . runtime . api . Framework . checkRuntimeInitialized ( ) ; return org . nuxeo . runtime . api . Framework . runtime . getService ( serviceClass ) ; }
|
org . junit . Assert . assertNotNull ( sfms )
|
testUniquifyOrderingNotCaseSensitive ( ) { java . util . List < java . lang . String > nameList = com . google . common . collect . Lists . newArrayList ( "k68s" , "def" , "col1" , "COL1" , "abc" , "123" ) ; java . util . List < java . lang . String > resultList = org . apache . calcite . sql . validate . SqlValidatorUtil . uniquify ( nameList , SqlValidatorUtil . EXPR_SUGGESTER , false ) ; "<AssertPlaceHolder>" ; org . apache . calcite . sql . validate . SqlValidatorUtilTest . checkChangedFieldList ( nameList , resultList , false ) ; } not ( org . apache . calcite . linq4j . tree . Expression ) { return org . apache . calcite . linq4j . tree . Expressions . makeUnary ( ExpressionType . Not , expression ) ; }
|
org . junit . Assert . assertThat ( resultList , org . hamcrest . CoreMatchers . not ( nameList ) )
|
testBuild ( ) { org . lnu . is . domain . publicactivity . PublicActivityType context = new org . lnu . is . domain . publicactivity . PublicActivityType ( ) ; java . lang . String expectedQuery = "SELECT<sp>e<sp>FROM<sp>PublicActivityType<sp>e<sp>WHERE<sp>e.status=:status<sp>AND<sp>e.crtUserGroup<sp>IN<sp>(:userGroups)<sp>" ; org . lnu . is . pagination . MultiplePagedSearch < org . lnu . is . domain . publicactivity . PublicActivityType > pagedSearch = new org . lnu . is . pagination . MultiplePagedSearch ( ) ; pagedSearch . setEntity ( context ) ; java . lang . String actualQuery = unit . build ( pagedSearch ) ; "<AssertPlaceHolder>" ; } setEntity ( T ) { this . entity = entity ; }
|
org . junit . Assert . assertEquals ( expectedQuery , actualQuery )
|
formatInfoEquals ( ) { final org . toilelibre . libe . soundtransform . model . converted . FormatInfo fi1 = new org . toilelibre . libe . soundtransform . model . converted . FormatInfo ( 2 , 44100 ) ; final org . toilelibre . libe . soundtransform . model . converted . FormatInfo fi2 = new org . toilelibre . libe . soundtransform . model . converted . FormatInfo ( 2 , 44100 ) ; "<AssertPlaceHolder>" ; } sameFormatAs ( org . toilelibre . libe . soundtransform . model . converted . FormatInfo ) { return ( ( this . sampleRate ) == ( fi . sampleRate ) ) && ( ( this . sampleSize ) == ( fi . sampleSize ) ) ; }
|
org . junit . Assert . assertTrue ( fi1 . sameFormatAs ( fi2 ) )
|
testBasic9 ( ) { this . GEN_add_group = true ; this . GEN_add_group1 = true ; this . GEN_USE_RUNNING_AGGR = true ; int initialPostion1 = - 1 ; org . eclipse . birt . data . engine . api . querydefn . QueryDefinition qd = newGenIVReportQuery ( ) ; qd . setUsesDetails ( true ) ; org . eclipse . birt . data . engine . api . IQueryResults qr = myGenDataEngine . prepare ( qd ) . execute ( scope ) ; GEN_queryResultID = qr . getID ( ) ; org . eclipse . birt . data . engine . api . IResultIterator ri = qr . getResultIterator ( ) ; initialPostion1 = ri . getRowIndex ( ) ; ri . close ( ) ; qr . close ( ) ; myGenDataEngine . shutdown ( ) ; myGenDataEngine . clearCache ( dataSource , dataSet ) ; this . closeArchiveWriter ( ) ; org . eclipse . birt . data . engine . api . DataEngineContext deContext2 = newContext ( DataEngineContext . MODE_PRESENTATION , fileName , fileName ) ; myPreDataEngine = org . eclipse . birt . data . engine . api . DataEngine . newDataEngine ( deContext2 ) ; qd = newPreIVReportQuery ( this . UPDATE_add_filter , this . UPDATE_add_sort , ( - 1 ) , org . eclipse . birt . data . engine . impl . rd . ViewingTest2 . UPDATE ) ; qd . setUsesDetails ( true ) ; qd . setQueryResultsID ( this . GEN_queryResultID ) ; qr = myPreDataEngine . prepare ( qd ) . execute ( null ) ; this . UPDATE_queryResultID = qr . getID ( ) ; ri = qr . getResultIterator ( ) ; int initialPostion2 = ri . getRowIndex ( ) ; this . closeArchiveReader ( ) ; this . closeArchiveWriter ( ) ; "<AssertPlaceHolder>" ; } closeArchiveWriter ( ) { if ( ( archiveWriter ) != null ) try { archiveWriter . finish ( ) ; } catch ( java . io . IOException e ) { throw new org . eclipse . birt . data . engine . core . DataException ( "error" , e ) ; } }
|
org . junit . Assert . assertTrue ( ( ( initialPostion1 == initialPostion2 ) && ( initialPostion2 == 0 ) ) )
|
testOnSelect ( ) { presenter . onSelect ( org . dashbuilder . displayer . DisplayerType . BARCHART ) ; "<AssertPlaceHolder>" ; verify ( typeSelectedEvent ) . fire ( any ( org . dashbuilder . displayer . client . events . DisplayerTypeSelectedEvent . class ) ) ; } getSelectedType ( ) { return selectedType ; }
|
org . junit . Assert . assertEquals ( presenter . getSelectedType ( ) , org . dashbuilder . displayer . DisplayerType . BARCHART )
|
testNullResumableUploadURI_succeeds ( ) { com . google . api . ads . adwords . lib . utils . BatchJobUploadStatus status = new com . google . api . ads . adwords . lib . utils . BatchJobUploadStatus ( 100L , null ) ; "<AssertPlaceHolder>" ; }
|
org . junit . Assert . assertNotNull ( status )
|
testEnumerationRandom ( ) { final net . sf . qualitytest . blueprint . BlueprintSession session = new net . sf . qualitytest . blueprint . BlueprintSession ( ) ; final net . sf . qualitytest . blueprint . BlueprintConfiguration rand = new net . sf . qualitytest . blueprint . configuration . RandomBlueprintConfiguration ( ) ; final net . sf . qualitytest . blueprint . BlueprintTest_enumeration . SimpleEnum simple0 = net . sf . qualitytest . blueprint . Blueprint . construct ( net . sf . qualitytest . blueprint . BlueprintTest_enumeration . SimpleEnum . class , rand , session ) ; final net . sf . qualitytest . blueprint . BlueprintTest_enumeration . SimpleEnum simple1 = net . sf . qualitytest . blueprint . Blueprint . construct ( net . sf . qualitytest . blueprint . BlueprintTest_enumeration . SimpleEnum . class , rand , session ) ; final net . sf . qualitytest . blueprint . BlueprintTest_enumeration . SimpleEnum simple2 = net . sf . qualitytest . blueprint . Blueprint . construct ( net . sf . qualitytest . blueprint . BlueprintTest_enumeration . SimpleEnum . class , rand , session ) ; final net . sf . qualitytest . blueprint . BlueprintTest_enumeration . SimpleEnum simple3 = net . sf . qualitytest . blueprint . Blueprint . construct ( net . sf . qualitytest . blueprint . BlueprintTest_enumeration . SimpleEnum . class , rand , session ) ; final boolean a = simple0 != simple1 ; final boolean b = simple0 != simple2 ; final boolean c = simple2 != simple1 ; final boolean d = simple3 != simple0 ; final boolean e = simple3 != simple1 ; final boolean f = simple3 != simple2 ; "<AssertPlaceHolder>" ; } construct ( java . lang . Class , net . sf . qualitytest . blueprint . BlueprintConfiguration , net . sf . qualitytest . blueprint . BlueprintSession ) { net . sf . qualitycheck . Check . notNull ( clazz , "clazz" ) ; net . sf . qualitycheck . Check . notNull ( config , "config" ) ; net . sf . qualitycheck . Check . notNull ( session , "session" ) ; final net . sf . qualitytest . blueprint . CreationStrategy < ? > creator = config . findCreationStrategyForType ( clazz ) ; return net . sf . qualitytest . blueprint . Blueprint . blueprintObject ( clazz , config , creator , session ) ; }
|
org . junit . Assert . assertTrue ( ( ( ( ( ( a || b ) || c ) || d ) || e ) || f ) )
|
testEmptyAfterLoad ( ) { ch . ethz . globis . phtree . v16 . Node ht = create ( ) ; for ( int r = 0 ; r < 10 ; r ++ ) { for ( int i = 0 ; i < 100000 ; i ++ ) { ch . ethz . globis . phtree . v16 . Node . BSTEntry e = ht . bstGetOrCreate ( i , ch . ethz . globis . phtree . bst . TestBST16 . tree ) ; e . set ( i , new long [ ] { i } , i ) ; } for ( int i = 0 ; i < 100000 ; i ++ ) { ch . ethz . globis . phtree . v16 . Node . BSTEntry e = ht . bstRemove ( i , new long [ ] { i } , null , ch . ethz . globis . phtree . bst . TestBST16 . tree ) ; "<AssertPlaceHolder>" ; } checkEmpty ( ht ) ; } } getValue ( ) { return value ; }
|
org . junit . Assert . assertEquals ( i , ( ( int ) ( e . getValue ( ) ) ) )
|
testConvertFoo ( ) { java . sql . Date result = underTest . convert ( "foo" ) ; "<AssertPlaceHolder>" ; } convert ( java . lang . String ) { if ( "S" . equalsIgnoreCase ( value ) ) { return Size . TINY ; } else if ( "M" . equalsIgnoreCase ( value ) ) { return Size . NORMAL ; } else if ( "L" . equalsIgnoreCase ( value ) ) { return Size . HUGE ; } else { return null ; } }
|
org . junit . Assert . assertNull ( result )
|
testOr ( ) { org . drools . modelcompiler . drlx . CompiledUnit unit = org . drools . modelcompiler . drlx . DrlxCompiler . compileFolders ( "src/test/resources/model" , "src/test/resources/unit4" ) ; org . kie . api . runtime . rule . RuleUnitExecutor executor = unit . createExecutor ( ) ; java . lang . reflect . Constructor < ? > personConstructor = unit . getConstructorFor ( "org.model.Person" , java . lang . String . class , int . class ) ; java . lang . reflect . Constructor < ? > childConstructor = unit . getConstructorFor ( "org.model.Child" , java . lang . String . class , int . class , int . class ) ; org . kie . api . runtime . rule . DataSource < ? > persons = executor . newDataSource ( "persons" , personConstructor . newInstance ( "Mario" , 43 ) , personConstructor . newInstance ( "Marilena" , 44 ) , childConstructor . newInstance ( "Sofia" , 5 , 10 ) ) ; org . kie . api . runtime . rule . RuleUnit ruleUnit = unit . getOrCreateRuleUnit ( ) ; "<AssertPlaceHolder>" ; } run ( org . junit . runner . notification . RunNotifier ) { for ( org . drools . workbench . models . testscenarios . shared . Scenario scenario : scenarios ) { runScenario ( notifier , scenario ) ; } }
|
org . junit . Assert . assertEquals ( 1 , executor . run ( ruleUnit ) )
|
test_server_call_client ( ) { final com . lts . job . remoting . RemotingServer server = com . lts . job . remoting . NettyRPCTest . createRemotingServer ( ) ; final com . lts . job . remoting . RemotingClient client = com . lts . job . remoting . NettyRPCTest . createRemotingClient ( ) ; server . registerProcessor ( 0 , new com . lts . job . remoting . NettyRequestProcessor ( ) { @ com . lts . job . remoting . Override public com . lts . job . remoting . protocol . RemotingCommand processRequest ( io . netty . channel . ChannelHandlerContext ctx , com . lts . job . remoting . protocol . RemotingCommand request ) { try { return server . invokeSync ( ctx . channel ( ) , request , ( 1000 * 10 ) ) ; } catch ( java . lang . InterruptedException e ) { com . lts . job . remoting . NettyRPCTest . logger . error ( e . getMessage ( ) , e ) ; } catch ( com . lts . job . remoting . RemotingSendRequestException e ) { com . lts . job . remoting . NettyRPCTest . logger . error ( e . getMessage ( ) , e ) ; } catch ( com . lts . job . remoting . RemotingTimeoutException e ) { com . lts . job . remoting . NettyRPCTest . logger . error ( e . getMessage ( ) , e ) ; } return null ; } } , java . util . concurrent . Executors . newCachedThreadPool ( ) ) ; client . registerProcessor ( 0 , new com . lts . job . remoting . NettyRequestProcessor ( ) { @ com . lts . job . remoting . Override public com . lts . job . remoting . protocol . RemotingCommand processRequest ( io . netty . channel . ChannelHandlerContext ctx , com . lts . job . remoting . protocol . RemotingCommand request ) { System . out . println ( ( "client<sp>receive<sp>server<sp>request<sp>=<sp>" + request ) ) ; request . setRemark ( "client<sp>remark" ) ; return request ; } } , java . util . concurrent . Executors . newCachedThreadPool ( ) ) ; for ( int i = 0 ; i < 3 ; i ++ ) { com . lts . job . remoting . protocol . RemotingCommand request = com . lts . job . remoting . protocol . RemotingCommand . createRequestCommand ( 0 , null ) ; com . lts . job . remoting . protocol . RemotingCommand response = client . invokeSync ( "127.0.0.1:8888" , request , ( 1000 * 3 ) ) ; System . out . println ( ( "invoke<sp>result<sp>=<sp>" + response ) ) ; "<AssertPlaceHolder>" ; } client . shutdown ( ) ; server . shutdown ( ) ; System . out . println ( "-----------------------------------------------------------------" ) ; } invokeSync ( java . lang . String , com . lts . job . remoting . protocol . RemotingCommand , long ) { final com . lts . job . remoting . netty . Channel channel = this . getAndCreateChannel ( addr ) ; if ( ( channel != null ) && ( channel . isActive ( ) ) ) { try { return this . invokeSyncImpl ( channel , request , timeoutMillis ) ; } catch ( com . lts . job . remoting . exception . RemotingSendRequestException e ) { com . lts . job . remoting . netty . NettyRemotingClient . log . warn ( "invokeSync:<sp>send<sp>request<sp>exception,<sp>so<sp>close<sp>the<sp>channel[{}]" , addr ) ; this . closeChannel ( addr , channel ) ; throw e ; } catch ( com . lts . job . remoting . exception . RemotingTimeoutException e ) { com . lts . job . remoting . netty . NettyRemotingClient . log . warn ( "invokeSync:<sp>wait<sp>response<sp>timeout<sp>exception,<sp>the<sp>channel[{}]" , addr ) ; throw e ; } } else { this . closeChannel ( addr , channel ) ; throw new com . lts . job . remoting . exception . RemotingConnectException ( addr ) ; } }
|
org . junit . Assert . assertTrue ( ( response != null ) )
|
testSaslExternalConnection ( ) { org . apache . qpid . jms . transports . TransportOptions sslOptions = new org . apache . qpid . jms . transports . TransportOptions ( ) ; sslOptions . setKeyStoreLocation ( org . apache . qpid . jms . integration . SaslIntegrationTest . BROKER_JKS_KEYSTORE ) ; sslOptions . setKeyStorePassword ( org . apache . qpid . jms . integration . SaslIntegrationTest . PASSWORD ) ; sslOptions . setVerifyHost ( false ) ; sslOptions . setTrustStoreLocation ( org . apache . qpid . jms . integration . SaslIntegrationTest . BROKER_JKS_TRUSTSTORE ) ; sslOptions . setTrustStorePassword ( org . apache . qpid . jms . integration . SaslIntegrationTest . PASSWORD ) ; java . lang . String connOptions = ( ( ( ( ( ( ( ( ( "?transport.trustStoreLocation=" + ( org . apache . qpid . jms . integration . SaslIntegrationTest . CLIENT_JKS_TRUSTSTORE ) ) + "&" ) + "transport.trustStorePassword=" ) + ( org . apache . qpid . jms . integration . SaslIntegrationTest . PASSWORD ) ) + "&" ) + "transport.keyStoreLocation=" ) + ( org . apache . qpid . jms . integration . SaslIntegrationTest . CLIENT_JKS_KEYSTORE ) ) + "&" ) + "transport.keyStorePassword=" ) + ( org . apache . qpid . jms . integration . SaslIntegrationTest . PASSWORD ) ; javax . net . ssl . SSLContext context = org . apache . qpid . jms . transports . TransportSupport . createJdkSslContext ( sslOptions ) ; try ( org . apache . qpid . jms . test . testpeer . TestAmqpPeer testPeer = new org . apache . qpid . jms . test . testpeer . TestAmqpPeer ( context , true ) ) { testPeer . expectSaslExternal ( ) ; testPeer . expectOpen ( ) ; testPeer . expectBegin ( ) ; javax . jms . ConnectionFactory factory = new org . apache . qpid . jms . JmsConnectionFactory ( ( ( "amqps://localhost:" + ( testPeer . getServerPort ( ) ) ) + connOptions ) ) ; javax . jms . Connection connection = factory . createConnection ( ) ; connection . setClientID ( "clientName" ) ; testPeer . waitForAllHandlersToComplete ( 1000 ) ; "<AssertPlaceHolder>" ; testPeer . expectClose ( ) ; connection . close ( ) ; } } getThrowable ( ) { return _driverRunnable . getException ( ) ; }
|
org . junit . Assert . assertNull ( testPeer . getThrowable ( ) )
|
testConvertStr ( ) { org . apache . sqoop . mapreduce . db . TextSplitter splitter = new org . apache . sqoop . mapreduce . db . TextSplitter ( ) ; java . lang . String out = splitter . bigDecimalToString ( splitter . stringToBigDecimal ( "big<sp>str" ) ) ; "<AssertPlaceHolder>" ; } stringToBigDecimal ( java . lang . String ) { java . math . BigDecimal curPlace = org . apache . sqoop . mapreduce . db . TextSplitter . ONE_PLACE ; java . math . BigDecimal result = java . math . BigDecimal . ZERO ; int len = java . lang . Math . min ( str . length ( ) , org . apache . sqoop . mapreduce . db . TextSplitter . MAX_CHARS ) ; for ( int i = 0 ; i < len ; i ++ ) { int codePoint = str . codePointAt ( i ) ; result = result . add ( tryDivide ( new java . math . BigDecimal ( codePoint ) , curPlace ) ) ; curPlace = curPlace . multiply ( org . apache . sqoop . mapreduce . db . TextSplitter . ONE_PLACE ) ; } return result ; }
|
org . junit . Assert . assertEquals ( "big<sp>str" , out )
|
getMemberships_shouldReturnUnvoidedMemberships ( ) { executeDataSet ( org . openmrs . api . CohortServiceTest . COHORT_XML ) ; org . openmrs . Cohort cohort = org . openmrs . api . context . Context . getCohortService ( ) . getCohort ( 1 ) ; org . openmrs . CohortMembership nonVoidedMembership = new org . openmrs . CohortMembership ( 4 ) ; org . openmrs . CohortMembership voidedMembership = new org . openmrs . CohortMembership ( 7 ) ; voidedMembership . setVoided ( true ) ; voidedMembership . setVoidedBy ( org . openmrs . api . context . Context . getAuthenticatedUser ( ) ) ; voidedMembership . setDateVoided ( new java . util . Date ( ) ) ; voidedMembership . setVoidReason ( "Void<sp>reason" ) ; cohort . addMembership ( nonVoidedMembership ) ; cohort . addMembership ( voidedMembership ) ; org . openmrs . api . context . Context . getCohortService ( ) . saveCohort ( cohort ) ; java . util . Collection < org . openmrs . CohortMembership > unvoidedMemberships = cohort . getMemberships ( false ) ; "<AssertPlaceHolder>" ; } size ( ) { return getMemberships ( ) . stream ( ) . filter ( ( m ) -> ! ( m . getVoided ( ) ) ) . collect ( java . util . stream . Collectors . toList ( ) ) . size ( ) ; }
|
org . junit . Assert . assertEquals ( 2 , unvoidedMemberships . size ( ) )
|
testEvenNumberWithDefaultIntegerMeanBuilder ( ) { final kieker . analysisteetime . util . RunningMedian < java . lang . Integer > runningMedian = kieker . analysisteetime . util . RunningMedian . forInteger ( ) ; runningMedian . add ( 8 ) ; runningMedian . add ( 6 ) ; runningMedian . add ( 2 ) ; runningMedian . add ( 4 ) ; final long median = runningMedian . getMedian ( ) ; "<AssertPlaceHolder>" ; } getMedian ( ) { if ( ( this . maxHeap . isEmpty ( ) ) && ( this . minHeap . isEmpty ( ) ) ) { throw new java . lang . IllegalStateException ( "There<sp>are<sp>no<sp>present<sp>values<sp>for<sp>this<sp>running<sp>median." ) ; } else if ( ( this . maxHeap . size ( ) ) == ( this . minHeap . size ( ) ) ) { return this . meanBuilder . apply ( this . maxHeap . peek ( ) , this . minHeap . peek ( ) ) ; } else if ( ( this . maxHeap . size ( ) ) > ( this . minHeap . size ( ) ) ) { return this . maxHeap . peek ( ) ; } else { return this . minHeap . peek ( ) ; } }
|
org . junit . Assert . assertEquals ( 5 , median )
|
nonAuthorizedUserShouldNotBeAbleToPutNewParameter ( ) { addReadForEveryone ( org . nuxeo . ecm . localconf . CHILD_WORKSPACE_REF ) ; try ( org . nuxeo . ecm . core . api . CoreSession newSession = openSessionAs ( "user1" ) ) { org . nuxeo . ecm . automation . OperationContext ctx = new org . nuxeo . ecm . automation . OperationContext ( newSession ) ; "<AssertPlaceHolder>" ; org . nuxeo . ecm . automation . OperationChain chain = new org . nuxeo . ecm . automation . OperationChain ( "testPutSimpleConfigurationParametersChain" ) ; chain . add ( FetchDocument . ID ) . set ( "value" , org . nuxeo . ecm . localconf . CHILD_WORKSPACE_REF ) ; chain . add ( PutSimpleConfParam . ID ) . set ( "key" , "key1" ) . set ( "value" , "value1" ) ; service . run ( ctx , chain ) ; } } openSessionAs ( java . lang . String ) { return settings . openSessionAs ( username ) ; }
|
org . junit . Assert . assertNotNull ( ctx )
|
test ( ) { net . minidev . json . JSONObject objectToClean = ( ( jsonToClean ) != null ) ? ( ( net . minidev . json . JSONObject ) ( net . minidev . json . JSONValue . parseWithException ( jsonToClean ) ) ) : null ; net . minidev . json . JSONObject expectedObject = ( ( expectedJson ) != null ) ? ( ( net . minidev . json . JSONObject ) ( net . minidev . json . JSONValue . parseWithException ( expectedJson ) ) ) : null ; net . minidev . json . JSONObject toRemove = ( ( elementsToRemove ) != null ) ? ( ( net . minidev . json . JSONObject ) ( net . minidev . json . JSONValue . parseWithException ( elementsToRemove ) ) ) : null ; net . minidev . json . actions . ElementRemover er = new net . minidev . json . actions . ElementRemover ( toRemove ) ; er . remove ( objectToClean ) ; "<AssertPlaceHolder>" ; } remove ( net . minidev . json . JSONObject ) { net . minidev . json . actions . traverse . JSONTraverseAction strategy = new net . minidev . json . actions . traverse . RemovePathsJsonAction ( this . pathsToRemove ) ; net . minidev . json . actions . traverse . JSONTraverser traversal = new net . minidev . json . actions . traverse . JSONTraverser ( strategy ) ; traversal . traverse ( objectToClean ) ; return ( ( net . minidev . json . JSONObject ) ( strategy . result ( ) ) ) ; }
|
org . junit . Assert . assertEquals ( expectedObject , objectToClean )
|
testSerialize ( ) { com . nextdoor . bender . ipc . es . ElasticSearchTransportSerializer serializer = new com . nextdoor . bender . ipc . es . ElasticSearchTransportSerializer ( false , "event" , "log" , false , "_routing" ) ; com . nextdoor . bender . InternalEvent record = new com . nextdoor . bender . ipc . es . ElasticSearchTansportSerializerTest . DummyEvent ( "foo" , 0 ) ; record . setSerialized ( "foo" ) ; java . lang . String actual = new java . lang . String ( serializer . serialize ( record ) ) ; java . lang . String expected = com . nextdoor . bender . testutils . TestUtils . getResourceString ( this . getClass ( ) , "basic_output.txt" ) ; "<AssertPlaceHolder>" ; } getResourceString ( java . lang . Class , java . lang . String ) { return org . apache . commons . io . IOUtils . toString ( new java . io . InputStreamReader ( clazz . getResourceAsStream ( resource ) , "UTF-8" ) ) ; }
|
org . junit . Assert . assertEquals ( expected , actual )
|
when_cancelledMidSnapshot_then_snapshotDoneForTaskletSucceeds ( ) { ssContext . initTaskletCount ( 2 , 0 ) ; java . util . concurrent . CompletableFuture < com . hazelcast . jet . impl . operation . SnapshotOperation . SnapshotOperationResult > future = ssContext . startNewSnapshot ( 10 , "map" , false ) ; ssContext . snapshotDoneForTasklet ( 1 , 1 , 1 ) ; "<AssertPlaceHolder>" ; ssContext . cancel ( ) ; ssContext . snapshotDoneForTasklet ( 1 , 1 , 1 ) ; } isDone ( ) { return ( numActiveQueues ) == 0 ; }
|
org . junit . Assert . assertFalse ( future . isDone ( ) )
|
test_CaptureEnPassantRight ( ) { classUnderTest = addWhitePawn ( GenericPosition . e5 ) ; addBlackPawn ( GenericPosition . d7 ) ; pm = new eubos . board . pieces . PositionManager ( new eubos . board . Board ( pl ) , eubos . board . pieces . Piece . Colour . white ) ; pm . performMove ( new eubos . board . pieces . GenericMove ( GenericPosition . d7 , GenericPosition . d5 ) ) ; ml = classUnderTest . generateMoves ( pm . getTheBoard ( ) ) ; expectedMove = new eubos . board . pieces . GenericMove ( GenericPosition . e5 , GenericPosition . d6 ) ; "<AssertPlaceHolder>" ; } getTheBoard ( ) { return theBoard ; }
|
org . junit . Assert . assertTrue ( ml . contains ( expectedMove ) )
|
testReadCsvReaderOneChar ( ) { java . util . Iterator < java . lang . String [ ] > iterator = org . simpleflatmapper . csv . CsvParser . iterator ( new java . io . StringReader ( "0" ) ) ; java . lang . String [ ] strs = iterator . next ( ) ; "<AssertPlaceHolder>" ; } next ( ) { ( currentIndex ) ++ ; if ( ( currentIndex ) < ( objects . length ) ) { return true ; } return false ; }
|
org . junit . Assert . assertEquals ( "0" , strs [ 0 ] )
|
testFetchByPrimaryKeyExisting ( ) { com . liferay . invitation . invite . members . model . MemberRequest newMemberRequest = addMemberRequest ( ) ; com . liferay . invitation . invite . members . model . MemberRequest existingMemberRequest = _persistence . fetchByPrimaryKey ( newMemberRequest . getPrimaryKey ( ) ) ; "<AssertPlaceHolder>" ; } getPrimaryKey ( ) { return _amImageEntryId ; }
|
org . junit . Assert . assertEquals ( existingMemberRequest , newMemberRequest )
|
testUpgradeWhenCoreIsInLatestSchemaVersion ( ) { _innerPortalUpgradeProcess . updateSchemaVersion ( com . liferay . portal . upgrade . PortalUpgradeProcess . getLatestSchemaVersion ( ) ) ; com . liferay . portal . upgrade . PortalUpgradeProcess portalServiceUpgrade = new com . liferay . portal . upgrade . PortalUpgradeProcess ( ) ; try { portalServiceUpgrade . upgrade ( ) ; } catch ( java . lang . Exception e ) { throw new java . sql . SQLException ( "No<sp>upgrade<sp>processes<sp>should<sp>have<sp>been<sp>executed" , e ) ; } try ( java . sql . Connection connection = com . liferay . portal . kernel . dao . jdbc . DataAccess . getConnection ( ) ) { "<AssertPlaceHolder>" ; } } isInLatestSchemaVersion ( java . sql . Connection ) { com . liferay . portal . kernel . version . Version latestSchemaVersion = com . liferay . portal . upgrade . PortalUpgradeProcess . getLatestSchemaVersion ( ) ; if ( latestSchemaVersion . equals ( com . liferay . portal . upgrade . PortalUpgradeProcess . getCurrentSchemaVersion ( connection ) ) ) { return true ; } return false ; }
|
org . junit . Assert . assertTrue ( com . liferay . portal . upgrade . PortalUpgradeProcess . isInLatestSchemaVersion ( connection ) )
|
testEqualsSelf ( ) { org . jfree . data . time . Minute minute = new org . jfree . data . time . Minute ( ) ; "<AssertPlaceHolder>" ; } equals ( java . lang . Object ) { if ( obj == ( this ) ) { return true ; } if ( ! ( obj instanceof org . jfree . data . general . TestIntervalCategoryDataset ) ) { return false ; } org . jfree . data . general . TestIntervalCategoryDataset that = ( ( org . jfree . data . general . TestIntervalCategoryDataset ) ( obj ) ) ; if ( ! ( getRowKeys ( ) . equals ( that . getRowKeys ( ) ) ) ) { return false ; } if ( ! ( getColumnKeys ( ) . equals ( that . getColumnKeys ( ) ) ) ) { return false ; } int rowCount = getRowCount ( ) ; int colCount = getColumnCount ( ) ; for ( int r = 0 ; r < rowCount ; r ++ ) { for ( int c = 0 ; c < colCount ; c ++ ) { java . lang . Number v1 = getValue ( r , c ) ; java . lang . Number v2 = that . getValue ( r , c ) ; if ( v1 == null ) { if ( v2 != null ) { return false ; } } else if ( ! ( v1 . equals ( v2 ) ) ) { return false ; } } } return true ; }
|
org . junit . Assert . assertTrue ( minute . equals ( minute ) )
|
canBeNull ( ) { toDoItem . setNotes ( ( ( java . lang . String ) ( null ) ) ) ; "<AssertPlaceHolder>" ; } getNotes ( ) { return notes ; }
|
org . junit . Assert . assertThat ( toDoItem . getNotes ( ) , org . hamcrest . CoreMatchers . is ( ( ( java . lang . String ) ( null ) ) ) )
|