| input (stringlengths 28–18.7k) | output (stringlengths 39–1.69k) |
|---|---|
test ( ) { org . cache2k . benchmark . util . ScrambledZipfianPattern g = new org . cache2k . benchmark . util . ScrambledZipfianPattern ( 123 , 900 ) ; java . util . Set < java . lang . Long > uniqueVals = new java . util . HashSet ( ) ; for ( int i = 0 ; i < 10000 ; i ++ ) { uniqueVals . add ( g . nextLong ( ) ) ; } "<AssertPlaceHolder>" ; } size ( ) { return full ? buffer . length : pos ; }
|
org . junit . Assert . assertEquals ( 900 , uniqueVals . size ( ) )
|
groupTest ( ) { org . hawkular . alerts . api . model . trigger . Trigger groupTrigger = new org . hawkular . alerts . api . model . trigger . Trigger ( "groupTrigger0" , "groupTrigger0" ) ; groupTrigger . setType ( TriggerType . GROUP ) ; definitions . addGroupTrigger ( "tenant0" , groupTrigger ) ; org . hawkular . alerts . api . model . condition . Condition fc = new org . hawkular . alerts . api . model . condition . AvailabilityCondition ( "group-trigger" , org . hawkular . alerts . api . model . trigger . Mode . FIRING , "avail" , org . hawkular . alerts . api . model . condition . AvailabilityCondition . Operator . NOT_UP ) ; definitions . setGroupConditions ( "tenant0" , "groupTrigger0" , Mode . FIRING , java . util . Collections . singleton ( fc ) , null ) ; definitions . addMemberTrigger ( "tenant0" , "groupTrigger0" , "member0" , "member0" , "member0" , null , null , java . util . Collections . singletonMap ( "avail" , "avail0" ) ) ; "<AssertPlaceHolder>" ; definitions . removeGroupTrigger ( "tenant0" , "groupTrigger0" , false , false ) ; } getMemberTriggers ( java . lang . String , java . lang . String , boolean ) { java . util . Collection < org . hawkular . alerts . engine . impl . ispn . model . IspnTrigger > ispnTriggers = queryFactory . from ( org . hawkular . alerts . engine . impl . ispn . model . IspnTrigger . class ) . having ( "tenantId" ) . eq ( tenantId ) . and ( ) . having ( "memberOf" ) . eq ( groupId ) . build ( ) . list ( ) ; return ispnTriggers . stream ( ) . map ( ( t ) -> t . getTrigger ( ) ) . filter ( ( t ) -> includeOrphans || ( TriggerType . MEMBER == ( t . getType ( ) ) ) ) . collect ( java . util . stream . Collectors . toList ( ) ) ; }
|
org . junit . Assert . assertEquals ( 1 , definitions . getMemberTriggers ( "tenant0" , "groupTrigger0" , true ) . size ( ) )
|
ensureInterceptorCountIsConstant ( ) { gov . hhs . fha . nhinc . messaging . client . CONNECTClient < gov . hhs . fha . nhinc . messaging . service . port . TestServicePortType > client = createClient ( ) ; org . apache . cxf . endpoint . Client cxfClient = org . apache . cxf . frontend . ClientProxy . getClient ( client . getPort ( ) ) ; int numOutInterceptors = cxfClient . getOutInterceptors ( ) . size ( ) ; createClient ( ) ; createClient ( ) ; gov . hhs . fha . nhinc . messaging . client . CONNECTClient < gov . hhs . fha . nhinc . messaging . service . port . TestServicePortType > client2 = createClient ( ) ; org . apache . cxf . endpoint . Client cxfClient2 = org . apache . cxf . frontend . ClientProxy . getClient ( client2 . getPort ( ) ) ; "<AssertPlaceHolder>" ; } getPort ( ) { return null ; }
|
org . junit . Assert . assertEquals ( numOutInterceptors , cxfClient2 . getOutInterceptors ( ) . size ( ) )
|
snapshotWriteToOutputStream ( ) { okio . ByteString byteString = concatenateBuffers ( okio . SegmentSharingTest . xs , okio . SegmentSharingTest . ys , okio . SegmentSharingTest . zs ) . snapshot ( ) ; okio . Buffer out = new okio . Buffer ( ) ; byteString . write ( out . outputStream ( ) ) ; "<AssertPlaceHolder>" ; } readUtf8 ( ) { buffer . writeAll ( source ) ; return buffer . readUtf8 ( ) ; }
|
org . junit . Assert . assertEquals ( ( ( ( okio . SegmentSharingTest . xs ) + ( okio . SegmentSharingTest . ys ) ) + ( okio . SegmentSharingTest . zs ) ) , out . readUtf8 ( ) )
|
retrieveAllRegistrations ( ) { registrationBC . removeAll ( ) ; de . mpg . imeji . logic . vo . User user1 = new de . mpg . imeji . logic . vo . User ( ) ; user1 . setEmail ( "retrieve-all-1@example.org" ) ; user1 . setPerson ( de . mpg . imeji . logic . controller . util . ImejiFactory . newPerson ( "test" , "user" , "orga" ) ) ; de . mpg . imeji . logic . vo . User user2 = new de . mpg . imeji . logic . vo . User ( ) ; user2 . setEmail ( "retrieve-all-2@example.org" ) ; user2 . setPerson ( de . mpg . imeji . logic . controller . util . ImejiFactory . newPerson ( "test" , "user" , "orga" ) ) ; registrationBC . register ( user1 ) ; registrationBC . register ( user2 ) ; "<AssertPlaceHolder>" ; } retrieveAll ( ) { return com . google . common . collect . Lists . transform ( de . mpg . imeji . logic . search . jenasearch . ImejiSPARQL . exec ( de . mpg . imeji . logic . search . jenasearch . JenaCustomQueries . selectSpaceAll ( ) , Imeji . spaceModel ) , new com . google . common . base . Function < java . lang . String , de . mpg . imeji . logic . vo . Space > ( ) { @ de . mpg . imeji . logic . controller . resource . Override public de . mpg . imeji . logic . vo . Space apply ( java . lang . String id ) { try { return retrieve ( java . net . URI . create ( id ) , Imeji . adminUser ) ; } catch ( de . mpg . imeji . exceptions . ImejiException e ) { de . mpg . imeji . logic . controller . resource . SpaceController . LOGGER . info ( ( "Cannot<sp>retrieve<sp>space:<sp>" + id ) ) ; } return null ; } } ) ; }
|
org . junit . Assert . assertEquals ( registrationBC . retrieveAll ( ) . size ( ) , 2 )
|
testRecodeFunction ( ) { final java . util . Map < java . lang . String , java . lang . String > VALUE_MAP = new java . util . HashMap < java . lang . String , java . lang . String > ( ) { { put ( "sc.1" , "a" ) ; put ( "sc.2" , "b" ) ; put ( "sc.3" , "c" ) ; } } ; org . geotools . feature . FeatureIterator < org . opengis . feature . Feature > features = exCollection . features ( ) ; try { while ( features . hasNext ( ) ) { org . opengis . feature . Feature feature = features . next ( ) ; java . lang . String fId = feature . getIdentifier ( ) . getID ( ) ; java . lang . String recodedName = VALUE_MAP . get ( fId ) ; org . opengis . feature . ComplexAttribute complexAttribute = ( ( org . opengis . feature . ComplexAttribute ) ( ff . property ( "gml:name[3]" ) . evaluate ( feature ) ) ) ; java . lang . String value = org . geotools . util . Converters . convert ( org . geotools . gml3 . bindings . GML3EncodingUtils . getSimpleContent ( complexAttribute ) , java . lang . String . class ) ; "<AssertPlaceHolder>" ; } } finally { features . close ( ) ; } } getSimpleContent ( org . opengis . feature . ComplexAttribute ) { org . opengis . feature . Property simpleContent = complex . getProperty ( new org . geotools . feature . NameImpl ( "simpleContent" ) ) ; if ( simpleContent == null ) { return null ; } else { return simpleContent . getValue ( ) ; } }
|
org . junit . Assert . assertEquals ( recodedName , value )
|
testIntegerMethodCalledFromScriptWithResult ( ) { com . eclipsesource . v8 . V8CallbackTest . ICallback callback = mock ( com . eclipsesource . v8 . V8CallbackTest . ICallback . class ) ; doReturn ( 8 ) . when ( callback ) . integerMethod ( ) ; v8 . registerJavaMethod ( callback , "integerMethod" , "foo" , new java . lang . Class < ? > [ 0 ] ) ; int result = v8 . executeIntegerScript ( "foo();" ) ; "<AssertPlaceHolder>" ; } executeIntegerScript ( java . lang . String ) { return executeIntegerScript ( script , null , 0 ) ; }
|
org . junit . Assert . assertEquals ( 8 , result )
|
getValidReport ( ) { java . lang . String xml = null ; org . entando . entando . aps . system . init . model . DataSourceDumpReport report = new org . entando . entando . aps . system . init . model . DataSourceDumpReport ( xml ) ; when ( databaseManager . getBackupReport ( org . mockito . ArgumentMatchers . anyString ( ) ) ) . thenReturn ( report ) ; org . entando . entando . aps . system . services . database . model . DumpReportDto dto = this . databaseService . getDumpReportDto ( "reportCode" ) ; "<AssertPlaceHolder>" ; } getDumpReportDto ( java . lang . String ) { org . entando . entando . aps . system . services . database . model . DumpReportDto dtos = null ; try { org . entando . entando . aps . system . init . model . DataSourceDumpReport report = this . getDatabaseManager ( ) . getBackupReport ( reportCode ) ; if ( null == report ) { logger . warn ( "no<sp>dump<sp>found<sp>with<sp>code<sp>{}" , reportCode ) ; throw new org . entando . entando . aps . system . exception . ResourceNotFoundException ( org . entando . entando . web . database . validator . DatabaseValidator . ERRCODE_NO_DUMP_FOUND , "reportCode" , reportCode ) ; } dtos = new org . entando . entando . aps . system . services . database . model . DumpReportDto ( report , this . getComponentManager ( ) ) ; } catch ( org . entando . entando . aps . system . exception . ResourceNotFoundException r ) { throw r ; } catch ( java . lang . Throwable t ) { logger . error ( "error<sp>extracting<sp>database<sp>report<sp>{}" , reportCode , t ) ; throw new org . entando . entando . aps . system . exception . RestServerError ( ( "error<sp>extracting<sp>database<sp>report<sp>" + reportCode ) , t ) ; } return dtos ; }
|
org . junit . Assert . assertNotNull ( dto )
|
isInternalAuthMode ( ) { when ( mock . getVOConfigurationSetting ( eq ( ConfigurationKey . AUTH_MODE ) , anyString ( ) ) ) . thenReturn ( createSetting ( ConfigurationKey . AUTH_MODE , "INTERNAL" ) ) ; boolean isInternalAuthMode = bean . isInternalAuthMode ( ) ; "<AssertPlaceHolder>" ; } isInternalAuthMode ( ) { when ( mock . getVOConfigurationSetting ( eq ( ConfigurationKey . AUTH_MODE ) , anyString ( ) ) ) . thenReturn ( createSetting ( ConfigurationKey . AUTH_MODE , "INTERNAL" ) ) ; boolean isInternalAuthMode = bean . isInternalAuthMode ( ) ; org . junit . Assert . assertTrue ( isInternalAuthMode ) ; }
|
org . junit . Assert . assertTrue ( isInternalAuthMode )
|
CollisionWithMergeInlineAbove1RemoveOneCollisonNode ( ) { io . usethesource . capsule . SetSmokeTest . DummyValue hash98304_obj1 = new io . usethesource . capsule . SetSmokeTest . DummyValue ( 1 , 98304 ) ; io . usethesource . capsule . SetSmokeTest . DummyValue hash98304_obj2 = new io . usethesource . capsule . SetSmokeTest . DummyValue ( 2 , 98304 ) ; io . usethesource . capsule . SetSmokeTest . DummyValue hash268435456_obj3 = new io . usethesource . capsule . SetSmokeTest . DummyValue ( 3 , 268435456 ) ; io . usethesource . capsule . Set . Immutable < io . usethesource . capsule . SetSmokeTest . DummyValue > xs = io . usethesource . capsule . core . PersistentTrieSet . of ( hash98304_obj1 , hash98304_obj2 , hash268435456_obj3 ) . __remove ( hash98304_obj2 ) ; io . usethesource . capsule . Set . Immutable < io . usethesource . capsule . SetSmokeTest . DummyValue > ys = io . usethesource . capsule . core . PersistentTrieSet . of ( hash98304_obj1 , hash268435456_obj3 ) ; "<AssertPlaceHolder>" ; } of ( K , K ) { assert ! ( java . util . Objects . equals ( key0 , key1 ) ) ; final int keyHash0 = key0 . hashCode ( ) ; final int keyHash1 = key1 . hashCode ( ) ; io . usethesource . capsule . core . PersistentTrieSet . CompactSetNode < K > newRootNode = io . usethesource . capsule . core . PersistentTrieSet . CompactSetNode . mergeTwoKeyValPairs ( key0 , keyHash0 , key1 , keyHash1 , 0 ) ; return new io . usethesource . capsule . core . PersistentTrieSet < K > ( newRootNode , ( keyHash0 + keyHash1 ) , 2 ) ; }
|
org . junit . Assert . assertEquals ( xs , ys )
|
test_empty ( ) { java . util . List < java . util . List < java . lang . Integer > > actual = t . threeSum ( new int [ ] { } ) ; "<AssertPlaceHolder>" ; } threeSum ( int [ ] ) { if ( ( num == null ) || ( ( num . length ) == 0 ) ) { return java . util . Collections . emptyList ( ) ; } java . util . List < java . util . List < java . lang . Integer > > list = new java . util . ArrayList < java . util . List < java . lang . Integer > > ( ) ; java . util . Arrays . sort ( num ) ; for ( int i = 0 ; i < ( num . length ) ; i ++ ) { if ( ( num [ i ] ) > 0 ) { continue ; } if ( ( i != 0 ) && ( ( num [ i ] ) == ( num [ ( i - 1 ) ] ) ) ) { continue ; } for ( int j = i + 1 ; j < ( num . length ) ; j ++ ) { if ( ( ( num [ i ] ) + ( num [ j ] ) ) > 0 ) { continue ; } if ( ( j != ( i + 1 ) ) && ( ( num [ j ] ) == ( num [ ( j - 1 ) ] ) ) ) { continue ; } for ( int k = j + 1 ; k < ( num . length ) ; k ++ ) { if ( ( k != ( j + 1 ) ) && ( ( num [ k ] ) == ( num [ ( k - 1 ) ] ) ) ) { continue ; } if ( ( ( ( num [ i ] ) + ( num [ j ] ) ) + ( num [ k ] ) ) > 0 ) { continue ; } if ( ( ( ( num [ i ] ) + ( num [ j ] ) ) + ( num [ k ] ) ) == 0 ) { list . add ( java . util . Arrays . asList ( num [ i ] , num [ j ] , num [ k ] ) ) ; } } } } return list ; }
|
org . junit . Assert . assertEquals ( 0 , actual . size ( ) )
|
testSbumit2 ( ) { com . crazymouse . entity . Submit s = new com . crazymouse . entity . Submit ( Constants . PROTOCALTYPE_VERSION_CMPP2 ) ; processCommon ( s ) ; r . nextBytes ( s . getFeeTerminalId ( ) ) ; r . nextBytes ( s . getDestTerminalIds ( ) ) ; r . nextBytes ( s . getReserveOrLinkId ( ) ) ; com . crazymouse . entity . Submit s1 = new com . crazymouse . entity . Submit ( Constants . PROTOCALTYPE_VERSION_CMPP2 ) ; s1 . doDecode ( s . doEncode ( ) ) ; "<AssertPlaceHolder>" ; } getReserveOrLinkId ( ) { return reserveOrLinkId ; }
|
org . junit . Assert . assertEquals ( s , s1 )
|
testGetStyleForMetacardBySourceId ( ) { ddf . catalog . data . Metacard metacard = new org . codice . ddf . spatial . kml . transformer . MockMetacard ( null , null ) ; org . codice . ddf . spatial . kml . transformer . KmlStyleMap mapper = new org . codice . ddf . spatial . kml . transformer . KmlStyleMap ( ) ; mapper . addMapEntry ( new org . codice . ddf . spatial . kml . transformer . KmlStyleMapEntryImpl ( ddf . catalog . data . Metacard . SOURCE_ID , MockMetacard . DEFAULT_SOURCE_ID , org . codice . ddf . spatial . kml . transformer . TestKmlStyleMap . DEFAULT_STYLE_URL ) ) ; "<AssertPlaceHolder>" ; } getStyleForMetacard ( ddf . catalog . data . Metacard ) { for ( org . codice . ddf . spatial . kml . transformer . KmlStyleMapEntry mapEntry : styleMap ) { if ( mapEntry . metacardMatch ( metacard ) ) { return mapEntry . getStyleUrl ( ) ; } } return "" ; }
|
org . junit . Assert . assertThat ( mapper . getStyleForMetacard ( metacard ) , org . hamcrest . Matchers . is ( org . codice . ddf . spatial . kml . transformer . TestKmlStyleMap . DEFAULT_STYLE_URL ) )
|
testGetCsvData ( ) { final java . lang . String TEST = "\"This<sp>is<sp>a<sp>test\",<sp>laughs<sp>Kunal.\n\r" ; org . jumpmind . symmetric . io . data . CsvData data = new org . jumpmind . symmetric . io . data . CsvData ( org . jumpmind . symmetric . io . data . DataEventType . INSERT , new java . lang . String [ ] { TEST } ) ; java . lang . String rowData = data . getCsvData ( CsvData . ROW_DATA ) ; org . jumpmind . symmetric . io . data . CsvData newData = new org . jumpmind . symmetric . io . data . CsvData ( ) ; newData . putCsvData ( CsvData . ROW_DATA , rowData ) ; java . lang . String result = newData . getParsedData ( CsvData . ROW_DATA ) [ 0 ] ; "<AssertPlaceHolder>" ; } getParsedData ( java . lang . String ) { java . lang . String [ ] values = null ; if ( ( ( parsedCsvData ) != null ) && ( parsedCsvData . containsKey ( key ) ) ) { values = parsedCsvData . get ( key ) ; } else if ( ( ( csvData ) != null ) && ( csvData . containsKey ( key ) ) ) { java . lang . String data = csvData . get ( key ) ; if ( data != null ) { values = org . jumpmind . symmetric . io . data . CsvUtils . tokenizeCsvData ( data ) ; putParsedData ( key , values ) ; } } return values ; }
|
org . junit . Assert . assertEquals ( TEST , result )
|
testWriteThroughCache ( ) { org . atlasapi . media . entity . Brand brand = org . atlasapi . media . entity . testing . BrandTestDataBuilder . brand ( ) . build ( ) ; brand . setCanonicalUri ( "http://brand.com" ) ; org . atlasapi . media . entity . Item item = org . atlasapi . media . entity . testing . ComplexItemTestDataBuilder . complexItem ( ) . build ( ) ; item . setCanonicalUri ( "http://brand.com/item" ) ; org . atlasapi . media . entity . Broadcast broadcast = org . atlasapi . media . entity . testing . ComplexBroadcastTestDataBuilder . broadcast ( ) . build ( ) ; contentBuffer . add ( new org . atlasapi . remotesite . pa . ContentHierarchyAndSummaries ( com . google . common . base . Optional . of ( brand ) , com . google . common . base . Optional . absent ( ) , item , broadcast , com . google . common . base . Optional . absent ( ) , com . google . common . base . Optional . absent ( ) ) ) ; org . atlasapi . media . entity . Identified queried = com . google . common . collect . Iterables . getOnlyElement ( contentBuffer . findByCanonicalUris ( com . google . common . collect . ImmutableSet . of ( "http://brand.com/item" ) ) . getAllResolvedResults ( ) ) ; "<AssertPlaceHolder>" ; } of ( java . lang . Iterable ) { return org . atlasapi . output . QueryResult . of ( content , null ) ; }
|
org . junit . Assert . assertEquals ( item , queried )
|
withZeroStep ( ) { "<AssertPlaceHolder>" ; } countWays ( int ) { if ( steps == 0 ) return 1 ; else if ( steps == 1 ) return 1 ; else if ( steps == 2 ) return 2 ; else if ( steps == 3 ) return 4 ; int a = 1 ; int b = 2 ; int c = 4 ; int ways = 0 ; for ( int i = 4 ; i <= steps ; i ++ ) { ways = ( a + b ) + c ; a = b ; b = c ; c = ways ; } return ways ; }
|
org . junit . Assert . assertEquals ( 1 , s . countWays ( 0 ) )
|
testDependsOn ( ) { org . jboss . as . test . integration . ejb . singleton . dependson . CallCounterSingleton singleton = ( ( org . jboss . as . test . integration . ejb . singleton . dependson . CallCounterSingleton ) ( ctx . lookup ( "java:module/CallCounterSingleton" ) ) ) ; deployer . deploy ( "ear" ) ; deployer . undeploy ( "ear" ) ; java . util . List < java . lang . String > expectedOrder = new java . util . ArrayList < java . lang . String > ( ) ; expectedOrder . add ( "SingletonOne" ) ; expectedOrder . add ( "SingletonTwo" ) ; expectedOrder . add ( "SingletonThree" ) ; expectedOrder . add ( "SingletonThree" ) ; expectedOrder . add ( "SingletonTwo" ) ; expectedOrder . add ( "SingletonOne" ) ; "<AssertPlaceHolder>" ; } getCalls ( ) { return this . orderLog ; }
|
org . junit . Assert . assertEquals ( expectedOrder , singleton . getCalls ( ) )
|
testReadFromISO ( ) { java . lang . String eWithAcute = "é" ; java . lang . String nameStringUTF16 = ( "F" + eWithAcute ) + "lix" ; java . lang . String bookStringUTF16 = ( ( "<?xml<sp>version=\"1.0\"<sp>encoding=\"ISO-8859-1\"?>" + "<Book><name>" ) + nameStringUTF16 ) + "</name></Book>" ; byte [ ] iso88591bytes = bookStringUTF16 . getBytes ( "ISO-8859-1" ) ; org . apache . cxf . jaxrs . provider . JAXBElementProvider < org . apache . cxf . jaxrs . resources . Book > p = new org . apache . cxf . jaxrs . provider . JAXBElementProvider ( ) ; org . apache . cxf . jaxrs . resources . Book book = p . readFrom ( org . apache . cxf . jaxrs . resources . Book . class , null , new java . lang . annotation . Annotation [ ] { } , javax . ws . rs . core . MediaType . valueOf ( MediaType . APPLICATION_XML ) , null , new java . io . ByteArrayInputStream ( iso88591bytes ) ) ; "<AssertPlaceHolder>" ; } getName ( ) { return name ; }
|
org . junit . Assert . assertEquals ( book . getName ( ) , nameStringUTF16 )
|
testClientThree ( ) { org . glassfish . tyrus . server . Server server = startServer ( org . glassfish . tyrus . test . standard_config . MaxMessageSizeTest . Endpoint1 . class , org . glassfish . tyrus . test . standard_config . MaxMessageSizeTest . ServiceEndpoint . class ) ; try { org . glassfish . tyrus . client . ClientManager client = createClient ( ) ; final javax . websocket . Session session = client . connectToServer ( org . glassfish . tyrus . test . standard_config . MaxMessageSizeTest . MyClientEndpoint . class , getURI ( org . glassfish . tyrus . test . standard_config . MaxMessageSizeTest . Endpoint1 . class ) ) ; testViaServiceEndpoint ( client , org . glassfish . tyrus . test . standard_config . MaxMessageSizeTest . ServiceEndpoint . class , org . glassfish . tyrus . test . standard_config . POSITIVE , "CLEANUP" ) ; org . glassfish . tyrus . test . standard_config . MaxMessageSizeTest . MyClientEndpoint . latch = new java . util . concurrent . CountDownLatch ( 1 ) ; org . glassfish . tyrus . test . standard_config . MaxMessageSizeTest . MyClientEndpoint . throwable = null ; org . glassfish . tyrus . test . standard_config . MaxMessageSizeTest . MyClientEndpoint . reason = null ; session . getBasicRemote ( ) . sendText ( "tes" ) ; org . glassfish . tyrus . test . standard_config . MaxMessageSizeTest . MyClientEndpoint . latch . await ( 1 , TimeUnit . SECONDS ) ; "<AssertPlaceHolder>" ; testViaServiceEndpoint ( client , org . glassfish . tyrus . test . standard_config . MaxMessageSizeTest . ServiceEndpoint . class , org . glassfish . tyrus . test . standard_config . NEGATIVE , "NEGATIVE_EXPECTED" ) ; } catch ( java . lang . Exception e ) { e . printStackTrace ( ) ; throw new java . lang . RuntimeException ( e . getMessage ( ) , e ) ; } finally { stopServer ( server ) ; } } getCount ( ) { return count ; }
|
org . junit . Assert . assertEquals ( 0 , org . glassfish . tyrus . test . standard_config . MaxMessageSizeTest . MyClientEndpoint . latch . getCount ( ) )
|
testModificationIterator ( ) { me . prettyprint . hector . api . mutation . Mutator mutator = me . prettyprint . hector . api . factory . HFactory . createMutator ( keyspace , me . prettyprint . cassandra . service . ColumnSliceIteratorTest . se ) ; me . prettyprint . hector . api . query . SliceQuery < java . lang . String , java . util . UUID , java . lang . String > query = me . prettyprint . hector . api . factory . HFactory . createSliceQuery ( keyspace , me . prettyprint . cassandra . service . ColumnSliceIteratorTest . se , me . prettyprint . cassandra . service . ColumnSliceIteratorTest . us , me . prettyprint . cassandra . service . ColumnSliceIteratorTest . se ) . setKey ( me . prettyprint . cassandra . service . ColumnSliceIteratorTest . KEY ) . setColumnFamily ( me . prettyprint . cassandra . service . ColumnSliceIteratorTest . CF ) ; me . prettyprint . cassandra . service . ColumnSliceIterator < java . lang . String , java . util . UUID , java . lang . String > it = new me . prettyprint . cassandra . service . ColumnSliceIterator < java . lang . String , java . util . UUID , java . lang . String > ( query , null , FINISH , false , 100 ) ; java . util . Map < java . util . UUID , java . lang . String > results = new java . util . HashMap < java . util . UUID , java . lang . String > ( ) ; while ( it . hasNext ( ) ) { me . prettyprint . hector . api . beans . HColumn < java . util . UUID , java . lang . String > c = it . next ( ) ; results . put ( c . getName ( ) , c . getValue ( ) ) ; mutator . addDeletion ( me . prettyprint . cassandra . service . ColumnSliceIteratorTest . KEY , me . prettyprint . cassandra . service . ColumnSliceIteratorTest . CF , c . getName ( ) , me . prettyprint . cassandra . service . ColumnSliceIteratorTest . us ) ; mutator . execute ( ) ; } "<AssertPlaceHolder>" ; } size ( ) { return components . size ( ) ; }
|
org . junit . Assert . assertEquals ( 1000 , results . size ( ) )
|
testGetGE ( ) { java . lang . String actual = table . getGE ( ) ; java . lang . String expected = ">=" ; "<AssertPlaceHolder>" ; } getGE ( ) { return ">=" ; }
|
org . junit . Assert . assertEquals ( expected , actual )
|
testCanSerialiseEdge ( ) { final uk . gov . gchq . gaffer . data . element . Edge edge = new uk . gov . gchq . gaffer . data . element . Edge . Builder ( ) . group ( TestGroups . EDGE ) . source ( "source" ) . dest ( "destination" ) . directed ( true ) . build ( ) ; final byte [ ] serialisedEdge = serialiser . serialise ( edge ) ; final uk . gov . gchq . gaffer . data . element . Edge deserialisedEdge = serialiser . deserialise ( serialisedEdge ) ; "<AssertPlaceHolder>" ; } deserialise ( byte [ ] ) { final int [ ] lastDelimiter = new int [ ] { 0 } ; final java . lang . String group = uk . gov . gchq . gaffer . serialisation . util . LengthValueBytesSerialiserUtil . deserialise ( stringSerialiser , bytes , lastDelimiter ) ; if ( group . isEmpty ( ) ) { throw new java . lang . IllegalArgumentException ( ( "Group<sp>is<sp>required<sp>for<sp>deserialising<sp>" + ( uk . gov . gchq . gaffer . data . element . GroupedProperties . class . getSimpleName ( ) ) ) ) ; } final uk . gov . gchq . gaffer . store . schema . SchemaElementDefinition elementDefinition = schema . getElement ( group ) ; if ( null == elementDefinition ) { throw new uk . gov . gchq . gaffer . exception . SerialisationException ( ( ( "No<sp>SchemaElementDefinition<sp>found<sp>for<sp>group<sp>" + group ) + ",<sp>is<sp>this<sp>group<sp>in<sp>your<sp>schema?" ) ) ; } final uk . gov . gchq . gaffer . data . element . GroupedProperties properties = new uk . gov . gchq . gaffer . data . element . GroupedProperties ( group ) ; deserialiseProperties ( bytes , properties , elementDefinition , lastDelimiter ) ; return properties ; }
|
org . junit . Assert . assertEquals ( edge , deserialisedEdge )
|
testInject ( ) { org . teiid . adminapi . impl . VDBTranslatorMetaData tm = new org . teiid . adminapi . impl . VDBTranslatorMetaData ( ) ; tm . setExecutionFactoryClass ( org . teiid . deployers . TestTranslatorUtil . MyTranslator . class ) ; tm . addProperty ( "MyProperty" , "correctly-assigned" ) ; org . teiid . deployers . TestTranslatorUtil . MyTranslator my = ( ( org . teiid . deployers . TestTranslatorUtil . MyTranslator ) ( org . teiid . deployers . TranslatorUtil . buildExecutionFactory ( tm ) ) ) ; "<AssertPlaceHolder>" ; org . teiid . adminapi . impl . VDBTranslatorMetaData metadata = org . teiid . deployers . TranslatorUtil . buildTranslatorMetadata ( my , "my-module" ) ; metadata . addProperty ( "MyProperty" , "correctly-assigned" ) ; org . teiid . logging . Logger logger = org . mockito . Mockito . mock ( org . teiid . logging . Logger . class ) ; org . mockito . Mockito . stub ( logger . isEnabled ( org . mockito . Mockito . anyString ( ) , org . mockito . Mockito . anyInt ( ) ) ) . toReturn ( true ) ; org . mockito . Mockito . doThrow ( new java . lang . RuntimeException ( "fail" ) ) . when ( logger ) . log ( org . mockito . Mockito . eq ( MessageLevel . WARNING ) , org . mockito . Mockito . eq ( LogConstants . CTX_RUNTIME ) , org . mockito . Mockito . anyString ( ) ) ; org . teiid . logging . LogManager . setLogListener ( logger ) ; try { org . teiid . deployers . TranslatorUtil . buildExecutionFactory ( metadata ) ; } finally { org . teiid . logging . LogManager . setLogListener ( null ) ; } } getMyProperty ( ) { return mine ; }
|
org . junit . Assert . assertEquals ( "correctly-assigned" , my . getMyProperty ( ) )
|
testVerwerkBerichtIndienExceptionBijLockStapOntgrendel ( ) { final nl . bzk . brp . model . bijhouding . BijhoudingsBericht bijhoudingsBericht = maakNieuwStandaardBericht ( null , maakStandaardActie ( ) ) ; doThrow ( new java . lang . NullPointerException ( "fout<sp>bij<sp>unlock" ) ) . when ( lockStap ) . ontgrendel ( ) ; nl . bzk . brp . bijhouding . business . dto . bijhouding . BijhoudingResultaat bijhoudingResultaat = berichtVerwerker . verwerkBericht ( bijhoudingsBericht , berichtContext ) ; "<AssertPlaceHolder>" ; verify ( publiceerAdministratieveHandelingStap ) . voerUit ( any ( nl . bzk . brp . bijhouding . business . stappen . context . BijhoudingBerichtContext . class ) ) ; } getMeldingen ( ) { return meldingen ; }
|
org . junit . Assert . assertEquals ( 1 , bijhoudingResultaat . getMeldingen ( ) . size ( ) )
|
testGetUnmarshaller ( ) { com . betfair . cougar . marshalling . api . databinding . UnMarshaller marshaller = factory . getUnMarshaller ( ) ; marshaller . unmarshall ( null , ( ( java . lang . Class ) ( null ) ) , null , false ) ; "<AssertPlaceHolder>" ; expectUnMarshall ( ) ; } getUnMarshaller ( ) { return unMarshaller ; }
|
org . junit . Assert . assertSame ( marshaller , factory . getUnMarshaller ( ) )
|
exceptionExecutingPolicy ( ) { java . util . Map < java . lang . String , java . lang . String > props = new java . util . HashMap ( taskConfig ) ; task . start ( props ) ; com . github . mmolimar . kafka . connect . fs . policy . Policy policy = org . easymock . EasyMock . createNiceMock ( com . github . mmolimar . kafka . connect . fs . policy . Policy . class ) ; org . easymock . EasyMock . expect ( policy . hasEnded ( ) ) . andReturn ( Boolean . FALSE ) ; org . easymock . EasyMock . expect ( policy . execute ( ) ) . andThrow ( new org . apache . kafka . connect . errors . ConnectException ( "Exception<sp>from<sp>mock" ) ) ; org . easymock . EasyMock . expect ( policy . getURIs ( ) ) . andReturn ( null ) ; org . easymock . EasyMock . checkOrder ( policy , false ) ; org . easymock . EasyMock . replay ( policy ) ; org . powermock . api . support . membermodification . MemberModifier . field ( com . github . mmolimar . kafka . connect . fs . FsSourceTask . class , "policy" ) . set ( task , policy ) ; "<AssertPlaceHolder>" ; } poll ( ) { while ( ( ( ( stop ) != null ) && ( ! ( stop . get ( ) ) ) ) && ( ! ( policy . hasEnded ( ) ) ) ) { com . github . mmolimar . kafka . connect . fs . FsSourceTask . log . trace ( "Polling<sp>for<sp>new<sp>data" ) ; final com . github . mmolimar . kafka . connect . fs . List < org . apache . kafka . connect . source . SourceRecord > results = new com . github . mmolimar . kafka . connect . fs . ArrayList ( ) ; com . github . mmolimar . kafka . connect . fs . List < com . github . mmolimar . kafka . connect . fs . file . FileMetadata > files = filesToProcess ( ) ; files . forEach ( ( metadata ) -> { try ( com . github . mmolimar . kafka . connect . fs . file . reader . FileReader reader = policy . offer ( metadata , com . github . mmolimar . kafka . connect . fs . context . offsetStorageReader ( ) ) ) { com . github . mmolimar . kafka . connect . fs . FsSourceTask . log . info ( "Processing<sp>records<sp>for<sp>file<sp>{}" , metadata ) ; while ( reader . hasNext ( ) ) { results . add ( convert ( metadata , reader . currentOffset ( ) , reader . next ( ) ) ) ; } } catch ( org . apache . kafka . connect . errors . ConnectException | java . io . IOException e ) { com . github . mmolimar . kafka . connect . fs . FsSourceTask . log . error ( ( ( "Error<sp>reading<sp>file<sp>from<sp>FS:<sp>" + ( metadata . getPath ( ) ) ) + ".<sp>Keep<sp>going..." ) , com . github . mmolimar . kafka . connect . fs . e ) ; } } ) ; return results ; } return null ; }
|
org . junit . Assert . assertEquals ( 0 , task . poll ( ) . size ( ) )
|
testAddCustomProperty ( ) { final java . lang . String customKey = "string" ; final java . lang . String customValue = "Hello" ; this . builder . addCustomProperty ( customKey , customValue ) ; @ com . turo . pushy . apns . util . SuppressWarnings ( "unchecked" ) final java . util . Map < java . lang . String , java . lang . Object > payload = com . turo . pushy . apns . util . ApnsPayloadBuilderTest . GSON . fromJson ( this . builder . buildWithDefaultMaximumLength ( ) , com . turo . pushy . apns . util . ApnsPayloadBuilderTest . MAP_OF_STRING_TO_OBJECT ) ; "<AssertPlaceHolder>" ; } buildWithDefaultMaximumLength ( ) { return this . buildWithMaximumLength ( com . turo . pushy . apns . util . ApnsPayloadBuilder . DEFAULT_MAXIMUM_PAYLOAD_SIZE ) ; }
|
org . junit . Assert . assertEquals ( customValue , payload . get ( customKey ) )
|
update ( ) { java . lang . String propName = java . util . UUID . randomUUID ( ) . toString ( ) ; java . lang . Integer value = new java . util . Random ( ) . nextInt ( ) ; config . setProperty ( propName , value ) ; config . update ( ) ; "<AssertPlaceHolder>" ; } getInteger ( java . lang . String ) { java . lang . String strValue = getProperty ( name ) ; if ( strValue == null ) { return null ; } return java . lang . Integer . valueOf ( strValue ) ; }
|
org . junit . Assert . assertEquals ( value , config . getInteger ( propName ) )
|
testSerilizationConfig ( ) { com . gs . fw . common . mithra . util . serializer . SerializationConfig serializationConfig = com . gs . fw . common . mithra . util . serializer . SerializationConfig . shallowWithDefaultAttributes ( com . gs . fw . common . mithra . test . domain . OrderFinder . getFinderInstance ( ) ) . withoutMetaData ( ) . withDeepFetches ( com . gs . fw . common . mithra . test . domain . OrderFinder . items ( ) ) ; "<AssertPlaceHolder>" ; } serializeMetaData ( ) { return serializeMetaData ; }
|
org . junit . Assert . assertFalse ( serializationConfig . serializeMetaData ( ) )
|
testI2os_ByteArray_lengthMatchingOctetString ( ) { byte [ ] bigInt = new byte [ ] { ( ( byte ) ( 0 ) ) , ( ( byte ) ( 0 ) ) , ( ( byte ) ( 0 ) ) , ( ( byte ) ( 42 ) ) } ; byte [ ] expectedResult = new byte [ ] { ( ( byte ) ( 0 ) ) , ( ( byte ) ( 0 ) ) , ( ( byte ) ( 0 ) ) , ( ( byte ) ( 42 ) ) } ; byte [ ] result = de . persosim . simulator . crypto . Tr03111Utils . i2os ( bigInt , expectedResult . length ) ; "<AssertPlaceHolder>" ; } i2os ( java . math . BigInteger , int ) { if ( ( x . compareTo ( BigInteger . ZERO ) ) < 0 ) { throw new java . lang . IllegalArgumentException ( "x<sp>must<sp>be<sp>non-negative" ) ; } byte [ ] result = de . persosim . simulator . utils . Utils . toUnsignedByteArray ( x ) ; return de . persosim . simulator . crypto . Tr03111Utils . i2os ( result , l ) ; }
|
org . junit . Assert . assertArrayEquals ( expectedResult , result )
|
testGetKeys ( ) { org . apache . directory . server . core . avltree . AvlTree < java . lang . Integer > tree = createTree ( ) ; tree . insert ( 72 ) ; tree . insert ( 79 ) ; tree . insert ( 1 ) ; tree . insert ( 2 ) ; tree . insert ( 3 ) ; tree . insert ( 7 ) ; tree . insert ( 34 ) ; "<AssertPlaceHolder>" ; } getKeys ( ) { return java . util . Collections . singletonList ( singleton . getKey ( ) ) ; }
|
org . junit . Assert . assertTrue ( ( 7 == ( tree . getKeys ( ) . size ( ) ) ) )
|
testBooleanComparison ( ) { final org . drools . modelcompiler . builder . generator . TypedExpression expected = typedResult ( "_this.getAge()<sp>==<sp>18" , int . class ) ; final org . drools . modelcompiler . builder . generator . TypedExpression actual = toTypedExpression ( "age<sp>==<sp>18" , org . drools . modelcompiler . domain . Person . class ) ; "<AssertPlaceHolder>" ; } typedResult ( java . lang . String , java . lang . Class ) { com . github . javaparser . ast . expr . Expression resultExpression = org . drools . modelcompiler . builder . generator . DrlxParseUtil . parseExpression ( expressionResult ) . getExpr ( ) ; return new org . drools . modelcompiler . builder . generator . TypedExpression ( resultExpression , classResult ) ; }
|
org . junit . Assert . assertEquals ( expected , actual )
|
testConcatStringAndChar ( ) { java . lang . String expected = org . stjs . generator . exec . ints . CharPlusString . method ( "hello" , CharPlusString . CYRILLIC_IA ) ; "<AssertPlaceHolder>" ; } execute ( org . stjs . javascript . functions . Callback0 ) { }
|
org . junit . Assert . assertEquals ( expected , execute ( org . stjs . generator . exec . ints . CharPlusString . class ) )
|
testVerwerkBerichtIndienExceptionBijLockStapVergrendel ( ) { final nl . bzk . brp . model . bijhouding . BijhoudingsBericht bijhoudingsBericht = maakNieuwStandaardBericht ( null , maakStandaardActie ( ) ) ; doThrow ( new java . lang . NullPointerException ( "fout<sp>bij<sp>lock" ) ) . when ( lockStap ) . vergrendel ( any ( nl . bzk . brp . bijhouding . business . stappen . context . BijhoudingBerichtContext . class ) ) ; nl . bzk . brp . bijhouding . business . dto . bijhouding . BijhoudingResultaat bijhoudingResultaat = berichtVerwerker . verwerkBericht ( bijhoudingsBericht , berichtContext ) ; "<AssertPlaceHolder>" ; verify ( bijhoudingTransactieStap , never ( ) ) . startTransactie ( any ( nl . bzk . brp . bijhouding . business . stappen . context . BijhoudingBerichtContext . class ) ) ; verify ( lockStap ) . ontgrendel ( ) ; } getMeldingen ( ) { return meldingen ; }
|
org . junit . Assert . assertEquals ( 1 , bijhoudingResultaat . getMeldingen ( ) . size ( ) )
|
testCHEMBL529226 ( ) { java . lang . String smiles = "CC1=CN([C@@H]2O[C@@]3(COP(=S)(O)O[C@H]4[C@H]5OC[C@]4(COP(=S)(O)O[C@H]6C[C@@H](O[C@@H]6COP(=S)(O)O[C@H]7[C@@H](O)[C@@H](O[C@@H]7COP(=S)(O)O[C@H]8[C@@H](O)[C@@H](O[C@@H]8COP(=S)(O)O[C@H]9[C@@H](O)[C@@H](O[C@@H]9COP(=S)(O)O[C@H]%10[C@@H](O)[C@@H](O[C@@H]%10COP(=S)(O)O[C@H]%11[C@@H](O)[C@@H](O[C@@H]%11COP(=S)(O)O[C@H]%12[C@@H](O)[C@@H](O[C@@H]%12COP(=S)(O)O[C@H]%13[C@@H](O)[C@@H](O[C@@H]%13COP(=S)(O)O[C@H]%14[C@@H](O)[C@@H](O[C@@H]%14COP(=S)(O)O[C@H]%15[C@@H](O)[C@@H](O[C@@H]%15COP(=S)(O)O[C@H]%16[C@H]%17OC[C@]%16(COP(=S)(O)O[C@H]%18[C@H]%19OC[C@]%18(CO)O[C@H]%19N%20C=C(C)C(=O)NC%20=O)O[C@H]%17N%21C=C(C)C(=NC%21=O)N)N%22C=CC(=NC%22=O)N)n%23cnc%24C(=O)NC(=Nc%23%24)N)n%25cnc%26C(=O)NC(=Nc%25%26)N)N%27C=C(C)C(=O)NC%27=O)N%28C=CC(=NC%28=O)N)n%29cnc%30c(N)ncnc%29%30)N%31C=CC(=NC%31=O)N)n%32cnc%33C(=O)NC(=Nc%32%33)N)n%34cnc%35C(N)NC=Nc%34%35)N%36C=C(C)C(=O)NC%36=O)O[C@H]5N%37C=C(C)C(=O)NC%37=O)CO[C@@H]2[C@@H]3O)C(=O)N=C1N" ; org . openscience . cdk . smiles . SmilesParser smipar = new org . openscience . cdk . smiles . SmilesParser ( org . openscience . cdk . silent . SilentChemObjectBuilder . getInstance ( ) ) ; org . openscience . cdk . interfaces . IAtomContainer mol = smipar . parseSmiles ( smiles ) ; org . openscience . cdk . fragment . MurckoFragmenter fragmenter = new org . openscience . cdk . fragment . MurckoFragmenter ( true , 6 ) ; fragmenter . generateFragments ( mol ) ; "<AssertPlaceHolder>" ; } getFrameworks ( ) { return getSmilesFromAtomContainers ( frameMap . values ( ) ) . toArray ( new java . lang . String [ ] { } ) ; }
|
org . junit . Assert . assertThat ( fragmenter . getFrameworks ( ) . length , org . hamcrest . CoreMatchers . is ( 1 ) )
|
testGetJoinedProjectTokenNullToken ( ) { when ( pluginSettings . get ( ( ( ( ".jenkinsUser." + ( com . kylenicholls . stash . parameterizedbuilds . ciserver . JenkinsTest . USER_SLUG ) ) + "." ) + ( com . kylenicholls . stash . parameterizedbuilds . ciserver . JenkinsTest . PROJECT_KEY ) ) ) ) . thenReturn ( null ) ; java . lang . String actual = jenkins . getJoinedUserToken ( user , com . kylenicholls . stash . parameterizedbuilds . ciserver . JenkinsTest . PROJECT_KEY ) ; "<AssertPlaceHolder>" ; } getJoinedUserToken ( com . atlassian . bitbucket . user . ApplicationUser , java . lang . String ) { if ( ( projectKey == null ) || ( projectKey . equals ( "global-settings" ) ) ) { return getJoinedUserToken ( user ) ; } java . lang . String userToken = getUserToken ( user , projectKey ) ; if ( userToken != null ) { return ( ( user . getSlug ( ) ) + ":" ) + userToken ; } return null ; }
|
org . junit . Assert . assertEquals ( null , actual )
|
nullShouldBeReturnedWhenNoRuntimeHasBeenRegisteredForDatabase ( ) { "<AssertPlaceHolder>" ; } getRuntime ( org . neo4j . graphdb . GraphDatabaseService ) { return com . graphaware . runtime . RuntimeRegistry . RUNTIMES . get ( com . graphaware . runtime . RuntimeRegistry . storeDir ( database ) ) ; }
|
org . junit . Assert . assertNull ( com . graphaware . runtime . RuntimeRegistry . getRuntime ( database ) )
|
testValidateState_nullManager_assertIllegalStateException ( ) { org . nhindirect . monitor . dao . impl . NotificationDuplicationDAOImpl impl = new org . nhindirect . monitor . dao . impl . NotificationDuplicationDAOImpl ( ) ; boolean exceptionOccured = false ; try { impl . validateState ( ) ; } catch ( java . lang . IllegalStateException e ) { exceptionOccured = true ; } "<AssertPlaceHolder>" ; }
|
org . junit . Assert . assertTrue ( exceptionOccured )
|
testFieldErrorHandlingOnResultSet ( ) { @ org . simpleflatmapper . jdbc . test . SuppressWarnings ( "unchecked" ) org . simpleflatmapper . map . FieldMapperErrorHandler < org . simpleflatmapper . jdbc . JdbcColumnKey > fieldMapperErrorHandler = mock ( org . simpleflatmapper . map . FieldMapperErrorHandler . class ) ; org . simpleflatmapper . jdbc . test . ResultSet rs = mock ( org . simpleflatmapper . jdbc . test . ResultSet . class ) ; final java . lang . Exception exception = new org . simpleflatmapper . jdbc . test . SQLException ( "Error!" ) ; org . simpleflatmapper . jdbc . JdbcMapper < org . simpleflatmapper . test . beans . DbObject > mapper = org . simpleflatmapper . jdbc . test . JdbcMapperFactoryHelper . asm ( ) . fieldMapperErrorHandler ( fieldMapperErrorHandler ) . newBuilder ( org . simpleflatmapper . test . beans . DbObject . class ) . addMapping ( "id" ) . mapper ( ) ; when ( rs . next ( ) ) . thenReturn ( true , false ) ; when ( rs . getLong ( 1 ) ) . thenThrow ( exception ) ; java . util . List < org . simpleflatmapper . test . beans . DbObject > list = mapper . forEach ( rs , new org . simpleflatmapper . util . ListCollector < org . simpleflatmapper . test . beans . DbObject > ( ) ) . getList ( ) ; "<AssertPlaceHolder>" ; verify ( fieldMapperErrorHandler ) . errorMappingField ( eq ( new org . simpleflatmapper . jdbc . JdbcColumnKey ( "id" , 1 ) ) , any ( ) , same ( list . get ( 0 ) ) , same ( exception ) , any ( org . simpleflatmapper . converter . Context . class ) ) ; } get ( java . lang . Object ) { return null ; }
|
org . junit . Assert . assertNotNull ( list . get ( 0 ) )
|
shouldNotCleanupPodBeforeNonDeletePeriod ( ) { final java . lang . String name = createdPod . getMetadata ( ) . getName ( ) ; when ( k8sClient . pods ( ) . withName ( name ) ) . thenReturn ( namedPod ) ; when ( namedPod . get ( ) ) . thenReturn ( createdPod ) ; createdPod . setStatus ( podStatus ) ; when ( podStatus . getContainerStatuses ( ) ) . thenReturn ( java . util . List . of ( containerStatus , keepaliveContainerStatus ) ) ; when ( containerStatus . getName ( ) ) . thenReturn ( com . spotify . styx . docker . KubernetesDockerRunner . MAIN_CONTAINER_NAME ) ; when ( containerStatus . getState ( ) ) . thenReturn ( containerState ) ; when ( containerState . getTerminated ( ) ) . thenReturn ( containerStateTerminated ) ; when ( containerStateTerminated . getFinishedAt ( ) ) . thenReturn ( com . spotify . styx . docker . KubernetesDockerRunnerTest . FIXED_INSTANT . minus ( java . time . Duration . ofMinutes ( 1 ) ) . toString ( ) ) ; com . spotify . styx . docker . var runState = com . spotify . styx . state . RunState . create ( com . spotify . styx . docker . KubernetesDockerRunnerTest . WORKFLOW_INSTANCE , State . TERMINATED ) ; com . spotify . styx . docker . var shouldDelete = kdr . shouldDeletePodWithRunState ( com . spotify . styx . docker . KubernetesDockerRunnerTest . WORKFLOW_INSTANCE , createdPod , runState ) ; "<AssertPlaceHolder>" ; } is ( com . spotify . styx . api . Api$Version ) { return new org . hamcrest . TypeSafeMatcher < com . spotify . styx . api . Api . Version > ( ) { @ com . spotify . styx . api . Override protected boolean matchesSafely ( com . spotify . styx . api . Api . Version item ) { return ( item . ordinal ( ) ) == ( version . ordinal ( ) ) ; } @ com . spotify . styx . api . Override public void describeTo ( org . hamcrest . Description description ) { description . appendText ( "Version<sp>can<sp>only<sp>be" ) ; description . appendValue ( version ) ; } } ; }
|
org . junit . Assert . assertThat ( shouldDelete , org . hamcrest . Matchers . is ( false ) )
|
testGetTableStatusOrCreateIfNotExistsProcessingExpired ( ) { status . setState ( TableState . PROCESSING ) ; when ( mockTableStatusDAO . getTableStatus ( tableId ) ) . thenReturn ( status ) ; when ( mockTableIndexDAO . doesIndexStateMatch ( anyString ( ) , anyLong ( ) , anyString ( ) ) ) . thenReturn ( true ) ; when ( mockTimeoutUtils . hasExpired ( anyLong ( ) , anyLong ( ) ) ) . thenReturn ( true ) ; when ( mockNodeDao . isNodeAvailable ( tableIdLong ) ) . thenReturn ( true ) ; org . sagebionetworks . repo . model . table . TableStatus result = manager . getTableStatusOrCreateIfNotExists ( tableId ) ; "<AssertPlaceHolder>" ; verify ( mockTableStatusDAO ) . resetTableStatusToProcessing ( tableId ) ; verify ( mockTransactionalMessenger ) . sendMessageAfterCommit ( tableId , ObjectType . TABLE , etag , ChangeType . UPDATE ) ; } getTableStatusOrCreateIfNotExists ( java . lang . String ) { try { org . sagebionetworks . repo . model . table . TableStatus status = tableStatusDAO . getTableStatus ( tableId ) ; if ( ! ( TableState . AVAILABLE . equals ( status . getState ( ) ) ) ) { if ( timeoutUtils . hasExpired ( org . sagebionetworks . repo . manager . table . TableManagerSupportImpl . TABLE_PROCESSING_TIMEOUT_MS , status . getChangedOn ( ) . getTime ( ) ) ) { return setTableToProcessingAndTriggerUpdate ( tableId ) ; } else { return status ; } } if ( isIndexSynchronizedWithTruth ( tableId ) ) { return status ; } else { return setTableToProcessingAndTriggerUpdate ( tableId ) ; } } catch ( org . sagebionetworks . repo . web . NotFoundException e ) { if ( ! ( isTableAvailable ( tableId ) ) ) { throw new org . sagebionetworks . repo . web . NotFoundException ( ( ( "Table<sp>" + tableId ) + "<sp>not<sp>found" ) ) ; } return setTableToProcessingAndTriggerUpdate ( tableId ) ; } }
|
org . junit . Assert . assertNotNull ( result )
|
test_WithValueMismatch ( ) { ctx . request ( ) . addHeader ( "myHeader" , "bar" ) ; extension . activate ( configWithRequestHeaderValues ) ; boolean actual = extension . accepts ( ctx . request ( ) , null , extension . getAllowedKeyValues ( ) ) ; "<AssertPlaceHolder>" ; } activate ( java . util . Map ) { this . listRoot = org . apache . sling . commons . osgi . PropertiesUtil . toString ( props . get ( com . adobe . acs . commons . genericlists . impl . GenericListJsonResourceProvider . PROP_LIST_ROOT ) , com . adobe . acs . commons . genericlists . impl . GenericListJsonResourceProvider . DEFAULT_LIST_ROOT ) ; }
|
org . junit . Assert . assertFalse ( actual )
|
testTwoTxs ( ) { doReturn ( java . util . Arrays . asList ( new java . lang . Object [ ] { java . util . Arrays . asList ( new java . lang . Object [ ] { barBytes } ) , java . util . Arrays . asList ( new java . lang . Object [ ] { fooBytes } ) } ) ) . when ( nativeConnection ) . closePipeline ( ) ; connection . get ( foo ) ; connection . exec ( ) ; connection . get ( bar ) ; connection . exec ( ) ; java . util . List < java . lang . Object > results = connection . closePipeline ( ) ; "<AssertPlaceHolder>" ; } asList ( redis . reply . MultiBulkReply ) { redis . reply . Reply < ? > [ ] replies = genericReply . data ( ) ; java . util . List < java . lang . Object > results = new java . util . ArrayList < java . lang . Object > ( ) ; for ( redis . reply . Reply < ? > reply : replies ) { results . add ( reply . data ( ) ) ; } return results ; }
|
org . junit . Assert . assertEquals ( java . util . Arrays . asList ( new java . lang . Object [ ] { java . util . Arrays . asList ( new java . lang . Object [ ] { bar } ) , java . util . Arrays . asList ( new java . lang . Object [ ] { foo } ) } ) , results )
|
testBatchInsertCheckForData ( ) { int [ ] res ; java . util . List < com . ctrip . platform . dal . dao . unitbase . ClientTestModel > modelList = null ; try { res = dao . batchInsert ( new com . ctrip . platform . dal . dao . unittests . DalHints ( ) , modelList ) ; org . junit . Assert . fail ( ) ; } catch ( java . lang . Exception e ) { } modelList = new java . util . ArrayList ( ) ; res = dao . batchInsert ( new com . ctrip . platform . dal . dao . unittests . DalHints ( ) , modelList ) ; "<AssertPlaceHolder>" ; } batchInsert ( com . ctrip . platform . dal . dao . DalHints , java . util . List ) { if ( ( null == daoPojos ) || ( ( daoPojos . size ( ) ) <= 0 ) ) { return new int [ 0 ] ; } hints = com . ctrip . platform . dal . dao . DalHints . createIfAbsent ( hints ) ; return client . batchInsert ( hints , daoPojos ) ; }
|
org . junit . Assert . assertArrayEquals ( new int [ 0 ] , res )
|
testSplitPath ( ) { java . lang . String [ ] expectedResult = new java . lang . String [ ] { "C:" , "Users" , "John" , "Image.jpg" } ; java . lang . String sep = java . io . File . separator ; java . lang . String [ ] actualResult = info . michaelkohler . helpertools . io . FileHelper . splitPath ( ( ( ( ( ( ( "C:" + sep ) + "Users" ) + sep ) + "John" ) + sep ) + "Image.jpg" ) ) ; for ( int i = 0 ; i < ( expectedResult . length ) ; i ++ ) { "<AssertPlaceHolder>" ; } } splitPath ( java . lang . String ) { checkNotNull ( path , "path<sp>cannot<sp>be<sp>null" ) ; return path . split ( java . util . regex . Pattern . quote ( File . separator ) ) ; }
|
org . junit . Assert . assertEquals ( expectedResult [ i ] , actualResult [ i ] )
|
getInstanceHashCode ( ) { "<AssertPlaceHolder>" ; } getInstance ( ) { for ( java . nio . file . spi . FileSystemProvider provider : java . nio . file . spi . FileSystemProvider . installedProviders ( ) ) { if ( provider instanceof org . apache . taverna . robundle . fs . BundleFileSystemProvider ) { return ( ( org . apache . taverna . robundle . fs . BundleFileSystemProvider ) ( provider ) ) ; } } return org . apache . taverna . robundle . fs . BundleFileSystemProvider . Singleton . INSTANCE ; }
|
org . junit . Assert . assertEquals ( org . apache . taverna . robundle . fs . BundleFileSystemProvider . getInstance ( ) . hashCode ( ) , new org . apache . taverna . robundle . fs . BundleFileSystemProvider ( ) . hashCode ( ) )
|
withoutClassloader ( ) { final org . eclipse . smarthome . core . storage . Storage < java . lang . String > storageWithoutClassloader = storageService . getStorage ( "storageWithoutClassloader" ) ; final java . lang . String value = "Value" ; storageWithoutClassloader . put ( org . eclipse . smarthome . storage . mapdb . internal . StorageServiceOSGiTest . KEY_1 , value ) ; "<AssertPlaceHolder>" ; } get ( java . lang . String ) { return get ( typeUID , null ) ; }
|
org . junit . Assert . assertEquals ( value , storageWithoutClassloader . get ( org . eclipse . smarthome . storage . mapdb . internal . StorageServiceOSGiTest . KEY_1 ) )
|
testAllMethodsImplemented ( ) { for ( java . lang . reflect . Method method : org . eclipse . aether . RepositorySystemSession . class . getMethods ( ) ) { java . lang . reflect . Method m = org . eclipse . aether . AbstractForwardingRepositorySystemSession . class . getDeclaredMethod ( method . getName ( ) , method . getParameterTypes ( ) ) ; "<AssertPlaceHolder>" ; } } toString ( ) { return authSchemes . toString ( ) ; }
|
org . junit . Assert . assertNotNull ( method . toString ( ) , m )
|
publishToken ( ) { java . lang . String bearer = sut . publishToken ( server , new scouterx . webapp . model . scouter . SUser ( vutUserId ) ) ; scouterx . webapp . framework . session . UserToken fromBearer = scouterx . webapp . framework . session . UserToken . fromBearerToken ( bearer ) ; "<AssertPlaceHolder>" ; } getUserId ( ) { return userId ; }
|
org . junit . Assert . assertEquals ( vutUserId , fromBearer . getUserId ( ) )
|
ASIPMessage_CompareInToOutMessageRawByteArray_success ( ) { java . lang . String rawInput = "Hello<sp>ASIP." ; net . sharkfw . asip . engine . ASIPOutMessage outMessage = new net . sharkfw . asip . engine . ASIPOutMessage ( this . engine , this . connection , 10 , sender , sender , receiverPeer , null , null , null , null ) ; outMessage . raw ( rawInput . getBytes ( StandardCharsets . UTF_8 ) ) ; this . connection . createInputStream ( ) ; net . sharkfw . asip . engine . ASIPInMessage inMessage = new net . sharkfw . asip . engine . ASIPInMessage ( this . engine , this . connection ) ; inMessage . parse ( ) ; java . lang . String text = null ; try ( java . util . Scanner scanner = new java . util . Scanner ( inMessage . getRaw ( ) , StandardCharsets . UTF_8 . name ( ) ) ) { text = scanner . useDelimiter ( "\\A" ) . next ( ) ; } "<AssertPlaceHolder>" ; } next ( ) { return this . iter . next ( ) ; }
|
org . junit . Assert . assertEquals ( rawInput , text )
|
shouldRaiseConstraintViolationCausePriceLow ( ) { javax . validation . executable . ExecutableValidator methodValidator = org . agoncal . book . javaee7 . chapter03 . ex03 . Book03Test . validator . forExecutables ( ) ; java . lang . reflect . Constructor < org . agoncal . book . javaee7 . chapter03 . ex03 . Book03 > constructor = org . agoncal . book . javaee7 . chapter03 . ex03 . Book03 . class . getConstructor ( java . lang . String . class , org . agoncal . book . javaee7 . chapter03 . ex03 . Float . class , java . lang . String . class , java . lang . String . class , org . agoncal . book . javaee7 . chapter03 . ex03 . Integer . class , org . agoncal . book . javaee7 . chapter03 . ex03 . Boolean . class ) ; java . util . Set < org . agoncal . book . javaee7 . chapter03 . ex03 . ConstraintViolation < org . agoncal . book . javaee7 . chapter03 . ex03 . Book03 > > violations = methodValidator . validateConstructorParameters ( constructor , new java . lang . Object [ ] { "H2G2" , 0.5F , "Best<sp>IT<sp>Scifi<sp>Book" , "1234-4566-9876" , 247 , false } ) ; displayContraintViolations ( violations ) ; "<AssertPlaceHolder>" ; } displayContraintViolations ( java . util . Set ) { for ( org . agoncal . book . javaee7 . chapter03 . ex12 . ConstraintViolation constraintViolation : constraintViolations ) { System . out . println ( ( ( ( ( ( ( ( "###<sp>" + ( constraintViolation . getRootBeanClass ( ) . getSimpleName ( ) ) ) + "." ) + ( constraintViolation . getPropertyPath ( ) ) ) + "<sp>-<sp>Invalid<sp>Value<sp>=<sp>" ) + ( constraintViolation . getInvalidValue ( ) ) ) + "<sp>-<sp>Error<sp>Msg<sp>=<sp>" ) + ( constraintViolation . getMessage ( ) ) ) ) ; } }
|
org . junit . Assert . assertEquals ( 1 , violations . size ( ) )
|
statusNotificationVerifyResponse ( ) { io . motown . ocpp . v15 . soap . centralsystem . StatusNotificationRequest request = new io . motown . ocpp . v15 . soap . centralsystem . StatusNotificationRequest ( ) ; request . setStatus ( ChargePointStatus . AVAILABLE ) ; io . motown . ocpp . v15 . soap . centralsystem . StatusNotificationResponse response = motownCentralSystemService . statusNotification ( request , io . motown . ocpp . v15 . soap . centralsystem . CHARGING_STATION_ID . getId ( ) ) ; "<AssertPlaceHolder>" ; } getId ( ) { return id ; }
|
org . junit . Assert . assertNotNull ( response )
|
testTooManyTagsForLocale ( ) { setupTagData ( ) ; final long tagObjectKey = 864238476 ; final java . lang . String [ ] newTagValues = new java . lang . String [ ] { "q" , "w" , "e" , "r" , "t" } ; try { runTX ( new java . util . concurrent . Callable < java . lang . Void > ( ) { @ org . oscm . serviceprovisioningservice . bean . Override public org . oscm . serviceprovisioningservice . bean . Void call ( ) throws org . oscm . serviceprovisioningservice . bean . Exception { for ( java . lang . String tagValue : newTagValues ) { createTag ( org . oscm . serviceprovisioningservice . bean . TagServiceBeanIT . LOCALE_EN , tagValue , tagObjectKey ) ; } return null ; } } ) ; final org . oscm . serviceprovisioningservice . bean . TechnicalProduct tp = runTX ( new java . util . concurrent . Callable < org . oscm . serviceprovisioningservice . bean . TechnicalProduct > ( ) { @ org . oscm . serviceprovisioningservice . bean . Override public org . oscm . serviceprovisioningservice . bean . TechnicalProduct call ( ) throws org . oscm . serviceprovisioningservice . bean . Exception { final org . oscm . serviceprovisioningservice . bean . TechnicalProduct tpDummy = new org . oscm . serviceprovisioningservice . bean . TechnicalProduct ( ) ; tpDummy . setTechnicalProductId ( java . lang . Long . toString ( tagObjectKey ) ) ; tpDummy . setOrganizationKey ( organization . getKey ( ) ) ; org . oscm . serviceprovisioningservice . bean . TechnicalProduct tp = ( ( org . oscm . serviceprovisioningservice . bean . TechnicalProduct ) ( dm . find ( tpDummy ) ) ) ; for ( org . oscm . serviceprovisioningservice . bean . TechnicalProductTag tpt : tp . getTags ( ) ) { tpt . getTag ( ) . getKey ( ) ; } return tp ; } } ) ; java . lang . String [ ] newValueArray = new java . lang . String [ ] { "q" , "w" , "e" , "1" , "w" 0 , "3" , "4" , "5" } ; final org . oscm . serviceprovisioningservice . bean . List < org . oscm . serviceprovisioningservice . bean . Tag > tags = createTransientTags ( org . oscm . serviceprovisioningservice . bean . TagServiceBeanIT . LOCALE_EN , newValueArray ) ; runTX ( new java . util . concurrent . Callable < java . lang . Void > ( ) { @ org . oscm . serviceprovisioningservice . bean . Override public org . oscm . serviceprovisioningservice . bean . Void call ( ) throws org . oscm . serviceprovisioningservice . bean . Exception { org . oscm . serviceprovisioningservice . bean . TechnicalProduct tpReloaded = ( ( org . oscm . serviceprovisioningservice . bean . TechnicalProduct ) ( dm . find ( tp ) ) ) ; tsLocal . updateTags ( tpReloaded , org . oscm . serviceprovisioningservice . bean . TagServiceBeanIT . LOCALE_EN , tags ) ; return null ; } } ) ; org . junit . Assert . fail ( "Method<sp>UpdateTags<sp>doesn't<sp>throw<sp>a<sp>ValidationException." ) ; } catch ( org . oscm . internal . types . exception . ValidationException e ) { final java . lang . String [ ] tagValues = runTX ( new java . util . concurrent . Callable < java . lang . String [ ] > ( ) { @ org . oscm . serviceprovisioningservice . bean . Override public java . lang . String [ ] call ( ) throws org . oscm . serviceprovisioningservice . bean . Exception { final org . oscm . serviceprovisioningservice . bean . TechnicalProduct tpDummy = new org . oscm . serviceprovisioningservice . bean . TechnicalProduct ( ) ; tpDummy . setTechnicalProductId ( java . lang . Long . toString ( tagObjectKey ) ) ; tpDummy . setOrganizationKey ( organization . getKey ( ) ) ; org . oscm . serviceprovisioningservice . bean . TechnicalProduct tp = ( ( org . 
oscm . serviceprovisioningservice . bean . TechnicalProduct ) ( dm . find ( tpDummy ) ) ) ; java . lang . String [ ] tagValues = new java . lang . String [ tp . getTags ( ) . size ( ) ] ; int i = 0 ; for ( org . oscm . serviceprovisioningservice . bean . TechnicalProductTag tpt : tp . getTags ( ) ) { tagValues [ ( i ++ ) ] = tpt . getTag ( ) . getValue ( ) ; } return tagValues ; } } ) ; for ( int i = 0 ; i < ( tagValues . length ) ; i ++ ) { "<AssertPlaceHolder>" ; } } } getValue ( ) { return value ; }
|
org . junit . Assert . assertEquals ( newTagValues [ i ] , tagValues [ i ] )
|
testParseNullSafeFieldAccessExpr ( ) { java . lang . String expr = "person!.name<sp>==<sp>\"Mark\"" ; com . github . javaparser . ast . expr . Expression expression = org . drools . constraint . parser . DrlxParser . parseExpression ( parser , expr ) . getExpr ( ) ; "<AssertPlaceHolder>" ; } printConstraint ( com . github . javaparser . ast . Node ) { com . github . javaparser . printer . PrettyPrinterConfiguration prettyPrinterConfiguration = new com . github . javaparser . printer . PrettyPrinterConfiguration ( ) ; org . drools . constraint . parser . printer . ConstraintPrintVisitor constraintPrintVisitor = new org . drools . constraint . parser . printer . ConstraintPrintVisitor ( prettyPrinterConfiguration ) ; node . accept ( constraintPrintVisitor , null ) ; return constraintPrintVisitor . getSource ( ) ; }
|
org . junit . Assert . assertEquals ( expr , printConstraint ( expression ) )
|
testUnsubscribeEventsAndConfirmEventsNoLongerFire ( ) { com . google . firebase . database . DatabaseReference ref = com . google . firebase . testing . IntegrationTestUtils . getRandomNode ( com . google . firebase . database . integration . EventTestIT . masterApp ) ; final java . util . concurrent . atomic . AtomicInteger callbackCount = new java . util . concurrent . atomic . AtomicInteger ( 0 ) ; final com . google . firebase . database . ValueEventListener listener = ref . addValueEventListener ( new com . google . firebase . database . ValueEventListener ( ) { @ com . google . firebase . database . integration . Override public void onDataChange ( com . google . firebase . database . DataSnapshot snapshot ) { if ( ( snapshot . getValue ( ) ) != null ) { callbackCount . incrementAndGet ( ) ; } } @ com . google . firebase . database . integration . Override public void onCancelled ( com . google . firebase . database . DatabaseError error ) { org . junit . Assert . fail ( "Should<sp>not<sp>be<sp>cancelled" ) ; } } ) ; com . google . firebase . database . core . ZombieVerifier . verifyRepoZombies ( ref ) ; for ( int i = 0 ; i < 3 ; ++ i ) { ref . setValueAsync ( i ) ; } com . google . firebase . database . TestHelpers . waitForRoundtrip ( ref ) ; ref . removeEventListener ( listener ) ; com . google . firebase . database . core . ZombieVerifier . verifyRepoZombies ( ref ) ; for ( int i = 10 ; i < 13 ; ++ i ) { ref . setValueAsync ( i ) ; } for ( int i = 20 ; i < 22 ; ++ i ) { ref . setValueAsync ( i ) ; } new com . google . firebase . database . future . WriteFuture ( ref , 22 ) . timedGet ( ) ; "<AssertPlaceHolder>" ; } get ( ) { counter . incrementAndGet ( ) ; return supplier . get ( ) ; }
|
org . junit . Assert . assertEquals ( 3 , callbackCount . get ( ) )
|
testCompareGelijkHogereId ( ) { final nl . bzk . brp . model . operationeel . kern . HisPersoonAfgeleidAdministratiefModel afgAdm1 = maakHisPersoonAfgeleidAdministratiefModel ( 345 , new nl . bzk . brp . model . algemeen . attribuuttype . kern . DatumTijdAttribuut ( nl . bzk . brp . model . algemeen . attribuuttype . kern . DatumTijdAttribuut . bouwDatumTijd ( 211 , 1 , 1 ) . getWaarde ( ) ) ) ; final nl . bzk . brp . model . operationeel . kern . HisPersoonAfgeleidAdministratiefModel afgAdm2 = maakHisPersoonAfgeleidAdministratiefModel ( 123 , new nl . bzk . brp . model . algemeen . attribuuttype . kern . DatumTijdAttribuut ( nl . bzk . brp . model . algemeen . attribuuttype . kern . DatumTijdAttribuut . bouwDatumTijd ( 211 , 1 , 1 ) . getWaarde ( ) ) ) ; final int resultaat = comparator . compare ( afgAdm1 , afgAdm2 ) ; "<AssertPlaceHolder>" ; } compare ( nl . bzk . migratiebrp . synchronisatie . dal . domein . brp . kern . entity . Document , nl . bzk . migratiebrp . synchronisatie . dal . domein . brp . kern . entity . Document ) { return ( berekenHash ( document1 ) ) - ( berekenHash ( document2 ) ) ; }
|
org . junit . Assert . assertEquals ( 1 , resultaat )
|
testHandleMessageRegisteredCollectorForTopic ( ) { io . github . tcdl . msb . api . message . Message originalAndReceivedMessage = io . github . tcdl . msb . support . TestUtils . createSimpleRequestMessage ( io . github . tcdl . msb . collector . CollectorManagerTest . TOPIC ) ; when ( collectorMock . getRequestMessage ( ) ) . thenReturn ( originalAndReceivedMessage ) ; io . github . tcdl . msb . collector . CollectorManager collectorManager = new io . github . tcdl . msb . collector . CollectorManager ( io . github . tcdl . msb . collector . CollectorManagerTest . TOPIC , channelManagerMock ) ; collectorManager . registerCollector ( collectorMock ) ; java . util . Optional < io . github . tcdl . msb . MessageHandler > resolved = collectorManager . resolveMessageHandler ( originalAndReceivedMessage ) ; "<AssertPlaceHolder>" ; } get ( ) { return delegate . get ( ) ; }
|
org . junit . Assert . assertEquals ( resolved . get ( ) , collectorMock )
|
testPutAndRemoveOnNoMergeTreeWithCollapse ( ) { com . sun . sgs . test . app . util . TestScalableHashMap . txnScheduler . runTask ( new com . sun . sgs . test . util . TestAbstractKernelRunnable ( ) { public void run ( ) throws com . sun . sgs . test . app . util . Exception { java . util . Map < java . lang . Integer , java . lang . Integer > test = com . sun . sgs . test . app . util . TestScalableHashMap . createScalableHashMap ( com . sun . sgs . test . app . util . Integer . class , com . sun . sgs . test . app . util . Integer . class , 1 , 8 , 4 ) ; java . util . Map < java . lang . Integer , java . lang . Integer > control = new java . util . HashMap < java . lang . Integer , java . lang . Integer > ( ) ; int [ ] inputs = new int [ 1024 ] ; for ( int i = 0 ; i < ( inputs . length ) ; i ++ ) { int j = com . sun . sgs . test . app . util . TestScalableHashMap . RANDOM . nextInt ( ) ; inputs [ i ] = j ; test . put ( j , j ) ; control . put ( j , j ) ; } for ( int i = 0 ; i < ( inputs . length ) ; i += 2 ) { test . remove ( inputs [ i ] ) ; control . remove ( inputs [ i ] ) ; } "<AssertPlaceHolder>" ; } } , com . sun . sgs . test . app . util . TestScalableHashMap . taskOwner ) ; } remove ( java . lang . Object ) { return removeFirstOccurrence ( o ) ; }
|
org . junit . Assert . assertEquals ( control , test )
|
testReifyDirectoryResourceFailsIfFileExists ( ) { java . io . File tempFile = java . io . File . createTempFile ( "forge" , "testReifyDirectoryResourceFailsIfFileExists" ) ; tempFile . deleteOnExit ( ) ; org . jboss . forge . addon . resource . DirectoryResource reified = factory . create ( tempFile ) . reify ( org . jboss . forge . addon . resource . DirectoryResource . class ) ; "<AssertPlaceHolder>" ; } reify ( java . lang . Class ) { if ( type . isInstance ( this ) ) { return type . cast ( this ) ; } else { return null ; } }
|
org . junit . Assert . assertNull ( reified )
|
testModifyTicketRemoveHostEmptyHost ( ) { org . irods . jargon . ticket . packinstr . TicketAdminInp pi = org . irods . jargon . ticket . packinstr . TicketAdminInp . instanceForModifyRemoveAccess ( ticketId , TicketModifyAddOrRemoveTypeEnum . TICKET_MODIFY_HOST , "" ) ; "<AssertPlaceHolder>" ; } instanceForModifyRemoveAccess ( java . lang . String , org . irods . jargon . ticket . packinstr . TicketModifyAddOrRemoveTypeEnum , java . lang . String ) { if ( ( ticketId == null ) || ( ticketId . isEmpty ( ) ) ) { throw new java . lang . IllegalArgumentException ( "null<sp>or<sp>empty<sp>ticket<sp>id" ) ; } if ( addTypeEnum == null ) { throw new java . lang . IllegalArgumentException ( "null<sp>modify<sp>remove<sp>permission<sp>type<sp>not<sp>set" ) ; } if ( ( modObject == null ) || ( modObject . isEmpty ( ) ) ) { throw new java . lang . IllegalArgumentException ( "null<sp>or<sp>empty<sp>modify<sp>remove<sp>-<sp>user,<sp>group,<sp>or<sp>host" ) ; } return new org . irods . jargon . ticket . packinstr . TicketAdminInp ( TICKET_ADMIN_INP_API_NBR , "mod" , ticketId , "remove" , addTypeEnum . getTextValue ( ) , modObject , BLANK ) ; }
|
org . junit . Assert . assertNotNull ( pi )
|
testFunctionDependingOnInputWithFunctionHierarchy ( ) { org . apache . flink . api . java . typeutils . TypeExtractorTest . IdentityMapper4 < java . lang . String > function = new org . apache . flink . api . java . typeutils . TypeExtractorTest . IdentityMapper4 < java . lang . String > ( ) ; org . apache . flink . api . common . typeinfo . TypeInformation < ? > ti = org . apache . flink . api . java . typeutils . TypeExtractor . getMapReturnTypes ( function , BasicTypeInfo . STRING_TYPE_INFO ) ; "<AssertPlaceHolder>" ; } getMapReturnTypes ( org . apache . flink . api . common . functions . MapFunction , org . apache . flink . api . common . typeinfo . TypeInformation ) { return org . apache . flink . api . java . typeutils . TypeExtractor . getMapReturnTypes ( mapInterface , inType , null , false ) ; }
|
org . junit . Assert . assertEquals ( BasicTypeInfo . STRING_TYPE_INFO , ti )
|
testPausedAccumulate ( ) { io . nats . client . impl . MessageQueue q = new io . nats . client . impl . MessageQueue ( true ) ; q . pause ( ) ; io . nats . client . impl . NatsMessage msg = q . accumulate ( 1 , 1 , null ) ; "<AssertPlaceHolder>" ; } accumulate ( long , long , java . time . Duration ) { if ( ! ( this . singleThreadedReader ) ) { throw new java . lang . IllegalStateException ( "Accumulate<sp>is<sp>only<sp>supported<sp>in<sp>single<sp>reader<sp>mode." ) ; } if ( ! ( this . isRunning ( ) ) ) { return null ; } io . nats . client . impl . NatsMessage msg = this . queue . poll ( ) ; if ( msg == null ) { msg = waitForTimeout ( timeout ) ; if ( ( ! ( this . isRunning ( ) ) ) || ( msg == null ) ) { return null ; } } long size = msg . getSizeInBytes ( ) ; if ( ( maxMessages <= 1 ) || ( size >= maxSize ) ) { this . sizeInBytes . addAndGet ( ( - size ) ) ; this . length . decrementAndGet ( ) ; signalIfNotEmpty ( ) ; return msg ; } long count = 1 ; io . nats . client . impl . NatsMessage cursor = msg ; while ( cursor != null ) { io . nats . client . impl . NatsMessage next = this . queue . peek ( ) ; if ( next != null ) { long s = next . getSizeInBytes ( ) ; if ( ( maxSize < 0 ) || ( ( size + s ) < maxSize ) ) { size += s ; count ++ ; cursor . next = this . queue . poll ( ) ; cursor = cursor . next ; if ( count == maxMessages ) { break ; } } else { break ; } } else { break ; } } this . sizeInBytes . addAndGet ( ( - size ) ) ; this . length . addAndGet ( ( - count ) ) ; signalIfNotEmpty ( ) ; return msg ; }
|
org . junit . Assert . assertNull ( msg )
|
testErrorIDGenerator_blocksize ( ) { boolean deleteLibraryFlag = deleteTypeLibrary ( org . ebayopensource . turmeric . tools . errorlibrary . ErrorIdGeneratorTests . MARKET_PLACES_STORE_LOCATION , ( ( org . ebayopensource . turmeric . tools . errorlibrary . ErrorIdGeneratorTests . MARKET_PLACES_ORG_NAME ) + 1 ) ) ; boolean isillegalArgumentException = false ; long id = 0 ; try { org . ebayopensource . turmeric . tools . errorlibrary . ErrorIdGenerator errorIdGenerator = org . ebayopensource . turmeric . tools . errorlibrary . ErrorIdGeneratorFactory . getErrorIdGenerator ( org . ebayopensource . turmeric . tools . errorlibrary . ErrorIdGeneratorTests . MARKET_PLACES_STORE_LOCATION , ( ( org . ebayopensource . turmeric . tools . errorlibrary . ErrorIdGeneratorTests . MARKET_PLACES_ORG_NAME ) + 1 ) , org . ebayopensource . turmeric . tools . errorlibrary . ErrorIdGeneratorTests . MIN_BLK_SIZE ) ; id = errorIdGenerator . getNextId ( org . ebayopensource . turmeric . tools . errorlibrary . ErrorIdGeneratorTests . DOMAIN_TWO ) ; } catch ( java . lang . IllegalArgumentException illegalArgumentException ) { isillegalArgumentException = true ; } "<AssertPlaceHolder>" ; } getNextId ( java . lang . String ) { long nextId ; synchronized ( this ) { java . io . File xmlFile = new java . io . File ( m_fileName ) ; if ( xmlFile . exists ( ) ) { errorBlocks = javax . xml . bind . JAXB . unmarshal ( xmlFile , org . ebayopensource . turmeric . common . config . OrganizationErrorBlocks . class ) ; buildUsedErrorBlockMap ( ) ; buildAllocatedRanges ( ) ; } boolean isFileLock = tryGetFileLock ( ) ; if ( ! isFileLock ) throw new org . ebayopensource . turmeric . tools . errorlibrary . exception . ErrorIdGeneratorException ( ( "Could<sp>not<sp>get<sp>the<sp>lock<sp>for<sp>the<sp>file<sp>:<sp>" + ( m_fileName ) ) ) ; if ( ! ( isDomainCreated ( domain ) ) ) createDomain ( domain ) ; nextId = findAndUpdateNextId ( domain ) ; persist ( ) ; } return nextId ; }
|
org . junit . Assert . assertEquals ( isillegalArgumentException , false )
|
testGetBytesWithPicos ( ) { int tag = new java . util . Random ( ) . nextInt ( ) ; byte [ ] bytes = fixio . fixprotocol . fields . UTCTimeOnlyFieldTest . TIMESTAMP_WITH_PICOS . getBytes ( ) ; fixio . fixprotocol . fields . UTCTimeOnlyField field = new fixio . fixprotocol . fields . UTCTimeOnlyField ( tag , bytes ) ; byte [ ] nanosBytes = ( ( fixio . fixprotocol . fields . UTCTimeOnlyFieldTest . TIMESTAMP_WITH_NANOS ) + "000" ) . getBytes ( ) ; "<AssertPlaceHolder>" ; } getBytes ( ) { switch ( valueLen ) { case 17 : return fixio . fixprotocol . fields . DATE_TIME_FORMATTER_SECONDS . format ( value ) . getBytes ( fixio . fixprotocol . fields . US_ASCII ) ; case 21 : return fixio . fixprotocol . fields . DATE_TIME_FORMATTER_MILLIS . format ( value ) . getBytes ( fixio . fixprotocol . fields . US_ASCII ) ; case 24 : return fixio . fixprotocol . fields . DATE_TIME_FORMATTER_MICROS . format ( value ) . getBytes ( fixio . fixprotocol . fields . US_ASCII ) ; case 27 : return fixio . fixprotocol . fields . DATE_TIME_FORMATTER_NANOS . format ( value ) . getBytes ( fixio . fixprotocol . fields . US_ASCII ) ; case 30 : return fixio . fixprotocol . fields . DATE_TIME_FORMATTER_PICOS . format ( value ) . getBytes ( fixio . fixprotocol . fields . US_ASCII ) ; default : if ( ( valueLen ) > ( DATE_TIME_PATTERN_PICOS_LENGTH ) ) { return fixio . fixprotocol . fields . DATE_TIME_FORMATTER_PICOS . format ( value ) . getBytes ( fixio . fixprotocol . fields . US_ASCII ) ; } else if ( ( valueLen ) > ( DATE_TIME_PATTERN_NANOS_LENGTH ) ) { return fixio . fixprotocol . fields . DATE_TIME_FORMATTER_NANOS . format ( value ) . getBytes ( fixio . fixprotocol . fields . US_ASCII ) ; } else if ( ( valueLen ) > ( DATE_TIME_PATTERN_MICROS_LENGTH ) ) { return fixio . fixprotocol . fields . DATE_TIME_FORMATTER_MICROS . format ( value ) . getBytes ( fixio . fixprotocol . fields . US_ASCII ) ; } else if ( ( valueLen ) > ( DATE_TIME_PATTERN_MILLIS_LENGTH ) ) { return fixio . fixprotocol . fields . DATE_TIME_FORMATTER_MILLIS . format ( value ) . getBytes ( fixio . fixprotocol . fields . US_ASCII ) ; } else { return fixio . fixprotocol . fields . DATE_TIME_FORMATTER_SECONDS . format ( value ) . getBytes ( fixio . fixprotocol . fields . US_ASCII ) ; } } }
|
org . junit . Assert . assertArrayEquals ( nanosBytes , field . getBytes ( ) )
|
testIntArrays ( ) { com . questdb . std . Rnd rnd = new com . questdb . std . Rnd ( ) ; com . questdb . store . UnstructuredFile hb = new com . questdb . store . UnstructuredFile ( temp . newFile ( ) , 16 , JournalMode . APPEND ) ; int [ ] vals = new int [ 100 ] ; long pos = hb . getPos ( ) ; for ( int i = 0 ; i < 10000 ; i ++ ) { for ( int k = 0 ; k < ( vals . length ) ; k ++ ) { vals [ k ] = rnd . nextInt ( ) ; } hb . put ( vals ) ; } rnd . reset ( ) ; hb . setPos ( pos ) ; for ( int i = 0 ; i < 10000 ; i ++ ) { hb . get ( vals ) ; for ( int k = 0 ; k < ( vals . length ) ; k ++ ) { "<AssertPlaceHolder>" ; } } } nextInt ( ) { return ( ( int ) ( nextLong ( ) ) ) ; }
|
org . junit . Assert . assertEquals ( rnd . nextInt ( ) , vals [ k ] )
|
testNullAdded ( ) { serviceFactoryMap . clear ( ) ; serviceLocator . factoryAdded ( null , java . util . Collections . emptyMap ( ) ) ; "<AssertPlaceHolder>" ; } size ( ) { return getPropertyNames ( ) . size ( ) ; }
|
org . junit . Assert . assertEquals ( 0 , serviceFactoryMap . size ( ) )
|
fib7Test ( ) { final int actual = edu . rice . pcdp . future . FibFutureTest . kernel ( 7 ) ; final int expected = 13 ; "<AssertPlaceHolder>" ; } kernel ( int ) { final int [ ] result = new int [ 1 ] ; edu . rice . pcdp . PCDP . finish ( ( ) -> { try { result [ 0 ] = fib ( N ) ; } catch ( final ex ) { edu . rice . pcdp . future . ex . printStackTrace ( ) ; result [ 0 ] = - 1 ; } } ) ; return result [ 0 ] ; }
|
org . junit . Assert . assertEquals ( expected , actual )
|
Property ( ) { java . lang . String testENTITY = "testEntity" ; org . odata4j . edm . EdmEntityType . Builder entityType = EntityType ( testENTITY ) ; java . util . List < org . odata4j . edm . EdmComplexType . Builder > cpBuilderList = new java . util . ArrayList < org . odata4j . edm . EdmComplexType . Builder > ( ) ; org . odata4j . edm . EdmComplexType . Builder ctBuilder = EntityTypecomplexProperty1 ( "newComplexType" , entityType ) ; org . odata4j . edm . EdmDataServices . Builder builder = org . odata4j . edm . EdmDataServices . newBuilder ( ) ; cpBuilderList . add ( ctBuilder ) ; ComplexTypesimpleProperty ( 49 , ctBuilder ) ; org . odata4j . edm . EdmSchema . Builder schema = org . odata4j . edm . EdmSchema . newBuilder ( ) . addEntityTypes ( entityType ) . setNamespace ( com . fujitsu . dc . test . unit . core . model . impl . es . odata . PropertyLimitCheckerTest . NS ) . addComplexTypes ( cpBuilderList ) ; org . odata4j . edm . EdmDataServices metadata = builder . addSchemas ( schema ) . build ( ) ; com . fujitsu . dc . core . model . impl . es . doc . PropertyDocHandler handler = new com . fujitsu . dc . core . model . impl . es . doc . ComplexTypePropertyDocHandler ( ) ; org . json . simple . JSONObject staticFields = new org . json . simple . JSONObject ( ) ; staticFields . put ( "Type" , "Edm.String" ) ; handler . setStaticFields ( staticFields ) ; java . util . Map < java . lang . String , java . lang . String > entityTypeMap = new java . util . HashMap < java . lang . String , java . lang . String > ( ) ; entityTypeMap . put ( "_ComplexType.Name_uniqueKey" , "newComplexType" ) ; handler . setEntityTypeMap ( entityTypeMap ) ; handler . setEntityTypeId ( "_uniqueKey" ) ; java . util . Map < java . lang . String , java . lang . Object > manyToOneKindMap = new java . util . HashMap < java . lang . String , java . lang . Object > ( ) ; manyToOneKindMap . put ( ComplexType . EDM_TYPE_NAME , "_uniqueKey" ) ; handler . setManyToOnelinkId ( manyToOneKindMap ) ; com . fujitsu . dc . core . model . impl . es . odata . PropertyLimitChecker checker = new com . fujitsu . dc . core . model . impl . es . odata . PropertyLimitChecker ( metadata , handler ) ; java . util . List < com . fujitsu . dc . core . model . impl . es . odata . PropertyLimitChecker . CheckError > errors = checker . checkPropertyLimits ( ) ; "<AssertPlaceHolder>" ; } checkPropertyLimits ( ) { java . util . List < com . fujitsu . dc . core . model . impl . es . odata . PropertyLimitChecker . CheckError > result = new java . util . ArrayList < com . fujitsu . dc . core . model . impl . es . odata . PropertyLimitChecker . CheckError > ( ) ; if ( null == ( metadata ) ) { return result ; } java . util . Iterator < org . odata4j . edm . EdmEntityType > iter = metadata . getEntityTypes ( ) . iterator ( ) ; while ( iter . hasNext ( ) ) { org . odata4j . edm . EdmEntityType target = iter . next ( ) ; checkPropertyLimitsForEntityTypeInternal ( result , target ) ; } java . util . Iterator < org . odata4j . edm . EdmComplexType > complexTypeIter = metadata . getComplexTypes ( ) . iterator ( ) ; while ( complexTypeIter . hasNext ( ) ) { int simplePropCount = 0 ; int complexPropCount = 0 ; org . odata4j . edm . EdmComplexType complexType = complexTypeIter . next ( ) ; for ( org . odata4j . edm . EdmProperty prop : complexType . getProperties ( ) ) { if ( prop . getName ( ) . startsWith ( "_" ) ) { continue ; } if ( prop . getType ( ) . 
isSimple ( ) ) { simplePropCount ++ ; } else { complexPropCount ++ ; } } if ( ( simpleMaxForOverAllLayers ) < simplePropCount ) { java . lang . String message = java . lang . String . format ( "Total<sp>property[%s]<sp>count<sp>exceeds<sp>the<sp>limit[%d]." , complexType . getName ( ) , simpleMaxForOverAllLayers ) ; com . fujitsu . dc . core . model . impl . es . odata . PropertyLimitChecker . log . info ( message ) ; result . add ( new com . fujitsu . dc . core . model . impl . es . odata . PropertyLimitChecker . CheckError ( complexType . getName ( ) , message ) ) ; } if ( ( complexMaxForOverallLayers ) < complexPropCount ) { java . lang . String message = java . lang . String . format ( "Total<sp>property[%s]<sp>count<sp>exceeds<sp>the<sp>limit[%d]." , complexType . getName ( ) , complexMaxForOverallLayers ) ; com . fujitsu . dc . core . model . impl . es . odata . PropertyLimitChecker . log . info ( message ) ; result . add ( new com . fujitsu . dc . core . model . impl . es . odata . PropertyLimitChecker . CheckError ( complexType . getName ( ) , message ) ) ; } } return result ; }
|
org . junit . Assert . assertEquals ( 0 , errors . size ( ) )
|
noFlow2 ( ) { java . lang . String mSig = ( "<" + ( soot . jimple . infoflow . test . methodSummary . junit . ApiTests . className ) ) + ":<sp>int<sp>noFlow2(int,int)>" ; java . util . Set < soot . jimple . infoflow . methodSummary . data . summary . MethodFlow > res = createSummaries ( mSig ) . getAllFlows ( ) ; "<AssertPlaceHolder>" ; } size ( ) { return ( this . results ) == null ? 0 : this . results . size ( ) ; }
|
org . junit . Assert . assertTrue ( ( ( res == null ) || ( ( res . size ( ) ) == 0 ) ) )
|
testBasicProbabilities ( ) { org . evosuite . seeding . StaticConstantVariableProbabilityPool pool1 = new org . evosuite . seeding . StaticConstantVariableProbabilityPool ( ) ; org . evosuite . seeding . StaticConstantVariableProbabilityPool pool2 = new org . evosuite . seeding . StaticConstantVariableProbabilityPool ( ) ; for ( int i = 0 ; i < 99 ; i ++ ) { pool1 . add ( "Foo" ) ; pool2 . add ( "Bar" ) ; } pool1 . add ( "Bar" ) ; pool2 . add ( "Foo" ) ; int count1 = 0 ; int count2 = 0 ; for ( int i = 0 ; i < 100 ; i ++ ) { if ( pool1 . getRandomString ( ) . equals ( "Bar" ) ) count1 ++ ; if ( pool2 . getRandomString ( ) . equals ( "Bar" ) ) count2 ++ ; } "<AssertPlaceHolder>" ; } equals ( java . lang . Object ) { return super . equals ( obj ) ; }
|
org . junit . Assert . assertTrue ( ( count1 < count2 ) )
|
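For readability, the `testBasicProbabilities` record above can be detokenized into plain Java roughly as shown below (in this dump, `<sp>` tokens simply stand for spaces). Every call used here (`add`, `getRandomString`) comes from the record itself; the wrapper class name and the JUnit imports are supplied only so the sketch is self-contained, not taken from the source.

```java
import static org.junit.Assert.assertTrue;

import org.evosuite.seeding.StaticConstantVariableProbabilityPool;
import org.junit.Test;

// Illustrative detokenization of the record above; class name is not from the source.
public class ProbabilityPoolDetokenizedExample {

    @Test
    public void testBasicProbabilities() {
        StaticConstantVariableProbabilityPool pool1 = new StaticConstantVariableProbabilityPool();
        StaticConstantVariableProbabilityPool pool2 = new StaticConstantVariableProbabilityPool();
        // pool1 is dominated by "Foo", pool2 by "Bar".
        for (int i = 0; i < 99; i++) {
            pool1.add("Foo");
            pool2.add("Bar");
        }
        pool1.add("Bar");
        pool2.add("Foo");

        int count1 = 0;
        int count2 = 0;
        for (int i = 0; i < 100; i++) {
            if (pool1.getRandomString().equals("Bar")) count1++;
            if (pool2.getRandomString().equals("Bar")) count2++;
        }
        // Target assertion from the record: "Bar" should be drawn more often
        // from the pool where it is the dominant constant.
        assertTrue(count1 < count2);
    }
}
```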
testDubiousEquals ( ) { com . questdb . griffin . model . IntrinsicModel m = modelOf ( "sum(ts)<sp>=<sp>sum(ts)" ) ; "<AssertPlaceHolder>" ; } modelOf ( java . lang . CharSequence ) { return modelOf ( seq , null ) ; }
|
org . junit . Assert . assertNull ( m . filter )
|
createNpmAngularPortlet ( ) { wizardAction . openNewLiferayModuleWizard ( ) ; wizardAction . newModule . prepareGradle ( project . getName ( ) , com . liferay . ide . ui . portlet . tests . NPM_ANGULAR_PORTLET ) ; wizardAction . finish ( ) ; jobAction . waitForNoRunningJobs ( ) ; "<AssertPlaceHolder>" ; viewAction . project . closeAndDelete ( project . getName ( ) ) ; } visibleFileTry ( java . lang . String [ ] ) { try { return _getProjects ( ) . isVisible ( files ) ; } catch ( java . lang . Exception e ) { _getProjects ( ) . setFocus ( ) ; try { java . lang . String [ ] parents = java . util . Arrays . copyOfRange ( files , 0 , ( ( files . length ) - 1 ) ) ; _getProjects ( ) . expand ( parents ) ; _getProjects ( ) . contextMenu ( com . liferay . ide . ui . liferay . action . REFRESH , parents ) ; ide . sleep ( 2000 ) ; } catch ( java . lang . Exception e1 ) { } for ( int i = ( files . length ) - 1 ; i > 0 ; i -- ) { java . lang . String [ ] parents = java . util . Arrays . copyOfRange ( files , 0 , ( ( files . length ) - i ) ) ; org . eclipse . swtbot . swt . finder . widgets . SWTBotTreeItem parent = _getProjects ( ) . getTreeItem ( parents ) ; _getProjects ( ) . expand ( parents ) ; java . lang . String subnode = files [ ( ( files . length ) - i ) ] ; _jobAction . waitForSubnode ( parent , subnode , com . liferay . ide . ui . liferay . action . REFRESH ) ; } return _getProjects ( ) . isVisible ( files ) ; } }
|
org . junit . Assert . assertTrue ( viewAction . project . visibleFileTry ( project . getName ( ) ) )
|
testRemove ( ) { com . liferay . friendly . url . model . FriendlyURLEntryMapping newFriendlyURLEntryMapping = addFriendlyURLEntryMapping ( ) ; _persistence . remove ( newFriendlyURLEntryMapping ) ; com . liferay . friendly . url . model . FriendlyURLEntryMapping existingFriendlyURLEntryMapping = _persistence . fetchByPrimaryKey ( newFriendlyURLEntryMapping . getPrimaryKey ( ) ) ; "<AssertPlaceHolder>" ; } getPrimaryKey ( ) { return _amImageEntryId ; }
|
org . junit . Assert . assertNull ( existingFriendlyURLEntryMapping )
|
testImplicitCastInAccumulateFunction ( ) { java . lang . String str = ( ( ( ( ( "import<sp>" + ( org . drools . modelcompiler . AccumulateTest . ShortValue . class . getCanonicalName ( ) ) ) + ";" ) + "rule<sp>X<sp>when\n" ) + "<sp>$max<sp>:<sp>Double(doubleValue<sp>!=<sp>Double.MAX_VALUE)<sp>from<sp>accumulate<sp>(<sp>ShortValue(<sp>$v<sp>:<sp>value<sp>);<sp>max($v)<sp>)<sp>\n" ) + "then\n" ) + "end" ; org . kie . api . runtime . KieSession ksession = getKieSession ( str ) ; ksession . insert ( new org . drools . modelcompiler . AccumulateTest . ShortValue ( ) ) ; "<AssertPlaceHolder>" ; } fireAllRules ( ) { return 0 ; }
|
org . junit . Assert . assertEquals ( 1 , ksession . fireAllRules ( ) )
|
testCompressAndDecompressBigPayload ( ) { byte [ ] input = new byte [ 1024 * 1024 ] ; new java . util . Random ( ) . nextBytes ( input ) ; int maxMessageSize = ( 1024 * 1024 ) + 8192 ; com . firefly . codec . websocket . model . extension . compress . DeflateFrameExtension clientExtension = new com . firefly . codec . websocket . model . extension . compress . DeflateFrameExtension ( ) ; clientExtension . setPolicy ( com . firefly . codec . websocket . stream . WebSocketPolicy . newClientPolicy ( ) ) ; clientExtension . getPolicy ( ) . setMaxBinaryMessageSize ( maxMessageSize ) ; clientExtension . getPolicy ( ) . setMaxBinaryMessageBufferSize ( maxMessageSize ) ; clientExtension . setConfig ( com . firefly . codec . websocket . model . ExtensionConfig . parse ( "deflate-frame" ) ) ; final com . firefly . codec . websocket . model . extension . compress . DeflateFrameExtension serverExtension = new com . firefly . codec . websocket . model . extension . compress . DeflateFrameExtension ( ) ; serverExtension . setPolicy ( com . firefly . codec . websocket . stream . WebSocketPolicy . newServerPolicy ( ) ) ; serverExtension . getPolicy ( ) . setMaxBinaryMessageSize ( maxMessageSize ) ; serverExtension . getPolicy ( ) . setMaxBinaryMessageBufferSize ( maxMessageSize ) ; serverExtension . setConfig ( com . firefly . codec . websocket . model . ExtensionConfig . parse ( "deflate-frame" ) ) ; clientExtension . setNextOutgoingFrames ( new com . firefly . codec . websocket . model . OutgoingFrames ( ) { @ test . codec . websocket . model . extension . compress . Override public void outgoingFrame ( com . firefly . codec . websocket . frame . Frame frame , com . firefly . utils . concurrent . Callback callback ) { test . codec . websocket . model . extension . compress . DeflateFrameExtensionTest . LOG . debug ( "outgoingFrame({})" , frame ) ; serverExtension . incomingFrame ( frame ) ; callback . succeeded ( ) ; } } ) ; final java . io . ByteArrayOutputStream result = new java . io . ByteArrayOutputStream ( input . length ) ; serverExtension . setNextIncomingFrames ( new com . firefly . codec . websocket . model . IncomingFrames ( ) { @ test . codec . websocket . model . extension . compress . Override public void incomingFrame ( com . firefly . codec . websocket . frame . Frame frame ) { test . codec . websocket . model . extension . compress . DeflateFrameExtensionTest . LOG . debug ( "incomingFrame({})" , frame ) ; try { result . write ( com . firefly . utils . io . BufferUtils . toArray ( frame . getPayload ( ) ) ) ; } catch ( java . io . IOException x ) { throw new com . firefly . utils . exception . CommonRuntimeException ( x ) ; } } @ test . codec . websocket . model . extension . compress . Override public void incomingError ( java . lang . Throwable t ) { } } ) ; com . firefly . codec . websocket . frame . BinaryFrame frame = new com . firefly . codec . websocket . frame . BinaryFrame ( ) ; frame . setPayload ( input ) ; frame . setFin ( true ) ; clientExtension . outgoingFrame ( frame , null ) ; "<AssertPlaceHolder>" ; } outgoingFrame ( com . firefly . codec . websocket . frame . Frame , com . firefly . utils . concurrent . Callback ) { java . nio . ByteBuffer payload = frame . getPayload ( ) ; int length = ( payload != null ) ? payload . remaining ( ) : 0 ; if ( ( ( com . firefly . codec . websocket . model . OpCode . isControlFrame ( frame . getOpCode ( ) ) ) || ( ( maxLength ) <= 0 ) ) || ( length <= ( maxLength ) ) ) { nextOutgoingFrame ( frame , callback ) ; return ; } com . 
firefly . codec . websocket . model . extension . fragment . FragmentExtension . FrameEntry entry = new com . firefly . codec . websocket . model . extension . fragment . FragmentExtension . FrameEntry ( frame , callback ) ; if ( com . firefly . codec . websocket . model . extension . fragment . FragmentExtension . LOG . isDebugEnabled ( ) ) com . firefly . codec . websocket . model . extension . fragment . FragmentExtension . LOG . debug ( "Queuing<sp>{}" , entry ) ; offerEntry ( entry ) ; flusher . iterate ( ) ; }
|
org . junit . Assert . assertArrayEquals ( input , result . toByteArray ( ) )
|
testGetVisibleGridWidgets ( ) { doReturn ( getGridWidgets ( ) ) . when ( defaultGridLayer ) . getGridWidgets ( ) ; final java . util . stream . Stream < org . uberfire . ext . wires . core . grids . client . widget . grid . GridWidget > gridWidgets = gridLienzoScrollBounds . getVisibleGridWidgets ( ) ; "<AssertPlaceHolder>" ; } count ( ) { return 1 ; }
|
org . junit . Assert . assertEquals ( 3 , gridWidgets . count ( ) )
|
testBasicData ( ) { com . github . jsonzou . jmockdata . BasicBean basicBean = com . github . jsonzou . jmockdata . JMockData . mock ( com . github . jsonzou . jmockdata . BasicBean . class ) ; "<AssertPlaceHolder>" ; try { com . github . jsonzou . jmockdata . JMockData . mock ( com . github . jsonzou . jmockdata . ErrorBean . class ) ; org . junit . Assert . fail ( ) ; } catch ( java . lang . Exception e ) { } } mock ( java . lang . Class ) { return com . github . jsonzou . jmockdata . JMockData . mock ( clazz , new com . github . jsonzou . jmockdata . MockConfig ( ) ) ; }
|
org . junit . Assert . assertNotNull ( basicBean )
|
testListByAuthUri ( ) { runTestAsSubject ( new org . apache . sentry . provider . db . service . thrift . TestOperation ( ) { @ org . apache . sentry . provider . db . service . thrift . Override public void runTestAsSubject ( ) throws org . apache . sentry . provider . db . service . thrift . Exception { java . lang . String requestorUserName = ADMIN_USER ; java . util . Set < java . lang . String > requestorUserGroupNames = com . google . common . collect . Sets . newHashSet ( org . apache . sentry . provider . db . service . thrift . ADMIN_GROUP ) ; java . lang . String roleName1 = "role1" ; java . lang . String roleName2 = "role2" ; java . lang . String server = "server1" ; java . lang . String db = "testDB" ; java . lang . String db2 = "testDB2" ; java . lang . String tab = "testTab" ; java . lang . String uri1 = "hdfs:///fooUri" ; setLocalGroupMapping ( requestorUserName , requestorUserGroupNames ) ; writePolicyFile ( ) ; client . dropRoleIfExists ( requestorUserName , roleName1 ) ; client . createRole ( requestorUserName , roleName1 ) ; client . dropRoleIfExists ( requestorUserName , roleName2 ) ; client . createRole ( requestorUserName , roleName2 ) ; client . grantDatabasePrivilege ( requestorUserName , roleName1 , server , db , AccessConstants . SELECT ) ; client . grantTablePrivilege ( requestorUserName , roleName1 , server , db , tab , AccessConstants . ALL ) ; client . grantTablePrivilege ( requestorUserName , roleName1 , server , db2 , tab , AccessConstants . SELECT ) ; org . apache . sentry . provider . db . service . thrift . TSentryPrivilege role1uri1 = client . grantURIPrivilege ( requestorUserName , roleName1 , server , uri1 ) ; client . grantDatabasePrivilege ( requestorUserName , roleName2 , server , db , AccessConstants . ALL ) ; client . grantDatabasePrivilege ( requestorUserName , roleName2 , server , db2 , AccessConstants . SELECT ) ; client . grantTablePrivilege ( requestorUserName , roleName2 , server , db2 , tab , AccessConstants . ALL ) ; org . apache . sentry . provider . db . service . thrift . TSentryPrivilege role2uri2 = client . grantURIPrivilege ( requestorUserName , roleName2 , server , uri1 ) ; org . apache . sentry . provider . db . service . thrift . TSentryPrivilegeMap db1RoleToPrivMap = new org . apache . sentry . provider . db . service . thrift . TSentryPrivilegeMap ( new java . util . TreeMap < java . lang . String , java . util . Set < org . apache . sentry . provider . db . service . thrift . TSentryPrivilege > > ( ) ) ; db1RoleToPrivMap . getPrivilegeMap ( ) . put ( roleName1 , com . google . common . collect . Sets . newHashSet ( role1uri1 ) ) ; db1RoleToPrivMap . getPrivilegeMap ( ) . put ( roleName2 , com . google . common . collect . Sets . newHashSet ( role2uri2 ) ) ; java . util . Map < org . apache . sentry . provider . db . service . thrift . TSentryAuthorizable , org . apache . sentry . provider . db . service . thrift . TSentryPrivilegeMap > expectedResults = com . google . common . collect . Maps . newTreeMap ( ) ; java . util . List < ? extends org . apache . sentry . core . common . Authorizable > uri1Authrizable = com . google . common . collect . Lists . newArrayList ( new org . apache . sentry . core . model . db . Server ( server ) , new org . apache . sentry . core . model . db . AccessURI ( uri1 ) ) ; expectedResults . put ( org . apache . sentry . provider . db . service . thrift . SentryPolicyServiceClientDefaultImpl . setupSentryAuthorizable ( uri1Authrizable ) , db1RoleToPrivMap ) ; java . util . Set < java . util . 
List < ? extends org . apache . sentry . core . common . Authorizable > > authorizableSet = com . google . common . collect . Sets . newHashSet ( ) ; authorizableSet . add ( uri1Authrizable ) ; java . util . Map < org . apache . sentry . provider . db . service . thrift . TSentryAuthorizable , org . apache . sentry . provider . db . service . thrift . TSentryPrivilegeMap > authPrivMap = client . listPrivilegsbyAuthorizable ( requestorUserName , authorizableSet , null , null ) ; "<AssertPlaceHolder>" ; } } ) ; } listPrivilegsbyAuthorizable ( java . lang . String , java . util . Set , java . util . Set , org . apache . sentry . core . common . ActiveRoleSet ) { java . util . Set < org . apache . sentry . provider . db . service . thrift . TSentryAuthorizable > authSet = com . google . common . collect . Sets . newTreeSet ( ) ; for ( java . util . List < ? extends org . apache . sentry . core . common . Authorizable > authorizableHierarchy : authorizables ) { authSet . add ( org . apache . sentry . provider . db . service . thrift . SentryPolicyServiceClientDefaultImpl . setupSentryAuthorizable ( authorizableHierarchy ) ) ; } org . apache . sentry . provider . db . service . thrift . TListSentryPrivilegesByAuthRequest request = new org . apache . sentry . provider . db . service . thrift . TListSentryPrivilegesByAuthRequest ( org . apache . sentry . service . thrift . ServiceConstants . ThriftConstants . TSENTRY_SERVICE_VERSION_CURRENT , requestorUserName , authSet ) ; if ( groups != null ) { request . setGroups ( groups ) ; } if ( roleSet != null ) { request . setRoleSet ( new org . apache . sentry . provider . db . service
|
org . junit . Assert . assertEquals ( expectedResults , authPrivMap )
|
testWriteALotOfFiles ( ) { com . devexperts . qd . qtp . file . test . FileWriterParams . Default params = new com . devexperts . qd . qtp . file . test . FileWriterParams . Default ( ) ; params . setFormat ( FileFormat . TEXT ) ; params . setSplit ( com . devexperts . util . TimePeriod . valueOf ( "1s" ) ) ; fileWriter = new com . devexperts . qd . qtp . file . test . FileWriterImpl ( ( ( ( com . devexperts . qd . qtp . file . test . FileWriterTest . NAME_PREFIX ) + "~" ) + ( com . devexperts . qd . qtp . file . test . FileWriterTest . NAME_SUFFIX ) ) , scheme , params ) . open ( ) ; fileWriter . addSendMessageType ( MessageType . STREAM_DATA ) ; com . devexperts . qd . test . TestDataProvider provider = new com . devexperts . qd . test . TestDataProvider ( scheme , com . devexperts . qd . qtp . file . test . FileWriterTest . SEED , com . devexperts . qd . qtp . file . test . FileWriterTest . RECORD_CNT , false ) ; com . devexperts . qd . qtp . file . test . HeartbeatPayload heartbeatPayload = new com . devexperts . qd . qtp . file . test . HeartbeatPayload ( ) ; long time = ( ( java . lang . System . currentTimeMillis ( ) ) / ( params . getSplit ( ) . getTime ( ) ) ) * ( params . getSplit ( ) . getTime ( ) ) ; for ( int i = 1 ; i < ( ( ( com . devexperts . qd . qtp . file . test . FileWriterTest . A_LOT_OF_FILES ) * 10 ) - 2 ) ; i ++ ) { heartbeatPayload . setTimeMillis ( ( time + ( ( i * ( params . getSplit ( ) . getTime ( ) ) ) / 10 ) ) ) ; fileWriter . visitHeartbeat ( heartbeatPayload ) ; fileWriter . visitData ( provider , MessageType . STREAM_DATA ) ; } fileWriter . close ( ) ; "<AssertPlaceHolder>" ; } getDataFiles ( ) { return new java . io . File ( "." ) . listFiles ( ( dir , name ) -> ( name . startsWith ( com . devexperts . qd . qtp . file . test . FileWriterTest . NAME_PREFIX ) ) && ( name . endsWith ( com . devexperts . qd . qtp . file . test . FileWriterTest . NAME_SUFFIX ) ) ) ; }
|
org . junit . Assert . assertEquals ( com . devexperts . qd . qtp . file . test . FileWriterTest . A_LOT_OF_FILES , getDataFiles ( ) . length )
|
debieraCrearTemporadaColportor ( ) { log . debug ( "temporada.id" 2 ) ; mx . edu . um . mateo . colportor . web . Organizacion organizacion = new mx . edu . um . mateo . colportor . web . Organizacion ( "observaciones" 9 , "observaciones" 4 , "observaciones" 4 ) ; currentSession ( ) . save ( organizacion ) ; mx . edu . um . mateo . colportor . web . Empresa empresa = new mx . edu . um . mateo . colportor . web . Empresa ( "observaciones" 9 , "empresa" , "temporada.id" 3 , "123456789123" , organizacion ) ; currentSession ( ) . save ( empresa ) ; mx . edu . um . mateo . inventario . model . Almacen almacen = new mx . edu . um . mateo . inventario . model . Almacen ( "observaciones" 9 , "nombre" , empresa ) ; currentSession ( ) . save ( almacen ) ; mx . edu . um . mateo . colportor . model . Colportor colportor = new mx . edu . um . mateo . colportor . model . Colportor ( "test01@test.com" , "temporada.id" 1 , "temporada.id" 1 , "temporada.id" 1 , "temporada.id" 1 , "temporada.id" 1 , mx . edu . um . mateo . general . utils . Constantes . STATUS_ACTIVO , "observaciones" 5 , "temporada.id" 1 , "temporada.id" 1 , "observaciones" 3 , "temporada.id" 1 , "observaciones" 8 , new java . util . Date ( ) ) ; colportor . setAlmacen ( almacen ) ; colportor . setEmpresa ( empresa ) ; currentSession ( ) . save ( colportor ) ; mx . edu . um . mateo . colportor . web . Usuario asociado = new mx . edu . um . mateo . colportor . model . Asociado ( "observaciones" 7 , "temporada.id" 1 , "temporada.id" 1 , "temporada.id" 1 , "temporada.id" 1 , mx . edu . um . mateo . general . utils . Constantes . STATUS_ACTIVO , mx . edu . um . mateo . general . utils . Constantes . CLAVE , mx . edu . um . mateo . general . utils . Constantes . TELEFONO , mx . edu . um . mateo . general . utils . Constantes . CALLE , mx . edu . um . mateo . general . utils . Constantes . COLONIA , mx . edu . um . mateo . general . utils . Constantes . MUNICIPIO ) ; asociado . setAlmacen ( almacen ) ; asociado . setEmpresa ( empresa ) ; currentSession ( ) . save ( asociado ) ; mx . edu . um . mateo . colportor . model . Temporada temporada = new mx . edu . um . mateo . colportor . model . Temporada ( "temporada.id" 1 ) ; temporada . setOrganizacion ( organizacion ) ; currentSession ( ) . save ( temporada ) ; "<AssertPlaceHolder>" ; mx . edu . um . mateo . colportor . model . ColegioColportor colegio = new mx . edu . um . mateo . colportor . model . ColegioColportor ( "observaciones" 0 , mx . edu . um . mateo . general . utils . Constantes . STATUS_ACTIVO ) ; currentSession ( ) . save ( colegio ) ; colegioDao . crea ( colegio ) ; log . debug ( ( "observaciones" 1 + colegio ) ) ; this . authenticate ( asociado , asociado . getPassword ( ) , new java . util . ArrayList ( asociado . getAuthorities ( ) ) ) ; java . text . SimpleDateFormat sdf = new java . text . SimpleDateFormat ( mx . edu . um . mateo . general . utils . Constantes . DATE_SHORT_HUMAN_PATTERN ) ; this . mockMvc . perform ( post ( Constantes . TEMPORADACOLPORTOR_PATH_CREA ) . param ( "observaciones" 6 , Constantes . STATUS_ACTIVO ) . param ( "fecha" , sdf . format ( new java . util . Date ( ) ) ) . param ( "objetivo" , "temporada.id" 1 ) . param ( "temporada.id" , temporada . getId ( ) . toString ( ) ) . param ( "colportor.id" , colportor . getId ( ) . toString ( ) ) . param ( "asociado.id" , asociado . getId ( ) . toString ( ) ) . param ( "temporada.id" 0 , colegio . getId ( ) . toString ( ) ) ) . andExpect ( status ( ) . isOk ( ) ) ; } currentSession ( ) { return sessionFactory . 
getCurrentSession ( ) ; }
|
org . junit . Assert . assertNotNull ( temporada )
|
testDrawBorderLineDashed2 ( ) { org . apache . fop . afp . BorderPaintingInfo paintInfo = new org . apache . fop . afp . BorderPaintingInfo ( 0 , 0 , 0 , 0 , false , org . apache . fop . fo . Constants . EN_DASHED , java . awt . Color . BLACK ) ; borderPainter . paint ( paintInfo ) ; ds . endDocument ( ) ; "<AssertPlaceHolder>" ; } endDocument ( ) { mifFile . finish ( true ) ; try { mifFile . output ( outStream ) ; outStream . flush ( ) ; } catch ( java . io . IOException ioe ) { throw new org . xml . sax . SAXException ( ioe ) ; } }
|
org . junit . Assert . assertNull ( line )
|
testRepositoryContainsStoredObject ( ) { com . github . resource4j . ResourceObject object = given ( com . github . resource4j . objects . ByteArrayResourceObjectBuilder . anObject ( ) ) ; objects . put ( object . name ( ) , withoutContext ( ) , object :: asStream ) ; "<AssertPlaceHolder>" ; } contains ( java . lang . String , com . github . resource4j . resources . context . ResourceResolutionContext ) { java . lang . String resolvedName = resolver ( ) . resolve ( name , context ) ; java . io . File file = new java . io . File ( base , resolvedName ) ; return com . github . resource4j . objects . providers . mutable . FileResourceObjectRepository . isResourceFile ( file ) ; }
|
org . junit . Assert . assertTrue ( objects . contains ( object . name ( ) , withoutContext ( ) ) )
|
testAddDbNotExisting ( ) { System . out . println ( ( ( getTestTraceHead ( "[MySQLCache.addDb]" ) ) + "--------<sp>A<sp>database<sp>is<sp>added<sp>if<sp>not<sp>existing<sp>in<sp>the<sp>cache" ) ) ; com . telefonica . iot . cygnus . backends . mysql . MySQLCache cache = new com . telefonica . iot . cygnus . backends . mysql . MySQLCache ( ) ; java . lang . String dbName = "dbname" ; boolean added = cache . addDb ( dbName ) ; try { "<AssertPlaceHolder>" ; System . out . println ( ( ( getTestTraceHead ( "[MySQLCache.addDb]" ) ) + "-<sp>OK<sp>-<sp>The<sp>database<sp>was<sp>added" ) ) ; } catch ( java . lang . AssertionError e ) { System . out . println ( ( ( getTestTraceHead ( "[MySQLCache.addDb]" ) ) + "-<sp>FAIL<sp>-<sp>The<sp>database<sp>was<sp>not<sp>added" ) ) ; throw e ; } addDb ( java . lang . String ) { if ( hierarchy . containsKey ( dbName ) ) { com . telefonica . iot . cygnus . backends . mysql . MySQLCache . LOGGER . debug ( ( ( "'" + dbName ) + "'<sp>not<sp>added<sp>to<sp>the<sp>cache,<sp>since<sp>already<sp>existing" ) ) ; return false ; } else { hierarchy . put ( dbName , new java . util . ArrayList < java . lang . String > ( ) ) ; com . telefonica . iot . cygnus . backends . mysql . MySQLCache . LOGGER . debug ( ( ( "'" + dbName ) + "'<sp>added<sp>to<sp>the<sp>cache" ) ) ; return true ; }
|
org . junit . Assert . assertTrue ( added )
|
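The `MySQLCache` record above detokenizes into a short JUnit test. The API used (`addDb` returning `true` when the database is not yet cached) is taken directly from the focal method shown in the record; the wrapper class name is illustrative.

```java
import static org.junit.Assert.assertTrue;

import com.telefonica.iot.cygnus.backends.mysql.MySQLCache;
import org.junit.Test;

// Illustrative detokenization of the record above; class name is not from the source.
public class MySQLCacheDetokenizedExample {

    @Test
    public void testAddDbNotExisting() {
        MySQLCache cache = new MySQLCache();
        String dbName = "dbname";
        // addDb() returns true on first insertion, false if already cached.
        boolean added = cache.addDb(dbName);
        // Target assertion from the record.
        assertTrue(added);
    }
}
```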
setPercentType ( ) { org . eclipse . smarthome . core . library . items . NumberItem item = new org . eclipse . smarthome . core . library . items . NumberItem ( org . eclipse . smarthome . core . library . items . NumberItemTest . ITEM_NAME ) ; org . eclipse . smarthome . core . types . State percent = new org . eclipse . smarthome . core . library . types . PercentType ( 50 ) ; item . setState ( percent ) ; "<AssertPlaceHolder>" ; } getState ( ) { return state ; }
|
org . junit . Assert . assertEquals ( percent , item . getState ( ) )
|
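Detokenized, the `setPercentType` record reads as the sketch below. The `NumberItem`, `PercentType`, `setState`, and `getState` calls are exactly those in the record; the `ITEM_NAME` constant is not expanded in the record, so the literal `"item1"` is an assumption, and the package declaration mirrors the record's test class location rather than being prescribed by the source.

```java
// Mirrors the package of NumberItemTest as named in the record.
package org.eclipse.smarthome.core.library.items;

import static org.junit.Assert.assertEquals;

import org.eclipse.smarthome.core.library.types.PercentType;
import org.eclipse.smarthome.core.types.State;
import org.junit.Test;

public class NumberItemDetokenizedExample {

    // Placeholder for NumberItemTest.ITEM_NAME, whose value is not shown in the record.
    private static final String ITEM_NAME = "item1";

    @Test
    public void setPercentType() {
        NumberItem item = new NumberItem(ITEM_NAME);
        State percent = new PercentType(50);
        item.setState(percent);
        // Target assertion from the record: the item reports the state it was given.
        assertEquals(percent, item.getState());
    }
}
```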
test32 ( ) { byte [ ] packet = new byte [ ] { ( ( byte ) ( 255 ) ) , ( ( byte ) ( 255 ) ) } ; com . openddal . server . mysql . proto . Proto proto = new com . openddal . server . mysql . proto . Proto ( packet ) ; java . util . ArrayList < byte [ ] > payload = new java . util . ArrayList < byte [ ] > ( ) ; payload . add ( com . openddal . server . mysql . proto . Proto . build_fixed_str ( packet . length , proto . get_fixed_str ( packet . length ) ) ) ; "<AssertPlaceHolder>" ; } arraylist_to_array ( java . util . ArrayList ) { int size = 0 ; for ( byte [ ] field : input ) size += field . length ; byte [ ] result = new byte [ size ] ; int offset = 0 ; for ( byte [ ] field : input ) { java . lang . System . arraycopy ( field , 0 , result , offset , field . length ) ; offset += field . length ; } return result ; }
|
org . junit . Assert . assertArrayEquals ( packet , com . openddal . server . mysql . proto . Proto . arraylist_to_array ( payload ) )
|
reentrantLock ( ) { java . util . concurrent . locks . Lock lock = new java . util . concurrent . locks . ReentrantLock ( ) ; try ( alluxio . resource . LockResource r1 = new alluxio . resource . LockResource ( lock ) ) { try ( alluxio . resource . LockResource r2 = new alluxio . resource . LockResource ( lock ) ) { "<AssertPlaceHolder>" ; lock . unlock ( ) ; } } } tryLock ( ) { return mAvailable . tryAcquire ( mPermits ) ; }
|
org . junit . Assert . assertTrue ( lock . tryLock ( ) )
|
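The Alluxio `LockResource` record above becomes the following try-with-resources test once detokenized. The `LockResource(Lock)` constructor and its auto-close behaviour are implied by the record itself; only the wrapper class name is supplied here.

```java
import static org.junit.Assert.assertTrue;

import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

import alluxio.resource.LockResource;
import org.junit.Test;

// Illustrative detokenization of the record above; class name is not from the source.
public class LockResourceDetokenizedExample {

    @Test
    public void reentrantLock() {
        Lock lock = new ReentrantLock();
        // LockResource acquires the lock on construction and releases it on close(),
        // so nesting two resources on a reentrant lock is legal.
        try (LockResource r1 = new LockResource(lock)) {
            try (LockResource r2 = new LockResource(lock)) {
                // Target assertion from the record: the owning thread can still
                // acquire a reentrant lock it already holds.
                assertTrue(lock.tryLock());
                lock.unlock();
            }
        }
    }
}
```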
testCannotExistTwoOrderElementsWithTheSameCode ( ) { final java . lang . String repeatedCode = "code1" ; org . libreplan . ws . common . api . OrderLineDTO orderLineDTO = createOrderLineDTO ( repeatedCode ) ; org . libreplan . ws . common . api . OrderDTO orderDTO = createOrderDTO ( repeatedCode ) ; orderDTO . children . add ( orderLineDTO ) ; org . libreplan . ws . orders . api . OrderListDTO orderListDTO = createOrderListDTO ( orderDTO ) ; java . util . List < org . libreplan . ws . common . api . InstanceConstraintViolationsDTO > instanceConstraintViolationsList = orderElementService . addOrders ( orderListDTO ) . instanceConstraintViolationsList ; "<AssertPlaceHolder>" ; } toString ( ) { return ( ( ( ( ( ( ( ( ( "Task:<sp>" + ( getDuration ( ) ) ) + "<sp>(" ) + ( earliestStart ) ) + "," ) + ( earliestFinish ) ) + ")<sp>(" ) + ( latestStart ) ) + "," ) + ( latestFinish ) ) + ")" ; }
|
org . junit . Assert . assertTrue ( instanceConstraintViolationsList . toString ( ) , ( ( instanceConstraintViolationsList . size ( ) ) == 1 ) )
|
testGetOverlayReturningNull ( ) { final edu . illinois . library . cantaloupe . resource . RequestContext context = new edu . illinois . library . cantaloupe . resource . RequestContext ( ) ; edu . illinois . library . cantaloupe . script . DelegateProxyService service = edu . illinois . library . cantaloupe . script . DelegateProxyService . getInstance ( ) ; edu . illinois . library . cantaloupe . script . DelegateProxy proxy = service . newDelegateProxy ( context ) ; edu . illinois . library . cantaloupe . operation . overlay . Overlay overlay = instance . getOverlay ( proxy ) ; "<AssertPlaceHolder>" ; } getOverlay ( edu . illinois . library . cantaloupe . script . DelegateProxy ) { final java . util . Map < java . lang . String , java . lang . Object > defs = overlayProperties ( proxy ) ; if ( defs != null ) { final int inset = ( ( java . lang . Long ) ( defs . get ( "inset" ) ) ) . intValue ( ) ; final edu . illinois . library . cantaloupe . operation . overlay . Position position = ( ( edu . illinois . library . cantaloupe . operation . overlay . Position ) ( defs . get ( "position" ) ) ) ; final java . lang . String location = ( ( java . lang . String ) ( defs . get ( "stroke_color" 2 ) ) ) ; if ( location != null ) { try { java . net . URI overlayURI ; if ( ImageOverlay . SUPPORTED_URI_SCHEMES . stream ( ) . anyMatch ( location :: startsWith ) ) { overlayURI = new java . net . URI ( location ) ; } else { overlayURI = java . nio . file . Paths . get ( location ) . toUri ( ) ; } return new edu . illinois . library . cantaloupe . operation . overlay . ImageOverlay ( overlayURI , position , inset ) ; } catch ( java . net . URISyntaxException e ) { edu . illinois . library . cantaloupe . operation . overlay . DelegateOverlayService . LOGGER . error ( "getOverlay():<sp>{}" , e . getMessage ( ) ) ; return null ; } } else { final java . lang . String string = ( ( java . lang . String ) ( defs . get ( "stroke_color" 0 ) ) ) ; final java . util . Map < java . awt . font . TextAttribute , java . lang . Object > attributes = new java . util . HashMap ( ) ; attributes . put ( TextAttribute . FAMILY , defs . get ( "stroke_color" 3 ) ) ; attributes . put ( TextAttribute . SIZE , defs . get ( "font_size" ) ) ; attributes . put ( TextAttribute . WEIGHT , defs . get ( "font_weight" ) ) ; attributes . put ( TextAttribute . TRACKING , defs . get ( "glyph_spacing" ) ) ; final java . awt . Font font = java . awt . Font . getFont ( attributes ) ; final edu . illinois . library . cantaloupe . operation . Color backgroundColor = edu . illinois . library . cantaloupe . operation . Color . fromString ( ( ( java . lang . String ) ( defs . get ( "background_color" ) ) ) ) ; final edu . illinois . library . cantaloupe . operation . Color color = edu . illinois . library . cantaloupe . operation . Color . fromString ( ( ( java . lang . String ) ( defs . get ( "color" ) ) ) ) ; final int minSize = ( ( java . lang . Long ) ( defs . get ( "font_min_size" ) ) ) . intValue ( ) ; final edu . illinois . library . cantaloupe . operation . Color strokeColor = edu . illinois . library . cantaloupe . operation . Color . fromString ( ( ( java . lang . String ) ( defs . get ( "stroke_color" ) ) ) ) ; final float strokeWidth = java . lang . Float . parseFloat ( defs . get ( "stroke_color" 1 ) . toString ( ) ) ; return new edu . illinois . library . cantaloupe . operation . overlay . StringOverlay ( string , position , inset , font , minSize , color , backgroundColor , strokeColor , strokeWidth ) ; } } return null ; }
|
org . junit . Assert . assertNull ( overlay )
|
testSetApplicationContext ( ) { org . quartz . impl . JobDetailImpl jobDetail = new org . quartz . impl . JobDetailImpl ( ) ; jobDetail . setJobClass ( com . thinkbiganalytics . scheduler . quartz . MockJob . class ) ; org . quartz . spi . TriggerFiredBundle bundle = new org . quartz . spi . TriggerFiredBundle ( jobDetail , org . mockito . Mockito . mock ( org . quartz . spi . OperableTrigger . class ) , new org . quartz . impl . calendar . BaseCalendar ( ) , true , new java . util . Date ( ) , new java . util . Date ( ) , new java . util . Date ( ) , new java . util . Date ( ) ) ; "<AssertPlaceHolder>" ; } createJobInstance ( org . quartz . spi . TriggerFiredBundle ) { final java . lang . Object job = super . createJobInstance ( bundle ) ; beanFactory . autowireBean ( job ) ; return job ; }
|
org . junit . Assert . assertNotNull ( factory . createJobInstance ( bundle ) )
|
testGetHierarchyTypeByName ( ) { when ( hierarchyTypeService . getHierarchyTypeByName ( "Test" ) ) . thenReturn ( expectedHierarchyType ) ; org . egov . infra . admin . master . entity . HierarchyType hierarchyType = hierarchyTypeService . getHierarchyTypeByName ( "Test" ) ; "<AssertPlaceHolder>" ; } getName ( ) { return name ; }
|
org . junit . Assert . assertTrue ( hierarchyType . getName ( ) . equalsIgnoreCase ( expectedHierarchyType . getName ( ) ) )
|
testValidate ( ) { final ca . uhn . fhir . jaxrs . server . OperationOutcome oo = new ca . uhn . fhir . jaxrs . server . OperationOutcome ( ) ; final ca . uhn . fhir . jaxrs . server . Patient patient = new ca . uhn . fhir . jaxrs . server . Patient ( ) ; patient . addIdentifier ( new ca . uhn . fhir . jaxrs . server . Identifier ( ) . setValue ( "1" ) ) ; final ca . uhn . fhir . jaxrs . server . Parameters inParams = new ca . uhn . fhir . jaxrs . server . Parameters ( ) ; inParams . addParameter ( ) . setResource ( patient ) ; final ca . uhn . fhir . rest . api . MethodOutcome mO = ca . uhn . fhir . jaxrs . server . AbstractJaxRsResourceProviderDstu3Test . client . validate ( ) . resource ( patient ) . execute ( ) ; "<AssertPlaceHolder>" ; } getOperationOutcome ( ) { return toOperationOutcome ( ) ; }
|
org . junit . Assert . assertNotNull ( mO . getOperationOutcome ( ) )
|
testAnimalNegativeAge ( ) { org . eclipse . emf . ecore . EObject generic = testFactory . create ( animal ) ; generic . eSet ( age , ( - 50 ) ) ; "<AssertPlaceHolder>" ; } validateConstraints ( org . eclipse . emf . ecore . EObject ) { final org . eclipse . core . runtime . IStatus status = gov . nasa . ensemble . core . plan . formula . js . TestJSAnnotationConstraint . batchValidator . validate ( target ) ; return status . isOK ( ) ; }
|
org . junit . Assert . assertFalse ( validateConstraints ( generic ) )
|
testCallSoapClient_exception ( ) { com . google . api . ads . common . lib . client . RemoteCallReturn expectedRemoteCallReturn = new com . google . api . ads . common . lib . client . RemoteCallReturn . Builder ( ) . withException ( MockSoapClient . EXCEPTION ) . build ( ) ; com . google . api . ads . common . lib . soap . SoapCall < java . lang . Object > soapCall = org . mockito . Mockito . mock ( com . google . api . ads . common . lib . soap . SoapCall . class ) ; when ( soapClientHandler . invokeSoapCall ( soapCall ) ) . thenReturn ( expectedRemoteCallReturn ) ; com . google . api . ads . common . lib . client . RemoteCallReturn testRemoteCallReturn = soapServiceClient . callSoapClient ( soapCall ) ; "<AssertPlaceHolder>" ; } getException ( ) { return exception ; }
|
org . junit . Assert . assertSame ( MockSoapClient . EXCEPTION , testRemoteCallReturn . getException ( ) )
|
bytesNotEmittedToSinkWithoutFlush ( ) { okio . Buffer sink = new okio . Buffer ( ) ; okio . BufferedSink bufferedSink = new okio . RealBufferedSink ( sink ) ; bufferedSink . writeUtf8 ( "abc" ) ; "<AssertPlaceHolder>" ; } size ( ) { return pos ; }
|
org . junit . Assert . assertEquals ( 0 , sink . size ( ) )
|
testClearArgs ( ) { final org . apache . oozie . fluentjob . api . action . HiveActionBuilder builder = getBuilderInstance ( ) ; for ( final java . lang . String file : org . apache . oozie . fluentjob . api . action . TestHiveActionBuilder . ARGS ) { builder . withArg ( file ) ; } builder . clearArgs ( ) ; final org . apache . oozie . fluentjob . api . action . HiveAction action = builder . build ( ) ; final java . util . List < java . lang . String > argList = action . getArgs ( ) ; "<AssertPlaceHolder>" ; } size ( ) { return org . apache . oozie . event . MemoryEventQueue . currentSize . intValue ( ) ; }
|
org . junit . Assert . assertEquals ( 0 , argList . size ( ) )
|
testGetConfigSystemProperty ( ) { final java . lang . String propName = Config . HASH_ALGORITHM ; final java . lang . String originalValue = java . lang . System . getProperty ( propName ) ; try { final java . lang . String newVal = "XXX" ; java . lang . System . setProperty ( propName , newVal ) ; org . hyperledger . fabric . sdk . helper . Config config = org . hyperledger . fabric . sdk . helper . Config . getConfig ( ) ; "<AssertPlaceHolder>" ; } finally { setSystemProperty ( propName , originalValue ) ; } } getHashAlgorithm ( ) { return getProperty ( org . hyperledger . fabric_ca . sdk . helper . Config . HASH_ALGORITHM ) ; }
|
org . junit . Assert . assertEquals ( config . getHashAlgorithm ( ) , newVal )
|
getAutomorphismGroup_StartingPartitionTest ( ) { org . openscience . cdk . group . Partition partition = org . openscience . cdk . group . Partition . fromString ( "0,1|2,3" ) ; java . lang . String acpString = "C0C1C2C3<sp>0:1(1),0:3(1),1:2(1),2:3(1)" ; org . openscience . cdk . interfaces . IAtomContainer ac = org . openscience . cdk . group . AtomContainerPrinter . fromString ( acpString , org . openscience . cdk . group . BondDiscretePartitionRefinerTest . builder ) ; org . openscience . cdk . group . BondDiscretePartitionRefiner refiner = new org . openscience . cdk . group . BondDiscretePartitionRefiner ( ) ; org . openscience . cdk . group . PermutationGroup autG = refiner . getAutomorphismGroup ( ac , partition ) ; "<AssertPlaceHolder>" ; } order ( ) { java . util . Collections . sort ( cells , new java . util . Comparator < java . util . SortedSet < java . lang . Integer > > ( ) { @ org . openscience . cdk . group . Override public int compare ( java . util . SortedSet < java . lang . Integer > cellA , java . util . SortedSet < java . lang . Integer > cellB ) { return cellA . first ( ) . compareTo ( cellB . first ( ) ) ; } } ) ; }
|
org . junit . Assert . assertEquals ( 2 , autG . order ( ) )
|
testUpdateWithRelationship ( ) { org . apache . cayenne . access . IvConcrete parent1 = context . newObject ( org . apache . cayenne . access . IvConcrete . class ) ; parent1 . setName ( "Parent1" ) ; context . commitChanges ( ) ; org . apache . cayenne . access . IvConcrete parent2 = context . newObject ( org . apache . cayenne . access . IvConcrete . class ) ; parent2 . setName ( "Parent2" ) ; context . commitChanges ( ) ; org . apache . cayenne . access . IvConcrete child = context . newObject ( org . apache . cayenne . access . IvConcrete . class ) ; child . setName ( "Child" ) ; child . setParent ( parent1 ) ; context . commitChanges ( ) ; child . setParent ( parent2 ) ; context . commitChanges ( ) ; "<AssertPlaceHolder>" ; context . deleteObject ( child ) ; context . commitChanges ( ) ; } getParent ( ) { return ( ( org . apache . cayenne . commitlog . db . Auditable1 ) ( readProperty ( "parent" ) ) ) ; }
|
org . junit . Assert . assertEquals ( parent2 , child . getParent ( ) )
|
isAdminProfile ( ) { com . navercorp . pinpoint . bootstrap . config . ProfilerConfig profilerConfig = new com . navercorp . pinpoint . bootstrap . config . DefaultProfilerConfig ( ) ; com . navercorp . pinpoint . plugin . hbase . HbasePluginConfig config = new com . navercorp . pinpoint . plugin . hbase . HbasePluginConfig ( profilerConfig ) ; "<AssertPlaceHolder>" ; System . out . println ( config ) ; } isAdminProfile ( ) { return adminProfile ; }
|
org . junit . Assert . assertTrue ( config . isAdminProfile ( ) )
|
determineSchemaCanReadSchemaFromHDFS ( ) { java . lang . String schemaString = TestAvroObjectInspectorGenerator . RECORD_SCHEMA ; org . apache . hadoop . hdfs . MiniDFSCluster miniDfs = null ; try { miniDfs = new org . apache . hadoop . hdfs . MiniDFSCluster ( new org . apache . hadoop . conf . Configuration ( ) , 1 , true , null ) ; miniDfs . getFileSystem ( ) . mkdirs ( new org . apache . hadoop . fs . Path ( "/path/to/schema" ) ) ; org . apache . hadoop . fs . FSDataOutputStream out = miniDfs . getFileSystem ( ) . create ( new org . apache . hadoop . fs . Path ( "/path/to/schema/schema.avsc" ) ) ; out . writeBytes ( schemaString ) ; out . close ( ) ; java . lang . String onHDFS = ( miniDfs . getFileSystem ( ) . getUri ( ) ) + "/path/to/schema/schema.avsc" ; org . apache . avro . Schema schemaFromHDFS = org . apache . hadoop . hive . serde2 . avro . AvroSerdeUtils . getSchemaFromFS ( onHDFS , miniDfs . getFileSystem ( ) . getConf ( ) ) ; org . apache . avro . Schema expectedSchema = org . apache . hadoop . hive . serde2 . avro . AvroSerdeUtils . getSchemaFor ( schemaString ) ; "<AssertPlaceHolder>" ; } finally { if ( miniDfs != null ) miniDfs . shutdown ( ) ; } } getSchemaFor ( java . lang . String ) { org . apache . avro . Schema . Parser parser = new org . apache . avro . Schema . Parser ( ) ; org . apache . avro . Schema schema = parser . parse ( str ) ; return schema ; }
|
org . junit . Assert . assertEquals ( expectedSchema , schemaFromHDFS )
|
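For reference, one of the rows can be reassembled into a complete, runnable JUnit test by substituting the output column for the "<AssertPlaceHolder>" token in the input column. The sketch below does this for the okio bytesNotEmittedToSinkWithoutFlush row; it is an illustration only, assuming Okio 1.x and JUnit 4 on the classpath. The raw row constructs the package-private RealBufferedSink directly (the original test lives inside the okio package), so Okio.buffer(...) is used here as the public equivalent, and the wrapper class name BufferedSinkFlushTest is invented for this example.

import okio.Buffer;
import okio.BufferedSink;
import okio.Okio;
import okio.Sink;

import org.junit.Assert;
import org.junit.Test;

public class BufferedSinkFlushTest {

    @Test
    public void bytesNotEmittedToSinkWithoutFlush() {
        // Underlying sink that the buffered sink will eventually write into.
        Buffer sink = new Buffer();

        // Public-API stand-in for the package-private RealBufferedSink used in the raw row.
        // The cast disambiguates the Source/Sink overloads, since Buffer implements both.
        BufferedSink bufferedSink = Okio.buffer((Sink) sink);

        // Write into the buffered sink but do not flush.
        bufferedSink.writeUtf8("abc");

        // The assertion from the output column: nothing reaches the underlying
        // sink until flush() (or close()) is called.
        Assert.assertEquals(0, sink.size());
    }
}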