input: string, lengths 28 to 18.7k characters
output: string, lengths 39 to 1.69k characters
validUrlsThatDoNotExistShouldWork ( ) { final org . constretto . model . UrlResource urlResource = new org . constretto . model . UrlResource ( "http://loocalhost/notHere.html" ) ; "<AssertPlaceHolder>" ; } exists ( ) { java . io . InputStream is = getInputStream ( ) ; boolean result = is != null ; try { if ( is != null ) { is . close ( ) ; } } catch ( java . lang . Exception e ) { } return result ; }
org . junit . Assert . assertFalse ( urlResource . exists ( ) )
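For readability, the following is a hypothetical reconstruction of the first input/output pair as ordinary Java, assuming that "<sp>" encodes a space inside string literals and that the output column supplies the assertion that replaces "<AssertPlaceHolder>". The surrounding class name and the @Test annotation are illustrative assumptions; the statements themselves are taken verbatim from the row above.

import org.constretto.model.UrlResource;
import org.junit.Assert;
import org.junit.Test;

// Illustrative reconstruction only; class name and annotation are assumed.
public class UrlResourceExampleTest {

    @Test
    public void validUrlsThatDoNotExistShouldWork() {
        // Input column: the test body with its assertion masked as "<AssertPlaceHolder>".
        final UrlResource urlResource =
            new UrlResource("http://loocalhost/notHere.html");
        // Output column: the assertion that fills the placeholder.
        Assert.assertFalse(urlResource.exists());
    }
}

The trailing method fragment in each input (here, exists ( ) { ... }) appears to be the focal method under test, included as context alongside the masked test.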
testGetInstance ( ) { org . openscience . smsd . tools . BondEnergies energies = org . openscience . smsd . tools . BondEnergies . getInstance ( ) ; "<AssertPlaceHolder>" ; } getInstance ( ) { if ( null == ( org . openscience . smsd . tools . BondEnergies . instance ) ) { org . openscience . smsd . tools . BondEnergies . instance = new org . openscience . smsd . tools . BondEnergies ( ) ; } return org . openscience . smsd . tools . BondEnergies . instance ; }
org . junit . Assert . assertNotNull ( energies )
tryEvictThatFailsOnExclusiveLockMustNotUndoSaidLock ( ) { pageList . unlockExclusive ( pageRef ) ; int swapperId = swappers . allocate ( org . neo4j . io . pagecache . impl . muninn . PageListTest . DUMMY_SWAPPER ) ; doFault ( swapperId , 42 ) ; pageList . tryEvict ( pageRef , EvictionRunEvent . NULL ) ; "<AssertPlaceHolder>" ; } isExclusivelyLocked ( long ) { return ( ( org . neo4j . io . pagecache . impl . muninn . OffHeapPageLock . getState ( address ) ) & ( org . neo4j . io . pagecache . impl . muninn . OffHeapPageLock . EXL_MASK ) ) == ( org . neo4j . io . pagecache . impl . muninn . OffHeapPageLock . EXL_MASK ) ; }
org . junit . Assert . assertTrue ( pageList . isExclusivelyLocked ( pageRef ) )
shouldFindAUserByUsername ( ) { com . danidemi . tutorial . tdd . mocks . UserDao userDao = new com . danidemi . tutorial . tdd . mocks . UserDaoImpl ( com . danidemi . tutorial . tdd . mocks . UserDaoDbTest . jdbcConf ) ; com . danidemi . tutorial . tdd . mocks . User john = userDao . findUserByUsername ( "John" ) ; "<AssertPlaceHolder>" ; } findUserByUsername ( java . lang . String ) { com . danidemi . tutorial . tdd . showcase . authentication . User user = null ; java . sql . Connection connection = getConnection ( ) ; java . lang . String sql = "SELECT<sp>*<sp>FROM<sp>USERS<sp>WHERE<sp>USERNAME<sp>=<sp>?" ; java . sql . PreparedStatement s = connection . prepareStatement ( sql ) ; s . setString ( 1 , username ) ; java . sql . ResultSet rs = s . executeQuery ( ) ; if ( rs . next ( ) ) { } return user ; }
org . junit . Assert . assertNotNull ( john )
testFactoryMethod ( ) { final org . apache . druid . segment . realtime . appenderator . Committed committed = org . apache . druid . segment . realtime . appenderator . CommittedTest . fixedInstance ( ) ; final org . apache . druid . segment . realtime . appenderator . Committed committed2 = org . apache . druid . segment . realtime . appenderator . Committed . create ( com . google . common . collect . ImmutableMap . of ( org . apache . druid . segment . realtime . appenderator . CommittedTest . IDENTIFIER_OBJECT1 , 3 , org . apache . druid . segment . realtime . appenderator . CommittedTest . IDENTIFIER_OBJECT2 , 2 ) , com . google . common . collect . ImmutableMap . of ( "metadata" , "foo" ) ) ; "<AssertPlaceHolder>" ; } of ( java . lang . String , java . lang . Object [ ] ) { return org . apache . druid . java . util . common . Intervals . of ( org . apache . druid . java . util . common . StringUtils . format ( format , formatArgs ) ) ; }
org . junit . Assert . assertEquals ( committed , committed2 )
shouldTheAlgorithmReturnTheCorrectSolutionWhenSolvingProblemOneMax ( ) { int NUMBER_OF_BITS = 512 ; org . uma . jmetal . algorithm . Algorithm < org . uma . jmetal . solution . BinarySolution > algorithm ; org . uma . jmetal . problem . BinaryProblem problem = new org . uma . jmetal . problem . singleobjective . OneMax ( NUMBER_OF_BITS ) ; org . uma . jmetal . operator . CrossoverOperator < org . uma . jmetal . solution . BinarySolution > crossoverOperator = new org . uma . jmetal . operator . impl . crossover . SinglePointCrossover ( 0.9 ) ; org . uma . jmetal . operator . MutationOperator < org . uma . jmetal . solution . BinarySolution > mutationOperator = new org . uma . jmetal . operator . impl . mutation . BitFlipMutation ( ( 1.0 / ( problem . getNumberOfBits ( 0 ) ) ) ) ; org . uma . jmetal . operator . SelectionOperator < java . util . List < org . uma . jmetal . solution . BinarySolution > , org . uma . jmetal . solution . BinarySolution > selectionOperator = new org . uma . jmetal . operator . impl . selection . BinaryTournamentSelection < org . uma . jmetal . solution . BinarySolution > ( ) ; algorithm = new org . uma . jmetal . algorithm . singleobjective . geneticalgorithm . GeneticAlgorithmBuilder < org . uma . jmetal . solution . BinarySolution > ( problem , crossoverOperator , mutationOperator ) . setPopulationSize ( 50 ) . setMaxEvaluations ( 50000 ) . setSelectionOperator ( selectionOperator ) . build ( ) ; new org . uma . jmetal . util . AlgorithmRunner . Executor ( algorithm ) . execute ( ) ; org . uma . jmetal . solution . BinarySolution solution = algorithm . getResult ( ) ; "<AssertPlaceHolder>" ; } getObjective ( int ) { return 0 ; }
org . junit . Assert . assertEquals ( NUMBER_OF_BITS , ( ( - 1 ) * ( ( int ) ( solution . getObjective ( 0 ) ) ) ) )
testQuerySerialization ( ) { org . apache . druid . query . Query query = org . apache . druid . query . Druids . newTimeBoundaryQueryBuilder ( ) . dataSource ( "testing" ) . build ( ) ; java . lang . String json = org . apache . druid . query . timeboundary . TimeBoundaryQueryTest . jsonMapper . writeValueAsString ( query ) ; org . apache . druid . query . Query serdeQuery = org . apache . druid . query . timeboundary . TimeBoundaryQueryTest . jsonMapper . readValue ( json , org . apache . druid . query . Query . class ) ; "<AssertPlaceHolder>" ; } build ( ) { return new org . apache . druid . indexing . overlord . http . OverlordTest . MockTaskRunner ( runTaskCountDownLatches , taskCompletionCountDownLatches ) ; }
org . junit . Assert . assertEquals ( query , serdeQuery )
testFileContextResolveAfs ( ) { org . apache . hadoop . conf . Configuration conf = new org . apache . hadoop . conf . Configuration ( ) ; localFs = org . apache . hadoop . fs . FileSystem . get ( conf ) ; org . apache . hadoop . fs . Path localPath = new org . apache . hadoop . fs . Path ( ( ( org . apache . hadoop . fs . TestFileContextResolveAfs . TEST_ROOT_DIR_LOCAL ) + "/TestFileContextResolveAfs1" ) ) ; org . apache . hadoop . fs . Path linkPath = new org . apache . hadoop . fs . Path ( ( ( "file://" + ( org . apache . hadoop . fs . TestFileContextResolveAfs . TEST_ROOT_DIR_LOCAL ) ) + "/TestFileContextResolveAfs2" ) ) ; localFs . mkdirs ( new org . apache . hadoop . fs . Path ( org . apache . hadoop . fs . TestFileContextResolveAfs . TEST_ROOT_DIR_LOCAL ) ) ; localFs . create ( localPath ) ; fc . createSymlink ( localPath , linkPath , true ) ; java . util . Set < org . apache . hadoop . fs . AbstractFileSystem > afsList = fc . resolveAbstractFileSystems ( linkPath ) ; "<AssertPlaceHolder>" ; localFs . deleteOnExit ( localPath ) ; localFs . deleteOnExit ( linkPath ) ; localFs . close ( ) ; } size ( ) { return multimap . size ( ) ; }
org . junit . Assert . assertEquals ( 1 , afsList . size ( ) )
should_apply_order_by_in_order ( ) { final java . lang . String name = "findAllOrderByNameDescIdAsc" ; final java . lang . String expected = "select<sp>e<sp>from<sp>Simple<sp>e<sp>" + "order<sp>by<sp>e.name<sp>desc,<sp>e.id<sp>asc" ; java . lang . String result = org . apache . deltaspike . data . impl . builder . part . QueryRoot . create ( name , repo , prefix ( name ) ) . getJpqlQuery ( ) . trim ( ) ; "<AssertPlaceHolder>" ; } getJpqlQuery ( ) { return jpqlQuery ; }
org . junit . Assert . assertEquals ( expected , result )
deveObterDataOcorrenciaBaixaComoFoiSetado ( ) { final com . fincatto . documentofiscal . nfe400 . classes . cadastro . NFRetornoConsultaCadastroSituacaoCadastral retorno = new com . fincatto . documentofiscal . nfe400 . classes . cadastro . NFRetornoConsultaCadastroSituacaoCadastral ( ) ; final java . time . LocalDate dataOcorrenciaBaixa = java . time . LocalDate . from ( java . time . format . DateTimeFormatter . ofPattern ( "dd/MM/yyyy" ) . parse ( "20/10/2010" ) ) ; retorno . setDataOcorrenciaBaixa ( dataOcorrenciaBaixa ) ; "<AssertPlaceHolder>" ; } getDataOcorrenciaBaixa ( ) { return this . dataOcorrenciaBaixa ; }
org . junit . Assert . assertEquals ( dataOcorrenciaBaixa , retorno . getDataOcorrenciaBaixa ( ) )
testFactoryWhenUserFilter ( ) { conf . setVar ( HiveConf . ConfVars . HIVE_SERVER2_PLAIN_LDAP_USERFILTER , "User1,User2" ) ; "<AssertPlaceHolder>" ; } getInstance ( java . util . List ) { java . util . ArrayList < java . util . List < java . lang . String > > key = new java . util . ArrayList < java . util . List < java . lang . String > > ( 1 ) ; key . add ( columnNames ) ; org . apache . hadoop . hive . serde2 . objectinspector . MetadataListStructObjectInspector result = org . apache . hadoop . hive . serde2 . objectinspector . MetadataListStructObjectInspector . cached . get ( key ) ; if ( result == null ) { result = new org . apache . hadoop . hive . serde2 . objectinspector . MetadataListStructObjectInspector ( columnNames ) ; org . apache . hadoop . hive . serde2 . objectinspector . MetadataListStructObjectInspector prev = org . apache . hadoop . hive . serde2 . objectinspector . MetadataListStructObjectInspector . cached . putIfAbsent ( key , result ) ; if ( prev != null ) { result = prev ; } } return result ; }
org . junit . Assert . assertNotNull ( factory . getInstance ( conf ) )
testGetURLsForClasspathWithSomeNonExistentAndSuppression ( ) { final java . lang . String jarFilePath = "src/test/resources/TestClassLoaderUtils/TestSuccess.jar,src/test/resources/TestClassLoaderUtils/FakeTest.jar" ; java . net . URL [ ] urls = org . apache . nifi . util . file . classloader . ClassLoaderUtils . getURLsForClasspath ( jarFilePath , null , true ) ; "<AssertPlaceHolder>" ; } getURLsForClasspath ( java . lang . String , java . io . FilenameFilter , boolean ) { return org . apache . nifi . util . file . classloader . ClassLoaderUtils . getURLsForClasspath ( ( modulePath == null ? java . util . Collections . emptySet ( ) : java . util . Collections . singleton ( modulePath ) ) , filenameFilter , suppressExceptions ) ; }
org . junit . Assert . assertEquals ( 1 , urls . length )
updateMarketplace_MarketplaceNotFound ( ) { doThrow ( new org . oscm . internal . types . exception . ObjectNotFoundException ( org . oscm . internal . types . exception . DomainObjectException . ClassEnum . MARKETPLACE , "mId" ) ) . when ( mmps ) . updateMarketplace ( any ( org . oscm . internal . vo . VOMarketplace . class ) , any ( org . oscm . ui . beans . POMarketplacePriceModel . class ) , any ( org . oscm . ui . beans . POPartnerPriceModel . class ) ) ; umpb . updateMarketplace ( ) ; verify ( mbMock , times ( 1 ) ) . resetMenuVisibility ( ) ; "<AssertPlaceHolder>" ; } hasErrors ( ) { return errors ; }
org . junit . Assert . assertTrue ( ui . hasErrors ( ) )
testBeanInjectable ( ) { org . jboss . as . test . integration . weld . modules . access . BuiltInBeanWithPackagePrivateConstructor bean = injectedBean . getBean ( ) ; bean . setValue ( "foo" ) ; "<AssertPlaceHolder>" ; } getValue ( ) { return value ; }
org . junit . Assert . assertEquals ( "foo" , bean . getValue ( ) )
getSessionNotExists ( ) { io . undertow . server . HttpServerExchange exchange = new io . undertow . server . HttpServerExchange ( null ) ; org . wildfly . clustering . ee . Batcher < org . wildfly . clustering . ee . Batch > batcher = mock ( org . wildfly . clustering . ee . Batcher . class ) ; org . wildfly . clustering . ee . Batch batch = mock ( org . wildfly . clustering . ee . Batch . class ) ; io . undertow . server . session . SessionConfig config = mock ( io . undertow . server . session . SessionConfig . class ) ; java . lang . String sessionId = "session" ; when ( config . findSessionId ( exchange ) ) . thenReturn ( sessionId ) ; when ( this . manager . findSession ( sessionId ) ) . thenReturn ( null ) ; when ( this . manager . getBatcher ( ) ) . thenReturn ( batcher ) ; when ( batcher . createBatch ( ) ) . thenReturn ( batch ) ; io . undertow . server . session . Session sessionAdapter = this . adapter . getSession ( exchange , config ) ; "<AssertPlaceHolder>" ; verify ( batch ) . close ( ) ; verify ( batcher , never ( ) ) . suspendBatch ( ) ; } getSession ( io . undertow . server . HttpServerExchange , io . undertow . server . session . SessionConfig ) { if ( exchange != null ) { io . undertow . server . session . Session attachedSession = exchange . getAttachment ( this . key ) ; if ( attachedSession != null ) { return attachedSession ; } } if ( config == null ) { throw UndertowMessages . MESSAGES . couldNotFindSessionCookieConfig ( ) ; } java . lang . String id = config . findSessionId ( exchange ) ; if ( id == null ) { return null ; } if ( ! ( org . wildfly . clustering . web . undertow . session . DistributableSessionManager . IDENTIFIER_SERIALIZER . validate ( id ) ) ) { return null ; } boolean close = true ; java . util . function . Consumer < io . undertow . server . HttpServerExchange > closeTask = this . getSessionCloseTask ( ) ; try { org . wildfly . clustering . ee . Batcher < org . wildfly . clustering . ee . Batch > batcher = this . manager . getBatcher ( ) ; org . wildfly . clustering . ee . Batch batch = batcher . createBatch ( ) ; try { org . wildfly . clustering . web . session . Session < org . wildfly . clustering . web . undertow . session . LocalSessionContext > session = this . manager . findSession ( id ) ; if ( session == null ) { return null ; } io . undertow . server . session . Session result = new org . wildfly . clustering . web . undertow . session . DistributableSession ( this , session , config , batcher . suspendBatch ( ) , closeTask ) ; if ( exchange != null ) { exchange . putAttachment ( this . key , result ) ; } close = false ; return result ; } catch ( java . lang . RuntimeException | java . lang . Error e ) { batch . discard ( ) ; throw e ; } finally { if ( close ) { batch . close ( ) ; } } } finally { if ( close ) { closeTask . accept ( exchange ) ; } } }
org . junit . Assert . assertNull ( sessionAdapter )
when_removeNonEmpty_then_removeItem ( ) { inbox . remove ( ) ; "<AssertPlaceHolder>" ; } isEmpty ( ) { return queue . isEmpty ( ) ; }
org . junit . Assert . assertTrue ( inbox . isEmpty ( ) )
testGetParametersWithDefaultEntity ( ) { org . lnu . is . domain . subject . Subject entity = new org . lnu . is . domain . subject . Subject ( ) ; java . util . Map < java . lang . String , java . lang . Object > expected = new java . util . HashMap < java . lang . String , java . lang . Object > ( ) ; expected . put ( "status" , RowStatus . ACTIVE ) ; expected . put ( "userGroups" , groups ) ; java . util . Map < java . lang . String , java . lang . Object > actual = unit . getParameters ( entity ) ; "<AssertPlaceHolder>" ; } getParameters ( org . springframework . web . context . request . NativeWebRequest ) { java . util . Map < java . lang . String , java . lang . Object > resultMap = new java . util . HashMap < java . lang . String , java . lang . Object > ( ) ; java . util . Map < java . lang . String , java . lang . String > pathVariables = ( ( java . util . Map < java . lang . String , java . lang . String > ) ( webRequest . getAttribute ( HandlerMapping . URI_TEMPLATE_VARIABLES_ATTRIBUTE , RequestAttributes . SCOPE_REQUEST ) ) ) ; java . util . Map < java . lang . String , java . lang . Object > requestParams = getRequestParameterMap ( webRequest ) ; for ( Map . Entry < java . lang . String , java . lang . Object > entry : requestParams . entrySet ( ) ) { resultMap . put ( entry . getKey ( ) , entry . getValue ( ) ) ; } resultMap . putAll ( pathVariables ) ; return resultMap ; }
org . junit . Assert . assertEquals ( expected , actual )
givenPath_whenChecksIfFile_thenCorrect ( ) { boolean isFile = com . baeldung . java . nio2 . attributes . BasicAttribsIntegrationTest . basicAttribs . isRegularFile ( ) ; "<AssertPlaceHolder>" ; }
org . junit . Assert . assertFalse ( isFile )
testCreateExportManifestDto ( ) { "<AssertPlaceHolder>" ; } createExportManifestDto ( ) { return new org . pentaho . platform . plugin . services . importexport . exportManifest . bindings . ExportManifestDto ( ) ; }
org . junit . Assert . assertNotNull ( factory . createExportManifestDto ( ) )
testBadConnectionString ( ) { com . streamsets . pipeline . lib . jdbc . HikariPoolConfigBean configBean = createConfigBean ( "some<sp>bad<sp>connection<sp>string" , username , password ) ; configBean . driverClassName = "org.h2.Driver" ; com . streamsets . pipeline . stage . origin . jdbc . JdbcSource origin = new com . streamsets . pipeline . stage . origin . jdbc . JdbcSource ( true , query , initialOffset , "P_ID" , false , "" , 1000 , JdbcRecordType . LIST_MAP , new com . streamsets . pipeline . stage . origin . jdbc . CommonSourceConfigBean ( queriesPerSecond , com . streamsets . pipeline . stage . origin . jdbc . TestJdbcSource . BATCH_SIZE , com . streamsets . pipeline . stage . origin . jdbc . TestJdbcSource . CLOB_SIZE , com . streamsets . pipeline . stage . origin . jdbc . TestJdbcSource . CLOB_SIZE ) , false , "" , configBean , com . streamsets . pipeline . lib . jdbc . UnknownTypeAction . STOP_PIPELINE , queryInterval ) ; com . streamsets . pipeline . sdk . SourceRunner runner = new com . streamsets . pipeline . sdk . SourceRunner . Builder ( com . streamsets . pipeline . stage . origin . jdbc . JdbcDSource . class , origin ) . addOutputLane ( "lane" ) . build ( ) ; java . util . List < com . streamsets . pipeline . api . Stage . ConfigIssue > issues = runner . runValidateConfigs ( ) ; "<AssertPlaceHolder>" ; } size ( ) { return delegate . size ( ) ; }
org . junit . Assert . assertEquals ( 1 , issues . size ( ) )
testJmsProducerIdFromJmsConnectionId ( ) { org . apache . qpid . jms . meta . JmsProducerId id1 = new org . apache . qpid . jms . meta . JmsProducerId ( firstId . getConnectionId ( ) , 1 , 1 ) ; org . apache . qpid . jms . meta . JmsProducerId id2 = new org . apache . qpid . jms . meta . JmsProducerId ( secondId . getConnectionId ( ) , 1 , 1 ) ; "<AssertPlaceHolder>" ; } getValue ( ) { return value ; }
org . junit . Assert . assertSame ( id1 . getValue ( ) , id2 . getValue ( ) )
testRemove ( ) { com . liferay . layout . page . template . model . LayoutPageTemplateEntry newLayoutPageTemplateEntry = addLayoutPageTemplateEntry ( ) ; _persistence . remove ( newLayoutPageTemplateEntry ) ; com . liferay . layout . page . template . model . LayoutPageTemplateEntry existingLayoutPageTemplateEntry = _persistence . fetchByPrimaryKey ( newLayoutPageTemplateEntry . getPrimaryKey ( ) ) ; "<AssertPlaceHolder>" ; } getPrimaryKey ( ) { return _amImageEntryId ; }
org . junit . Assert . assertNull ( existingLayoutPageTemplateEntry )
testPojo ( ) { java . lang . String contractUUID = java . util . UUID . randomUUID ( ) . toString ( ) ; eu . atos . sla . datamodel . bean . Breach breach = new eu . atos . sla . datamodel . bean . Breach ( ) ; breach . setAgreementUuid ( contractUUID ) ; "<AssertPlaceHolder>" ; } getAgreementUuid ( ) { return contractUUID ; }
org . junit . Assert . assertEquals ( contractUUID , breach . getAgreementUuid ( ) )
testCCAClientSessionStartsIdleState ( ) { org . jdiameter . api . cca . ClientCCASession session = getAppSession ( org . jdiameter . api . cca . ClientCCASession . class , new org . jdiameter . common . impl . app . cca . CCASessionFactoryImpl ( org . mobicents . diameter . stack . sessions . SessionsWithAppIdTest . sessionFactory ) , org . mobicents . diameter . stack . sessions . SessionsWithAppIdTest . CCA_APPID ) ; org . jdiameter . common . api . app . cca . ClientCCASessionState state = session . getState ( org . jdiameter . common . api . app . cca . ClientCCASessionState . class ) ; "<AssertPlaceHolder>" ; } getState ( java . lang . Class ) { switch ( stack . getState ( ) ) { case IDLE : return ( ( E ) ( org . jdiameter . api . PeerState . DOWN ) ) ; case CONFIGURED : return ( ( E ) ( org . jdiameter . api . PeerState . INITIAL ) ) ; case STARTED : return ( ( E ) ( org . jdiameter . api . PeerState . OKAY ) ) ; case STOPPED : return ( ( E ) ( org . jdiameter . api . PeerState . SUSPECT ) ) ; } return ( ( E ) ( org . jdiameter . api . PeerState . DOWN ) ) ; }
org . junit . Assert . assertEquals ( state . IDLE , state )
testException ( ) { float xTrans = - 3.2F ; float yTrans = 0 ; int dataType = java . awt . image . DataBuffer . TYPE_BYTE ; byte imageValueB = 127 ; java . awt . image . RenderedImage testIMG = createTestImage ( dataType , it . geosolutions . jaiext . translate . DEFAULT_WIDTH , it . geosolutions . jaiext . translate . DEFAULT_HEIGHT , imageValueB , false ) ; try { javax . media . jai . PlanarImage translatedIMG = it . geosolutions . jaiext . translate . TranslateDescriptor . create ( testIMG , xTrans , yTrans , null , null ) ; } catch ( java . lang . Exception e ) { java . lang . String exception = "Translate<sp>Operation<sp>can<sp>be<sp>used<sp>only<sp>for<sp>integral<sp>displacements.<sp>If" + "a<sp>layout<sp>is<sp>present,<sp>the<sp>translate<sp>operation<sp>cannot<sp>deal<sp>with" ; "<AssertPlaceHolder>" ; } } create ( java . awt . image . RenderedImage , java . lang . Float , java . lang . Float , javax . media . jai . Interpolation , java . awt . RenderingHints ) { javax . media . jai . ParameterBlockJAI pb = new javax . media . jai . ParameterBlockJAI ( "Translate" , javax . media . jai . registry . RenderedRegistryMode . MODE_NAME ) ; pb . setSource ( "source0" , source0 ) ; pb . setParameter ( "xTrans" , xTrans ) ; pb . setParameter ( "yTrans" , yTrans ) ; pb . setParameter ( "interpolation" , interpolation ) ; javax . media . jai . ImageLayout layout = com . sun . media . jai . opimage . RIFUtil . getImageLayoutHint ( hints ) ; if ( ( ( ( java . lang . Math . abs ( ( xTrans - ( xTrans . intValue ( ) ) ) ) ) > ( it . geosolutions . jaiext . translate . TranslateDescriptor . TOLERANCE ) ) || ( ( java . lang . Math . abs ( ( yTrans - ( yTrans . intValue ( ) ) ) ) ) > ( it . geosolutions . jaiext . translate . TranslateDescriptor . TOLERANCE ) ) ) || ( layout != null ) ) { throw new java . lang . UnsupportedOperationException ( ( "Translate<sp>Operation<sp>can<sp>be<sp>used<sp>only<sp>for<sp>integral<sp>displacements.<sp>If" + "a<sp>layout<sp>is<sp>present,<sp>the<sp>translate<sp>operation<sp>cannot<sp>deal<sp>with" ) ) ; } return javax . media . jai . JAI . create ( "Translate" , pb , hints ) ; }
org . junit . Assert . assertEquals ( exception , e . getMessage ( ) )
testTraceAndMeasureConstructorJoinPointInMemoryMetric ( ) { setJmxStatisticsEnabled ( true ) ; javax . management . MBeanServer mbs = java . lang . management . ManagementFactory . getPlatformMBeanServer ( ) ; classWithAspect = new com . ibm . logger . FakeClassWithAspect ( "hello" ) ; java . lang . String id = "public<sp>com.ibm.logger.FakeClassWithAspect(java.lang.String)" ; javax . management . ObjectName channelMXBeanName = com . ibm . logger . PerformanceLogger . channelMXBeanName ( id , PerformanceLogger . TOTAL_INTERVAL_NAME , com . ibm . logger . stats . TotalLogEntry . class ) ; com . ibm . logger . jmx . TimeIntervalLogEntryMXBean logEntry = javax . management . JMX . newMBeanProxy ( mbs , channelMXBeanName , com . ibm . logger . jmx . TimeIntervalLogEntryMXBean . class ) ; long numCalls = logEntry . getCallCount ( ) ; classWithAspect = new com . ibm . logger . FakeClassWithAspect ( "hello" ) ; long newNumCalls = logEntry . getCallCount ( ) ; "<AssertPlaceHolder>" ; } getCallCount ( ) { return ( ( java . lang . Number ) ( fieldValues . get ( 1 ) ) ) . longValue ( ) ; }
org . junit . Assert . assertEquals ( ( numCalls + 1 ) , newNumCalls )
testConnectServerAlreadyConnected ( ) { final com . allanbank . mongodb . MongoClientConfiguration config = new com . allanbank . mongodb . MongoClientConfiguration ( ) ; final com . allanbank . mongodb . client . state . Cluster cluster = new com . allanbank . mongodb . client . state . Cluster ( config , com . allanbank . mongodb . client . ClusterType . SHARDED ) ; final com . allanbank . mongodb . client . state . Server server = cluster . add ( "localhost:27017" ) ; final com . allanbank . mongodb . client . connection . Connection mockConnection = createMock ( com . allanbank . mongodb . client . connection . Connection . class ) ; final com . allanbank . mongodb . client . connection . Connection mockConnection2 = createMock ( com . allanbank . mongodb . client . connection . Connection . class ) ; final com . allanbank . mongodb . client . state . ServerSelector mockSelector = createMock ( com . allanbank . mongodb . client . state . ServerSelector . class ) ; final com . allanbank . mongodb . client . connection . proxy . ProxiedConnectionFactory mockFactory = createMock ( com . allanbank . mongodb . client . connection . proxy . ProxiedConnectionFactory . class ) ; final org . easymock . Capture < java . beans . PropertyChangeListener > listener = new org . easymock . Capture < java . beans . PropertyChangeListener > ( ) ; mockConnection . addPropertyChangeListener ( capture ( listener ) ) ; expectLastCall ( ) ; expect ( mockFactory . connect ( server , config ) ) . andReturn ( mockConnection2 ) ; mockConnection2 . shutdown ( true ) ; expectLastCall ( ) ; replay ( mockConnection , mockConnection2 , mockSelector , mockFactory ) ; final com . allanbank . mongodb . client . connection . sharded . ShardedConnection conn = new com . allanbank . mongodb . client . connection . sharded . ShardedConnection ( mockConnection , server , cluster , mockSelector , mockFactory , config ) ; "<AssertPlaceHolder>" ; verify ( mockConnection , mockConnection2 , mockSelector , mockFactory ) ; reset ( mockConnection , mockConnection2 , mockSelector , mockFactory ) ; mockConnection . removePropertyChangeListener ( listener . getValue ( ) ) ; expectLastCall ( ) ; mockConnection . close ( ) ; replay ( mockConnection , mockConnection2 , mockSelector , mockFactory ) ; conn . close ( ) ; verify ( mockConnection , mockConnection2 , mockSelector , mockFactory ) ; } connect ( com . allanbank . mongodb . client . state . Server ) { com . allanbank . mongodb . client . connection . Connection conn = null ; try { conn = myFactory . connect ( server , myConfig ) ; conn = cacheConnection ( server , conn ) ; } catch ( final java . io . IOException e ) { com . allanbank . mongodb . client . connection . sharded . ShardedConnection . LOG . info ( e , "Could<sp>not<sp>connect<sp>to<sp>the<sp>server<sp>'{}':<sp>{}" , server . getCanonicalName ( ) , e . getMessage ( ) ) ; } return conn ; }
org . junit . Assert . assertThat ( conn . connect ( server ) , org . hamcrest . Matchers . is ( mockConnection ) )
get_A$Request_jpg ( ) { com . m3 . curly . com . m3 . curly . Request request = new com . m3 . curly . com . m3 . curly . Request ( "http://localhost:8801/images/self_face.jpg" ) ; com . m3 . curly . com . m3 . curly . Response response = com . m3 . curly . com . m3 . curly . HTTP . get ( request ) ; "<AssertPlaceHolder>" ; java . io . FileOutputStream os = new java . io . FileOutputStream ( "target/self_face.jpg" ) ; os . write ( response . getBody ( ) ) ; os . close ( ) ; } getStatus ( ) { return status ; }
org . junit . Assert . assertThat ( response . getStatus ( ) , is ( 200 ) )
Should_createDumpOfUnderlyingSequence ( ) { info . smart_tools . smartactors . iobject . iobject . IObject options = mock ( info . smart_tools . smartactors . iobject . iobject . IObject . class ) ; info . smart_tools . smartactors . iobject . iobject . IObject dump = mock ( info . smart_tools . smartactors . iobject . iobject . IObject . class ) ; when ( dumpCreationStrategy . resolve ( same ( sequenceMock ) , same ( options ) ) ) . thenReturn ( dump ) ; info . smart_tools . smartactors . iobject . iobject . IObject doneDump = new info . smart_tools . smartactors . debugger . sequence_impl . DebuggerSequenceImpl ( sequenceMock , debuggerAddress ) . dump ( options ) ; "<AssertPlaceHolder>" ; } dump ( info . smart_tools . smartactors . iobject . iobject . IObject ) { return getChainDescription ( ) ; }
org . junit . Assert . assertSame ( dump , doneDump )
testNewAminoAcid ( ) { org . openscience . cdk . interfaces . IChemObjectBuilder builder = org . openscience . cdk . AbstractChemObjectBuilderTest . rootObject . getBuilder ( ) ; org . openscience . cdk . interfaces . IAminoAcid aa = builder . newInstance ( org . openscience . cdk . interfaces . IAminoAcid . class ) ; "<AssertPlaceHolder>" ; } getBuilder ( ) { return org . openscience . cdk . DefaultChemObjectBuilder . getInstance ( ) ; }
org . junit . Assert . assertNotNull ( aa )
testCount ( ) { int count = getManager ( ) . count ( ) ; "<AssertPlaceHolder>" ; } count ( ) { return tiposAsuntos . size ( ) ; }
org . junit . Assert . assertTrue ( ( count > 0 ) )
makeFailurePropagatingCompleteFutureAlreadyDoneFuturesTest ( ) { java . util . List < org . threadly . concurrent . future . ListenableFuture < ? > > futures = new java . util . ArrayList ( TEST_QTY ) ; for ( int i = 0 ; i < ( TEST_QTY ) ; i ++ ) { org . threadly . concurrent . future . SettableListenableFuture < ? > future = new org . threadly . concurrent . future . SettableListenableFuture ( ) ; future . setResult ( null ) ; futures . add ( future ) ; } org . threadly . concurrent . future . ListenableFuture < ? > f = org . threadly . concurrent . future . FutureUtils . makeFailurePropagatingCompleteFuture ( futures ) ; "<AssertPlaceHolder>" ; } isDone ( ) { return true ; }
org . junit . Assert . assertTrue ( f . isDone ( ) )
testAccept ( ) { com . thoughtworks . qdox . model . expression . ExpressionVisitor visitor = mock ( com . thoughtworks . qdox . model . expression . ExpressionVisitor . class ) ; com . thoughtworks . qdox . model . expression . GreaterEquals expr = new com . thoughtworks . qdox . model . expression . GreaterEquals ( null , null ) ; java . lang . Object visitResult = new java . lang . Object ( ) ; when ( visitor . visit ( expr ) ) . thenReturn ( visitResult ) ; "<AssertPlaceHolder>" ; } accept ( com . thoughtworks . qdox . model . expression . ExpressionVisitor ) { return visitor . visit ( this ) ; }
org . junit . Assert . assertSame ( expr . accept ( visitor ) , visitResult )
testInstanceDoesNotEqualNull ( ) { "<AssertPlaceHolder>" ; } equals ( java . lang . Object ) { if ( ( this ) == obj ) { return true ; } if ( obj == null ) { return false ; } if ( ! ( obj instanceof org . springframework . roo . addon . plural . addon . PluralMetadata ) ) { return false ; } final org . springframework . roo . addon . plural . addon . PluralMetadata other = ( ( org . springframework . roo . addon . plural . addon . PluralMetadata ) ( obj ) ) ; return org . apache . commons . lang3 . StringUtils . equals ( plural , other . getPlural ( ) ) ; }
org . junit . Assert . assertFalse ( new org . springframework . roo . project . Configuration ( mock ( org . w3c . dom . Element . class ) ) . equals ( null ) )
parsePersoonslijstOk ( ) { final nl . bzk . migratiebrp . bericht . model . sync . impl . SynchroniseerNaarBrpVerzoekBericht verzoek = new nl . bzk . migratiebrp . bericht . model . sync . impl . SynchroniseerNaarBrpVerzoekBericht ( ) ; verzoek . setMessageId ( "verzoek-message-id" ) ; verzoek . setLo3PersoonslijstAlsTeletexString ( maakLo3Pl ( ) ) ; org . mockito . Mockito . when ( syntaxControle . controleer ( org . mockito . Matchers . anyListOf ( nl . bzk . migratiebrp . conversie . model . lo3 . syntax . Lo3CategorieWaarde . class ) ) ) . thenAnswer ( new nl . bzk . migratiebrp . synchronisatie . runtime . service . synchronisatie . AbstractSynchronisatieVerwerkerImplTest . SimpleSyntaxAnswer ( ) ) ; final nl . bzk . migratiebrp . conversie . model . lo3 . Lo3Persoonslijst result = subject . parsePersoonslijst ( logging , verzoek ) ; "<AssertPlaceHolder>" ; org . mockito . Mockito . verify ( syntaxControle ) . controleer ( org . mockito . Matchers . anyListOf ( nl . bzk . migratiebrp . conversie . model . lo3 . syntax . Lo3CategorieWaarde . class ) ) ; org . mockito . Mockito . verifyNoMoreInteractions ( syntaxControle , preconditieService , converteerLo3NaarBrpService ) ; } controleer ( java . util . List ) { final nl . bzk . migratiebrp . synchronisatie . runtime . service . synchronisatie . controle . logging . ControleLogging logging = new nl . bzk . migratiebrp . synchronisatie . runtime . service . synchronisatie . controle . logging . ControleLogging ( nl . bzk . migratiebrp . synchronisatie . runtime . service . synchronisatie . controle . logging . ControleMelding . LIJST_CONTROLE_EEN ) ; final boolean result = ( lijst != null ) && ( ( lijst . size ( ) ) == 1 ) ; logging . logResultaat ( result ) ; return result ; }
org . junit . Assert . assertNotNull ( result )
testPrefetchInDurableSubscription ( ) { final org . apache . activemq . command . ActiveMQTopic topic = new org . apache . activemq . command . ActiveMQTopic ( "TestTopic" ) ; final javax . jms . TopicConnection initialSubConnection = connectionFactory . createTopicConnection ( ) ; initialSubConnection . setClientID ( "TestClient" ) ; initialSubConnection . start ( ) ; final javax . jms . TopicSession initialSubSession = initialSubConnection . createTopicSession ( false , Session . CLIENT_ACKNOWLEDGE ) ; final javax . jms . TopicSubscriber initialSubscriber = initialSubSession . createDurableSubscriber ( topic , "TestSubscriber" ) ; initialSubscriber . close ( ) ; initialSubSession . close ( ) ; initialSubConnection . close ( ) ; final javax . jms . TopicConnection publisherConnection = connectionFactory . createTopicConnection ( ) ; publisherConnection . start ( ) ; final javax . jms . TopicSession publisherSession = publisherConnection . createTopicSession ( false , Session . AUTO_ACKNOWLEDGE ) ; final javax . jms . TopicPublisher publisher = publisherSession . createPublisher ( topic ) ; for ( int i = 1 ; i <= ( org . apache . activemq . bugs . AMQ3961Test . MESSAGE_COUNT ) ; i ++ ) { final javax . jms . Message msg = publisherSession . createTextMessage ( ( "Message<sp>#" + i ) ) ; publisher . publish ( msg ) ; } publisher . close ( ) ; publisherSession . close ( ) ; publisherConnection . close ( ) ; final javax . jms . TopicConnection connection = connectionFactory . createTopicConnection ( ) ; connection . setClientID ( "TestClient" ) ; connection . start ( ) ; final org . apache . activemq . bugs . AMQ3961Test . TestServerSessionPool pool = new org . apache . activemq . bugs . AMQ3961Test . TestServerSessionPool ( connection ) ; final javax . jms . ConnectionConsumer connectionConsumer = connection . createDurableConnectionConsumer ( topic , "TestSubscriber" , null , pool , 1 ) ; while ( true ) { int lastMsgCount = 0 ; int msgCount = 0 ; do { lastMsgCount = msgCount ; java . lang . Thread . sleep ( 200L ) ; synchronized ( processedSessions ) { msgCount = processedSessions . size ( ) ; } } while ( lastMsgCount < msgCount ) ; if ( lastMsgCount == 0 ) { break ; } final java . util . LinkedList < org . apache . activemq . bugs . AMQ3961Test . TestServerSession > collected ; synchronized ( processedSessions ) { collected = new java . util . LinkedList < org . apache . activemq . bugs . AMQ3961Test . TestServerSession > ( processedSessions ) ; processedSessions . clear ( ) ; } final java . util . Iterator < org . apache . activemq . bugs . AMQ3961Test . TestServerSession > sessions = collected . iterator ( ) ; while ( sessions . hasNext ( ) ) { final org . apache . activemq . bugs . AMQ3961Test . TestServerSession session = sessions . next ( ) ; committedSessions . add ( session ) ; session . getSession ( ) . commit ( ) ; session . getSession ( ) . close ( ) ; } } connectionConsumer . close ( ) ; final javax . jms . TopicSession finalSession = connection . createTopicSession ( false , Session . AUTO_ACKNOWLEDGE ) ; finalSession . unsubscribe ( "TestSubscriber" ) ; finalSession . close ( ) ; connection . close ( ) ; "<AssertPlaceHolder>" ; } size ( ) { return map . size ( ) ; }
org . junit . Assert . assertEquals ( org . apache . activemq . bugs . AMQ3961Test . MESSAGE_COUNT , committedSessions . size ( ) )
testIsUpdatable ( ) { com . j256 . ormlite . dao . Dao < com . j256 . ormlite . dao . Foo , java . lang . Integer > dao = createDao ( com . j256 . ormlite . dao . Foo . class , false ) ; "<AssertPlaceHolder>" ; } isUpdatable ( ) { return tableInfo . isUpdatable ( ) ; }
org . junit . Assert . assertTrue ( dao . isUpdatable ( ) )
testGetMaskForDepricatedTypeFileAndTable ( ) { long expected = ( ViewTypeMask . File . getMask ( ) ) | ( ViewTypeMask . Table . getMask ( ) ) ; "<AssertPlaceHolder>" ; } getMaskForDepricatedType ( org . sagebionetworks . repo . model . table . ViewType ) { switch ( oldType ) { case file : return org . sagebionetworks . repo . model . table . ViewTypeMask . File . bitMask ; case project : return org . sagebionetworks . repo . model . table . ViewTypeMask . Project . bitMask ; case file_and_table : return ( org . sagebionetworks . repo . model . table . ViewTypeMask . File . bitMask ) | ( org . sagebionetworks . repo . model . table . ViewTypeMask . Table . bitMask ) ; default : throw new java . lang . IllegalArgumentException ( ( "Unknown<sp>type:<sp>" + oldType ) ) ; } }
org . junit . Assert . assertEquals ( expected , org . sagebionetworks . repo . model . table . ViewTypeMask . getMaskForDepricatedType ( ViewType . file_and_table ) )
testFactoryPresent ( ) { "<AssertPlaceHolder>" ; }
org . junit . Assert . assertNotNull ( instance )
testNextSetBitBeforeStart ( ) { org . roaringbitmap . BitmapContainer container = new org . roaringbitmap . BitmapContainer ( org . roaringbitmap . TestBitmapContainer . evenBits ( ) , ( 1 << 15 ) ) ; container . bitmap [ 0 ] = 0L ; container . cardinality -= 32 ; "<AssertPlaceHolder>" ; } nextSetBit ( int ) { int x = i > > 6 ; long w = bitmap . get ( x ) ; w >>>= i ; if ( w != 0 ) { return i + ( java . lang . Long . numberOfTrailingZeros ( w ) ) ; } for ( ++ x ; x < ( ( org . roaringbitmap . buffer . MappeableBitmapContainer . MAX_CAPACITY ) / 64 ) ; ++ x ) { long X = bitmap . get ( x ) ; if ( X != 0 ) { return ( x * 64 ) + ( java . lang . Long . numberOfTrailingZeros ( X ) ) ; } } return - 1 ; }
org . junit . Assert . assertEquals ( 64 , container . nextSetBit ( 1 ) )
testCreate_shouldStoreElement ( ) { org . openengsb . core . persistence . PersistenceTestBean additional = new org . openengsb . core . persistence . PersistenceTestBean ( "Test" , 1 , null ) ; persistence . create ( additional ) ; java . util . List < org . openengsb . core . persistence . PersistenceTestBean > results = persistence . query ( new org . openengsb . core . persistence . PersistenceTestBean ( "Test" , null , null ) ) ; "<AssertPlaceHolder>" ; } contains ( org . openengsb . core . edbi . jdbc . sql . Column ) { return columns . containsValue ( column ) ; }
org . junit . Assert . assertThat ( results . contains ( additional ) , org . hamcrest . CoreMatchers . is ( true ) )
testCompareSmartUris ( ) { final org . apache . rya . indexing . entity . model . Entity entity1 = org . apache . rya . indexing . smarturi . duplication . DuplicateDataDetectorIT . createBobEntity ( ) ; final org . apache . rya . indexing . entity . model . Entity entity2 = new org . apache . rya . indexing . entity . model . Entity . Builder ( entity1 ) . build ( ) ; final org . apache . rya . indexing . smarturi . duplication . DuplicateDataDetector duplicateDataDetector = new org . apache . rya . indexing . smarturi . duplication . DuplicateDataDetector ( ) ; final boolean areDuplicates = duplicateDataDetector . compareSmartUris ( entity1 . getSmartUri ( ) , entity2 . getSmartUri ( ) ) ; "<AssertPlaceHolder>" ; } getSmartUri ( ) { return smartUri ; }
org . junit . Assert . assertTrue ( areDuplicates )
testThatCanReflectMethod ( ) { java . lang . reflect . Method m = new net . vidageek . mirror . reflect . DefaultMethodReflector ( provider , "methodWithOneArg" , net . vidageek . mirror . fixtures . MethodFixture . class ) . withArgs ( java . lang . String . class ) ; "<AssertPlaceHolder>" ; m . invoke ( new net . vidageek . mirror . fixtures . MethodFixture ( ) , "Any<sp>string" ) ; } withArgs ( java . lang . Object [ ] ) { if ( ( ( target ) == null ) && ( ! ( java . lang . reflect . Modifier . isStatic ( method . getModifiers ( ) ) ) ) ) { throw new java . lang . IllegalStateException ( ( ( ( "attempt<sp>to<sp>call<sp>instance<sp>method<sp>" + ( method . getName ( ) ) ) + "<sp>on<sp>class<sp>" ) + ( clazz . getName ( ) ) ) ) ; } net . vidageek . mirror . provider . MethodReflectionProvider methodReflectionProvider = provider . getMethodReflectionProvider ( target , clazz , method ) ; methodReflectionProvider . setAccessible ( ) ; return methodReflectionProvider . invoke ( args ) ; }
org . junit . Assert . assertNotNull ( m )
testGetUserNamespaceAuthorization ( ) { org . finra . herd . model . api . xml . UserNamespaceAuthorizationKey key = new org . finra . herd . model . api . xml . UserNamespaceAuthorizationKey ( USER_ID , NAMESPACE ) ; org . finra . herd . model . jpa . UserNamespaceAuthorizationEntity userNamespaceAuthorizationEntity = userNamespaceAuthorizationDaoTestHelper . createUserNamespaceAuthorizationEntity ( key , org . finra . herd . service . SUPPORTED_NAMESPACE_PERMISSIONS ) ; org . finra . herd . model . api . xml . UserNamespaceAuthorization resultUserNamespaceAuthorization = userNamespaceAuthorizationService . getUserNamespaceAuthorization ( key ) ; "<AssertPlaceHolder>" ; } getId ( ) { return id ; }
org . junit . Assert . assertEquals ( new org . finra . herd . model . api . xml . UserNamespaceAuthorization ( userNamespaceAuthorizationEntity . getId ( ) , key , SUPPORTED_NAMESPACE_PERMISSIONS ) , resultUserNamespaceAuthorization )
testFetchByPrimaryKeyExisting ( ) { com . liferay . portal . kernel . model . PasswordPolicy newPasswordPolicy = addPasswordPolicy ( ) ; com . liferay . portal . kernel . model . PasswordPolicy existingPasswordPolicy = _persistence . fetchByPrimaryKey ( newPasswordPolicy . getPrimaryKey ( ) ) ; "<AssertPlaceHolder>" ; } getPrimaryKey ( ) { return _amImageEntryId ; }
org . junit . Assert . assertEquals ( existingPasswordPolicy , newPasswordPolicy )
testListWithRecords ( ) { int total = 5 ; for ( int i = 0 ; i < total ; i ++ ) { com . liferay . data . engine . service . test . DEDataEngineTestUtil . insertDEDataDefinition ( _adminUser , _group , _deDataDefinitionService ) ; } java . util . List < com . liferay . data . engine . model . DEDataDefinition > deDataDefinitions = listDEDataDefinitions ( _group , null , null ) ; "<AssertPlaceHolder>" ; } toString ( ) { com . liferay . petra . string . StringBundler sb = new com . liferay . petra . string . StringBundler ( 23 ) ; sb . append ( ",<sp>width=" 1 ) ; sb . append ( uuid ) ; sb . append ( ",<sp>width=" 0 ) ; sb . append ( amImageEntryId ) ; sb . append ( ",<sp>groupId=" ) ; sb . append ( groupId ) ; sb . append ( ",<sp>companyId=" ) ; sb . append ( companyId ) ; sb . append ( ",<sp>createDate=" ) ; sb . append ( createDate ) ; sb . append ( ",<sp>configurationUuid=" ) ; sb . append ( configurationUuid ) ; sb . append ( ",<sp>fileVersionId=" ) ; sb . append ( fileVersionId ) ; sb . append ( ",<sp>mimeType=" ) ; sb . append ( mimeType ) ; sb . append ( ",<sp>height=" ) ; sb . append ( height ) ; sb . append ( ",<sp>width=" ) ; sb . append ( width ) ; sb . append ( ",<sp>size=" ) ; sb . append ( size ) ; sb . append ( "}" ) ; return sb . toString ( ) ; }
org . junit . Assert . assertEquals ( deDataDefinitions . toString ( ) , 5 , deDataDefinitions . size ( ) )
testConvertWSDLwithMultipleNamespaceScenario1 ( ) { testingdir . ensureEmpty ( ) ; java . io . File wsdl = org . ebayopensource . turmeric . tools . TestResourceUtil . copyResource ( "org/ebayopensource/turmeric/test/tools/codegen/data/Testing.wsdl" , testingdir , "meta-src" ) ; java . io . File wsdlout = new java . io . File ( wsdl . getParentFile ( ) , "Test.wsdl" ) ; org . ebayopensource . turmeric . tools . codegen . external . wsdl . parser . WSDLConversionToSingleNamespace wsdlconv = new org . ebayopensource . turmeric . tools . codegen . external . wsdl . parser . WSDLConversionToSingleNamespace ( ) ; wsdlconv . convertWSDL ( wsdl . getAbsolutePath ( ) , wsdlout . getAbsolutePath ( ) ) ; int totalSchemas = org . ebayopensource . turmeric . tools . codegen . WSDLConversionTestHelper . getNumberOfschemaFromWSDL ( wsdlout ) ; "<AssertPlaceHolder>" ; } getNumberOfschemaFromWSDL ( java . io . File ) { int no_Schema = 0 ; javax . xml . parsers . DocumentBuilderFactory factory = javax . xml . parsers . DocumentBuilderFactory . newInstance ( ) ; try { javax . xml . parsers . DocumentBuilder builder = factory . newDocumentBuilder ( ) ; org . w3c . dom . Document document = builder . parse ( wsdlFile ) ; org . w3c . dom . NodeList nodelist = document . getElementsByTagName ( "*" ) ; for ( int i = 0 ; i < ( nodelist . getLength ( ) ) ; i ++ ) { org . w3c . dom . Node node = nodelist . item ( i ) ; if ( node . getNodeName ( ) . contains ( org . ebayopensource . turmeric . tools . codegen . WSDLConversionTestHelper . XML_SCHEMA ) ) { no_Schema ++ ; } } return no_Schema ; } catch ( java . lang . Exception e ) { org . ebayopensource . turmeric . tools . codegen . WSDLConversionTestHelper . s_Logger . log ( Level . SEVERE , e . getMessage ( ) ) ; } return 0 ; }
org . junit . Assert . assertEquals ( 5 , totalSchemas )
testProcessorsToSteps ( ) { org . apache . taverna . scufl2 . api . core . Workflow workflow = new org . apache . taverna . scufl2 . api . core . Workflow ( ) ; workflow . setName ( "workflowName" ) ; workflow . getProcessors ( ) . add ( new org . apache . taverna . scufl2 . api . core . Processor ( workflow , "processor1" ) ) ; workflow . getProcessors ( ) . add ( new org . apache . taverna . scufl2 . api . core . Processor ( workflow , "processor2" ) ) ; org . apache . taverna . scufl2 . cwl . components . Step step1 = new org . apache . taverna . scufl2 . cwl . components . Step ( ) ; step1 . setRun ( new org . apache . taverna . scufl2 . cwl . components . Reference ( "processor1" ) ) ; org . apache . taverna . scufl2 . cwl . components . Step step2 = new org . apache . taverna . scufl2 . cwl . components . Step ( ) ; step2 . setRun ( new org . apache . taverna . scufl2 . cwl . components . Reference ( "processor2" ) ) ; java . util . Set < org . apache . taverna . scufl2 . cwl . components . Step > steps = new java . util . HashSet ( ) ; steps . add ( step1 ) ; steps . add ( step2 ) ; org . apache . taverna . scufl2 . cwl . TavernaConverter converter = new org . apache . taverna . scufl2 . cwl . TavernaConverter ( ) ; java . util . Set < org . apache . taverna . scufl2 . cwl . components . Step > convertedSteps = converter . convertProcessors ( workflow ) ; "<AssertPlaceHolder>" ; } convertProcessors ( org . apache . taverna . scufl2 . api . core . Workflow ) { java . util . Set < org . apache . taverna . scufl2 . cwl . components . Step > result = new java . util . HashSet ( ) ; org . apache . taverna . scufl2 . api . common . NamedSet < org . apache . taverna . scufl2 . api . core . Processor > processors = workflow . getProcessors ( ) ; java . util . Set < org . apache . taverna . scufl2 . api . core . DataLink > dataLinks = workflow . getDataLinks ( ) ; java . util . Map < java . lang . String , org . apache . taverna . scufl2 . api . port . SenderPort > portNameToSource = new java . util . HashMap ( ) ; for ( org . apache . taverna . scufl2 . api . core . DataLink link : dataLinks ) { portNameToSource . put ( link . getSendsTo ( ) . getName ( ) , link . getReceivesFrom ( ) ) ; } for ( org . apache . taverna . scufl2 . api . core . Processor processor : processors ) { org . apache . taverna . scufl2 . cwl . components . Step step = convertProcessor ( processor , portNameToSource ) ; result . add ( step ) ; } return result ; }
org . junit . Assert . assertEquals ( steps , convertedSteps )
testMissingJar ( ) { java . util . Set < java . lang . String > names = localLibraryManager . list ( ) ; java . util . List < java . lang . String > allJars = new java . util . ArrayList < java . lang . String > ( ) ; org . talend . core . database . conn . version . EDatabaseVersion4Drivers [ ] values = org . talend . core . database . conn . version . EDatabaseVersion4Drivers . values ( ) ; for ( org . talend . core . database . conn . version . EDatabaseVersion4Drivers driver : values ) { java . util . Set < java . lang . String > providerDrivers = driver . getProviderDrivers ( ) ; allJars . addAll ( providerDrivers ) ; } java . util . Set < java . lang . String > missJars = new java . util . HashSet < java . lang . String > ( ) ; for ( java . lang . String jar : allJars ) { boolean hadInstalled = false ; for ( java . lang . String installJar : names ) { if ( jar . equals ( installJar ) ) { hadInstalled = true ; } } if ( ! hadInstalled ) { missJars . add ( jar ) ; } } if ( ( missJars . size ( ) ) > 0 ) { for ( java . lang . String notDiliver : notDilivers ) { if ( missJars . contains ( notDiliver ) ) { missJars . remove ( notDiliver ) ; } } } if ( ( missJars . size ( ) ) > 0 ) { java . lang . StringBuffer buffer = new java . lang . StringBuffer ( ) ; buffer . append ( "db<sp>system<sp>missing<sp>jars!<sp>\n" ) ; for ( java . lang . String missJar : missJars ) { buffer . append ( ( missJar + "\n" ) ) ; } throw new java . lang . RuntimeException ( buffer . toString ( ) ) ; } "<AssertPlaceHolder>" ; } size ( ) { return list . size ( ) ; }
org . junit . Assert . assertTrue ( ( ( missJars . size ( ) ) == 0 ) )
testMTKTpValueAll ( ) { double [ ] [ ] values = new double [ 10 ] [ 2 ] ; double [ ] values1 = new double [ ] { 1.0 , 2.0 , 3.0 , 4.0 , 5.0 , 6.0 , 7.0 , 8.0 , 9.0 , 10.0 } ; double [ ] values2 = new double [ ] { 1.0 , 2.0 , 3.0 , 4.0 , 5.0 , 6.0 , 7.0 , 8.0 , 9.0 , 10.0 } ; for ( int i = 0 ; i < 4 ; i ++ ) { values [ i ] [ 0 ] = values1 [ i ] ; values [ i ] [ 1 ] = values2 [ i ] ; } net . imglib2 . img . Img < net . imglib2 . type . numeric . real . DoubleType > vImage1 = net . imglib2 . img . array . ArrayImgs . doubles ( values1 , values1 . length ) ; net . imglib2 . img . Img < net . imglib2 . type . numeric . real . DoubleType > vImage2 = net . imglib2 . img . array . ArrayImgs . doubles ( values2 , values2 . length ) ; net . imagej . ops . special . function . BinaryFunctionOp < net . imglib2 . RandomAccessibleInterval < net . imglib2 . type . numeric . real . DoubleType > , net . imglib2 . RandomAccessibleInterval < net . imglib2 . type . numeric . real . DoubleType > , java . lang . Double > op = net . imagej . ops . special . function . Functions . binary ( ops , net . imagej . ops . coloc . maxTKendallTau . MTKT . class , net . imagej . ops . coloc . maxTKendallTau . Double . class , vImage1 , vImage2 ) ; net . imagej . ops . coloc . pValue . PValueResult value = ( ( net . imagej . ops . coloc . pValue . PValueResult ) ( ops . run ( Ops . Coloc . PValue . class , new net . imagej . ops . coloc . pValue . PValueResult ( ) , vImage1 , vImage2 , op , 5 ) ) ) ; "<AssertPlaceHolder>" ; } getPValue ( ) { return pValue ; }
org . junit . Assert . assertEquals ( 0.0 , value . getPValue ( ) , 0.0 )
testUserCommentDescription_BlankAscii ( ) { byte [ ] commentBytes = "ASCII
org . junit . Assert . assertEquals ( "" , descriptor . getDescription ( com . drew . metadata . exif . TAG_USER_COMMENT ) )
testOneToOneWaiting ( ) { com . questdb . mp . ConcurrentTest . LOG . info ( ) . $ ( "testOneToOneWaiting" ) . $ ( ) ; int cycle = 1024 ; int size = 1024 * cycle ; com . questdb . mp . RingQueue < com . questdb . mp . Event > queue = new com . questdb . mp . RingQueue ( Event . FACTORY , cycle ) ; com . questdb . mp . Sequence pubSeq = new com . questdb . mp . SPSequence ( cycle ) ; com . questdb . mp . Sequence subSeq = new com . questdb . mp . SCSequence ( new com . questdb . mp . BlockingWaitStrategy ( ) ) ; pubSeq . then ( subSeq ) . then ( pubSeq ) ; java . util . concurrent . CyclicBarrier barrier = new java . util . concurrent . CyclicBarrier ( 2 ) ; java . util . concurrent . CountDownLatch latch = new java . util . concurrent . CountDownLatch ( 1 ) ; com . questdb . mp . ConcurrentTest . WaitingConsumer consumer = new com . questdb . mp . ConcurrentTest . WaitingConsumer ( size , subSeq , queue , barrier , latch ) ; consumer . start ( ) ; barrier . await ( ) ; int i = 0 ; do { long cursor = pubSeq . nextBully ( ) ; queue . get ( cursor ) . value = i ++ ; pubSeq . done ( cursor ) ; } while ( i != size ) ; com . questdb . mp . ConcurrentTest . publishEOE ( queue , pubSeq ) ; latch . await ( ) ; int [ ] buf = consumer . buf ; for ( i = 0 ; i < ( buf . length ) ; i ++ ) { "<AssertPlaceHolder>" ; } } await ( ) { lock . lock ( ) ; try { if ( alerted ) { throw AlertedException . INSTANCE ; } if ( ! ( condition . await ( time , unit ) ) ) { throw TimeoutException . INSTANCE ; } } catch ( java . lang . InterruptedException e ) { throw TimeoutException . INSTANCE ; } finally { lock . unlock ( ) ; } }
org . junit . Assert . assertEquals ( i , buf [ i ] )
getURIIfFileDoesNotExist ( ) { java . io . File notExistingFile = new java . io . File ( tmp . getRoot ( ) , "not-existing.dat" ) ; java . net . URI uri = new com . hotels . bdp . waggledance . spring . CommonVFSResource ( notExistingFile . getAbsolutePath ( ) ) . getURI ( ) ; "<AssertPlaceHolder>" ; } toString ( ) { return attributesString ; }
org . junit . Assert . assertThat ( uri . toString ( ) , org . hamcrest . CoreMatchers . is ( notExistingFile . getAbsolutePath ( ) ) )
testDoubleArrayZero ( ) { final org . hillview . table . columns . DoubleArrayColumn col = org . hillview . test . table . DoubleArrayTest . generateDoubleArray ( this . size , 100 ) ; "<AssertPlaceHolder>" ; this . checkContents ( col ) ; } sizeInRows ( ) { return this . data . length ; }
org . junit . Assert . assertEquals ( col . sizeInRows ( ) , this . size )
testTicker ( ) { "<AssertPlaceHolder>" ; } ticker ( ) { return com . google . common . base . Ticker . systemTicker ( ) ; }
org . junit . Assert . assertSame ( com . google . common . base . Ticker . systemTicker ( ) , ctx . ticker ( ) )
testFreeSelectSqlBuilderInAllTableShardsWithTableName ( ) { java . util . List < java . lang . Integer > list = new java . util . ArrayList ( ) ; list . add ( 2 ) ; list . add ( 3 ) ; list . add ( 4 ) ; com . ctrip . platform . dal . dao . sqlbuilder . FreeSelectSqlBuilder < java . util . List < com . ctrip . platform . dal . dao . shard . ClientTestModel > > builder = new com . ctrip . platform . dal . dao . sqlbuilder . FreeSelectSqlBuilder ( ) ; builder . selectAll ( ) . from ( com . ctrip . platform . dal . dao . shard . BaseDalTabelDaoShardByTableTest . TABLE_NAME ) . where ( com . ctrip . platform . dal . dao . sqlbuilder . Expressions . in ( "id" , list , Types . INTEGER ) ) ; builder . mapWith ( new com . ctrip . platform . dal . dao . helper . DalDefaultJpaMapper ( com . ctrip . platform . dal . dao . shard . ClientTestModel . class ) ) ; java . util . List < com . ctrip . platform . dal . dao . shard . ClientTestModel > result = null ; try { result = com . ctrip . platform . dal . dao . shard . BaseDalTabelDaoShardByTableTest . queryDao . query ( builder , new com . ctrip . platform . dal . dao . shard . DalHints ( ) . inAllTableShards ( ) ) ; } catch ( java . lang . Exception e ) { org . junit . Assert . fail ( ) ; } "<AssertPlaceHolder>" ; } size ( ) { return allKeys . size ( ) ; }
org . junit . Assert . assertEquals ( 6 , result . size ( ) )
testFilter ( ) { java . lang . String expectedJavascript = "$('div').filter('span');" ; java . lang . String generatedJavascript = new org . odlabs . wiquery . core . javascript . JsStatement ( ) . $ ( null , "div" ) . chain ( org . odlabs . wiquery . core . javascript . helper . TraversingHelper . filter ( "span" ) ) . render ( ) . toString ( ) ; org . odlabs . wiquery . core . javascript . helper . TraversingHelperTestCase . log . info ( expectedJavascript ) ; org . odlabs . wiquery . core . javascript . helper . TraversingHelperTestCase . log . info ( generatedJavascript ) ; "<AssertPlaceHolder>" ; } toString ( ) { return super . toString ( ) . toLowerCase ( ) ; }
org . junit . Assert . assertEquals ( generatedJavascript , expectedJavascript )
testAllEanbled_06 ( ) { slacknotifications . teamcity . BuildState state = new slacknotifications . teamcity . BuildState ( ) ; state . enable ( slacknotifications . teamcity . BUILD_STARTED ) ; state . enable ( slacknotifications . teamcity . BUILD_INTERRUPTED ) ; state . enable ( slacknotifications . teamcity . RESPONSIBILITY_CHANGED ) ; state . enable ( slacknotifications . teamcity . BEFORE_BUILD_FINISHED ) ; state . enable ( slacknotifications . teamcity . BUILD_FINISHED ) ; state . enable ( slacknotifications . teamcity . BUILD_FAILED ) ; state . enable ( slacknotifications . teamcity . BUILD_SUCCESSFUL ) ; state . enable ( slacknotifications . teamcity . BUILD_FIXED ) ; state . enable ( slacknotifications . teamcity . BUILD_BROKEN ) ; "<AssertPlaceHolder>" ; } allEnabled ( ) { boolean areAllEnbled = true ; for ( slacknotifications . teamcity . BuildStateEnum . BuildStateEnum state : states . keySet ( ) ) { if ( ( ( state . equals ( slacknotifications . teamcity . BUILD_BROKEN ) ) && ( states . get ( slacknotifications . teamcity . BUILD_BROKEN ) . isEnabled ( ) ) ) || ( ( state . equals ( slacknotifications . teamcity . BUILD_FIXED ) ) && ( states . get ( slacknotifications . teamcity . BUILD_FIXED ) . isEnabled ( ) ) ) ) { return false ; } if ( ( state . equals ( slacknotifications . teamcity . BUILD_BROKEN ) ) || ( state . equals ( slacknotifications . teamcity . BUILD_FIXED ) ) ) { continue ; } areAllEnbled = areAllEnbled && ( states . get ( state ) . isEnabled ( ) ) ; } return areAllEnbled ; }
org . junit . Assert . assertFalse ( state . allEnabled ( ) )
registerModeChangeListener ( ) { org . candlepin . controller . ModeChangeListener listener = new org . candlepin . controller . ModeChangeListener ( ) { @ org . candlepin . controller . Override public void modeChanged ( org . candlepin . model . CandlepinModeChange . Mode newMode ) { modeChanged = true ; } } ; modeManager . registerModeChangeListener ( listener ) ; modeManager . enterMode ( Mode . SUSPEND , testReason ) ; "<AssertPlaceHolder>" ; } enterMode ( org . candlepin . model . CandlepinModeChange . Mode , org . candlepin . model . CandlepinModeChange . Reason [ ] ) { if ( ! ( config . getBoolean ( ConfigProperties . SUSPEND_MODE_ENABLED ) ) ) { org . candlepin . controller . ModeManagerImpl . log . debug ( "Suspend<sp>mode<sp>is<sp>disabled,<sp>ignoring<sp>mode<sp>transition" ) ; return ; } if ( ( reasons == null ) || ( ( reasons . length ) == 0 ) ) { java . lang . String noReasonErrorString = "No<sp>reason<sp>supplied<sp>when<sp>trying<sp>to<sp>change<sp>CandlepinModeChange." ; org . candlepin . controller . ModeManagerImpl . log . error ( noReasonErrorString ) ; throw new java . lang . IllegalArgumentException ( noReasonErrorString ) ; } org . candlepin . controller . ModeManagerImpl . log . info ( "Entering<sp>new<sp>mode<sp>{}<sp>for<sp>the<sp>following<sp>reasons:<sp>{}" , m , reasons ) ; org . candlepin . model . CandlepinModeChange . Mode previousMode = cpMode . getMode ( ) ; if ( previousMode != m ) { fireModeChangeEvent ( m ) ; } if ( m . equals ( Mode . SUSPEND ) ) { org . candlepin . controller . ModeManagerImpl . log . warn ( "Candlepin<sp>is<sp>entering<sp>suspend<sp>mode<sp>for<sp>the<sp>following<sp>reasons:<sp>{}" , reasons ) ; } cpMode = new org . candlepin . model . CandlepinModeChange ( new java . util . Date ( ) , m , reasons ) ; }
org . junit . Assert . assertEquals ( true , modeChanged )
testAllocatePublicIpRequest ( ) { try { com . fit2cloud . aliyun . ecs . model . request . AllocatePublicIpAddressRequest r = new com . fit2cloud . aliyun . ecs . model . request . AllocatePublicIpAddressRequest ( "i-25zh2k2lv" ) ; com . fit2cloud . aliyun . ecs . model . response . AllocatePublicIpAddressResponse response = client . allocatePublicIp ( r ) ; System . out . println ( ( "testAllocatePublicIpRequest<sp>::<sp>" + response ) ) ; "<AssertPlaceHolder>" ; } catch ( java . lang . Exception e ) { e . printStackTrace ( ) ; org . junit . Assert . fail ( e . getMessage ( ) ) ; } } allocatePublicIp ( com . fit2cloud . aliyun . ecs . model . request . AllocatePublicIpAddressRequest ) { return gson . fromJson ( request . execute ( "AllocatePublicIpAddress" , allocatePublicIpAddressRequest . toMap ( ) ) , com . fit2cloud . aliyun . ecs . model . response . AllocatePublicIpAddressResponse . class ) ; }
org . junit . Assert . assertTrue ( true )
testVeryLargeJson ( ) { javax . json . JsonWriterFactory writerFactory = javax . json . Json . createWriterFactory ( new java . util . HashMap < java . lang . String , java . lang . Object > ( ) { { put ( JsonParserFactoryImpl . BUFFER_LENGTH , "256" ) ; } } ) ; int itemSize = 50000 ; java . io . StringWriter sw = new java . io . StringWriter ( ) ; javax . json . JsonWriter writer = writerFactory . createWriter ( sw ) ; javax . json . JsonArrayBuilder arrayBuilder = javax . json . Json . createArrayBuilder ( ) ; for ( int i = 0 ; i < itemSize ; i ++ ) { arrayBuilder . add ( ( "0123456789012345-" + i ) ) ; } writer . writeArray ( arrayBuilder . build ( ) ) ; java . lang . String json = sw . toString ( ) ; System . out . println ( ( ( "Created<sp>a<sp>JSON<sp>of<sp>size<sp>" + ( json . length ( ) ) ) + "<sp>bytes" ) ) ; javax . json . JsonArray jsonArray = javax . json . Json . createReader ( new java . io . StringReader ( json ) ) . readArray ( ) ; "<AssertPlaceHolder>" ; } size ( ) { return size ; }
org . junit . Assert . assertEquals ( itemSize , jsonArray . size ( ) )
testContainsArrayContainer_ExcludeSuperSet ( ) { org . roaringbitmap . Container bc = new org . roaringbitmap . BitmapContainer ( ) . add ( 0 , 10 ) ; org . roaringbitmap . Container superset = new org . roaringbitmap . ArrayContainer ( ) . add ( 0 , 20 ) ; "<AssertPlaceHolder>" ; } contains ( int ) { throw new java . lang . UnsupportedOperationException ( "Not<sp>implemented<sp>in<sp>ImmutableConciseSet" ) ; }
org . junit . Assert . assertFalse ( bc . contains ( superset ) )
testGetProducts_Suspended ( ) { importProducts ( ) ; runTX ( new java . util . concurrent . Callable < java . lang . Void > ( ) { @ org . oscm . serviceprovisioningservice . bean . Override public org . oscm . serviceprovisioningservice . bean . Void call ( ) throws org . oscm . serviceprovisioningservice . bean . Exception { org . oscm . domobjects . Organization org = mgr . getReference ( org . oscm . domobjects . Organization . class , supplier . getKey ( ) ) ; java . util . List < org . oscm . domobjects . Product > products = org . getProducts ( ) ; for ( org . oscm . domobjects . Product product : products ) { product . setStatus ( ServiceStatus . SUSPENDED ) ; } return null ; } } ) ; java . util . List < org . oscm . internal . vo . VOService > products = svcProv . getSuppliedServices ( ) ; "<AssertPlaceHolder>" ; } size ( ) { return categoriesForMarketplace . size ( ) ; }
org . junit . Assert . assertEquals ( 4 , products . size ( ) )
testCreate ( ) { org . oscarehr . common . model . CtlSpecialInstructions entity = new org . oscarehr . common . model . CtlSpecialInstructions ( ) ; org . oscarehr . common . dao . utils . EntityDataGenerator . generateTestDataForModelClass ( entity ) ; dao . persist ( entity ) ; "<AssertPlaceHolder>" ; } getId ( ) { return this . id ; }
org . junit . Assert . assertNotNull ( entity . getId ( ) )
listVisibilityOptionsForwardsCalls ( ) { final java . util . List < org . phenotips . data . permissions . Visibility > visibilities = java . util . Collections . singletonList ( org . phenotips . data . permissions . internal . DefaultEntityPermissionsManagerTest . PUBLIC_VISIBILITY ) ; when ( this . visibilityManager . listVisibilityOptions ( ) ) . thenReturn ( visibilities ) ; "<AssertPlaceHolder>" ; verify ( this . visibilityManager , times ( 1 ) ) . listVisibilityOptions ( ) ; } listVisibilityOptions ( ) { return this . internalService . listVisibilityOptions ( ) ; }
org . junit . Assert . assertSame ( visibilities , this . component . listVisibilityOptions ( ) )
checkHashCodeWorks ( ) { org . openqa . selenium . support . Color objectA = org . openqa . selenium . support . Color . fromString ( "#f00" ) ; org . openqa . selenium . support . Color objectB = org . openqa . selenium . support . Color . fromString ( "rgb(255,<sp>0,<sp>0)" ) ; "<AssertPlaceHolder>" ; } hashCode ( ) { return com . google . common . base . Objects . hashCode ( method , path ) ; }
org . junit . Assert . assertEquals ( objectA . hashCode ( ) , objectB . hashCode ( ) )
test2 ( ) { com . actiontech . dble . route . function . AutoPartitionByLong autoPartition = new com . actiontech . dble . route . function . AutoPartitionByLong ( ) ; autoPartition . setMapFile ( "autopartition-long.txt" ) ; autoPartition . setDefaultNode ( 0 ) ; autoPartition . init ( ) ; java . lang . String idVal = "6000001" ; "<AssertPlaceHolder>" ; } calculate ( long ) { long v1 = 0L ; long v2 = 0L ; long v3 = 0L ; int c1 = 0 ; int c2 = 0 ; int c3 = 0 ; for ( com . actiontech . dble . statistic . HeartbeatRecorder . Record record : records ) { long t = time - ( record . time ) ; if ( t <= ( com . actiontech . dble . statistic . HeartbeatRecorder . AVG1_TIME ) ) { v1 += record . value ; ++ c1 ; } if ( t <= ( com . actiontech . dble . statistic . HeartbeatRecorder . AVG2_TIME ) ) { v2 += record . value ; ++ c2 ; } if ( t <= ( com . actiontech . dble . statistic . HeartbeatRecorder . AVG3_TIME ) ) { v3 += record . value ; ++ c3 ; } } if ( c1 != 0 ) { avg1 = v1 / c1 ; } if ( c2 != 0 ) { avg2 = v2 / c2 ; } if ( c3 != 0 ) { avg3 = v3 / c3 ; } }
org . junit . Assert . assertEquals ( true , ( 0 == ( autoPartition . calculate ( idVal ) ) ) )
testSetType ( ) { service . setType ( "NodePort" ) ; "<AssertPlaceHolder>" ; } getType ( ) { return asString ( "type" ) ; }
org . junit . Assert . assertEquals ( "NodePort" , service . getType ( ) )
testConfigurePrivateShadowedMethod ( ) { abstract class SubclassedServlet extends com . twelvemonkeys . servlet . ServletConfiguratorTest . AnnotatedServlet { @ com . twelvemonkeys . servlet . InitParam ( name = "package-private" ) abstract void setPrivate ( java . lang . String priv ) ; } SubclassedServlet servlet = mock ( SubclassedServlet . class ) ; javax . servlet . ServletConfig config = mock ( javax . servlet . ServletConfig . class ) ; when ( config . getServletName ( ) ) . thenReturn ( "FooServlet" ) ; when ( config . getInitParameterNames ( ) ) . thenReturn ( java . util . Collections . enumeration ( java . util . Arrays . asList ( "private" ) ) ) ; when ( config . getInitParameter ( "private" ) ) . thenReturn ( "private" ) ; when ( config . getInitParameter ( "package-private" ) ) . thenReturn ( "package" ) ; com . twelvemonkeys . servlet . ServletConfigurator . configure ( servlet , config ) ; "<AssertPlaceHolder>" ; verify ( servlet , times ( 1 ) ) . setPrivate ( "package" ) ; } configure ( javax . servlet . Servlet , javax . servlet . ServletConfig ) { new com . twelvemonkeys . servlet . ServletConfigurator . Configurator ( pServlet , pConfig . getServletName ( ) ) . configure ( com . twelvemonkeys . servlet . ServletUtil . asMap ( pConfig ) ) ; }
org . junit . Assert . assertEquals ( servlet . priv , "private" )
testFetchByPrimaryKeysWithNoPrimaryKeys ( ) { java . util . Set < java . io . Serializable > primaryKeys = new java . util . HashSet < java . io . Serializable > ( ) ; java . util . Map < java . io . Serializable , com . liferay . portal . tools . service . builder . test . model . LocalizedEntryLocalization > localizedEntryLocalizations = _persistence . fetchByPrimaryKeys ( primaryKeys ) ; "<AssertPlaceHolder>" ; } isEmpty ( ) { return _portalCacheListeners . isEmpty ( ) ; }
org . junit . Assert . assertTrue ( localizedEntryLocalizations . isEmpty ( ) )
nullSafe ( ) { org . locationtech . jts . geom . Geometry offset = new org . geotools . geometry . jts . OffsetCurveBuilder ( 10 ) . offset ( null ) ; "<AssertPlaceHolder>" ; } offset ( double ) { this . offset = literal ( offset ) ; return this ; }
org . junit . Assert . assertNull ( offset )
parameterizedTypeIsAssignableToSameTypeWithWildcard ( ) { final org . jboss . errai . codegen . meta . MetaClass parameterized = parameterizedAs ( java . util . Collection . class , typeParametersOf ( org . jboss . errai . ioc . rebind . ioc . graph . impl . Integer . class ) ) ; final org . jboss . errai . codegen . meta . MetaClass wildcard = org . jboss . errai . codegen . meta . MetaClassFactory . get ( org . jboss . errai . ioc . rebind . ioc . graph . impl . GraphUtilUnitTest . WildcardMethod . class ) . getMethod ( "method" , new java . lang . Class [ 0 ] ) . getReturnType ( ) ; "<AssertPlaceHolder>" ; } hasAssignableTypeParameters ( org . jboss . errai . codegen . meta . MetaClass , org . jboss . errai . codegen . meta . MetaClass ) { final org . jboss . errai . codegen . meta . MetaParameterizedType toParamType = toType . getParameterizedType ( ) ; final java . util . Optional < org . jboss . errai . codegen . meta . MetaParameterizedType > fromParamType = org . jboss . errai . ioc . rebind . ioc . graph . impl . GraphUtil . getFromTypeParams ( fromType , toType ) ; return ( toParamType == null ) || ( fromParamType . map ( ( type ) -> toParamType . isAssignableFrom ( type ) ) . orElse ( true ) ) ; }
org . junit . Assert . assertTrue ( org . jboss . errai . ioc . rebind . ioc . graph . impl . GraphUtil . hasAssignableTypeParameters ( parameterized , wildcard ) )
testPublishDsw ( ) { java . lang . String domainId = "domainId" ; java . io . InputStream metadataFile = mock ( java . io . InputStream . class ) ; boolean overwrite = false ; boolean checkConnection = false ; javax . ws . rs . core . Response mockResponse = mock ( javax . ws . rs . core . Response . class ) ; doReturn ( "dswId" ) . when ( org . pentaho . platform . dataaccess . datasource . api . resources . DataSourceWizardResourceTest . dataSourceWizardResource . service ) . publishDsw ( domainId , metadataFile , overwrite , checkConnection , null ) ; doReturn ( mockResponse ) . when ( org . pentaho . platform . dataaccess . datasource . api . resources . DataSourceWizardResourceTest . dataSourceWizardResource ) . buildOkResponse ( "dswId" ) ; javax . ws . rs . core . Response response = org . pentaho . platform . dataaccess . datasource . api . resources . DataSourceWizardResourceTest . dataSourceWizardResource . publishDsw ( domainId , metadataFile , overwrite , checkConnection , null ) ; verify ( org . pentaho . platform . dataaccess . datasource . api . resources . DataSourceWizardResourceTest . dataSourceWizardResource , times ( 1 ) ) . publishDsw ( domainId , metadataFile , overwrite , checkConnection , null ) ; "<AssertPlaceHolder>" ; } publishDsw ( java . lang . String , java . io . InputStream , boolean , boolean , org . pentaho . platform . api . repository2 . unified . webservices . RepositoryFileAclDto ) { try { final java . lang . String dswId = service . publishDsw ( domainId , metadataFile , overwrite , checkConnection , acl ) ; return buildOkResponse ( dswId ) ; } catch ( org . pentaho . platform . api . engine . PentahoAccessControlException e ) { return buildUnauthorizedResponse ( ) ; } catch ( java . lang . IllegalArgumentException e ) { return buildBadRequestResponse ( e . getMessage ( ) ) ; } catch ( org . pentaho . platform . dataaccess . datasource . api . DataSourceWizardService e ) { return buildConfilictResponse ( e . getMessage ( ) ) ; } catch ( java . lang . Exception e ) { return buildServerErrorResponse ( ) ; } }
org . junit . Assert . assertEquals ( mockResponse , response )
testUnion5 ( ) { fr . inria . corese . core . Graph g1 = fr . inria . corese . core . Graph . create ( ) ; fr . inria . corese . core . query . QueryProcess exec1 = fr . inria . corese . core . query . QueryProcess . create ( g1 ) ; java . lang . String i1 = "let<sp>(?list<sp>=<sp>xt:list(us:test1,<sp>us:test2)){" 3 + ( ( "let<sp>(?list<sp>=<sp>xt:list(us:test1,<sp>us:test2)){" 1 + "us:Jim<sp>foaf:knows<sp>us:Jack<sp>" ) + "}" ) ; java . lang . String q = "select<sp>(us:main()<sp>as<sp>?m)<sp>where<sp>{}" + ( ( ( ( ( ( ( ( ( ( ( ( "function<sp>us:main(){" + "let<sp>(?list<sp>=<sp>xt:list(us:test1,<sp>us:test2)){" ) + "reduce(xt:union,<sp>maplist(rq:funcall,<sp>?list))" ) + "}" ) + "}" ) + "function<sp>us:test1(){" ) + "let<sp>(?m<sp>=<sp>select<sp>*<sp>where<sp>{<sp>?x<sp>foaf:knows<sp>?y}){" ) + "let<sp>(?list<sp>=<sp>xt:list(us:test1,<sp>us:test2)){" 0 ) + "}" ) + "function<sp>us:test2(){" ) + "let<sp>(?m<sp>=<sp>select<sp>*<sp>where<sp>{<sp>?z<sp>foaf:knows<sp>?t}){" ) + "let<sp>(?list<sp>=<sp>xt:list(us:test1,<sp>us:test2)){" 0 ) + "}" ) ; exec1 . query ( i1 ) ; fr . inria . corese . kgram . core . Mappings map = exec1 . query ( q ) ; fr . inria . corese . sparql . api . IDatatype dt = ( ( fr . inria . corese . sparql . api . IDatatype ) ( map . getValue ( "let<sp>(?list<sp>=<sp>xt:list(us:test1,<sp>us:test2)){" 2 ) ) ) ; fr . inria . corese . kgram . core . Mappings m = dt . getPointerObject ( ) . getMappings ( ) ; "<AssertPlaceHolder>" ; } size ( ) { return varList . size ( ) ; }
org . junit . Assert . assertEquals ( 4 , m . size ( ) )
shouldUseExplicitlyConfiguredEntityNameInOrmXmlInCountQueries ( ) { org . springframework . data . jpa . domain . sample . Role reference = new org . springframework . data . jpa . domain . sample . Role ( "ADMIN" ) ; repository . save ( reference ) ; "<AssertPlaceHolder>" ; } count ( ) { return em . createQuery ( getCountQueryString ( ) , org . springframework . data . jpa . repository . support . Long . class ) . getSingleResult ( ) ; }
org . junit . Assert . assertThat ( repository . count ( ) , is ( 1L ) )
characters ( ) { ezvcard . util . CharacterBitSet bitSet = new ezvcard . util . CharacterBitSet ( "a-z" ) ; "<AssertPlaceHolder>" ; } characters ( ) { return characters ; }
org . junit . Assert . assertEquals ( "a-z" , bitSet . characters ( ) )
readBitsAcrossByteBoundary ( ) { final byte [ ] data = new byte [ ] { 3 , ( ( byte ) ( 192 ) ) } ; final java . io . ByteArrayInputStream stream = new java . io . ByteArrayInputStream ( data ) ; final com . flagstone . transform . coder . SWFDecoder fixture = new com . flagstone . transform . coder . SWFDecoder ( stream ) ; fixture . readBits ( 6 , false ) ; "<AssertPlaceHolder>" ; } readBits ( int , boolean ) { int pointer = ( ( index ) << ( com . flagstone . transform . coder . SWFDecoder . BYTES_TO_BITS ) ) + ( offset ) ; if ( ( ( ( size ) << ( com . flagstone . transform . coder . SWFDecoder . BYTES_TO_BITS ) ) - pointer ) < numberOfBits ) { fill ( ) ; pointer = ( ( index ) << ( com . flagstone . transform . coder . SWFDecoder . BYTES_TO_BITS ) ) + ( offset ) ; } int value = 0 ; if ( numberOfBits > 0 ) { if ( ( pointer + numberOfBits ) > ( ( size ) << ( com . flagstone . transform . coder . SWFDecoder . BYTES_TO_BITS ) ) ) { throw new java . lang . ArrayIndexOutOfBoundsException ( ) ; } for ( int i = com . flagstone . transform . coder . SWFDecoder . BITS_PER_INT ; ( i > 0 ) && ( ( index ) < ( buffer . length ) ) ; i -= com . flagstone . transform . coder . SWFDecoder . BITS_PER_BYTE ) { value |= ( ( buffer [ ( ( index ) ++ ) ] ) & ( com . flagstone . transform . coder . SWFDecoder . BYTE_MASK ) ) << ( i - ( com . flagstone . transform . coder . SWFDecoder . BITS_PER_BYTE ) ) ; } value <<= offset ; if ( signed ) { value >>= ( com . flagstone . transform . coder . SWFDecoder . BITS_PER_INT ) - numberOfBits ; } else { value >>>= ( com . flagstone . transform . coder . SWFDecoder . BITS_PER_INT ) - numberOfBits ; } pointer += numberOfBits ; index = pointer > > > ( com . flagstone . transform . coder . SWFDecoder . BITS_TO_BYTES ) ; offset = pointer & ( Coder . LOWEST3 ) ; } return value ; }
org . junit . Assert . assertEquals ( ( - 1 ) , fixture . readBits ( 4 , true ) )
testBuilderKModuleHasDependency ( ) { java . net . URL url = this . getClass ( ) . getResource ( "/GuvnorM2RepoDependencyExample2" ) ; org . uberfire . java . nio . fs . file . SimpleFileSystemProvider p = new org . uberfire . java . nio . fs . file . SimpleFileSystemProvider ( ) ; org . uberfire . java . nio . file . Path path = p . getPath ( url . toURI ( ) ) ; final org . guvnor . common . services . project . model . Module module = moduleService . resolveModule ( org . uberfire . backend . server . util . Paths . convert ( path ) ) ; final org . kie . workbench . common . services . backend . builder . core . Builder builder = new org . kie . workbench . common . services . backend . builder . core . Builder ( module , ioService , moduleService , importsService , new java . util . ArrayList ( ) , dependenciesClassLoaderCache , pomModelCache , getPackageNameWhiteListService ( ) , alwaysTrue ) ; final org . guvnor . common . services . project . builder . model . BuildResults results = builder . build ( ) ; if ( ! ( results . getMessages ( ) . isEmpty ( ) ) ) { for ( org . guvnor . common . services . project . builder . model . BuildMessage m : results . getMessages ( ) ) { org . kie . workbench . common . services . backend . builder . core . BuilderTest . logger . debug ( m . getText ( ) ) ; } } "<AssertPlaceHolder>" ; } isEmpty ( ) { return commands . isEmpty ( ) ; }
org . junit . Assert . assertTrue ( results . getMessages ( ) . isEmpty ( ) )
testEquals ( ) { com . ning . atlas . components . aws . IpRule ip = com . ning . atlas . components . aws . IpRule . parse ( "1" , "tcp<sp>22<sp>0.0.0.0/0" ) ; com . ning . atlas . components . aws . IpRule ip2 = com . ning . atlas . components . aws . IpRule . parse ( "1" , "tcp<sp>22<sp>0.0.0.0/0" ) ; "<AssertPlaceHolder>" ; } parse ( java . lang . String , java . lang . String ) { java . util . regex . Matcher cidr = com . ning . atlas . components . aws . IpRule . CIDR_RULE . matcher ( descriptor ) ; java . util . regex . Matcher group = com . ning . atlas . components . aws . IpRule . GROUP_RULE . matcher ( descriptor ) ; if ( cidr . matches ( ) ) { return new com . ning . atlas . components . aws . IpRule . CIDRRule ( cidr . group ( 1 ) , cidr . group ( 2 ) , cidr . group ( 3 ) ) ; } else if ( group . matches ( ) ) { return new com . ning . atlas . components . aws . IpRule . GroupRule ( userId , group . group ( 1 ) , group . group ( 2 ) , group . group ( 3 ) ) ; } throw new java . lang . IllegalStateException ( ( descriptor + "<sp>does<sp>not<sp>appear<sp>to<sp>be<sp>a<sp>CIDR<sp>or<sp>group<sp>rule" ) ) ; }
org . junit . Assert . assertThat ( ip , org . hamcrest . CoreMatchers . equalTo ( ip2 ) )
testListAllTicket ( ) { org . irods . jargon . ticket . packinstr . TicketAdminInp pi = org . irods . jargon . ticket . packinstr . TicketAdminInp . instanceForListAll ( ) ; "<AssertPlaceHolder>" ; } instanceForListAll ( ) { return new org . irods . jargon . ticket . packinstr . TicketAdminInp ( TICKET_ADMIN_INP_API_NBR , "list-all" , BLANK , BLANK , BLANK , BLANK , BLANK ) ; }
org . junit . Assert . assertNotNull ( pi )
test_hasTagKey ( ) { java . util . Map < io . github . data4all . model . data . Tag , java . lang . String > tags = new java . util . LinkedHashMap < io . github . data4all . model . data . Tag , java . lang . String > ( ) ; tags . put ( io . github . data4all . model . data . Tags . getTagWithId ( 10 ) , "motorway" ) ; testNode . addTags ( tags ) ; "<AssertPlaceHolder>" ; } hasTagKey ( io . github . data4all . model . data . Tag ) { return ( this . getTagValueWithKey ( key ) ) != null ; }
org . junit . Assert . assertTrue ( testNode . hasTagKey ( io . github . data4all . model . data . Tags . getTagWithId ( 10 ) ) )
testReadByNotEqualCondition ( ) { org . apache . eagle . storage . operation . RawQuery rawQuery = new org . apache . eagle . storage . operation . RawQuery ( ) ; rawQuery . setQuery ( "TestTimeSeriesAPIEntity[@cluster!=\"c4ut_not_found\"<sp>AND<sp>@field1<sp>!=<sp>0]{*}" ) ; System . out . println ( org . apache . eagle . common . DateTimeUtil . millisecondsToHumanDateWithSeconds ( baseTimestamp ) ) ; rawQuery . setStartTime ( org . apache . eagle . common . DateTimeUtil . millisecondsToHumanDateWithSeconds ( baseTimestamp ) ) ; rawQuery . setEndTime ( org . apache . eagle . common . DateTimeUtil . millisecondsToHumanDateWithMilliseconds ( ( ( baseTimestamp ) + 2000 ) ) ) ; rawQuery . setPageSize ( 1000 ) ; org . apache . eagle . storage . operation . CompiledQuery query = new org . apache . eagle . storage . operation . CompiledQuery ( rawQuery ) ; org . apache . eagle . storage . result . QueryResult < org . apache . eagle . log . entity . test . TestTimeSeriesAPIEntity > result = org . apache . eagle . storage . hbase . spi . TestHBaseStorage . storage . query ( query , org . apache . eagle . storage . hbase . spi . TestHBaseStorage . entityDefinition ) ; "<AssertPlaceHolder>" ; } query ( org . apache . eagle . storage . operation . CompiledQuery , org . apache . eagle . log . entity . meta . EntityDefinition ) { org . apache . eagle . storage . result . QueryResult < E > result = new org . apache . eagle . storage . result . QueryResult < E > ( ) ; try { org . apache . eagle . query . GenericQuery reader = org . apache . eagle . storage . hbase . query . GenericQueryBuilder . select ( query . getSearchCondition ( ) . getOutputFields ( ) ) . from ( query . getServiceName ( ) , query . getRawQuery ( ) . getMetricName ( ) ) . where ( query . getSearchCondition ( ) ) . groupBy ( query . isHasAgg ( ) , query . getGroupByFields ( ) , query . getAggregateFunctionTypes ( ) , query . getAggregateFields ( ) ) . timeSeries ( query . getRawQuery ( ) . isTimeSeries ( ) , query . getRawQuery ( ) . getIntervalmin ( ) ) . treeAgg ( query . getRawQuery ( ) . isTreeAgg ( ) ) . orderBy ( query . getSortOptions ( ) , query . getSortFunctions ( ) , query . getSortFields ( ) ) . top ( query . getRawQuery ( ) . getTop ( ) ) . parallel ( query . getRawQuery ( ) . getParallel ( ) ) . build ( ) ; java . util . List < E > entities = reader . result ( ) ; result . setData ( entities ) ; result . setFirstTimestamp ( reader . getFirstTimeStamp ( ) ) ; result . setLastTimestamp ( reader . getLastTimestamp ( ) ) ; result . setSize ( entities . size ( ) ) ; if ( ! ( query . isHasAgg ( ) ) ) { result . setEntityType ( ( ( java . lang . Class < E > ) ( entityDefinition . getEntityClass ( ) ) ) ) ; } else { result . setEntityType ( ( ( java . lang . Class < E > ) ( java . util . Map . class ) ) ) ; } result . setSuccess ( true ) ; } catch ( java . lang . Exception e ) { org . apache . eagle . storage . hbase . HBaseStorage . LOG . error ( e . getMessage ( ) , e ) ; throw new java . io . IOException ( e ) ; } return result ; }
org . junit . Assert . assertNotNull ( result )
testSumWithValidRange ( ) { int TIME_LIMIT = ( ( int ) ( ( new java . util . GregorianCalendar ( 2014 , 10 , 10 , 2 , 0 , 0 ) . getTime ( ) . getTime ( ) ) / 1000 ) ) ; org . apache . hadoop . hbase . coprocessor . client . TimeseriesAggregationClient aClient = new org . apache . hadoop . hbase . coprocessor . client . TimeseriesAggregationClient ( org . apache . hadoop . hbase . coprocessor . TestTimeseriesAggregateProtocol . conf , 900 , org . apache . hadoop . hbase . coprocessor . TestTimeseriesAggregateProtocol . TIME_TABLE_BASELINE , TIME_LIMIT , org . apache . hadoop . hbase . coprocessor . TestTimeseriesAggregateProtocol . KEY_FILTER_PATTERN ) ; org . apache . hadoop . hbase . client . Scan scan = new org . apache . hadoop . hbase . client . Scan ( ) ; scan . addFamily ( org . apache . hadoop . hbase . coprocessor . TestTimeseriesAggregateProtocol . TEST_FAMILY ) ; final org . apache . hadoop . hbase . coprocessor . ColumnInterpreter < java . lang . Long , java . lang . Long , org . apache . hadoop . hbase . protobuf . generated . HBaseProtos . EmptyMsg , org . apache . hadoop . hbase . protobuf . generated . HBaseProtos . LongMsg , org . apache . hadoop . hbase . protobuf . generated . HBaseProtos . LongMsg > ci = new org . apache . hadoop . hbase . client . coprocessor . LongColumnInterpreter ( ) ; java . util . Map < java . lang . Long , java . lang . Long > results = new java . util . concurrent . ConcurrentSkipListMap < java . lang . Long , java . lang . Long > ( ) ; results . put ( 1415574000000L , 300L ) ; results . put ( 1415574900000L , 925L ) ; results . put ( 1415575800000L , 1550L ) ; results . put ( 1415576700000L , 2175L ) ; results . put ( 1415577600000L , 300L ) ; results . put ( 1415578500000L , 925L ) ; results . put ( 1415579400000L , 1550L ) ; results . put ( 1415580300000L , 2175L ) ; results . put ( 1415581200000L , 300L ) ; java . util . concurrent . ConcurrentSkipListMap < java . lang . Long , java . lang . Long > sums = aClient . sum ( org . apache . hadoop . hbase . coprocessor . TestTimeseriesAggregateProtocol . TEST_TABLE , ci , scan ) ; "<AssertPlaceHolder>" ; aClient . close ( ) ; } sum ( org . apache . hadoop . hbase . client . Table , org . apache . hadoop . hbase . coprocessor . ColumnInterpreter , org . apache . hadoop . hbase . client . Scan ) { final org . apache . hadoop . hbase . protobuf . generated . TimeseriesAggregateProtos . TimeseriesAggregateRequest requestArg = validateArgAndGetPB ( scan , ci , false , intervalSeconds , timestampSecondsMin , timestampSecondsMax , keyFilterPattern ) ; class SumCallBack implements org . apache . hadoop . hbase . client . coprocessor . Batch . Callback < org . apache . hadoop . hbase . protobuf . generated . TimeseriesAggregateProtos . TimeseriesAggregateResponse > { java . util . concurrent . ConcurrentSkipListMap < java . lang . Long , S > sum = new java . util . concurrent . ConcurrentSkipListMap < java . lang . Long , S > ( ) ; java . util . concurrent . ConcurrentSkipListMap < java . lang . Long , S > getSum ( ) { return sum ; } @ org . apache . hadoop . hbase . coprocessor . client . Override public synchronized void update ( byte [ ] region , byte [ ] row , org . apache . hadoop . hbase . protobuf . generated . TimeseriesAggregateProtos . TimeseriesAggregateResponse result ) { java . util . List < org . apache . hadoop . hbase . protobuf . generated . TimeseriesAggregateProtos . TimeseriesAggregateResponseMapEntry > results = ( ( org . apache . hadoop . hbase . protobuf . generated . TimeseriesAggregateProtos . TimeseriesAggregateResponse ) ( result ) ) . getEntryList ( ) ; for ( org . apache . hadoop . hbase . protobuf . generated . TimeseriesAggregateProtos . TimeseriesAggregateResponseMapEntry entry : results ) { S candidate ; if ( ( entry . getValue ( ) . getFirstPartCount ( ) ) == 0 ) { if ( ! ( sum . containsKey ( entry . getKey ( ) ) ) ) { sum . put ( entry . getKey ( ) , null ) ; } } else { com . google . protobuf . ByteString b = entry . getValue ( ) . getFirstPart ( 0 ) ; T t = null ; try { t = org . apache . hadoop . hbase . protobuf . ProtobufUtil . getParsedGenericInstance ( ci . getClass ( ) , 4 , b ) ; } catch ( java . io . IOException e ) { e . printStackTrace ( ) ; } candidate = ci . getPromotedValueFromProto ( t ) ; if ( null != t ) { if ( sum . containsKey ( entry . getKey ( ) ) ) { S current = sum . get ( entry . getKey ( ) ) ; sum . put ( entry . getKey ( ) , ci . add ( current , candidate ) ) ; } else { if ( ( entry . getValue ( ) . getFirstPartCount ( ) ) == 0 ) { sum . put ( entry . getKey ( ) , null ) ; } else { sum . put ( entry . getKey ( ) , candidate ) ; } } } } } } } SumCallBack aSumCallBack = new SumCallBack ( ) ; table . coprocessorService ( org . apache . hadoop . hbase . protobuf
org . junit . Assert . assertEquals ( results , sums )
supports_route_action_create ( ) { when ( update . getType ( ) ) . thenReturn ( ObjectType . ROUTE ) ; when ( update . getAction ( ) ) . thenReturn ( Action . CREATE ) ; "<AssertPlaceHolder>" ; } supports ( net . ripe . db . whois . update . domain . PreparedUpdate ) { return ( update . getAction ( ) . equals ( Action . CREATE ) ) && ( ( update . getType ( ) . equals ( ObjectType . ROUTE ) ) || ( update . getType ( ) . equals ( ObjectType . ROUTE6 ) ) ) ; }
org . junit . Assert . assertThat ( subject . supports ( update ) , org . hamcrest . Matchers . is ( true ) )
testDynamicQueryByProjectionMissing ( ) { com . liferay . portal . kernel . dao . orm . DynamicQuery dynamicQuery = com . liferay . portal . kernel . dao . orm . DynamicQueryFactoryUtil . forClass ( com . liferay . portal . kernel . model . LayoutPrototype . class , _dynamicQueryClassLoader ) ; dynamicQuery . setProjection ( com . liferay . portal . kernel . dao . orm . ProjectionFactoryUtil . property ( "layoutPrototypeId" ) ) ; dynamicQuery . add ( com . liferay . portal . kernel . dao . orm . RestrictionsFactoryUtil . in ( "layoutPrototypeId" , new java . lang . Object [ ] { com . liferay . portal . kernel . test . util . RandomTestUtil . nextLong ( ) } ) ) ; java . util . List < java . lang . Object > result = _persistence . findWithDynamicQuery ( dynamicQuery ) ; "<AssertPlaceHolder>" ; } size ( ) { if ( ( _workflowTaskAssignees ) != null ) { return _workflowTaskAssignees . size ( ) ; } return _kaleoTaskAssignmentInstanceLocalService . getKaleoTaskAssignmentInstancesCount ( _kaleoTaskInstanceToken . getKaleoTaskInstanceTokenId ( ) ) ; }
org . junit . Assert . assertEquals ( 0 , result . size ( ) )
testJsonSerialization ( ) { org . batfish . datamodel . routing_policy . expr . FirstMatchChain fmc = new org . batfish . datamodel . routing_policy . expr . FirstMatchChain ( com . google . common . collect . ImmutableList . of ( BooleanExprs . TRUE ) ) ; "<AssertPlaceHolder>" ; } clone ( java . lang . Object , java . lang . Class ) { return org . batfish . common . util . BatfishObjectMapper . MAPPER . readValue ( org . batfish . common . util . BatfishObjectMapper . WRITER . writeValueAsBytes ( o ) , clazz ) ; }
org . junit . Assert . assertThat ( org . batfish . common . util . BatfishObjectMapper . clone ( fmc , org . batfish . datamodel . routing_policy . expr . FirstMatchChain . class ) , org . hamcrest . Matchers . equalTo ( fmc ) )
testHaving1 ( ) { java . lang . String sql = "SELECT<sp>AVG(age),<sp>MIN(age),<sp>MAX(age),<sp>age<sp>FROM<sp>t_user<sp>GROUP<sp>BY<sp>age<sp>HAVING<sp>age<sp>><sp>20<sp>ORDER<sp>BY<sp>age<sp>DESC<sp>LIMIT<sp>0,<sp>15" ; com . bj58 . sql . parser . SQLParser parser = new com . bj58 . sql . parser . SQLParser ( ) ; com . bj58 . sql . parser . CursorNode node = ( ( com . bj58 . sql . parser . CursorNode ) ( parser . parseStatement ( sql ) ) ) ; "<AssertPlaceHolder>" ; com . bj58 . sql . parser . SelectNode selectNode = ( ( com . bj58 . sql . parser . SelectNode ) ( node . getResultSetNode ( ) ) ) ; com . bj58 . sql . parser . ValueNode valueNode = selectNode . getHavingClause ( ) ; com . bj58 . oceanus . core . script . ScriptExecutor < java . lang . Boolean > executor = new com . bj58 . oceanus . core . script . InterpretedScriptExecutor < java . lang . Boolean > ( ) ; } parseStatement ( java . lang . String ) { try { reinit ( sqlText ) ; return parser . parseStatement ( sqlText , parameterList ) ; } catch ( com . bj58 . sql . parser . ParseException ex ) { throw new com . bj58 . sql . parser . SQLParserException ( com . bj58 . sql . parser . SQLParser . standardizeEol ( ex . getMessage ( ) ) , ex , com . bj58 . sql . parser . SQLParser . tokenErrorPosition ( ex . currentToken , sqlText ) ) ; } catch ( com . bj58 . sql . parser . TokenMgrError ex ) { parser = null ; if ( ( ex . errorCode ) == ( TokenMgrError . LEXICAL_ERROR ) ) throw new com . bj58 . sql . parser . SQLParserException ( ex . getMessage ( ) , ex , com . bj58 . sql . parser . SQLParser . lineColumnErrorPosition ( ex . errorLine , ex . errorColumn , sqlText ) ) ; else throw new com . bj58 . sql . StandardException ( ex ) ; } }
org . junit . Assert . assertNotNull ( node )
testErrorsWithAttemptsIsBoundedByMaxAttempts ( ) { rx . Observable < java . lang . Throwable > errors = rx . Observable . < java . lang . Throwable > just ( new com . couchbase . client . java . error . CannotRetryException ( "" ) ) . repeat ( 100 ) ; rx . Observable < com . couchbase . client . core . lang . Tuple2 < java . lang . Integer , java . lang . Throwable > > errorsWithAttempts = com . couchbase . client . java . util . retry . Retry . errorsWithAttempts ( errors , 10 ) ; java . util . List < com . couchbase . client . core . lang . Tuple2 < java . lang . Integer , java . lang . Throwable > > list = errorsWithAttempts . toList ( ) . toBlocking ( ) . first ( ) ; "<AssertPlaceHolder>" ; } size ( ) { return resultList . size ( ) ; }
org . junit . Assert . assertEquals ( 10 , list . size ( ) )
testIsSaturated_MissingHydrogens_Methane ( ) { org . openscience . cdk . interfaces . IAtomContainer mol = new org . openscience . cdk . AtomContainer ( ) ; org . openscience . cdk . tools . CDKValencyChecker checker = org . openscience . cdk . tools . CDKValencyChecker . getInstance ( mol . getBuilder ( ) ) ; org . openscience . cdk . Atom c = new org . openscience . cdk . Atom ( "C" ) ; mol . addAtom ( c ) ; c . setImplicitHydrogenCount ( 3 ) ; findAndConfigureAtomTypesForAllAtoms ( mol ) ; "<AssertPlaceHolder>" ; } isSaturated ( org . openscience . cdk . interfaces . IAtomContainer ) { return allSaturated ( container ) ; }
org . junit . Assert . assertFalse ( checker . isSaturated ( mol ) )
testDeserialize ( ) { java . io . ByteArrayOutputStream payload = new java . io . ByteArrayOutputStream ( ) ; org . msgpack . core . MessagePack . newDefaultPacker ( payload ) . packLong ( tabPage . getId ( ) ) . close ( ) ; byte [ ] payloadContents = payload . toByteArray ( ) ; java . io . ByteArrayOutputStream out = new java . io . ByteArrayOutputStream ( ) ; org . msgpack . core . MessagePack . newDefaultPacker ( out ) . packExtensionTypeHeader ( com . neovim . TabPageTest . EXT_TYPE , payloadContents . length ) . writePayload ( payloadContents ) . close ( ) ; com . neovim . TabPage t = objectMapper . readValue ( out . toByteArray ( ) , com . neovim . TabPage . class ) ; "<AssertPlaceHolder>" ; } close ( ) { }
org . junit . Assert . assertThat ( t , org . hamcrest . core . Is . is ( tabPage ) )
shouldThrowValidationExceptionIfTheJsonEnvelopeContainsNoMetadata ( ) { final uk . gov . justice . services . messaging . JsonEnvelope jsonEnvelope = mock ( uk . gov . justice . services . messaging . JsonEnvelope . class ) ; when ( jsonEnvelope . metadata ( ) ) . thenReturn ( null ) ; try { envelopeInspector . getMetadataFor ( jsonEnvelope ) ; org . junit . Assert . fail ( ) ; } catch ( final uk . gov . justice . services . core . envelope . EnvelopeValidationException expected ) { "<AssertPlaceHolder>" ; } } getMetadataFor ( uk . gov . justice . services . messaging . JsonEnvelope ) { final uk . gov . justice . services . messaging . Metadata metadata = jsonEnvelope . metadata ( ) ; if ( metadata != null ) { return metadata ; } throw new uk . gov . justice . services . core . envelope . EnvelopeValidationException ( "Metadata<sp>not<sp>set<sp>in<sp>the<sp>envelope." ) ; }
org . junit . Assert . assertThat ( expected . getMessage ( ) , org . hamcrest . CoreMatchers . is ( "Metadata<sp>not<sp>set<sp>in<sp>the<sp>envelope." ) )
printConnectionDetails_connectedToMongo_auths ( ) { final org . springframework . shell . Bootstrap bootstrap = getTestBootstrap ( ) ; final org . springframework . shell . core . JLineShellComponent shell = getTestShell ( ) ; final org . springframework . context . ApplicationContext context = bootstrap . getApplicationContext ( ) ; final org . apache . rya . shell . util . PasswordPrompt mockPrompt = context . getBean ( org . apache . rya . shell . util . PasswordPrompt . class ) ; when ( mockPrompt . getPassword ( ) ) . thenReturn ( "password" . toCharArray ( ) ) ; final java . lang . String cmd = ( ( ( ( ( ( ( ( RyaConnectionCommands . CONNECT_MONGO_CMD ) + "<sp>" ) + "--hostname<sp>" ) + ( super . getMongoHostname ( ) ) ) + "<sp>" ) + "--port<sp>" ) + ( super . getMongoPort ( ) ) ) + "<sp>" ) + "--username<sp>bob" ; shell . executeCommand ( cmd ) ; final org . springframework . shell . core . CommandResult printResult = shell . executeCommand ( RyaConnectionCommands . PRINT_CONNECTION_DETAILS_CMD ) ; final java . lang . String msg = ( ( java . lang . String ) ( printResult . getResult ( ) ) ) ; final java . lang . String expected = ( ( ( ( ( ( "The<sp>shell<sp>is<sp>connected<sp>to<sp>an<sp>instance<sp>of<sp>MongoDB<sp>using<sp>the<sp>following<sp>parameters:\n" + "<sp>Hostname:<sp>" ) + ( super . getMongoHostname ( ) ) ) + "\n" ) + "<sp>Port:<sp>" ) + ( super . getMongoPort ( ) ) ) + "\n" ) + "<sp>Username:<sp>bob\n" ; "<AssertPlaceHolder>" ; } getMongoPort ( ) { return get ( org . apache . rya . mongodb . MongoDBRdfConfiguration . MONGO_PORT , AbstractMongoDBRdfConfigurationBuilder . DEFAULT_MONGO_PORT ) ; }
org . junit . Assert . assertEquals ( expected , msg )
testQueueIsEmptyAfterCreation ( ) { queue = new org . apache . flume . channel . file . FlumeEventQueue ( backingStore , backingStoreSupplier . getInflightTakes ( ) , backingStoreSupplier . getInflightPuts ( ) ) ; "<AssertPlaceHolder>" ; } removeHead ( long ) { if ( ( backingStore . getSize ( ) ) == 0 ) { return null ; } long value = remove ( 0 , transactionID ) ; com . google . common . base . Preconditions . checkState ( ( value != ( org . apache . flume . channel . file . FlumeEventQueue . EMPTY ) ) , ( "Empty<sp>value<sp>" + ( channelNameDescriptor ) ) ) ; org . apache . flume . channel . file . FlumeEventPointer ptr = org . apache . flume . channel . file . FlumeEventPointer . fromLong ( value ) ; backingStore . decrementFileID ( ptr . getFileID ( ) ) ; return ptr ; }
org . junit . Assert . assertNull ( queue . removeHead ( 0L ) )
testInheritedAbstractMethod ( ) { de . mirkosertic . bytecoder . core . AbstractClassLinkTest . SubClass theSub = new de . mirkosertic . bytecoder . core . AbstractClassLinkTest . SubClass ( ) ; int theResult = theSub . compute ( ) ; "<AssertPlaceHolder>" ; } compute ( ) { java . util . stream . Nodes . SizedCollectorTask < P_IN , P_OUT , T_SINK , K > task = this ; java . util . Spliterator < P_IN > rightSplit = spliterator ; java . util . Spliterator < P_IN > leftSplit ; while ( ( ( rightSplit . estimateSize ( ) ) > ( task . targetSize ) ) && ( ( leftSplit = rightSplit . trySplit ( ) ) != null ) ) { task . setPendingCount ( 1 ) ; long leftSplitSize = leftSplit . estimateSize ( ) ; task . makeChild ( leftSplit , task . offset , leftSplitSize ) . fork ( ) ; task = task . makeChild ( rightSplit , ( ( task . offset ) + leftSplitSize ) , ( ( task . length ) - leftSplitSize ) ) ; } assert ( ( task . offset ) + ( task . length ) ) < ( java . util . stream . Nodes . MAX_ARRAY_SIZE ) ; @ java . util . stream . SuppressWarnings ( "unchecked" ) T_SINK sink = ( ( T_SINK ) ( task ) ) ; task . helper . wrapAndCopyInto ( sink , rightSplit ) ; task . propagateCompletion ( ) ; }
org . junit . Assert . assertEquals ( 1000 , theResult , 0 )
testTransformListIsCalled ( ) { org . hibernate . search . FullTextSession s = org . hibernate . search . Search . getFullTextSession ( openSession ( ) ) ; prepEmployeeIndex ( s ) ; org . hibernate . Transaction tx ; s . clear ( ) ; tx = s . beginTransaction ( ) ; org . apache . lucene . queryparser . classic . QueryParser parser = new org . apache . lucene . queryparser . classic . QueryParser ( "dept" , org . hibernate . search . testsupport . TestConstants . standardAnalyzer ) ; org . apache . lucene . search . Query query = parser . parse ( "dept:ITech" ) ; org . hibernate . search . org . hibernate . search . FullTextQuery hibQuery = s . createFullTextQuery ( query , org . hibernate . search . test . query . Employee . class ) ; hibQuery . setProjection ( "id" , "lastname" , "dept" , FullTextQuery . THIS , FullTextQuery . SCORE , FullTextQuery . ID ) ; hibQuery . setSort ( new org . apache . lucene . search . Sort ( new org . apache . lucene . search . SortField ( "id" , SortField . Type . STRING ) , org . apache . lucene . search . SortField . FIELD_SCORE ) ) ; final org . hibernate . search . test . query . CounterCallsProjectionToMapResultTransformer counters = new org . hibernate . search . test . query . CounterCallsProjectionToMapResultTransformer ( ) ; hibQuery . setResultTransformer ( counters ) ; hibQuery . list ( ) ; "<AssertPlaceHolder>" ; for ( java . lang . Object element : s . createQuery ( ( "from<sp>" + ( org . hibernate . search . test . query . Employee . class . getName ( ) ) ) ) . list ( ) ) { s . delete ( element ) ; } tx . commit ( ) ; s . close ( ) ; } getTransformListCounter ( ) { return transformListCounter ; }
org . junit . Assert . assertEquals ( counters . getTransformListCounter ( ) , 1 )
next_strategyReturnsNonEmptyOptional_returnsThatElement ( ) { java . lang . String element = "element" ; when ( strategy . goToNextNode ( ) ) . thenReturn ( java . util . Optional . of ( element ) ) ; java . lang . String nextElement = treeIterator . next ( ) ; "<AssertPlaceHolder>" ; } next ( ) { I nextElement = getInnerIterator ( ) . next ( ) ; return transformToOuter ( nextElement ) ; }
org . junit . Assert . assertSame ( element , nextElement )
testAppVersion ( ) { org . eclipse . swt . widgets . Display . setAppVersion ( "v1.3" ) ; "<AssertPlaceHolder>" ; } getAppVersion ( ) { org . eclipse . rap . rwt . service . UISession session = org . eclipse . rap . rwt . internal . service . ContextProvider . getUISession ( ) ; return ( ( java . lang . String ) ( session . getAttribute ( org . eclipse . swt . widgets . Display . APP_VERSION ) ) ) ; }
org . junit . Assert . assertEquals ( "v1.3" , org . eclipse . swt . widgets . Display . getAppVersion ( ) )
isNullOrWhiteSpace_input_white_space ( ) { java . lang . String value = "<sp>" ; java . lang . Boolean expResult = true ; new mockit . Expectations ( ) { com . microsoft . azure . sdk . iot . provisioning . service . Tools tools ; { tools . isNullOrEmpty ( anyString ) ; } } ; java . lang . Boolean result = com . microsoft . azure . sdk . iot . provisioning . service . Tools . isNullOrWhiteSpace ( value ) ; "<AssertPlaceHolder>" ; } isNullOrWhiteSpace ( java . lang . String ) { java . lang . Boolean retVal ; if ( value == null ) { retVal = true ; } else { retVal = com . microsoft . azure . sdk . iot . provisioning . service . Tools . isNullOrEmpty ( value . trim ( ) ) ; } return retVal ; }
org . junit . Assert . assertEquals ( expResult , result )
test5 ( ) { final java . util . List < ? > result = org . eclipse . packagedrone . repo . utils . Splits . split ( java . util . Arrays . asList ( 1 , 2 , 3 ) , 10 , 10 ) ; "<AssertPlaceHolder>" ; } size ( ) { return new org . eclipse . packagedrone . utils . rpm . signature . SignatureProcessor ( ) { private long headerSize ; private long payloadSize ; @ org . eclipse . packagedrone . utils . rpm . signature . Override public void feedHeader ( final java . nio . ByteBuffer header ) { this . headerSize = header . remaining ( ) ; } @ org . eclipse . packagedrone . utils . rpm . signature . Override public void feedPayloadData ( final java . nio . ByteBuffer data ) { this . payloadSize += data . remaining ( ) ; } @ org . eclipse . packagedrone . utils . rpm . signature . Override public void finish ( final org . eclipse . packagedrone . utils . rpm . header . Header < org . eclipse . packagedrone . utils . rpm . RpmSignatureTag > signature ) { signature . putSize ( ( ( this . headerSize ) + ( this . payloadSize ) ) , RpmSignatureTag . SIZE , RpmSignatureTag . LONGSIZE ) ; } } ; }
org . junit . Assert . assertEquals ( 0 , result . size ( ) )
testParser_HEAD_5 ( ) { final org . opendaylight . protocol . rsvp . parser . impl . te . SenderTspecObjectParser parser = new org . opendaylight . protocol . rsvp . parser . impl . te . SenderTspecObjectParser ( ) ; final org . opendaylight . yang . gen . v1 . urn . opendaylight . params . xml . ns . yang . rsvp . rev150820 . RsvpTeObject obj = parser . parseObject ( io . netty . buffer . Unpooled . copiedBuffer ( org . opendaylight . protocol . util . ByteArray . subByte ( TEObjectUtil . TE_LSP_SENDER_TSPEC , 4 , ( ( TEObjectUtil . TE_LSP_SENDER_TSPEC . length ) - 4 ) ) ) ) ; final io . netty . buffer . ByteBuf output = io . netty . buffer . Unpooled . buffer ( ) ; parser . serializeObject ( obj , output ) ; "<AssertPlaceHolder>" ; } getAllBytes ( io . netty . buffer . ByteBuf ) { return org . opendaylight . protocol . util . ByteArray . getBytes ( buffer , buffer . readableBytes ( ) ) ; }
org . junit . Assert . assertArrayEquals ( TEObjectUtil . TE_LSP_SENDER_TSPEC , org . opendaylight . protocol . util . ByteArray . getAllBytes ( output ) )