| input (stringlengths 28–18.7k) | output (stringlengths 39–1.69k) |
|---|---|
testUMALauncherError ( ) { java . lang . String classpath = org . apache . hadoop . yarn . applications . unmanagedamlauncher . TestUnmanagedAMLauncher . getTestRuntimeClasspath ( ) ; java . lang . String javaHome = java . lang . System . getenv ( "JAVA_HOME" ) ; if ( javaHome == null ) { org . apache . hadoop . yarn . applications . unmanagedamlauncher . TestUnmanagedAMLauncher . LOG . fatal ( "JAVA_HOME<sp>not<sp>defined.<sp>Test<sp>not<sp>running." ) ; return ; } java . lang . String [ ] args = new java . lang . String [ ] { "--classpath" , classpath , "--queue" , "default" , "--cmd" , ( ( javaHome + "/bin/java<sp>-Xmx512m<sp>" ) + ( org . apache . hadoop . yarn . applications . unmanagedamlauncher . TestUnmanagedAMLauncher . class . getCanonicalName ( ) ) ) + "--classpath" 0 } ; org . apache . hadoop . yarn . applications . unmanagedamlauncher . TestUnmanagedAMLauncher . LOG . info ( "Initializing<sp>Launcher" ) ; org . apache . hadoop . yarn . applications . unmanagedamlauncher . UnmanagedAMLauncher launcher = new org . apache . hadoop . yarn . applications . unmanagedamlauncher . UnmanagedAMLauncher ( new org . apache . hadoop . conf . Configuration ( org . apache . hadoop . yarn . applications . unmanagedamlauncher . TestUnmanagedAMLauncher . yarnCluster . getConfig ( ) ) ) ; boolean initSuccess = launcher . init ( args ) ; "<AssertPlaceHolder>" ; org . apache . hadoop . yarn . applications . unmanagedamlauncher . TestUnmanagedAMLauncher . LOG . info ( "Running<sp>Launcher" ) ; try { launcher . run ( ) ; org . junit . Assert . fail ( "Expected<sp>an<sp>exception<sp>to<sp>occur<sp>as<sp>launch<sp>should<sp>have<sp>failed" ) ; } catch ( java . lang . RuntimeException e ) { } } init ( java . lang . String [ ] ) { org . apache . commons . cli . Options opts = new org . apache . commons . cli . Options ( ) ; opts . addOption ( "No<sp>args<sp>specified<sp>for<sp>client<sp>to<sp>initialize" 3 , true , "No<sp>args<sp>specified<sp>for<sp>client<sp>to<sp>initialize" 5 ) ; opts . addOption ( "priority" , true , "Application<sp>Priority.<sp>Default<sp>0" ) ; opts . addOption ( "queue" , true , "No<sp>args<sp>specified<sp>for<sp>client<sp>to<sp>initialize" 6 ) ; opts . addOption ( "master_memory" , true , "No<sp>args<sp>specified<sp>for<sp>client<sp>to<sp>initialize" 1 ) ; opts . addOption ( "cmd" , true , "No<sp>args<sp>specified<sp>for<sp>client<sp>to<sp>initialize" 7 ) ; opts . addOption ( "No<sp>args<sp>specified<sp>for<sp>client<sp>to<sp>initialize" 8 , true , "No<sp>args<sp>specified<sp>for<sp>client<sp>to<sp>initialize" 4 ) ; opts . addOption ( "No<sp>args<sp>specified<sp>for<sp>client<sp>to<sp>initialize" 0 , false , "Print<sp>usage" ) ; org . apache . commons . cli . CommandLine cliParser = new org . apache . commons . cli . GnuParser ( ) . parse ( opts , args ) ; if ( ( args . length ) == 0 ) { printUsage ( opts ) ; throw new java . lang . IllegalArgumentException ( "No<sp>args<sp>specified<sp>for<sp>client<sp>to<sp>initialize" ) ; } if ( cliParser . hasOption ( "No<sp>args<sp>specified<sp>for<sp>client<sp>to<sp>initialize" 0 ) ) { printUsage ( opts ) ; return false ; } appName = cliParser . getOptionValue ( "No<sp>args<sp>specified<sp>for<sp>client<sp>to<sp>initialize" 3 , "UnmanagedAM" ) ; amPriority = java . lang . Integer . parseInt ( cliParser . getOptionValue ( "priority" , "No<sp>args<sp>specified<sp>for<sp>client<sp>to<sp>initialize" 2 ) ) ; amQueue = cliParser . getOptionValue ( "queue" , "default" ) ; classpath = cliParser . 
getOptionValue ( "No<sp>args<sp>specified<sp>for<sp>client<sp>to<sp>initialize" 8 , null ) ; amCmd = cliParser . getOptionValue ( "cmd" ) ; if ( ( amCmd ) == null ) { printUsage ( opts ) ; throw new java . lang . IllegalArgumentException ( "No<sp>cmd<sp>specified<sp>for<sp>application<sp>master" ) ; } org . apache . hadoop . yarn . conf . YarnConfiguration yarnConf = new org . apache . hadoop . yarn . conf . YarnConfiguration ( conf ) ; rmClient = org . apache . hadoop . yarn . client . api . YarnClient . createYarnClient ( ) ; rmClient . init ( yarnConf ) ; return true ; }
|
org . junit . Assert . assertTrue ( initSuccess )
|
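For readability, the pair above can be sketched in plain Java: the cells are space-separated tokens, `<sp>` appears to stand for a literal space inside strings, and the output column is the assertion that replaces `"<AssertPlaceHolder>"`. The sketch assumes the original `TestUnmanagedAMLauncher` class's `LOG` and `yarnCluster` fields, and leaves the abstracted literal in the args array as a comment rather than guessing it:

```java
// Hedged sketch of testUMALauncherError() with the output-column assertion inlined.
@Test
public void testUMALauncherError() throws Exception {
    String classpath = TestUnmanagedAMLauncher.getTestRuntimeClasspath();
    String javaHome = System.getenv("JAVA_HOME");
    if (javaHome == null) {
        LOG.fatal("JAVA_HOME not defined. Test not running.");
        return;
    }
    String[] args = new String[] {
        "--classpath", classpath,
        "--queue", "default",
        "--cmd", javaHome + "/bin/java -Xmx512m "
                + TestUnmanagedAMLauncher.class.getCanonicalName()
                /* + abstracted literal from the source row */
    };
    LOG.info("Initializing Launcher");
    UnmanagedAMLauncher launcher =
            new UnmanagedAMLauncher(new Configuration(yarnCluster.getConfig()));
    boolean initSuccess = launcher.init(args);
    org.junit.Assert.assertTrue(initSuccess);   // <-- output column
    LOG.info("Running Launcher");
    try {
        launcher.run();
        org.junit.Assert.fail("Expected an exception to occur as launch should have failed");
    } catch (RuntimeException e) {
        // expected: the launch is supposed to fail
    }
}
```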
latestsBuildsPortlet_onlyLatest ( ) { org . jenkinsci . test . acceptance . plugins . dashboard_view . DashboardView v = createDashboardView ( ) ; org . jenkinsci . test . acceptance . plugins . dashboard_view . LatestBuildsPortlet latestBuilds = v . addBottomPortlet ( org . jenkinsci . test . acceptance . plugins . dashboard_view . LatestBuildsPortlet . class ) ; v . save ( ) ; org . jenkinsci . test . acceptance . po . FreeStyleJob job = createFreeStyleJob ( ) ; for ( int i = 0 ; i <= ( ( org . jenkinsci . test . acceptance . plugins . dashboard_view . LatestBuildsPortlet . NUMBER_OF_BUILDS ) + 1 ) ; i ++ ) buildSuccessfulJob ( job ) ; v . open ( ) ; "<AssertPlaceHolder>" ; } hasBuild ( int ) { try { return ! ( getTable ( ) . findElements ( org . openqa . selenium . By . linkText ( ( "#" + ( java . lang . String . valueOf ( buildNr ) ) ) ) ) . isEmpty ( ) ) ; } catch ( org . openqa . selenium . NoSuchElementException e ) { return false ; } }
|
org . junit . Assert . assertThat ( latestBuilds . hasBuild ( 1 ) , is ( false ) )
|
testGetPatternFinderFunctionStringEListOfCharactersMappingCase3 ( ) { java . lang . String expectedResult = null ; org . talend . dq . dbms . NetezzaDbmsLanguage netezzaDbmsLanguage = ( ( org . talend . dq . dbms . NetezzaDbmsLanguage ) ( org . talend . dq . dbms . DbmsLanguageFactory . createDbmsLanguage ( SupportDBUrlType . NETEZZADEFAULTURL ) ) ) ; org . talend . dataquality . indicators . definition . IndicatorDefinition createIndicatorDefinition = DefinitionFactory . eINSTANCE . createIndicatorDefinition ( ) ; org . eclipse . emf . common . util . EList < org . talend . dataquality . indicators . definition . CharactersMapping > charactersMapping = createIndicatorDefinition . getCharactersMapping ( ) ; org . talend . dataquality . indicators . definition . CharactersMapping createCharactersMapping = DefinitionFactory . eINSTANCE . createCharactersMapping ( ) ; createCharactersMapping . setLanguage ( SupportDBUrlType . MYSQLDEFAULTURL . getLanguage ( ) ) ; createCharactersMapping . setCharactersToReplace ( "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789MySql" ) ; createCharactersMapping . setReplacementCharacters ( "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA9999999999MySql" ) ; charactersMapping . add ( createCharactersMapping ) ; java . lang . String patternFinderFunction = netezzaDbmsLanguage . getPatternFinderFunction ( "name" , charactersMapping ) ; "<AssertPlaceHolder>" ; } getPatternFinderFunction ( java . lang . String , org . eclipse . emf . common . util . EList ) { org . talend . dataquality . indicators . definition . CharactersMapping charactersMap = adaptCharactersMapping ( charactersMapping ) ; if ( charactersMap == null ) { return null ; } return this . getPatternFinderFunction ( colName , charactersMap . getCharactersToReplace ( ) , charactersMap . getReplacementCharacters ( ) ) ; }
|
org . junit . Assert . assertEquals ( expectedResult , patternFinderFunction )
|
shouldFindPenaltyFrequencyEntityByType ( ) { org . mifos . accounts . penalties . business . PenaltyFrequencyEntity found = penaltyDao . findPenaltyFrequencyEntityByType ( org . mifos . accounts . penalties . util . helpers . PenaltyFrequency . getPenaltyFrequencyType ( frequencyEntity . getId ( ) ) ) ; "<AssertPlaceHolder>" ; } getId ( ) { return id ; }
|
org . junit . Assert . assertThat ( found . getId ( ) , org . hamcrest . CoreMatchers . is ( frequencyEntity . getId ( ) ) )
|
testModifyColumnFamily ( ) { org . apache . hadoop . hbase . client . Admin admin = org . apache . hadoop . hbase . master . procedure . TestTableDescriptorModificationFromClient . TEST_UTIL . getAdmin ( ) ; org . apache . hadoop . hbase . HColumnDescriptor cfDescriptor = new org . apache . hadoop . hbase . HColumnDescriptor ( org . apache . hadoop . hbase . master . procedure . TestTableDescriptorModificationFromClient . FAMILY_0 ) ; int blockSize = cfDescriptor . getBlocksize ( ) ; org . apache . hadoop . hbase . HTableDescriptor baseHtd = new org . apache . hadoop . hbase . HTableDescriptor ( org . apache . hadoop . hbase . master . procedure . TestTableDescriptorModificationFromClient . TABLE_NAME ) ; baseHtd . addFamily ( cfDescriptor ) ; admin . createTable ( baseHtd ) ; admin . disableTable ( org . apache . hadoop . hbase . master . procedure . TestTableDescriptorModificationFromClient . TABLE_NAME ) ; try { verifyTableDescriptor ( org . apache . hadoop . hbase . master . procedure . TestTableDescriptorModificationFromClient . TABLE_NAME , org . apache . hadoop . hbase . master . procedure . TestTableDescriptorModificationFromClient . FAMILY_0 ) ; int newBlockSize = 2 * blockSize ; cfDescriptor . setBlocksize ( newBlockSize ) ; admin . modifyColumnFamily ( org . apache . hadoop . hbase . master . procedure . TestTableDescriptorModificationFromClient . TABLE_NAME , cfDescriptor ) ; org . apache . hadoop . hbase . HTableDescriptor htd = new org . apache . hadoop . hbase . HTableDescriptor ( admin . getDescriptor ( org . apache . hadoop . hbase . master . procedure . TestTableDescriptorModificationFromClient . TABLE_NAME ) ) ; org . apache . hadoop . hbase . HColumnDescriptor hcfd = htd . getFamily ( org . apache . hadoop . hbase . master . procedure . TestTableDescriptorModificationFromClient . FAMILY_0 ) ; "<AssertPlaceHolder>" ; } finally { admin . deleteTable ( org . apache . hadoop . hbase . master . procedure . TestTableDescriptorModificationFromClient . TABLE_NAME ) ; } } getBlocksize ( ) { return blocksize ; }
|
org . junit . Assert . assertTrue ( ( ( hcfd . getBlocksize ( ) ) == newBlockSize ) )
|
advanceWithoutHandlerThrowsRuntimeExceptionTest ( ) { java . lang . RuntimeException failure = new org . threadly . util . SuppressedStackRuntimeException ( ) ; scheduler . execute ( new org . threadly . concurrent . TestRuntimeFailureRunnable ( failure ) ) ; try { scheduler . advance ( 10 ) ; org . junit . Assert . fail ( "Exception<sp>should<sp>have<sp>thrown" ) ; } catch ( java . lang . Exception e ) { "<AssertPlaceHolder>" ; } } fail ( java . lang . Throwable ) { synchronized ( notifyLock ) { failure = new org . threadly . test . concurrent . AsyncVerifier . TestFailure ( cause ) ; notifyLock . notifyAll ( ) ; } throw failure ; }
|
org . junit . Assert . assertTrue ( ( e == failure ) )
|
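A readable sketch of the pair above, with the output assertion inlined; `scheduler` is assumed to be a field of the original org.threadly test class:

```java
RuntimeException failure = new SuppressedStackRuntimeException();
scheduler.execute(new TestRuntimeFailureRunnable(failure));
try {
    scheduler.advance(10);
    org.junit.Assert.fail("Exception should have thrown");
} catch (Exception e) {
    org.junit.Assert.assertTrue(e == failure);   // <-- output column
}
```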
test ( ) { java . util . Random random = new java . util . Random ( _seed ) ; int numBits = random . nextInt ( 10000000 ) ; org . apache . lucene . util . FixedBitSet fixedBitSet = new org . apache . lucene . util . FixedBitSet ( numBits ) ; populate ( random , numBits , fixedBitSet ) ; java . lang . String id = "id" ; java . lang . String segmentName = "seg1" ; org . apache . lucene . store . RAMDirectory directory = new org . apache . lucene . store . RAMDirectory ( ) ; org . apache . blur . filter . IndexFileBitSet indexFileBitSet = new org . apache . blur . filter . IndexFileBitSet ( numBits , id , segmentName , directory ) ; "<AssertPlaceHolder>" ; indexFileBitSet . create ( fixedBitSet . iterator ( ) ) ; indexFileBitSet . load ( ) ; checkEquals ( fixedBitSet . iterator ( ) , indexFileBitSet . iterator ( ) , numBits ) ; indexFileBitSet . close ( ) ; java . lang . String [ ] listAll = directory . listAll ( ) ; for ( java . lang . String s : listAll ) { System . out . println ( ( ( s + "<sp>" ) + ( directory . fileLength ( s ) ) ) ) ; } } exists ( ) { boolean fileExists = _directory . fileExists ( getFileName ( ) ) ; if ( fileExists ) { int words = ( ( _numBits ) / 64 ) + 1 ; int correctLength = words * 8 ; long length = _indexInput . length ( ) ; if ( correctLength == length ) { return true ; } } return false ; }
|
org . junit . Assert . assertFalse ( indexFileBitSet . exists ( ) )
|
iShouldHaveResizePictureConverterRegistered ( ) { org . nuxeo . ecm . core . convert . api . ConverterCheckResult check = org . nuxeo . runtime . api . Framework . getService ( org . nuxeo . ecm . core . convert . api . ConversionService . class ) . isConverterAvailable ( ImagingConvertConstants . OPERATION_RESIZE ) ; "<AssertPlaceHolder>" ; } isAvailable ( ) { return available ; }
|
org . junit . Assert . assertTrue ( check . isAvailable ( ) )
|
testLoadVertexProperties ( ) { org . apache . tinkerpop . gremlin . structure . Vertex marko = this . sqlgGraph . addVertex ( T . label , "Person" , "name" , "marko" ) ; this . sqlgGraph . tx ( ) . commit ( ) ; marko = this . sqlgGraph . traversal ( ) . V ( marko . id ( ) ) . next ( ) ; "<AssertPlaceHolder>" ; } property ( java . lang . String ) { this . sqlgGraph . tx ( ) . readWrite ( ) ; if ( this . removed ) { throw new java . lang . IllegalStateException ( java . lang . String . format ( "Vertex<sp>with<sp>id<sp>%s<sp>was<sp>removed." , id ( ) . toString ( ) ) ) ; } else { if ( ! ( sqlgGraph . tx ( ) . isInBatchMode ( ) ) ) { org . umlg . sqlg . structure . SqlgVertex sqlgVertex = this . sqlgGraph . tx ( ) . putVertexIfAbsent ( this ) ; if ( sqlgVertex != ( this ) ) { this . properties = sqlgVertex . properties ; } } return ( ( org . umlg . sqlg . structure . VertexProperty < V > ) ( super . property ( key ) ) ) ; } }
|
org . junit . Assert . assertEquals ( "marko" , marko . property ( "name" ) . value ( ) )
|
testRangeCurrentRowUnboundedFollowing ( ) { java . lang . String sqlText = java . lang . String . format ( ( "<sp>100<sp>|<sp>3<sp>|<sp>55000<sp>|293000<sp>|\n" 7 + ( "sum(salary)<sp>over<sp>(Partition<sp>by<sp>dept<sp>ORDER<sp>BY<sp>salary<sp>range<sp>between<sp>current<sp>row<sp>and<sp>unbounded<sp>following)<sp>" + "from<sp>%s<sp>--SPLICE-PROPERTIES<sp>useSpark<sp>=<sp>%s<sp>\n<sp>order<sp>by<sp>dept,<sp>empnum" ) ) , this . getTableReference ( com . splicemachine . derby . impl . sql . execute . operations . WindowFunctionIT . EMPTAB ) , useSpark ) ; java . sql . ResultSet rs = com . splicemachine . derby . impl . sql . execute . operations . WindowFunctionIT . methodWatcher . executeQuery ( sqlText ) ; java . lang . String expected = "EMPNUM<sp>|DEPT<sp>|SALARY<sp>|<sp>4<sp>|\n" + ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( "------------------------------\n" + "<sp>10<sp>|<sp>1<sp>|<sp>50000<sp>|436000<sp>|\n" ) + "<sp>100<sp>|<sp>3<sp>|<sp>55000<sp>|293000<sp>|\n" 2 ) + "<sp>100<sp>|<sp>3<sp>|<sp>55000<sp>|293000<sp>|\n" 6 ) + "<sp>100<sp>|<sp>3<sp>|<sp>55000<sp>|293000<sp>|\n" 9 ) + "<sp>100<sp>|<sp>3<sp>|<sp>55000<sp>|293000<sp>|\n" 0 ) + "<sp>100<sp>|<sp>3<sp>|<sp>55000<sp>|293000<sp>|\n" 4 ) + "<sp>100<sp>|<sp>3<sp>|<sp>55000<sp>|293000<sp>|\n" 5 ) + "<sp>40<sp>|<sp>2<sp>|<sp>52000<sp>|157000<sp>|\n" ) + "<sp>44<sp>|<sp>2<sp>|<sp>52000<sp>|157000<sp>|\n" ) + "<sp>49<sp>|<sp>2<sp>|<sp>53000<sp>|<sp>53000<sp>|\n" ) + "<sp>100<sp>|<sp>3<sp>|<sp>55000<sp>|293000<sp>|\n" 8 ) + "<sp>100<sp>|<sp>3<sp>|<sp>55000<sp>|293000<sp>|\n" 1 ) + "<sp>100<sp>|<sp>3<sp>|<sp>55000<sp>|293000<sp>|\n" 3 ) + "<sp>100<sp>|<sp>3<sp>|<sp>55000<sp>|293000<sp>|\n" ) + "<sp>120<sp>|<sp>3<sp>|<sp>75000<sp>|238000<sp>|" ) ; "<AssertPlaceHolder>" ; rs . close ( ) ; } toStringUnsorted ( com . splicemachine . homeless . ResultSet ) { return com . splicemachine . homeless . TestUtils . FormattedResult . ResultFactory . convert ( "" , rs , false ) . toString ( ) . trim ( ) ; }
|
org . junit . Assert . assertEquals ( ( ( "<sp>10<sp>|<sp>1<sp>|<sp>50000<sp>|436000<sp>|\n" 0 + sqlText ) + "<sp>10<sp>|<sp>1<sp>|<sp>50000<sp>|436000<sp>|\n" 0 ) , expected , TestUtils . FormattedResult . ResultFactory . toStringUnsorted ( rs ) )
|
testAdjustTLSContext ( ) { de . rub . nds . tlsattacker . core . protocol . message . extension . PaddingExtensionMessage msg = new de . rub . nds . tlsattacker . core . protocol . message . extension . PaddingExtensionMessage ( ) ; msg . setPaddingBytes ( extensionPayload ) ; handler . adjustTLSContext ( msg ) ; "<AssertPlaceHolder>" ; } getPaddingExtensionBytes ( ) { return paddingExtensionBytes ; }
|
org . junit . Assert . assertArrayEquals ( context . getPaddingExtensionBytes ( ) , extensionPayload )
|
testRemove5 ( ) { org . sfs . block . Range rangeOne = new org . sfs . block . Range ( 0 , 100 ) ; try { org . sfs . block . Range [ ] rangeTwo = rangeOne . remove ( ( - 1 ) , 101 ) ; org . junit . Assert . fail ( ) ; } catch ( java . lang . Throwable e ) { "<AssertPlaceHolder>" ; } } remove ( long , long ) { checkState ( encloses ( first , last ) , "Range<sp>does<sp>not<sp>enclose<sp>this<sp>range" ) ; if ( ( ( this . first ) == first ) && ( ( this . last ) == last ) ) { return new org . sfs . block . Range [ ] { } ; } else if ( ( this . first ) == first ) { return new org . sfs . block . Range [ ] { new org . sfs . block . Range ( ( last + 1 ) , this . last ) } ; } else if ( ( this . last ) == last ) { return new org . sfs . block . Range [ ] { new org . sfs . block . Range ( this . first , ( first - 1 ) ) } ; } else { return new org . sfs . block . Range [ ] { new org . sfs . block . Range ( this . first , ( first - 1 ) ) , new org . sfs . block . Range ( ( last + 1 ) , this . last ) } ; } }
|
org . junit . Assert . assertTrue ( ( e instanceof java . lang . IllegalStateException ) )
|
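A readable sketch of the pair above. The focal `remove(long, long)` guards its arguments with `checkState(encloses(first, last), ...)`, which is why the expected failure is an `IllegalStateException`:

```java
Range rangeOne = new Range(0, 100);
try {
    Range[] rangeTwo = rangeOne.remove(-1, 101);  // outside [0, 100]
    org.junit.Assert.fail();
} catch (Throwable e) {
    org.junit.Assert.assertTrue(e instanceof IllegalStateException);   // <-- output column
}
```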
asInt ( ) { "<AssertPlaceHolder>" ; } asInt ( ) { org . junit . Assert . assertEquals ( 23 , new com . eclipsesource . json . JsonNumber ( "23" ) . asInt ( ) ) ; }
|
org . junit . Assert . assertEquals ( 23 , new com . eclipsesource . json . JsonNumber ( "23" ) . asInt ( ) )
|
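In this pair the focal method shown is the finished test itself, so the output simply restates its body. A sketch (the `JsonNumber` constructor is reachable from the library's own test package, so this is not standalone code):

```java
@Test
public void asInt() {
    org.junit.Assert.assertEquals(23, new JsonNumber("23").asInt());   // <-- output column
}
```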
test ( ) { final com . vladmihalcea . flexypool . model . Book book = transactionTemplate . execute ( new org . springframework . transaction . support . TransactionCallback < com . vladmihalcea . flexypool . model . Book > ( ) { @ com . vladmihalcea . flexypool . adaptor . Override public com . vladmihalcea . flexypool . model . Book doInTransaction ( org . springframework . transaction . TransactionStatus status ) { com . vladmihalcea . flexypool . model . Book book = new com . vladmihalcea . flexypool . model . Book ( ) ; book . setId ( 1L ) ; book . setName ( "High-Performance<sp>Java<sp>Persistence" ) ; entityManager . persist ( book ) ; return book ; } } ) ; transactionTemplate . execute ( new org . springframework . transaction . support . TransactionCallback < java . lang . Void > ( ) { @ com . vladmihalcea . flexypool . adaptor . Override public com . vladmihalcea . flexypool . adaptor . Void doInTransaction ( org . springframework . transaction . TransactionStatus status ) { "<AssertPlaceHolder>" ; return null ; } } ) ; verify ( metricsFactory . getConcurrentConnectionRequestCountHistogram ( ) , atLeastOnce ( ) ) . update ( 1 ) ; verify ( metricsFactory . getConcurrentConnectionRequestCountHistogram ( ) , atLeastOnce ( ) ) . update ( 0 ) ; } getName ( ) { return name ; }
|
org . junit . Assert . assertEquals ( book . getName ( ) , entityManager . find ( com . vladmihalcea . flexypool . model . Book . class , book . getId ( ) ) . getName ( ) )
|
toDateAndClearTimePart ( ) { java . util . Date date = new java . util . Date ( ) ; java . util . Calendar cal = java . util . Calendar . getInstance ( org . slim3 . util . TimeZoneLocator . get ( ) ) ; cal . setTime ( date ) ; cal . set ( Calendar . HOUR_OF_DAY , 0 ) ; cal . set ( Calendar . MINUTE , 0 ) ; cal . set ( Calendar . SECOND , 0 ) ; cal . set ( Calendar . MILLISECOND , 0 ) ; "<AssertPlaceHolder>" ; } toDateAndClearTimePart ( java . lang . Object ) { return org . slim3 . util . DateUtil . clearTimePart ( org . slim3 . util . DateUtil . toDate ( o ) ) ; }
|
org . junit . Assert . assertThat ( org . slim3 . util . DateUtil . toDateAndClearTimePart ( date ) , org . hamcrest . CoreMatchers . is ( cal . getTime ( ) ) )
|
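A readable sketch of the pair above, assuming the slim3 `DateUtil` and `TimeZoneLocator` utilities referenced in the row:

```java
Date date = new Date();
Calendar cal = Calendar.getInstance(TimeZoneLocator.get());
cal.setTime(date);
cal.set(Calendar.HOUR_OF_DAY, 0);
cal.set(Calendar.MINUTE, 0);
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MILLISECOND, 0);
// <-- output column:
org.junit.Assert.assertThat(DateUtil.toDateAndClearTimePart(date),
        org.hamcrest.CoreMatchers.is(cal.getTime()));
```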
testNoPermisions ( ) { java . util . Properties buildProperties = org . eclipse . tycho . p2 . impl . publisher . rootfiles . FeatureRootAdviceTest . createBuildPropertiesWithDefaultRootFiles ( ) ; java . util . List < java . lang . String [ ] > actualPermissions = org . eclipse . tycho . p2 . impl . publisher . rootfiles . FeatureRootAdvicePermissionsTest . createAdviceAndGetPermissions ( buildProperties , org . eclipse . tycho . p2 . impl . publisher . rootfiles . FeatureRootAdviceTest . GLOBAL_SPEC ) ; "<AssertPlaceHolder>" ; } size ( ) { return map . size ( ) ; }
|
org . junit . Assert . assertEquals ( 0 , actualPermissions . size ( ) )
|
testStartsWithPattern ( ) { java . lang . String [ ] bogusPatterns = new java . lang . String [ ] { "/druid*" , "/druid*/what*" , "*/druid*" } ; java . lang . String [ ] bogusSources = new java . lang . String [ ] { "/druid" , "/druid/index.html" , "/druid*/what/xyu" } ; boolean result = false ; for ( int i = 0 ; i < ( bogusSources . length ) ; i ++ ) { for ( int j = 0 ; j < ( bogusPatterns . length ) ; j ++ ) { java . lang . String bogusSource = bogusSources [ i ] ; java . lang . String bogusPattern = bogusPatterns [ j ] ; if ( pathMatcher . matches ( bogusPattern , bogusSource ) ) { result = true ; } if ( result == true ) { break ; } } "<AssertPlaceHolder>" ; result = false ; } } matches ( java . lang . String , java . lang . String ) { if ( ( pattern == null ) || ( source == null ) ) { return false ; } pattern = pattern . trim ( ) ; source = source . trim ( ) ; else if ( pattern . startsWith ( "*" ) ) { int length = ( pattern . length ( ) ) - 1 ; if ( ( ( source . length ( ) ) >= length ) && ( source . endsWith ( pattern . substring ( 1 ) ) ) ) { return true ; } } else if ( pattern . contains ( "*" ) ) { int start = pattern . indexOf ( "*" ) ; int end = pattern . lastIndexOf ( "*" ) ; if ( ( source . startsWith ( pattern . substring ( 0 , start ) ) ) && ( source . endsWith ( pattern . substring ( ( end + 1 ) ) ) ) ) { return true ; } } else { if ( pattern . equals ( source ) ) { return true ; } } return false ; }
|
org . junit . Assert . assertThat ( true , org . hamcrest . CoreMatchers . equalTo ( result ) )
|
testFilter ( ) { edu . usc . irds . sparkler . plugin . RegexURLFilter filter = edu . usc . irds . sparkler . util . TestUtils . newInstance ( edu . usc . irds . sparkler . plugin . RegexURLFilter . class , "urlfilter.regex" ) ; java . util . Map < java . lang . String , java . lang . Boolean > expectations = new java . util . HashMap < java . lang . String , java . lang . Boolean > ( ) { { put ( "http://apache.org" , true ) ; put ( "http://irds.usc.edu" , true ) ; put ( "https://twitter.com/thammegowda" , true ) ; put ( "https://twitter.com/profile.png" , false ) ; put ( "https://twitter.com/profile.PNG" , false ) ; put ( "mailto:" , false ) ; put ( "file:///home/tg/" , false ) ; put ( "https://twitter.com/profile.jpg" , true ) ; put ( "https://twitter.com/profile.JPG" , true ) ; } } ; expectations . forEach ( ( url , expected ) -> "<AssertPlaceHolder>" ) ; } filter ( java . lang . String , java . lang . String ) { try { return new java . net . URL ( child ) . getHost ( ) . equals ( new java . net . URL ( parent ) . getHost ( ) ) ; } catch ( java . net . MalformedURLException e ) { throw new java . lang . RuntimeException ( e ) ; } }
|
org . junit . Assert . assertEquals ( expected , filter . filter ( url , url ) )
|
testEqualsSelf ( ) { final ddf . catalog . validation . impl . validator . SizeValidator validator = new ddf . catalog . validation . impl . validator . SizeValidator ( 13 , 1799 ) ; "<AssertPlaceHolder>" ; } equals ( java . lang . Object ) { if ( ( this ) == o ) { return true ; } if ( ( o == null ) || ( ( getClass ( ) ) != ( o . getClass ( ) ) ) ) { return false ; } ddf . catalog . validation . impl . validator . ISO3CountryCodeValidator that = ( ( ddf . catalog . validation . impl . validator . ISO3CountryCodeValidator ) ( o ) ) ; return new org . apache . commons . lang . builder . EqualsBuilder ( ) . append ( countryCodes , that . countryCodes ) . append ( ignoreCase , that . ignoreCase ) . isEquals ( ) ; }
|
org . junit . Assert . assertThat ( validator . equals ( validator ) , org . hamcrest . Matchers . is ( true ) )
|
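A readable sketch of the pair above (the reflexive-equality case of DDF's `SizeValidator`):

```java
final SizeValidator validator = new SizeValidator(13, 1799);
// <-- output column:
org.junit.Assert.assertThat(validator.equals(validator), org.hamcrest.Matchers.is(true));
```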
toStr ( ) { org . jooby . Response rsp = new org . jooby . Response . Forwarding ( new org . jooby . ResponseTest . ResponseMock ( ) { @ org . jooby . Override public java . lang . String toString ( ) { return "something<sp>something<sp>dark" ; } } ) ; "<AssertPlaceHolder>" ; } toString ( ) { return mainClass ; }
|
org . junit . Assert . assertEquals ( "something<sp>something<sp>dark" , rsp . toString ( ) )
|
testPrint ( ) { hudson . plugins . jacoco . report . MethodReport report = new hudson . plugins . jacoco . report . MethodReport ( ) ; "<AssertPlaceHolder>" ; } printFourCoverageColumns ( ) { java . lang . StringBuilder buf = new java . lang . StringBuilder ( ) ; instruction . setType ( CoverageElement . Type . INSTRUCTION ) ; complexity . setType ( CoverageElement . Type . COMPLEXITY ) ; branch . setType ( CoverageElement . Type . BRANCH ) ; line . setType ( CoverageElement . Type . LINE ) ; method . setType ( CoverageElement . Type . METHOD ) ; printRatioCell ( isFailed ( ) , this . instruction , buf ) ; printRatioCell ( isFailed ( ) , this . branch , buf ) ; printRatioCell ( isFailed ( ) , this . complexity , buf ) ; printRatioCell ( isFailed ( ) , this . line , buf ) ; printRatioCell ( isFailed ( ) , this . method , buf ) ; return buf . toString ( ) ; }
|
org . junit . Assert . assertNotNull ( report . printFourCoverageColumns ( ) )
|
testDropDatabaseWrongLocation ( ) { java . util . Map < java . lang . String , java . util . Collection < java . lang . String > > update = new java . util . HashMap ( ) ; update . put ( org . apache . sentry . service . thrift . TestFullUpdateModifier . DB . toLowerCase ( ) , java . util . Collections . singleton ( org . apache . sentry . service . thrift . TestFullUpdateModifier . PATH ) ) ; org . apache . hadoop . hive . metastore . api . NotificationEvent event = new org . apache . hadoop . hive . metastore . api . NotificationEvent ( 0 , 0 , org . apache . sentry . service . thrift . DROP_DATABASE . toString ( ) , "" ) ; org . apache . hadoop . hive . metastore . messaging . MessageDeserializer deserializer = org . mockito . Mockito . mock ( org . apache . sentry . binding . metastore . messaging . json . SentryJSONMessageDeserializer . class ) ; org . apache . sentry . binding . metastore . messaging . json . SentryJSONDropDatabaseMessage message = new org . apache . sentry . binding . metastore . messaging . json . SentryJSONDropDatabaseMessage ( org . apache . sentry . service . thrift . TestFullUpdateModifier . SERVER , org . apache . sentry . service . thrift . TestFullUpdateModifier . PRINCIPAL , org . apache . sentry . service . thrift . TestFullUpdateModifier . DB , 0L , "hdfs:///bad/location" ) ; org . mockito . Mockito . when ( deserializer . getDropDatabaseMessage ( "" ) ) . thenReturn ( message ) ; org . apache . sentry . service . thrift . FullUpdateModifier . applyEvent ( update , event , deserializer ) ; java . util . Map < java . lang . String , java . util . Set < java . lang . String > > expected = new java . util . HashMap ( ) ; expected . put ( org . apache . sentry . service . thrift . TestFullUpdateModifier . DB . toLowerCase ( ) , java . util . Collections . singleton ( org . apache . sentry . service . thrift . TestFullUpdateModifier . PATH ) ) ; "<AssertPlaceHolder>" ; } applyEvent ( java . util . Map , org . apache . hadoop . hive . metastore . api . NotificationEvent , org . apache . hadoop . hive . metastore . messaging . MessageDeserializer ) { org . apache . hadoop . hive . metastore . messaging . EventMessage . EventType eventType = EventMessage . EventType . valueOf ( event . getEventType ( ) ) ; switch ( eventType ) { case CREATE_DATABASE : org . apache . sentry . service . thrift . FullUpdateModifier . createDatabase ( image , event , deserializer ) ; break ; case DROP_DATABASE : org . apache . sentry . service . thrift . FullUpdateModifier . dropDatabase ( image , event , deserializer ) ; break ; case CREATE_TABLE : org . apache . sentry . service . thrift . FullUpdateModifier . createTable ( image , event , deserializer ) ; break ; case DROP_TABLE : org . apache . sentry . service . thrift . FullUpdateModifier . dropTable ( image , event , deserializer ) ; break ; case ALTER_TABLE : org . apache . sentry . service . thrift . FullUpdateModifier . alterTable ( image , event , deserializer ) ; break ; case ADD_PARTITION : org . apache . sentry . service . thrift . FullUpdateModifier . addPartition ( image , event , deserializer ) ; break ; case DROP_PARTITION : org . apache . sentry . service . thrift . FullUpdateModifier . dropPartition ( image , event , deserializer ) ; break ; case ALTER_PARTITION : org . apache . sentry . service . thrift . FullUpdateModifier . alterPartition ( image , event , deserializer ) ; break ; default : org . apache . sentry . service . thrift . FullUpdateModifier . LOGGER . 
error ( "Notification<sp>with<sp>ID:{}<sp>has<sp>invalid<sp>event<sp>type:<sp>{}" , event . getEventId ( ) , event . getEventType ( ) ) ; break ; } }
|
org . junit . Assert . assertEquals ( expected , update )
|
testSetObject ( ) { java . lang . String myObject = "my-object" ; entry . setObject ( myObject ) ; "<AssertPlaceHolder>" ; } getObject ( ) { return object ; }
|
org . junit . Assert . assertEquals ( myObject , entry . getObject ( ) )
|
testNoIndexesPermitted ( ) { new org . alfasoftware . morf . upgrade . AddTableFrom ( table ( "foo" ) . columns ( column ( "bar" , DataType . STRING , 10 ) ) , org . alfasoftware . morf . sql . SqlUtils . select ( org . alfasoftware . morf . sql . SqlUtils . literal ( 77 ) ) ) ; try { new org . alfasoftware . morf . upgrade . AddTableFrom ( table ( "foo" ) . columns ( column ( "bar" , DataType . STRING , 10 ) ) . indexes ( org . alfasoftware . morf . metadata . SchemaUtils . index ( "foo_1" ) . columns ( "bar" ) ) , org . alfasoftware . morf . sql . SqlUtils . select ( org . alfasoftware . morf . sql . SqlUtils . literal ( 77 ) ) ) ; org . junit . Assert . fail ( "indexes<sp>not<sp>permitted" ) ; } catch ( java . lang . Exception e ) { "<AssertPlaceHolder>" ; } } literal ( java . lang . String ) { return new org . alfasoftware . morf . sql . element . FieldLiteral ( value ) ; }
|
org . junit . Assert . assertTrue ( e . getMessage ( ) . contains ( "foo" ) )
|
testAbortOnNoValidEditDirs ( ) { cluster . restartNameNode ( ) ; "<AssertPlaceHolder>" ; checkFileCreation ( "file9" ) ; cluster . getNameNode ( ) . getFSImage ( ) . removeStorageDir ( new java . io . File ( nameDirs . get ( 0 ) ) ) ; cluster . getNameNode ( ) . getFSImage ( ) . removeStorageDir ( new java . io . File ( nameDirs . get ( 1 ) ) ) ; org . apache . hadoop . hdfs . server . namenode . FSEditLog spyLog = spy ( cluster . getNameNode ( ) . getFSImage ( ) . getEditLog ( ) ) ; doNothing ( ) . when ( spyLog ) . fatalExit ( anyString ( ) ) ; cluster . getNameNode ( ) . getFSImage ( ) . setEditLog ( spyLog ) ; cluster . getNameNode ( ) . getFSImage ( ) . removeStorageDir ( new java . io . File ( nameDirs . get ( 2 ) ) ) ; verify ( spyLog , atLeastOnce ( ) ) . fatalExit ( anyString ( ) ) ; } numRemovedDirs ( ) { return getRemovedDirs ( ) . size ( ) ; }
|
org . junit . Assert . assertEquals ( 0 , numRemovedDirs ( ) )
|
equalsWithSameIDsAndDifferentNames ( ) { com . fiveamsolutions . plc . jaas . UserPrincipal testUserPrincipal1 = createTestUserPrincipal ( com . fiveamsolutions . plc . jaas . UserPrincipalTest . TEST_ID , com . fiveamsolutions . plc . jaas . UserPrincipalTest . TEST_USERNAME ) ; com . fiveamsolutions . plc . jaas . UserPrincipal testUserPrincipal2 = createTestUserPrincipal ( com . fiveamsolutions . plc . jaas . UserPrincipalTest . TEST_ID , "test1" ) ; "<AssertPlaceHolder>" ; } equals ( java . lang . Object ) { if ( ! ( obj instanceof com . fiveamsolutions . plc . jaas . UserPrincipal ) ) { return false ; } if ( ( this ) == obj ) { return true ; } com . fiveamsolutions . plc . jaas . UserPrincipal rhs = ( ( com . fiveamsolutions . plc . jaas . UserPrincipal ) ( obj ) ) ; return new org . apache . commons . lang3 . builder . EqualsBuilder ( ) . appendSuper ( super . equals ( obj ) ) . append ( user . getId ( ) , rhs . user . getId ( ) ) . isEquals ( ) ; }
|
org . junit . Assert . assertFalse ( testUserPrincipal1 . equals ( testUserPrincipal2 ) )
|
testActivityNoRecentData ( ) { eu . dime . context . model . api . IContextElement [ ] addrs = new eu . dime . context . model . api . IContextElement [ ] { eu . dime . ps . controllers . context . raw . ContextHelper . createCivilAddress ( ( ( java . lang . System . currentTimeMillis ( ) ) - ( 15 * 1000 ) ) , 600 , "place1" ) , eu . dime . ps . controllers . context . raw . ContextHelper . createCivilAddress ( ( ( java . lang . System . currentTimeMillis ( ) ) - ( ( ( ( eu . dime . ps . controllers . context . raw . utils . Defaults . ACTIVITY_PERIOD ) + ( eu . dime . ps . controllers . context . raw . utils . Defaults . ACTIVITY_TOLERANCE ) ) + 10 ) * 1000 ) ) , 600 , "place1" ) } ; java . lang . String activity = activityDetector . getActivity ( Constants . ENTITY_ME , addrs ) ; "<AssertPlaceHolder>" ; } getActivity ( eu . dime . context . model . api . IEntity , eu . dime . context . model . api . IContextElement [ ] ) { if ( ( addrs . length ) != 0 ) { int index = eu . dime . ps . controllers . context . raw . utils . Utility . getFirstSignificativeItem ( addrs , Defaults . ACTIVITY_PERIOD , Defaults . ACTIVITY_TOLERANCE ) ; if ( index == ( - 1 ) ) { logger . debug ( "Received<sp>civil<sp>addresses<sp>are<sp>too<sp>recent<sp>to<sp>infer<sp>activity" ) ; return "" ; } else { logger . debug ( "Significative<sp>civil<sp>address<sp>for<sp>activity:" ) ; for ( int i = 0 ; i < ( addrs . length ) ; i ++ ) { java . lang . String prefix = "" ; if ( i <= index ) prefix = "*" + i ; else prefix = "" + i ; logger . debug ( ( ( ( ( prefix + "<sp>-<sp>" ) + ( ( java . lang . String ) ( addrs [ i ] . getContextData ( ) . getContextValue ( eu . dime . context . model . impl . Factory . createScope ( Constants . SCOPE_LOCATION_CIVILADDRESS_PLACE_NAME ) ) . getValue ( ) . getValue ( ) ) ) ) + "<sp>" ) + ( ( java . lang . String ) ( addrs [ i ] . getMetadata ( ) . getMetadatumValue ( eu . dime . context . model . impl . Factory . createScope ( Constants . SCOPE_METADATA_TIMESTAMP ) ) . getValue ( ) ) ) ) ) ; } } java . lang . String placeName = ( ( java . lang . String ) ( addrs [ 0 ] . getContextData ( ) . getContextValue ( eu . dime . context . model . impl . Factory . createScope ( Constants . SCOPE_LOCATION_CIVILADDRESS_PLACE_NAME ) ) . getValue ( ) . getValue ( ) ) ) ; for ( int i = 1 ; i <= index ; i ++ ) { java . lang . String currentPlace = ( ( java . lang . String ) ( addrs [ i ] . getContextData ( ) . getContextValue ( eu . dime . context . model . impl . Factory . createScope ( Constants . SCOPE_LOCATION_CIVILADDRESS_PLACE_NAME ) ) . getValue ( ) . getValue ( ) ) ) ; if ( ! ( currentPlace . equalsIgnoreCase ( placeName ) ) ) return "" ; } return "@" + placeName ; } return "" ; }
|
org . junit . Assert . assertFalse ( ( ( activity == null ) || ( ! ( activity . equalsIgnoreCase ( "" ) ) ) ) )
|
shouldExecuteForEachOperationOnCountWithValidResults ( ) { final java . util . List < java . util . List < java . lang . String > > inputIterable = java . util . Arrays . asList ( java . util . Arrays . asList ( "1" , "2" , "3" ) , java . util . Arrays . asList ( "4" , "5" ) , java . util . Arrays . asList ( ) ) ; final uk . gov . gchq . gaffer . operation . impl . ForEach < java . util . List < java . lang . String > , java . lang . Long > op = new ForEach . Builder < java . util . List < java . lang . String > , java . lang . Long > ( ) . input ( inputIterable ) . operation ( new uk . gov . gchq . gaffer . operation . OperationChain . Builder ( ) . first ( new uk . gov . gchq . gaffer . operation . impl . Count ( ) ) . then ( new uk . gov . gchq . gaffer . operation . impl . Map . Builder < > ( ) . first ( new uk . gov . gchq . koryphe . impl . function . ToInteger ( ) ) . build ( ) ) . build ( ) ) . build ( ) ; final java . lang . Iterable < ? extends java . lang . Long > output = graph . execute ( op , getUser ( ) ) ; "<AssertPlaceHolder>" ; } getUser ( ) { return uk . gov . gchq . gaffer . operation . export . graph . AuthorisedGraphForExportDelegate . user ; }
|
org . junit . Assert . assertEquals ( java . util . Arrays . asList ( 3 , 2 , 0 ) , com . google . common . collect . Lists . newArrayList ( output ) )
|
testExcludeTableNameAsRegex ( ) { com . streamsets . pipeline . stage . origin . jdbc . table . TableConfigBean tableConfigBean = new com . streamsets . pipeline . stage . origin . jdbc . table . TableJdbcSourceTestBuilder . TableConfigBeanTestBuilder ( ) . tablePattern ( "TABLE%" ) . schema ( com . streamsets . pipeline . stage . origin . jdbc . table . TestTableExclusion . SCHEMA ) . tableExclusionPattern ( "TABLE1" ) . build ( ) ; "<AssertPlaceHolder>" ; } listTablesForConfig ( java . sql . Connection , com . streamsets . pipeline . stage . origin . jdbc . table . TableConfigBean , com . streamsets . pipeline . stage . origin . jdbc . table . TableJdbcELEvalContext ) { return com . streamsets . pipeline . stage . origin . jdbc . table . TestTableExclusion . tableContextUtil . listTablesForConfig ( com . streamsets . pipeline . stage . origin . jdbc . table . TestTableExclusion . createTestContext ( ) , new java . util . LinkedList < com . streamsets . pipeline . api . Stage . ConfigIssue > ( ) , connection , tableConfigBean , tableJdbcELEvalContext , QuoteChar . NONE ) ; }
|
org . junit . Assert . assertEquals ( 9 , com . streamsets . pipeline . stage . origin . jdbc . table . TestTableExclusion . listTablesForConfig ( com . streamsets . pipeline . stage . origin . jdbc . table . TestTableExclusion . connection , tableConfigBean , com . streamsets . pipeline . stage . origin . jdbc . table . TestTableExclusion . tableJdbcELEvalContext ) . size ( ) )
|
shouldParseEscapedBrackets ( ) { java . lang . String scriptFragment = "read<sp>\"say<sp>[HAHA]\"" ; java . lang . String expectedValue = "say<sp>[HAHA]" ; org . kaazing . k3po . lang . internal . parser . ScriptParserImpl parser = new org . kaazing . k3po . lang . internal . parser . ScriptParserImpl ( ) ; org . kaazing . k3po . lang . internal . ast . AstReadValueNode actual = parser . parseWithStrategy ( scriptFragment , org . kaazing . k3po . lang . internal . parser . ScriptParseStrategy . READ ) ; org . kaazing . k3po . lang . internal . ast . AstReadValueNode expected = new org . kaazing . k3po . lang . internal . ast . AstReadValueNode ( ) ; expected . setMatchers ( java . util . Arrays . < org . kaazing . k3po . lang . internal . ast . matcher . AstValueMatcher > asList ( new org . kaazing . k3po . lang . internal . ast . matcher . AstExactTextMatcher ( expectedValue ) ) ) ; "<AssertPlaceHolder>" ; } setMatchers ( java . util . List ) { this . matchers = matchers ; }
|
org . junit . Assert . assertEquals ( expected , actual )
|
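A readable sketch of the pair above, with `<sp>` expanded to spaces inside the script fragment; parsing may throw, so the original test presumably declares `throws Exception`:

```java
String scriptFragment = "read \"say [HAHA]\"";
String expectedValue = "say [HAHA]";
ScriptParserImpl parser = new ScriptParserImpl();
AstReadValueNode actual =
        parser.parseWithStrategy(scriptFragment, ScriptParseStrategy.READ);
AstReadValueNode expected = new AstReadValueNode();
expected.setMatchers(java.util.Arrays.<AstValueMatcher>asList(
        new AstExactTextMatcher(expectedValue)));
org.junit.Assert.assertEquals(expected, actual);   // <-- output column
```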
initialize_SSOAutoLogin ( ) { voUser . setStatus ( UserAccountStatus . LOCKED_NOT_CONFIRMED ) ; java . lang . String encodedString = org . oscm . converter . ParameterEncoder . encodeParameters ( new java . lang . String [ ] { userId , orgId , "MPID" , java . lang . String . valueOf ( serviceKey ) } ) ; confirmationBean . setEncodedParam ( encodedString ) ; doReturn ( Boolean . TRUE ) . when ( confirmationBean ) . isServiceProvider ( ) ; confirmationBean . initialize ( ) ; "<AssertPlaceHolder>" ; verify ( confirmationBean , times ( 1 ) ) . loginUser ( eq ( voUser ) , anyString ( ) , eq ( request ) , eq ( session ) ) ; } getSubscribeToServiceKey ( ) { if ( ( sessionBean . getSubscribeToServiceKey ( ) ) != null ) { subscribeToServiceKey = sessionBean . getSubscribeToServiceKey ( ) ; sessionBean . setSubscribeToServiceKey ( null ) ; } return subscribeToServiceKey ; }
|
org . junit . Assert . assertEquals ( serviceKey , sessionBean . getSubscribeToServiceKey ( ) )
|
testDefaultConfiguration ( ) { final com . bc . ceres . glayer . LayerType layerType = getLayerType ( ) ; final com . bc . ceres . binding . PropertySet template = layerType . createLayerConfig ( null ) ; "<AssertPlaceHolder>" ; ensurePropertyIsDeclaredButNotDefined ( template , "raster" , org . esa . beam . framework . datamodel . RasterDataNode . class ) ; ensurePropertyIsDefined ( template , "borderShown" , org . esa . beam . glayer . Boolean . class ) ; ensurePropertyIsDefined ( template , "borderWidth" , org . esa . beam . glayer . Double . class ) ; ensurePropertyIsDefined ( template , "borderColor" , java . awt . Color . class ) ; } createLayerConfig ( com . bc . ceres . glayer . LayerContext ) { final com . bc . ceres . binding . PropertyContainer valueContainer = new com . bc . ceres . binding . PropertyContainer ( ) ; valueContainer . addProperty ( new com . bc . ceres . binding . Property ( new com . bc . ceres . binding . PropertyDescriptor ( "product" , org . esa . beam . framework . datamodel . Product . class ) , new com . bc . ceres . binding . accessors . DefaultPropertyAccessor ( ) ) ) ; valueContainer . addProperty ( new com . bc . ceres . binding . Property ( new com . bc . ceres . binding . PropertyDescriptor ( "band" , org . esa . beam . framework . datamodel . Band . class ) , new com . bc . ceres . binding . accessors . DefaultPropertyAccessor ( ) ) ) ; return valueContainer ; }
|
org . junit . Assert . assertNotNull ( template )
|
testVerifyWithInvalidKeyIdFails ( ) { byte [ ] signature = signer . sign ( "privateKeyId" , message ) ; "<AssertPlaceHolder>" ; verifier . verify ( "invalid<sp>key<sp>id" , message , signature ) ; } sign ( java . lang . String , java . lang . String ) { com . springcryptoutils . core . signature . Base64EncodedSigner signer = cache . get ( privateKeyId ) ; if ( signer != null ) { return signer . sign ( message ) ; } com . springcryptoutils . core . signature . Base64EncodedSignerImpl signerImpl = new com . springcryptoutils . core . signature . Base64EncodedSignerImpl ( ) ; signerImpl . setAlgorithm ( algorithm ) ; signerImpl . setCharsetName ( charsetName ) ; signerImpl . setProvider ( provider ) ; java . security . PrivateKey privateKey = privateKeyMap . get ( privateKeyId ) ; if ( privateKey == null ) { throw new com . springcryptoutils . core . signature . SignatureException ( ( "private<sp>key<sp>not<sp>found:<sp>privateKeyId=" + privateKeyId ) ) ; } signerImpl . setPrivateKey ( privateKey ) ; cache . put ( privateKeyId , signerImpl ) ; return signerImpl . sign ( message ) ; }
|
org . junit . Assert . assertNotNull ( signature )
|
testTextToBinary ( ) { com . dremio . dac . model . job . JobDataFragment data = testConvert ( "convert_to(s_name<sp>,'utf8')<sp>as<sp>foo" , new com . dremio . dac . proto . model . dataset . FieldSimpleConvertToType ( BINARY ) , "s_name" , "cp.\"tpch/supplier.parquet\"" ) ; "<AssertPlaceHolder>" ; } getColumn ( java . lang . String ) { return nameToColumns . get ( name ) ; }
|
org . junit . Assert . assertEquals ( com . dremio . dac . server . BINARY , data . getColumn ( "foo" ) . getType ( ) )
|
testKeysOfDifferentClasses ( ) { org . jfree . chart . PaintMap m = new org . jfree . chart . PaintMap ( ) ; m . put ( "ABC" , Color . red ) ; m . put ( new java . lang . Integer ( 99 ) , Color . blue ) ; "<AssertPlaceHolder>" ; } getPaint ( double ) { return this . colorPalette . getPaint ( value ) ; }
|
org . junit . Assert . assertEquals ( Color . blue , m . getPaint ( new java . lang . Integer ( 99 ) ) )
|
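A readable sketch of the pair above as a self-contained JUnit test against JFreeChart's `PaintMap` (the class name of the wrapper is hypothetical):

```java
import java.awt.Color;
import org.jfree.chart.PaintMap;
import org.junit.Assert;
import org.junit.Test;

public class PaintMapKeyTest {
    @Test
    public void testKeysOfDifferentClasses() {
        PaintMap m = new PaintMap();
        m.put("ABC", Color.red);
        m.put(new Integer(99), Color.blue);
        // <-- output column:
        Assert.assertEquals(Color.blue, m.getPaint(new Integer(99)));
    }
}
```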
findSingleSignOnNotExists ( ) { java . lang . String id = "sso" ; org . wildfly . clustering . ee . Batcher < org . wildfly . clustering . ee . Batch > batcher = mock ( org . wildfly . clustering . ee . Batcher . class ) ; org . wildfly . clustering . ee . Batch batch = mock ( org . wildfly . clustering . ee . Batch . class ) ; when ( this . manager . getBatcher ( ) ) . thenReturn ( batcher ) ; when ( batcher . createBatch ( ) ) . thenReturn ( batch ) ; when ( this . manager . findSSO ( id ) ) . thenReturn ( null ) ; io . undertow . security . impl . SingleSignOn result = this . subject . findSingleSignOn ( id ) ; "<AssertPlaceHolder>" ; verify ( batch ) . close ( ) ; verify ( batcher , never ( ) ) . suspendBatch ( ) ; } findSingleSignOn ( java . lang . String ) { try { java . util . Base64 . getUrlDecoder ( ) . decode ( id ) ; } catch ( java . lang . IllegalArgumentException e ) { return null ; } org . wildfly . clustering . ee . Batcher < org . wildfly . clustering . ee . Batch > batcher = this . manager . getBatcher ( ) ; @ org . wildfly . clustering . web . undertow . sso . SuppressWarnings ( "resource" ) org . wildfly . clustering . ee . Batch batch = batcher . createBatch ( ) ; try { org . wildfly . clustering . web . sso . SSO < io . undertow . security . api . AuthenticatedSessionManager . AuthenticatedSession , java . lang . String , java . lang . String , java . lang . Void > sso = this . manager . findSSO ( id ) ; if ( sso == null ) { if ( org . wildfly . clustering . web . undertow . sso . DistributableSingleSignOnManager . log . isTraceEnabled ( ) ) { org . wildfly . clustering . web . undertow . sso . DistributableSingleSignOnManager . log . tracef ( "SSO<sp>ID<sp>%s<sp>not<sp>found<sp>on<sp>the<sp>session<sp>manager." , id ) ; } batch . close ( ) ; return null ; } if ( org . wildfly . clustering . web . undertow . sso . DistributableSingleSignOnManager . log . isTraceEnabled ( ) ) { org . wildfly . clustering . web . undertow . sso . DistributableSingleSignOnManager . log . tracef ( "SSO<sp>ID<sp>%s<sp>found<sp>on<sp>the<sp>session<sp>manager." , id ) ; } return new org . wildfly . clustering . web . undertow . sso . DistributableSingleSignOn ( sso , this . registry , batcher , batcher . suspendBatch ( ) ) ; } catch ( java . lang . RuntimeException | java . lang . Error e ) { batch . discard ( ) ; batch . close ( ) ; throw e ; } }
|
org . junit . Assert . assertNull ( result )
|
testFindsPageIdByRemoteId ( ) { com . eclipsesource . tabris . internal . ui . RemoteUI remoteUI = createRemoteUI ( mock ( com . eclipsesource . tabris . ui . UI . class ) ) ; com . eclipsesource . tabris . internal . ui . RemotePage remotePage = mock ( com . eclipsesource . tabris . internal . ui . RemotePage . class ) ; com . eclipsesource . tabris . internal . ui . Controller controller = mock ( com . eclipsesource . tabris . internal . ui . Controller . class ) ; java . util . List < com . eclipsesource . tabris . internal . ui . rendering . PageRenderer > pages = new java . util . ArrayList < com . eclipsesource . tabris . internal . ui . rendering . PageRenderer > ( ) ; pages . add ( remotePage ) ; when ( controller . getAllPages ( ) ) . thenReturn ( pages ) ; remoteUI . setController ( controller ) ; com . eclipsesource . tabris . internal . ui . PageDescriptor pageDescriptor = mock ( com . eclipsesource . tabris . internal . ui . PageDescriptor . class ) ; when ( pageDescriptor . getId ( ) ) . thenReturn ( "bar" ) ; when ( remotePage . getDescriptor ( ) ) . thenReturn ( pageDescriptor ) ; when ( remotePage . getId ( ) ) . thenReturn ( "foo" ) ; java . lang . String pageId = remoteUI . getPageId ( "foo" ) ; "<AssertPlaceHolder>" ; } getPageId ( java . lang . String ) { java . util . List < com . eclipsesource . tabris . internal . ui . rendering . PageRenderer > pageRenderers = controller . getAllPages ( ) ; for ( com . eclipsesource . tabris . internal . ui . rendering . PageRenderer pageRenderer : pageRenderers ) { com . eclipsesource . tabris . internal . ui . RemotePage remotePage = ( ( com . eclipsesource . tabris . internal . ui . RemotePage ) ( pageRenderer ) ) ; if ( remotePage . getId ( ) . equals ( pageRendererId ) ) { return remotePage . getDescriptor ( ) . getId ( ) ; } } throw new java . lang . IllegalStateException ( ( ( "RemotePage<sp>with<sp>id<sp>" + pageRendererId ) + "<sp>does<sp>not<sp>exist." ) ) ; }
|
org . junit . Assert . assertEquals ( "bar" , pageId )
|
roundTrip ( ) { com . rabbitmq . client . test . functional . DirectReplyTo . QueueMessageConsumer c = new com . rabbitmq . client . test . functional . DirectReplyTo . QueueMessageConsumer ( channel ) ; java . lang . String replyTo = rpcFirstHalf ( c ) ; declare ( connection , replyTo , true ) ; channel . confirmSelect ( ) ; basicPublishVolatile ( "response" . getBytes ( ) , "" , replyTo , MessageProperties . BASIC ) ; channel . waitForConfirms ( ) ; byte [ ] body = c . nextDelivery ( 10000 ) ; "<AssertPlaceHolder>" ; } nextDelivery ( long ) { return messages . poll ( timeoutInMs , TimeUnit . MILLISECONDS ) ; }
|
org . junit . Assert . assertEquals ( "response" , new java . lang . String ( body ) )
|
testCreateOneSubscription ( ) { java . lang . String subId = java . lang . Long . toString ( java . lang . System . currentTimeMillis ( ) ) ; final org . oscm . internal . vo . VOSubscription sub = subMgmt . subscribeToService ( org . oscm . test . data . Subscriptions . createVOSubscription ( subId ) , org . oscm . serviceprovisioningservice . assembler . ProductAssembler . toVOProduct ( testPrd , new org . oscm . i18nservice . bean . LocalizerFacade ( localizer , "en" ) ) , null , null , null , new java . util . ArrayList < org . oscm . internal . vo . VOUda > ( ) ) ; runTX ( new java . util . concurrent . Callable < java . lang . Void > ( ) { @ org . oscm . subscriptionservice . bean . Override public org . oscm . subscriptionservice . bean . Void call ( ) throws org . oscm . subscriptionservice . bean . Exception { org . oscm . domobjects . Subscription subscription = mgr . find ( org . oscm . domobjects . Subscription . class , sub . getKey ( ) ) ; org . oscm . domobjects . Organization sourceOrganization = subscription . getProduct ( ) . getTechnicalProduct ( ) . getOrganization ( ) ; org . oscm . domobjects . Organization targetOrganization = subscription . getOrganization ( ) ; "<AssertPlaceHolder>" ; return null ; } } ) ; } getOrganizationReference ( org . oscm . domobjects . Organization , org . oscm . domobjects . Organization ) { for ( org . oscm . domobjects . OrganizationReference reference : targetOrganization . getSourcesForType ( OrganizationReferenceType . ON_BEHALF_ACTING ) ) { if ( ( ( reference . getSource ( ) . getKey ( ) ) == ( sourceOrganization . getKey ( ) ) ) && ( ( reference . getTargetKey ( ) ) == ( targetOrganization . getKey ( ) ) ) ) { return reference ; } } return null ; }
|
org . junit . Assert . assertNull ( getOrganizationReference ( sourceOrganization , targetOrganization ) )
|
testGetBondOrder ( ) { org . openscience . smsd . helper . BondEnergy instance = new org . openscience . smsd . helper . BondEnergy ( "H" , "I" , org . openscience . cdk . interfaces . IBond . Order . SINGLE , 295 ) ; org . openscience . cdk . interfaces . IBond . Order expResult = org . openscience . cdk . interfaces . IBond . Order . SINGLE ; org . openscience . cdk . interfaces . IBond . Order result = instance . getBondOrder ( ) ; "<AssertPlaceHolder>" ; } getBondOrder ( ) { return bondOrder ; }
|
org . junit . Assert . assertEquals ( expResult , result )
|
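A readable sketch of the pair above (SMSD's `BondEnergy`, whose `getBondOrder()` accessor is the focal method):

```java
BondEnergy instance = new BondEnergy("H", "I", IBond.Order.SINGLE, 295);
IBond.Order expResult = IBond.Order.SINGLE;
IBond.Order result = instance.getBondOrder();
org.junit.Assert.assertEquals(expResult, result);   // <-- output column
```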
shouldNotAcceptFileFromPureLuceneProvider ( ) { java . io . File dir = directoriesByProviderKey ( storeDir ) . forProvider ( files . NativeIndexFileFilterTest . LUCENE_DESCRTIPTOR ) . directoryForIndex ( 1 ) ; java . io . File file = new java . io . File ( dir , "some-file" ) ; createFile ( file ) ; boolean accepted = filter . accept ( file ) ; "<AssertPlaceHolder>" ; } accept ( org . neo4j . kernel . impl . proc . Procedures ) { procs . register ( new org . neo4j . kernel . builtinprocs . ListComponentsProcedure ( procedureName ( "dbms" , "components" ) , neo4jVersion , neo4jEdition ) ) ; procs . register ( new org . neo4j . kernel . builtinprocs . JmxQueryProcedure ( procedureName ( "dbms" , "queryJmx" ) , java . lang . management . ManagementFactory . getPlatformMBeanServer ( ) ) ) ; }
|
org . junit . Assert . assertFalse ( accepted )
|
testEqualsSelf ( ) { org . jfree . data . time . Month month = new org . jfree . data . time . Month ( ) ; "<AssertPlaceHolder>" ; } equals ( java . lang . Object ) { if ( ! ( o instanceof com . mysql . fabric . Server ) ) { return false ; } com . mysql . fabric . Server s = ( ( com . mysql . fabric . Server ) ( o ) ) ; return s . getUuid ( ) . equals ( getUuid ( ) ) ; }
|
org . junit . Assert . assertTrue ( month . equals ( month ) )
|
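A readable sketch of the pair above as a self-contained JUnit test (the wrapper class name is hypothetical):

```java
import org.jfree.data.time.Month;
import org.junit.Assert;
import org.junit.Test;

public class MonthEqualsSelfTest {
    @Test
    public void testEqualsSelf() {
        Month month = new Month();
        Assert.assertTrue(month.equals(month));   // <-- output column
    }
}
```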
testCornerCaseSerializationLegnth ( ) { final java . util . Map < java . lang . String , org . apache . apex . malhar . lib . appdata . schemas . Type > fieldToType = com . google . common . collect . Maps . newHashMap ( ) ; fieldToType . put ( "a" , Type . OBJECT ) ; fieldToType . put ( "b" , Type . OBJECT ) ; org . apache . apex . malhar . lib . appdata . gpo . GPOMutable gpo = new org . apache . apex . malhar . lib . appdata . gpo . GPOMutable ( new org . apache . apex . malhar . lib . appdata . schemas . FieldsDescriptor ( fieldToType ) ) ; int serializeLength = org . apache . apex . malhar . lib . appdata . gpo . GPOUtils . serializedLength ( gpo ) ; "<AssertPlaceHolder>" ; } serializedLength ( org . apache . apex . malhar . lib . appdata . gpo . GPOMutable ) { int arrayLength = 0 ; org . apache . apex . malhar . lib . appdata . schemas . FieldsDescriptor fd = gpo . getFieldDescriptor ( ) ; java . util . List < org . apache . apex . malhar . lib . appdata . schemas . Type > types = fd . getTypesList ( ) ; for ( int typeIndex = 0 ; typeIndex < ( types . size ( ) ) ; typeIndex ++ ) { org . apache . apex . malhar . lib . appdata . schemas . Type type = types . get ( typeIndex ) ; switch ( type ) { case STRING : { for ( java . lang . String val : gpo . getFieldsString ( ) ) { arrayLength += Type . INTEGER . getByteSize ( ) ; arrayLength += val . getBytes ( ) . length ; } break ; } case OBJECT : { break ; } default : { arrayLength += ( fd . getTypeToFields ( ) . get ( type ) . size ( ) ) * ( type . getByteSize ( ) ) ; } } } return arrayLength ; }
|
org . junit . Assert . assertEquals ( 0 , serializeLength )
|
testSingletonIsTheSame ( ) { creational . singleton . Singleton once = Singleton . INSTANCE ; creational . singleton . Singleton twice = Singleton . INSTANCE ; "<AssertPlaceHolder>" ; }
|
org . junit . Assert . assertEquals ( once , twice )
|
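A readable sketch of the pair above; `Singleton` is the enum-style singleton from the example's own `creational.singleton` package, so this is not standalone code:

```java
Singleton once = Singleton.INSTANCE;
Singleton twice = Singleton.INSTANCE;
org.junit.Assert.assertEquals(once, twice);   // <-- output column
```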
testReadWithReadWriteTransactionClosed ( ) { org . opendaylight . controller . sal . core . spi . data . DOMStoreReadTransaction readTx = domStore . newReadWriteTransaction ( ) ; "<AssertPlaceHolder>" ; readTx . close ( ) ; org . opendaylight . controller . md . sal . dom . store . impl . InMemoryDataStoreTest . doReadAndThrowEx ( readTx ) ; } newReadWriteTransaction ( ) { return new org . opendaylight . controller . md . sal . binding . impl . BindingDOMReadWriteTransactionAdapter ( getDelegate ( ) . newReadWriteTransaction ( ) , getCodec ( ) ) ; }
|
org . junit . Assert . assertNotNull ( readTx )
|
testConstants ( ) { java . lang . reflect . Constructor < org . ff4j . ehcache . FF4JEhCacheConstants > ce = org . ff4j . ehcache . FF4JEhCacheConstants . class . getDeclaredConstructor ( ) ; ce . setAccessible ( true ) ; ce . newInstance ( ) ; "<AssertPlaceHolder>" ; }
|
org . junit . Assert . assertNotNull ( ce )
|
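A readable sketch of the pair above; the reflective calls can throw, so the test method is assumed to declare `throws Exception`:

```java
Constructor<FF4JEhCacheConstants> ce =
        FF4JEhCacheConstants.class.getDeclaredConstructor();
ce.setAccessible(true);
ce.newInstance();
org.junit.Assert.assertNotNull(ce);   // <-- output column
```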
testSetInventory ( ) { world . bentobox . bentobox . api . panels . Panel p = new world . bentobox . bentobox . api . panels . Panel ( name , items , 10 , user , listener ) ; org . bukkit . inventory . Inventory inventory = mock ( org . bukkit . inventory . Inventory . class ) ; p . setInventory ( inventory ) ; "<AssertPlaceHolder>" ; } getInventory ( ) { return inventory ; }
|
org . junit . Assert . assertEquals ( inventory , p . getInventory ( ) )
|
testRemoteSparkCancel ( ) { org . apache . hadoop . hive . ql . exec . spark . status . impl . RemoteSparkJobStatus jobSts = mock ( org . apache . hadoop . hive . ql . exec . spark . status . impl . RemoteSparkJobStatus . class ) ; when ( jobSts . getRemoteJobState ( ) ) . thenReturn ( State . CANCELLED ) ; when ( jobSts . isRemoteActive ( ) ) . thenReturn ( true ) ; org . apache . hadoop . hive . conf . HiveConf hiveConf = new org . apache . hadoop . hive . conf . HiveConf ( ) ; org . apache . hadoop . hive . ql . session . SessionState . start ( hiveConf ) ; org . apache . hadoop . hive . ql . exec . spark . status . RemoteSparkJobMonitor remoteSparkJobMonitor = new org . apache . hadoop . hive . ql . exec . spark . status . RemoteSparkJobMonitor ( hiveConf , jobSts ) ; "<AssertPlaceHolder>" ; } startMonitor ( ) { boolean running = false ; boolean done = false ; int rc = 0 ; org . apache . spark . JobExecutionStatus lastState = null ; java . util . Map < org . apache . hadoop . hive . ql . exec . spark . status . SparkStage , org . apache . hadoop . hive . ql . exec . spark . status . SparkStageProgress > lastProgressMap = null ; perfLogger . PerfLogBegin ( org . apache . hadoop . hive . ql . exec . spark . status . CLASS_NAME , PerfLogger . SPARK_RUN_JOB ) ; perfLogger . PerfLogBegin ( org . apache . hadoop . hive . ql . exec . spark . status . CLASS_NAME , PerfLogger . SPARK_SUBMIT_TO_RUNNING ) ; startTime = java . lang . System . currentTimeMillis ( ) ; while ( true ) { try { org . apache . spark . JobExecutionStatus state = sparkJobStatus . getState ( ) ; if ( org . apache . hadoop . hive . ql . exec . spark . status . LOG . isDebugEnabled ( ) ) { console . printInfo ( ( "state<sp>=<sp>" + state ) ) ; } if ( state == null ) { long timeCount = ( ( java . lang . System . currentTimeMillis ( ) ) - ( startTime ) ) / 1000 ; if ( timeCount > ( monitorTimeoutInterval ) ) { console . printError ( ( ( "Job<sp>hasn't<sp>been<sp>submitted<sp>after<sp>" + timeCount ) + "s.<sp>Aborting<sp>it." ) ) ; console . printError ( ( "Status:<sp>" + state ) ) ; running = false ; done = true ; rc = 2 ; break ; } } else if ( ( state != lastState ) || ( state == ( org . apache . spark . JobExecutionStatus . RUNNING ) ) ) { lastState = state ; java . util . Map < org . apache . hadoop . hive . ql . exec . spark . status . SparkStage , org . apache . hadoop . hive . ql . exec . spark . status . SparkStageProgress > progressMap = sparkJobStatus . getSparkStageProgress ( ) ; switch ( state ) { case RUNNING : if ( ! running ) { perfLogger . PerfLogEnd ( org . apache . hadoop . hive . ql . exec . spark . status . CLASS_NAME , PerfLogger . SPARK_SUBMIT_TO_RUNNING ) ; console . printInfo ( ( ( "Status:<sp>Failed" 8 + ( sparkJobStatus . getJobId ( ) ) ) + "Status:<sp>Failed" 5 ) ) ; for ( int stageId : sparkJobStatus . getStageIds ( ) ) { console . printInfo ( java . lang . Integer . toString ( stageId ) ) ; } console . printInfo ( ( ( "\nStatus:<sp>Running<sp>(Hive<sp>on<sp>Spark<sp>job[" + ( sparkJobStatus . getJobId ( ) ) ) + "])" ) ) ; running = true ; console . printInfo ( ( "Status:<sp>Failed" 4 + "Status:<sp>Failed" 6 ) ) ; } updateFunction . printStatus ( progressMap , lastProgressMap ) ; lastProgressMap = progressMap ; break ; case SUCCEEDED : updateFunction . printStatus ( progressMap , lastProgressMap ) ; lastProgressMap = progressMap ; double duration = ( ( java . lang . System . currentTimeMillis ( ) ) - ( startTime ) ) / 1000.0 ; console . 
printInfo ( ( "Status:<sp>Finished<sp>successfully<sp>in<sp>" + ( java . lang . String . format ( "%.2f<sp>seconds" , duration ) ) ) ) ; running = false ; done = true ; break ; case FAILED : console . printError ( "Status:<sp>Failed" ) ; running = false ; done = true ; rc = 3 ; break ; case UNKNOWN : console . printError ( "Status:<sp>Failed" 2 ) ; running = false ; done = true ; rc = 4 ; break ; } } if ( ! done ) { java . lang . Thread . sleep ( checkInterval ) ; } } catch ( java . lang . Exception e ) { java . lang . String msg = ( "Status:<sp>Failed" 0 + ( org . apache . hadoop . hive . ql . exec . Utilities . getNameMessage ( e ) ) ) + "Status:<sp>Failed" 1 ; msg = ( ( "Failed<sp>to<sp>monitor<sp>Job[<sp>" + ( sparkJobStatus . getJobId ( ) ) ) + "Status:<sp>Failed" 3 ) + msg ; org . apache . hadoop . hive . ql . exec . spark . status . LOG . error ( msg , e ) ; console . printError ( msg , ( "Status:<sp>Failed" 7 + ( org . apache . hadoop . util . StringUtils . stringifyException ( e ) ) ) ) ; rc = 1 ; done = true ; sparkJobStatus . setMonitorError ( e ) ; } finally { if ( done ) { break ; } } } perfLogger . PerfLogEnd ( org . apache . hadoop . hive . ql . exec . spark . status . CLASS_NAME , PerfLogger . SPARK_RUN_JOB )
|
org . junit . Assert . assertEquals ( remoteSparkJobMonitor . startMonitor ( ) , 3 )
|
testDeprecateImage_Null ( ) { org . easymock . EasyMock . expect ( computeRpcMock . deprecateImage ( com . google . cloud . compute . deprecated . ComputeImplTest . IMAGE_ID . getProject ( ) , com . google . cloud . compute . deprecated . ComputeImplTest . IMAGE_ID . getImage ( ) , com . google . cloud . compute . deprecated . ComputeImplTest . DEPRECATION_STATUS . toPb ( ) , com . google . cloud . compute . deprecated . ComputeImplTest . EMPTY_RPC_OPTIONS ) ) . andReturn ( null ) ; org . easymock . EasyMock . replay ( computeRpcMock ) ; compute = options . getService ( ) ; "<AssertPlaceHolder>" ; } deprecate ( com . google . cloud . compute . deprecated . DeprecationStatus , com . google . cloud . compute . deprecated . Compute . OperationOption [ ] ) { return compute . deprecate ( getImageId ( ) , deprecationStatus , options ) ; }
|
org . junit . Assert . assertNull ( compute . deprecate ( com . google . cloud . compute . deprecated . ComputeImplTest . IMAGE_ID , com . google . cloud . compute . deprecated . ComputeImplTest . DEPRECATION_STATUS ) )
|
convert_builtin_list_members ( ) { com . psddev . dari . util . List < java . lang . String > inputs = com . psddev . dari . util . Arrays . asList ( "1" , "2" , "3" ) ; com . psddev . dari . util . List < java . lang . Integer > expect = com . psddev . dari . util . Arrays . asList ( 1 , 2 , 3 ) ; com . psddev . dari . util . TypeReference < com . psddev . dari . util . List < java . lang . Integer > > typeref = new com . psddev . dari . util . TypeReference < com . psddev . dari . util . List < java . lang . Integer > > ( ) { } ; com . psddev . dari . util . List < java . lang . Integer > output = ( ( com . psddev . dari . util . List < java . lang . Integer > ) ( converter . convert ( typeref . getType ( ) , inputs ) ) ) ; "<AssertPlaceHolder>" ; } getType ( ) { com . psddev . dari . db . ObjectType type = getDatabase ( ) . getEnvironment ( ) . getTypeById ( getTypeId ( ) ) ; if ( type == null ) { for ( java . lang . Object object : linkedObjects . values ( ) ) { if ( ( object instanceof com . psddev . dari . db . ObjectType ) && ( getId ( ) . equals ( getTypeId ( ) ) ) ) { type = ( ( com . psddev . dari . db . ObjectType ) ( object ) ) ; type . setObjectClassName ( com . psddev . dari . db . ObjectType . class . getName ( ) ) ; type . initialize ( ) ; } break ; } } return type ; }
|
org . junit . Assert . assertEquals ( expect , output )
|
getUrlModuleOnDeviceDeviceGoodCase ( ) { final java . lang . String deviceId = "xxx-device" ; final java . lang . String iotHubName = "b.c.d" ; final java . lang . String hostName = "HOSTNAME." + iotHubName ; final java . lang . String sharedAccessKeyName = "ACCESSKEYNAME" ; final java . lang . String policyName = "HostName=" 1 ; final java . lang . String sharedAccessKey = "1234567890abcdefghijklmnopqrstvwxyz=" ; final java . lang . String connectionString = ( ( ( ( ( ( "HostName=" + hostName ) + ";SharedAccessKeyName=" ) + sharedAccessKeyName ) + ";" ) + policyName ) + "=" ) + sharedAccessKey ; final com . microsoft . azure . sdk . iot . service . IotHubConnectionString iotHubConnectionString = com . microsoft . azure . sdk . iot . service . IotHubConnectionStringBuilder . createConnectionString ( connectionString ) ; final java . lang . String expected = "https://HOSTNAME.b.c.d/devices/xxx-device/modules?" + ( tests . unit . com . microsoft . azure . sdk . iot . service . IotHubConnectionStringTest . URL_API_VERSION ) ; java . lang . String actual = iotHubConnectionString . getUrlModulesOnDevice ( deviceId ) . toString ( ) ; "<AssertPlaceHolder>" ; } toString ( ) { return this . token ; }
|
org . junit . Assert . assertEquals ( "HostName=" 0 , expected , actual )
|
testIsCharacter ( ) { boolean result = fixture . isCharacter ( ) ; "<AssertPlaceHolder>" ; } isCharacter ( ) { return ( ( fLength ) == ( org . eclipse . tracecompass . ctf . core . event . types . IntegerDeclaration . SIZE_8 ) ) && ( ( fEncoding ) != ( Encoding . NONE ) ) ; }
|
org . junit . Assert . assertEquals ( false , result )
|
testHashCodeNotEqualId2 ( ) { org . oregami . service . ServiceErrorContext c1 = new org . oregami . service . ServiceErrorContext ( "test" , "id1" ) ; org . oregami . service . ServiceErrorContext c2 = new org . oregami . service . ServiceErrorContext ( "test" , "id2" ) ; "<AssertPlaceHolder>" ; }
|
org . junit . Assert . assertTrue ( ( ( c1 . hashCode ( ) ) != ( c2 . hashCode ( ) ) ) )
|
testTimestamp ( ) { org . knowm . xchange . binance . service . BinanceMarketDataService marketDataService = ( ( org . knowm . xchange . binance . service . BinanceMarketDataService ) ( org . knowm . xchange . test . binance . MarketDataServiceIntegration . exchange . getMarketDataService ( ) ) ) ; long serverTime = marketDataService . getTimestamp ( ) ; "<AssertPlaceHolder>" ; } getTimestamp ( ) { return createDateTime ; }
|
org . junit . Assert . assertTrue ( ( 0 < serverTime ) )
|
user_should_be_retrievable_from_userDao_by_username ( ) { java . lang . String username = "jdoe" ; org . jboss . as . quickstarts . tasksJsf . User retrieved = userDao . getForUsername ( username ) ; "<AssertPlaceHolder>" ; } getUsername ( ) { return username ; }
|
org . junit . Assert . assertEquals ( username , retrieved . getUsername ( ) )
|
testTranslateListSizeOne ( ) { java . util . List < org . sagebionetworks . repo . model . entity . query . Value > list = new java . util . ArrayList < org . sagebionetworks . repo . model . entity . query . Value > ( 1 ) ; org . sagebionetworks . repo . model . entity . query . StringValue sv = new org . sagebionetworks . repo . model . entity . query . StringValue ( ) ; java . lang . String in = "a<sp>string" ; sv . setValue ( in ) ; list . add ( sv ) ; java . lang . String out = ( ( java . lang . String ) ( manager . translateValue ( list ) ) ) ; "<AssertPlaceHolder>" ; } translateValue ( java . util . List ) { if ( ( values . size ( ) ) == 1 ) { return translateValue ( values . get ( 0 ) ) ; } else { java . util . List < java . lang . Object > list = new java . util . ArrayList < java . lang . Object > ( values . size ( ) ) ; for ( org . sagebionetworks . repo . model . entity . query . Value v : values ) { list . add ( translateValue ( v ) ) ; } return list ; } }
|
org . junit . Assert . assertEquals ( in , out )
|
testTopSource ( ) { com . questdb . ql . RecordSourcePrinter p = new com . questdb . ql . RecordSourcePrinter ( sink ) ; try ( com . questdb . ql . RecordSource rs = new com . questdb . ql . TopRecordSource ( compile ( "quote" ) , new com . questdb . ql . ops . constant . LongConstant ( 0 , 0 ) , new com . questdb . ql . ops . constant . LongConstant ( 10 , 0 ) ) ) { p . print ( rs , getFactory ( ) ) ; final java . lang . String expected = "quote" 0 + ( ( ( ( ( ( ( ( "2013-09-04T10:00:00.000Z\tADM.L\t104.021850585938\t0.006688738358\t1575378703\t1436881714\tFast<sp>trading\tLXE\n" + "2013-09-04T10:00:00.000Z\tAGK.L\t879.117187500000\t496.806518554688\t1530831067\t339631474\tFast<sp>trading\tLXE\n" ) + "2013-09-04T10:00:00.000Z\tABF.L\t768.000000000000\t0.000020634160\t426455968\t1432278050\tFast<sp>trading\tLXE\n" ) + "2013-09-04T10:00:00.000Z\tABF.L\t256.000000000000\t0.000000035797\t1404198\t1153445279\tFast<sp>trading\tLXE\n" ) + "2013-09-04T10:00:00.000Z\tWTB.L\t920.625000000000\t0.040750414133\t761275053\t1232884790\tFast<sp>trading\tLXE\n" ) + "2013-09-04T10:00:00.000Z\tAGK.L\t512.000000000000\t896.000000000000\t422941535\t113506296\tFast<sp>trading\tLXE\n" ) + "2013-09-04T10:00:00.000Z\tRRS.L\t12.923866510391\t0.032379742712\t2006313928\t2132716300\tFast<sp>trading\tLXE\n" ) + "2013-09-04T10:00:00.000Z\tBT-A.L\t0.006530375686\t0.000000000000\t1890602616\t2137969456\tFast<sp>trading\tLXE\n" ) + "2013-09-04T10:00:00.000Z\tABF.L\t0.000000017324\t720.000000000000\t410717394\t458818940\tFast<sp>trading\tLXE\n" ) ; "<AssertPlaceHolder>" ; } } toString ( ) { return ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( "Quote{" + "timestamp=" ) + ( timestamp ) ) + ",<sp>sym='" ) + ( sym ) ) + '\'' ) + ",<sp>bid=" ) + ( bid ) ) + ",<sp>ask=" ) + ( ask ) ) + ",<sp>bidSize=" ) + ( bidSize ) ) + ",<sp>askSize=" ) + ( askSize ) ) + ",<sp>mode='" ) + ( mode ) ) + '\'' ) + ",<sp>ex='" ) + ( ex ) ) + '\'' ) + '}' ; }
|
org . junit . Assert . assertEquals ( expected , sink . toString ( ) )
|
testUpdateByPrimaryKeySelectiveForceByNull ( ) { org . apache . ibatis . session . SqlSession sqlSession = getSqlSession ( ) ; try { tk . mybatis . mapper . additional . update . force . CountryIntMapper mapper = sqlSession . getMapper ( tk . mybatis . mapper . additional . update . force . CountryIntMapper . class ) ; tk . mybatis . mapper . additional . update . force . CountryInt country = new tk . mybatis . mapper . additional . update . force . CountryInt ( ) ; country . setId ( 174 ) ; country . setCountryname ( "" ) ; mapper . updateByPrimaryKeySelectiveForce ( country , null ) ; country = mapper . selectByPrimaryKey ( 174 ) ; "<AssertPlaceHolder>" ; } finally { sqlSession . close ( ) ; } } getCountrycode ( ) { return countrycode ; }
|
org . junit . Assert . assertNotNull ( country . getCountrycode ( ) )
|
getEncountersByPatient_shouldNotGetVoidedEncounters ( ) { org . openmrs . api . EncounterService encounterService = org . openmrs . api . context . Context . getEncounterService ( ) ; java . util . List < org . openmrs . Encounter > encounters = encounterService . getEncountersByPatient ( new org . openmrs . Patient ( 3 ) ) ; "<AssertPlaceHolder>" ; } size ( ) { return getMemberships ( ) . stream ( ) . filter ( ( m ) -> ! ( m . getVoided ( ) ) ) . collect ( java . util . stream . Collectors . toList ( ) ) . size ( ) ; }
|
org . junit . Assert . assertEquals ( 2 , encounters . size ( ) )
|
testGetInfo ( ) { "<AssertPlaceHolder>" ; } getInfo ( ) { return ( ( ( ( ( net . ontopia . Ontopia . getName ( ) ) + "<sp>" ) + ( net . ontopia . Ontopia . getVersion ( ) ) ) + "<sp>(" ) + ( net . ontopia . Ontopia . getBuild ( ) ) ) + ")" ; }
|
org . junit . Assert . assertNotNull ( net . ontopia . Ontopia . getInfo ( ) )
|
listInstances_hasRyaDetailsTable ( ) { final org . apache . rya . api . client . accumulo . AccumuloConnectionDetails connectionDetails = new org . apache . rya . api . client . accumulo . AccumuloConnectionDetails ( getUsername ( ) , getPassword ( ) . toCharArray ( ) , getInstanceName ( ) , getZookeepers ( ) ) ; final org . apache . rya . api . client . Install install = new org . apache . rya . api . client . accumulo . AccumuloInstall ( connectionDetails , getConnector ( ) ) ; install . install ( "instance1_" , org . apache . rya . api . client . Install . InstallConfiguration . builder ( ) . build ( ) ) ; install . install ( "instance2_" , org . apache . rya . api . client . Install . InstallConfiguration . builder ( ) . build ( ) ) ; install . install ( "instance3_" , org . apache . rya . api . client . Install . InstallConfiguration . builder ( ) . build ( ) ) ; final org . apache . rya . api . client . ListInstances listInstances = new org . apache . rya . api . client . accumulo . AccumuloListInstances ( connectionDetails , getConnector ( ) ) ; final java . util . List < java . lang . String > instances = listInstances . listInstances ( ) ; java . util . Collections . sort ( instances ) ; final java . util . List < java . lang . String > expected = com . google . common . collect . Lists . newArrayList ( "instance1_" , "instance2_" , "instance3_" ) ; "<AssertPlaceHolder>" ; } listInstances ( ) { final java . util . List < java . lang . String > ryaInstances = new java . util . ArrayList ( ) ; for ( final java . lang . String db : adminClient . listDatabaseNames ( ) ) { for ( final java . lang . String collection : adminClient . getDatabase ( db ) . listCollectionNames ( ) ) { if ( collection . equals ( MongoRyaInstanceDetailsRepository . INSTANCE_DETAILS_COLLECTION_NAME ) ) { ryaInstances . add ( db ) ; break ; } } } return ryaInstances ; }
|
org . junit . Assert . assertEquals ( expected , instances )
|
testWhitespaceQuery ( ) { "<AssertPlaceHolder>" ; } reparse ( java . lang . String , boolean , boolean , boolean ) { try { return toString ( org . postgresql . core . Parser . parseJdbcSql ( query , standardConformingStrings , withParameters , splitStatements , false ) ) ; } catch ( java . sql . SQLException e ) { throw new java . lang . IllegalStateException ( ( "Parser.parseJdbcSql:<sp>" + ( e . getMessage ( ) ) ) , e ) ; } }
|
org . junit . Assert . assertEquals ( "" , reparse ( "<sp>" , true , false , true ) )
|
testCreation ( ) { final org . esa . s3tbx . insitu . server . InsituServerRegistry registry = org . esa . s3tbx . insitu . server . InsituServerRegistry . getInstance ( ) ; final java . util . Set < org . esa . s3tbx . insitu . server . InsituServerSpi > registeredServers = registry . getAllRegisteredServers ( ) ; "<AssertPlaceHolder>" ; } getAllRegisteredServers ( ) { return java . util . Collections . unmodifiableSet ( registry . getServices ( ) ) ; }
|
org . junit . Assert . assertTrue ( ( ( registeredServers . size ( ) ) >= 1 ) )
|
getAccessLevelWithGuestUser ( ) { org . phenotips . data . Patient p = mock ( org . phenotips . data . Patient . class ) ; org . phenotips . data . permissions . internal . EntityAccessHelper helper = mock ( org . phenotips . data . permissions . internal . EntityAccessHelper . class ) ; org . phenotips . data . permissions . internal . EntityAccessManager am = mock ( org . phenotips . data . permissions . internal . EntityAccessManager . class ) ; org . phenotips . data . permissions . internal . EntityVisibilityManager vm = mock ( org . phenotips . data . permissions . internal . EntityVisibilityManager . class ) ; org . phenotips . data . permissions . PatientAccess pa = new org . phenotips . data . permissions . internal . DefaultPatientAccess ( p , helper , am , vm ) ; when ( am . getOwner ( p ) ) . thenReturn ( org . phenotips . data . permissions . internal . DefaultPatientAccessTest . OWNER_OBJECT ) ; when ( helper . getCurrentUser ( ) ) . thenReturn ( null ) ; org . phenotips . data . permissions . Visibility publicV = mock ( org . phenotips . data . permissions . Visibility . class ) ; when ( vm . getVisibility ( p ) ) . thenReturn ( publicV ) ; org . phenotips . data . permissions . AccessLevel view = new org . phenotips . data . permissions . internal . access . ViewAccessLevel ( ) ; when ( publicV . getDefaultAccessLevel ( ) ) . thenReturn ( view ) ; org . phenotips . data . permissions . AccessLevel none = new org . phenotips . data . permissions . internal . access . NoAccessLevel ( ) ; when ( am . resolveAccessLevel ( "none" ) ) . thenReturn ( none ) ; "<AssertPlaceHolder>" ; } getAccessLevel ( ) { return this . access ; }
|
org . junit . Assert . assertSame ( none , pa . getAccessLevel ( ) )
|
accept_reject ( ) { java . lang . String ignoreString = "10.0.0.1" ; com . navercorp . pinpoint . common . server . util . AddressFilter ignoreAddressFilter = new com . navercorp . pinpoint . common . server . util . IgnoreAddressFilter ( java . util . Arrays . asList ( ignoreString ) ) ; org . jboss . netty . channel . Channel ignoreChannel = mockChannel ( ignore ) ; com . navercorp . pinpoint . collector . receiver . thrift . AddressFilterAdaptor adaptor = new com . navercorp . pinpoint . collector . receiver . thrift . AddressFilterAdaptor ( ignoreAddressFilter ) ; "<AssertPlaceHolder>" ; } accept ( org . jboss . netty . channel . Channel ) { return true ; }
|
org . junit . Assert . assertFalse ( adaptor . accept ( ignoreChannel ) )
|
longWrittenWithUnionSchemaIsConvertedToUnionLongFloatSchema ( ) { org . apache . avro . Schema writer = org . apache . avro . TestReadingWritingDataInEvolvedSchemas . UNION_LONG_RECORD ; org . apache . avro . generic . GenericData . Record record = defaultRecordWithSchema ( writer , org . apache . avro . TestReadingWritingDataInEvolvedSchemas . FIELD_A , 42L ) ; byte [ ] encoded = encodeGenericBlob ( record ) ; org . apache . avro . generic . GenericData . Record decoded = decodeGenericBlob ( org . apache . avro . TestReadingWritingDataInEvolvedSchemas . UNION_LONG_FLOAT_RECORD , writer , encoded ) ; "<AssertPlaceHolder>" ; } get ( org . apache . trevni . MetaData ) { java . lang . String name = meta . getChecksum ( ) ; if ( ( name == null ) || ( "null" . equals ( name ) ) ) return new org . apache . trevni . NullChecksum ( ) ; else if ( "crc32" . equals ( name ) ) return new org . apache . trevni . Crc32Checksum ( ) ; else throw new org . apache . trevni . TrevniRuntimeException ( ( "Unknown<sp>checksum:<sp>" + name ) ) ; }
|
org . junit . Assert . assertEquals ( 42L , decoded . get ( org . apache . avro . TestReadingWritingDataInEvolvedSchemas . FIELD_A ) )
|
testSetGetMessageId ( ) { System . out . println ( "testSetGetMessageId" ) ; gov . hhs . fha . nhinc . asyncmsgs . model . AsyncMsgRecord instance = new gov . hhs . fha . nhinc . asyncmsgs . model . AsyncMsgRecord ( ) ; java . lang . String expResult = "uuid:1234567890" ; instance . setMessageId ( expResult ) ; java . lang . String result = instance . getMessageId ( ) ; "<AssertPlaceHolder>" ; } getMessageId ( ) { return properties . get ( SoapEdgeContext . MESSAGE_ID ) ; }
|
org . junit . Assert . assertEquals ( expResult , result )
|
countWithExceptionReturnsNegativeOne ( ) { when ( this . client . execute ( any ( org . apache . http . client . methods . HttpUriRequest . class ) ) ) . thenThrow ( new java . io . IOException ( ) ) ; long result = this . mocker . getComponentUnderTest ( ) . count ( new java . util . HashMap < java . lang . String , java . lang . Object > ( ) ) ; "<AssertPlaceHolder>" ; }
|
org . junit . Assert . assertEquals ( ( - 1 ) , result )
|
testSelectBigInts ( ) { for ( int gap = 1 ; gap <= 1024 ; gap *= 2 ) { org . roaringbitmap . RoaringBitmap rb = new org . roaringbitmap . RoaringBitmap ( ) ; for ( int k = 0 ; k < 100000 ; k += gap ) { rb . add ( ( ( 1 << 31 ) + k ) ) ; } for ( int k = 0 ; k < ( 100000 / gap ) ; ++ k ) { "<AssertPlaceHolder>" ; } } } select ( int ) { long leftover = org . roaringbitmap . Util . toUnsignedLong ( j ) ; for ( int i = 0 ; i < ( this . highLowContainer . size ( ) ) ; i ++ ) { int thiscard = this . highLowContainer . getCardinality ( i ) ; if ( thiscard > leftover ) { int keycontrib = ( this . highLowContainer . getKeyAtIndex ( i ) ) << 16 ; org . roaringbitmap . buffer . MappeableContainer c = this . highLowContainer . getContainerAtIndex ( i ) ; int lowcontrib = org . roaringbitmap . buffer . BufferUtil . BufferUtil . toIntUnsigned ( c . select ( ( ( int ) ( leftover ) ) ) ) ; return lowcontrib + keycontrib ; } leftover -= thiscard ; } throw new java . lang . IllegalArgumentException ( ( ( ( ( "You<sp>are<sp>trying<sp>to<sp>select<sp>the<sp>" + j ) + "th<sp>value<sp>when<sp>the<sp>cardinality<sp>is<sp>" ) + ( this . getCardinality ( ) ) ) + "." ) ) ; }
|
org . junit . Assert . assertEquals ( ( ( 1 << 31 ) + ( k * gap ) ) , rb . select ( k ) )
|
ignoreExceptionsFromTerminationHook ( ) { com . spotify . flo . context . TestTerminationHookFactory . injectHook ( exceptionalTerminationHook ) ; java . util . concurrent . atomic . AtomicInteger status = new java . util . concurrent . atomic . AtomicInteger ( ) ; com . spotify . flo . context . FloRunner . runTask ( FOO_TASK ) . waitAndExit ( status :: set ) ; verify ( exceptionalTerminationHook , times ( 1 ) ) . accept ( eq ( 0 ) ) ; "<AssertPlaceHolder>" ; } get ( ) { if ( ( value ) == null ) { synchronized ( this ) { if ( ( value ) == null ) { value = supplier . get ( ) ; } } } return value ; }
|
org . junit . Assert . assertThat ( status . get ( ) , org . hamcrest . CoreMatchers . is ( 0 ) )
|
serialize ( ) { com . google . gson . Gson gson = com . github . seratch . jslack . common . json . GsonFactory . createSnakeCase ( ) ; com . github . seratch . jslack . api . model . event . LinkSharedEvent event = new com . github . seratch . jslack . api . model . event . LinkSharedEvent ( ) ; java . lang . String generatedJson = gson . toJson ( event ) ; java . lang . String expectedJson = "{\"type\":\"link_shared\"}" ; "<AssertPlaceHolder>" ; } createSnakeCase ( ) { return new com . google . gson . GsonBuilder ( ) . setFieldNamingPolicy ( FieldNamingPolicy . LOWER_CASE_WITH_UNDERSCORES ) . registerTypeAdapter ( com . github . seratch . jslack . api . model . block . LayoutBlock . class , new com . github . seratch . jslack . common . json . GsonLayoutBlockFactory ( ) ) . registerTypeAdapter ( com . github . seratch . jslack . api . model . block . composition . TextObject . class , new com . github . seratch . jslack . common . json . GsonTextObjectFactory ( ) ) . registerTypeAdapter ( com . github . seratch . jslack . api . model . block . ContextBlockElement . class , new com . github . seratch . jslack . common . json . GsonContextBlockElementFactory ( ) ) . registerTypeAdapter ( com . github . seratch . jslack . api . model . block . element . BlockElement . class , new com . github . seratch . jslack . common . json . GsonBlockElementFactory ( ) ) . create ( ) ; }
|
org . junit . Assert . assertThat ( generatedJson , org . hamcrest . CoreMatchers . is ( expectedJson ) )
|
onCreation_setsDefaultLoggingErrorsToFalse_ifGreenplumServerVersionIs6andAbove ( ) { org . mockito . Mockito . when ( mockDataSource . isServerVersionAtLeast ( 9 , 4 ) ) . thenReturn ( true ) ; org . jkiss . dbeaver . ext . greenplum . model . GreenplumExternalTable table = new org . jkiss . dbeaver . ext . greenplum . model . GreenplumExternalTable ( mockSchema , mockResults ) ; "<AssertPlaceHolder>" ; org . mockito . Mockito . verify ( mockResults , org . mockito . Mockito . times ( 0 ) ) . getBoolean ( "is_logging_errors" ) ; } isLoggingErrors ( ) { return loggingErrors ; }
|
org . junit . Assert . assertFalse ( table . isLoggingErrors ( ) )
|
testCreateEntityAndInferType ( ) { org . springframework . data . neo4j . support . IndexingNodeTypeRepresentationStrategyTest . Thing newThing = nodeTypeRepresentationStrategy . createEntity ( org . springframework . data . neo4j . support . IndexingNodeTypeRepresentationStrategyTest . node ( thing ) ) ; "<AssertPlaceHolder>" ; } node ( org . springframework . data . neo4j . support . IndexingNodeTypeRepresentationStrategyTest$Thing ) { return thing . getPersistentState ( ) ; }
|
org . junit . Assert . assertEquals ( thing , newThing )
|
shouldScanAndScroll ( ) { final com . jeromeloisel . db . repository . elasticsearch . Person saved = repository . save ( com . jeromeloisel . db . repository . elasticsearch . RepositoryIntegrationTest . PERSON ) ; final com . jeromeloisel . db . repository . elasticsearch . Person anotherSave = repository . save ( saved ) ; "<AssertPlaceHolder>" ; repository . delete ( saved ) ; } save ( T extends com . jeromeloisel . db . entity . Entity ) { return saveAll ( com . google . common . collect . ImmutableList . of ( entity ) ) . get ( 0 ) ; }
|
org . junit . Assert . assertEquals ( anotherSave . getId ( ) , saved . getId ( ) )
|
receiverAddsElementsToBundle ( ) { org . apache . beam . sdk . fn . data . FnDataReceiver < org . apache . beam . sdk . util . WindowedValue < byte [ ] > > receiver = factory . create ( fooPC . getId ( ) ) ; org . apache . beam . model . pipeline . v1 . RunnerApi . Components . Builder builder = baseComponents . toBuilder ( ) ; java . lang . String sdkWireCoderId = org . apache . beam . runners . fnexecution . wire . WireCoders . addSdkWireCoder ( fooPC , builder ) ; org . apache . beam . model . pipeline . v1 . RunnerApi . Components components = builder . build ( ) ; org . apache . beam . sdk . coders . Coder < org . apache . beam . sdk . util . WindowedValue < java . lang . String > > sdkCoder = ( ( org . apache . beam . sdk . coders . Coder < org . apache . beam . sdk . util . WindowedValue < java . lang . String > > ) ( org . apache . beam . runners . core . construction . RehydratedComponents . forComponents ( components ) . getCoder ( sdkWireCoderId ) ) ) ; org . apache . beam . sdk . coders . Coder < org . apache . beam . sdk . util . WindowedValue < byte [ ] > > runnerCoder = org . apache . beam . runners . fnexecution . wire . WireCoders . instantiateRunnerWireCoder ( fooPC , components ) ; org . apache . beam . sdk . util . WindowedValue < byte [ ] > firstElem = org . apache . beam . sdk . util . CoderUtils . decodeFromByteArray ( runnerCoder , org . apache . beam . sdk . util . CoderUtils . encodeToByteArray ( sdkCoder , org . apache . beam . sdk . util . WindowedValue . of ( "1" , new org . joda . time . Instant ( 120 ) , new org . apache . beam . sdk . transforms . windowing . IntervalWindow ( new org . joda . time . Instant ( 0 ) , org . joda . time . Duration . standardMinutes ( 5 ) ) , PaneInfo . NO_FIRING ) ) ) ; org . apache . beam . sdk . util . WindowedValue < byte [ ] > secondElem = org . apache . beam . sdk . util . CoderUtils . decodeFromByteArray ( runnerCoder , org . apache . beam . sdk . util . CoderUtils . encodeToByteArray ( sdkCoder , org . apache . beam . sdk . util . WindowedValue . of ( "2" , new org . joda . time . Instant ( 240 ) , new org . apache . beam . sdk . transforms . windowing . IntervalWindow ( new org . joda . time . Instant ( 0 ) , org . joda . time . Duration . standardMinutes ( 5 ) ) , PaneInfo . NO_FIRING ) ) ) ; receiver . accept ( firstElem ) ; receiver . accept ( secondElem ) ; org . apache . beam . runners . direct . portable . CommittedBundle < ? > output = getOnlyElement ( outputBundles ) . commit ( org . joda . time . Instant . now ( ) ) ; "<AssertPlaceHolder>" ; } containsInAnyOrder ( org . apache . beam . sdk . coders . Coder , T [ ] ) { final org . apache . beam . sdk . testing . SerializableMatchers . SerializableSupplier < T [ ] > itemsSupplier = new org . apache . beam . sdk . testing . SerializableMatchers . SerializableArrayViaCoder ( coder , items ) ; return org . apache . beam . sdk . testing . SerializableMatchers . fromSupplier ( ( ) -> org . hamcrest . Matchers . containsInAnyOrder ( itemsSupplier . get ( ) ) ) ; }
|
org . junit . Assert . assertThat ( output , org . hamcrest . Matchers . containsInAnyOrder ( firstElem , secondElem ) )
|
testMapSeriousDisease ( ) { final riv . clinicalprocess . healthcond . description . _2 . AlertInformationBodyType testObj = new riv . clinicalprocess . healthcond . description . _2 . AlertInformationBodyType ( ) ; final se . rivta . en13606 . ehrextract . v11 . ELEMENT alvarligSjukdom = new se . rivta . en13606 . ehrextract . v11 . ELEMENT ( ) ; alvarligSjukdom . setMeaning ( se . skl . skltpservices . npoadapter . mapper . AlertInformationMapperTest . cdType ( se . skl . skltpservices . npoadapter . mapper . AlertInformationMapperTest . TEST_DATA_1 , se . skl . skltpservices . npoadapter . mapper . AlertInformationMapperTest . TEST_DATA_2 , se . skl . skltpservices . npoadapter . mapper . AlertInformationMapperTest . TEST_DATA_3 ) ) ; se . skl . skltpservices . npoadapter . mapper . AlertInformationMapperTest . mapper . mapSeriousDisease ( testObj , alvarligSjukdom , "upp-uas-invalid" ) ; "<AssertPlaceHolder>" ; se . skl . skltpservices . npoadapter . mapper . AlertInformationMapperTest . mapper . mapSeriousDisease ( testObj , alvarligSjukdom , "upp-uas-sjd" ) ; verifyCDType ( alvarligSjukdom . getMeaning ( ) , testObj . getSeriousDisease ( ) . getDisease ( ) ) ; } mapSeriousDisease ( riv . clinicalprocess . healthcond . description . _2 . AlertInformationBodyType , se . rivta . en13606 . ehrextract . v11 . ITEM , java . lang . String ) { if ( ( bodyType . getSeriousDisease ( ) ) == null ) { bodyType . setSeriousDisease ( new riv . clinicalprocess . healthcond . description . _2 . SeriousDiseaseType ( ) ) ; } switch ( meaning ) { case se . skl . skltpservices . npoadapter . mapper . AlertInformationMapper . SJUKDOM : if ( item instanceof se . rivta . en13606 . ehrextract . v11 . ELEMENT ) { bodyType . getSeriousDisease ( ) . setDisease ( se . skl . skltpservices . npoadapter . mapper . util . EHRUtil . cvTypeFromElementWithValueST ( ( ( se . rivta . en13606 . ehrextract . v11 . ELEMENT ) ( item ) ) , riv . clinicalprocess . healthcond . description . _2 . CVType . class ) ) ; } break ; } }
|
org . junit . Assert . assertNull ( testObj . getSeriousDisease ( ) . getDisease ( ) )
|
unresolvedIssueDetectorFailedTest ( ) { "<AssertPlaceHolder>" ; }
|
org . junit . Assert . assertTrue ( true )
|
fromPropertiesPoolMaxWaiters ( ) { final java . util . Properties p = new java . util . Properties ( ) ; p . setProperty ( "db.dataSourceSupplierClass" , dataSourceSupplierClass ) ; p . setProperty ( "db.host" , host ) ; p . setProperty ( "db.port" , java . lang . Integer . toString ( port ) ) ; p . setProperty ( "db.user" , user ) ; p . setProperty ( "db.poolMaxWaiters" , java . lang . Integer . toString ( poolMaxWaiters ) ) ; final io . trane . ndbc . Config c = io . trane . ndbc . Config . fromProperties ( "db" , p ) ; "<AssertPlaceHolder>" ; } poolMaxWaiters ( ) { final io . trane . ndbc . Config c = io . trane . ndbc . Config . create ( dataSourceSupplierClass , host , port , user ) ; final int poolMaxWaiters = 100 ; org . junit . Assert . assertEquals ( java . util . Optional . of ( poolMaxWaiters ) , c . poolMaxWaiters ( poolMaxWaiters ) . poolMaxWaiters ( ) ) ; }
|
org . junit . Assert . assertEquals ( c . poolMaxWaiters ( ) , java . util . Optional . of ( poolMaxWaiters ) )
|
testInvokeSetter ( ) { cn . alfredyuan . java7 . dynamics . CustomReflectTest . MyObject obj = new cn . alfredyuan . java7 . dynamics . CustomReflectTest . MyObject ( ) ; java . lang . String name = "Alfred" ; try { cn . alfredyuan . java7 . dynamics . CustomReflect . invokeSetter ( obj , "name" , name ) ; } catch ( java . lang . NoSuchMethodException ex ) { java . util . logging . Logger . getLogger ( cn . alfredyuan . java7 . dynamics . CustomReflect . class . getName ( ) ) . log ( Level . SEVERE , null , ex ) ; } catch ( java . lang . reflect . InvocationTargetException ex ) { java . util . logging . Logger . getLogger ( cn . alfredyuan . java7 . dynamics . CustomReflect . class . getName ( ) ) . log ( Level . SEVERE , null , ex ) ; } catch ( java . lang . IllegalAccessException ex ) { java . util . logging . Logger . getLogger ( cn . alfredyuan . java7 . dynamics . CustomReflect . class . getName ( ) ) . log ( Level . SEVERE , null , ex ) ; } "<AssertPlaceHolder>" ; } getName ( ) { return name ; }
|
org . junit . Assert . assertEquals ( name , obj . getName ( ) )
|
testNormal ( ) { try { com . github . xiaour . utils . WXBizMsgCrypt pc = new com . github . xiaour . utils . WXBizMsgCrypt ( token , encodingAesKey , appId ) ; java . lang . String afterEncrpt = pc . encryptMsg ( replyMsg , timestamp , nonce ) ; javax . xml . parsers . DocumentBuilderFactory dbf = javax . xml . parsers . DocumentBuilderFactory . newInstance ( ) ; javax . xml . parsers . DocumentBuilder db = dbf . newDocumentBuilder ( ) ; java . io . StringReader sr = new java . io . StringReader ( afterEncrpt ) ; org . xml . sax . InputSource is = new org . xml . sax . InputSource ( sr ) ; org . w3c . dom . Document document = db . parse ( is ) ; org . w3c . dom . Element root = document . getDocumentElement ( ) ; org . w3c . dom . NodeList nodelist1 = root . getElementsByTagName ( "Encrypt" ) ; org . w3c . dom . NodeList nodelist2 = root . getElementsByTagName ( "MsgSignature" ) ; java . lang . String encrypt = nodelist1 . item ( 0 ) . getTextContent ( ) ; java . lang . String msgSignature = nodelist2 . item ( 0 ) . getTextContent ( ) ; java . lang . String fromXML = java . lang . String . format ( xmlFormat , encrypt ) ; java . lang . String afterDecrpt = pc . decryptMsg ( msgSignature , timestamp , nonce , fromXML ) ; "<AssertPlaceHolder>" ; } catch ( com . github . xiaour . exception . AesException e ) { org . junit . Assert . fail ( "" ) ; } } decryptMsg ( java . lang . String , java . lang . String , java . lang . String , java . lang . String ) { java . lang . String signature = com . github . xiaour . utils . SHA1 . getSHA1 ( token , timeStamp , nonce , encrypt ) ; if ( ! ( signature . equals ( msgSignature ) ) ) { throw new com . github . xiaour . exception . AesException ( com . github . xiaour . exception . AesException . ValidateSignatureError ) ; } java . lang . String result = decrypt ( encrypt ) ; return result ; }
|
org . junit . Assert . assertEquals ( replyMsg , afterDecrpt )
|
testEcommerceShippingCostF ( ) { request . setEcommerceShippingCost ( null ) ; "<AssertPlaceHolder>" ; } getEcommerceShippingCost ( ) { return ( ( java . lang . Double ) ( getParameter ( org . piwik . java . tracking . PiwikRequest . ECOMMERCE_SHIPPING_COST ) ) ) ; }
|
org . junit . Assert . assertNull ( request . getEcommerceShippingCost ( ) )
|
computeFactor_FirstWeekIncludingEndOfLastBilling ( ) { long startTimeUsage = org . oscm . test . DateTimeHandling . calculateMillis ( "2012-01-31<sp>23:59:58" ) ; long endTimeUsage = org . oscm . test . DateTimeHandling . calculateMillis ( "2012-01-31<sp>23:59:59" ) ; org . oscm . billingservice . service . model . BillingInput billingInput = org . oscm . billingservice . business . calculation . revenue . BillingInputFactory . newBillingInput ( "2012-02-01<sp>00:00:00" , "2012-03-01<sp>00:00:00" ) ; double factor = calculator . computeFactor ( PricingPeriod . WEEK , billingInput , startTimeUsage , endTimeUsage , true , true ) ; "<AssertPlaceHolder>" ; } computeFactor ( org . oscm . internal . types . enumtypes . PricingPeriod , org . oscm . billingservice . service . model . BillingInput , long , long , boolean , boolean ) { if ( usagePeriodEnd < usagePeriodStart ) { throw new org . oscm . internal . types . exception . IllegalArgumentException ( ( ( ( ( "Usage<sp>period<sp>end<sp>(" + ( new java . util . Date ( usagePeriodEnd ) ) ) + ")<sp>before<sp>usage<sp>period<sp>start<sp>(" ) + ( new java . util . Date ( usagePeriodStart ) ) ) + ")" ) ) ; } java . util . Calendar adjustedBillingPeriodStart = org . oscm . billingservice . business . calculation . revenue . PricingPeriodDateConverter . getStartTime ( billingInput . getCutOffDate ( ) , pricingPeriod ) ; java . util . Calendar adjustedBillingPeriodEnd = org . oscm . billingservice . business . calculation . revenue . PricingPeriodDateConverter . getStartTime ( billingInput . getBillingPeriodEnd ( ) , pricingPeriod ) ; if ( usagePeriodOutsideOfAdjustedBillingPeriod ( usagePeriodStart , usagePeriodEnd , adjustedBillingPeriodStart . getTimeInMillis ( ) , adjustedBillingPeriodEnd . getTimeInMillis ( ) ) ) { return 0.0 ; } else { java . util . Calendar startTimeForFactorCalculation = determineStartTimeForFactorCalculation ( pricingPeriod , adjustedBillingPeriodStart , usagePeriodStart , adjustsPeriodStart ) ; java . util . Calendar endTimeForFactorCalculation = determineEndTimeForFactorCalculation ( pricingPeriod , adjustedBillingPeriodEnd , usagePeriodEnd , adjustsPeriodEnd ) ; return computeFractionalFactor ( startTimeForFactorCalculation . getTimeInMillis ( ) , endTimeForFactorCalculation . getTimeInMillis ( ) , pricingPeriod ) ; } }
|
org . junit . Assert . assertEquals ( 1 , factor , 0 )
|
testDeprecateImage_Operation ( ) { org . easymock . EasyMock . expect ( computeRpcMock . deprecateImage ( com . google . cloud . compute . deprecated . ComputeImplTest . IMAGE_ID . getProject ( ) , com . google . cloud . compute . deprecated . ComputeImplTest . IMAGE_ID . getImage ( ) , com . google . cloud . compute . deprecated . ComputeImplTest . DEPRECATION_STATUS . toPb ( ) , com . google . cloud . compute . deprecated . ComputeImplTest . EMPTY_RPC_OPTIONS ) ) . andReturn ( globalOperation . toPb ( ) ) ; org . easymock . EasyMock . replay ( computeRpcMock ) ; compute = options . getService ( ) ; "<AssertPlaceHolder>" ; } deprecate ( com . google . cloud . compute . deprecated . DeprecationStatus , com . google . cloud . compute . deprecated . Compute . OperationOption [ ] ) { return compute . deprecate ( getImageId ( ) , deprecationStatus , options ) ; }
|
org . junit . Assert . assertEquals ( globalOperation , compute . deprecate ( com . google . cloud . compute . deprecated . ComputeImplTest . IMAGE_ID , com . google . cloud . compute . deprecated . ComputeImplTest . DEPRECATION_STATUS ) )
|
testErrorInFinally ( ) { cliOut . reset ( ) ; final org . jboss . as . cli . CommandContext ctx = org . jboss . as . test . integration . management . util . CLITestUtil . getCommandContext ( cliOut ) ; try { ctx . connectController ( ) ; ctx . handle ( "try" ) ; ctx . handle ( this . getAddPropertyReq ( "try" ) ) ; ctx . handle ( "finally" ) ; ctx . handle ( this . getReadNonexistingPropReq ( ) ) ; ctx . handle ( "end-try" ) ; org . junit . Assert . fail ( "finally<sp>is<sp>expceted<sp>to<sp>throw<sp>an<sp>exception" ) ; } catch ( org . jboss . as . cli . CommandLineException e ) { cliOut . reset ( ) ; ctx . handle ( getReadPropertyReq ( ) ) ; "<AssertPlaceHolder>" ; } finally { ctx . handleSafe ( getRemovePropertyReq ( ) ) ; ctx . terminateSession ( ) ; cliOut . reset ( ) ; } } getValue ( ) { return streamServer ; }
|
org . junit . Assert . assertEquals ( "try" , getValue ( ) )
|
testGetAll ( ) { System . out . println ( "test:getAll" ) ; org . glassfish . flashlight . datatree . TreeNode server = setupComplexTree ( ) ; java . util . List < org . glassfish . flashlight . datatree . TreeNode > list = server . getNodes ( "*" , false , true ) ; java . lang . String [ ] expected = new java . lang . String [ 7 ] ; expected [ 0 ] = "server" ; expected [ 1 ] = "server.wto" ; expected [ 2 ] = "server" 2 ; expected [ 3 ] = "server.wto.wtoson.wtosonsdaughter" ; expected [ 4 ] = "server" 0 ; expected [ 5 ] = "server" 1 ; expected [ 6 ] = "server.wto.wtodaughter.wtodaughtersdaughter" ; "<AssertPlaceHolder>" ; } size ( ) { return mappings . size ( ) ; }
|
org . junit . Assert . assertEquals ( expected . length , list . size ( ) )
|
test_initializeOptionsWithRequestMetrics ( ) { final com . amazonaws . services . dynamodbv2 . AcquireLockOptions options = com . amazonaws . services . dynamodbv2 . AcquireLockOptions . builder ( "hashKey" ) . build ( ) ; "<AssertPlaceHolder>" ; } build ( ) { return new com . amazonaws . services . dynamodbv2 . CreateDynamoDBTableOptions ( this . dynamoDBClient , this . provisionedThroughput , this . tableName , this . partitionKeyName , this . sortKeyName ) ; }
|
org . junit . Assert . assertNotNull ( options )
|
testInschrijvingParserGeenWaarden ( ) { final nl . bzk . migratiebrp . bericht . model . lo3 . parser . Lo3InschrijvingParser parser = new nl . bzk . migratiebrp . bericht . model . lo3 . parser . Lo3InschrijvingParser ( ) ; final nl . bzk . migratiebrp . conversie . model . lo3 . Lo3Stapel < nl . bzk . migratiebrp . conversie . model . lo3 . categorie . Lo3InschrijvingInhoud > parsedInhoud = parser . parse ( new java . util . ArrayList < nl . bzk . migratiebrp . conversie . model . lo3 . syntax . Lo3CategorieWaarde > ( ) ) ; "<AssertPlaceHolder>" ; } parse ( nl . bzk . brp . expressietaal . lexical . tokens . TokenStack ) { nl . bzk . brp . expressietaal . parser . ParserResultaat result ; nl . bzk . brp . expressietaal . parser . ParserFout fout ; symbolTable = nl . bzk . brp . expressietaal . symbols . BmrSymbolTableFactory . createSymbolTable ( ) ; if ( ( tokenStack == null ) || ( ( tokenStack . size ( ) ) == 0 ) ) { fout = new nl . bzk . brp . expressietaal . parser . ParserFout ( ParserFoutCode . EXPRESSIE_VERWACHT , new nl . bzk . brp . expressietaal . lexical . tokens . EndOfLineToken ( 0 ) ) ; result = new nl . bzk . brp . expressietaal . parser . ParserResultaat ( fout ) ; } else { stack = tokenStack ; nl . bzk . brp . expressietaal . parser . ParserContext context = new nl . bzk . brp . expressietaal . parser . ParserContext ( ) ; context . addIdentifier ( new nl . bzk . brp . expressietaal . parser . Identifier ( "persoon" , nl . bzk . brp . expressietaal . parser . syntaxtree . ExpressieType . PERSOON ) ) ; nl . bzk . brp . expressietaal . parser . ParserResultaat exp = parseExpression ( context ) ; fout = exp . getFout ( ) ; if ( ( ( fout == null ) && ( ! ( stack . finished ( ) ) ) ) && ( ( stack . currentToken ( ) . getTokenType ( ) ) != ( nl . bzk . brp . expressietaal . lexical . tokens . TokenType . END_OF_LINE ) ) ) { if ( ( exp . getExpressie ( ) ) == null ) { fout = new nl . bzk . brp . expressietaal . parser . ParserFout ( ParserFoutCode . SYNTAX_ERROR , stack . currentToken ( ) ) ; } else { fout = new nl . bzk . brp . expressietaal . parser . ParserFout ( ParserFoutCode . EINDE_EXPRESSIE_VERWACHT , stack . currentToken ( ) ) ; } result = new nl . bzk . brp . expressietaal . parser . ParserResultaat ( fout ) ; } else { result = new nl . bzk . brp . expressietaal . parser . ParserResultaat ( exp . getExpressie ( ) , fout ) ; } } return result ; }
|
org . junit . Assert . assertNull ( parsedInhoud )
|
testGetActiveSubscriptionsForUser ( ) { runTX ( new java . util . concurrent . Callable < java . lang . Void > ( ) { @ org . oscm . domobjects . Override public org . oscm . domobjects . Void call ( ) throws org . oscm . domobjects . Exception { supplier = org . oscm . test . data . Organizations . createOrganization ( mgr , OrganizationRoleType . TECHNOLOGY_PROVIDER , OrganizationRoleType . SUPPLIER ) ; admin = org . oscm . test . data . Organizations . createUserForOrg ( mgr , supplier , true , "admin" ) ; org . oscm . domobjects . Product prod = org . oscm . test . data . Products . createProduct ( supplier . getOrganizationId ( ) , "prod" , "techProd" , mgr ) ; org . oscm . domobjects . Subscription sub = createSubscription ( prod , "Subscription1" , supplier ) ; createUsageLicense ( sub , admin ) ; org . oscm . domobjects . Subscription sub2 = createSubscription ( prod , "Subscription2" , supplier ) ; createUsageLicense ( sub2 , admin ) ; return null ; } } ) ; runTX ( new java . util . concurrent . Callable < java . lang . Void > ( ) { @ org . oscm . domobjects . Override public org . oscm . domobjects . Void call ( ) throws org . oscm . domobjects . Exception { javax . persistence . Query query = mgr . createNamedQuery ( "Subscription.getCurrentUserSubscriptions" ) ; query . setParameter ( "userKey" , java . lang . Long . valueOf ( admin . getKey ( ) ) ) ; query . setParameter ( "status" , Subscription . VISIBLE_SUBSCRIPTION_STATUS ) ; java . util . List < ? > result = query . getResultList ( ) ; "<AssertPlaceHolder>" ; return null ; } } ) ; } size ( ) { return categoriesForMarketplace . size ( ) ; }
|
org . junit . Assert . assertEquals ( 2 , result . size ( ) )
|
testVerifyAndReconnect ( ) { com . streamsets . pipeline . lib . remote . RemoteConnector connector = getConnector ( getBean ( ( ( ( ( getScheme ( ) ) + "://localhost:" ) + ( port ) ) + "/" ) , true , false , com . streamsets . pipeline . lib . remote . TESTUSER , com . streamsets . pipeline . lib . remote . TESTPASS , null , null , null , true , null ) ) ; java . util . List < com . streamsets . pipeline . api . Stage . ConfigIssue > issues = initWithNoIssues ( connector ) ; "<AssertPlaceHolder>" ; verifyConnection ( connector ) ; testVerifyAndReconnectHelper ( connector ) ; connector . close ( ) ; } size ( ) { return delegate . size ( ) ; }
|
org . junit . Assert . assertEquals ( 0 , issues . size ( ) )
|
checkLength ( ) { java . io . File [ ] files = ( this . tempFile . isDirectory ( ) ) ? this . tempFile . listFiles ( ) : new java . io . File [ ] { this . tempFile } ; java . util . Arrays . sort ( files ) ; for ( int fileIndex = 0 ; fileIndex < ( this . degreeOfParallelism ) ; fileIndex ++ ) { long lastBlockLength = ( this . rawDataSizes [ fileIndex ] ) % ( ( this . blockSize ) - ( this . info . getInfoSize ( ) ) ) ; long expectedLength = ( ( ( ( this . getExpectedBlockCount ( fileIndex ) ) - 1 ) * ( this . blockSize ) ) + ( this . info . getInfoSize ( ) ) ) + lastBlockLength ; "<AssertPlaceHolder>" ; } } getInfoSize ( ) { return ( 8 + 8 ) + 8 ; }
|
org . junit . Assert . assertEquals ( expectedLength , files [ fileIndex ] . length ( ) )
|
stripTableAliasesFromFormula_aggregate ( ) { java . lang . String formula = "count(a.id)" ; java . lang . String expected = "count(id)" ; java . lang . String result = new org . pentaho . pms . mql . dialect . HiveDialect ( ) . stripTableAliasesFromFormula ( formula ) ; "<AssertPlaceHolder>" ; } stripTableAliasesFromFormula ( java . lang . String ) { return TABLE_QUALIFIER_PATTERN . matcher ( formula ) . replaceAll ( new java . lang . String ( ) ) ; }
|
org . junit . Assert . assertEquals ( expected , result )
|
streamingEmptyReaderDoesNotCauseFailure ( ) { java . util . List < java . lang . Object > streamedObjects = new java . util . ArrayList ( ) ; com . facebook . buck . apple . XctoolOutputParsing . streamOutputFromReader ( new java . io . StringReader ( "" ) , com . facebook . buck . apple . XctoolOutputParsingTest . eventCallbackAddingEventsToList ( streamedObjects ) ) ; "<AssertPlaceHolder>" ; } empty ( ) { return new com . facebook . buck . util . RichStreamImpl ( java . util . stream . Stream . empty ( ) ) ; }
|
org . junit . Assert . assertThat ( streamedObjects , org . hamcrest . Matchers . is ( org . hamcrest . Matchers . empty ( ) ) )
|
testCreateContextParameters ( ) { java . lang . String prefixName = "testHDFS" ; org . talend . repository . model . hdfs . HDFSConnection connection = ( ( org . talend . repository . model . hdfs . HDFSConnection ) ( org . talend . repository . hdfs . ui . handler . testutils . HDFSConnectionTestUtils . createDefaultHDFSItem ( ) . getConnection ( ) ) ) ; java . util . Set < org . talend . metadata . managment . ui . model . IConnParamName > paramSet = new java . util . HashSet ( ) ; paramSet . add ( EHadoopParamName . HdfsUser ) ; paramSet . add ( EHadoopParamName . HdfsFileSeparator ) ; paramSet . add ( EHadoopParamName . HdfsRowSeparator ) ; paramSet . add ( EHadoopParamName . HdfsRowHeader ) ; java . util . List < java . lang . String > contextParameterNames = new java . util . ArrayList ( ) ; for ( org . talend . metadata . managment . ui . model . IConnParamName paramName : paramSet ) { contextParameterNames . add ( ( ( prefixName + ( org . talend . metadata . managment . ui . utils . ConnectionContextHelper . LINE ) ) + paramName ) ) ; } java . util . List < org . talend . core . model . process . IContextParameter > contextParameters = contextHandler . createContextParameters ( prefixName , connection , paramSet ) ; for ( org . talend . core . model . process . IContextParameter contextParameter : contextParameters ) { java . lang . String contextParameterName = contextParameter . getName ( ) ; "<AssertPlaceHolder>" ; checkContextParameter ( contextParameter , connection ) ; } } getName ( ) { return this . name ; }
|
org . junit . Assert . assertTrue ( contextParameterNames . contains ( contextParameterName ) )
|
testSerializeHandshakeMessageContent ( ) { de . rub . nds . tlsattacker . core . protocol . message . ECDHEServerKeyExchangeMessage msg = new de . rub . nds . tlsattacker . core . protocol . message . ECDHEServerKeyExchangeMessage ( ) ; msg . setCompleteResultingMessage ( expectedPart ) ; msg . setCurveType ( curveType ) ; msg . setLength ( length ) ; msg . setType ( type . getValue ( ) ) ; msg . setNamedGroup ( namedGroup ) ; msg . setPublicKey ( pubKey ) ; msg . setPublicKeyLength ( pubKeyLength ) ; if ( ( signatureAndHashAlgo ) != null ) { msg . setSignatureAndHashAlgorithm ( signatureAndHashAlgo ) ; } msg . setPublicKey ( pubKey ) ; msg . setPublicKeyLength ( pubKeyLength ) ; msg . setSignatureLength ( sigLength ) ; msg . setSignature ( signature ) ; de . rub . nds . tlsattacker . core . protocol . serializer . ECDHEServerKeyExchangeSerializer serializer = new de . rub . nds . tlsattacker . core . protocol . serializer . ECDHEServerKeyExchangeSerializer ( msg , version ) ; "<AssertPlaceHolder>" ; } serialize ( ) { de . rub . nds . tlsattacker . core . protocol . serializer . ServerHelloSerializer serializer = new de . rub . nds . tlsattacker . core . protocol . serializer . ServerHelloSerializer ( helloMessage , de . rub . nds . tlsattacker . core . constants . ProtocolVersion . TLS12 ) ; byte [ ] serialised = serializer . serialize ( ) ; org . junit . Assert . assertArrayEquals ( serialised , message ) ; }
|
org . junit . Assert . assertArrayEquals ( expectedPart , serializer . serialize ( ) )
|
testEqualityWithAnotherInfo ( ) { edu . harvard . iq . dataverse . actionlogging . ActionLogRecord record = new edu . harvard . iq . dataverse . actionlogging . ActionLogRecord ( ) ; record . setStartTime ( this . referenceRecord . getStartTime ( ) ) ; record . setEndTime ( this . referenceRecord . getEndTime ( ) ) ; record . setActionResult ( this . referenceRecord . getActionResult ( ) ) ; record . setUserIdentifier ( this . referenceRecord . getUserIdentifier ( ) ) ; record . setActionType ( this . referenceRecord . getActionType ( ) ) ; record . setActionSubType ( this . referenceRecord . getInfo ( ) ) ; record . setInfo ( "info2" ) ; "<AssertPlaceHolder>" ; } equals ( java . lang . Object ) { if ( ! ( object instanceof edu . harvard . iq . dataverse . makedatacount . DatasetExternalCitations ) ) { return false ; } edu . harvard . iq . dataverse . makedatacount . DatasetExternalCitations other = ( ( edu . harvard . iq . dataverse . makedatacount . DatasetExternalCitations ) ( object ) ) ; if ( ( ( ( this . id ) == null ) && ( ( other . id ) != null ) ) || ( ( ( this . id ) != null ) && ( ! ( this . id . equals ( other . id ) ) ) ) ) { return false ; } return true ; }
|
org . junit . Assert . assertFalse ( this . referenceRecord . equals ( record ) )
|
testDrawRectangleWithZeroWidthAndHeight ( ) { gc . drawRectangle ( 1 , 2 , 0 , 0 ) ; org . eclipse . swt . internal . graphics . GCOperation [ ] gcOperations = org . eclipse . swt . graphics . ControlGC_Test . getGCOperations ( gc ) ; "<AssertPlaceHolder>" ; } getGCOperations ( org . eclipse . swt . graphics . GC ) { return org . eclipse . swt . graphics . ControlGC_Test . getGCAdapter ( gc ) . getGCOperations ( ) ; }
|
org . junit . Assert . assertEquals ( 0 , gcOperations . length )
|
detectResultType_should_return_LIST_OF_FIELDS ( ) { org . springframework . data . simpledb . query . SimpleDbQueryMethod repositoryMethod = prepareQueryMethodToTest ( "sampleAllSampleListSelect" , org . springframework . data . simpledb . query . SampleEntity . class ) ; org . springframework . data . simpledb . query . executions . MultipleResultExecution multipleResultExecution = new org . springframework . data . simpledb . query . executions . MultipleResultExecution ( null ) ; "<AssertPlaceHolder>" ; } detectResultType ( org . springframework . data . simpledb . query . SimpleDbQueryMethod ) { java . lang . String query = method . getAnnotatedQuery ( ) ; if ( method . returnsCollectionOfDomainClass ( ) ) { return org . springframework . data . simpledb . query . executions . MultipleResultExecution . MultipleResultType . COLLECTION_OF_DOMAIN_ENTITIES ; } else if ( ( org . springframework . data . simpledb . query . QueryUtils . getQueryPartialFieldNames ( query ) . size ( ) ) > 1 ) { return org . springframework . data . simpledb . query . executions . MultipleResultExecution . MultipleResultType . LIST_OF_LIST_OF_OBJECT ; } else { if ( method . returnsListOfListOfObject ( ) ) { return org . springframework . data . simpledb . query . executions . MultipleResultExecution . MultipleResultType . LIST_OF_LIST_OF_OBJECT ; } else if ( method . returnsFieldOfTypeCollection ( ) ) { return org . springframework . data . simpledb . query . executions . MultipleResultExecution . MultipleResultType . FIELD_OF_TYPE_COLLECTION ; } else if ( java . util . List . class . isAssignableFrom ( method . getReturnType ( ) ) ) { return org . springframework . data . simpledb . query . executions . MultipleResultExecution . MultipleResultType . LIST_OF_FIELDS ; } else if ( java . util . Set . class . isAssignableFrom ( method . getReturnType ( ) ) ) { return org . springframework . data . simpledb . query . executions . MultipleResultExecution . MultipleResultType . SET_OF_FIELDS ; } else { throw new java . lang . IllegalArgumentException ( ( "Wrong<sp>return<sp>type<sp>for<sp>query:<sp>" + query ) ) ; } } }
|
org . junit . Assert . assertEquals ( MultipleResultExecution . MultipleResultType . LIST_OF_FIELDS , multipleResultExecution . detectResultType ( repositoryMethod ) )
|
testDecimalBinary ( ) { byte [ ] value = new java . math . BigInteger ( "12323232233434" , 10 ) . toString ( ) . getBytes ( org . apache . hadoop . hive . accumulo . UTF_8 ) ; byte [ ] encoded = new java . lang . String ( value ) . getBytes ( org . apache . hadoop . hive . accumulo . UTF_8 ) ; byte [ ] lex = org . apache . hadoop . hive . accumulo . AccumuloIndexLexicoder . encodeValue ( value , serdeConstants . DECIMAL_TYPE_NAME , false ) ; "<AssertPlaceHolder>" ; } encodeValue ( byte [ ] , java . lang . String , boolean ) { if ( stringEncoded ) { return org . apache . hadoop . hive . accumulo . AccumuloIndexLexicoder . encodeStringValue ( value , hiveType ) ; } else { return org . apache . hadoop . hive . accumulo . AccumuloIndexLexicoder . encodeBinaryValue ( value , hiveType ) ; } }
|
org . junit . Assert . assertArrayEquals ( lex , encoded )
|
testMean ( ) { int [ ] shape = new int [ ] { 1 , 2 , 2 , 2 , 2 , 2 } ; int len = org . nd4j . linalg . util . ArrayUtil . prod ( shape ) ; org . nd4j . linalg . api . ndarray . INDArray val = org . nd4j . linalg . factory . Nd4j . linspace ( 1 , len , len , DataType . DOUBLE ) . reshape ( 'c' , shape ) ; org . nd4j . linalg . api . ndarray . INDArray sum = val . sum ( 2 , 3 ) ; double [ ] assertionData = new double [ ] { 28.0 , 32.0 , 36.0 , 40.0 , 92.0 , 96.0 , 100.0 , 104.0 } ; org . nd4j . linalg . api . ndarray . INDArray avgExpected = org . nd4j . linalg . factory . Nd4j . create ( assertionData ) . reshape ( 1 , 2 , 2 , 2 ) ; "<AssertPlaceHolder>" ; } create ( org . nd4j . linalg . api . shape . LongShapeDescriptor ) { return org . nd4j . linalg . factory . Nd4j . create ( descriptor , true ) ; }
|
org . junit . Assert . assertEquals ( avgExpected , sum )
|
equals_null ( ) { final net . sf . qualitycheck . immutableobject . domain . ImmutableSettings a = new net . sf . qualitycheck . immutableobject . domain . ImmutableSettingsTest . Blueprint ( ) . build ( ) ; "<AssertPlaceHolder>" ; } equals ( java . lang . Object ) { if ( ( this ) == obj ) { return true ; } if ( obj == null ) { return false ; } if ( ( getClass ( ) ) != ( obj . getClass ( ) ) ) { return false ; } final net . sf . qualitycheck . ConditionalCheckTest . NotEqual other = ( ( net . sf . qualitycheck . ConditionalCheckTest . NotEqual ) ( obj ) ) ; if ( ( value ) != ( other . value ) ) { return false ; } return true ; }
|
org . junit . Assert . assertFalse ( a . equals ( null ) )
|