input: string, lengths 28 to 18.7k
output: string, lengths 39 to 1.69k
testNodeReplacementInEmptyNetwork ( ) { org . apache . beam . vendor . guava . v20_0 . com . google . common . graph . MutableNetwork < java . lang . String , java . lang . String > network = org . apache . beam . runners . core . construction . graph . NetworksTest . createEmptyNetwork ( ) ; org . apache . beam . runners . core . construction . graph . Networks . replaceDirectedNetworkNodes ( network , String :: toLowerCase ) ; "<AssertPlaceHolder>" ; } empty ( ) { org . apache . beam . sdk . util . ApiSurface . LOG . debug ( "Returning<sp>an<sp>empty<sp>ApiSurface" ) ; return new org . apache . beam . sdk . util . ApiSurface ( java . util . Collections . emptySet ( ) , java . util . Collections . emptySet ( ) ) ; }
org . junit . Assert . assertThat ( network . nodes ( ) , empty ( ) )
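For readability, here is a minimal de-tokenized sketch of how the pair above reads once the output assertion is substituted for "<AssertPlaceHolder>" in the input test (in the tokenized rows, "<sp>" appears to stand for a space inside a string literal). The class name, imports, @Test annotation, the Hamcrest empty() matcher, and the createEmptyNetwork() stub built with the vendored NetworkBuilder are assumptions added for illustration, not part of the row; the empty() helper attached to the row itself belongs to an unrelated ApiSurface class.

import static org.hamcrest.Matchers.empty;
import static org.junit.Assert.assertThat;

import org.apache.beam.runners.core.construction.graph.Networks;
import org.apache.beam.vendor.guava.v20_0.com.google.common.graph.MutableNetwork;
import org.apache.beam.vendor.guava.v20_0.com.google.common.graph.NetworkBuilder;
import org.junit.Test;

public class NodeReplacementExampleTest {

  // Stand-in for NetworksTest.createEmptyNetwork() referenced by the row (assumed shape).
  private static MutableNetwork<String, String> createEmptyNetwork() {
    return NetworkBuilder.directed().allowsSelfLoops(false).build();
  }

  @Test
  public void testNodeReplacementInEmptyNetwork() {
    MutableNetwork<String, String> network = createEmptyNetwork();
    Networks.replaceDirectedNetworkNodes(network, String::toLowerCase);
    // Output column of the pair, substituted for "<AssertPlaceHolder>":
    assertThat(network.nodes(), empty());
  }
}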
serializeDeserialize_sameObject ( ) { byte [ ] serializedValue = objectMapper . writeValueAsBytes ( window ) ; com . neovim . Window deserializedValue = objectMapper . readValue ( serializedValue , com . neovim . Window . class ) ; "<AssertPlaceHolder>" ; }
org . junit . Assert . assertThat ( deserializedValue , org . hamcrest . core . Is . is ( window ) )
whenCreatingBranchWithMetadata_ThenItShouldBeStored ( ) { final com . b2international . commons . options . Metadata metadata = new com . b2international . commons . options . MetadataImpl ( ) ; metadata . put ( "key" , "value" ) ; final java . lang . String b = branching ( ) . createBranch ( com . b2international . index . revision . MAIN , "b" , metadata ) ; "<AssertPlaceHolder>" ; } getBranch ( java . lang . String ) { return branching . getBranch ( branchPath ) ; }
org . junit . Assert . assertEquals ( "value" , getBranch ( b ) . metadata ( ) . get ( "key" ) )
contentTypeServiceDocumentXml ( ) { final org . apache . http . HttpResponse response = callUri ( "" , HttpHeaders . ACCEPT , HttpContentType . APPLICATION_XML , HttpStatusCodes . OK ) ; checkMediaType ( response , HttpContentType . APPLICATION_XML_UTF8 ) ; "<AssertPlaceHolder>" ; } getBody ( org . apache . http . HttpResponse ) { org . junit . Assert . assertNotNull ( response ) ; org . junit . Assert . assertNotNull ( response . getEntity ( ) ) ; org . junit . Assert . assertNotNull ( response . getEntity ( ) . getContent ( ) ) ; return org . apache . olingo . odata2 . testutil . helper . StringHelper . inputStreamToString ( response . getEntity ( ) . getContent ( ) ) ; }
org . junit . Assert . assertTrue ( ( ( getBody ( response ) . length ( ) ) > 100 ) )
testGreaterThanRecordThresholdCount ( ) { int thresholdVolume = 5 ; int uploadCheckPeriod = 2000 ; org . kaaproject . kaa . client . logging . LogStorageStatus logStorageStatus = org . mockito . Mockito . mock ( org . kaaproject . kaa . client . logging . LogStorageStatus . class ) ; org . mockito . Mockito . when ( logStorageStatus . getConsumedVolume ( ) ) . thenReturn ( ( ( long ) ( thresholdVolume + 1 ) ) ) ; org . kaaproject . kaa . client . logging . strategies . StorageSizeWithTimeLimitLogUploadStrategy strategy = new org . kaaproject . kaa . client . logging . strategies . StorageSizeWithTimeLimitLogUploadStrategy ( thresholdVolume , uploadCheckPeriod , java . util . concurrent . TimeUnit . MILLISECONDS ) ; "<AssertPlaceHolder>" ; } checkUploadNeeded ( org . kaaproject . kaa . client . logging . LogStorageStatus ) { org . kaaproject . kaa . client . logging . LogUploadStrategyDecision decision = LogUploadStrategyDecision . NOOP ; if ( ( status . getConsumedVolume ( ) ) >= ( volumeThreshold ) ) { org . kaaproject . kaa . client . logging . DefaultLogUploadStrategy . LOG . info ( "Need<sp>to<sp>upload<sp>logs<sp>-<sp>current<sp>size:<sp>{},<sp>threshold:<sp>{}" , status . getConsumedVolume ( ) , volumeThreshold ) ; decision = LogUploadStrategyDecision . UPLOAD ; } else if ( ( status . getRecordCount ( ) ) >= ( countThreshold ) ) { org . kaaproject . kaa . client . logging . DefaultLogUploadStrategy . LOG . info ( "Need<sp>to<sp>upload<sp>logs<sp>-<sp>current<sp>count:<sp>{},<sp>threshold:<sp>{}" , status . getRecordCount ( ) , countThreshold ) ; decision = LogUploadStrategyDecision . UPLOAD ; } return decision ; }
org . junit . Assert . assertEquals ( strategy . checkUploadNeeded ( logStorageStatus ) , LogUploadStrategyDecision . UPLOAD )
experimentTypeLearningCurvedFixedTest ( ) { builder . experiment ( org . dkpro . tc . ml . experiment . builder . LEARNING_CURVE_FIXED_TEST_SET , "fixedTest" ) ; "<AssertPlaceHolder>" ; } toString ( ) { return name ; }
org . junit . Assert . assertEquals ( builder . type . toString ( ) , org . dkpro . tc . ml . experiment . builder . LEARNING_CURVE_FIXED_TEST_SET . toString ( ) )
testGetCriteriaByTemplateId ( ) { int templateId1 = 101 ; int templateId2 = 202 ; org . oscarehr . PMmodule . model . Criteria criteria1 = new org . oscarehr . PMmodule . model . Criteria ( ) ; org . oscarehr . common . dao . utils . EntityDataGenerator . generateTestDataForModelClass ( criteria1 ) ; criteria1 . setTemplateId ( templateId1 ) ; dao . persist ( criteria1 ) ; org . oscarehr . PMmodule . model . Criteria criteria2 = new org . oscarehr . PMmodule . model . Criteria ( ) ; org . oscarehr . common . dao . utils . EntityDataGenerator . generateTestDataForModelClass ( criteria2 ) ; criteria2 . setTemplateId ( templateId2 ) ; dao . persist ( criteria2 ) ; org . oscarehr . PMmodule . model . Criteria criteria3 = new org . oscarehr . PMmodule . model . Criteria ( ) ; org . oscarehr . common . dao . utils . EntityDataGenerator . generateTestDataForModelClass ( criteria3 ) ; criteria3 . setTemplateId ( templateId1 ) ; dao . persist ( criteria3 ) ; org . oscarehr . PMmodule . model . Criteria criteria4 = new org . oscarehr . PMmodule . model . Criteria ( ) ; org . oscarehr . common . dao . utils . EntityDataGenerator . generateTestDataForModelClass ( criteria4 ) ; criteria4 . setTemplateId ( templateId2 ) ; dao . persist ( criteria4 ) ; org . oscarehr . PMmodule . model . Criteria criteria5 = new org . oscarehr . PMmodule . model . Criteria ( ) ; org . oscarehr . common . dao . utils . EntityDataGenerator . generateTestDataForModelClass ( criteria5 ) ; criteria5 . setTemplateId ( templateId1 ) ; dao . persist ( criteria5 ) ; java . util . List < org . oscarehr . PMmodule . model . Criteria > expectedResult = new java . util . ArrayList < org . oscarehr . PMmodule . model . Criteria > ( java . util . Arrays . asList ( criteria1 , criteria3 , criteria5 ) ) ; java . util . List < org . oscarehr . PMmodule . model . Criteria > result = dao . getCriteriaByTemplateId ( templateId1 ) ; org . apache . log4j . Logger logger = org . oscarehr . util . MiscUtils . getLogger ( ) ; if ( ( result . size ( ) ) != ( expectedResult . size ( ) ) ) { logger . warn ( "Array<sp>sizes<sp>do<sp>not<sp>match." ) ; org . junit . Assert . fail ( "Array<sp>sizes<sp>do<sp>not<sp>match." ) ; } for ( int i = 0 ; i < ( expectedResult . size ( ) ) ; i ++ ) { if ( ! ( expectedResult . get ( i ) . equals ( result . get ( i ) ) ) ) { logger . warn ( "Items<sp>not<sp>sorted<sp>by<sp>Billing<sp>Payment<sp>Date." ) ; org . junit . Assert . fail ( "Items<sp>not<sp>sorted<sp>by<sp>Billing<sp>Payment<sp>Date." ) ; } } "<AssertPlaceHolder>" ; } get ( java . lang . String ) { try { return terser . get ( path ) ; } catch ( ca . uhn . hl7v2 . HL7Exception e ) { oscar . oscarLab . ca . all . parsers . CLSHandler . logger . warn ( ( "Unable<sp>to<sp>get<sp>field<sp>at<sp>" + path ) , e ) ; return null ; } }
org . junit . Assert . assertTrue ( true )
testMultiNormalizerHybridGlobalAndSpecificStats ( ) { org . nd4j . linalg . dataset . MultiNormalizerHybrid original = new org . nd4j . linalg . dataset . MultiNormalizerHybrid ( ) . standardizeAllInputs ( ) . minMaxScaleInput ( 0 , ( - 5 ) , 5 ) . minMaxScaleAllOutputs ( ( - 10 ) , 10 ) . standardizeOutput ( 1 ) ; java . util . Map < java . lang . Integer , org . nd4j . linalg . dataset . api . preprocessor . stats . NormalizerStats > inputStats = new java . util . HashMap ( ) ; inputStats . put ( 0 , new org . nd4j . linalg . dataset . api . preprocessor . stats . MinMaxStats ( org . nd4j . linalg . factory . Nd4j . create ( new float [ ] { 1 , 2 } ) , org . nd4j . linalg . factory . Nd4j . create ( new float [ ] { 3 , 4 } ) ) ) ; inputStats . put ( 1 , new org . nd4j . linalg . dataset . api . preprocessor . stats . DistributionStats ( org . nd4j . linalg . factory . Nd4j . create ( new float [ ] { 5 , 6 } ) , org . nd4j . linalg . factory . Nd4j . create ( new float [ ] { 7 , 8 } ) ) ) ; java . util . Map < java . lang . Integer , org . nd4j . linalg . dataset . api . preprocessor . stats . NormalizerStats > outputStats = new java . util . HashMap ( ) ; outputStats . put ( 0 , new org . nd4j . linalg . dataset . api . preprocessor . stats . MinMaxStats ( org . nd4j . linalg . factory . Nd4j . create ( new float [ ] { 9 , 10 } ) , org . nd4j . linalg . factory . Nd4j . create ( new float [ ] { 11 , 12 } ) ) ) ; outputStats . put ( 1 , new org . nd4j . linalg . dataset . api . preprocessor . stats . DistributionStats ( org . nd4j . linalg . factory . Nd4j . create ( new float [ ] { 13 , 14 } ) , org . nd4j . linalg . factory . Nd4j . create ( new float [ ] { 15 , 16 } ) ) ) ; original . setInputStats ( inputStats ) ; original . setOutputStats ( outputStats ) ; SUT . write ( original , tmpFile ) ; org . nd4j . linalg . dataset . MultiNormalizerHybrid restored = SUT . restore ( tmpFile ) ; "<AssertPlaceHolder>" ; } restore ( org . nd4j . linalg . dataset . api . preprocessor . serializer . InputStream ) { org . nd4j . linalg . dataset . api . preprocessor . serializer . DataInputStream dis = new org . nd4j . linalg . dataset . api . preprocessor . serializer . DataInputStream ( stream ) ; boolean fitLabels = dis . readBoolean ( ) ; double targetMin = dis . readDouble ( ) ; double targetMax = dis . readDouble ( ) ; org . nd4j . linalg . dataset . api . preprocessor . NormalizerMinMaxScaler result = new org . nd4j . linalg . dataset . api . preprocessor . NormalizerMinMaxScaler ( targetMin , targetMax ) ; result . fitLabel ( fitLabels ) ; result . setFeatureStats ( org . nd4j . linalg . factory . Nd4j . read ( dis ) , org . nd4j . linalg . factory . Nd4j . read ( dis ) ) ; if ( fitLabels ) { result . setLabelStats ( org . nd4j . linalg . factory . Nd4j . read ( dis ) , org . nd4j . linalg . factory . Nd4j . read ( dis ) ) ; } return result ; }
org . junit . Assert . assertEquals ( original , restored )
testSyncCreateGroupNesting ( ) { syncConfig . user ( ) . setMembershipNestingDepth ( 2 ) ; org . apache . jackrabbit . api . security . user . UserManager userManager = getUserManager ( root ) ; org . apache . jackrabbit . oak . api . ContentSession cs = null ; try { cs = login ( new javax . jcr . SimpleCredentials ( USER_ID , new char [ 0 ] ) ) ; root . refresh ( ) ; for ( java . lang . String id : new java . lang . String [ ] { "a" , "b" , "c" , "aa" , "aaa" } ) { "<AssertPlaceHolder>" ; } } finally { if ( cs != null ) { cs . close ( ) ; } options . clear ( ) ; } } getAuthorizable ( org . apache . jackrabbit . oak . api . ResultRow ) { org . apache . jackrabbit . api . security . user . Authorizable authorizable = null ; if ( row != null ) { java . lang . String resultPath = row . getValue ( QueryConstants . JCR_PATH ) . getValue ( Type . STRING ) ; try { org . apache . jackrabbit . oak . api . Tree tree = root . getTree ( resultPath ) ; org . apache . jackrabbit . oak . spi . security . user . AuthorizableType type = org . apache . jackrabbit . oak . spi . security . user . util . UserUtil . getType ( tree ) ; while ( ( ( tree . exists ( ) ) && ( ! ( tree . isRoot ( ) ) ) ) && ( type == null ) ) { tree = tree . getParent ( ) ; type = org . apache . jackrabbit . oak . spi . security . user . util . UserUtil . getType ( tree ) ; } if ( ( tree . exists ( ) ) && ( ( ( targetType ) == null ) || ( ( targetType ) == type ) ) ) { authorizable = userManager . getAuthorizable ( tree ) ; } } catch ( javax . jcr . RepositoryException e ) { org . apache . jackrabbit . oak . security . user . query . ResultRowToAuthorizable . log . debug ( ( "Failed<sp>to<sp>access<sp>authorizable<sp>" + resultPath ) ) ; } } return authorizable ; }
org . junit . Assert . assertNotNull ( userManager . getAuthorizable ( id ) )
thatWhichGoesInIsThatWhichComesOutAgain ( ) { com . github . danielflower . mavenplugins . gitlog . renderers . NullMessageConverter converter = new com . github . danielflower . mavenplugins . gitlog . renderers . NullMessageConverter ( ) ; java . lang . String someText = "Yeah<sp>1<sp>#1<sp>GH-1<sp>yeah<sp>yeah" ; "<AssertPlaceHolder>" ; } formatCommitMessage ( java . lang . String ) { try { java . util . regex . Matcher matcher = pattern . matcher ( original ) ; java . lang . String result = matcher . replaceAll ( "[$3]($2)" ) ; return result ; } catch ( java . lang . Exception e ) { log . info ( ( "Unable<sp>to<sp>convert<sp>a<sp>HTML<sp>link<sp>into<sp>markdown<sp>link:<sp>" + original ) , e ) ; } return original ; }
org . junit . Assert . assertEquals ( someText , converter . formatCommitMessage ( someText ) )
testRecordCountForIsValid ( ) { org . xbill . DNS . Message m = new org . xbill . DNS . Message ( ) ; m . addRecord ( new org . xbill . DNS . ARecord ( org . xbill . DNS . Name . root , org . xbill . DNS . DClass . IN , 0 , java . net . InetAddress . getByAddress ( new byte [ ] { 0 , 0 , 0 , 0 } ) ) , Section . ANSWER ) ; org . jitsi . dnssec . SMessage sm = new org . jitsi . dnssec . SMessage ( m ) ; int count = sm . getCount ( Section . ANSWER ) ; "<AssertPlaceHolder>" ; } getCount ( int ) { if ( section == ( org . xbill . DNS . Section . QUESTION ) ) { return 1 ; } java . util . List < org . jitsi . dnssec . SRRset > sectionList = this . getSectionRRsets ( section ) ; if ( ( sectionList . size ( ) ) == 0 ) { return 0 ; } int count = 0 ; for ( org . jitsi . dnssec . SRRset sr : sectionList ) { count += sr . size ( ) ; } return count ; }
org . junit . Assert . assertEquals ( 1 , count )
testLocaleMatches ( ) { final org . apache . commons . lang3 . time . DateParser parser = getInstance ( org . apache . commons . lang3 . time . FastDateParserTest . yMdHmsSZ , org . apache . commons . lang3 . time . FastDateParserTest . SWEDEN ) ; "<AssertPlaceHolder>" ; } getLocale ( ) { return mLocale ; }
org . junit . Assert . assertEquals ( org . apache . commons . lang3 . time . FastDateParserTest . SWEDEN , parser . getLocale ( ) )
of_File_empty ( ) { java . io . File [ ] f = new java . io . File [ 0 ] ; try { net . coobird . thumbnailator . Thumbnails . of ( f ) ; org . junit . Assert . fail ( ) ; } catch ( java . lang . IllegalArgumentException e ) { "<AssertPlaceHolder>" ; throw e ; } } of ( java . lang . String [ ] ) { net . coobird . thumbnailator . Thumbnails . checkForNull ( files , "Cannot<sp>specify<sp>null<sp>for<sp>input<sp>files." ) ; net . coobird . thumbnailator . Thumbnails . checkForEmpty ( files , "Cannot<sp>specify<sp>an<sp>empty<sp>array<sp>for<sp>input<sp>files." ) ; return net . coobird . thumbnailator . Thumbnails . Builder . ofStrings ( java . util . Arrays . asList ( files ) ) ; }
org . junit . Assert . assertEquals ( "Cannot<sp>specify<sp>an<sp>empty<sp>array<sp>for<sp>input<sp>files." , e . getMessage ( ) )
testHasDependencyExcludingVersionWhenDependencyHasDifferentType ( ) { final org . springframework . roo . project . Dependency mockExistingDependency = getMockDependency ( org . springframework . roo . project . maven . PomTest . DEPENDENCY_GROUP_ID , org . springframework . roo . project . maven . PomTest . DEPENDENCY_ARTIFACT_ID , "1.0" , DependencyType . JAR ) ; final org . springframework . roo . project . maven . Pom pom = getMinimalPom ( org . springframework . roo . project . maven . PomTest . JAR , mockExistingDependency ) ; final org . springframework . roo . project . Dependency mockOtherDependency = getMockDependency ( org . springframework . roo . project . maven . PomTest . DEPENDENCY_GROUP_ID , org . springframework . roo . project . maven . PomTest . DEPENDENCY_ARTIFACT_ID , "1.0" , DependencyType . OTHER ) ; "<AssertPlaceHolder>" ; } hasDependencyExcludingVersion ( org . springframework . roo . project . Dependency ) { return ! ( getDependenciesExcludingVersion ( dependency ) . isEmpty ( ) ) ; }
org . junit . Assert . assertFalse ( pom . hasDependencyExcludingVersion ( mockOtherDependency ) )
noSubscriptions ( ) { doReturn ( org . oscm . billingservice . business . calculation . revenue . PerformBillingRunForSubscriptionTest . createDocument ( false ) ) . when ( bb ) . createBillingDataForOrganization ( any ( org . oscm . billingservice . service . model . BillingInput . class ) , any ( org . oscm . domobjects . BillingResult . class ) ) ; doReturn ( java . lang . Long . valueOf ( 1 ) ) . when ( queryMock ) . getSingleResult ( ) ; org . oscm . domobjects . BillingResult result = performBillingRunForSubscription ( 1L , new java . util . ArrayList < org . oscm . domobjects . SubscriptionHistory > ( ) , 1L , 1L , 3L , "EUR" , false ) ; "<AssertPlaceHolder>" ; verify ( bb . bdr , never ( ) ) . persistBillingResult ( any ( org . oscm . domobjects . BillingResult . class ) ) ; verify ( bb . bdr , never ( ) ) . removeBillingResult ( any ( org . oscm . domobjects . BillingResult . class ) ) ; } getResultXML ( ) { return resultXML ; }
org . junit . Assert . assertEquals ( "" , result . getResultXML ( ) )
registerMetricSet ( ) { net . centro . rtb . monitoringcenter . MetricCollector metricCollector = net . centro . rtb . monitoringcenter . MonitoringCenter . getMetricCollector ( net . centro . rtb . monitoringcenter . MonitoringCenterTest . class ) ; net . centro . rtb . monitoringcenter . MetricSet metricSet = new net . centro . rtb . monitoringcenter . MetricSet ( ) { @ net . centro . rtb . monitoringcenter . Override public java . util . Map < java . lang . String , net . centro . rtb . monitoringcenter . Metric > getMetrics ( ) { java . util . Map < java . lang . String , net . centro . rtb . monitoringcenter . Metric > metricMap = new java . util . HashMap ( ) ; metricMap . put ( "test" , new net . centro . rtb . monitoringcenter . Timer ( ) ) ; return metricMap ; } } ; metricCollector . registerMetricSet ( metricSet , "testTimerSet" ) ; try { metricCollector . registerMetricSet ( metricSet , "testTimerSet" ) ; } catch ( java . lang . Exception e ) { "<AssertPlaceHolder>" ; } metricCollector . removeAll ( ) ; } registerMetricSet ( com . codahale . metrics . MetricSet , java . lang . String [ ] ) { }
org . junit . Assert . assertTrue ( ( e instanceof java . lang . IllegalArgumentException ) )
any ( ) { final com . b2international . index . query . Expression actual = eval ( "*" ) ; final com . b2international . index . query . Expression expected = com . b2international . index . query . Expressions . matchAll ( ) ; "<AssertPlaceHolder>" ; } matchAll ( ) { return MatchAll . INSTANCE ; }
org . junit . Assert . assertEquals ( expected , actual )
testBuildContainerWithParentAttribute ( ) { com . picocontainer . classname . DefaultClassLoadingPicoContainer parent = new com . picocontainer . classname . DefaultClassLoadingPicoContainer ( ) ; parent . addComponent ( "hello" , "world" ) ; java . io . Reader script = new java . io . StringReader ( ( "A<sp>=<sp>com.picocontainer.script.testmodel.A\n" + ( ( "container(:parent<sp>=><sp>$parent)<sp>{\n" + "<sp>component(A)\n" ) + "}\n" ) ) ) ; com . picocontainer . PicoContainer pico = buildContainer ( script , parent , com . picocontainer . script . jruby . JRubyContainerBuilderTestCase . ASSEMBLY_SCOPE ) ; "<AssertPlaceHolder>" ; } getComponent ( java . lang . Class ) { checkReadPermission ( ) ; return pico . getComponent ( componentType ) ; }
org . junit . Assert . assertEquals ( "world" , pico . getComponent ( "hello" ) )
getNewTestCaseSourceCode_A$_Slim3_AbstractModelRef ( ) { java . lang . String encoding = org . junithelper . core . util . UniversalDetectorUtil . getDetectedEncoding ( org . junithelper . core . util . IOUtil . getResourceAsStream ( "inputs/Slim3_AbstractModelRef.txt" ) ) ; java . lang . String sourceCodeString = org . junithelper . core . util . IOUtil . readAsString ( org . junithelper . core . util . IOUtil . getResourceAsStream ( "inputs/Slim3_AbstractModelRef.txt" ) , encoding ) ; org . junithelper . core . meta . ClassMeta targetClassMeta = classMetaExtractor . extract ( sourceCodeString ) ; target . initialize ( targetClassMeta ) ; java . lang . String actual = target . getNewTestCaseSourceCode ( ) ; "<AssertPlaceHolder>" ; } getNewTestCaseSourceCode ( ) { java . lang . StringBuilder buf = new java . lang . StringBuilder ( ) ; if ( ( ( targetClassMeta . packageName ) != null ) && ( ( targetClassMeta . packageName . trim ( ) . length ( ) ) > 0 ) ) { buf . append ( "package<sp>" ) ; buf . append ( targetClassMeta . packageName ) ; buf . append ( ";" ) ; appender . appendLineBreak ( buf ) ; appender . appendLineBreak ( buf ) ; } for ( java . lang . String imported : targetClassMeta . importedList ) { if ( ( imported != null ) && ( ( imported . trim ( ) . length ( ) ) > 0 ) ) { buf . append ( "package<sp>" 0 ) ; buf . append ( imported ) ; buf . append ( ";" ) ; appender . appendLineBreak ( buf ) ; } } if ( ( ( config . testCaseClassNameToExtend ) != null ) && ( ( config . testCaseClassNameToExtend . trim ( ) . length ( ) ) > 0 ) ) { if ( ( ( config . junitVersion ) == ( org . junithelper . core . config . JUnitVersion . version3 ) ) || ( ! ( config . testCaseClassNameToExtend . equals ( "junit.framework.TestCase" ) ) ) ) { buf . append ( "package<sp>" 0 ) ; buf . append ( config . testCaseClassNameToExtend ) ; buf . append ( ";" ) ; appender . appendLineBreak ( buf ) ; appender . appendLineBreak ( buf ) ; } } else { appender . appendLineBreak ( buf ) ; } buf . append ( "public<sp>class<sp>" ) ; buf . append ( targetClassMeta . name ) ; buf . append ( "Test<sp>" ) ; if ( ( ( config . testCaseClassNameToExtend ) != null ) && ( ( config . testCaseClassNameToExtend . trim ( ) . length ( ) ) > 0 ) ) { if ( ( ( config . junitVersion ) == ( org . junithelper . core . config . JUnitVersion . version3 ) ) || ( ! ( config . testCaseClassNameToExtend . equals ( "junit.framework.TestCase" ) ) ) ) { buf . append ( "extends<sp>" ) ; java . lang . String [ ] splittedArray = config . testCaseClassNameToExtend . split ( "\\." ) ; buf . append ( splittedArray [ ( ( splittedArray . length ) - 1 ) ] ) ; buf . append ( "<sp>" ) ; } } buf . append ( "{" ) ; appender . appendLineBreak ( buf ) ; appender . appendLineBreak ( buf ) ; buf . append ( "}" ) ; appender . appendLineBreak ( buf ) ; return getTestCaseSourceCodeWithLackingTestMethod ( buf . toString ( ) ) ; }
org . junit . Assert . assertNotNull ( actual )
isArray_A$String_StringIsNull ( ) { org . junithelper . core . meta . ClassMeta classMeta = null ; org . junithelper . core . extractor . AvailableTypeDetector target = new org . junithelper . core . extractor . AvailableTypeDetector ( classMeta ) ; java . lang . String typeName = null ; boolean actual = target . isArray ( typeName ) ; boolean expected = false ; "<AssertPlaceHolder>" ; } isArray ( java . lang . String ) { return ( typeName != null ) && ( typeName . matches ( ".+?\\[\\]$" ) ) ; }
org . junit . Assert . assertThat ( actual , org . hamcrest . CoreMatchers . is ( org . hamcrest . CoreMatchers . equalTo ( expected ) ) )
testJarReader ( ) { com . blade . ioc . reader . JarReaderImpl jarReader = new com . blade . ioc . reader . JarReaderImpl ( ) ; java . util . Set < com . blade . ioc . bean . ClassInfo > classInfos = jarReader . readClasses ( com . blade . ioc . bean . Scanner . builder ( ) . packageName ( "org.slf4j.impl" ) . build ( ) ) ; "<AssertPlaceHolder>" ; } readClasses ( com . blade . ioc . bean . Scanner ) { return this . getClassByAnnotation ( scanner . getPackageName ( ) , scanner . getParent ( ) , scanner . getAnnotation ( ) , scanner . isRecursive ( ) ) ; }
org . junit . Assert . assertNotNull ( classInfos )
testModifyOverrideForcesNameToLowercase ( ) { org . candlepin . model . ConsumerContentOverride override = new org . candlepin . model . ConsumerContentOverride ( consumer , "test-repo" , "gpgcheck" , "0" ) ; org . candlepin . model . ConsumerContentOverride created = this . consumerContentOverrideCurator . create ( override ) ; created . setName ( "GPGCHECK" ) ; org . candlepin . model . ConsumerContentOverride merged = this . consumerContentOverrideCurator . merge ( created ) ; "<AssertPlaceHolder>" ; } getName ( ) { if ( ( pool ) != null ) { return pool . getProductName ( ) ; } return null ; }
org . junit . Assert . assertEquals ( "gpgcheck" , merged . getName ( ) )
validAugAbsTest ( ) { final org . opendaylight . yangtools . yang . parser . stmt . reactor . ReactorDeclaredModel result = org . opendaylight . yangtools . yang . parser . rfc7950 . reactor . RFC7950Reactors . defaultReactor ( ) . newBuild ( ) . addSources ( org . opendaylight . yangtools . yang . stmt . AugmentArgumentParsingTest . IMPORTED , org . opendaylight . yangtools . yang . stmt . AugmentArgumentParsingTest . VALID_ARGS ) . build ( ) ; "<AssertPlaceHolder>" ; } build ( ) { return org . opendaylight . yangtools . yang . data . api . FixedYangInstanceIdentifier . create ( path , hash . build ( ) ) ; }
org . junit . Assert . assertNotNull ( result )
testReferenceSchema ( ) { final java . net . URL url = getClass ( ) . getClassLoader ( ) . getResource ( "reference-schema.json" ) ; java . util . Objects . requireNonNull ( url ) ; final java . lang . String schema = org . apache . flink . util . FileUtils . readFileUtf8 ( new java . io . File ( url . getFile ( ) ) ) ; final org . apache . flink . api . common . typeinfo . TypeInformation < ? > result = org . apache . flink . formats . json . JsonRowSchemaConverter . convert ( schema ) ; final org . apache . flink . api . common . typeinfo . TypeInformation < ? > expected = org . apache . flink . api . common . typeinfo . Types . ROW_NAMED ( new java . lang . String [ ] { "billing_address" , "shipping_address" , "optional_address" } , org . apache . flink . api . common . typeinfo . Types . ROW_NAMED ( new java . lang . String [ ] { "street_address" , "city" , "state" } , Types . STRING , Types . STRING , Types . STRING ) , org . apache . flink . api . common . typeinfo . Types . ROW_NAMED ( new java . lang . String [ ] { "street_address" , "city" , "state" } , Types . STRING , Types . STRING , Types . STRING ) , org . apache . flink . api . common . typeinfo . Types . ROW_NAMED ( new java . lang . String [ ] { "street_address" , "city" , "state" } , Types . STRING , Types . STRING , Types . STRING ) ) ; "<AssertPlaceHolder>" ; } convert ( java . lang . String ) { org . apache . flink . util . Preconditions . checkNotNull ( jsonSchema , "JSON<sp>schema" ) ; final org . apache . flink . shaded . jackson2 . com . fasterxml . jackson . databind . ObjectMapper mapper = new org . apache . flink . shaded . jackson2 . com . fasterxml . jackson . databind . ObjectMapper ( ) ; mapper . getFactory ( ) . enable ( JsonParser . Feature . ALLOW_COMMENTS ) . enable ( JsonParser . Feature . ALLOW_UNQUOTED_FIELD_NAMES ) . enable ( JsonParser . Feature . ALLOW_SINGLE_QUOTES ) ; final org . apache . flink . shaded . jackson2 . com . fasterxml . jackson . databind . JsonNode node ; try { node = mapper . readTree ( jsonSchema ) ; } catch ( java . io . IOException e ) { throw new java . lang . IllegalArgumentException ( "Invalid<sp>JSON<sp>schema." , e ) ; } return ( ( org . apache . flink . api . common . typeinfo . TypeInformation < T > ) ( org . apache . flink . formats . json . JsonRowSchemaConverter . convertType ( "<root>" , node , node ) ) ) ; }
org . junit . Assert . assertEquals ( expected , result )
testParseQueryOnly ( ) { final fr . gouv . vitam . common . database . parser . request . multiple . SelectParserMultiple request = new fr . gouv . vitam . common . database . parser . request . multiple . SelectParserMultiple ( ) ; final java . lang . String ex = "{}" ; request . parseQueryOnly ( ex ) ; "<AssertPlaceHolder>" ; } parseQueryOnly ( java . lang . String ) { fr . gouv . vitam . common . database . parser . request . GlobalDatasParser . sanityRequestCheck ( query ) ; sourceRequest = query ; if ( ( request ) != null ) { request . reset ( ) ; } else { request = getNewRequest ( ) ; } lastDepth = 0 ; hasFullTextQuery = false ; rootNode = fr . gouv . vitam . common . json . JsonHandler . getFromString ( query ) ; if ( rootNode . isMissingNode ( ) ) { throw new fr . gouv . vitam . common . exception . InvalidParseOperationException ( "The<sp>current<sp>Node<sp>is<sp>missing(empty):<sp>RequestRoot" ) ; } rootParse ( fr . gouv . vitam . common . json . JsonHandler . createArrayNode ( ) ) ; queryParse ( rootNode ) ; filterParse ( fr . gouv . vitam . common . json . JsonHandler . createObjectNode ( ) ) ; }
org . junit . Assert . assertNotNull ( request )
encodeLabelShouldSucceed ( ) { java . lang . String label = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789.-~_@:!$&'()*+,;=<sp>?/" ; java . lang . String expected = ( ( ( ( ( label . substring ( 0 , ( ( label . length ( ) ) - 3 ) ) ) + "%20" ) + "%" ) + ( java . lang . Integer . toHexString ( '?' ) . toUpperCase ( Locale . US ) ) ) + "%" ) + ( java . lang . Integer . toHexString ( '/' ) . toUpperCase ( Locale . US ) ) ; "<AssertPlaceHolder>" ; } encodeLabel ( java . lang . String ) { com . google . common . base . Preconditions . checkNotNull ( label ) ; return com . lochbridge . oath . otp . keyprovisioning . OTPAuthURI . safeEncodeLabel ( label ) ; }
org . junit . Assert . assertEquals ( expected , com . lochbridge . oath . otp . keyprovisioning . OTPAuthURI . encodeLabel ( label ) )
saveExperimentPageStateExceptionTest ( ) { com . intuit . wasabi . experimentobjects . Experiment . ID experimentID = Experiment . ID . valueOf ( experimentId ) ; com . intuit . wasabi . experimentobjects . ExperimentPageList oldPageList = new com . intuit . wasabi . experimentobjects . ExperimentPageList ( ) ; com . intuit . wasabi . experimentobjects . ExperimentPageList newPageList = new com . intuit . wasabi . experimentobjects . ExperimentPageList ( ) ; doThrow ( com . datastax . driver . core . exceptions . WriteTimeoutException . class ) . when ( experimentAuditLogAccessor ) . insertBy ( eq ( experimentId ) , any ( java . util . Date . class ) , eq ( "pages" ) , any ( java . lang . String . class ) , any ( java . lang . String . class ) ) ; repository . saveExperimentPageState ( experimentID , oldPageList , newPageList ) ; "<AssertPlaceHolder>" ; } saveExperimentPageState ( com . intuit . wasabi . experimentobjects . Experiment$ID , com . intuit . wasabi . experimentobjects . ExperimentPageList , com . intuit . wasabi . experimentobjects . ExperimentPageList ) { try { this . experimentAuditLogAccessor . insertBy ( experimentID . getRawID ( ) , new java . util . Date ( ) , "pages" , oldPageList . toString ( ) , newPageList . toString ( ) ) ; } catch ( com . datastax . driver . core . exceptions . WriteTimeoutException | com . datastax . driver . core . exceptions . UnavailableException | com . datastax . driver . core . exceptions . NoHostAvailableException e ) { throw new com . intuit . wasabi . repository . RepositoryException ( ( ( "Could<sp>not<sp>write<sp>pages<sp>change<sp>to<sp>audit<sp>log:<sp>\"" + experimentID ) + "\"" ) , e ) ; } }
org . junit . Assert . assertThat ( 1 , org . hamcrest . CoreMatchers . is ( 2 ) )
testGetAccelerationCache_List_ExecutorService ( ) { System . out . println ( "getAccelerationCache" ) ; java . util . List < ? extends jsat . linear . Vec > vecs = null ; java . util . concurrent . ExecutorService threadpool = null ; jsat . linear . distancemetrics . JaccardDistance instance = new jsat . linear . distancemetrics . JaccardDistance ( ) ; java . util . List < java . lang . Double > expResult = null ; java . util . List < java . lang . Double > result = instance . getAccelerationCache ( vecs , true ) ; "<AssertPlaceHolder>" ; } getAccelerationCache ( java . util . List , boolean ) { double [ ] cache = new double [ vecs . size ( ) ] ; jsat . utils . concurrent . ParallelUtils . run ( parallel , vecs . size ( ) , ( start , end ) -> { for ( int i = start ; i < end ; i ++ ) { jsat . linear . Vec v = vecs . get ( i ) ; cache [ i ] = jsat . linear . VecOps . weightedDot ( invStndDevs , v , v ) ; } } ) ; return jsat . utils . DoubleList . view ( cache , vecs . size ( ) ) ; }
org . junit . Assert . assertEquals ( expResult , result )
testToUnsignedByteArrayInt_HighestBit ( ) { byte [ ] actual = de . persosim . simulator . utils . Utils . toUnsignedByteArray ( ( ( int ) ( - 2146360813 ) ) ) ; byte [ ] expected = new byte [ ] { ( ( byte ) ( 128 ) ) , ( ( byte ) ( 17 ) ) , ( ( byte ) ( 34 ) ) , ( ( byte ) ( 19 ) ) } ; "<AssertPlaceHolder>" ; } toUnsignedByteArray ( java . math . BigInteger ) { if ( bigInt == null ) { throw new java . lang . NullPointerException ( ) ; } byte [ ] resultTMP ; byte [ ] resultFINAL ; java . math . BigInteger bigIntTmp ; if ( ( ( resultTMP [ 0 ] ) == ( ( byte ) ( 0 ) ) ) && ( ( resultTMP . length ) > 1 ) ) { resultFINAL = new byte [ ( resultTMP . length ) - 1 ] ; java . lang . System . arraycopy ( resultTMP , 1 , resultFINAL , 0 , resultFINAL . length ) ; } else { resultFINAL = resultTMP ; } return resultFINAL ; }
org . junit . Assert . assertArrayEquals ( expected , actual )
testRequestId ( ) { com . google . apphosting . api . ApiProxy . Environment env = com . google . apphosting . api . ApiProxy . getCurrentEnvironment ( ) ; java . lang . String requestId = ( ( java . lang . String ) ( env . getAttributes ( ) . get ( "com.google.appengine.runtime.request_log_id" ) ) ) ; java . lang . String errMsg = "The<sp>request<sp>id<sp>should<sp>not<sp>be<sp>null" ; "<AssertPlaceHolder>" ; } get ( com . google . appengine . api . datastore . Key ) { return com . google . appengine . tck . modules . support . LibHelper . ds . get ( key ) ; }
org . junit . Assert . assertNotNull ( errMsg , requestId )
customStringRequest ( ) { java . lang . String customData = "{\"c\":1.0}" ; io . seldon . protos . PredictionProtos . SeldonMessage . Builder b = io . seldon . protos . PredictionProtos . SeldonMessage . newBuilder ( ) ; b . setStrData ( customData ) ; io . seldon . protos . PredictionProtos . SeldonMessage request = b . build ( ) ; java . lang . String json = io . seldon . engine . pb . ProtoBufUtils . toJson ( request ) ; System . out . println ( json ) ; io . seldon . protos . PredictionProtos . SeldonMessage . Builder b2 = io . seldon . protos . PredictionProtos . SeldonMessage . newBuilder ( ) ; io . seldon . engine . pb . ProtoBufUtils . updateMessageBuilderFromJson ( b2 , json ) ; io . seldon . protos . PredictionProtos . SeldonMessage request2 = b2 . build ( ) ; java . lang . String json2 = io . seldon . engine . pb . ProtoBufUtils . toJson ( request2 ) ; System . out . println ( json2 ) ; "<AssertPlaceHolder>" ; } toJson ( com . google . protobuf . Message ) { boolean omittingInsignificantWhitespace = false ; return io . seldon . engine . pb . ProtoBufUtils . toJson ( message , omittingInsignificantWhitespace ) ; }
org . junit . Assert . assertEquals ( json , json2 )
testFindSimilarMedicationsNoSimilar ( ) { org . mockito . Mockito . when ( medicationsDao . findSimilarMedicationsIds ( anyLong ( ) , anyListOf ( com . marand . thinkmed . medications . business . Long . class ) , any ( ) ) ) . thenReturn ( java . util . Collections . emptySet ( ) ) ; final java . util . List < com . marand . thinkmed . html . components . tree . TreeNodeData > searchResult = medicationsFinder . findSimilarMedications ( 11L , java . util . Collections . singletonList ( 10L ) , new org . joda . time . DateTime ( ) ) ; "<AssertPlaceHolder>" ; } findSimilarMedications ( long , java . util . List , org . joda . time . DateTime ) { com . google . common . base . Preconditions . checkNotNull ( routeIds , "routeIds<sp>must<sp>not<sp>be<sp>null" ) ; com . google . common . base . Preconditions . checkNotNull ( when , "when<sp>must<sp>not<sp>be<sp>null" ) ; final java . util . Set < java . lang . Long > similarMedicationsIds = medicationsDao . findSimilarMedicationsIds ( medicationId , routeIds , when ) ; final java . util . Map < java . lang . Long , com . marand . thinkmed . medications . dto . MedicationHolderDto > similarMedicationsMap = similarMedicationsIds . stream ( ) . map ( ( i ) -> medicationsValueHolder . getValue ( ) . get ( i ) ) . filter ( ( m ) -> m != null ) . collect ( java . util . stream . Collectors . toMap ( MedicationHolderDto :: getId , ( m ) -> m ) ) ; return buildMedicationsTree ( similarMedicationsMap ) ; }
org . junit . Assert . assertTrue ( searchResult . isEmpty ( ) )
testSanitizerStyleBadPadding ( ) { java . lang . String input = "<p<sp>style=\"padding-left:<sp>any;\">content</p>" ; java . lang . String expected = "<p<sp>style=\"\">content</p>" ; "<AssertPlaceHolder>" ; } sanitize ( java . lang . String ) { return com . github . bordertech . wcomponents . util . HtmlSanitizerUtil . sanitize ( input , false ) ; }
org . junit . Assert . assertEquals ( expected , com . github . bordertech . wcomponents . util . HtmlSanitizerUtil . sanitize ( input ) )
addIndexBothRemovedTest ( ) { org . threadly . concurrent . collections . ConcurrentArrayList . DataSet < java . lang . Integer > result = org . threadly . concurrent . collections . ConcurrentArrayListDataSetTest . removedFromBoth . add ( 5 , 100 ) . add ( 0 , 200 ) ; java . lang . Integer [ ] expectedResult = new java . lang . Integer [ ] { 200 , 1 , 2 , 3 , 4 , 5 , 100 , 6 , 7 , 8 } ; "<AssertPlaceHolder>" ; } makeDataSet ( java . lang . Object [ ] , int , int ) { return new org . threadly . concurrent . collections . ConcurrentArrayList . DataSet ( dataArray , startPosition , endPosition , 0 , 0 ) ; }
org . junit . Assert . assertTrue ( result . equals ( org . threadly . concurrent . collections . ConcurrentArrayListDataSetTest . makeDataSet ( expectedResult , 0 , expectedResult . length ) ) )
testFreeSelectSqlBuilderInTableShardsWithTableName ( ) { java . util . List < java . lang . Integer > list = new java . util . ArrayList ( ) ; list . add ( 3 ) ; list . add ( 4 ) ; com . ctrip . platform . dal . dao . sqlbuilder . FreeSelectSqlBuilder < java . util . List < com . ctrip . platform . dal . dao . shard . ClientTestModel > > builder = new com . ctrip . platform . dal . dao . sqlbuilder . FreeSelectSqlBuilder ( ) ; builder . selectAll ( ) . from ( com . ctrip . platform . dal . dao . shard . BaseDalTabelDaoShardByTableTest . TABLE_NAME ) . where ( com . ctrip . platform . dal . dao . sqlbuilder . Expressions . in ( "id" , list , Types . INTEGER ) ) ; builder . mapWith ( new com . ctrip . platform . dal . dao . helper . DalDefaultJpaMapper ( com . ctrip . platform . dal . dao . shard . ClientTestModel . class ) ) ; java . util . List < com . ctrip . platform . dal . dao . shard . ClientTestModel > result = null ; try { java . util . Set < java . lang . String > tableShards = new java . util . HashSet ( ) ; tableShards . add ( "2" ) ; tableShards . add ( "3" ) ; result = com . ctrip . platform . dal . dao . shard . BaseDalTabelDaoShardByTableTest . queryDao . query ( builder , new com . ctrip . platform . dal . dao . shard . DalHints ( ) . inTableShards ( tableShards ) ) ; } catch ( java . lang . Exception e ) { e . printStackTrace ( ) ; org . junit . Assert . fail ( ) ; } "<AssertPlaceHolder>" ; } size ( ) { return allKeys . size ( ) ; }
org . junit . Assert . assertEquals ( 3 , result . size ( ) )
testAvroDataFileWriteRead ( ) { com . cloudera . flume . handlers . debug . MemorySinkSource mem = com . cloudera . flume . handlers . debug . MemorySinkSource . cannedData ( "test<sp>" , 5 ) ; java . io . File f = com . cloudera . util . FileUtil . createTempFile ( "avrodata" , ".avro" ) ; f . deleteOnExit ( ) ; com . cloudera . flume . handlers . avro . TestAvroDataFile . LOG . info ( ( "filename<sp>before<sp>escaping:<sp>" + ( f . getAbsolutePath ( ) ) ) ) ; java . lang . String custom = ( "text(\"" + ( org . apache . commons . lang . StringEscapeUtils . escapeJava ( f . getAbsolutePath ( ) ) ) ) + "\",<sp>\"avrodata\")" ; com . cloudera . flume . handlers . avro . TestAvroDataFile . LOG . info ( ( "sink<sp>to<sp>parse:<sp>" + custom ) ) ; com . cloudera . flume . core . EventSink snk = com . cloudera . flume . conf . FlumeBuilder . buildSink ( new com . cloudera . flume . conf . Context ( ) , custom ) ; snk . open ( ) ; mem . open ( ) ; com . cloudera . flume . core . EventUtil . dumpAll ( mem , snk ) ; snk . close ( ) ; mem . open ( ) ; org . apache . avro . io . DatumReader < com . cloudera . flume . core . EventImpl > dtm = new org . apache . avro . reflect . ReflectDatumReader < com . cloudera . flume . core . EventImpl > ( com . cloudera . flume . core . EventImpl . class ) ; org . apache . avro . file . DataFileReader < com . cloudera . flume . core . EventImpl > dr = new org . apache . avro . file . DataFileReader < com . cloudera . flume . core . EventImpl > ( f , dtm ) ; com . cloudera . flume . core . EventImpl eout = null ; for ( java . lang . Object o : dr ) { eout = ( ( com . cloudera . flume . core . EventImpl ) ( o ) ) ; com . cloudera . flume . core . Event expected = mem . next ( ) ; "<AssertPlaceHolder>" ; } } getBody ( ) { return evt . getRenderedMessage ( ) . getBytes ( ) ; }
org . junit . Assert . assertTrue ( java . util . Arrays . equals ( eout . getBody ( ) , expected . getBody ( ) ) )
removeFilter_A$Class ( ) { org . junithelper . core . filter . TrimFilterManager target = new org . junithelper . core . filter . TrimFilterManager ( ) ; target . addFilter ( new org . junithelper . core . filter . impl . TrimCommentFilter ( ) , new org . junithelper . core . filter . impl . TrimInsideOfBraceFilter ( ) , new org . junithelper . core . filter . impl . TrimQuotationFilter ( ) ) ; java . lang . Class < ? > filterClass = org . junithelper . core . filter . impl . TrimQuotationFilter . class ; target . removeFilter ( filterClass ) ; "<AssertPlaceHolder>" ; } getFilters ( ) { return this . filters ; }
org . junit . Assert . assertEquals ( 2 , target . getFilters ( ) . size ( ) )
testGetAsStringSpaceAndThreeByteSpaceMixtureTrailing ( ) { java . lang . String in = " " + ( ( ( ( "<sp>" + "  " ) + "<sp>" ) + " " ) + "<sp>" ) ; java . lang . String out = converter . getAsString ( context , component , in ) ; "<AssertPlaceHolder>" ; } getAsString ( javax . faces . context . FacesContext , javax . faces . component . UIComponent , java . lang . Object ) { java . lang . String retVal ; if ( ! ( object instanceof org . oscm . internal . vo . VOPaymentInfo ) ) { retVal = "" ; } else { retVal = java . lang . String . valueOf ( ( ( org . oscm . internal . vo . VOPaymentInfo ) ( object ) ) . getKey ( ) ) ; } return retVal ; }
org . junit . Assert . assertEquals ( null , out )
testGetItemDocument ( ) { org . wikidata . wdtk . datamodel . helpers . ItemDocument o1 = org . wikidata . wdtk . datamodel . helpers . Datamodel . makeItemDocument ( factory . getItemIdValue ( "Q42" , "foo" ) , java . util . Collections . emptyList ( ) , java . util . Collections . emptyList ( ) , java . util . Collections . emptyList ( ) , java . util . Collections . emptyList ( ) , java . util . Collections . emptyMap ( ) ) ; org . wikidata . wdtk . datamodel . helpers . ItemDocument o2 = factory . getItemDocument ( factory . getItemIdValue ( "Q42" , "foo" ) , java . util . Collections . emptyList ( ) , java . util . Collections . emptyList ( ) , java . util . Collections . emptyList ( ) , java . util . Collections . emptyList ( ) , java . util . Collections . emptyMap ( ) , 0 ) ; "<AssertPlaceHolder>" ; } getItemIdValue ( java . lang . String , java . lang . String ) { return new org . wikidata . wdtk . datamodel . implementation . ItemIdValueImpl ( id , siteIri ) ; }
org . junit . Assert . assertEquals ( o1 , o2 )
testGetReactionStep_IReaction ( ) { org . openscience . cdk . reaction . ReactionChain chain = new org . openscience . cdk . reaction . ReactionChain ( ) ; org . openscience . cdk . interfaces . IReaction reaction1 = org . openscience . cdk . reaction . ReactionChainTest . builder . newInstance ( org . openscience . cdk . interfaces . IReaction . class ) ; reaction1 . setID ( "reaction1" ) ; chain . addReaction ( reaction1 , 0 ) ; org . openscience . cdk . interfaces . IReaction reaction2 = org . openscience . cdk . reaction . ReactionChainTest . builder . newInstance ( org . openscience . cdk . interfaces . IReaction . class ) ; reaction1 . setID ( "reaction2" ) ; org . openscience . cdk . interfaces . IReaction reaction3 = org . openscience . cdk . reaction . ReactionChainTest . builder . newInstance ( org . openscience . cdk . interfaces . IReaction . class ) ; reaction1 . setID ( "reaction3" ) ; chain . addReaction ( reaction1 , 0 ) ; chain . addReaction ( reaction2 , 1 ) ; chain . addReaction ( reaction3 , 2 ) ; "<AssertPlaceHolder>" ; } getReactionStep ( org . openscience . cdk . interfaces . IReaction ) { if ( hashMapChain . containsKey ( reaction ) ) return hashMapChain . get ( reaction ) ; else return - 1 ; }
org . junit . Assert . assertEquals ( 1 , chain . getReactionStep ( reaction2 ) )
testNoneMatchWithTrueResult ( ) { boolean match = com . annimon . stream . Stream . of ( 2 , 3 , 5 , 8 , 13 ) . noneMatch ( com . annimon . stream . Functions . remainder ( 10 ) ) ; "<AssertPlaceHolder>" ; } remainder ( int ) { return new com . annimon . stream . Predicate < java . lang . Integer > ( ) { @ com . annimon . stream . Override public boolean test ( java . lang . Integer v ) { return ( v % val ) == 0 ; } } ; }
org . junit . Assert . assertTrue ( match )
stringWithFractionalTimeReturnsCorrectMillisecondsAsFractionNotCount ( ) { java . lang . String exampleDate = "2012-11-28T17:43:12.1Z" ; java . util . Date parsedTime = new com . microsoft . windowsazure . services . media . implementation . ODataDateAdapter ( ) . unmarshal ( exampleDate ) ; java . util . Calendar timeToNearestSecond = java . util . Calendar . getInstance ( ) ; timeToNearestSecond . setTimeZone ( com . microsoft . windowsazure . services . media . implementation . ODataDateParsingTest . utc ) ; timeToNearestSecond . set ( 2012 , 10 , 28 , 17 , 43 , 12 ) ; timeToNearestSecond . set ( Calendar . MILLISECOND , 0 ) ; long millis = ( parsedTime . getTime ( ) ) - ( timeToNearestSecond . getTimeInMillis ( ) ) ; "<AssertPlaceHolder>" ; } set ( java . lang . String , java . lang . String ) { queryParameters . add ( parameterName , parameterValue ) ; return this ; }
org . junit . Assert . assertEquals ( 100 , millis )
fetchEventsForAYearWithoutAnyEvents ( ) { java . util . Calendar year = java . util . Calendar . getInstance ( ) ; year . add ( Calendar . YEAR , 10 ) ; java . util . List < com . stratelia . webactiv . almanach . model . EventDetail > events = new java . util . ArrayList < com . stratelia . webactiv . almanach . model . EventDetail > ( eventDAO . findAllEventsInPeriod ( org . silverpeas . core . date . period . Period . from ( year . getTime ( ) , PeriodType . year , "en" ) , almanachIds ) ) ; "<AssertPlaceHolder>" ; } isEmpty ( ) { return ( this . size ( ) ) == 0 ; }
org . junit . Assert . assertThat ( events . isEmpty ( ) , is ( true ) )
testEmailAddressType ( ) { com . effektif . workflow . api . workflow . ExecutableWorkflow workflow = new com . effektif . workflow . api . workflow . ExecutableWorkflow ( ) . variable ( "v" , new com . effektif . workflow . api . types . EmailAddressType ( ) ) ; deploy ( workflow ) ; com . effektif . workflow . api . workflowinstance . WorkflowInstance workflowInstance = workflowEngine . start ( new com . effektif . workflow . api . model . TriggerInstance ( ) . workflowId ( workflow . getId ( ) ) . data ( "v" , "info@effektif.com" ) ) ; java . lang . Object value = workflowInstance . getVariableValue ( "v" ) ; "<AssertPlaceHolder>" ; } getVariableValue ( java . lang . String ) { if ( variableId == null ) { return null ; } if ( ( variableInstances ) != null ) { for ( com . effektif . workflow . api . workflowinstance . VariableInstance variableInstance : variableInstances ) { if ( variableId . equals ( variableInstance . getVariableId ( ) ) ) { return ( ( T ) ( variableInstance . getValue ( ) ) ) ; } } } return null ; }
org . junit . Assert . assertEquals ( "info@effektif.com" , value )
bundleForValidWorkflow ( ) { org . commonwl . view . workflow . Workflow validWorkflow = new org . commonwl . view . workflow . Workflow ( "Valid<sp>Workflow" , "Doc<sp>for<sp>Valid<sp>Workflow" , new java . util . HashMap ( ) , new java . util . HashMap ( ) , new java . util . HashMap ( ) ) ; validWorkflow . setRetrievedFrom ( org . mockito . Mockito . mock ( org . commonwl . view . git . GitDetails . class ) ) ; org . commonwl . view . researchobject . ROBundleService mockROBundleService = org . mockito . Mockito . mock ( org . commonwl . view . researchobject . ROBundleService . class ) ; when ( mockROBundleService . saveToFile ( anyObject ( ) ) ) . thenReturn ( java . nio . file . Paths . get ( "test/path/to/check/for.zip" ) ) ; org . commonwl . view . workflow . WorkflowRepository mockRepository = org . mockito . Mockito . mock ( org . commonwl . view . workflow . WorkflowRepository . class ) ; when ( mockRepository . findByRetrievedFrom ( anyObject ( ) ) ) . thenReturn ( null ) . thenReturn ( null ) . thenReturn ( validWorkflow ) ; org . commonwl . view . researchobject . ROBundleFactory factory = new org . commonwl . view . researchobject . ROBundleFactory ( mockROBundleService , mockRepository ) ; factory . createWorkflowRO ( validWorkflow ) ; "<AssertPlaceHolder>" ; } getRoBundlePath ( ) { return roBundlePath ; }
org . junit . Assert . assertEquals ( java . nio . file . Paths . get ( "test/path/to/check/for.zip" ) , java . nio . file . Paths . get ( validWorkflow . getRoBundlePath ( ) ) )
setSasTokenSets ( ) { com . microsoft . azure . sdk . iot . provisioning . device . internal . contract . amqp . AmqpsProvisioningSymmetricKeySaslHandler handler = mockit . Deencapsulation . newInstance ( com . microsoft . azure . sdk . iot . provisioning . device . internal . contract . amqp . AmqpsProvisioningSymmetricKeySaslHandler . class , new java . lang . Class [ ] { java . lang . String . class , java . lang . String . class , java . lang . String . class } , tests . unit . com . microsoft . azure . sdk . iot . provisioning . device . internal . contract . amqp . AmqpsProvisioningSymmetricKeySaslHandlerTest . idScope , tests . unit . com . microsoft . azure . sdk . iot . provisioning . device . internal . contract . amqp . AmqpsProvisioningSymmetricKeySaslHandlerTest . registrationId , tests . unit . com . microsoft . azure . sdk . iot . provisioning . device . internal . contract . amqp . AmqpsProvisioningSymmetricKeySaslHandlerTest . sasToken ) ; handler . setSasToken ( tests . unit . com . microsoft . azure . sdk . iot . provisioning . device . internal . contract . amqp . AmqpsProvisioningSymmetricKeySaslHandlerTest . sasToken ) ; java . lang . String actualSasToken = mockit . Deencapsulation . getField ( handler , "sasToken" ) ; "<AssertPlaceHolder>" ; } setSasToken ( java . lang . String ) { this . sasToken = sasToken ; }
org . junit . Assert . assertEquals ( tests . unit . com . microsoft . azure . sdk . iot . provisioning . device . internal . contract . amqp . AmqpsProvisioningSymmetricKeySaslHandlerTest . sasToken , actualSasToken )
testEagerlyRegisterIncompatibleSerializer ( ) { org . apache . flink . runtime . state . StateSerializerProvider < org . apache . flink . runtime . testutils . statemigration . TestType > testProvider = org . apache . flink . runtime . state . StateSerializerProvider . fromNewRegisteredSerializer ( new org . apache . flink . runtime . testutils . statemigration . TestType . IncompatibleTestTypeSerializer ( ) ) ; org . apache . flink . api . common . typeutils . TypeSerializerSchemaCompatibility < org . apache . flink . runtime . testutils . statemigration . TestType > schemaCompatibility = testProvider . setPreviousSerializerSnapshotForRestoredState ( new org . apache . flink . runtime . testutils . statemigration . TestType . V1TestTypeSerializer ( ) . snapshotConfiguration ( ) ) ; "<AssertPlaceHolder>" ; try { testProvider . currentSchemaSerializer ( ) ; org . junit . Assert . fail ( ) ; } catch ( java . lang . Exception excepted ) { } } isIncompatible ( ) { return ( compatibilityType ) == ( TypeSerializerSchemaCompatibility . Type . INCOMPATIBLE ) ; }
org . junit . Assert . assertTrue ( schemaCompatibility . isIncompatible ( ) )
testAclFeature ( ) { org . apache . hadoop . security . UserGroupInformation ugi = org . apache . hadoop . security . UserGroupInformation . createUserForTesting ( "testuser" , new java . lang . String [ ] { "testgroup" } ) ; ugi . doAs ( ( ( java . security . PrivilegedExceptionAction < java . lang . Object > ) ( ( ) -> { org . apache . hadoop . fs . FileSystem fs = miniDFS . getFileSystem ( ) ; org . apache . hadoop . fs . Path aclDir = new org . apache . hadoop . fs . Path ( "/user/acl" ) ; fs . mkdirs ( aclDir ) ; org . apache . hadoop . fs . Path aclChildDir = new org . apache . hadoop . fs . Path ( aclDir , "subdir" ) ; fs . mkdirs ( aclChildDir ) ; org . apache . hadoop . hdfs . server . namenode . AclStatus aclStatus = fs . getAclStatus ( aclDir ) ; "<AssertPlaceHolder>" ; return null ; } ) ) ) ; } getEntries ( ) { java . util . List < org . apache . hadoop . hdfs . federation . protocol . proto . HdfsServerFederationProtos . MountTableRecordProto > entries = this . translator . getProtoOrBuilder ( ) . getEntriesList ( ) ; java . util . List < org . apache . hadoop . hdfs . server . federation . store . records . MountTable > ret = new java . util . ArrayList < org . apache . hadoop . hdfs . server . federation . store . records . MountTable > ( ) ; for ( org . apache . hadoop . hdfs . federation . protocol . proto . HdfsServerFederationProtos . MountTableRecordProto entry : entries ) { org . apache . hadoop . hdfs . server . federation . store . records . MountTable record = new org . apache . hadoop . hdfs . server . federation . store . records . impl . pb . MountTablePBImpl ( entry ) ; ret . add ( record ) ; } return ret ; }
org . junit . Assert . assertEquals ( 0 , aclStatus . getEntries ( ) . size ( ) )
testSetFontWithSameFont ( ) { org . eclipse . swt . graphics . Font font = createFont ( ) ; gc . setFont ( font ) ; org . eclipse . swt . internal . graphics . GCAdapter adapter = org . eclipse . swt . graphics . ControlGC_Test . getGCAdapter ( gc ) ; adapter . clearGCOperations ( ) ; gc . setFont ( font ) ; org . eclipse . swt . internal . graphics . GCOperation [ ] gcOperations = org . eclipse . swt . graphics . ControlGC_Test . getGCOperations ( gc ) ; "<AssertPlaceHolder>" ; } getGCOperations ( org . eclipse . swt . graphics . GC ) { return org . eclipse . swt . graphics . ControlGC_Test . getGCAdapter ( gc ) . getGCOperations ( ) ; }
org . junit . Assert . assertEquals ( 0 , gcOperations . length )
testHStore ( ) { for ( java . lang . String s : com . typemapper . postgres . HStoreTest . validHStoreStrings ) { com . typemapper . postgres . HStore hs = new com . typemapper . postgres . HStore ( s ) ; for ( Map . Entry < java . lang . String , java . lang . String > e : hs ) { "<AssertPlaceHolder>" ; } } }
org . junit . Assert . assertNotNull ( e )
testScriptingConfigurationDeserialization ( ) { com . thoughtworks . xstream . XStream xstream = new com . thoughtworks . xstream . XStream ( ) ; it . geosolutions . geobatch . xstream . Alias alias = new it . geosolutions . geobatch . xstream . Alias ( ) ; alias . setAliasRegistry ( aliasRegistry ) ; alias . setAliases ( xstream ) ; java . io . File configFile = new java . io . File ( "src/test/resources/scripting.xml" ) ; it . geosolutions . geobatch . configuration . flow . file . FileBasedFlowConfiguration configuration = ( ( it . geosolutions . geobatch . configuration . flow . file . FileBasedFlowConfiguration ) ( xstream . fromXML ( configFile ) ) ) ; boolean configurationDeserialized = false ; for ( it . geosolutions . geobatch . configuration . event . action . ActionConfiguration actionConfiguration : configuration . getEventConsumerConfiguration ( ) . getActions ( ) ) { if ( ( actionConfiguration != null ) && ( actionConfiguration instanceof it . geosolutions . geobatch . action . scripting . ScriptingConfiguration ) ) { configurationDeserialized = true ; break ; } } "<AssertPlaceHolder>" ; } getActions ( ) { return this . actions ; }
org . junit . Assert . assertTrue ( configurationDeserialized )
testParameters1 ( ) { java . lang . String className = "<sp>SET<sp>name<sp>=<sp>:_name2;" 5 ; db . createVertexClass ( className ) ; java . lang . String script = ( ( ( ( ( ( ( ( "BEGIN;" + "LET<sp>$a<sp>=<sp>CREATE<sp>VERTEX<sp>" ) + className ) + "<sp>SET<sp>name<sp>=<sp>:_name2;" 1 ) + "<sp>SET<sp>name<sp>=<sp>:_name2;" 2 ) + className ) + "<sp>SET<sp>name<sp>=<sp>:_name2;" ) + "LET<sp>$edge<sp>=<sp>CREATE<sp>EDGE<sp>E<sp>from<sp>$a<sp>to<sp>$b;" ) + "COMMIT;" ) + "<sp>SET<sp>name<sp>=<sp>:_name2;" 4 ; com . orientechnologies . orient . core . sql . HashMap < java . lang . String , java . lang . Object > map = new com . orientechnologies . orient . core . sql . HashMap ( ) ; map . put ( "name" , "bozo" ) ; map . put ( "_name2" , "<sp>SET<sp>name<sp>=<sp>:_name2;" 0 ) ; com . orientechnologies . orient . core . sql . executor . OResultSet rs = db . execute ( "sql" , script , map ) ; rs . close ( ) ; rs = db . query ( ( ( "<sp>SET<sp>name<sp>=<sp>:_name2;" 3 + className ) + "<sp>WHERE<sp>name<sp>=<sp>?" ) , "bozo" ) ; "<AssertPlaceHolder>" ; rs . next ( ) ; rs . close ( ) ; } hasNext ( ) { return false ; }
org . junit . Assert . assertTrue ( rs . hasNext ( ) )
testPostDown ( ) { setStackStatus ( StatusEnum . DOWN , org . sagebionetworks . repo . web . StackStatusInterceptorTest . MSG_FORMAT ) ; "<AssertPlaceHolder>" ; org . sagebionetworks . repo . model . Project child = new org . sagebionetworks . repo . model . Project ( ) ; child . setParentId ( sampleProject . getId ( ) ) ; servletTestHelper . createEntity ( dispatchServlet , child , adminUserId ) ; org . junit . Assert . fail ( ) ; } getCurrentStatus ( ) { return stackStatusDao . getFullCurrentStatus ( ) ; }
org . junit . Assert . assertEquals ( StatusEnum . DOWN , stackStatusDao . getCurrentStatus ( ) )
testWildcardTypeLowerBound ( ) { javax . ws . rs . core . GenericType < java . util . List < ? super java . lang . String > > genericType = new javax . ws . rs . core . GenericType < java . util . List < ? super java . lang . String > > ( ) { } ; java . lang . reflect . ParameterizedType pt = ( ( java . lang . reflect . ParameterizedType ) ( genericType . getType ( ) ) ) ; java . lang . reflect . Type t = pt . getActualTypeArguments ( ) [ 0 ] ; printTypes ( t ) ; java . lang . Class < ? > rawType = org . jboss . resteasy . spi . util . Types . getRawType ( t ) ; "<AssertPlaceHolder>" ; } getRawType ( java . lang . reflect . Type ) { if ( type instanceof java . lang . Class < ? > ) { return ( ( java . lang . Class < ? > ) ( type ) ) ; } else if ( type instanceof java . lang . reflect . ParameterizedType ) { java . lang . reflect . ParameterizedType parameterizedType = ( ( java . lang . reflect . ParameterizedType ) ( type ) ) ; java . lang . reflect . Type rawType = parameterizedType . getRawType ( ) ; return ( ( java . lang . Class < ? > ) ( rawType ) ) ; } else if ( type instanceof java . lang . reflect . GenericArrayType ) { final java . lang . reflect . GenericArrayType genericArrayType = ( ( java . lang . reflect . GenericArrayType ) ( type ) ) ; final java . lang . Class < ? > componentRawType = org . jboss . resteasy . spi . util . Types . getRawType ( genericArrayType . getGenericComponentType ( ) ) ; return java . lang . reflect . Array . newInstance ( componentRawType , 0 ) . getClass ( ) ; } else if ( type instanceof java . lang . reflect . TypeVariable ) { final java . lang . reflect . TypeVariable typeVar = ( ( java . lang . reflect . TypeVariable ) ( type ) ) ; if ( ( ( typeVar . getBounds ( ) ) != null ) && ( ( typeVar . getBounds ( ) . length ) > 0 ) ) { return org . jboss . resteasy . spi . util . Types . getRawType ( typeVar . getBounds ( ) [ 0 ] ) ; } } else if ( type instanceof java . lang . reflect . WildcardType ) { java . lang . reflect . WildcardType wildcardType = ( ( java . lang . reflect . WildcardType ) ( type ) ) ; java . lang . reflect . Type [ ] upperBounds = wildcardType . getUpperBounds ( ) ; if ( ( upperBounds != null ) && ( ( upperBounds . length ) > 0 ) ) { return org . jboss . resteasy . spi . util . Types . getRawType ( upperBounds [ 0 ] ) ; } } throw new java . lang . RuntimeException ( Messages . MESSAGES . unableToDetermineBaseClass ( ) ) ; }
org . junit . Assert . assertEquals ( java . lang . Object . class , rawType )
testGenerateDegree ( ) { com . codebits . d4m . TestableMutation mutation = new com . codebits . d4m . TestableMutation ( "CITY_NAME|Akron" ) ; mutation . put ( com . codebits . d4m . ingest . MutationFactoryTest . emptyCF , com . codebits . d4m . ingest . MutationFactoryTest . degree , com . codebits . d4m . ingest . MutationFactoryTest . one ) ; java . util . List < org . apache . accumulo . core . data . Mutation > expected = new java . util . ArrayList < org . apache . accumulo . core . data . Mutation > ( ) ; expected . add ( mutation ) ; java . util . List < org . apache . accumulo . core . data . Mutation > actual = instance . generateDegree ( row , fieldNames , fieldValues ) ; "<AssertPlaceHolder>" ; } generateDegree ( java . lang . String , java . lang . String [ ] , java . lang . String [ ] ) { org . apache . commons . lang . Validate . notNull ( mutationFactory , com . codebits . d4m . ingest . KeyFactory . MUTATION_FACTORY_VALUE_ERROR ) ; java . util . Map < org . apache . accumulo . core . data . Key , org . apache . accumulo . core . data . Value > entries = new java . util . TreeMap < org . apache . accumulo . core . data . Key , org . apache . accumulo . core . data . Value > ( ) ; for ( org . apache . accumulo . core . data . Mutation mutation : mutationFactory . generateDegree ( row , fieldNames , fieldValues ) ) { org . apache . hadoop . io . Text fact = new org . apache . hadoop . io . Text ( mutation . getRow ( ) ) ; for ( org . apache . accumulo . core . data . ColumnUpdate columnUpdate : mutation . getUpdates ( ) ) { if ( underTest ) { key = new org . apache . accumulo . core . data . Key ( fact , com . codebits . d4m . ingest . MutationFactory . EMPTY_CF , com . codebits . d4m . ingest . MutationFactory . DEGREE , 0 ) ; } else { key = new org . apache . accumulo . core . data . Key ( fact , com . codebits . d4m . ingest . MutationFactory . EMPTY_CF , com . codebits . d4m . ingest . MutationFactory . DEGREE ) ; } entries . put ( key , new org . apache . accumulo . core . data . Value ( columnUpdate . getValue ( ) ) ) ; } } return entries ; }
org . junit . Assert . assertEquals ( expected , actual )
testGetExtensionByNameDoesNotExist ( ) { final org . apache . nifi . registry . db . entity . ExtensionEntity entity = metadataService . getExtensionByName ( "eb1-v1" , "org.apache.nifi.DOESNOTEXIST" ) ; "<AssertPlaceHolder>" ; } getExtensionByName ( java . lang . String , java . lang . String ) { final java . lang . String selectSql = ( org . apache . nifi . registry . db . DatabaseMetadataService . BASE_EXTENSION_SQL ) + "<sp>AND<sp>e.bundle_version_id<sp>=<sp>?<sp>AND<sp>e.name<sp>=<sp>?" ; try { return jdbcTemplate . queryForObject ( selectSql , new org . apache . nifi . registry . db . mapper . ExtensionEntityRowMapper ( ) , bundleVersionId , name ) ; } catch ( org . springframework . dao . EmptyResultDataAccessException e ) { return null ; } }
org . junit . Assert . assertNull ( entity )
testRewriteModeChainRuleTree4 ( ) { java . lang . String grammar = "ID<sp>:<sp>\'a\'..\'z\'+<sp>;\n" 6 + ( ( ( ( "options<sp>{output=AST;}\n" + "ID<sp>:<sp>\'a\'..\'z\'+<sp>;\n" 7 ) + "ID<sp>:<sp>\'a\'..\'z\'+<sp>;\n" ) + "WS<sp>:<sp>(\'<sp>\'|\'\\n\')<sp>{$channel=HIDDEN;}<sp>;\n" 0 ) + "WS<sp>:<sp>(\'<sp>\'|\'\\n\')<sp>{$channel=HIDDEN;}<sp>;\n" ) ; java . lang . String treeGrammar = "ID<sp>:<sp>\'a\'..\'z\'+<sp>;\n" 9 + ( ( ( ( ( "options<sp>{output=AST;<sp>ASTLabelType=CommonTree;<sp>tokenVocab=T;<sp>rewrite=true;}\n" + "tokens<sp>{<sp>X;<sp>}\n" ) + "ID<sp>:<sp>\'a\'..\'z\'+<sp>;\n" 4 ) + "a<sp>:<sp>X<sp>;\n" ) + "b<sp>:<sp>^(ID<sp>INT)<sp>-><sp>INT\n" ) + "ID<sp>:<sp>\'a\'..\'z\'+<sp>;\n" 8 ) ; java . lang . String found = execTreeParser ( "WS<sp>:<sp>(\'<sp>\'|\'\\n\')<sp>{$channel=HIDDEN;}<sp>;\n" 1 , grammar , "TParser" , "TP.g" , treeGrammar , "ID<sp>:<sp>\'a\'..\'z\'+<sp>;\n" 2 , "TLexer" , "ID<sp>:<sp>\'a\'..\'z\'+<sp>;\n" 0 , "ID<sp>:<sp>\'a\'..\'z\'+<sp>;\n" 5 , "ID<sp>:<sp>\'a\'..\'z\'+<sp>;\n" 1 ) ; "<AssertPlaceHolder>" ; } execTreeParser ( java . lang . String , java . lang . String , java . lang . String , java . lang . String , java . lang . String , java . lang . String , java . lang . String , java . lang . String , java . lang . String , java . lang . String ) { return execTreeParser ( parserGrammarFileName , parserGrammarStr , parserName , treeParserGrammarFileName , treeParserGrammarStr , treeParserName , lexerName , parserStartRuleName , treeParserStartRuleName , input , false ) ; }
org . junit . Assert . assertEquals ( "ID<sp>:<sp>\'a\'..\'z\'+<sp>;\n" 3 , found )
oneMillionEntries ( ) { "<AssertPlaceHolder>" ; int howMany = 1000000 ; int size = ( ( int ) ( mem . capacity ( ) ) ) / howMany ; size -= ( size / 100 ) * 1 ; org . apache . directmemory . memory . MallocWithUnsafeTest . logger . info ( ( "payload<sp>size=" + size ) ) ; org . apache . directmemory . memory . MallocWithUnsafeTest . logger . info ( ( "entries=" + howMany ) ) ; org . apache . directmemory . memory . MallocWithUnsafeTest . logger . info ( "starting..." ) ; long start = java . lang . System . currentTimeMillis ( ) ; byte [ ] payload = new byte [ size ] ; for ( int i = 0 ; i < howMany ; i ++ ) { mem . store ( payload ) ; } org . apache . directmemory . memory . MallocWithUnsafeTest . logger . info ( ( ( "...done<sp>in<sp>" + ( ( java . lang . System . currentTimeMillis ( ) ) - start ) ) + "<sp>msecs." ) ) ; }
org . junit . Assert . assertNotNull ( mem )
testGetOperator ( ) { final java . lang . String expectedOperator = "operator" ; when ( plugin . getFactField ( ) ) . thenReturn ( "factField" ) ; when ( plugin . editingCol ( ) ) . thenReturn ( editingCol ) ; when ( editingCol . getOperator ( ) ) . thenReturn ( expectedOperator ) ; final java . lang . String operator = page . getOperator ( ) ; verify ( plugin ) . editingCol ( ) ; verify ( editingCol ) . getOperator ( ) ; "<AssertPlaceHolder>" ; } getOperator ( ) { return editingCol ( ) . getOperator ( ) ; }
org . junit . Assert . assertEquals ( expectedOperator , operator )
testJson ( ) { boolean showRelevance = false ; boolean showEntityIds = false ; boolean showApiUrls = false ; edu . harvard . iq . dataverse . search . SolrSearchResult result01 = new edu . harvard . iq . dataverse . search . SolrSearchResult ( "myQuery" , "myName" ) ; result01 . setType ( SearchConstants . DATAVERSES ) ; javax . json . JsonObjectBuilder actual01 = result01 . json ( showRelevance , showEntityIds , showApiUrls ) ; javax . json . JsonObject actual = actual01 . build ( ) ; System . out . println ( ( "actual:<sp>" + actual ) ) ; javax . json . JsonObjectBuilder expResult = javax . json . Json . createObjectBuilder ( ) ; expResult . add ( "type" , SearchConstants . DATAVERSE ) ; javax . json . JsonObject expected = expResult . build ( ) ; System . out . println ( ( "expect:<sp>" + expected ) ) ; "<AssertPlaceHolder>" ; } println ( java . lang . String ) { System . out . println ( s ) ; System . out . flush ( ) ; if ( ( edu . harvard . iq . dataverse . util . bagit . BagGenerator . pw ) != null ) { edu . harvard . iq . dataverse . util . bagit . BagGenerator . pw . println ( s ) ; edu . harvard . iq . dataverse . util . bagit . BagGenerator . pw . flush ( ) ; } return ; }
org . junit . Assert . assertEquals ( expected , actual )
resolveMethod ( java . lang . String , java . lang . String , java . lang . String ) { java . nio . file . Path path1 = createPath ( data1 ) ; java . nio . file . Path path2 = createPath ( data2 ) ; "<AssertPlaceHolder>" ; } resolve ( java . nio . file . Path ) { return new com . facebook . buck . rules . modern . OutputPath ( path . resolve ( subPath ) ) ; }
org . junit . Assert . assertEquals ( expected , path1 . resolve ( path2 ) . toString ( ) )
checkValidNameTest ( ) { java . lang . String code = "class<sp>A{isA<sp>12T;}trait<sp>12T{}" ; cruise . umple . compiler . UmpleModel model = getModel ( code ) ; boolean result = false ; try { model . run ( ) ; } catch ( java . lang . Exception e ) { result = e . getMessage ( ) . contains ( "200" ) ; } finally { "<AssertPlaceHolder>" ; cruise . umple . util . SampleFileWriter . destroy ( "traitTest.ump" ) ; } } contains ( java . lang . Object ) { if ( ( parent ) != null ) { return ( super . contains ( obj ) ) || ( parent . contains ( obj ) ) ; } else { return super . contains ( obj ) ; } }
org . junit . Assert . assertTrue ( result )
testMatchExactlyNwDstErr ( ) { params = new java . util . HashMap < java . lang . String , java . lang . String > ( ) { { put ( "type" , "aaa" ) ; put ( "in_node" , "node01" ) ; put ( "in_port" , "port01" ) ; put ( OFPFlowMatch . IPV4_DST , "nw_dst" ) ; } } ; target = new org . o3project . odenos . core . component . network . flow . query . OFPFlowMatchQuery ( params ) ; target . parse ( ) ; org . o3project . odenos . core . component . network . flow . ofpflow . OFPFlowMatch match = new org . o3project . odenos . core . component . network . flow . ofpflow . OFPFlowMatch ( ) ; match . setIpv4Dst ( "bbb" ) ; "<AssertPlaceHolder>" ; } matchExactly ( org . o3project . odenos . core . component . network . flow . basic . FlowAction ) { if ( ( target == null ) || ( ! ( target . getType ( ) . equals ( org . o3project . odenos . core . component . network . flow . ofpflow . OFPFlowActionCopyTtlOut . class . getSimpleName ( ) ) ) ) ) { return false ; } return true ; }
org . junit . Assert . assertThat ( target . matchExactly ( match ) , org . hamcrest . CoreMatchers . is ( false ) )
testLastFollowedByUnit ( ) { java . lang . String outcome = opennlp . tools . namefind . BilouNameFinderSequenceValidatorTest . UNIT_A ; java . lang . String [ ] inputSequence = new java . lang . String [ ] { "TypeA" , "TypeA" , "TypeA" , "AnyType" , "something" } ; java . lang . String [ ] outcomesSequence = new java . lang . String [ ] { opennlp . tools . namefind . BilouNameFinderSequenceValidatorTest . START_A , opennlp . tools . namefind . BilouNameFinderSequenceValidatorTest . CONTINUE_A , opennlp . tools . namefind . BilouNameFinderSequenceValidatorTest . LAST_A } ; "<AssertPlaceHolder>" ; } validSequence ( int , java . lang . String [ ] , java . lang . String [ ] , java . lang . String ) { if ( outcome . endsWith ( BioCodec . CONTINUE ) ) { int li = ( outcomesSequence . length ) - 1 ; if ( li == ( - 1 ) ) { return false ; } else if ( outcomesSequence [ li ] . endsWith ( BioCodec . OTHER ) ) { return false ; } else if ( ( outcomesSequence [ li ] . endsWith ( BioCodec . CONTINUE ) ) || ( outcomesSequence [ li ] . endsWith ( BioCodec . START ) ) ) { java . lang . String previousNameType = opennlp . tools . namefind . NameFinderME . extractNameType ( outcomesSequence [ li ] ) ; java . lang . String nameType = opennlp . tools . namefind . NameFinderME . extractNameType ( outcome ) ; if ( ( previousNameType != null ) || ( nameType != null ) ) { if ( nameType != null ) { if ( nameType . equals ( previousNameType ) ) { return true ; } } return false ; } } } return true ; }
org . junit . Assert . assertTrue ( opennlp . tools . namefind . BilouNameFinderSequenceValidatorTest . validator . validSequence ( 3 , inputSequence , outcomesSequence , outcome ) )
isApiToken_whenOAuthAppTokenIsSupplied_returnsTrue ( ) { slacknotifications . SlackNotificationImpl impl = new slacknotifications . SlackNotificationImpl ( ) ; impl . setToken ( "34tsrfdgdrtyrysdfg" ) ; "<AssertPlaceHolder>" ; } getIsApiToken ( ) { if ( ( ( this . token ) != null ) && ( this . token . startsWith ( "http" ) ) ) { return false ; } return ( ( this . token ) == null ) || ( ( this . token . split ( "-" ) . length ) > 1 ) ; }
org . junit . Assert . assertFalse ( impl . getIsApiToken ( ) )
isAmong_needleIsNullAndHaystackIsContainsANull_returnsTrue ( ) { boolean found = eu . chargetime . ocpp . utilities . ModelUtil . isAmong ( null , "something" , null ) ; "<AssertPlaceHolder>" ; }
org . junit . Assert . assertThat ( found , org . hamcrest . CoreMatchers . is ( true ) )
testHistorischZoekBereikMaterieelGeenAutorisatie ( ) { final nl . bzk . brp . domain . element . AttribuutElement attribuutElement = getAttribuutElement ( Element . PERSOON_IDENTIFICATIENUMMERS_BURGERSERVICENUMMER . getId ( ) ) ; final nl . bzk . brp . service . bevraging . zoekpersoongeneriek . AbstractZoekPersoonVerzoek bevragingVerzoek = maakBevragingVerzoek ( attribuutElement , "4" , Zoekoptie . EXACT ) ; bevragingVerzoek . getParameters ( ) . setZoekBereik ( Zoekbereik . MATERIELE_PERIODE ) ; final nl . bzk . brp . domain . algemeen . Autorisatiebundel autorisatieBundel = maakAutorisatiebundel ( false , attribuutElement ) ; final java . util . Set < nl . bzk . brp . domain . algemeen . Melding > meldingen = valideerZoekCriteriaService . valideerZoekCriteria ( bevragingVerzoek , autorisatieBundel ) ; "<AssertPlaceHolder>" ; } size ( ) { return elementen . size ( ) ; }
org . junit . Assert . assertEquals ( 0 , meldingen . size ( ) )
testWritableTextAreaShouldClear ( ) { driver . get ( pages . readOnlyPage ) ; org . openqa . selenium . WebElement element = driver . findElement ( org . openqa . selenium . By . id ( "writableTextArea" ) ) ; element . clear ( ) ; "<AssertPlaceHolder>" ; } getAttribute ( java . lang . String ) { return commandProcessor . getString ( "getAttribute" , new java . lang . String [ ] { attributeLocator } ) ; }
org . junit . Assert . assertEquals ( "" , element . getAttribute ( "value" ) )
testExecutePreparationScripts_NoScripts ( ) { java . sql . Connection connection = com . btc . redg . generator . extractor . DatabaseManager . connectToDatabase ( "org.h2.Driver" , "jdbc:h2:mem:redg" , "" , "" ) ; "<AssertPlaceHolder>" ; com . btc . redg . generator . extractor . DatabaseManager . executePreparationScripts ( connection , null ) ; com . btc . redg . generator . extractor . DatabaseManager . executePreparationScripts ( connection , new java . io . File [ 0 ] ) ; } connectToDatabase ( java . lang . String , java . lang . String , java . lang . String , java . lang . String ) { try { com . btc . redg . generator . extractor . DatabaseManager . LOG . debug ( ( "Trying<sp>to<sp>load<sp>jdbc<sp>driver<sp>" + jdbcDriver ) ) ; java . lang . Class . forName ( jdbcDriver ) ; com . btc . redg . generator . extractor . DatabaseManager . LOG . info ( "Successfully<sp>loaded<sp>jdbc<sp>driver" ) ; } catch ( java . lang . ClassNotFoundException e ) { com . btc . redg . generator . extractor . DatabaseManager . LOG . error ( ( "Could<sp>not<sp>load<sp>jdbc<sp>driver<sp>with<sp>name<sp>" + jdbcDriver ) ) ; throw e ; } try { com . btc . redg . generator . extractor . DatabaseManager . LOG . debug ( ( "Connecting<sp>to<sp>extractor<sp>" + connectionString ) ) ; final java . sql . Connection conn = java . sql . DriverManager . getConnection ( connectionString , username , password ) ; com . btc . redg . generator . extractor . DatabaseManager . LOG . info ( "Successfully<sp>connected<sp>to<sp>extractor" ) ; return conn ; } catch ( java . sql . SQLException e ) { com . btc . redg . generator . extractor . DatabaseManager . LOG . error ( "Could<sp>not<sp>connect<sp>to<sp>extractor" , e ) ; throw e ; } }
org . junit . Assert . assertNotNull ( connection )
testGetStepTypeCodeToIdMap ( ) { org . pentaho . di . repository . kdr . KettleDatabaseRepository repository = mock ( org . pentaho . di . repository . kdr . KettleDatabaseRepository . class ) ; org . pentaho . di . repository . kdr . delegates . KettleDatabaseRepositoryConnectionDelegate connectionDelegate = mock ( org . pentaho . di . repository . kdr . delegates . KettleDatabaseRepositoryConnectionDelegate . class ) ; repository . connectionDelegate = connectionDelegate ; org . pentaho . di . core . database . DatabaseMeta databaseMeta = mock ( org . pentaho . di . core . database . DatabaseMeta . class ) ; when ( connectionDelegate . getDatabaseMeta ( ) ) . thenReturn ( databaseMeta ) ; when ( databaseMeta . quoteField ( anyString ( ) ) ) . thenAnswer ( new org . mockito . stubbing . Answer < java . lang . String > ( ) { @ org . pentaho . di . repository . kdr . delegates . Override public java . lang . String answer ( org . mockito . invocation . InvocationOnMock invocationOnMock ) throws java . lang . Throwable { return "QUOTE_" + ( java . lang . String . valueOf ( ( ( invocationOnMock . getArguments ( ) [ 0 ] ) + "_QUOTE" ) ) ) ; } } ) ; when ( databaseMeta . getQuotedSchemaTableCombination ( anyString ( ) , anyString ( ) ) ) . thenAnswer ( new org . mockito . stubbing . Answer < java . lang . String > ( ) { @ org . pentaho . di . repository . kdr . delegates . Override public java . lang . String answer ( org . mockito . invocation . InvocationOnMock invocationOnMock ) throws java . lang . Throwable { return ( ( "QUOTE_" + ( java . lang . String . valueOf ( invocationOnMock . getArguments ( ) [ 0 ] ) ) ) + "____" ) + ( java . lang . String . valueOf ( ( ( invocationOnMock . getArguments ( ) [ 1 ] ) + "_QUOTE" ) ) ) ; } } ) ; when ( connectionDelegate . getDatabaseMeta ( ) ) . thenReturn ( databaseMeta ) ; org . pentaho . di . repository . kdr . delegates . KettleDatabaseRepositoryStepDelegate kettleDatabaseRepositoryStepDelegate = new org . pentaho . di . repository . kdr . delegates . KettleDatabaseRepositoryStepDelegate ( repository ) ; java . util . Map map = mock ( java . util . Map . class ) ; when ( connectionDelegate . getValueToIdMap ( kettleDatabaseRepositoryStepDelegate . quoteTable ( KettleDatabaseRepository . TABLE_R_STEP_TYPE ) , kettleDatabaseRepositoryStepDelegate . quote ( KettleDatabaseRepository . FIELD_STEP_TYPE_ID_STEP_TYPE ) , kettleDatabaseRepositoryStepDelegate . quote ( KettleDatabaseRepository . FIELD_STEP_TYPE_CODE ) ) ) . thenReturn ( map ) ; "<AssertPlaceHolder>" ; } getStepTypeCodeToIdMap ( ) { return repository . connectionDelegate . getValueToIdMap ( quoteTable ( KettleDatabaseRepository . TABLE_R_STEP_TYPE ) , quote ( KettleDatabaseRepository . FIELD_STEP_TYPE_ID_STEP_TYPE ) , quote ( KettleDatabaseRepository . FIELD_STEP_TYPE_CODE ) ) ; }
org . junit . Assert . assertEquals ( map , kettleDatabaseRepositoryStepDelegate . getStepTypeCodeToIdMap ( ) )
hasElapsedIfCurrentClockReadingPartiallyExceedsTargetRange ( ) { expect ( mockClock . getGranularity ( ) ) . andReturn ( 1L ) . anyTimes ( ) ; expect ( mockClock . getNanoTime ( ) ) . andReturn ( new org . fishwife . jrugged . interval . DiscreteInterval ( 4L , 5L ) ) ; expect ( mockClock . getNanoTime ( ) ) . andReturn ( new org . fishwife . jrugged . interval . DiscreteInterval ( 1015L , 2000L ) ) . anyTimes ( ) ; replay ( mockClock ) ; impl . set ( 1000L , 100L ) ; impl . start ( ) ; "<AssertPlaceHolder>" ; verify ( mockClock ) ; } hasElapsed ( ) { if ( ( startTime ) == null ) return false ; return ( clock . getNanoTime ( ) . getMin ( ) ) >= ( targetEndTime . getMin ( ) ) ; }
org . junit . Assert . assertTrue ( impl . hasElapsed ( ) )
shouldAcceptValidPutHttpTimeout ( ) { hm = org . openstack . atlas . api . validation . validators . HealthMonitorValidatorTest . whenValidatingPut . initHealthMonitor ( null , org . openstack . atlas . api . validation . validators . HTTP , null , "30" , null , null , null , null ) ; result = hmv . validate ( hm , org . openstack . atlas . api . validation . validators . PUT ) ; "<AssertPlaceHolder>" ; } resultMessage ( org . openstack . atlas . api . validation . results . ValidatorResult , java . lang . Enum ) { java . lang . StringBuilder sb = new java . lang . StringBuilder ( ) ; if ( ! ( result . passedValidation ( ) ) ) { java . util . List < org . openstack . atlas . api . validation . results . ExpectationResult > ers = result . getValidationResults ( ) ; sb . append ( java . lang . String . format ( "ON<sp>%s<sp>result.withMessage([" , ctx . toString ( ) ) ) ; for ( org . openstack . atlas . api . validation . results . ExpectationResult er : ers ) { sb . append ( java . lang . String . format ( "%s" , er . getMessage ( ) ) ) ; sb . append ( "])" ) ; } } else { sb . append ( java . lang . String . format ( "On<sp>%s<sp>All<sp>Expectations<sp>PASSED\n" , ctx . toString ( ) ) ) ; } return sb . toString ( ) ; }
org . junit . Assert . assertTrue ( resultMessage ( result , org . openstack . atlas . api . validation . validators . PUT ) , result . passedValidation ( ) )
testLoadApplicationTemplate_invalidDirectory ( ) { java . io . File source = net . roboconf . core . internal . tests . TestUtils . findApplicationDirectory ( "lamp" ) ; "<AssertPlaceHolder>" ; java . io . File apps = new java . io . File ( this . dmDirectory , net . roboconf . dm . internal . utils . ConfigurationUtils . TEMPLATES ) ; java . io . File target = new java . io . File ( apps , "Legacy<sp>LAMP<sp>-<sp>1.0.1-SNAPSHOT/sub/dir" ) ; net . roboconf . core . utils . Utils . copyDirectory ( source , target ) ; this . mngr . loadApplicationTemplate ( target ) ; } findApplicationDirectory ( java . lang . String ) { java . lang . String suffix = "core/roboconf-core/src/test/resources/applications/" + appName ; java . io . File result = new java . io . File ( ( "../../" + suffix ) ) . getCanonicalFile ( ) ; return result ; }
org . junit . Assert . assertTrue ( source . exists ( ) )
testGetPath ( ) { unitD . setParent ( unitC ) ; unitC . setParent ( unitB ) ; unitB . setParent ( unitA ) ; java . lang . String expected = "/uidA/uidB/uidC/uidD" ; "<AssertPlaceHolder>" ; } getPath ( ) { return path ; }
org . junit . Assert . assertEquals ( expected , unitD . getPath ( ) )
updateUser_setsAttributes ( ) { when ( userRepository . save ( userA ) ) . thenReturn ( userA ) ; when ( passwordEncoder . encode ( edu . zipcloud . cloudstreetmarket . core . services . CommunityServiceImplTest . USER_A_PASSWORD ) ) . thenReturn ( edu . zipcloud . cloudstreetmarket . core . services . CommunityServiceImplTest . USER_A_PASSWORD_ENCODED ) ; when ( securityContext . getAuthentication ( ) ) . thenReturn ( authentication ) ; when ( authentication . getPrincipal ( ) ) . thenReturn ( userA ) ; edu . zipcloud . cloudstreetmarket . core . entities . User user = communityServiceImpl . updateUser ( userA ) ; "<AssertPlaceHolder>" ; verify ( passwordEncoder , times ( 1 ) ) . encode ( edu . zipcloud . cloudstreetmarket . core . services . CommunityServiceImplTest . USER_A_PASSWORD ) ; } getPassword ( ) { return password ; }
org . junit . Assert . assertEquals ( edu . zipcloud . cloudstreetmarket . core . services . CommunityServiceImplTest . USER_A_PASSWORD_ENCODED , user . getPassword ( ) )
saveRetryNew ( ) { com . psddev . dari . h2 . WriteModel model1 = new com . psddev . dari . h2 . WriteModel ( ) ; model1 . save ( ) ; com . psddev . dari . h2 . WriteModel model2 = new com . psddev . dari . h2 . WriteModel ( ) ; model2 . getState ( ) . setId ( model1 . getId ( ) ) ; model2 . save ( ) ; "<AssertPlaceHolder>" ; } from ( java . lang . Class ) { return new com . psddev . dari . db . Query < T > ( ( objectClass != null ? objectClass . getName ( ) : null ) , objectClass ) ; }
org . junit . Assert . assertThat ( com . psddev . dari . db . Query . from ( com . psddev . dari . h2 . WriteModel . class ) . first ( ) , is ( model1 ) )
testToString1 ( ) { org . eclipse . kura . core . net . WifiInterfaceConfigImpl config = createConfig ( 0 ) ; java . lang . String expected = "name=wifiInterface<sp>::<sp>loopback=false<sp>::<sp>pointToPoint=false<sp>::<sp>virtual=false" + ( ( "<sp>::<sp>supportsMulticast=false<sp>::<sp>up=false<sp>::<sp>mtu=0<sp>::<sp>driver=null<sp>::<sp>driverVersion=null" + "<sp>::<sp>firmwareVersion=null<sp>::<sp>state=null<sp>::<sp>autoConnect=false" ) + "<sp>::<sp>InterfaceAddress=NetConfig:<sp>no<sp>configurations<sp>::<sp>capabilities=null" ) ; "<AssertPlaceHolder>" ; } toString ( ) { return ( ( ( ( ( "ComponentConfigurationImpl<sp>[pid=" + ( pid ) ) + ",<sp>definition=" ) + ( definition ) ) + ",<sp>properties=" ) + ( properties ) ) + "]" ; }
org . junit . Assert . assertEquals ( expected , config . toString ( ) )
testStridedExp ( ) { org . nd4j . linalg . api . ops . executioner . OpExecutioner opExecutioner = org . nd4j . linalg . factory . Nd4j . getExecutioner ( ) ; org . nd4j . linalg . api . ndarray . INDArray arr = org . nd4j . linalg . factory . Nd4j . linspace ( 1 , 6 , 6 , DataType . DOUBLE ) . reshape ( 2 , 3 ) ; org . nd4j . linalg . api . ndarray . INDArray slice = arr . slice ( 0 ) ; lombok . val expected = new double [ ( ( int ) ( slice . length ( ) ) ) ] ; for ( int i = 0 ; i < ( slice . length ( ) ) ; i ++ ) expected [ i ] = ( ( float ) ( java . lang . Math . exp ( slice . getDouble ( i ) ) ) ) ; org . nd4j . linalg . api . ops . impl . transforms . strict . Exp exp = new org . nd4j . linalg . api . ops . impl . transforms . strict . Exp ( slice ) ; opExecutioner . exec ( exp ) ; "<AssertPlaceHolder>" ; } getFailureMessage ( ) { return ( ( "Failed<sp>with<sp>backend<sp>" + ( backend . getClass ( ) . getName ( ) ) ) + "<sp>and<sp>ordering<sp>" ) + ( ordering ( ) ) ; }
org . junit . Assert . assertEquals ( getFailureMessage ( ) , org . nd4j . linalg . factory . Nd4j . create ( expected ) , slice )
doTest ( ) { java . util . List < com . github . drinkjava2 . test . ActiveEntityTest > userList = new com . github . drinkjava2 . test . ActiveEntityTest ( ) . selectUsers ( 50 ) ; "<AssertPlaceHolder>" ; } size ( ) { int size = 0 ; for ( com . github . drinkjava2 . jdialects . springsrc . utils . ConcurrentReferenceHashMap < K , V > . Segment segment : this . segments ) { size += segment . getCount ( ) ; } return size ; }
org . junit . Assert . assertEquals ( 50 , userList . size ( ) )
true ( ) { javax . ws . rs . core . Response . ResponseBuilder rb = javax . ws . rs . core . Response . status ( Status . FOUND ) . type ( MediaType . APPLICATION_JSON_TYPE ) ; rb . header ( HttpHeaders . LOCATION , ( ( ( ( ( ( ( ( ( ( ( ( com . fujitsu . dc . test . unit . core . UrlUtils . cellRoot ( "authz" ) ) + "#" ) + ( OAuth2Helper . Key . ACCESS_TOKEN ) ) + "=tokenstr&" ) + ( OAuth2Helper . Key . TOKEN_TYPE ) ) + "=" ) + ( OAuth2Helper . Scheme . BEARER ) ) + "&" ) + ( OAuth2Helper . Key . EXPIRES_IN ) ) + "=9999&" ) + ( OAuth2Helper . Key . STATE ) ) + "=State" ) ) ; javax . ws . rs . core . Response res = rb . entity ( "" ) . build ( ) ; com . fujitsu . dc . test . unit . core . auth . AuthzTest . AuthzEndPointResourceMock authz = new com . fujitsu . dc . test . unit . core . auth . AuthzTest . AuthzEndPointResourceMock ( null , null ) ; "<AssertPlaceHolder>" ; } isSuccessAuthorization ( javax . ws . rs . core . Response ) { if ( ( Status . FOUND . getStatusCode ( ) ) != ( response . getStatus ( ) ) ) { return false ; } java . lang . String locationStr = ( ( java . lang . String ) ( response . getMetadata ( ) . getFirst ( HttpHeaders . LOCATION ) ) ) ; try { java . net . URI uri = new java . net . URI ( locationStr ) ; java . lang . String fragment = uri . getFragment ( ) ; if ( null == fragment ) { return false ; } if ( ( ( ( ( fragment . indexOf ( ( ( com . fujitsu . dc . core . auth . OAuth2Helper . Key . ERROR ) + "=" ) ) ) >= 0 ) && ( ( fragment . indexOf ( ( ( com . fujitsu . dc . core . auth . OAuth2Helper . Key . ERROR_DESCRIPTION ) + "=" ) ) ) >= 0 ) ) && ( ( fragment . indexOf ( ( ( com . fujitsu . dc . core . auth . OAuth2Helper . Key . STATE ) + "=" ) ) ) >= 0 ) ) && ( ( fragment . indexOf ( ( ( com . fujitsu . dc . core . auth . OAuth2Helper . Key . CODE ) + "=" ) ) ) >= 0 ) ) { return false ; } } catch ( java . net . URISyntaxException e ) { return false ; } return true ; }
org . junit . Assert . assertTrue ( authz . isSuccessAuthorization ( res ) )
testRewriteWithSimplePattern ( ) { java . lang . String pattern = ( org . pentaho . platform . plugin . action . jfreereport . helper . PentahoURLRewriterTest . TEST_SRC ) + "/solution/test/reporting/system" ; org . pentaho . platform . plugin . action . jfreereport . helper . PentahoURLRewriter rewriter = new org . pentaho . platform . plugin . action . jfreereport . helper . PentahoURLRewriter ( pattern ) ; java . io . File dataDirectory = new java . io . File ( ( ( org . pentaho . platform . plugin . action . jfreereport . helper . PentahoURLRewriterTest . TEST_SRC ) + "/solution/test/reporting/" ) ) ; org . pentaho . reporting . libraries . repository . file . FileRepository dataRepository = new org . pentaho . reporting . libraries . repository . file . FileRepository ( dataDirectory ) ; java . io . File contentEntryBackend = new java . io . File ( ( ( org . pentaho . platform . plugin . action . jfreereport . helper . PentahoURLRewriterTest . TEST_SRC ) + "/solution/test/reporting/contentEntryBackend" ) ) ; java . io . File dataEntityBackend = new java . io . File ( ( ( org . pentaho . platform . plugin . action . jfreereport . helper . PentahoURLRewriterTest . TEST_SRC ) + "/solution/test/reporting/dataEntityBackend" ) ) ; org . pentaho . reporting . libraries . repository . ContentEntity contentEntry = new org . pentaho . reporting . libraries . repository . file . FileContentItem ( dataRepository . getRoot ( ) , contentEntryBackend ) ; org . pentaho . reporting . libraries . repository . ContentEntity dataEntity = new org . pentaho . reporting . libraries . repository . file . FileContentItem ( dataRepository . getRoot ( ) , dataEntityBackend ) ; java . lang . String result = rewriter . rewrite ( contentEntry , dataEntity ) ; "<AssertPlaceHolder>" ; } rewrite ( org . pentaho . reporting . libraries . repository . ContentEntity , org . pentaho . reporting . libraries . repository . ContentEntity ) { try { final java . util . ArrayList < java . lang . String > entityNames = new java . util . ArrayList < java . lang . String > ( ) ; entityNames . add ( dataEntity . getName ( ) ) ; org . pentaho . reporting . libraries . repository . ContentLocation location = dataEntity . getParent ( ) ; while ( location != null ) { entityNames . add ( location . getName ( ) ) ; location = location . getParent ( ) ; } final java . util . ArrayList < java . lang . String > contentNames = new java . util . ArrayList < java . lang . String > ( ) ; location = dataEntity . getRepository ( ) . getRoot ( ) ; while ( location != null ) { contentNames . add ( location . getName ( ) ) ; location = location . getParent ( ) ; } while ( ( ( contentNames . isEmpty ( ) ) == false ) && ( ( entityNames . isEmpty ( ) ) == false ) ) { final java . lang . String lastEntity = ( ( java . lang . String ) ( entityNames . get ( ( ( entityNames . size ( ) ) - 1 ) ) ) ) ; final java . lang . String lastContent = ( ( java . lang . String ) ( contentNames . get ( ( ( contentNames . size ( ) ) - 1 ) ) ) ) ; if ( ( lastContent . equals ( lastEntity ) ) == false ) { break ; } entityNames . remove ( ( ( entityNames . size ( ) ) - 1 ) ) ; contentNames . remove ( ( ( contentNames . size ( ) ) - 1 ) ) ; } final java . lang . StringBuffer b = new java . lang . StringBuffer ( ) ; for ( int i = ( entityNames . size ( ) ) - 1 ; i >= 0 ; i -- ) { final java . lang . String name = ( ( java . lang . String ) ( entityNames . get ( i ) ) ) ; b . append ( name ) ; if ( i != 0 ) { b . append ( "/" ) ; } } if ( ( pattern ) == null ) { return b . toString ( ) ; } return java . text . MessageFormat . format ( pattern , new java . lang . Object [ ] { b . toString ( ) } ) ; } catch ( org . pentaho . reporting . libraries . repository . ContentIOException cioe ) { throw new org . pentaho . reporting . engine . classic . core . modules . output . table . html . URLRewriteException ( ) ; } }
org . junit . Assert . assertEquals ( pattern , result )
keySet_extras ( ) { com . psddev . dari . util . Set < java . lang . String > expect = new com . psddev . dari . util . HashMap < java . lang . String , java . lang . Object > ( ) { { put ( "field_pub" , "pub<sp>field<sp>value" ) ; put ( "field_priv" , "priv<sp>field<sp>value" ) ; put ( "field_int" , new java . lang . Integer ( 1000 ) ) ; put ( "putfield" , "put<sp>value" ) ; } } . keySet ( ) ; com . psddev . dari . util . ObjectMap objmap = new com . psddev . dari . util . ObjectMap ( new com . psddev . dari . util . ObjectMapTest . ObjEntrySet ( ) ) ; objmap . put ( "putfield" , "put<sp>value" ) ; "<AssertPlaceHolder>" ; } keySet ( ) { return rawValues . keySet ( ) ; }
org . junit . Assert . assertEquals ( expect , objmap . keySet ( ) )
gcLotsOfCandidatesIT ( ) { killMacGc ( ) ; log . info ( "Filling<sp>metadata<sp>table<sp>with<sp>bogus<sp>delete<sp>flags" ) ; try ( org . apache . accumulo . core . client . AccumuloClient c = org . apache . accumulo . core . client . Accumulo . newClient ( ) . from ( getClientProperties ( ) ) . build ( ) ) { org . apache . accumulo . test . functional . GarbageCollectorIT . addEntries ( c ) ; cluster . getConfig ( ) . setDefaultMemory ( 10 , MemoryUnit . MEGABYTE ) ; org . apache . accumulo . miniclusterImpl . MiniAccumuloClusterImpl . ProcessInfo gc = cluster . exec ( org . apache . accumulo . gc . SimpleGarbageCollector . class ) ; sleepUninterruptibly ( 20 , TimeUnit . SECONDS ) ; java . lang . String output = "" ; while ( ! ( output . contains ( "delete<sp>candidates<sp>has<sp>exceeded" ) ) ) { try { output = gc . readStdOut ( ) ; } catch ( java . io . UncheckedIOException ex ) { break ; } } gc . getProcess ( ) . destroy ( ) ; "<AssertPlaceHolder>" ; } } contains ( java . lang . Object ) { return true ; }
org . junit . Assert . assertTrue ( output . contains ( "delete<sp>candidates<sp>has<sp>exceeded" ) )
testTwoIndexFilter1 ( ) { org . eclipse . rdf4j . query . parser . sparql . SPARQLParser parser = new org . eclipse . rdf4j . query . parser . sparql . SPARQLParser ( ) ; org . eclipse . rdf4j . query . parser . ParsedQuery pq1 = parser . parseQuery ( q15 , null ) ; org . eclipse . rdf4j . query . parser . ParsedQuery pq2 = parser . parseQuery ( q16 , null ) ; org . eclipse . rdf4j . query . parser . ParsedQuery pq3 = parser . parseQuery ( q17 , null ) ; System . out . println ( ( "Query<sp>is<sp>" + ( pq1 . getTupleExpr ( ) ) ) ) ; org . apache . rya . indexing . external . tupleSet . SimpleExternalTupleSet extTup1 = new org . apache . rya . indexing . external . tupleSet . SimpleExternalTupleSet ( ( ( org . eclipse . rdf4j . query . algebra . Projection ) ( pq2 . getTupleExpr ( ) ) ) ) ; org . apache . rya . indexing . external . tupleSet . SimpleExternalTupleSet extTup2 = new org . apache . rya . indexing . external . tupleSet . SimpleExternalTupleSet ( ( ( org . eclipse . rdf4j . query . algebra . Projection ) ( pq3 . getTupleExpr ( ) ) ) ) ; java . util . List < org . apache . rya . indexing . external . tupleSet . ExternalTupleSet > list = new java . util . ArrayList < org . apache . rya . indexing . external . tupleSet . ExternalTupleSet > ( ) ; list . add ( extTup1 ) ; list . add ( extTup2 ) ; org . apache . rya . indexing . IndexPlanValidator . VarConstantIndexListPruner vci = new org . apache . rya . indexing . IndexPlanValidator . VarConstantIndexListPruner ( pq1 . getTupleExpr ( ) ) ; java . util . List < org . apache . rya . indexing . external . tupleSet . ExternalTupleSet > processedIndexSet = vci . getRelevantIndices ( list ) ; System . out . println ( "Relevant<sp>indexes<sp>are:<sp>" ) ; for ( org . apache . rya . indexing . external . tupleSet . ExternalTupleSet e : processedIndexSet ) { System . out . println ( e ) ; } java . util . Set < org . apache . rya . indexing . external . tupleSet . ExternalTupleSet > indexSet = com . google . common . collect . Sets . newHashSet ( ) ; indexSet . add ( extTup2 ) ; "<AssertPlaceHolder>" ; } intersection ( com . spatial4j . core . shape . Shape , com . spatial4j . core . shape . Shape ) { return context . makeShape ( context . getGeometryFrom ( s1 ) . intersection ( context . getGeometryFrom ( s2 ) ) ) ; }
org . junit . Assert . assertTrue ( com . google . common . collect . Sets . intersection ( indexSet , com . google . common . collect . Sets . newHashSet ( processedIndexSet ) ) . equals ( com . google . common . collect . Sets . newHashSet ( processedIndexSet ) ) )
getOrderHistoryByConcept_shouldReturnEmptyListForConceptWithoutOrders ( ) { org . openmrs . Concept concept = org . openmrs . api . context . Context . getConceptService ( ) . getConcept ( 21 ) ; org . openmrs . Patient patient = org . openmrs . api . context . Context . getPatientService ( ) . getPatient ( 2 ) ; java . util . List < org . openmrs . Order > orders = orderService . getOrderHistoryByConcept ( patient , concept ) ; "<AssertPlaceHolder>" ; } size ( ) { return getMemberships ( ) . stream ( ) . filter ( ( m ) -> ! ( m . getVoided ( ) ) ) . collect ( java . util . stream . Collectors . toList ( ) ) . size ( ) ; }
org . junit . Assert . assertEquals ( 0 , orders . size ( ) )
shouldReturnEmptyQueryOnEmptyListOfSrandmember ( ) { when ( localParamsMock . get ( "command" ) ) . thenReturn ( "srandmember" ) ; when ( localParamsMock . get ( "key" ) ) . thenReturn ( "simpleKey" ) ; when ( localParamsMock . get ( QueryParsing . V ) ) . thenReturn ( "string_field" ) ; when ( jedisMock . srandmember ( anyString ( ) , anyInt ( ) ) ) . thenReturn ( new com . sematext . solr . redis . ArrayList < java . lang . String > ( ) ) ; when ( requestMock . getSchema ( ) ) . thenReturn ( schema ) ; when ( schema . getQueryAnalyzer ( ) ) . thenReturn ( new org . apache . lucene . analysis . standard . StandardAnalyzer ( ) ) ; redisQParser = new com . sematext . solr . redis . RedisQParser ( "string_field" , localParamsMock , paramsMock , requestMock , commandHandler ) ; final org . apache . lucene . search . Query query = redisQParser . parse ( ) ; verify ( jedisMock ) . srandmember ( "simpleKey" , 1 ) ; org . apache . lucene . search . IndexSearcher searcher = new org . apache . lucene . search . IndexSearcher ( new org . apache . lucene . index . MultiReader ( ) ) ; final com . sematext . solr . redis . Set < org . apache . lucene . index . Term > terms = com . sematext . solr . redis . TestRedisQParser . extractTerms ( searcher , query ) ; "<AssertPlaceHolder>" ; } extractTerms ( org . apache . lucene . search . IndexSearcher , org . apache . lucene . search . Query ) { final com . sematext . solr . redis . Set < org . apache . lucene . index . Term > terms = new com . sematext . solr . redis . HashSet ( ) ; org . apache . lucene . search . Query rewrittenQuery = searcher . rewrite ( query ) ; if ( rewrittenQuery instanceof org . apache . lucene . search . ConstantScoreQuery ) { org . apache . lucene . search . ConstantScoreQuery constantScoreQuery = ( ( org . apache . lucene . search . ConstantScoreQuery ) ( rewrittenQuery ) ) ; rewrittenQuery = constantScoreQuery . getQuery ( ) ; } searcher . createNormalizedWeight ( rewrittenQuery , true ) . extractTerms ( terms ) ; return terms ; }
org . junit . Assert . assertEquals ( 0 , terms . size ( ) )
testCustomPartitioningErased ( ) { org . apache . flink . api . common . operators . SingleInputSemanticProperties sProp = new org . apache . flink . api . common . operators . SingleInputSemanticProperties ( ) ; org . apache . flink . api . java . functions . SemanticPropUtil . getSemanticPropsSingleFromString ( sProp , new java . lang . String [ ] { "0;1;2" } , null , null , tupleInfo , tupleInfo ) ; org . apache . flink . optimizer . dataproperties . RequestedGlobalProperties rgProps = new org . apache . flink . optimizer . dataproperties . RequestedGlobalProperties ( ) ; rgProps . setCustomPartitioned ( new org . apache . flink . api . common . operators . util . FieldSet ( 0 , 1 , 2 ) , new org . apache . flink . optimizer . dataproperties . MockPartitioner ( ) ) ; org . apache . flink . optimizer . dataproperties . RequestedGlobalProperties filtered = rgProps . filterBySemanticProperties ( sProp , 0 ) ; "<AssertPlaceHolder>" ; } filterBySemanticProperties ( org . apache . flink . api . common . operators . SemanticProperties , int ) { if ( props == null ) { throw new java . lang . NullPointerException ( "SemanticProperties<sp>may<sp>not<sp>be<sp>null." ) ; } org . apache . flink . optimizer . dataproperties . RequestedGlobalProperties rgProp = new org . apache . flink . optimizer . dataproperties . RequestedGlobalProperties ( ) ; switch ( this . partitioning ) { case FULL_REPLICATION : case FORCED_REBALANCED : case CUSTOM_PARTITIONING : case RANDOM_PARTITIONED : case ANY_DISTRIBUTION : return null ; case HASH_PARTITIONED : case ANY_PARTITIONING : org . apache . flink . api . common . operators . util . FieldSet newFields ; if ( ( this . partitioningFields ) instanceof org . apache . flink . api . common . operators . util . FieldList ) { newFields = new org . apache . flink . api . common . operators . util . FieldList ( ) ; } else { newFields = new org . apache . flink . api . common . operators . util . FieldSet ( ) ; } for ( java . lang . Integer targetField : this . partitioningFields ) { int sourceField = props . getForwardingSourceField ( input , targetField ) ; if ( sourceField >= 0 ) { newFields = newFields . addField ( sourceField ) ; } else { return null ; } } rgProp . partitioning = this . partitioning ; rgProp . partitioningFields = newFields ; return rgProp ; case RANGE_PARTITIONED : org . apache . flink . api . common . operators . Ordering newOrdering = new org . apache . flink . api . common . operators . Ordering ( ) ; for ( int i = 0 ; i < ( this . ordering . getInvolvedIndexes ( ) . size ( ) ) ; i ++ ) { int value = this . ordering . getInvolvedIndexes ( ) . get ( i ) ; int sourceField = props . getForwardingSourceField ( input , value ) ; if ( sourceField >= 0 ) { newOrdering . appendOrdering ( sourceField , this . ordering . getType ( i ) , this . ordering . getOrder ( i ) ) ; } else { return null ; } } rgProp . partitioning = this . partitioning ; rgProp . ordering = newOrdering ; rgProp . dataDistribution = this . dataDistribution ; return rgProp ; default : throw new java . lang . RuntimeException ( "Unknown<sp>partitioning<sp>type<sp>encountered." ) ; } }
org . junit . Assert . assertNull ( filtered )
filterCustomerConfiguration_Change ( ) { org . oscm . internal . vo . VOOrganizationPaymentConfiguration conf = org . oscm . accountservice . bean . PaymentConfigurationFilterTest . createCustomerConfiguration ( customer . getOrganizationId ( ) , PaymentType . CREDIT_CARD ) ; java . util . List < org . oscm . internal . vo . VOOrganizationPaymentConfiguration > input = java . util . Arrays . asList ( conf ) ; java . util . List < org . oscm . internal . vo . VOOrganizationPaymentConfiguration > result = pcf . filterCustomerConfiguration ( input ) ; "<AssertPlaceHolder>" ; } filterCustomerConfiguration ( java . util . List ) { java . util . List < org . oscm . internal . vo . VOOrganizationPaymentConfiguration > result = new java . util . ArrayList < org . oscm . internal . vo . VOOrganizationPaymentConfiguration > ( ) ; if ( conf == null ) { return result ; } org . oscm . domobjects . Organization vendor = ds . getCurrentUser ( ) . getOrganization ( ) ; for ( org . oscm . internal . vo . VOOrganizationPaymentConfiguration c : conf ) { org . oscm . domobjects . Organization org = new org . oscm . domobjects . Organization ( ) ; org . setOrganizationId ( c . getOrganization ( ) . getOrganizationId ( ) ) ; org = ( ( org . oscm . domobjects . Organization ) ( ds . getReferenceByBusinessKey ( org ) ) ) ; final org . oscm . domobjects . OrganizationReference ref = checkSellerRelationship ( vendor , org ) ; if ( customerConfigurationChanged ( c , ref ) ) { result . add ( c ) ; } } return result ; }
org . junit . Assert . assertEquals ( input , result )
getRemoteAddrIgnoresXForwardHeader ( ) { ninja . utils . AbstractContextImpl context = spy ( abstractContext ) ; when ( ninjaProperties . getBooleanWithDefault ( Context . NINJA_PROPERTIES_X_FORWARDED_FOR , false ) ) . thenReturn ( Boolean . FALSE ) ; doReturn ( "1.1.1.1" ) . when ( context ) . getRealRemoteAddr ( ) ; doReturn ( "2.2.2.2" ) . when ( context ) . getHeader ( Context . X_FORWARD_HEADER ) ; "<AssertPlaceHolder>" ; } getRemoteAddr ( ) { return wrapped . getRemoteAddr ( ) ; }
org . junit . Assert . assertThat ( context . getRemoteAddr ( ) , org . hamcrest . CoreMatchers . is ( "1.1.1.1" ) )
testAdd ( ) { org . kocakosm . pitaya . collection . Bag < java . lang . String > bag = new org . kocakosm . pitaya . collection . HashBag < java . lang . String > ( ) ; bag . add ( "Hello" ) ; "<AssertPlaceHolder>" ; } contains ( java . lang . Object ) { return entries . contains ( o ) ; }
org . junit . Assert . assertTrue ( bag . contains ( "Hello" ) )
testMin01 ( ) { javax . el . ELProcessor processor = new javax . el . ELProcessor ( ) ; java . lang . Object result = processor . getValue ( "[1,2,3,4,5].stream().min()" , java . lang . Object . class ) ; "<AssertPlaceHolder>" ; } get ( ) { return connectionList ; }
org . junit . Assert . assertEquals ( java . lang . Long . valueOf ( 1 ) , ( ( org . apache . el . stream . Optional ) ( result ) ) . get ( ) )
createWithCollectionContentsCanRetrieveContents ( ) { java . util . List < java . lang . String > result = createStringListResult ( ) ; for ( int i = 0 ; i < ( expectedStrings . length ) ; ++ i ) { "<AssertPlaceHolder>" ; } } get ( com . microsoft . windowsazure . services . media . entityoperations . EntityGetOperation ) { try { return service . get ( getter ) ; } catch ( com . sun . jersey . api . client . UniformInterfaceException e ) { throw processCatch ( new com . microsoft . windowsazure . exception . ServiceException ( e ) ) ; } catch ( com . sun . jersey . api . client . ClientHandlerException e ) { throw processCatch ( new com . microsoft . windowsazure . exception . ServiceException ( e ) ) ; } }
org . junit . Assert . assertEquals ( expectedStrings [ i ] , result . get ( i ) )
testCanSerialiseEdgeId ( ) { final uk . gov . gchq . gaffer . data . element . id . EdgeId edgeId = new uk . gov . gchq . gaffer . operation . data . EdgeSeed ( "source" , "destination" , true ) ; final byte [ ] serialisedEdgeId = serialiser . serialise ( edgeId ) ; final uk . gov . gchq . gaffer . data . element . id . ElementId deserialisedEdgeId = serialiser . deserialise ( serialisedEdgeId ) ; "<AssertPlaceHolder>" ; } deserialise ( byte [ ] ) { final int [ ] lastDelimiter = new int [ ] { 0 } ; final java . lang . String group = uk . gov . gchq . gaffer . serialisation . util . LengthValueBytesSerialiserUtil . deserialise ( stringSerialiser , bytes , lastDelimiter ) ; if ( group . isEmpty ( ) ) { throw new java . lang . IllegalArgumentException ( ( "Group<sp>is<sp>required<sp>for<sp>deserialising<sp>" + ( uk . gov . gchq . gaffer . data . element . GroupedProperties . class . getSimpleName ( ) ) ) ) ; } final uk . gov . gchq . gaffer . store . schema . SchemaElementDefinition elementDefinition = schema . getElement ( group ) ; if ( null == elementDefinition ) { throw new uk . gov . gchq . gaffer . exception . SerialisationException ( ( ( "No<sp>SchemaElementDefinition<sp>found<sp>for<sp>group<sp>" + group ) + ",<sp>is<sp>this<sp>group<sp>in<sp>your<sp>schema?" ) ) ; } final uk . gov . gchq . gaffer . data . element . GroupedProperties properties = new uk . gov . gchq . gaffer . data . element . GroupedProperties ( group ) ; deserialiseProperties ( bytes , properties , elementDefinition , lastDelimiter ) ; return properties ; }
org . junit . Assert . assertEquals ( edgeId , deserialisedEdgeId )
testConvertFromAvroBoolean ( ) { org . talend . components . common . config . jdbc . TalendType expectedType = TalendType . BOOLEAN ; org . apache . avro . Schema fieldSchema = org . talend . daikon . avro . AvroUtils . _boolean ( ) ; "<AssertPlaceHolder>" ; } convertFromAvro ( org . apache . avro . Schema ) { org . apache . avro . Schema type = org . talend . daikon . avro . AvroUtils . unwrapIfNullable ( avroType ) ; java . lang . String logicalType = org . talend . daikon . avro . LogicalTypeUtils . getLogicalTypeName ( type ) ; if ( logicalType != null ) { return org . talend . components . common . config . jdbc . TalendType . getTalendByLogicalType ( logicalType ) ; } java . lang . String javaClass = type . getProp ( SchemaConstants . JAVA_CLASS_FLAG ) ; if ( javaClass != null ) { return org . talend . components . common . config . jdbc . TalendType . getTalendByJavaClass ( javaClass ) ; } return org . talend . components . common . config . jdbc . TalendType . getTalendByAvroType ( type . getType ( ) ) ; }
org . junit . Assert . assertEquals ( expectedType , org . talend . components . common . config . jdbc . TalendType . convertFromAvro ( fieldSchema ) )
testSortInPlaceRanges ( ) { java . util . List < org . jaitools . numeric . Range < java . lang . Integer > > sorted = org . jaitools . CollectionFactory . list ( ) ; sorted . add ( org . jaitools . numeric . Range . create ( null , false , 5 , true ) ) ; sorted . add ( org . jaitools . numeric . Range . create ( ( - 10 ) , true , 5 , true ) ) ; sorted . add ( org . jaitools . numeric . Range . create ( 0 ) ) ; sorted . add ( org . jaitools . numeric . Range . create ( 5 , true , 10 , true ) ) ; sorted . add ( org . jaitools . numeric . Range . create ( 5 , true , null , false ) ) ; java . util . List < org . jaitools . numeric . Range < java . lang . Integer > > raw = org . jaitools . CollectionFactory . list ( ) ; for ( int i : new int [ ] { 4 , 2 , 3 , 1 , 0 } ) { raw . add ( sorted . get ( i ) ) ; } org . jaitools . numeric . RangeUtils . sortInPlace ( raw ) ; int k = 0 ; for ( org . jaitools . numeric . Range r : raw ) { "<AssertPlaceHolder>" ; } } equals ( java . lang . Object ) { if ( obj == null ) { return false ; } if ( ( getClass ( ) ) != ( obj . getClass ( ) ) ) { return false ; } final org . jaitools . imageutils . FloodFiller . ScanSegment other = ( ( org . jaitools . imageutils . FloodFiller . ScanSegment ) ( obj ) ) ; if ( ( this . startX ) != ( other . startX ) ) { return false ; } if ( ( this . endX ) != ( other . endX ) ) { return false ; } if ( ( this . y ) != ( other . y ) ) { return false ; } return true ; }
org . junit . Assert . assertTrue ( r . equals ( sorted . get ( ( k ++ ) ) ) )
testCreate ( ) { org . oscarehr . PMmodule . model . CriteriaType entity = new org . oscarehr . PMmodule . model . CriteriaType ( ) ; org . oscarehr . common . dao . utils . EntityDataGenerator . generateTestDataForModelClass ( entity ) ; dao . persist ( entity ) ; "<AssertPlaceHolder>" ; } getId ( ) { return this . id ; }
org . junit . Assert . assertNotNull ( entity . getId ( ) )
testUsernameExists ( ) { lockedDirectory . insert ( input ) ; "<AssertPlaceHolder>" ; } exists ( java . lang . String ) { if ( ( dataSource ) == null ) { throw new java . lang . Exception ( "Should<sp>not<sp>call<sp>exists<sp>if<sp>the<sp>data<sp>source<sp>hasn't<sp>been<sp>created" ) ; } if ( ( this . status ) != ( apps . provisioning . data . UsernameCache . STATUS_READY ) ) { if ( ( this . status ) == ( apps . provisioning . data . UsernameCache . STATUS_REFRESHING ) ) { logger . log ( Level . WARNING , "Checking<sp>an<sp>out-of-date<sp>cache." ) ; } else { throw new java . lang . Exception ( ( "Trying<sp>to<sp>read<sp>the<sp>cache<sp>when<sp>it's<sp>not<sp>ready.<sp>Current<sp>status:<sp>" + ( status ) ) ) ; } } return dataSource . exists ( username ) ; }
org . junit . Assert . assertTrue ( lockedDirectory . exists ( input ) )
testAddingContainerSetsItToLast ( ) { com . picocontainer . web . chain . ContainerChain chain = new com . picocontainer . web . chain . ContainerChain ( ) ; com . picocontainer . PicoContainer container = new com . picocontainer . DefaultPicoContainer ( ) ; chain . addContainer ( container ) ; "<AssertPlaceHolder>" ; } getLast ( ) { return last ; }
org . junit . Assert . assertSame ( container , chain . getLast ( ) )
testRowNumber3OrderByCols ( ) { java . lang . String sqlText = java . lang . String . format ( ( "<sp>50<sp>|<sp>1<sp>|<sp>52000<sp>|<sp>5<sp>|\n" 2 + "FROM<sp>%s<sp>--SPLICE-PROPERTIES<sp>useSpark<sp>=<sp>%s<sp>\n<sp>" ) , this . getTableReference ( com . splicemachine . derby . impl . sql . execute . operations . WindowFunctionIT . EMPTAB ) , useSpark ) ; java . sql . ResultSet rs = com . splicemachine . derby . impl . sql . execute . operations . WindowFunctionIT . methodWatcher . executeQuery ( sqlText ) ; java . lang . String expected = "<sp>50<sp>|<sp>1<sp>|<sp>52000<sp>|<sp>5<sp>|\n" 9 + ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( "------------------------------\n" + "<sp>60<sp>|<sp>1<sp>|<sp>78000<sp>|<sp>1<sp>|\n" ) + "<sp>50<sp>|<sp>1<sp>|<sp>52000<sp>|<sp>5<sp>|\n" 8 ) + "<sp>50<sp>|<sp>1<sp>|<sp>52000<sp>|<sp>5<sp>|\n" 5 ) + "<sp>50<sp>|<sp>1<sp>|<sp>52000<sp>|<sp>5<sp>|\n" 6 ) + "<sp>50<sp>|<sp>1<sp>|<sp>52000<sp>|<sp>5<sp>|\n" ) + "<sp>50<sp>|<sp>1<sp>|<sp>52000<sp>|<sp>5<sp>|\n" 0 ) + "<sp>10<sp>|<sp>1<sp>|<sp>50000<sp>|<sp>7<sp>|\n" ) + "<sp>49<sp>|<sp>2<sp>|<sp>53000<sp>|<sp>8<sp>|\n" ) + "<sp>50<sp>|<sp>1<sp>|<sp>52000<sp>|<sp>5<sp>|\n" 3 ) + "<sp>44<sp>|<sp>2<sp>|<sp>52000<sp>|<sp>10<sp>|\n" ) + "<sp>90<sp>|<sp>2<sp>|<sp>51000<sp>|<sp>11<sp>|\n" ) + "<sp>30<sp>|<sp>3<sp>|<sp>84000<sp>|<sp>12<sp>|\n" ) + "<sp>50<sp>|<sp>1<sp>|<sp>52000<sp>|<sp>5<sp>|\n" 1 ) + "<sp>50<sp>|<sp>1<sp>|<sp>52000<sp>|<sp>5<sp>|\n" 4 ) + "<sp>100<sp>|<sp>3<sp>|<sp>55000<sp>|<sp>15<sp>|" ) ; "<AssertPlaceHolder>" ; rs . close ( ) ; } toStringUnsorted ( com . splicemachine . homeless . ResultSet ) { return com . splicemachine . homeless . TestUtils . FormattedResult . ResultFactory . convert ( "" , rs , false ) . toString ( ) . trim ( ) ; }
org . junit . Assert . assertEquals ( ( ( "<sp>50<sp>|<sp>1<sp>|<sp>52000<sp>|<sp>5<sp>|\n" 7 + sqlText ) + "<sp>50<sp>|<sp>1<sp>|<sp>52000<sp>|<sp>5<sp>|\n" 7 ) , expected , TestUtils . FormattedResult . ResultFactory . toStringUnsorted ( rs ) )
testResolveUnknownTypePermission ( ) { sonia . scm . security . RepositoryPermissionResolver resolver = new sonia . scm . security . RepositoryPermissionResolver ( ) ; sonia . scm . security . RepositoryPermission p = resolver . resolvePermission ( "repository:scm:asd" ) ; "<AssertPlaceHolder>" ; } resolvePermission ( java . lang . String ) { sonia . scm . security . RepositoryPermission permission = null ; if ( ! ( com . google . common . base . Strings . isNullOrEmpty ( permissionString ) ) ) { java . util . Iterator < java . lang . String > permissionIt = com . google . common . base . Splitter . on ( ':' ) . omitEmptyStrings ( ) . trimResults ( ) . split ( permissionString ) . iterator ( ) ; if ( permissionIt . hasNext ( ) ) { java . lang . String type = permissionIt . next ( ) ; if ( type . equals ( RepositoryPermission . TYPE ) ) { permission = createRepositoryPermission ( permissionIt ) ; } else if ( sonia . scm . security . RepositoryPermissionResolver . logger . isWarnEnabled ( ) ) { sonia . scm . security . RepositoryPermissionResolver . logger . warn ( "permission<sp>'{}'<sp>is<sp>not<sp>a<sp>repository<sp>permission" , permissionString ) ; } } } else { sonia . scm . security . RepositoryPermissionResolver . logger . warn ( "permision<sp>string<sp>is<sp>empty,<sp>could<sp>not<sp>resolve<sp>empty<sp>permission" ) ; } return permission ; }
org . junit . Assert . assertNull ( p )