input (stringlengths 28 to 18.7k)
output (stringlengths 39 to 1.69k)
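The rows below appear to be (input, output) pairs: each input is a space-tokenized JUnit test method containing the literal token "<AssertPlaceHolder>" (usually followed by the tokenized focal method under test), and the matching output is the assertion statement meant to fill that placeholder. A minimal sketch of how a consumer might recombine one pair is given here; the helper name is hypothetical and is not part of the data, and the quoting of the placeholder matches how it appears in the rows:

    // Hypothetical helper (not part of the dataset): substitute the "output"
    // assertion into the quoted <AssertPlaceHolder> token of the "input" test.
    static String fillPlaceholder(String input, String output) {
        return input.replace("\"<AssertPlaceHolder>\"", output);
    }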
testRemoteUserConverter ( ) { ch . qos . logback . access . pattern . RemoteUserConverter converter = new ch . qos . logback . access . pattern . RemoteUserConverter ( ) ; converter . start ( ) ; java . lang . String result = converter . convert ( event ) ; "<AssertPlaceHolder>" ; } getRemoteUser ( ) { if ( ( remoteUser ) == null ) { if ( ( httpRequest ) != null ) { remoteUser = httpRequest . getRemoteUser ( ) ; } else { remoteUser = NA ; } } return remoteUser ; }
org . junit . Assert . assertEquals ( request . getRemoteUser ( ) , result )
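De-tokenized, the first pair above reads roughly as the following JUnit test. This is a reconstruction sketch only: event and request are fixtures of the original test class that are not shown in the row, and the final assertion is taken verbatim from the "output" column.

    @Test
    public void testRemoteUserConverter() {
        ch.qos.logback.access.pattern.RemoteUserConverter converter =
            new ch.qos.logback.access.pattern.RemoteUserConverter();
        converter.start();
        // event and request are assumed fixtures of the surrounding test class
        String result = converter.convert(event);
        // assertion supplied by the "output" column for the <AssertPlaceHolder>
        org.junit.Assert.assertEquals(request.getRemoteUser(), result);
    }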
testParticipantForgetAndRemove ( ) { com . hp . mwtests . ts . jta . jts . tools . ObjStoreBrowserTest . HeuristicTestData hd = getHeuristic ( ) ; tryRemove ( false , false , hd ) ; "<AssertPlaceHolder>" ; } getHeuristicParticipants ( ) { java . util . Set < javax . management . ObjectName > names = com . arjuna . ats . arjuna . tools . osb . util . JMXServer . getAgent ( ) . queryNames ( resourceBeanName , null ) ; return names != null ? names : new java . util . HashSet ( ) ; }
org . junit . Assert . assertEquals ( 0 , hd . getHeuristicParticipants ( ) . size ( ) )
testOnVisitorGetDamageVoidIslandHere ( ) { when ( im . getIslandAt ( org . mockito . Mockito . any ( ) ) ) . thenReturn ( optionalIsland ) ; org . bukkit . event . entity . EntityDamageEvent e = new org . bukkit . event . entity . EntityDamageEvent ( player , org . bukkit . event . entity . EntityDamageEvent . DamageCause . VOID , 0.0 ) ; listener . onVisitorGetDamage ( e ) ; "<AssertPlaceHolder>" ; org . mockito . Mockito . verify ( player ) . setGameMode ( org . mockito . Mockito . eq ( GameMode . SPECTATOR ) ) ; } isCancelled ( ) { return cancelled ; }
org . junit . Assert . assertTrue ( e . isCancelled ( ) )
testLoadTimeStampFromNonExistentFile ( ) { com . sonyericsson . hudson . plugins . gerrit . trigger . playback . GerritMissedEventsPlaybackManager . getConfigXml ( "defaultServer" ) . delete ( ) ; com . sonyericsson . hudson . plugins . gerrit . trigger . playback . GerritMissedEventsPlaybackManager missingEventsPlaybackManager = new com . sonyericsson . hudson . plugins . gerrit . trigger . playback . GerritMissedEventsPlaybackManager ( "defaultServer" ) ; try { missingEventsPlaybackManager . load ( ) ; } catch ( java . io . IOException e ) { org . junit . Assert . fail ( e . getMessage ( ) ) ; } "<AssertPlaceHolder>" ; } load ( ) { hudson . XmlFile xml = com . sonyericsson . hudson . plugins . gerrit . trigger . playback . GerritMissedEventsPlaybackManager . getConfigXml ( serverName ) ; if ( ( xml != null ) && ( xml . exists ( ) ) ) { serverTimestamp = ( ( com . sonyericsson . hudson . plugins . gerrit . trigger . playback . EventTimeSlice ) ( xml . unmarshal ( serverTimestamp ) ) ) ; } else { serverTimestamp = null ; } }
org . junit . Assert . assertNull ( missingEventsPlaybackManager . serverTimestamp )
testGetMountFoldersCountWithHiddenRepository ( ) { long classNameId = com . liferay . portal . kernel . util . PortalUtil . getClassNameId ( com . liferay . portal . repository . liferayrepository . LiferayRepository . class ) ; com . liferay . portal . kernel . service . RepositoryLocalServiceUtil . addRepository ( com . liferay . portal . kernel . test . util . TestPropsValues . getUserId ( ) , _group . getGroupId ( ) , classNameId , DLFolderConstants . DEFAULT_PARENT_FOLDER_ID , com . liferay . portal . kernel . test . util . RandomTestUtil . randomString ( ) , com . liferay . portal . kernel . test . util . RandomTestUtil . randomString ( ) , com . liferay . portal . kernel . test . util . RandomTestUtil . randomString ( ) , new com . liferay . portal . kernel . util . UnicodeProperties ( ) , true , new com . liferay . portal . kernel . service . ServiceContext ( ) ) ; "<AssertPlaceHolder>" ; } getMountFoldersCount ( long , long ) { return com . liferay . document . library . kernel . service . DLFolderServiceUtil . getService ( ) . getMountFoldersCount ( groupId , parentFolderId ) ; }
org . junit . Assert . assertEquals ( 0 , com . liferay . document . library . kernel . service . DLFolderServiceUtil . getMountFoldersCount ( _group . getGroupId ( ) , DLFolderConstants . DEFAULT_PARENT_FOLDER_ID ) )
testVisitCubeWithRuntimeAggregates ( ) { org . apache . kylin . storage . hbase . cube . v2 . RawScan rawScan = org . apache . kylin . storage . hbase . cube . v2 . coprocessor . endpoint . CubeVisitServiceTest . mockFullScan ( org . apache . kylin . storage . hbase . cube . v2 . coprocessor . endpoint . CubeVisitServiceTest . gtInfo , getTestConfig ( ) ) ; org . apache . hadoop . hbase . CoprocessorEnvironment env = org . powermock . api . mockito . PowerMockito . mock ( org . apache . hadoop . hbase . coprocessor . RegionCoprocessorEnvironment . class ) ; org . powermock . api . mockito . PowerMockito . when ( env , "getRegion" ) . thenReturn ( org . apache . kylin . storage . hbase . cube . v2 . coprocessor . endpoint . CubeVisitServiceTest . region ) ; final org . apache . kylin . storage . hbase . cube . v2 . coprocessor . endpoint . CubeVisitService service = new org . apache . kylin . storage . hbase . cube . v2 . coprocessor . endpoint . CubeVisitService ( ) ; service . start ( env ) ; final org . apache . kylin . storage . hbase . cube . v2 . coprocessor . endpoint . generated . CubeVisitProtos . CubeVisitRequest request = org . apache . kylin . storage . hbase . cube . v2 . coprocessor . endpoint . CubeVisitServiceTest . mockScanRequestWithRuntimeAggregates ( org . apache . kylin . storage . hbase . cube . v2 . coprocessor . endpoint . CubeVisitServiceTest . gtInfo , com . google . common . collect . Lists . newArrayList ( rawScan ) ) ; com . google . protobuf . RpcCallback < org . apache . kylin . storage . hbase . cube . v2 . coprocessor . endpoint . generated . CubeVisitProtos . CubeVisitResponse > done = new com . google . protobuf . RpcCallback < org . apache . kylin . storage . hbase . cube . v2 . coprocessor . endpoint . generated . CubeVisitProtos . CubeVisitResponse > ( ) { @ org . apache . kylin . storage . hbase . cube . v2 . coprocessor . endpoint . Override public void run ( org . apache . kylin . storage . hbase . cube . v2 . coprocessor . endpoint . generated . CubeVisitProtos . CubeVisitResponse result ) { try { byte [ ] rawData = org . apache . kylin . common . util . CompressionUtils . decompress ( com . google . protobuf . HBaseZeroCopyByteString . zeroCopyGetBytes ( result . getCompressedRows ( ) ) ) ; org . apache . kylin . storage . gtrecord . PartitionResultIterator iterator = new org . apache . kylin . storage . gtrecord . PartitionResultIterator ( rawData , org . apache . kylin . storage . hbase . cube . v2 . coprocessor . endpoint . CubeVisitServiceTest . gtInfo , org . apache . kylin . storage . hbase . cube . v2 . coprocessor . endpoint . CubeVisitServiceTest . setOf ( 1 , 3 ) ) ; java . util . Map < java . lang . String , java . math . BigDecimal > actRet = com . google . common . collect . Maps . newHashMap ( ) ; while ( iterator . hasNext ( ) ) { org . apache . kylin . gridtable . GTRecord record = iterator . next ( ) ; java . lang . String key = ( ( java . lang . String ) ( record . decodeValue ( 1 ) ) ) ; java . math . BigDecimal value = ( ( java . math . BigDecimal ) ( record . decodeValue ( 3 ) ) ) ; actRet . put ( key , value ) ; } java . util . Map < java . lang . String , java . math . BigDecimal > innerExpUserRet = com . google . common . collect . Maps . newHashMap ( ) ; for ( java . lang . String key : org . apache . kylin . storage . hbase . cube . v2 . coprocessor . endpoint . CubeVisitServiceTest . expUserRet . keySet ( ) ) { java . math . BigDecimal value = new java . math . BigDecimal ( 0 ) ; if ( key . 
equals ( "Ken" ) ) { value = value . add ( org . apache . kylin . storage . hbase . cube . v2 . coprocessor . endpoint . CubeVisitServiceTest . expUserRet . get ( key ) ) ; value = value . multiply ( new java . math . BigDecimal ( 2 ) ) ; value = value . add ( org . apache . kylin . storage . hbase . cube . v2 . coprocessor . endpoint . CubeVisitServiceTest . userCnt ) ; } else { value = value . add ( org . apache . kylin . storage . hbase . cube . v2 . coprocessor . endpoint . CubeVisitServiceTest . userCnt ) ; } innerExpUserRet . put ( key , value ) ; } "<AssertPlaceHolder>" ; } catch ( java . lang . Exception e ) { org . junit . Assert . fail ( ( "Fail<sp>due<sp>to<sp>" + e ) ) ; } } } ; service . visitCube ( null , request , done ) ; } put ( java . lang . String , org . apache . kylin . storage . hbase . cube . v2 . SegmentQueryResult ) { memcachedCache . put ( key , segmentQueryResult ) ; }
org . junit . Assert . assertEquals ( innerExpUserRet , actRet )
testListStateAddAllNullEntries ( ) { org . apache . flink . runtime . state . AbstractKeyedStateBackend < java . lang . String > keyedBackend = createKeyedBackend ( StringSerializer . INSTANCE ) ; final org . apache . flink . api . common . state . ListStateDescriptor < java . lang . Long > stateDescr = new org . apache . flink . api . common . state . ListStateDescriptor ( "my-state" , org . apache . flink . runtime . state . Long . class ) ; try { org . apache . flink . api . common . state . ListState < java . lang . Long > state = keyedBackend . getPartitionedState ( VoidNamespace . INSTANCE , VoidNamespaceSerializer . INSTANCE , stateDescr ) ; keyedBackend . setCurrentKey ( "abc" ) ; "<AssertPlaceHolder>" ; expectedException . expect ( org . apache . flink . runtime . state . NullPointerException . class ) ; java . util . List < java . lang . Long > adding = new java . util . ArrayList ( ) ; adding . add ( 3L ) ; adding . add ( null ) ; adding . add ( 5L ) ; state . addAll ( adding ) ; } finally { keyedBackend . close ( ) ; keyedBackend . dispose ( ) ; } } get ( ) { return value ; }
org . junit . Assert . assertNull ( state . get ( ) )
stops_IfSchedulerIsNotDefined ( ) { "<AssertPlaceHolder>" ; } startup ( org . pentaho . platform . api . engine . IPentahoSession ) { org . pentaho . di . job . JobMeta jobMeta = null ; java . lang . String jobFileFullPath = getJobFileFullPath ( ) ; try { jobMeta = new org . pentaho . di . job . JobMeta ( jobFileFullPath , null ) ; } catch ( org . pentaho . di . core . exception . KettleXMLException kxe ) { org . pentaho . platform . util . logging . Logger . error ( ( "Error<sp>opening<sp>" + jobFileFullPath ) , kxe . getMessage ( ) ) ; return false ; } return executeJob ( jobMeta , jobFileFullPath ) ; }
org . junit . Assert . assertFalse ( listener . startup ( null ) )
when_shipmentroute_loadAtAct3ShouldBe10 ( ) { stateManager . informInsertionStarts ( java . util . Arrays . asList ( shipment_route ) , java . util . Collections . < jsprit . core . algorithm . state . Job > emptyList ( ) ) ; jsprit . core . algorithm . state . Capacity atAct = stateManager . getActivityState ( shipment_route . getActivities ( ) . get ( 2 ) , InternalStates . LOAD , jsprit . core . algorithm . state . Capacity . class ) ; "<AssertPlaceHolder>" ; }
org . junit . Assert . assertEquals ( 10 , atAct . get ( 0 ) )
testFastMd5WithNull ( ) { "<AssertPlaceHolder>" ; } fastMD5 ( java . io . File ) { try ( java . io . FileInputStream in = new java . io . FileInputStream ( file ) ) { return com . cedarsoftware . util . EncryptionUtilities . calculateMD5Hash ( in . getChannel ( ) ) ; } catch ( java . io . IOException e ) { return null ; } }
org . junit . Assert . assertNull ( com . cedarsoftware . util . EncryptionUtilities . fastMD5 ( null ) )
testGetEmrClusterDefinition ( ) { org . finra . herd . model . api . xml . EmrClusterDefinitionKey emrClusterDefinitionKey = new org . finra . herd . model . api . xml . EmrClusterDefinitionKey ( NAMESPACE , EMR_CLUSTER_DEFINITION_NAME ) ; org . finra . herd . model . api . xml . EmrClusterDefinition emrClusterDefinition = new org . finra . herd . model . api . xml . EmrClusterDefinition ( ) ; org . finra . herd . model . api . xml . EmrClusterDefinitionInformation emrClusterDefinitionInformation = new org . finra . herd . model . api . xml . EmrClusterDefinitionInformation ( ID , emrClusterDefinitionKey , emrClusterDefinition ) ; when ( emrClusterDefinitionService . getEmrClusterDefinition ( emrClusterDefinitionKey ) ) . thenReturn ( emrClusterDefinitionInformation ) ; org . finra . herd . model . api . xml . EmrClusterDefinitionInformation result = emrClusterDefinitionRestController . getEmrClusterDefinition ( org . finra . herd . rest . NAMESPACE , org . finra . herd . rest . EMR_CLUSTER_DEFINITION_NAME ) ; verify ( emrClusterDefinitionService ) . getEmrClusterDefinition ( emrClusterDefinitionKey ) ; verifyNoMoreInteractionsHelper ( ) ; "<AssertPlaceHolder>" ; } verifyNoMoreInteractionsHelper ( ) { verifyNoMoreInteractions ( awsHelper , javaPropertiesHelper , retryPolicyFactory , s3Operations ) ; }
org . junit . Assert . assertEquals ( emrClusterDefinitionInformation , result )
shouldDetermineEdgesAreNotEqual ( ) { final org . apache . tinkerpop . gremlin . structure . Element mockEdgeA = mock ( org . apache . tinkerpop . gremlin . structure . Edge . class ) ; final org . apache . tinkerpop . gremlin . structure . Element mockEdgeB = mock ( org . apache . tinkerpop . gremlin . structure . Edge . class ) ; when ( mockEdgeA . id ( ) ) . thenReturn ( "1" ) ; when ( mockEdgeB . id ( ) ) . thenReturn ( "2" ) ; "<AssertPlaceHolder>" ; } areEqual ( org . apache . tinkerpop . gremlin . structure . Element , java . lang . Object ) { if ( ( null == b ) || ( null == a ) ) return false ; if ( a == b ) return true ; if ( ! ( ( ( ( a instanceof org . apache . tinkerpop . gremlin . structure . Vertex ) && ( b instanceof org . apache . tinkerpop . gremlin . structure . Vertex ) ) || ( ( a instanceof org . apache . tinkerpop . gremlin . structure . Edge ) && ( b instanceof org . apache . tinkerpop . gremlin . structure . Edge ) ) ) || ( ( a instanceof org . apache . tinkerpop . gremlin . structure . VertexProperty ) && ( b instanceof org . apache . tinkerpop . gremlin . structure . VertexProperty ) ) ) ) return false ; return org . apache . tinkerpop . gremlin . structure . util . ElementHelper . haveEqualIds ( a , ( ( org . apache . tinkerpop . gremlin . structure . Element ) ( b ) ) ) ; }
org . junit . Assert . assertFalse ( org . apache . tinkerpop . gremlin . structure . util . ElementHelper . areEqual ( mockEdgeA , mockEdgeB ) )
testCheckWriterId ( ) { java . util . Properties properties = new java . util . Properties ( ) ; properties . setProperty ( ProducerConfig . BOOTSTRAP_SERVERS_CONFIG , "localhost:9290" ) ; org . apache . hadoop . hive . kafka . SimpleKafkaWriter writer = new org . apache . hadoop . hive . kafka . SimpleKafkaWriter ( "t" , null , properties ) ; "<AssertPlaceHolder>" ; } getWriterId ( ) { return writerId ; }
org . junit . Assert . assertNotNull ( writer . getWriterId ( ) )
pageIsSetCorrectly ( ) { net . fortytwo . smsn . server . actions . SetProperties action = new net . fortytwo . smsn . server . actions . SetProperties ( ) ; action . setId ( atom . getId ( ) ) ; action . setName ( SemanticSynchrony . PropertyKeys . TEXT ) ; action . setValue ( "after" ) ; perform ( action ) ; atom = topicGraph . getAtomById ( atom . getId ( ) ) . get ( ) ; "<AssertPlaceHolder>" ; } getText ( ) { return text ; }
org . junit . Assert . assertEquals ( "after" , atom . getText ( ) )
testCreateDefinition ( ) { java . lang . String fieldName = "" ; java . nio . ByteBuffer allocate = java . nio . ByteBuffer . allocate ( 100 ) ; org . eclipse . tracecompass . ctf . core . event . io . BitBuffer bb = new org . eclipse . tracecompass . ctf . core . event . io . BitBuffer ( allocate ) ; org . eclipse . tracecompass . ctf . core . event . types . StructDefinition result = fixture . createDefinition ( null , fieldName , bb ) ; "<AssertPlaceHolder>" ; } createDefinition ( org . eclipse . tracecompass . ctf . core . trace . CTFStreamInputReader , org . eclipse . tracecompass . ctf . core . event . io . BitBuffer , long ) { org . eclipse . tracecompass . ctf . core . event . types . StructDeclaration streamEventContextDecl = streamInputReader . getStreamEventContextDecl ( ) ; @ org . eclipse . jdt . annotation . Nullable final org . eclipse . tracecompass . internal . ctf . core . trace . CTFStream stream = fStream ; final org . eclipse . tracecompass . ctf . core . trace . CTFTrace trace = ( stream == null ) ? null : stream . getTrace ( ) ; org . eclipse . tracecompass . ctf . core . event . types . StructDefinition streamEventContext = ( streamEventContextDecl != null ) ? streamEventContextDecl . createDefinition ( trace , ILexicalScope . STREAM_EVENT_CONTEXT , input ) : null ; org . eclipse . tracecompass . ctf . core . event . types . ICompositeDefinition packetContext = streamInputReader . getCurrentPacketReader ( ) . getCurrentPacketEventHeader ( ) ; org . eclipse . tracecompass . ctf . core . event . types . StructDefinition eventContext = ( ( fContext ) != null ) ? fContext . createDefinition ( trace , ILexicalScope . CONTEXT , input ) : null ; org . eclipse . tracecompass . ctf . core . event . types . StructDefinition eventPayload = ( ( fFields ) != null ) ? fFields . createDefinition ( trace , ILexicalScope . FIELDS , input ) : null ; return new org . eclipse . tracecompass . internal . ctf . core . event . EventDefinition ( this , streamInputReader . getCPU ( ) , timestamp , null , streamEventContext , eventContext , packetContext , eventPayload , streamInputReader . getCurrentPacketReader ( ) . getCurrentPacket ( ) ) ; }
org . junit . Assert . assertNotNull ( result )
testDetectRetweet ( ) { java . util . List < java . lang . Class > detected = new org . apache . streams . twitter . converter . TwitterDocumentClassifier ( ) . detectClasses ( retweet ) ; "<AssertPlaceHolder>" ; java . lang . Class result = detected . get ( 0 ) ; if ( ! ( result . equals ( org . apache . streams . twitter . pojo . Retweet . class ) ) ) { org . junit . Assert . fail ( ) ; } } size ( ) { return queue . size ( ) ; }
org . junit . Assert . assertTrue ( ( ( detected . size ( ) ) == 1 ) )
testHaalPersoonCachesOp ( ) { final java . util . List < nl . bzk . algemeenbrp . dal . domein . brp . entity . PersoonCache > persoonCaches = persoonCacheSelectieRepository . haalPersoonCachesOp ( 1 , 100 ) ; "<AssertPlaceHolder>" ; } size ( ) { return elementen . size ( ) ; }
org . junit . Assert . assertEquals ( 6 , persoonCaches . size ( ) )
testUnitFollowedByBegin ( ) { java . lang . String outcome = opennlp . tools . namefind . BilouNameFinderSequenceValidatorTest . START_A ; java . lang . String [ ] inputSequence = new java . lang . String [ ] { "AnyType" , "TypeA" , "something" } ; java . lang . String [ ] outcomesSequence = new java . lang . String [ ] { opennlp . tools . namefind . BilouNameFinderSequenceValidatorTest . UNIT_A } ; "<AssertPlaceHolder>" ; } validSequence ( int , java . lang . String [ ] , java . lang . String [ ] , java . lang . String ) { if ( outcome . endsWith ( BioCodec . CONTINUE ) ) { int li = ( outcomesSequence . length ) - 1 ; if ( li == ( - 1 ) ) { return false ; } else if ( outcomesSequence [ li ] . endsWith ( BioCodec . OTHER ) ) { return false ; } else if ( ( outcomesSequence [ li ] . endsWith ( BioCodec . CONTINUE ) ) || ( outcomesSequence [ li ] . endsWith ( BioCodec . START ) ) ) { java . lang . String previousNameType = opennlp . tools . namefind . NameFinderME . extractNameType ( outcomesSequence [ li ] ) ; java . lang . String nameType = opennlp . tools . namefind . NameFinderME . extractNameType ( outcome ) ; if ( ( previousNameType != null ) || ( nameType != null ) ) { if ( nameType != null ) { if ( nameType . equals ( previousNameType ) ) { return true ; } } return false ; } } } return true ; }
org . junit . Assert . assertTrue ( opennlp . tools . namefind . BilouNameFinderSequenceValidatorTest . validator . validSequence ( 1 , inputSequence , outcomesSequence , outcome ) )
testCompleteUndo ( ) { java . lang . String template = "document[titles[title][title[2]]][authors/author[first='John'][last='Doe']]" ; org . jdom2 . Document doc = new org . jdom2 . Document ( new org . mycore . common . xml . MCRNodeBuilder ( ) . buildElement ( template , null , null ) ) ; org . jdom2 . Document before = doc . clone ( ) ; org . mycore . frontend . xeditor . tracker . MCRChangeTracker tracker = new org . mycore . frontend . xeditor . tracker . MCRChangeTracker ( ) ; org . jdom2 . Element titles = ( ( org . jdom2 . Element ) ( new org . mycore . frontend . xeditor . MCRBinding ( "document/titles" , true , new org . mycore . frontend . xeditor . MCRBinding ( doc ) ) . getBoundNode ( ) ) ) ; org . jdom2 . Element title = new org . jdom2 . Element ( "title" ) . setAttribute ( "type" , "alternative" ) ; titles . addContent ( 2 , title ) ; tracker . track ( org . mycore . frontend . xeditor . tracker . MCRAddedElement . added ( title ) ) ; org . jdom2 . Attribute lang = new org . jdom2 . Attribute ( "lang" , "de" ) ; doc . getRootElement ( ) . setAttribute ( lang ) ; tracker . track ( org . mycore . frontend . xeditor . tracker . MCRAddedAttribute . added ( lang ) ) ; org . jdom2 . Element author = ( ( org . jdom2 . Element ) ( new org . mycore . frontend . xeditor . MCRBinding ( "document/authors/author" , true , new org . mycore . frontend . xeditor . MCRBinding ( doc ) ) . getBoundNode ( ) ) ) ; tracker . track ( org . mycore . frontend . xeditor . tracker . MCRRemoveElement . remove ( author ) ) ; tracker . undoChanges ( doc ) ; "<AssertPlaceHolder>" ; } deepEqual ( org . jdom2 . Document , org . jdom2 . Document ) { try { return org . mycore . common . xml . MCRXMLHelper . JDOMEquivalent . equivalent ( org . mycore . common . xml . MCRXMLHelper . canonicalElement ( d1 ) , org . mycore . common . xml . MCRXMLHelper . canonicalElement ( d2 ) ) ; } catch ( java . lang . Exception e ) { org . mycore . common . xml . MCRXMLHelper . LOGGER . warn ( "Could<sp>not<sp>compare<sp>documents." , e ) ; return false ; } }
org . junit . Assert . assertTrue ( org . mycore . common . xml . MCRXMLHelper . deepEqual ( before , doc ) )
testSection427LinkNoHref ( ) { org . apache . abdera . i18n . iri . IRI uri = org . apache . abdera . test . parser . stax . FeedValidatorTest . baseURI . resolve ( "4.2.7.1/link-no-href.xml" ) ; org . apache . abdera . model . Document < org . apache . abdera . model . Feed > doc = parse ( uri ) ; org . apache . abdera . model . Entry entry = doc . getRoot ( ) . getEntries ( ) . get ( 0 ) ; org . apache . abdera . model . Link link = entry . getLinks ( ) . get ( 0 ) ; "<AssertPlaceHolder>" ; } getHref ( ) { java . lang . String href = getAttributeValue ( "href" ) ; return href != null ? new org . apache . abdera . i18n . iri . IRI ( href ) : null ; }
org . junit . Assert . assertNull ( link . getHref ( ) )
testInsertWithDeferredIdAllocation_LongId_List ( ) { com . jmethods . catatumbo . DatastoreBatch batch = com . jmethods . catatumbo . DatastoreBatchTest . em . newBatch ( ) ; java . util . List < com . jmethods . catatumbo . entities . LongId > entities = new java . util . ArrayList ( ) ; for ( int i = 0 ; i < 5 ; i ++ ) { com . jmethods . catatumbo . entities . LongId entity = new com . jmethods . catatumbo . entities . LongId ( ) ; entity . setField1 ( ( "Batch<sp>Insert<sp>Test<sp>with<sp>Deferred<sp>ID<sp>Allocation<sp>" + i ) ) ; entities . add ( entity ) ; } batch . insertWithDeferredIdAllocation ( entities ) ; com . jmethods . catatumbo . DatastoreBatch . Response response = batch . submit ( ) ; "<AssertPlaceHolder>" ; } getGeneratedKeys ( ) { return com . jmethods . catatumbo . impl . DatastoreUtils . toDatastoreKeys ( nativeResponse . getGeneratedKeys ( ) ) ; }
org . junit . Assert . assertTrue ( ( ( response . getGeneratedKeys ( ) . size ( ) ) == 5 ) )
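String literals in these rows encode interior spaces with the token "<sp>" (for example "Batch<sp>Insert<sp>Test<sp>with<sp>Deferred<sp>ID<sp>Allocation" in the pair above). A minimal detokenizing helper under that assumption; the method name is hypothetical:

    // Hypothetical helper: assumes "<sp>" only ever stands for a space inside a string literal.
    static String restoreSpaces(String tokenizedLiteral) {
        return tokenizedLiteral.replace("<sp>", " ");
    }

    // Example: restoreSpaces("Batch<sp>Insert<sp>Test") returns "Batch Insert Test"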
testStableOutput ( ) { objectStorage . put ( "DummyObject" , new org . eclipse . smarthome . storage . json . internal . JSonStorageTest . DummyObject ( ) ) ; persistAndReadAgain ( ) ; java . lang . String storageString1 = org . apache . commons . io . FileUtils . readFileToString ( tmpFile ) ; objectStorage = new org . eclipse . smarthome . storage . json . internal . JsonStorage ( tmpFile , this . getClass ( ) . getClassLoader ( ) , 0 , 0 , 0 ) ; objectStorage . flush ( ) ; java . lang . String storageString2 = org . apache . commons . io . FileUtils . readFileToString ( tmpFile ) ; "<AssertPlaceHolder>" ; } flush ( ) { if ( ( commitTimerTask ) != null ) { commitTimerTask . cancel ( ) ; commitTimerTask = null ; } if ( dirty ) { java . lang . String json = internalMapper . toJson ( map ) ; synchronized ( map ) { writeDatabaseFile ( file , json ) ; writeDatabaseFile ( new java . io . File ( ( ( ( file . getParent ( ) ) + ( java . io . File . separator ) ) + ( BACKUP_EXTENSION ) ) , ( ( ( java . lang . System . currentTimeMillis ( ) ) + ( SEPARATOR ) ) + ( file . getName ( ) ) ) ) , json ) ; cleanupBackups ( ) ; deferredSince = 0 ; dirty = false ; } } }
org . junit . Assert . assertEquals ( storageString1 , storageString2 )
testMinuss9999Previous ( ) { org . jfree . data . time . Year current = new org . jfree . data . time . Year ( ( - 9999 ) ) ; org . jfree . data . time . Year previous = ( ( org . jfree . data . time . Year ) ( current . previous ( ) ) ) ; "<AssertPlaceHolder>" ; } previous ( ) { synchronized ( checkClosed ( ) . getConnectionMutex ( ) ) { if ( this . onInsertRow ) { this . onInsertRow = false ; } if ( this . doingUpdates ) { this . doingUpdates = false ; } return prev ( ) ; } }
org . junit . Assert . assertNull ( previous )
testGetAnswer ( ) { org . drools . informer . Question q = new org . drools . informer . Question ( ) ; try { q . getAnswer ( ) ; org . junit . Assert . fail ( ) ; } catch ( java . lang . IllegalStateException e ) { } q . setAnswerType ( Question . QuestionType . TYPE_DECIMAL ) ; q . setDecimalAnswer ( new java . math . BigDecimal ( "4.56" ) ) ; "<AssertPlaceHolder>" ; } getAnswer ( ) { if ( ( answerType ) == null ) { throw new java . lang . IllegalStateException ( "answerType<sp>has<sp>not<sp>been<sp>specified" ) ; } org . drools . informer . Question . QuestionType basicAnswerType = getBasicAnswerType ( ) ; if ( basicAnswerType . equals ( org . drools . informer . Question . QuestionType . TYPE_TEXT ) ) { return textAnswer ; } if ( basicAnswerType . equals ( org . drools . informer . Question . QuestionType . TYPE_NUMBER ) ) { return numberAnswer ; } if ( basicAnswerType . equals ( org . drools . informer . Question . QuestionType . TYPE_DECIMAL ) ) { return decimalAnswer ; } if ( basicAnswerType . equals ( org . drools . informer . Question . QuestionType . TYPE_BOOLEAN ) ) { return booleanAnswer ; } if ( basicAnswerType . equals ( org . drools . informer . Question . QuestionType . TYPE_DATE ) ) { return getDateAnswer ( ) ; } if ( basicAnswerType . equals ( org . drools . informer . Question . QuestionType . TYPE_LIST ) ) { return listAnswer ; } throw new java . lang . IllegalStateException ( ) ; }
org . junit . Assert . assertEquals ( new java . math . BigDecimal ( "4.56" ) , q . getAnswer ( ) )
testGetDataSourceId ( ) { java . util . Map < java . lang . String , java . lang . Object > map = new java . util . HashMap ( ) ; map . put ( "a" , "aval" ) ; map . put ( "b" , "bval" ) ; com . hortonworks . streamline . streams . StreamlineEvent event = com . hortonworks . streamline . streams . common . StreamlineEventImpl . builder ( ) . fieldsAndValues ( map ) . dataSourceId ( "1" ) . build ( ) ; "<AssertPlaceHolder>" ; } getDataSourceId ( ) { java . lang . String res = dataSourceId ; if ( res == null ) { java . lang . Object dataSourceIds = header . get ( "dataSourceIds" ) ; if ( dataSourceIds instanceof java . util . List ) { res = com . google . common . base . Joiner . on ( "," ) . join ( com . google . common . collect . Collections2 . filter ( ( ( java . util . List ) ( dataSourceIds ) ) , new com . google . common . base . Predicate ( ) { @ com . hortonworks . streamline . streams . common . Override public boolean apply ( java . lang . Object input ) { return input != null ; } } ) ) ; } } return res ; }
org . junit . Assert . assertEquals ( "1" , event . getDataSourceId ( ) )
testOneToOneBatched ( ) { final int cycle = 1024 ; final int size = 1024 * cycle ; final com . questdb . mp . RingQueue < com . questdb . mp . Event > queue = new com . questdb . mp . RingQueue ( Event . FACTORY , cycle ) ; final com . questdb . mp . SPSequence pubSeq = new com . questdb . mp . SPSequence ( cycle ) ; final com . questdb . mp . SCSequence subSeq = new com . questdb . mp . SCSequence ( ) ; pubSeq . then ( subSeq ) . then ( pubSeq ) ; java . util . concurrent . CyclicBarrier barrier = new java . util . concurrent . CyclicBarrier ( 2 ) ; new java . lang . Thread ( ( ) -> { try { barrier . await ( ) ; com . questdb . std . Rnd rnd = new com . questdb . std . Rnd ( ) ; for ( int i = 0 ; i < size ; ) { long cursor = pubSeq . next ( ) ; if ( cursor > ( - 1 ) ) { long available = pubSeq . available ( ) ; while ( ( cursor < available ) && ( i < size ) ) { com . questdb . mp . Event event = queue . get ( ( cursor ++ ) ) ; event . value = rnd . nextInt ( ) ; i ++ ; } pubSeq . done ( ( cursor - 1 ) ) ; } } } catch ( e ) { com . questdb . mp . e . printStackTrace ( ) ; } } ) . start ( ) ; barrier . await ( ) ; int consumed = 0 ; final com . questdb . std . Rnd rnd2 = new com . questdb . std . Rnd ( ) ; while ( consumed < size ) { long cursor = subSeq . next ( ) ; if ( cursor > ( - 1 ) ) { long available = subSeq . available ( ) ; while ( cursor < available ) { "<AssertPlaceHolder>" ; consumed ++ ; } subSeq . done ( ( available - 1 ) ) ; } } } nextInt ( ) { return ( ( int ) ( nextLong ( ) ) ) ; }
org . junit . Assert . assertEquals ( rnd2 . nextInt ( ) , queue . get ( ( cursor ++ ) ) . value )
testMergeMaxF ( ) { lombok . val array0 = org . nd4j . linalg . factory . Nd4j . rand ( 'f' , 5 , 2 ) . add ( 1 ) ; lombok . val array1 = array0 . dup ( 'f' ) . add ( 5 ) ; array1 . put ( 0 , 0 , 0 ) ; lombok . val exp = array1 . dup ( 'f' ) ; exp . putScalar ( 0 , 0 , array0 . getDouble ( 0 , 0 ) ) ; lombok . val zF = org . nd4j . linalg . factory . Nd4j . zeros ( array0 . shape ( ) , 'f' ) ; org . nd4j . linalg . api . ops . CustomOp op = org . nd4j . linalg . api . ops . DynamicCustomOp . builder ( "mergemax" ) . addInputs ( array0 , array1 ) . addOutputs ( zF ) . build ( ) ; org . nd4j . linalg . factory . Nd4j . getExecutioner ( ) . exec ( op ) ; "<AssertPlaceHolder>" ; } exec ( java . lang . String ) { code = org . datavec . python . PythonExecutioner . getFunctionalCode ( ( "__f_" + ( java . lang . Thread . currentThread ( ) . getId ( ) ) ) , code ) ; org . datavec . python . PythonExecutioner . acquireGIL ( ) ; log . info ( "CPython:<sp>PyRun_SimpleStringFlag()" ) ; log . info ( code ) ; int result = PyRun_SimpleStringFlags ( code , null ) ; if ( result != 0 ) { PyErr_Print ( ) ; throw new java . lang . RuntimeException ( "exec<sp>failed" ) ; } log . info ( "Exec<sp>done" ) ; org . datavec . python . PythonExecutioner . releaseGIL ( ) ; }
org . junit . Assert . assertEquals ( exp , zF )
deveGerarXMLDeAcordoComOPadraoEstabelecido ( ) { final com . fincatto . documentofiscal . nfe310 . classes . nota . NFNotaInfoItemImpostoICMSPartilhado icmsPartilhado = new com . fincatto . documentofiscal . nfe310 . classes . nota . NFNotaInfoItemImpostoICMSPartilhado ( ) ; icmsPartilhado . setSituacaoTributaria ( NFNotaInfoImpostoTributacaoICMS . OUTROS ) ; icmsPartilhado . setModalidadeBCICMSST ( NFNotaInfoItemModalidadeBCICMSST . LISTA_NEUTRA ) ; icmsPartilhado . setModalidadeBCICMS ( NFNotaInfoItemModalidadeBCICMS . PAUTA ) ; icmsPartilhado . setOrigem ( NFOrigem . NACIONAL ) ; icmsPartilhado . setPercentualAliquotaImposto ( new java . math . BigDecimal ( "99.99" ) ) ; icmsPartilhado . setPercentualAliquotaImpostoICMSST ( new java . math . BigDecimal ( "99.99" ) ) ; icmsPartilhado . setPercentualBCOperacaoPropria ( new java . math . BigDecimal ( "99.99" ) ) ; icmsPartilhado . setPercentualMargemValorAdicionadoICMSST ( new java . math . BigDecimal ( "99.99" ) ) ; icmsPartilhado . setPercentualReducaoBC ( new java . math . BigDecimal ( "99.99" ) ) ; icmsPartilhado . setPercentualReducaoBCICMSST ( new java . math . BigDecimal ( "99.99" ) ) ; icmsPartilhado . setUfICMSST ( DFUnidadeFederativa . TO ) ; icmsPartilhado . setValorBCICMS ( new java . math . BigDecimal ( "999999999999.99" ) ) ; icmsPartilhado . setValorBCICMSST ( new java . math . BigDecimal ( "999999999999.99" ) ) ; icmsPartilhado . setValorICMS ( new java . math . BigDecimal ( "999999999999.99" ) ) ; icmsPartilhado . setValorICMSST ( new java . math . BigDecimal ( "999999999999.99" ) ) ; final java . lang . String xmlEsperado = "<NFNotaInfoItemImpostoICMSPartilhado><orig>0</orig><CST>90</CST><modBC>1</modBC><vBC>999999999999.99</vBC><pRedBC>99.99</pRedBC><pICMS>99.99</pICMS><vICMS>999999999999.99</vICMS><modBCST>3</modBCST><pMVAST>99.99</pMVAST><pRedBCST>99.99</pRedBCST><vBCST>999999999999.99</vBCST><pICMSST>99.99</pICMSST><vICMSST>999999999999.99</vICMSST><pBCOp>99.99</pBCOp><UFST>TO</UFST></NFNotaInfoItemImpostoICMSPartilhado>" ; "<AssertPlaceHolder>" ; } toString ( ) { return this . getDescricao ( ) ; }
org . junit . Assert . assertEquals ( xmlEsperado , icmsPartilhado . toString ( ) )
testSetDataGranularity ( ) { org . apache . druid . java . util . common . granularity . Granularity granularity = org . apache . druid . java . util . common . granularity . Granularities . DAY ; granularityPathSpec . setDataGranularity ( granularity ) ; "<AssertPlaceHolder>" ; } getDataGranularity ( ) { return dataGranularity ; }
org . junit . Assert . assertEquals ( granularity , granularityPathSpec . getDataGranularity ( ) )
test_issue411 ( ) { org . nutz . el . El el = new org . nutz . el . El ( "a[0].b.isPass('')?'1':'2'" ) ; org . nutz . lang . util . Context ctx = org . nutz . lang . Lang . context ( ) ; ctx . set ( "a" , new java . lang . Object [ ] { new org . nutz . el . issue411 . Issue411 . A ( ) } ) ; "<AssertPlaceHolder>" ; } eval ( org . nutz . lang . util . Context ) { if ( ( rc ) == null ) { throw new org . nutz . el . ElException ( "!" ) ; } return rc . calculate ( context ) ; }
org . junit . Assert . assertEquals ( "1" , el . eval ( ctx ) )
testCollectNetworks ( ) { tv . floe . metronome . classification . neuralnetworks . iterativereduce . NeuralNetworkUtil util = new tv . floe . metronome . classification . neuralnetworks . iterativereduce . NeuralNetworkUtil ( ) ; tv . floe . metronome . classification . neuralnetworks . core . NeuralNetwork nn0 = buildXORMLP ( ) ; nn0 . getLayerByIndex ( 1 ) . getNeuronAt ( 0 ) . getInConnections ( ) . get ( 0 ) . setWeight ( new tv . floe . metronome . classification . neuralnetworks . core . Weight ( 0 ) ) ; nn0 . getLayerByIndex ( 1 ) . getNeuronAt ( 0 ) . getInConnections ( ) . get ( 1 ) . setWeight ( new tv . floe . metronome . classification . neuralnetworks . core . Weight ( 1 ) ) ; util . AccumulateWorkerNetwork ( nn0 ) ; util . AccumulateWorkerNetwork ( buildXORMLP ( ) ) ; util . AccumulateWorkerNetwork ( buildXORMLP ( ) ) ; "<AssertPlaceHolder>" ; } getNetworkBufferCount ( ) { return this . worker_networks . size ( ) ; }
org . junit . Assert . assertEquals ( 3 , util . getNetworkBufferCount ( ) )
createArrayList_noArgs ( ) { java . util . List < java . lang . Integer > list = createArrayList ( ) ; "<AssertPlaceHolder>" ; } createArrayList ( ) { return new java . util . ArrayList < T > ( ) ; }
org . junit . Assert . assertTrue ( ( list instanceof java . util . ArrayList < ? > ) )
hasCauseOfTypeTest ( ) { java . lang . Exception e = new java . lang . Exception ( new java . lang . IllegalArgumentException ( new java . lang . Exception ( ) ) ) ; "<AssertPlaceHolder>" ; } hasCauseOfType ( java . lang . Throwable , java . lang . Class ) { return ( org . threadly . util . ExceptionUtils . getCauseOfType ( rootError , type ) ) != null ; }
org . junit . Assert . assertTrue ( org . threadly . util . ExceptionUtils . hasCauseOfType ( e , org . threadly . util . IllegalArgumentException . class ) )
uuidTest ( ) { org . apache . jena . arq . querybuilder . Expr e = factory . uuid ( ) ; "<AssertPlaceHolder>" ; } uuid ( ) { return new org . apache . jena . sparql . expr . E_UUID ( ) ; }
org . junit . Assert . assertTrue ( ( e instanceof org . apache . jena . arq . querybuilder . E_UUID ) )
getSlowQueryTime ( ) { int time = org . fastquery . util . FastQueryJSONObject . getSlowQueryTime ( ) ; "<AssertPlaceHolder>" ; } getSlowQueryTime ( ) { return org . fastquery . util . FastQueryJSONObject . getJsonObject ( ) . getIntValue ( "slowQueryTime" ) ; }
org . junit . Assert . assertThat ( time , is ( 50 ) )
testAnonFingerprints ( ) { java . util . BitSet [ ] expected = new java . util . BitSet [ ] { org . openscience . cdk . modeling . builder3d . TemplateHandler3DTest . parseBitSet ( "{148,<sp>206,<sp>392,<sp>542,<sp>637,<sp>742,<sp>752,<sp>830}" ) , org . openscience . cdk . modeling . builder3d . TemplateHandler3DTest . parseBitSet ( "{148,<sp>206,<sp>392,<sp>542,<sp>637,<sp>742,<sp>752,<sp>830}" ) , org . openscience . cdk . modeling . builder3d . TemplateHandler3DTest . parseBitSet ( "{148,<sp>206,<sp>392,<sp>542,<sp>637,<sp>742,<sp>752,<sp>830}" ) , org . openscience . cdk . modeling . builder3d . TemplateHandler3DTest . parseBitSet ( "{148,<sp>206,<sp>392,<sp>542,<sp>637,<sp>742,<sp>752,<sp>830}" ) , org . openscience . cdk . modeling . builder3d . TemplateHandler3DTest . parseBitSet ( "{148,<sp>206,<sp>392,<sp>542,<sp>637,<sp>742,<sp>752,<sp>830}" ) , org . openscience . cdk . modeling . builder3d . TemplateHandler3DTest . parseBitSet ( "{148,<sp>206,<sp>392,<sp>542,<sp>637,<sp>742,<sp>752,<sp>830}" ) , org . openscience . cdk . modeling . builder3d . TemplateHandler3DTest . parseBitSet ( "{148,<sp>206,<sp>392,<sp>542,<sp>637,<sp>742,<sp>752,<sp>830}" ) , org . openscience . cdk . modeling . builder3d . TemplateHandler3DTest . parseBitSet ( "{148,<sp>206,<sp>392,<sp>542,<sp>637,<sp>742,<sp>752,<sp>830}" ) , org . openscience . cdk . modeling . builder3d . TemplateHandler3DTest . parseBitSet ( "{148,<sp>206,<sp>392,<sp>542,<sp>637,<sp>742,<sp>752,<sp>830}" ) , org . openscience . cdk . modeling . builder3d . TemplateHandler3DTest . parseBitSet ( "{148,<sp>206,<sp>392,<sp>542,<sp>637,<sp>742,<sp>752,<sp>830}" ) } ; java . lang . String filename = "data/mdl/fingerprints_from_modelbuilder3d.sdf" ; java . io . InputStream ins = this . getClass ( ) . getClassLoader ( ) . getResourceAsStream ( filename ) ; java . util . List < org . openscience . cdk . fingerprint . IBitFingerprint > data = new org . openscience . cdk . modeling . builder3d . TemplateExtractor ( ) . makeFingerprintsFromSdf ( true , true , new java . util . HashMap < java . lang . String , java . lang . Integer > ( ) , new java . io . BufferedReader ( new java . io . InputStreamReader ( ins ) ) , 10 ) ; org . openscience . cdk . isomorphism . matchers . QueryChemObject obj = new org . openscience . cdk . isomorphism . matchers . QueryChemObject ( org . openscience . cdk . DefaultChemObjectBuilder . getInstance ( ) ) ; obj . getBuilder ( ) ; for ( int i = 0 ; i < ( data . size ( ) ) ; i ++ ) { org . openscience . cdk . fingerprint . IBitFingerprint bs = data . get ( i ) ; "<AssertPlaceHolder>" ; } } asBitSet ( ) { return ( ( java . util . BitSet ) ( bitset . clone ( ) ) ) ; }
org . junit . Assert . assertEquals ( expected [ i ] , bs . asBitSet ( ) )
ofSortedMap ( ) { com . gs . collections . impl . map . sorted . immutable . ImmutableTreeMap < java . lang . Integer , java . lang . String > immutableMap = new com . gs . collections . impl . map . sorted . immutable . ImmutableTreeMap < java . lang . Integer , java . lang . String > ( SortedMaps . mutable . of ( 1 , "1" , 2 , "2" , 3 , "3" , 4 , "4" ) ) ; "<AssertPlaceHolder>" ; } ofSortedMap ( java . util . SortedMap ) { return this . withSortedMap ( map ) ; }
org . junit . Assert . assertSame ( immutableMap , SortedMaps . immutable . ofSortedMap ( immutableMap ) )
givenIntArray_whenSortingDescending_thenCorrectlySorted ( ) { numbers = java . util . stream . IntStream . of ( numbers ) . boxed ( ) . sorted ( java . util . Comparator . reverseOrder ( ) ) . mapToInt ( ( i ) -> i ) . toArray ( ) ; "<AssertPlaceHolder>" ; } toArray ( ) { return java . util . Arrays . copyOf ( internal , internal . length ) ; }
org . junit . Assert . assertArrayEquals ( new int [ ] { 10 , 9 , 7 , 5 , 3 , - 2 , - 8 } , numbers )
testGetLoginCtxProvider ( ) { authConfiguration . setSecurityProvider ( org . apache . jackrabbit . oak . security . internal . SecurityProviderBuilder . newBuilder ( ) . build ( ) ) ; "<AssertPlaceHolder>" ; } getLoginContextProvider ( org . apache . jackrabbit . oak . api . ContentRepository ) { return new org . apache . jackrabbit . oak . spi . security . authentication . LoginContextProvider ( ) { @ org . jetbrains . annotations . NotNull @ org . apache . jackrabbit . oak . spi . security . authentication . Override public org . apache . jackrabbit . oak . spi . security . authentication . LoginContext getLoginContext ( final javax . jcr . Credentials credentials , java . lang . String workspaceName ) { return new org . apache . jackrabbit . oak . spi . security . authentication . LoginContext ( ) { @ org . apache . jackrabbit . oak . spi . security . authentication . Override public javax . security . auth . Subject getSubject ( ) { javax . security . auth . Subject subject = new javax . security . auth . Subject ( ) ; if ( credentials != null ) { subject . getPrivateCredentials ( ) . add ( credentials ) ; } subject . setReadOnly ( ) ; return subject ; } @ org . apache . jackrabbit . oak . spi . security . authentication . Override public void login ( ) { } @ org . apache . jackrabbit . oak . spi . security . authentication . Override public void logout ( ) { } } ; } } ; }
org . junit . Assert . assertNotNull ( authConfiguration . getLoginContextProvider ( repo ) )
testCODECOPY_5 ( ) { org . ethereum . vm . VM vm = new org . ethereum . vm . VM ( ) ; program = new org . ethereum . vm . Program ( org . spongycastle . util . encoders . Hex . decode ( "611234600054615566602054607060006020396000605f556014600054601e60205463abcddcba6040545b51602001600a5254516040016014525451606001601e5254516080016028525460a052546016604860003960166000f26000603f556103e756600054600053602002351234" ) , invoke ) ; vm . step ( program ) ; vm . step ( program ) ; vm . step ( program ) ; vm . step ( program ) ; vm . step ( program ) ; vm . step ( program ) ; vm . step ( program ) ; vm . step ( program ) ; vm . step ( program ) ; vm . step ( program ) ; "<AssertPlaceHolder>" ; } isStopped ( ) { return stopped ; }
org . junit . Assert . assertFalse ( program . isStopped ( ) )
testConstructor ( ) { new org . openhealthtools . mdht . uml . cda . operations . EntryRelationshipOperations ( ) ; "<AssertPlaceHolder>" ; }
org . junit . Assert . assertTrue ( true )
startModule_shouldCreateDwrModulesXmlIfNotExists ( ) { org . openmrs . module . Module mod = buildModuleForMessageTest ( ) ; org . openmrs . module . ModuleFactory . getStartedModulesMap ( ) . put ( mod . getModuleId ( ) , mod ) ; javax . servlet . ServletContext servletContext = mock ( javax . servlet . ServletContext . class ) ; java . lang . String realPath = servletContext . getRealPath ( "" ) ; if ( realPath == null ) realPath = java . lang . System . getProperty ( "user.dir" ) ; java . io . File f = new java . io . File ( ( realPath + "/WEB-INF/dwr-modules.xml" ) ) ; f . delete ( ) ; org . openmrs . module . web . WebModuleUtil . startModule ( mod , servletContext , true ) ; "<AssertPlaceHolder>" ; org . openmrs . module . ModuleFactory . getStartedModulesMap ( ) . clear ( ) ; } exists ( ) { if ( isSingleResult ( ) ) { return ( ( ( ( ( ( valueBoolean ) != null ) && ( valueBoolean ) ) || ( ( valueCoded ) != null ) ) || ( ( valueDatetime ) != null ) ) || ( ( ( valueNumeric ) != null ) && ( ( valueNumeric ) != 0 ) ) ) || ( ( ( valueText ) != null ) && ( ( valueText . length ( ) ) > 0 ) ) ; } for ( org . openmrs . logic . result . Result r : this ) { if ( r . exists ( ) ) { return true ; } } return false ; }
org . junit . Assert . assertTrue ( f . exists ( ) )
shouldHandleOgnlExpression ( ) { final java . util . HashMap < java . lang . String , java . lang . String > parameterObject = new java . util . HashMap < java . lang . String , java . lang . String > ( ) { { put ( "name" , "Steve" ) ; } } ; final java . lang . String expected = "Expression<sp>test:<sp>3<sp>/<sp>yes." ; org . apache . ibatis . scripting . xmltags . DynamicSqlSource source = createDynamicSqlSource ( new org . apache . ibatis . scripting . xmltags . TextSqlNode ( "Expression<sp>test:<sp>${name.indexOf(\'v\')}<sp>/<sp>${name<sp>in<sp>{\'Bob\',<sp>\'Steve\'\\}<sp>?<sp>\'yes\'<sp>:<sp>\'no\'}." ) ) ; org . apache . ibatis . mapping . BoundSql boundSql = source . getBoundSql ( parameterObject ) ; "<AssertPlaceHolder>" ; } getSql ( ) { return sql ; }
org . junit . Assert . assertEquals ( expected , boundSql . getSql ( ) )
selectedFieldsCombined ( ) { cascading . tuple . Fields fields = new com . hotels . plunger . Data ( new cascading . tuple . Fields ( "A" , "B" ) , new java . util . ArrayList < cascading . tuple . Tuple > ( ) ) . withFields ( new cascading . tuple . Fields ( "A" ) , new cascading . tuple . Fields ( "A" ) , new cascading . tuple . Fields ( "B" ) ) . selectedFields ( ) ; "<AssertPlaceHolder>" ; } selectedFields ( ) { cascading . tuple . Fields fields = new com . hotels . plunger . Data ( new cascading . tuple . Fields ( "A" , "B" ) , new java . util . ArrayList < cascading . tuple . Tuple > ( ) ) . withFields ( new cascading . tuple . Fields ( "A" ) ) . selectedFields ( ) ; org . junit . Assert . assertThat ( fields , org . hamcrest . CoreMatchers . is ( new cascading . tuple . Fields ( "A" ) ) ) ; }
org . junit . Assert . assertThat ( fields , org . hamcrest . CoreMatchers . is ( new cascading . tuple . Fields ( "A" , "B" ) ) )
populateMapFromStringFromHTTPWithProp ( ) { mockObjects ( "false" ) ; org . easymock . EasyMock . replay ( mockMessage , mockRequest ) ; javax . ws . rs . core . MultivaluedMap < java . lang . String , java . lang . String > params = new org . apache . cxf . jaxrs . impl . MetadataMap ( ) ; org . apache . cxf . jaxrs . utils . FormUtils . populateMapFromString ( params , mockMessage , null , StandardCharsets . UTF_8 . name ( ) , false , mockRequest ) ; "<AssertPlaceHolder>" ; } size ( ) { return cache . getSize ( ) ; }
org . junit . Assert . assertEquals ( 0 , params . size ( ) )
testNullMetacard ( ) { ddf . catalog . data . Result result = mock ( ddf . catalog . data . Result . class ) ; ddf . catalog . plugin . PolicyResponse response = filterPluginResponseHelper ( result , null , true , false ) ; "<AssertPlaceHolder>" ; } itemPolicy ( ) { return itemPolicy ; }
org . junit . Assert . assertThat ( response . itemPolicy ( ) . isEmpty ( ) , org . hamcrest . core . Is . is ( true ) )
one_$parent_elem_match_returns_true_when_at_least_one_value_in_array_matches ( ) { com . redhat . lightblue . query . QueryExpression expr = com . redhat . lightblue . eval . EvalTestContext . queryExpressionFromJson ( "{'array':'field6.$parent.field7','elemMatch':{'field':'elemf3','op':'>','rvalue':3}}" ) ; com . redhat . lightblue . eval . QueryEvaluator eval = com . redhat . lightblue . eval . QueryEvaluator . getInstance ( expr , md ) ; com . redhat . lightblue . eval . QueryEvaluationContext context = eval . evaluate ( jsonDoc ) ; "<AssertPlaceHolder>" ; } getResult ( ) { return result ; }
org . junit . Assert . assertTrue ( context . getResult ( ) )
setObservationsFromUnknownSensors ( ) { boolean dataRejected = false ; final java . lang . String provider = "prov1" ; final java . lang . String sensor = "sensor1" ; final org . springframework . data . redis . listener . Topic topic = org . sentilo . platform . service . utils . ChannelUtils . buildTopic ( PubSubChannelPrefix . data , provider , sensor ) ; final java . util . List < org . sentilo . platform . common . domain . Observation > observations = buildObservations ( provider , sensor ) ; when ( inputMessage . getObservations ( ) ) . thenReturn ( observations ) ; when ( resourceService . getSensor ( eq ( provider ) , eq ( sensor ) ) ) . thenReturn ( new org . sentilo . platform . common . domain . Sensor ( provider , sensor ) ) ; try { service . setObservations ( inputMessage ) ; } catch ( final org . sentilo . platform . common . exception . EventRejectedException ere ) { dataRejected = true ; } "<AssertPlaceHolder>" ; verify ( inputMessage ) . getObservations ( ) ; verify ( jedisSequenceUtils , times ( 0 ) ) . getSid ( provider , sensor ) ; verify ( jedisSequenceUtils , times ( 0 ) ) . getSdid ( ) ; verify ( jedisTemplate , times ( 0 ) ) . publish ( eq ( topic . getTopic ( ) ) , anyString ( ) ) ; } setObservations ( org . sentilo . platform . common . domain . DataInputMessage ) { final java . util . List < org . sentilo . platform . common . domain . Observation > observations = message . getObservations ( ) ; final org . sentilo . platform . common . exception . RejectedResourcesContext rejectedContext = new org . sentilo . platform . common . exception . RejectedResourcesContext ( ) ; for ( final org . sentilo . platform . common . domain . Observation observation : observations ) { try { final org . sentilo . platform . common . domain . Sensor sensor = getSensorMetadata ( observation . getProvider ( ) , observation . getSensor ( ) ) ; checkTargetResourceState ( sensor , observation ) ; setObservation ( sensor , observation ) ; } catch ( final org . sentilo . platform . common . exception . ResourceNotFoundException rnfe ) { rejectedContext . rejectEvent ( observation . getSensor ( ) , rnfe . getMessage ( ) ) ; org . sentilo . platform . service . impl . DataServiceImpl . LOGGER . warn ( "Observation<sp>[{}]<sp>has<sp>been<sp>rejected<sp>because<sp>sensor<sp>[{}],<sp>belonging<sp>to<sp>provider<sp>[{}],<sp>doesn't<sp>exist<sp>on<sp>Sentilo." , observation . getValue ( ) , observation . getSensor ( ) , observation . getProvider ( ) ) ; } catch ( final org . sentilo . platform . common . exception . ResourceOfflineException roe ) { rejectedContext . rejectEvent ( observation . getSensor ( ) , roe . getMessage ( ) ) ; org . sentilo . platform . service . impl . DataServiceImpl . LOGGER . warn ( "Observation<sp>[{}]<sp>has<sp>been<sp>rejected<sp>because<sp>sensor<sp>[{}],<sp>belonging<sp>to<sp>provider<sp>[{}],<sp>is<sp>not<sp>online." , observation . getValue ( ) , observation . getSensor ( ) , observation . getProvider ( ) ) ; } } if ( ! ( rejectedContext . isEmpty ( ) ) ) { throw new org . sentilo . platform . common . exception . EventRejectedException ( org . sentilo . common . enums . EventType . DATA , rejectedContext ) ; } }
org . junit . Assert . assertTrue ( dataRejected )
testRegistreerGBAGeboorte ( ) { initMocks ( nl . bzk . brp . model . bijhouding . RegistreerGBAGeboorteBericht . class , SoortBericht . ISC_MIG_REGISTREER_GEBOORTE ) ; initBerichtVerwerker ( new java . util . ArrayList < nl . bzk . brp . model . validatie . Melding > ( ) , nl . bzk . brp . bijhouding . business . dto . bijhouding . BijhoudingResultaat . class , true ) ; final nl . bzk . brp . model . bijhouding . RegistreerGBAGeboorteAntwoordBericht testBericht = new nl . bzk . brp . model . bijhouding . RegistreerGBAGeboorteAntwoordBericht ( ) ; when ( getAntwoordBerichtFactory ( ) . bouwAntwoordBericht ( any ( nl . bzk . brp . model . logisch . ber . Bericht . class ) , any ( nl . bzk . brp . bijhouding . business . dto . bijhouding . BijhoudingResultaat . class ) ) ) . thenReturn ( testBericht ) ; final nl . bzk . brp . model . bericht . kern . AdministratieveHandelingBericht adminstratieveHandeling = new nl . bzk . brp . model . bericht . kern . HandelingGBAGeboorteBericht ( ) ; adminstratieveHandeling . setObjectSleutel ( nl . bzk . brp . web . service . BijhoudingServiceTest . OBJECT_SLEUTEL ) ; voegAdministratieveHandelingToeAanBericht ( adminstratieveHandeling ) ; nl . bzk . brp . model . bijhouding . RegistreerGBAGeboorteAntwoordBericht resultaat = ( ( nl . bzk . brp . web . service . BijhoudingService ) ( getWebService ( ) ) ) . registreerGBAGeboorte ( ( ( nl . bzk . brp . model . bijhouding . RegistreerGBAGeboorteBericht ) ( getBericht ( ) ) ) ) ; "<AssertPlaceHolder>" ; } getBericht ( ) { return bericht ; }
org . junit . Assert . assertEquals ( testBericht , resultaat )
testDatumMededelingOpVandaag ( ) { final nl . bzk . brp . bijhouding . bericht . model . ElementBuilder . VerblijfsrechtParameters params = new nl . bzk . brp . bijhouding . bericht . model . ElementBuilder . VerblijfsrechtParameters ( ) ; params . aanduidingCode ( "98" ) ; params . datumAanvang ( 20160101 ) ; params . datumMededeling ( nl . bzk . algemeenbrp . util . common . DatumUtil . vandaag ( ) ) ; final nl . bzk . brp . bijhouding . bericht . model . VerblijfsrechtElement element = builder . maakVerblijfsrechtElement ( "comm_id" , params ) ; final java . util . List < nl . bzk . brp . bijhouding . bericht . model . MeldingElement > meldingen = element . valideerInhoud ( ) ; "<AssertPlaceHolder>" ; } size ( ) { return elementen . size ( ) ; }
org . junit . Assert . assertEquals ( 0 , meldingen . size ( ) )
testConvertBlank ( ) { java . sql . Date result = underTest . convert ( "<sp>" ) ; "<AssertPlaceHolder>" ; } convert ( java . lang . String ) { if ( "S" . equalsIgnoreCase ( value ) ) { return Size . TINY ; } else if ( "M" . equalsIgnoreCase ( value ) ) { return Size . NORMAL ; } else if ( "L" . equalsIgnoreCase ( value ) ) { return Size . HUGE ; } else { return null ; } }
org . junit . Assert . assertNull ( result )
testTimeout ( ) { final int total = 100 ; com . cloudera . flume . reporter . aggregator . CounterSink cnt = new com . cloudera . flume . reporter . aggregator . CounterSink ( "count" ) ; com . cloudera . flume . handlers . debug . MemorySinkSource mem = new com . cloudera . flume . handlers . debug . MemorySinkSource ( ) ; com . cloudera . flume . core . FanOutSink < com . cloudera . flume . core . EventSink > fo = new com . cloudera . flume . core . FanOutSink < com . cloudera . flume . core . EventSink > ( cnt , mem ) ; com . cloudera . flume . handlers . batch . BatchingDecorator < com . cloudera . flume . core . EventSink > b = new com . cloudera . flume . handlers . batch . BatchingDecorator < com . cloudera . flume . core . EventSink > ( fo , 1024 , 3000 ) ; b . open ( ) ; for ( int i = 0 ; i < total ; i ++ ) { com . cloudera . flume . core . Event e = new com . cloudera . flume . core . EventImpl ( ( "message<sp>" + i ) . getBytes ( ) ) ; b . append ( e ) ; } java . lang . Thread . sleep ( 5000 ) ; "<AssertPlaceHolder>" ; b . close ( ) ; } getCount ( ) { return cnt . get ( ) ; }
org . junit . Assert . assertEquals ( 1 , cnt . getCount ( ) )
testSequence_add1_next ( ) { com . navercorp . pinpoint . common . server . bo . SpanEventBo prev = new com . navercorp . pinpoint . common . server . bo . SpanEventBo ( ) ; com . navercorp . pinpoint . common . server . bo . SpanEventBo current = new com . navercorp . pinpoint . common . server . bo . SpanEventBo ( ) ; prev . setSequence ( ( ( short ) ( 10 ) ) ) ; current . setSequence ( ( ( short ) ( 11 ) ) ) ; com . navercorp . pinpoint . common . server . bo . serializer . trace . v2 . bitfield . SpanEventBitField bitField = com . navercorp . pinpoint . common . server . bo . serializer . trace . v2 . bitfield . SpanEventBitField . build ( current , prev ) ; "<AssertPlaceHolder>" ; } getSequenceEncodingStrategy ( ) { final int set = getBit ( com . navercorp . pinpoint . common . server . bo . serializer . trace . v2 . bitfield . SpanEventBitField . SEQUENCE_ENCODING_STRATEGY ) ; switch ( set ) { case 0 : return SequenceEncodingStrategy . PREV_ADD1 ; case 1 : return SequenceEncodingStrategy . PREV_DELTA ; default : throw new java . lang . IllegalArgumentException ( "SEQUENCE_ENCODING_STRATEGY" ) ; } }
org . junit . Assert . assertEquals ( bitField . getSequenceEncodingStrategy ( ) , SequenceEncodingStrategy . PREV_ADD1 )
testContainsTimeValues ( ) { de . lessvoid . xml . xpp3 . Attributes p = new de . lessvoid . xml . xpp3 . Attributes ( ) ; p . set ( "time" , "2" ) ; p . set ( "value" , "4" ) ; effectValues . add ( p ) ; "<AssertPlaceHolder>" ; } containsTimeValues ( ) { if ( values . isEmpty ( ) ) { return false ; } for ( de . lessvoid . xml . xpp3 . Attributes p : values ) { if ( p . isSet ( "time" ) ) { return true ; } } return false ; }
org . junit . Assert . assertTrue ( effectValues . containsTimeValues ( ) )
testLocalizeDoubleArray ( ) { final long [ ] initial = new long [ ] { 532 , 632 , 987421 } ; final double [ ] result = new double [ 3 ] ; final net . imglib2 . Point p = new net . imglib2 . Point ( initial ) ; p . localize ( result ) ; for ( int i = 0 ; i < ( initial . length ) ; i ++ ) { "<AssertPlaceHolder>" ; } } localize ( int [ ] ) { source . localize ( position ) ; }
org . junit . Assert . assertEquals ( initial [ i ] , result [ i ] , 0 )
collapse_should_not_occur_above_min_path_length ( ) { int k = 25 ; java . util . List < au . edu . wehi . idsv . DirectedEvidence > input = new java . util . ArrayList < au . edu . wehi . idsv . DirectedEvidence > ( ) ; input . add ( SCE ( au . edu . wehi . idsv . debruijn . positional . FWD , withSequence ( S ( au . edu . wehi . idsv . debruijn . positional . RANDOM ) . substring ( 0 , 75 ) , Read ( 0 , 1 , "50M25S" ) ) ) ) ; input . add ( SCE ( au . edu . wehi . idsv . debruijn . positional . FWD , withSequence ( ( ( S ( au . edu . wehi . idsv . debruijn . positional . RANDOM ) . substring ( 0 , 70 ) ) + "GG" ) , Read ( 0 , 1 , "50M22S" ) ) ) ) ; java . util . List < au . edu . wehi . idsv . debruijn . positional . KmerPathNode > result = go ( input , k , 200 , 1 , 100 ) ; "<AssertPlaceHolder>" ; } size ( ) { return kmers . size ( ) ; }
org . junit . Assert . assertEquals ( 4 , result . size ( ) )
entity_should_be_managed_by_extended_pc_outside_active_ejb ( ) { com . acme . jpa . model . Record record = repository . retrieveById ( com . acme . jpa . model . Record . class , com . acme . jpa . business . RepositoryTestCase . idOfFirstRecord ) ; "<AssertPlaceHolder>" ; } isManaging ( java . io . Serializable ) { return em . contains ( entity ) ; }
org . junit . Assert . assertTrue ( repository . isManaging ( record ) )
testOneTensor ( ) { org . nd4j . linalg . api . ndarray . INDArray arr = org . nd4j . linalg . factory . Nd4j . ones ( 1 , 1 , 1 , 1 , 1 , 1 , 1 ) ; org . nd4j . linalg . api . ndarray . INDArray matrixToBroadcast = org . nd4j . linalg . factory . Nd4j . ones ( 1 , 1 ) ; "<AssertPlaceHolder>" ; } broadcast ( long [ ] ) { return broadcast ( org . nd4j . linalg . factory . Nd4j . createUninitialized ( shape ) ) ; }
org . junit . Assert . assertEquals ( matrixToBroadcast . broadcast ( arr . shape ( ) ) , arr )
testGetServiceReference ( ) { final org . osgi . framework . ServiceReference handlerReference = context . mock ( org . osgi . framework . ServiceReference . class , "handlerReference" ) ; context . checking ( new org . jmock . Expectations ( ) { { allowing ( handlerReference ) . getProperty ( EventConstants . EVENT_TOPIC ) ; will ( returnValue ( null ) ) ; allowing ( handlerReference ) . getProperty ( EventConstants . EVENT_FILTER ) ; will ( returnValue ( null ) ) ; allowing ( handlerReference ) . getProperty ( org . osgi . framework . Constants . SERVICE_ID ) ; will ( returnValue ( 20L ) ) ; allowing ( handlerReference ) . getProperty ( org . osgi . framework . Constants . SERVICE_RANKING ) ; will ( returnValue ( null ) ) ; allowing ( handlerReference ) . getProperty ( EventEngine . REENTRANT_HANDLER ) ; will ( returnValue ( null ) ) ; } } ) ; com . ibm . ws . event . internal . HandlerHolder holder = new com . ibm . ws . event . internal . HandlerHolder ( eventEngine , handlerReference , false ) ; "<AssertPlaceHolder>" ; } getServiceReference ( ) { return serviceReference ; }
org . junit . Assert . assertSame ( handlerReference , holder . getServiceReference ( ) )
testGetHttpSourceResourceInfoReturningNil ( ) { java . util . Map < java . lang . String , ? > result = instance . getHttpSourceResourceInfo ( ) ; "<AssertPlaceHolder>" ; } isEmpty ( ) { return pairs . isEmpty ( ) ; }
org . junit . Assert . assertTrue ( result . isEmpty ( ) )
shouldFindProcessorsProvider ( ) { victim = ro . isdc . wro . util . provider . ProviderFinder . of ( ro . isdc . wro . model . resource . processor . support . ProcessorProvider . class ) ; "<AssertPlaceHolder>" ; } find ( ) { final java . util . List < T > providers = new java . util . ArrayList < T > ( ) ; try { final java . util . Iterator < T > iterator = lookupProviders ( type ) ; for ( ; iterator . hasNext ( ) ; ) { final T provider = iterator . next ( ) ; ro . isdc . wro . util . provider . ProviderFinder . LOG . debug ( "found<sp>provider:<sp>{}" , provider ) ; providers . add ( provider ) ; } collectConfigurableProviders ( providers ) ; } catch ( final java . lang . Exception e ) { ro . isdc . wro . util . provider . ProviderFinder . LOG . error ( "Failed<sp>to<sp>discover<sp>providers<sp>using<sp>ServiceRegistry.<sp>Cannot<sp>continue..." , e ) ; throw ro . isdc . wro . WroRuntimeException . wrap ( e ) ; } java . util . Collections . sort ( providers , Ordered . DESCENDING_COMPARATOR ) ; ro . isdc . wro . util . provider . ProviderFinder . LOG . debug ( "found<sp>providers:<sp>{}" , providers ) ; return providers ; }
org . junit . Assert . assertEquals ( 3 , victim . find ( ) . size ( ) )
shouldNotRollForwardDateIfStartingDateMatchesDayOfMonth ( ) { org . joda . time . DateTime secondLastDayOfJune = new org . joda . time . DateTime ( ) . withMonthOfYear ( 6 ) . withDayOfMonth ( 29 ) . toDateMidnight ( ) . toDateTime ( ) ; org . joda . time . DateTime adjustedDate = org . mifos . calendar . CalendarUtils . getFirstDateForMonthOnDate ( secondLastDayOfJune , 29 ) ; "<AssertPlaceHolder>" ; } getFirstDateForMonthOnDate ( org . joda . time . DateTime , int ) { final java . util . GregorianCalendar gc = new java . util . GregorianCalendar ( ) ; gc . setTime ( startDate . toDate ( ) ) ; int dt = gc . get ( GregorianCalendar . DATE ) ; if ( dt > dayOfMonth ) { gc . add ( GregorianCalendar . MONTH , 1 ) ; } int M1 = gc . get ( GregorianCalendar . MONTH ) ; gc . set ( GregorianCalendar . DATE , dayOfMonth ) ; int M2 = gc . get ( GregorianCalendar . MONTH ) ; int daynum = dayOfMonth ; while ( M1 != M2 ) { gc . set ( GregorianCalendar . MONTH , ( ( gc . get ( GregorianCalendar . MONTH ) ) - 1 ) ) ; gc . set ( GregorianCalendar . DATE , ( daynum - 1 ) ) ; M2 = gc . get ( GregorianCalendar . MONTH ) ; daynum -- ; } return new org . joda . time . DateTime ( gc . getTime ( ) ) ; }
org . junit . Assert . assertThat ( adjustedDate , org . hamcrest . CoreMatchers . is ( secondLastDayOfJune ) )
testBuildTableNameRootServicePathDataModelByEntityEncoding ( ) { System . out . println ( ( ( ( ( getTestTraceHead ( "'dm-by-service-path'<sp>the<sp>MySQL<sp>table<sp>name<sp>is<sp>the<sp>encoding<sp>of<sp>the<sp>concatenation<sp>of<sp><service-path>,<sp>" 2 ) ) + "'dm-by-service-path'<sp>the<sp>MySQL<sp>table<sp>name<sp>is<sp>the<sp>encoding<sp>of<sp>the<sp>concatenation<sp>of<sp><service-path>,<sp>" 0 ) + "'dm-by-service-path'<sp>the<sp>MySQL<sp>table<sp>name<sp>is<sp>the<sp>encoding<sp>of<sp>the<sp>concatenation<sp>of<sp><service-path>,<sp>" ) + "<entityId><sp>and<sp><entityType>" ) ) ; java . lang . String attrPersistence = null ; java . lang . String batchSize = null ; java . lang . String batchTime = null ; java . lang . String batchTTL = null ; java . lang . String dataModel = "dm-by-entity" ; java . lang . String enableEncoding = "true" ; java . lang . String enableGrouping = null ; java . lang . String enableLowercase = null ; java . lang . String host = null ; java . lang . String password = null ; java . lang . String port = null ; java . lang . String username = null ; com . telefonica . iot . cygnus . sinks . NGSIMySQLSink sink = new com . telefonica . iot . cygnus . sinks . NGSIMySQLSink ( ) ; sink . configure ( createContext ( attrPersistence , batchSize , batchTime , batchTTL , dataModel , enableEncoding , enableGrouping , enableLowercase , host , password , port , username ) ) ; java . lang . String servicePath = "'dm-by-service-path'<sp>the<sp>MySQL<sp>table<sp>name<sp>is<sp>the<sp>encoding<sp>of<sp>the<sp>concatenation<sp>of<sp><service-path>,<sp>" 1 ; java . lang . String entity = "someId=someType" ; java . lang . String attribute = null ; try { java . lang . String builtTableName = sink . buildTableName ( servicePath , entity , attribute ) ; java . lang . String expecetedTableName = "x002fxffffsomeIdxffffsomeType" ; try { "<AssertPlaceHolder>" ; System . out . println ( ( ( ( ( getTestTraceHead ( "'dm-by-service-path'<sp>the<sp>MySQL<sp>table<sp>name<sp>is<sp>the<sp>encoding<sp>of<sp>the<sp>concatenation<sp>of<sp><service-path>,<sp>" 2 ) ) + "-<sp>OK<sp>-<sp>'" ) + builtTableName ) + "'<sp>is<sp>equals<sp>to<sp>the<sp>encoding<sp>of<sp><service-path>" ) ) ; } catch ( java . lang . AssertionError e ) { System . out . println ( ( ( ( ( getTestTraceHead ( "'dm-by-service-path'<sp>the<sp>MySQL<sp>table<sp>name<sp>is<sp>the<sp>encoding<sp>of<sp>the<sp>concatenation<sp>of<sp><service-path>,<sp>" 2 ) ) + "-<sp>FAIL<sp>-<sp>'" ) + builtTableName ) + "'dm-by-service-path'<sp>the<sp>MySQL<sp>table<sp>name<sp>is<sp>the<sp>encoding<sp>of<sp>the<sp>concatenation<sp>of<sp><service-path>,<sp>" 3 ) ) ; throw e ; } } catch ( java . lang . Exception e ) { System . out . println ( ( ( getTestTraceHead ( "'dm-by-service-path'<sp>the<sp>MySQL<sp>table<sp>name<sp>is<sp>the<sp>encoding<sp>of<sp>the<sp>concatenation<sp>of<sp><service-path>,<sp>" 2 ) ) + "-<sp>FAIL<sp>-<sp>There<sp>was<sp>some<sp>problem<sp>when<sp>building<sp>the<sp>table<sp>name" ) ) ; throw e ; } } buildTableName ( java . lang . String , java . lang . String , java . lang . String ) { java . lang . String name ; switch ( dataModel ) { case DMBYSERVICEPATH : name = com . telefonica . iot . cygnus . utils . NGSICharsets . encodePostgreSQL ( servicePath ) ; break ; case DMBYENTITY : java . lang . String truncatedServicePath = com . telefonica . iot . cygnus . utils . NGSICharsets . encodePostgreSQL ( servicePath ) ; name = ( truncatedServicePath . isEmpty ( ) ? "" : truncatedServicePath + ( com . telefonica . iot . cygnus . utils . CommonConstants . CONCATENATOR ) ) + ( com . telefonica . iot . cygnus . utils . NGSICharsets . encodePostgreSQL ( entity ) ) ; break ; case DMBYATTRIBUTE : truncatedServicePath = com . telefonica . iot . cygnus . utils . NGSICharsets . encodePostgreSQL ( servicePath ) ; name = ( ( ( truncatedServicePath . isEmpty ( ) ? "" : truncatedServicePath + ( com . telefonica . iot . cygnus . utils . CommonConstants . CONCATENATOR ) ) + ( com . telefonica . iot . cygnus . utils . NGSICharsets . encodePostgreSQL ( entity ) ) ) + ( com . telefonica . iot . cygnus . utils . CommonConstants . CONCATENATOR ) ) + ( com . telefonica . iot . cygnus . utils . NGSICharsets . encodePostgreSQL ( attribute ) ) ; break ; default : throw new com . telefonica . iot . cygnus . errors . CygnusBadConfiguration ( ( ( "Unknown<sp>data<sp>model<sp>'" + ( dataModel . toString ( ) ) ) + "'.<sp>Please,<sp>use<sp>dm-by-service-path,<sp>dm-by-entity<sp>or<sp>dm-by-attribute" ) ) ; } if ( ( name . length ( ) ) > ( com . telefonica . iot . cygnus . utils . NGSIConstants . POSTGRESQL_MAX_NAME_LEN ) ) { throw new com . telefonica . iot . cygnus . errors . CygnusBadConfiguration ( ( ( ( "Building<sp>table<sp>name<sp>'" + name ) + "'<sp>and<sp>its<sp>length<sp>is<sp>greater<sp>than<sp>" ) + ( com . telefonica . iot . cygnus . utils . NGSIConstants . POSTGRESQL_MAX_NAME_LEN ) ) ) ; } return name ; }
org . junit . Assert . assertEquals ( expecetedTableName , builtTableName )
adminProjectTestWithAdminPermission ( ) { org . apache . kylin . metadata . project . ProjectInstance project = org . apache . ranger . authorization . kylin . authorizer . RangerKylinAuthorizerTest . name2Projects . get ( org . apache . ranger . authorization . kylin . authorizer . RangerKylinAuthorizerTest . TEST_PROJECT ) ; boolean result = aclEvaluate . hasProjectAdminPermission ( project ) ; "<AssertPlaceHolder>" ; }
org . junit . Assert . assertTrue ( result )
query_processing_error_707 ( ) { com . redhat . lightblue . crud . FindRequest fr = new com . redhat . lightblue . crud . FindRequest ( ) ; fr . setQuery ( query ( ( "{'$and':[" + ( ( ( ( ( ( ( ( ( ( ( "<sp>}" 5 + "<sp>}" 1 ) + "<sp>{'array':'refchild.*.array','elemMatch':{" ) + "<sp>'$and':[" ) + "<sp>}" 4 ) + "<sp>}" 3 ) + "<sp>]" ) + "<sp>}" ) + "<sp>}" 2 ) + "<sp>]}," ) + "<sp>{'field':'refchild.*.refparent.*.field2','op':'=','rvalue':true}" ) + "<sp>}" 0 ) ) ) ) ; fr . setProjection ( projection ( "[{'field':'*'},{'field':'refchild'}]" ) ) ; fr . setEntityVersion ( new com . redhat . lightblue . EntityVersion ( "root_loop" , "1.0.0." ) ) ; com . redhat . lightblue . Response response = mediator . find ( fr ) ; System . out . println ( response . getEntityData ( ) ) ; "<AssertPlaceHolder>" ; } getMatchCount ( ) { return matchCount ; }
org . junit . Assert . assertEquals ( 5 , response . getMatchCount ( ) )
testBoundaryErrorEventDefaultHandlerWithErrorCodeWithoutStructureRef ( ) { org . kie . api . KieBase kbase = createKnowledgeBase ( "BPMN2-BoundaryErrorEventDefaultHandlerWithErrorCodeWithoutStructureRef.bpmn2" ) ; ksession = createKnowledgeSession ( kbase ) ; org . jbpm . bpmn2 . ErrorEventTest . ExceptionWorkItemHandler handler = new org . jbpm . bpmn2 . ErrorEventTest . ExceptionWorkItemHandler ( ) ; ksession . getWorkItemManager ( ) . registerWorkItemHandler ( "Human<sp>Task" , handler ) ; try { org . kie . api . runtime . process . ProcessInstance processInstance = ksession . startProcess ( "com.sample.bpmn.hello" ) ; org . junit . Assert . fail ( "This<sp>is<sp>not<sp>a<sp>default<sp>handler.<sp>So<sp>WorkflowRuntimeException<sp>must<sp>be<sp>thrown" ) ; } catch ( org . jbpm . workflow . instance . WorkflowRuntimeException e ) { "<AssertPlaceHolder>" ; } } startProcess ( java . lang . String ) { if ( ( ksession ) == null ) { org . kie . api . KieBase kbase = createKnowledgeBase ( ) ; org . kie . api . runtime . Environment env = createEnvironment ( context ) ; ksession = org . jbpm . process . audit . AbstractAuditLogServiceTest . createKieSession ( kbase , env ) ; new org . jbpm . process . audit . JPAWorkingMemoryDbLogger ( ksession ) ; ksession . getWorkItemManager ( ) . registerWorkItemHandler ( "Human<sp>Task" , new org . jbpm . process . instance . impl . demo . SystemOutWorkItemHandler ( ) ) ; } return ksession . startProcess ( processName ) ; }
org . junit . Assert . assertTrue ( true )
testInvalidURIGetFileSystem ( ) { final java . net . URI newRepo = java . net . URI . create ( "git:///new-repo-name" ) ; try { provider . getFileSystem ( newRepo ) ; failBecauseExceptionWasNotThrown ( org . uberfire . java . nio . fs . jgit . IllegalArgumentException . class ) ; } catch ( final java . lang . IllegalArgumentException ex ) { "<AssertPlaceHolder>" . isEqualTo ( "Parameter<sp>named<sp>'uri'<sp>is<sp>invalid,<sp>missing<sp>host<sp>repository!" ) ; } } getMessage ( ) { return message ; }
org . junit . Assert . assertThat ( ex . getMessage ( ) )
testWorker2 ( ) { java . util . List < org . ourgrid . common . specification . worker . WorkerSpecification > workers = org . ourgrid . common . specification . main . DescriptionFileCompile . compileNewSDF ( org . ourgrid . common . jdlTests . WorkerTest . test_file2 ) ; "<AssertPlaceHolder>" ; } compileNewSDF ( java . lang . String ) { java . lang . Compiler compiler = new org . ourgrid . common . specification . main . JDLCompiler ( ) ; compiler . compile ( descriptionFilePath , FileType . SDF ) ; java . util . List answer = compiler . getResult ( ) ; if ( answer == null ) { throw new org . ourgrid . common . specification . main . CompilerException ( ( "Site<sp>" + ( org . ourgrid . common . specification . CompilerMessages . DESCRIPTION_FILE_IS_EMPTY ) ) ) ; } return answer ; }
org . junit . Assert . assertTrue ( ( 2 == ( workers . size ( ) ) ) )
overlappingEquivalenceClasses ( ) { org . apache . clerezza . commons . rdf . Graph mGraph = new org . apache . clerezza . commons . rdf . impl . utils . simple . SimpleGraph ( ) ; org . apache . clerezza . commons . rdf . IRI mbox1 = new org . apache . clerezza . commons . rdf . IRI ( "mailto:foo@example.org" ) ; final org . apache . clerezza . commons . rdf . BlankNode bNode1 = new org . apache . clerezza . commons . rdf . BlankNode ( ) ; mGraph . add ( new org . apache . clerezza . commons . rdf . impl . utils . TripleImpl ( bNode1 , org . apache . clerezza . rdf . ontologies . FOAF . mbox , mbox1 ) ) ; mGraph . add ( new org . apache . clerezza . commons . rdf . impl . utils . TripleImpl ( bNode1 , org . apache . clerezza . rdf . ontologies . RDFS . comment , new org . apache . clerezza . commons . rdf . impl . utils . PlainLiteralImpl ( "a<sp>comment" ) ) ) ; final org . apache . clerezza . commons . rdf . BlankNode bNode2 = new org . apache . clerezza . commons . rdf . BlankNode ( ) ; org . apache . clerezza . commons . rdf . IRI mbox2 = new org . apache . clerezza . commons . rdf . IRI ( "mailto:bar@example.org" ) ; mGraph . add ( new org . apache . clerezza . commons . rdf . impl . utils . TripleImpl ( bNode2 , org . apache . clerezza . rdf . ontologies . FOAF . mbox , mbox1 ) ) ; mGraph . add ( new org . apache . clerezza . commons . rdf . impl . utils . TripleImpl ( bNode2 , org . apache . clerezza . rdf . ontologies . FOAF . mbox , mbox2 ) ) ; mGraph . add ( new org . apache . clerezza . commons . rdf . impl . utils . TripleImpl ( bNode2 , org . apache . clerezza . rdf . ontologies . RDFS . comment , new org . apache . clerezza . commons . rdf . impl . utils . PlainLiteralImpl ( "another<sp>comment" ) ) ) ; final org . apache . clerezza . commons . rdf . BlankNode bNode3 = new org . apache . clerezza . commons . rdf . BlankNode ( ) ; mGraph . add ( new org . apache . clerezza . commons . rdf . impl . utils . TripleImpl ( bNode3 , org . apache . clerezza . rdf . ontologies . FOAF . mbox , mbox2 ) ) ; mGraph . add ( new org . apache . clerezza . commons . rdf . impl . utils . TripleImpl ( bNode3 , org . apache . clerezza . rdf . ontologies . RDFS . comment , new org . apache . clerezza . commons . rdf . impl . utils . PlainLiteralImpl ( "yet<sp>another<sp>comment" ) ) ) ; org . apache . clerezza . rdf . utils . Smusher . smush ( mGraph , ontology ) ; "<AssertPlaceHolder>" ; } size ( ) { expandTill ( Integer . MAX_VALUE ) ; return valueList . size ( ) ; }
org . junit . Assert . assertEquals ( 5 , mGraph . size ( ) )
testListenersAtHighVolume ( ) { Common . serialPortManager . refreshFreeCommPorts ( ) ; java . lang . String portName = "/dev/null" ; java . util . List < com . infiniteautomation . mango . io . serial . SerialPortProxyEventListener > listeners = new java . util . ArrayList ( ) ; java . util . concurrent . atomic . AtomicInteger count = new java . util . concurrent . atomic . AtomicInteger ( ) ; listeners . add ( ( evt ) -> { count . incrementAndGet ( ) ; } ) ; com . infiniteautomation . mango . io . serial . JsscSerialPortInputStreamTest . TestSerialPort port = new com . infiniteautomation . mango . io . serial . JsscSerialPortInputStreamTest . TestSerialPort ( portName ) ; long period = 100 ; java . util . concurrent . TimeUnit unit = java . util . concurrent . TimeUnit . NANOSECONDS ; com . infiniteautomation . mango . io . serial . JsscSerialPortInputStream is = new com . infiniteautomation . mango . io . serial . JsscSerialPortInputStream ( port , period , unit , listeners ) ; for ( int i = 0 ; i < 100 ; i ++ ) { jssc . SerialPortEvent event = new jssc . SerialPortEvent ( portName , jssc . SerialPortEvent . RXCHAR , i ) ; is . serialEvent ( event ) ; } while ( ( JsscSerialPortManager . instance . eventQueue . size ( ) ) > 0 ) { try { System . out . println ( "Waiting..." ) ; java . lang . Thread . sleep ( 100 ) ; } catch ( java . lang . InterruptedException e ) { } } is . close ( ) ; "<AssertPlaceHolder>" ; } get ( ) { org . directwebremoting . WebContext context = org . directwebremoting . WebContextFactory . get ( ) ; if ( context == null ) { return null ; } return new uk . ltd . getahead . dwr . ExecutionContext ( context ) ; }
org . junit . Assert . assertEquals ( 100 , count . get ( ) )
moduleThrowingInitExceptionShouldBeMarkedForReinitializationOnlyTheFirstTime ( ) { final com . graphaware . runtime . TxDrivenModule mockModule = mockTxModule ( ) ; when ( mockModule . getConfiguration ( ) ) . thenReturn ( com . graphaware . runtime . NullTxDrivenModuleConfiguration . getInstance ( ) ) ; doThrow ( new com . graphaware . runtime . NeedsInitializationException ( ) ) . when ( mockModule ) . beforeCommit ( any ( com . graphaware . tx . event . improved . api . ImprovedTransactionData . class ) ) ; com . graphaware . runtime . GraphAwareRuntime runtime = com . graphaware . runtime . GraphAwareRuntimeFactory . createRuntime ( database , defaultConfiguration ( database ) . withTimingStrategy ( com . graphaware . runtime . ProductionRuntimeTest . TIMING_STRATEGY ) ) ; runtime . registerModule ( mockModule ) ; runtime . start ( ) ; try ( com . graphaware . runtime . Transaction tx = database . beginTx ( ) ) { database . createNode ( new com . graphaware . runtime . Label [ ] { } ) ; tx . success ( ) ; } long firstFailureTimestamp ; try ( com . graphaware . runtime . Transaction tx = database . beginTx ( ) ) { com . graphaware . runtime . TxDrivenModuleMetadata moduleMetadata = txRepo . getModuleMetadata ( mockModule ) ; firstFailureTimestamp = moduleMetadata . problemTimestamp ( ) ; } java . lang . Thread . sleep ( 1 ) ; try ( com . graphaware . runtime . Transaction tx = database . beginTx ( ) ) { database . createNode ( new com . graphaware . runtime . Label [ ] { } ) ; tx . success ( ) ; } long secondFailureTimestamp ; try ( com . graphaware . runtime . Transaction tx = database . beginTx ( ) ) { com . graphaware . runtime . TxDrivenModuleMetadata moduleMetadata = txRepo . getModuleMetadata ( mockModule ) ; secondFailureTimestamp = moduleMetadata . problemTimestamp ( ) ; } "<AssertPlaceHolder>" ; } problemTimestamp ( ) { return problemTimestamp ; }
org . junit . Assert . assertEquals ( firstFailureTimestamp , secondFailureTimestamp )
testParseWrongUrl ( ) { java . net . URL wsdlUrl = new java . net . URL ( "http://asd.com/asd.wsdl" ) ; org . reficio . ws . builder . core . Wsdl parser = org . reficio . ws . builder . core . Wsdl . parse ( wsdlUrl ) ; "<AssertPlaceHolder>" ; } parse ( java . net . URL ) { com . google . common . base . Preconditions . checkNotNull ( wsdlUrl , "URL<sp>of<sp>the<sp>WSDL<sp>cannot<sp>be<sp>null" ) ; return new org . reficio . ws . builder . core . Wsdl ( wsdlUrl ) ; }
org . junit . Assert . assertNotNull ( parser )
testReadObjectId ( ) { final com . allanbank . mongodb . bson . builder . DocumentBuilder fileResult = com . allanbank . mongodb . bson . builder . BuilderFactory . start ( ) ; fileResult . addString ( "_id" , "id" ) . add ( GridFs . LENGTH_FIELD , 4 ) . add ( GridFs . CHUNK_SIZE_FIELD , 1024 ) ; final com . allanbank . mongodb . bson . builder . DocumentBuilder chunkResult = com . allanbank . mongodb . bson . builder . BuilderFactory . start ( ) ; chunkResult . addBinary ( "data" , new byte [ ] { 1 , 2 , 3 , 4 } ) . add ( GridFs . CHUNK_NUMBER_FIELD , 0 ) ; final com . allanbank . mongodb . MongoDatabase mockDb = createMock ( com . allanbank . mongodb . MongoDatabase . class ) ; final com . allanbank . mongodb . MongoCollection mockFiles = createMock ( com . allanbank . mongodb . MongoCollection . class ) ; final com . allanbank . mongodb . MongoCollection mockChunks = createMock ( com . allanbank . mongodb . MongoCollection . class ) ; final com . allanbank . mongodb . MongoIterator < com . allanbank . mongodb . bson . Document > mockIterator = createMock ( com . allanbank . mongodb . MongoIterator . class ) ; expect ( mockDb . getCollection ( ( "fs" + ( GridFs . FILES_SUFFIX ) ) ) ) . andReturn ( mockFiles ) ; expect ( mockDb . getCollection ( ( "fs" + ( GridFs . CHUNKS_SUFFIX ) ) ) ) . andReturn ( mockChunks ) ; expect ( mockFiles . findOne ( anyObject ( com . allanbank . mongodb . bson . DocumentAssignable . class ) ) ) . andReturn ( fileResult . build ( ) ) ; expect ( mockChunks . find ( anyObject ( com . allanbank . mongodb . builder . Find . class ) ) ) . andReturn ( mockIterator ) ; expect ( mockIterator . iterator ( ) ) . andReturn ( mockIterator ) ; expect ( mockIterator . hasNext ( ) ) . andReturn ( Boolean . TRUE ) ; expect ( mockIterator . next ( ) ) . andReturn ( chunkResult . build ( ) ) ; expect ( mockIterator . hasNext ( ) ) . andReturn ( Boolean . FALSE ) ; mockIterator . close ( ) ; expectLastCall ( ) ; replay ( mockDb , mockFiles , mockChunks , mockIterator ) ; final com . allanbank . mongodb . gridfs . GridFs fs = new com . allanbank . mongodb . gridfs . GridFs ( mockDb ) ; final java . io . ByteArrayOutputStream sink = new java . io . ByteArrayOutputStream ( 4 ) ; fs . read ( new com . allanbank . mongodb . bson . element . ObjectId ( ) , sink ) ; "<AssertPlaceHolder>" ; verify ( mockDb , mockFiles , mockChunks , mockIterator ) ; } read ( com . allanbank . mongodb . bson . element . ObjectId , java . io . OutputStream ) { final com . allanbank . mongodb . bson . Document fileDoc = myFilesCollection . findOne ( where ( com . allanbank . mongodb . gridfs . GridFs . ID_FIELD ) . equals ( id ) ) ; if ( fileDoc == null ) { throw new java . io . FileNotFoundException ( id . toString ( ) ) ; } doRead ( fileDoc , sink ) ; }
org . junit . Assert . assertArrayEquals ( new byte [ ] { 1 , 2 , 3 , 4 } , sink . toByteArray ( ) )
testCos ( ) { final org . hipparchus . analysis . UnivariateFunction cos = new org . hipparchus . analysis . function . Cos ( ) ; final org . hipparchus . analysis . integration . gauss . GaussIntegrator integrator = org . hipparchus . analysis . integration . gauss . LegendreHighPrecisionTest . factory . legendreHighPrecision ( 7 , 0 , ( ( Math . PI ) / 2 ) ) ; final double s = integrator . integrate ( cos ) ; "<AssertPlaceHolder>" ; } ulp ( float ) { if ( java . lang . Float . isInfinite ( x ) ) { return Float . POSITIVE_INFINITY ; } return org . hipparchus . util . FastMath . abs ( ( x - ( java . lang . Float . intBitsToFloat ( ( ( java . lang . Float . floatToIntBits ( x ) ) ^ 1 ) ) ) ) ) ; }
org . junit . Assert . assertEquals ( 1 , s , java . lang . Math . ulp ( 1.0 ) )
testRequestWithIncompleteAuthorization ( ) { javax . servlet . http . HttpServletRequest request = org . mockito . Mockito . mock ( javax . servlet . http . HttpServletRequest . class ) ; javax . servlet . http . HttpServletResponse response = org . mockito . Mockito . mock ( javax . servlet . http . HttpServletResponse . class ) ; org . mockito . Mockito . when ( request . getHeader ( HttpConstants . AUTHORIZATION_HEADER ) ) . thenReturn ( HttpConstants . BASIC ) ; "<AssertPlaceHolder>" ; } authenticate ( java . net . URL , org . apache . hadoop . security . authentication . client . AuthenticatedURL$Token ) { if ( ! ( hasDelegationToken ( url ) ) ) { super . authenticate ( url , token ) ; } }
org . junit . Assert . assertNull ( handler . authenticate ( request , response ) )
testBasicWithTwoServices ( ) { com . spinn3r . artemis . init . ServiceTypeReferences serviceReferences = new com . spinn3r . artemis . init . ServiceTypeReferences ( ) . put ( com . spinn3r . artemis . init . advertisements . HostnameServiceType . class , com . spinn3r . artemis . init . MockHostnameService . class ) . put ( com . spinn3r . artemis . init . advertisements . VersionServiceType . class , com . spinn3r . artemis . init . MockVersionService . class ) ; com . spinn3r . artemis . init . modular . ModularLauncher modularLauncher = com . spinn3r . artemis . init . modular . ModularLauncher . create ( serviceReferences ) . build ( ) ; modularLauncher . start ( ) ; "<AssertPlaceHolder>" ; modularLauncher . getInjector ( ) . injectMembers ( this ) ; modularLauncher . stop ( ) ; } getInjector ( ) { return injector ; }
org . junit . Assert . assertNotNull ( modularLauncher . getInjector ( ) )
checkTest3 ( ) { com . navercorp . pinpoint . web . vo . Application application = new com . navercorp . pinpoint . web . vo . Application ( com . navercorp . pinpoint . web . alarm . checker . TotalCountToCalleeCheckerTest . FROM_SERVICE_NAME , com . navercorp . pinpoint . common . trace . ServiceType . STAND_ALONE ) ; com . navercorp . pinpoint . web . alarm . collector . MapStatisticsCallerDataCollector dataCollector = new com . navercorp . pinpoint . web . alarm . collector . MapStatisticsCallerDataCollector ( com . navercorp . pinpoint . web . alarm . DataCollectorFactory . DataCollectorCategory . CALLER_STAT , application , com . navercorp . pinpoint . web . alarm . checker . TotalCountToCalleeCheckerTest . dao , java . lang . System . currentTimeMillis ( ) , 300000 ) ; com . navercorp . pinpoint . web . alarm . vo . Rule rule = new com . navercorp . pinpoint . web . alarm . vo . Rule ( com . navercorp . pinpoint . web . alarm . checker . TotalCountToCalleeCheckerTest . FROM_SERVICE_NAME , com . navercorp . pinpoint . web . alarm . checker . TotalCountToCalleeCheckerTest . SERVICE_TYPE , CheckerCategory . TOTAL_COUNT_TO_CALLEE . getName ( ) , 10 , "testGroup" , false , false , ( ( com . navercorp . pinpoint . web . alarm . checker . TotalCountToCalleeCheckerTest . TO_SERVICE_NAME ) + 2 ) ) ; com . navercorp . pinpoint . web . alarm . checker . TotalCountToCalleeChecker checker = new com . navercorp . pinpoint . web . alarm . checker . TotalCountToCalleeChecker ( dataCollector , rule ) ; checker . check ( ) ; "<AssertPlaceHolder>" ; } isDetected ( ) { return detected ; }
org . junit . Assert . assertTrue ( checker . isDetected ( ) )
testParseInvalid ( ) { java . lang . String expr = "${customer.ad" ; try { org . netbeans . modules . web . el . ELParser . parse ( expr ) ; org . junit . Assert . fail ( ( "Should<sp>not<sp>parse:<sp>" + expr ) ) ; } catch ( javax . el . ELException ele ) { "<AssertPlaceHolder>" ; } } getMessage ( ) { java . lang . StringBuilder b = new java . lang . StringBuilder ( "Cannot<sp>Undo.\nFollowing<sp>files<sp>were<sp>modified:\n" ) ; for ( java . lang . String f : files ) { b . append ( f ) ; b . append ( '\n' ) ; } return b . toString ( ) ; }
org . junit . Assert . assertNotNull ( ele . getMessage ( ) )
testKieSessionRef ( ) { org . kie . api . runtime . KieSession ksession = ( ( org . kie . api . runtime . KieSession ) ( org . kie . aries . blueprint . tests . KieBlueprintGAVTest . container . getComponentInstance ( "ksession1" ) ) ) ; "<AssertPlaceHolder>" ; }
org . junit . Assert . assertNotNull ( ksession )
testOwnerInfoEntitlementsConsumedByFamilyPoolOverridesProduct ( ) { owner . addEntitlementPool ( pool1 ) ; pool1 . setAttribute ( Pool . Attributes . PRODUCT_FAMILY , "test<sp>family" ) ; org . candlepin . model . Product product = new org . candlepin . model . Product ( "testProd" , "testProd" ) ; product . setAttribute ( Pool . Attributes . PRODUCT_FAMILY , "bad<sp>test<sp>family" ) ; createProduct ( product , owner ) ; pool1 . setProduct ( product ) ; org . candlepin . model . ConsumerType type = consumerTypeCurator . getByLabel ( "system" ) ; org . candlepin . model . Consumer consumer = new org . candlepin . model . Consumer ( "test-consumer" , "test-user" , owner , type ) ; consumerCurator . create ( consumer ) ; org . candlepin . model . EntitlementCertificate cert = createEntitlementCertificate ( "fake" , "fake" ) ; org . candlepin . model . Entitlement entitlement = createEntitlement ( owner , consumer , pool1 , cert ) ; entitlement . setQuantity ( 1 ) ; entitlementCurator . create ( entitlement ) ; pool1 . getEntitlements ( ) . add ( entitlement ) ; org . candlepin . model . OwnerInfo info = ownerInfoCurator . getByOwner ( owner ) ; java . util . Map < java . lang . String , org . candlepin . model . OwnerInfo . ConsumptionTypeCounts > expected = new java . util . HashMap < java . lang . String , org . candlepin . model . OwnerInfo . ConsumptionTypeCounts > ( ) { { put ( "test<sp>family" , new org . candlepin . model . OwnerInfo . ConsumptionTypeCounts ( 1 , 0 ) ) ; } } ; "<AssertPlaceHolder>" ; } getEntitlementsConsumedByFamily ( ) { return entitlementsConsumedByFamily ; }
org . junit . Assert . assertEquals ( expected , info . getEntitlementsConsumedByFamily ( ) )
testReductionsBackwards ( ) { for ( int i = 0 ; i < 7 ; i ++ ) { org . nd4j . autodiff . samediff . SameDiff sd = org . nd4j . autodiff . samediff . SameDiff . create ( ) ; int nOut = 4 ; int minibatch = 3 ; org . nd4j . autodiff . samediff . SDVariable input = sd . var ( "max" 2 , DataType . DOUBLE , new long [ ] { minibatch , nOut } ) ; org . nd4j . autodiff . samediff . SDVariable label = sd . var ( "label" , DataType . DOUBLE , new long [ ] { minibatch , nOut } ) ; org . nd4j . autodiff . samediff . SDVariable diff = input . sub ( label ) ; org . nd4j . autodiff . samediff . SDVariable sqDiff = diff . mul ( diff ) ; org . nd4j . autodiff . samediff . SDVariable msePerEx = sd . mean ( "msePerEx" , sqDiff , 1 ) ; org . nd4j . autodiff . samediff . SDVariable loss ; java . lang . String name ; switch ( i ) { case 0 : loss = sd . mean ( "loss" , msePerEx , 0 ) ; name = "mean" ; break ; case 1 : loss = sd . sum ( "loss" , msePerEx , 0 ) ; name = "sum" ; break ; case 2 : loss = sd . standardDeviation ( "loss" , msePerEx , true , 0 ) ; name = "max" 0 ; break ; case 3 : loss = sd . min ( "loss" , msePerEx , 0 ) ; name = "min" ; break ; case 4 : loss = sd . max ( "loss" , msePerEx , 0 ) ; name = "max" ; break ; case 5 : loss = sd . variance ( "loss" , msePerEx , true , 0 ) ; name = "variance" ; break ; case 6 : loss = sd . prod ( "loss" , msePerEx , 0 ) ; name = "max" 1 ; break ; default : throw new java . lang . RuntimeException ( ) ; } java . lang . String msg = ( ( "test:<sp>" + i ) + "<sp>-<sp>" ) + name ; log . info ( ( "max" 3 + msg ) ) ; org . nd4j . linalg . api . ndarray . INDArray inputArr = org . nd4j . linalg . factory . Nd4j . rand ( DataType . DOUBLE , minibatch , nOut ) ; org . nd4j . linalg . api . ndarray . INDArray labelArr = org . nd4j . linalg . factory . Nd4j . rand ( DataType . DOUBLE , minibatch , nOut ) ; sd . associateArrayWithVariable ( inputArr , input ) ; sd . associateArrayWithVariable ( labelArr , label ) ; org . nd4j . linalg . api . ndarray . INDArray result = sd . execAndEndResult ( ) ; "<AssertPlaceHolder>" ; sd . execBackwards ( java . util . Collections . emptyMap ( ) ) ; } } length ( ) { return indexes . length ; }
org . junit . Assert . assertEquals ( 1 , result . length ( ) )
match_operations_default_inetnum ( ) { parse ( "-T<sp>inetnum<sp>10.0.0.0" ) ; "<AssertPlaceHolder>" ; } matchOperation ( ) { return matchOperation ; }
org . junit . Assert . assertNull ( subject . matchOperation ( ) )
testTrimmedSubtemplatesNoArgs ( ) { org . stringtemplate . v4 . STGroup group = new org . stringtemplate . v4 . STGroup ( ) ; group . defineTemplate ( "test" , "[<foo({<sp>foo<sp>})>]" ) ; group . defineTemplate ( "foo" , "x" , "<x>" ) ; org . stringtemplate . v4 . ST st = group . getInstanceOf ( "test" ) ; java . lang . String expected = "[<sp>foo<sp>]" ; java . lang . String result = st . render ( ) ; "<AssertPlaceHolder>" ; } render ( ) { return render ( java . util . Locale . getDefault ( ) ) ; }
org . junit . Assert . assertEquals ( expected , result )
testGetBit ( ) { byte b = ( ( byte ) ( 134 ) ) ; boolean [ ] booleans = new boolean [ ] { false , true , true , false , false , false , false , true } ; for ( int i = 0 ; i < 8 ; i ++ ) { "<AssertPlaceHolder>" ; } } getBit ( byte , int ) { return ( ch . bfh . unicrypt . helper . math . MathUtil . and ( b , ch . bfh . unicrypt . helper . math . MathUtil . BIT_MASKS [ i ] ) ) != 0 ; }
org . junit . Assert . assertEquals ( booleans [ i ] , ch . bfh . unicrypt . helper . math . MathUtil . getBit ( b , i ) )
discardOutput ( ) { final com . esotericsoftware . kryo . io . Input [ ] result = new com . esotericsoftware . kryo . io . Input [ 2 ] ; kryoInputPool . run ( ( input ) -> { result [ 0 ] = input ; return null ; } , ( ( KryoInputPool . MAX_POOLED_BUFFER_SIZE ) + 1 ) ) ; kryoInputPool . run ( ( input ) -> { result [ 1 ] = input ; return null ; } , 0 ) ; "<AssertPlaceHolder>" ; }
org . junit . Assert . assertTrue ( ( ( result [ 0 ] ) != ( result [ 1 ] ) ) )
testAvroCoderTreeMapDeterminism ( ) { org . apache . beam . sdk . coders . AvroCoderTest . TreeMapField size1 = new org . apache . beam . sdk . coders . AvroCoderTest . TreeMapField ( ) ; org . apache . beam . sdk . coders . AvroCoderTest . TreeMapField size2 = new org . apache . beam . sdk . coders . AvroCoderTest . TreeMapField ( ) ; size1 . field . put ( "hello" , "world" ) ; size1 . field . put ( "another" , "entry" ) ; size2 . field . put ( "another" , "entry" ) ; size2 . field . put ( "hello" , "world" ) ; org . apache . beam . sdk . coders . AvroCoder < org . apache . beam . sdk . coders . AvroCoderTest . TreeMapField > coder = org . apache . beam . sdk . coders . AvroCoder . of ( org . apache . beam . sdk . coders . AvroCoderTest . TreeMapField . class ) ; coder . verifyDeterministic ( ) ; java . io . ByteArrayOutputStream outStream1 = new java . io . ByteArrayOutputStream ( ) ; java . io . ByteArrayOutputStream outStream2 = new java . io . ByteArrayOutputStream ( ) ; org . apache . beam . sdk . coders . Coder . Context context = org . apache . beam . sdk . coders . Coder . Context . NESTED ; coder . encode ( size1 , outStream1 , context ) ; coder . encode ( size2 , outStream2 , context ) ; "<AssertPlaceHolder>" ; } toByteArray ( ) { if ( ( ! ( isFallback ) ) && ( ( count ) > 0 ) ) { return buf ; } else { return super . toByteArray ( ) ; } }
org . junit . Assert . assertArrayEquals ( outStream1 . toByteArray ( ) , outStream2 . toByteArray ( ) )
testDeployMap1 ( ) { "<AssertPlaceHolder>" ; net . jini . lookup . ServiceDiscoveryManager sdm = testManager . getServiceDiscoveryManager ( ) ; org . rioproject . test . ServiceMonitor < org . rioproject . cybernode . Cybernode > cyberMon = new org . rioproject . test . ServiceMonitor ( sdm , org . rioproject . cybernode . Cybernode . class ) ; cyberMon . waitFor ( 4 ) ; org . rioproject . test . ServiceMonitor < org . rioproject . monitor . ProvisionMonitor > pmMon = new org . rioproject . test . ServiceMonitor < org . rioproject . monitor . ProvisionMonitor > ( sdm , org . rioproject . monitor . ProvisionMonitor . class ) ; pmMon . waitFor ( 1 ) ; org . rioproject . monitor . ProvisionMonitor monitor = pmMon . getServices ( ) . get ( 0 ) ; org . rioproject . deploy . DeployAdmin admin = ( ( org . rioproject . deploy . DeployAdmin ) ( monitor . getAdmin ( ) ) ) ; java . io . File opstring = new java . io . File ( "src/test/resources/opstring/fixed.groovy" ) ; java . io . File opstring2 = new java . io . File ( "src/test/resources/opstring/fixed2.groovy" ) ; org . rioproject . opstring . OperationalString [ ] operationalStrings = new org . rioproject . impl . opstring . OpStringLoader ( ) . parseOperationalString ( opstring ) ; org . rioproject . opstring . OperationalString [ ] operationalStrings2 = new org . rioproject . impl . opstring . OpStringLoader ( ) . parseOperationalString ( opstring2 ) ; java . util . concurrent . CyclicBarrier gate = new java . util . concurrent . CyclicBarrier ( 101 ) ; java . util . concurrent . CyclicBarrier gate2 = new java . util . concurrent . CyclicBarrier ( 101 ) ; for ( int i = 0 ; i < 100 ; i ++ ) new java . lang . Thread ( new org . rioproject . test . monitor . FixedTest . Deployer ( admin , operationalStrings [ 0 ] , gate ) ) . start ( ) ; gate . await ( ) ; org . rioproject . opstring . OperationalStringManager mgr = null ; while ( mgr == null ) { if ( ( admin . getOperationalStringManagers ( ) . length ) > 0 ) mgr = admin . getOperationalStringManagers ( ) [ 0 ] ; java . lang . Thread . sleep ( 1000 ) ; } for ( int i = 0 ; i < 100 ; i ++ ) new java . lang . Thread ( new org . rioproject . test . monitor . FixedTest . Deployer ( admin , operationalStrings2 [ 0 ] , gate2 ) ) . start ( ) ; gate2 . await ( ) ; testManager . waitForDeployment ( mgr ) ; org . rioproject . opstring . ServiceElement serviceElement = mgr . getOperationalString ( ) . getServices ( ) [ 0 ] ; for ( int i = 0 ; i < 50 ; i ++ ) { org . rioproject . deploy . ServiceBeanInstance [ ] instances = mgr . getServiceBeanInstances ( serviceElement ) ; System . out . println ( ( ( "Have<sp>" + ( instances . length ) ) + "<sp>services" ) ) ; java . lang . Thread . sleep ( 500 ) ; } testManager . undeployAll ( monitor ) ; }
org . junit . Assert . assertNotNull ( testManager )
testCheckHdfsNameVariableNotNull ( ) { namedCluster . setHdfsHost ( "${hdfsHost}" ) ; when ( variableSpace . getVariable ( "hdfsHost" ) ) . thenReturn ( "test" ) ; when ( variableSpace . environmentSubstitute ( namedCluster . getHdfsHost ( ) ) ) . thenReturn ( "test" ) ; "<AssertPlaceHolder>" ; } isHdfsHostEmpty ( org . pentaho . di . core . variables . VariableSpace ) { java . lang . String hostNameParsed = getHostNameParsed ( variableSpace ) ; return ( hostNameParsed == null ) || ( hostNameParsed . trim ( ) . isEmpty ( ) ) ; }
org . junit . Assert . assertEquals ( false , namedCluster . isHdfsHostEmpty ( variableSpace ) )
assertGetTimeForColumnLabel ( ) { for ( java . sql . ResultSet each : resultSets . values ( ) ) { try { each . getTime ( columnName ) ; org . junit . Assert . fail ( "Expected<sp>an<sp>SQLException<sp>to<sp>be<sp>thrown" ) ; } catch ( final java . lang . Exception ex ) { "<AssertPlaceHolder>" ; } } } isEmpty ( ) { return tables . isEmpty ( ) ; }
org . junit . Assert . assertFalse ( ex . getMessage ( ) . isEmpty ( ) )
testGetCompletionConfig ( ) { classUnderTest . setCompletionConfig ( 10 ) ; "<AssertPlaceHolder>" ; } getCompletionConfig ( ) { return completionConfig ; }
org . junit . Assert . assertEquals ( 10 , classUnderTest . getCompletionConfig ( ) )
checkTaConfigTest_SuccessfulCase ( ) { jenkins . plugins . coverity . TaOptionBlock taOptionBlock = new jenkins . plugins . coverity . Utils . TaOptionBlockBuilder ( ) . withJavaOptionBlock ( true ) . withJavaCoverageTool ( "Jacoco" ) . withJunitFramework ( true ) . withPolicyFile ( "TestPolicyFilePath" ) . build ( ) ; "<AssertPlaceHolder>" ; } checkTaConfig ( ) { boolean delim = true ; java . lang . String errorText = org . apache . commons . lang . StringUtils . EMPTY ; if ( ( ( ! ( this . javaOptionBlock ) ) && ( ! ( this . cOptionBlock ) ) ) && ( ! ( this . csOptionBlock ) ) ) { errorText += "[Test<sp>Advisor]<sp>No<sp>Coverage<sp>language<sp>was<sp>chosen,<sp>please<sp>pick<sp>at<sp>least<sp>one<sp>\n" ; delim = false ; } if ( org . apache . commons . lang . StringUtils . isEmpty ( this . policyFile ) ) { errorText += "[Test<sp>Advisor]<sp>Policy<sp>file<sp>is<sp>not<sp>specified.<sp>\n" ; delim = false ; } if ( ( ( ( this . javaOptionBlock ) && ( this . javaCoverageTool . equals ( "none" ) ) ) || ( ( this . cOptionBlock ) && ( this . cxxCoverageTool . equals ( "none" ) ) ) ) || ( ( this . csOptionBlock ) && ( this . csCoverageTool . equals ( "none" ) ) ) ) { errorText += "[Test<sp>Advisor]<sp>No<sp>Coverage<sp>tool<sp>was<sp>chosen<sp>\n" ; delim = false ; } if ( this . cOptionBlock ) { if ( ( this . cxxCoverageTool . equals ( "bullseye" ) ) && ( org . apache . commons . lang . StringUtils . isEmpty ( this . bullsEyeDir ) ) ) { errorText += "[Test<sp>Advisor]<sp>Bulls<sp>eye<sp>requires<sp>the<sp>installation<sp>directory.<sp>\n" ; delim = false ; } } if ( delim ) { errorText = "Pass" ; } return errorText ; }
org . junit . Assert . assertEquals ( "Pass" , taOptionBlock . checkTaConfig ( ) )
testGetEvents ( ) { for ( org . rioproject . event . RemoteServiceEvent event : org . rioproject . eventcollector . service . TransientEventManagerTest . createRemoteServiceEvents ( 5 ) ) { eventManager . notify ( event ) ; } "<AssertPlaceHolder>" ; } getEvents ( ) { org . rioproject . eventcollector . service . List < org . rioproject . event . RemoteServiceEvent > events = new org . rioproject . eventcollector . service . LinkedList < org . rioproject . event . RemoteServiceEvent > ( ) ; events . addAll ( eventLog . values ( ) ) ; return events ; }
org . junit . Assert . assertEquals ( 5 , eventManager . getEvents ( ) . size ( ) )
scopeForV3Tokens ( ) { org . eclipse . emf . common . util . EList < com . github . jknack . antlr4ide . lang . V3Token > tokenList = new org . eclipse . emf . common . util . BasicEList < com . github . jknack . antlr4ide . lang . V3Token > ( ) ; com . github . jknack . antlr4ide . lang . V3Tokens tokens = createMock ( com . github . jknack . antlr4ide . lang . V3Tokens . class ) ; com . github . jknack . antlr4ide . lang . V3Token token = createMock ( com . github . jknack . antlr4ide . lang . V3Token . class ) ; expect ( tokens . getTokens ( ) ) . andReturn ( tokenList ) ; tokenList . add ( token ) ; java . lang . Object [ ] mocks = new java . lang . Object [ ] { tokens , token } ; replay ( mocks ) ; java . util . List < org . eclipse . emf . ecore . EObject > scopes = com . google . common . collect . Lists . newArrayList ( ) ; new com . github . jknack . antlr4ide . scoping . Antlr4ScopeProvider ( ) . scopeFor ( tokens , scopes , com . github . jknack . antlr4ide . lang . Rule . class ) ; "<AssertPlaceHolder>" ; verify ( mocks ) ; }
org . junit . Assert . assertEquals ( com . google . common . collect . Lists . newArrayList ( token ) , scopes )
shouldBeValidWithEnoughCustomLength ( ) { when ( parameterService . findAll ( Key . API_QUALITY_METRICS_DESCRIPTION_MIN_LENGTH ) ) . thenReturn ( java . util . Arrays . asList ( "3" ) ) ; io . gravitee . management . model . api . ApiEntity api = mock ( io . gravitee . management . model . api . ApiEntity . class ) ; when ( api . getDescription ( ) ) . thenReturn ( "123" ) ; boolean valid = srv . isValid ( api ) ; "<AssertPlaceHolder>" ; } isValid ( io . gravitee . management . model . api . ApiEntity ) { return ( ( api . getViews ( ) ) != null ) && ( ! ( api . getViews ( ) . isEmpty ( ) ) ) ; }
org . junit . Assert . assertTrue ( valid )
testShouldBeAbleToSetMoreThanOneLineOfTextInATextArea ( ) { org . openqa . selenium . qtwebkit . nativetests . WebElement textarea = driver . findElement ( org . openqa . selenium . qtwebkit . nativetests . By . id ( "emptytext" ) ) ; textarea . clear ( ) ; wait . until ( org . openqa . selenium . qtwebkit . nativetests . WaitingConditions . elementTextToEqual ( textarea , "" ) ) ; java . lang . String expectedText = ( ( "i<sp>like<sp>cheese" + ( newLine ) ) + ( newLine ) ) + "it's<sp>really<sp>nice" ; textarea . sendKeys ( expectedText ) ; java . lang . String seenText = textarea . getText ( ) ; "<AssertPlaceHolder>" ; } getText ( ) { return stb . getText ( ) ; }
org . junit . Assert . assertThat ( seenText , org . hamcrest . Matchers . equalTo ( expectedText ) )
testFindStaticMethod ( ) { java . lang . invoke . MethodHandle mh = com . jmethods . catatumbo . impl . IntrospectionUtils . findStaticMethod ( com . jmethods . catatumbo . entities . ImmutablePhone . class , "newBuilder" , java . lang . Object . class ) ; "<AssertPlaceHolder>" ; }
org . junit . Assert . assertNotNull ( mh )
containerToNormalized ( ) { final java . util . Map . Entry < org . opendaylight . yangtools . yang . data . api . YangInstanceIdentifier , org . opendaylight . yangtools . yang . data . api . schema . NormalizedNode < ? , ? > > entry = registry . toNormalizedNode ( org . opendaylight . yangtools . yang . binding . InstanceIdentifier . create ( org . opendaylight . yang . gen . v1 . urn . opendaylight . params . xml . ns . yang . mdsal . test . binding . rev140701 . Top . class ) , top ( ) ) ; final org . opendaylight . yangtools . yang . data . api . schema . ContainerNode topNormalized = org . opendaylight . mdsal . binding . dom . codec . test . NormalizedNodeSerializeDeserializeTest . getEmptyTop ( ) ; "<AssertPlaceHolder>" ; } getValue ( ) { return value ; }
org . junit . Assert . assertEquals ( topNormalized , entry . getValue ( ) )
testReturnsTrueWhenMatchesDefinedTextArgument ( ) { boolean result = ( ( java . lang . Boolean ) ( wildcardFunction . call ( new java . lang . Object [ ] { "TEST*TEST" , "TESTfooTEST" } , context ) ) ) ; "<AssertPlaceHolder>" ; }
org . junit . Assert . assertTrue ( result )
testGetStyleForMetacardFloatAttribute ( ) { java . lang . Float testFloat = java . lang . Float . valueOf ( "2.0" ) ; ddf . catalog . data . Metacard metacard = new org . codice . ddf . spatial . kml . transformer . MockMetacard ( AttributeFormat . FLOAT . toString ( ) , testFloat ) ; org . codice . ddf . spatial . kml . transformer . KmlStyleMap mapper = new org . codice . ddf . spatial . kml . transformer . KmlStyleMap ( ) ; mapper . addMapEntry ( new org . codice . ddf . spatial . kml . transformer . KmlStyleMapEntryImpl ( AttributeFormat . FLOAT . toString ( ) , java . lang . String . valueOf ( testFloat ) , org . codice . ddf . spatial . kml . transformer . TestKmlStyleMap . DEFAULT_STYLE_URL ) ) ; "<AssertPlaceHolder>" ; } getStyleForMetacard ( ddf . catalog . data . Metacard ) { for ( org . codice . ddf . spatial . kml . transformer . KmlStyleMapEntry mapEntry : styleMap ) { if ( mapEntry . metacardMatch ( metacard ) ) { return mapEntry . getStyleUrl ( ) ; } } return "" ; }
org . junit . Assert . assertThat ( mapper . getStyleForMetacard ( metacard ) , org . hamcrest . Matchers . is ( org . codice . ddf . spatial . kml . transformer . TestKmlStyleMap . DEFAULT_STYLE_URL ) )
testDecodeGeneralizedTimeWithoutTimeZone ( ) { java . util . Calendar cal = java . util . Calendar . getInstance ( ) ; cal . clear ( ) ; cal . set ( Calendar . YEAR , 2010 ) ; cal . set ( Calendar . MONTH , Calendar . JULY ) ; cal . set ( Calendar . DAY_OF_MONTH , 12 ) ; cal . set ( Calendar . HOUR_OF_DAY , 21 ) ; cal . set ( Calendar . MINUTE , 45 ) ; cal . set ( Calendar . SECOND , 27 ) ; java . util . Date expectedDate = cal . getTime ( ) ; byte [ ] data = new byte [ ] { 24 , 14 , 50 , 48 , 49 , 48 , 48 , 55 , 49 , 50 , 50 , 49 , 52 , 53 , 50 , 55 } ; java . util . Date actualDate = org . kaazing . gateway . util . asn1 . Asn1Utils . decodeGeneralizedTime ( java . nio . ByteBuffer . wrap ( data ) ) ; "<AssertPlaceHolder>" ; } wrap ( org . kaazing . mina . netty . buffer . ByteBufferWrappingChannelBuffer ) { this . buffer = buffer . buffer ; order = buffer . order ; capacity = buffer . capacity ; setIndex ( buffer . readerIndex ( ) , buffer . writerIndex ( ) ) ; return this ; }
org . junit . Assert . assertEquals ( expectedDate , actualDate )