idx int64 0 41.2k | question stringlengths 73 5.81k | target stringlengths 5 918 |
|---|---|---|
27,000 | public Set < String > scan ( ) { initPatterns ( ) ; List < String > roots = packagePatterns . getRoots ( ) ; InternalScanner scanner = new InternalScanner ( getClassLoader ( ) ) ; Set < String > exports = scanner . findInPackages ( new PatternTest ( ) , roots . toArray ( new String [ roots . size ( ) ] ) ) ; return exports ; } | Scans the classloader as configured . |
27,001 | public void processSheet ( StylesTable styles , ReadOnlySharedStringsTable strings , SheetContentsHandler sheetHandler , InputStream sheetInputStream ) throws IOException , ParserConfigurationException , SAXException { DataFormatter formatter = new DataFormatter ( ) ; InputSource sheetSource = new InputSource ( sheetInputStream ) ; try { XMLReader sheetParser = SAXHelper . newXMLReader ( ) ; ContentHandler handler = new XSSFSheetXMLHandler ( styles , null , strings , sheetHandler , formatter , false ) ; sheetParser . setContentHandler ( handler ) ; sheetParser . parse ( sheetSource ) ; } catch ( ParserConfigurationException e ) { throw new RuntimeException ( "SAX parser appears to be broken - " + e . getMessage ( ) ) ; } } | Parses and shows the content of one sheet using the specified styles and shared - strings tables . |
27,002 | public static ResultSet query ( String sql , Object ... args ) { ResultSet result = null ; Connection con = getconnnection ( ) ; PreparedStatement ps = null ; try { ps = con . prepareStatement ( sql ) ; if ( args != null ) { for ( int i = 0 ; i < args . length ; i ++ ) { ps . setObject ( ( i + 1 ) , args [ i ] ) ; } } result = ps . executeQuery ( ) ; } catch ( SQLException e ) { e . printStackTrace ( ) ; } return result ; } | query because need to manually close the resource so not recommended for use it |
27,003 | public List < String > process ( ) throws IOException { MissingRecordAwareHSSFListener listener = new MissingRecordAwareHSSFListener ( this ) ; formatListener = new FormatTrackingHSSFListener ( listener ) ; HSSFEventFactory factory = new HSSFEventFactory ( ) ; HSSFRequest request = new HSSFRequest ( ) ; if ( outputFormulaValues ) { request . addListenerForAllRecords ( formatListener ) ; } else { workbookBuildingListener = new SheetRecordCollectingListener ( formatListener ) ; request . addListenerForAllRecords ( workbookBuildingListener ) ; } factory . processWorkbookEvents ( request , fs ) ; return results ; } | Initiates the processing of the XLS file to CSV |
27,004 | public void createPersistent ( String path , boolean createParents ) throws ZkInterruptedException , IllegalArgumentException , ZkException , RuntimeException { createPersistent ( path , createParents , ZooDefs . Ids . OPEN_ACL_UNSAFE ) ; } | Create a persistent node and set its ACLs . |
27,005 | public void setAcl ( final String path , final List < ACL > acl ) throws ZkException { if ( path == null ) { throw new NullPointerException ( "Missing value for path" ) ; } if ( acl == null || acl . size ( ) == 0 ) { throw new NullPointerException ( "Missing value for ACL" ) ; } if ( ! exists ( path ) ) { throw new RuntimeException ( "trying to set acls on non existing node " + path ) ; } retryUntilConnected ( new Callable < Void > ( ) { public Void call ( ) throws Exception { Stat stat = new Stat ( ) ; _connection . readData ( path , stat , false ) ; _connection . setAcl ( path , acl , stat . getAversion ( ) ) ; return null ; } } ) ; } | Sets the acl on path |
27,006 | public Map . Entry < List < ACL > , Stat > getAcl ( final String path ) throws ZkException { if ( path == null ) { throw new NullPointerException ( "Missing value for path" ) ; } if ( ! exists ( path ) ) { throw new RuntimeException ( "trying to get acls on non existing node " + path ) ; } return retryUntilConnected ( new Callable < Map . Entry < List < ACL > , Stat > > ( ) { public Map . Entry < List < ACL > , Stat > call ( ) throws Exception { return _connection . getAcl ( path ) ; } } ) ; } | Gets the acl on path |
27,007 | public void createEphemeral ( final String path , final List < ACL > acl ) throws ZkInterruptedException , IllegalArgumentException , ZkException , RuntimeException { create ( path , null , acl , CreateMode . EPHEMERAL ) ; } | Create an ephemeral node and set its ACL . |
27,008 | public String create ( final String path , Object data , final List < ACL > acl , final CreateMode mode ) { if ( path == null ) { throw new NullPointerException ( "Missing value for path" ) ; } if ( acl == null || acl . size ( ) == 0 ) { throw new NullPointerException ( "Missing value for ACL" ) ; } final byte [ ] bytes = data == null ? null : serialize ( data ) ; return retryUntilConnected ( new Callable < String > ( ) { public String call ( ) throws Exception { return _connection . create ( path , bytes , acl , mode ) ; } } ) ; } | Create a node with ACL . |
27,009 | public void addAuthInfo ( final String scheme , final byte [ ] auth ) { retryUntilConnected ( new Callable < Object > ( ) { public Object call ( ) throws Exception { _connection . addAuthInfo ( scheme , auth ) ; return null ; } } ) ; } | Add authentication information to the connection . This will be used to identify the user and check access to nodes protected by ACLs |
27,010 | public void connect ( final long maxMsToWaitUntilConnected , Watcher watcher ) throws ZkInterruptedException , ZkTimeoutException , IllegalStateException { boolean started = false ; acquireEventLock ( ) ; try { setShutdownTrigger ( false ) ; _eventThread = new ZkEventThread ( _connection . getServers ( ) ) ; _eventThread . start ( ) ; _connection . connect ( watcher ) ; LOG . debug ( "Awaiting connection to Zookeeper server" ) ; boolean waitSuccessful = waitUntilConnected ( maxMsToWaitUntilConnected , TimeUnit . MILLISECONDS ) ; if ( ! waitSuccessful ) { throw new ZkTimeoutException ( "Unable to connect to zookeeper server '" + _connection . getServers ( ) + "' with timeout of " + maxMsToWaitUntilConnected + " ms" ) ; } started = true ; } finally { getEventLock ( ) . unlock ( ) ; if ( ! started ) { close ( ) ; } } } | Connect to ZooKeeper . |
27,011 | protected Class resolveProxyClass ( String [ ] interfaces ) throws IOException , ClassNotFoundException { ClassLoader cl = getClass ( ) . getClassLoader ( ) ; Class [ ] cinterfaces = new Class [ interfaces . length ] ; for ( int i = 0 ; i < interfaces . length ; i ++ ) { try { cinterfaces [ i ] = cl . loadClass ( interfaces [ i ] ) ; } catch ( ClassNotFoundException ex ) { ClassLoader tccl = Thread . currentThread ( ) . getContextClassLoader ( ) ; if ( tccl != null ) { return tccl . loadClass ( interfaces [ i ] ) ; } else { throw ex ; } } } try { return Proxy . getProxyClass ( cinterfaces [ 0 ] . getClassLoader ( ) , cinterfaces ) ; } catch ( IllegalArgumentException e ) { throw new ClassNotFoundException ( null , e ) ; } } | Returns a proxy class that implements the interfaces named in a proxy class descriptor ; subclasses may implement this method to read custom data from the stream along with the descriptors for dynamic proxy classes allowing them to use an alternate loading mechanism for the interfaces and the proxy class . |
27,012 | public static HawkBuilder init ( Context context ) { HawkUtils . checkNull ( "Context" , context ) ; hawkFacade = null ; return new HawkBuilder ( context ) ; } | This will init the hawk without password protection . |
27,013 | public static < T > boolean put ( String key , T value ) { return hawkFacade . put ( key , value ) ; } | Saves any type including any collection primitive values or custom objects |
27,014 | public static < T > T get ( String key , T defaultValue ) { return hawkFacade . get ( key , defaultValue ) ; } | Gets the saved data if it is null default value will be returned |
27,015 | @ SuppressWarnings ( "deprecation" ) @ SuppressLint ( "NewApi" ) public void setBackgroundCompat ( Drawable drawable ) { int pL = getPaddingLeft ( ) ; int pT = getPaddingTop ( ) ; int pR = getPaddingRight ( ) ; int pB = getPaddingBottom ( ) ; if ( Build . VERSION . SDK_INT >= Build . VERSION_CODES . JELLY_BEAN ) { setBackground ( drawable ) ; } else { setBackgroundDrawable ( drawable ) ; } setPadding ( pL , pT , pR , pB ) ; } | Set the View s background . Masks the API changes made in Jelly Bean . |
27,016 | private void startNewTransition ( ) { if ( ! hasBounds ( ) ) { return ; } mCurrentTrans = mTransGen . generateNextTransition ( mDrawableRect , mViewportRect ) ; mElapsedTime = 0 ; mLastFrameTime = System . currentTimeMillis ( ) ; fireTransitionStart ( mCurrentTrans ) ; } | Generates and starts a transition . |
27,017 | public void restart ( ) { int width = getWidth ( ) ; int height = getHeight ( ) ; if ( width == 0 || height == 0 ) { return ; } updateViewport ( width , height ) ; updateDrawableBounds ( ) ; startNewTransition ( ) ; } | Creates a new transition and starts over . |
27,018 | private void updateDrawableBounds ( ) { if ( mDrawableRect == null ) { mDrawableRect = new RectF ( ) ; } Drawable d = getDrawable ( ) ; if ( d != null && d . getIntrinsicHeight ( ) > 0 && d . getIntrinsicWidth ( ) > 0 ) { mDrawableRect . set ( 0 , 0 , d . getIntrinsicWidth ( ) , d . getIntrinsicHeight ( ) ) ; } } | Updates the drawable bounds rect . This must be called every time the drawable associated to this view changes . |
27,019 | public RectF getInterpolatedRect ( long elapsedTime ) { float elapsedTimeFraction = elapsedTime / ( float ) mDuration ; float interpolationProgress = Math . min ( elapsedTimeFraction , 1 ) ; float interpolation = mInterpolator . getInterpolation ( interpolationProgress ) ; float currentWidth = mSrcRect . width ( ) + ( interpolation * mWidthDiff ) ; float currentHeight = mSrcRect . height ( ) + ( interpolation * mHeightDiff ) ; float currentCenterX = mSrcRect . centerX ( ) + ( interpolation * mCenterXDiff ) ; float currentCenterY = mSrcRect . centerY ( ) + ( interpolation * mCenterYDiff ) ; float left = currentCenterX - ( currentWidth / 2 ) ; float top = currentCenterY - ( currentHeight / 2 ) ; float right = left + currentWidth ; float bottom = top + currentHeight ; mCurrentRect . set ( left , top , right , bottom ) ; return mCurrentRect ; } | Gets the current rect that represents the part of the image to take the scene in the current frame . |
27,020 | public SignalServiceEnvelope read ( long timeout , TimeUnit unit ) throws InvalidVersionException , IOException , TimeoutException { return read ( timeout , unit , new NullMessagePipeCallback ( ) ) ; } | A blocking call that reads a message off the pipe . When this call returns the message has been acknowledged and will not be retransmitted . |
27,021 | public void sendReceipt ( SignalServiceAddress recipient , Optional < UnidentifiedAccessPair > unidentifiedAccess , SignalServiceReceiptMessage message ) throws IOException , UntrustedIdentityException { byte [ ] content = createReceiptContent ( message ) ; sendMessage ( recipient , getTargetUnidentifiedAccess ( unidentifiedAccess ) , message . getWhen ( ) , content , false ) ; } | Send a read receipt for a received message . |
27,022 | public void sendTyping ( SignalServiceAddress recipient , Optional < UnidentifiedAccessPair > unidentifiedAccess , SignalServiceTypingMessage message ) throws IOException , UntrustedIdentityException { byte [ ] content = createTypingContent ( message ) ; sendMessage ( recipient , getTargetUnidentifiedAccess ( unidentifiedAccess ) , message . getTimestamp ( ) , content , true ) ; } | Send a typing indicator . |
27,023 | public void sendCallMessage ( SignalServiceAddress recipient , Optional < UnidentifiedAccessPair > unidentifiedAccess , SignalServiceCallMessage message ) throws IOException , UntrustedIdentityException { byte [ ] content = createCallContent ( message ) ; sendMessage ( recipient , getTargetUnidentifiedAccess ( unidentifiedAccess ) , System . currentTimeMillis ( ) , content , false ) ; } | Send a call setup message to a single recipient . |
27,024 | public SendMessageResult sendMessage ( SignalServiceAddress recipient , Optional < UnidentifiedAccessPair > unidentifiedAccess , SignalServiceDataMessage message ) throws UntrustedIdentityException , IOException { byte [ ] content = createMessageContent ( message ) ; long timestamp = message . getTimestamp ( ) ; SendMessageResult result = sendMessage ( recipient , getTargetUnidentifiedAccess ( unidentifiedAccess ) , timestamp , content , false ) ; if ( ( result . getSuccess ( ) != null && result . getSuccess ( ) . isNeedsSync ( ) ) || ( unidentifiedAccess . isPresent ( ) && isMultiDevice . get ( ) ) ) { byte [ ] syncMessage = createMultiDeviceSentTranscriptContent ( content , Optional . of ( recipient ) , timestamp , Collections . singletonList ( result ) ) ; sendMessage ( localAddress , Optional . < UnidentifiedAccess > absent ( ) , timestamp , syncMessage , false ) ; } if ( message . isEndSession ( ) ) { store . deleteAllSessions ( recipient . getNumber ( ) ) ; if ( eventListener . isPresent ( ) ) { eventListener . get ( ) . onSecurityEvent ( recipient ) ; } } return result ; } | Send a message to a single recipient . |
27,025 | public List < SendMessageResult > sendMessage ( List < SignalServiceAddress > recipients , List < Optional < UnidentifiedAccessPair > > unidentifiedAccess , SignalServiceDataMessage message ) throws IOException , UntrustedIdentityException { byte [ ] content = createMessageContent ( message ) ; long timestamp = message . getTimestamp ( ) ; List < SendMessageResult > results = sendMessage ( recipients , getTargetUnidentifiedAccess ( unidentifiedAccess ) , timestamp , content , false ) ; boolean needsSyncInResults = false ; for ( SendMessageResult result : results ) { if ( result . getSuccess ( ) != null && result . getSuccess ( ) . isNeedsSync ( ) ) { needsSyncInResults = true ; break ; } } if ( needsSyncInResults || ( isMultiDevice . get ( ) ) ) { byte [ ] syncMessage = createMultiDeviceSentTranscriptContent ( content , Optional . < SignalServiceAddress > absent ( ) , timestamp , results ) ; sendMessage ( localAddress , Optional . < UnidentifiedAccess > absent ( ) , timestamp , syncMessage , false ) ; } return results ; } | Send a message to a group . |
27,026 | public SignalServiceMessagePipe createMessagePipe ( ) { WebSocketConnection webSocket = new WebSocketConnection ( urls . getSignalServiceUrls ( ) [ 0 ] . getUrl ( ) , urls . getSignalServiceUrls ( ) [ 0 ] . getTrustStore ( ) , Optional . of ( credentialsProvider ) , userAgent , connectivityListener , sleepTimer ) ; return new SignalServiceMessagePipe ( webSocket , Optional . of ( credentialsProvider ) ) ; } | Creates a pipe for receiving SignalService messages . |
27,027 | public void requestSmsVerificationCode ( boolean androidSmsRetrieverSupported , Optional < String > captchaToken ) throws IOException { this . pushServiceSocket . requestSmsVerificationCode ( androidSmsRetrieverSupported , captchaToken ) ; } | Request an SMS verification code . On success the server will send an SMS verification code to this Signal user . |
27,028 | public void requestVoiceVerificationCode ( Locale locale , Optional < String > captchaToken ) throws IOException { this . pushServiceSocket . requestVoiceVerificationCode ( locale , captchaToken ) ; } | Request a Voice verification code . On success the server will make a voice call to this Signal user . |
27,029 | public void verifyAccountWithCode ( String verificationCode , String signalingKey , int signalProtocolRegistrationId , boolean fetchesMessages , String pin , byte [ ] unidentifiedAccessKey , boolean unrestrictedUnidentifiedAccess ) throws IOException { this . pushServiceSocket . verifyAccountCode ( verificationCode , signalingKey , signalProtocolRegistrationId , fetchesMessages , pin , unidentifiedAccessKey , unrestrictedUnidentifiedAccess ) ; } | Verify a Signal Service account with a received SMS or voice verification code . |
27,030 | public void setAccountAttributes ( String signalingKey , int signalProtocolRegistrationId , boolean fetchesMessages , String pin , byte [ ] unidentifiedAccessKey , boolean unrestrictedUnidentifiedAccess ) throws IOException { this . pushServiceSocket . setAccountAttributes ( signalingKey , signalProtocolRegistrationId , fetchesMessages , pin , unidentifiedAccessKey , unrestrictedUnidentifiedAccess ) ; } | Refresh account attributes with server . |
27,031 | public void setPreKeys ( IdentityKey identityKey , SignedPreKeyRecord signedPreKey , List < PreKeyRecord > oneTimePreKeys ) throws IOException { this . pushServiceSocket . registerPreKeys ( identityKey , signedPreKey , oneTimePreKeys ) ; } | Register an identity key signed prekey and list of one time prekeys with the server . |
27,032 | public Optional < ContactTokenDetails > getContact ( String e164number ) throws IOException { String contactToken = createDirectoryServerToken ( e164number , true ) ; ContactTokenDetails contactTokenDetails = this . pushServiceSocket . getContactTokenDetails ( contactToken ) ; if ( contactTokenDetails != null ) { contactTokenDetails . setNumber ( e164number ) ; } return Optional . fromNullable ( contactTokenDetails ) ; } | Checks whether a contact is currently registered with the server . |
27,033 | public List < ContactTokenDetails > getContacts ( Set < String > e164numbers ) throws IOException { Map < String , String > contactTokensMap = createDirectoryServerTokenMap ( e164numbers ) ; List < ContactTokenDetails > activeTokens = this . pushServiceSocket . retrieveDirectory ( contactTokensMap . keySet ( ) ) ; for ( ContactTokenDetails activeToken : activeTokens ) { activeToken . setNumber ( contactTokensMap . get ( activeToken . getToken ( ) ) ) ; } return activeTokens ; } | Checks which contacts in a set are registered with the server . |
27,034 | @ SuppressWarnings ( "unchecked" ) public KieContainerResource getContainer ( String serverUrl , String container ) { String response = client . target ( serverUrl ) . path ( "containers" ) . path ( container ) . request ( MediaType . APPLICATION_JSON ) . get ( String . class ) ; ServiceResponse < KieContainerResource > result = jsonMarshaller . unmarshall ( response , ServiceResponse . class ) ; return result . getResult ( ) ; } | Verify the container status on remote process server . |
27,035 | public void feed ( ) { if ( source . hasNext ( ) ) { Event < ? > event = source . getNext ( ) ; FeedContext context = new FeedContext ( event ) ; FeedTrigger trigger = new FeedTrigger ( ) ; trigger . setNextFireTime ( event . getDate ( ) ) ; FeedJob job = new FeedJob ( source , sink , trigger , clock ) ; clock . scheduleJob ( job , context , trigger ) ; } } | For this simple demo we are loading the whole stream of events into the memory . |
27,036 | public void update ( final Graphics g ) { final Dimension d = getSize ( ) ; if ( ( this . offScreenImage == null ) ) { this . offScreenImage = createImage ( d . width , d . height ) ; } paint ( this . offScreenImage . getGraphics ( ) ) ; g . drawImage ( this . offScreenImage , 0 , 0 , null ) ; } | Use double buffering . |
27,037 | public void paint ( final Graphics g ) { final int numberOfColumns = this . cellGrid . getNumberOfColumns ( ) ; final int numberOfRows = this . cellGrid . getNumberOfRows ( ) ; if ( this . backgroundImage == null ) { final Dimension d = getSize ( ) ; this . backgroundImage = createImage ( d . width , d . height ) ; final Graphics backgroundImageGraphics = this . backgroundImage . getGraphics ( ) ; backgroundImageGraphics . setColor ( getBackground ( ) ) ; backgroundImageGraphics . fillRect ( 0 , 0 , d . width , d . height ) ; backgroundImageGraphics . setColor ( CellGridCanvas . BACKGROUND_COLOR ) ; backgroundImageGraphics . fillRect ( 0 , 0 , this . cellSize * numberOfColumns - 1 , this . cellSize * numberOfRows - 1 ) ; backgroundImageGraphics . setColor ( CellGridCanvas . GRID_COLOR ) ; for ( int x = 1 ; x < numberOfColumns ; x ++ ) { backgroundImageGraphics . drawLine ( x * this . cellSize - 1 , 0 , x * this . cellSize - 1 , this . cellSize * numberOfRows - 1 ) ; } for ( int y = 1 ; y < numberOfRows ; y ++ ) { backgroundImageGraphics . drawLine ( 0 , y * this . cellSize - 1 , this . cellSize * numberOfColumns - 1 , y * this . cellSize - 1 ) ; } } g . drawImage ( this . backgroundImage , 0 , 0 , null ) ; for ( int row = 0 ; row < numberOfRows ; row ++ ) { for ( int column = 0 ; column < numberOfColumns ; column ++ ) { final Cell cell = this . cellGrid . getCellAt ( row , column ) ; if ( cell . getCellState ( ) == CellState . LIVE ) { g . drawImage ( this . liveCellImage , column * this . cellSize , row * this . cellSize , this ) ; } } } } | Draw this generation . |
27,038 | public Dimension getPreferredSize ( ) { final int numberOfColumns = this . cellGrid . getNumberOfColumns ( ) ; final int numberOfRows = this . cellGrid . getNumberOfRows ( ) ; return new Dimension ( this . cellSize * numberOfColumns , this . cellSize * numberOfRows ) ; } | This is the preferred size . |
27,039 | public static Set < BeanDefinitionHolder > registerAnnotationConfigProcessors ( BeanDefinitionRegistry registry , Object source , ReleaseId releaseId ) { Set < BeanDefinitionHolder > beanDefs = new LinkedHashSet < BeanDefinitionHolder > ( 1 ) ; if ( ! registry . containsBeanDefinition ( KIE_ANNOTATION_PROCESSOR_CLASS_NAME ) ) { RootBeanDefinition def = new RootBeanDefinition ( AnnotationsPostProcessor . class ) ; def . setSource ( source ) ; def . getPropertyValues ( ) . add ( "releaseId" , releaseId ) ; beanDefs . add ( registerPostProcessor ( registry , def , KIE_ANNOTATION_PROCESSOR_CLASS_NAME ) ) ; } return beanDefs ; } | Register all relevant annotation post processors in the given registry . |
27,040 | public static String encode ( String string ) { byte [ ] bytes ; try { bytes = string . getBytes ( PREFERRED_ENCODING ) ; } catch ( UnsupportedEncodingException uee ) { bytes = string . getBytes ( ) ; } return encodeBytes ( bytes ) ; } | Encode string as a byte array in Base64 annotation . |
27,041 | public ServiceResponse < DMNResult > evaluateDecisions ( String containerId , DMNContextKS payload ) { ServiceResponse < DMNResult > result = null ; if ( config . isRest ( ) ) { Map < String , Object > valuesMap = new HashMap < String , Object > ( ) ; valuesMap . put ( CONTAINER_ID , containerId ) ; result = ( ServiceResponse < DMNResult > ) ( ServiceResponse < ? > ) makeHttpPostRequestAndCreateServiceResponse ( build ( loadBalancer . getUrl ( ) , DMN_URI , valuesMap ) , payload , DMNResultKS . class ) ; } else { CommandScript script = new CommandScript ( Collections . singletonList ( ( KieServerCommand ) new DescriptorCommand ( "DMNService" , "evaluateDecisions" , serialize ( payload ) , marshaller . getFormat ( ) . getType ( ) , new Object [ ] { containerId } ) ) ) ; result = ( ServiceResponse < DMNResult > ) executeJmsCommand ( script , DescriptorCommand . class . getName ( ) , KieServerConstants . CAPABILITY_DMN , containerId ) . getResponses ( ) . get ( 0 ) ; throwExceptionOnFailure ( result ) ; if ( shouldReturnWithNullResponse ( result ) ) { return null ; } } if ( result instanceof Wrapped ) { return ( ServiceResponse < DMNResult > ) ( ( Wrapped ) result ) . unwrap ( ) ; } ServiceResponse < DMNResult > result2 = ( ServiceResponse < DMNResult > ) result ; if ( config . getMarshallingFormat ( ) == MarshallingFormat . JSON ) { recurseAndModifyByCoercingNumbers ( result2 . getResult ( ) . getContext ( ) ) ; for ( DMNDecisionResult dr : result2 . getResult ( ) . getDecisionResults ( ) ) { recurseAndModifyByCoercingNumbers ( dr . getResult ( ) ) ; } } return result2 ; } | DO NOT ADD |
27,042 | public static Map < MediaType , QualityValue > getMediaTypeQualityValues ( String header ) { if ( header == null ) return null ; header = header . trim ( ) ; if ( header . length ( ) == 0 ) return null ; Map < MediaType , QualityValue > result = new LinkedHashMap < MediaType , QualityValue > ( ) ; int offset = 0 ; while ( offset >= 0 ) { int slashIndex = header . indexOf ( '/' , offset ) ; if ( slashIndex < 0 ) throw new IllegalArgumentException ( "Malformed media type: " + header ) ; String type = header . substring ( offset , slashIndex ) ; String subtype ; Map < String , String > parameters = null ; QualityValue qualityValue = QualityValue . DEFAULT ; offset = slashIndex + 1 ; int parameterStartIndex = header . indexOf ( ';' , offset ) ; int itemEndIndex = header . indexOf ( ',' , offset ) ; if ( parameterStartIndex == itemEndIndex ) { assert itemEndIndex == - 1 ; subtype = header . substring ( offset ) ; offset = - 1 ; } else if ( itemEndIndex < 0 || ( parameterStartIndex >= 0 && parameterStartIndex < itemEndIndex ) ) { subtype = header . substring ( offset , parameterStartIndex ) ; offset = parameterStartIndex + 1 ; parameters = new LinkedHashMap < String , String > ( ) ; offset = parseParameters ( parameters , header , offset ) ; qualityValue = evaluateAcceptParameters ( parameters ) ; } else { subtype = header . substring ( offset , itemEndIndex ) ; offset = itemEndIndex + 1 ; } result . put ( new MediaType ( type . trim ( ) , subtype . trim ( ) , parameters ) , qualityValue ) ; } if ( logger . isDebugEnabled ( ) ) logger . debug ( result . toString ( ) ) ; return result ; } | Gets the media types from a comma - separated list . |
27,043 | private static int addMethod ( ConstPool cPool , CtMethod method ) { return cPool . addMethodrefInfo ( cPool . getThisClassInfo ( ) , method . getName ( ) , method . getSignature ( ) ) ; } | Add Method to ConstPool . If method was not in the ConstPool will add and return index otherwise will return index of already existing entry of constpool |
27,044 | private boolean isCtFieldACollection ( CtField ctField ) { try { return ctField . getType ( ) . equals ( cp . get ( Collection . class . getName ( ) ) ) || ctField . getType ( ) . equals ( cp . get ( List . class . getName ( ) ) ) || ctField . getType ( ) . equals ( cp . get ( Set . class . getName ( ) ) ) ; } catch ( NotFoundException e ) { e . printStackTrace ( ) ; return false ; } } | Verify that CtField is exactly the java . util . Collection java . util . List or java . util . Set otherwise cannot instrument the class field |
27,045 | public static KieServicesConfiguration newRestConfiguration ( String serverUrl , String login , String password , long timeout ) { return new KieServicesConfigurationImpl ( serverUrl , login , password , timeout ) ; } | Creates a new configuration object for REST based service |
27,046 | private String writeClass ( String name , byte [ ] bytes ) throws IOException { name = name . replace ( '.' , '/' ) ; File dexFile = new File ( String . format ( "%s/%s.dex" , getDexDir ( ) , name ) ) ; if ( dexFile . exists ( ) && isReuseClassFiles ( ) ) { if ( log . isTraceEnabled ( ) ) log . trace ( String . format ( "Reused class [%s] from cache: %s" , name , dexFile . getAbsolutePath ( ) ) ) ; return dexFile . getAbsolutePath ( ) ; } FileOutputStream fos = null ; try { DexFile file = new DexFile ( dex_options ) ; DirectClassFile cf = new DirectClassFile ( bytes , name + ".class" , cf_options . strictNameCheck ) ; cf . setAttributeFactory ( StdAttributeFactory . THE_ONE ) ; cf . getMagic ( ) ; file . add ( CfTranslator . translate ( cf , bytes , cf_options , dex_options , file ) ) ; dexFile . getParentFile ( ) . mkdirs ( ) ; if ( dexFile . exists ( ) ) dexFile . delete ( ) ; fos = new FileOutputStream ( dexFile ) ; file . writeTo ( fos , null , false ) ; if ( log . isTraceEnabled ( ) ) log . trace ( String . format ( "Wrote class [%s] to cache: %s" , name , dexFile . getAbsolutePath ( ) ) ) ; return dexFile . getAbsolutePath ( ) ; } finally { if ( fos != null ) { try { fos . close ( ) ; } catch ( IOException e ) { } } } } | Convert class to dex |
27,047 | private void storeServerState ( Consumer < KieServerState > kieServerStateConsumer ) { KieServerState currentState = repository . load ( KieServerEnvironment . getServerId ( ) ) ; kieServerStateConsumer . accept ( currentState ) ; repository . store ( KieServerEnvironment . getServerId ( ) , currentState ) ; } | Persists updated KieServer state . |
27,048 | private void walkDir ( File dir ) { walkDir ( dir , new FileFilter ( ) { public boolean accept ( File pathname ) { return ( pathname . isFile ( ) && pathname . getName ( ) . endsWith ( ".class" ) ) ; } } , new FileFilter ( ) { public boolean accept ( File pathname ) { return ( pathname . isDirectory ( ) ) ; } } ) ; } | Expects a directory . |
27,049 | public boolean isKieContainerUpdateDuringRolloutAllowed ( ConfigMap cm , KieServerState newState ) { KieServerState state = ( KieServerState ) xs . fromXML ( cm . getData ( ) . get ( CFG_MAP_DATA_KEY ) ) ; for ( KieContainerResource container : state . getContainers ( ) ) { if ( container . getStatus ( ) . equals ( KieContainerStatus . STARTED ) && newState . getContainers ( ) . stream ( ) . anyMatch ( c -> c . getContainerId ( ) . equals ( container . getContainerId ( ) ) && c . getStatus ( ) . equals ( KieContainerStatus . STOPPED ) ) ) { logger . warn ( "Non KieServer process updated KieServerState during DC rollout for STOPPING containers." ) ; return true ; } } logger . warn ( "Non KieServer process updates KieServerState during DC rollout is prohibited!" ) ; return false ; } | Non KieServer process including workbench is allowed to update KieServerState even during DC rollout but ONLY under certain conditions . |
27,050 | public static String parseBundleId ( String url ) { if ( isOsgiBundleUrl ( url ) ) { int slashesIdx = url . indexOf ( "://" ) ; return url . substring ( slashesIdx + "://" . length ( ) , url . indexOf ( '.' ) ) ; } else { return null ; } } | Parses OSGi bundle ID from the provided URL . The URL may not be coming from OSGi in which case null is returned |
27,051 | public static boolean isOsgiBundleUrl ( String str ) { if ( str == null ) { throw new NullPointerException ( "Specified string can not be null!" ) ; } return str . startsWith ( "bundle" ) && str . contains ( "://" ) ; } | Determines if the provided string is OSGi bundle URL or not . |
27,052 | protected RequestInfoInstanceList convertToRequestInfoList ( List < RequestInfo > requests , boolean withErrors , boolean withData ) { RequestInfoInstance [ ] requestInfos = new RequestInfoInstance [ requests . size ( ) ] ; int index = 0 ; for ( RequestInfo request : requests ) { requestInfos [ index ] = convertToRequestInfo ( request , withErrors , withData ) ; index ++ ; } return new RequestInfoInstanceList ( requestInfos ) ; } | utility methods for conversion |
27,053 | public KieServerHttpRequest basicAuthorization ( final String name , final String password ) { return header ( AUTHORIZATION , "Basic " + org . kie . server . common . rest . Base64Util . encode ( name + ':' + password ) ) ; } | Set the Authorization header to given values in Basic authentication format |
27,054 | public KieServerHttpRequest contentType ( final String contentType , final String charset ) { if ( charset != null && charset . length ( ) > 0 ) { final String separator = "; " + PARAM_CHARSET + '=' ; return header ( CONTENT_TYPE , contentType + separator + charset ) ; } else return header ( CONTENT_TYPE , contentType ) ; } | Set the Content - Type request header to the given value and charset |
27,055 | public KieServerHttpRequest accept ( final String accept ) { RequestInfo requestInfo = getRequestInfo ( ) ; if ( requestInfo . getHeader ( ACCEPT ) . isEmpty ( ) ) { requestInfo . setHeader ( ACCEPT , new ArrayList < String > ( ) ) ; } requestInfo . headers . get ( ACCEPT ) . set ( 0 , accept ) ; return this ; } | Set the Accept header to given value |
27,056 | private KieServerHttpRequest openOutput ( ) throws IOException { if ( output != null ) { return this ; } getConnection ( ) . setDoOutput ( true ) ; final String charset = getHeaderParam ( getConnection ( ) . getRequestProperty ( CONTENT_TYPE ) , PARAM_CHARSET ) ; output = new RequestOutputStream ( getConnection ( ) . getOutputStream ( ) , charset , bufferSize ) ; return this ; } | Open output stream |
27,057 | private KieServerHttpRequest closeOutput ( ) throws IOException { if ( connection == null ) { throw new KieServerHttpRequestException ( "Please execute a HTTP method first on the request." ) ; } if ( output == null ) { return this ; } if ( ignoreCloseExceptions ) { try { output . close ( ) ; } catch ( IOException ignored ) { } } else { output . close ( ) ; } output = null ; return this ; } | Close output stream |
27,058 | private String [ ] responseHeaders ( final String name ) { final Map < String , List < String > > headers = responseHeaders ( ) ; if ( headers == null || headers . isEmpty ( ) ) return EMPTY_STRINGS ; final List < String > values = headers . get ( name ) ; if ( values != null && ! values . isEmpty ( ) ) return values . toArray ( new String [ values . size ( ) ] ) ; else return EMPTY_STRINGS ; } | Get all values of the given header from the response |
27,059 | private String responseHeaderParameter ( final String headerName , final String paramName ) { return getHeaderParam ( responseHeader ( headerName ) , paramName ) ; } | Get parameter with given name from header value in response |
27,060 | private static Map < String , String > getHeaderParams ( final String header ) { if ( header == null || header . length ( ) == 0 ) return Collections . emptyMap ( ) ; final int headerLength = header . length ( ) ; int start = header . indexOf ( ';' ) + 1 ; if ( start == 0 || start == headerLength ) return Collections . emptyMap ( ) ; int end = header . indexOf ( ';' , start ) ; if ( end == - 1 ) end = headerLength ; Map < String , String > params = new LinkedHashMap < String , String > ( ) ; while ( start < end ) { int nameEnd = header . indexOf ( '=' , start ) ; if ( nameEnd != - 1 && nameEnd < end ) { String name = header . substring ( start , nameEnd ) . trim ( ) ; if ( name . length ( ) > 0 ) { String value = header . substring ( nameEnd + 1 , end ) . trim ( ) ; int length = value . length ( ) ; if ( length != 0 ) if ( length > 2 && '"' == value . charAt ( 0 ) && '"' == value . charAt ( length - 1 ) ) params . put ( name , value . substring ( 1 , length - 1 ) ) ; else params . put ( name , value ) ; } } start = end + 1 ; end = header . indexOf ( ';' , start ) ; if ( end == - 1 ) end = headerLength ; } return params ; } | Get parameter values from header value |
27,061 | private static String getHeaderParam ( final String value , final String paramName ) { if ( value == null || value . length ( ) == 0 ) return null ; final int length = value . length ( ) ; int start = value . indexOf ( ';' ) + 1 ; if ( start == 0 || start == length ) return null ; int end = value . indexOf ( ';' , start ) ; if ( end == - 1 ) end = length ; while ( start < end ) { int nameEnd = value . indexOf ( '=' , start ) ; if ( nameEnd != - 1 && nameEnd < end && paramName . equals ( value . substring ( start , nameEnd ) . trim ( ) ) ) { String paramValue = value . substring ( nameEnd + 1 , end ) . trim ( ) ; int valueLength = paramValue . length ( ) ; if ( valueLength != 0 ) if ( valueLength > 2 && '"' == paramValue . charAt ( 0 ) && '"' == paramValue . charAt ( valueLength - 1 ) ) return paramValue . substring ( 1 , valueLength - 1 ) ; else return paramValue ; } start = end + 1 ; end = value . indexOf ( ';' , start ) ; if ( end == - 1 ) end = length ; } return null ; } | Get parameter value from header value |
27,062 | private JMSConnection startConnectionAndSession ( ) { JMSConnection result = null ; Connection connection = null ; Session session = null ; try { connection = factory . createConnection ( ) ; if ( connection != null ) { session = connection . createSession ( sessionTransacted , sessionAck ) ; result = new JMSConnection ( connection , session ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( "KieServerMDB sessionTransacted={}, sessionAck={}" , sessionTransacted , sessionAck ) ; } } } catch ( JMSException jmse ) { String errMsg = "Unable to obtain connection/session" ; logger . error ( errMsg , jmse ) ; throw new JMSRuntimeException ( errMsg , jmse ) ; } finally { if ( connection != null && session == null ) { logger . error ( "KieServerMDB: Session creation failed - closing connection" ) ; try { connection . close ( ) ; } catch ( JMSException jmse ) { String errMsg = "KieServerMDB: Error closing connection after failing to open session" ; throw new JMSRuntimeException ( errMsg , jmse ) ; } } } return result ; } | This method is used to initialize the JMS connection and session . It is done in its own method so that if the point at which it is done needs to be changed then it can be done by just changing the invocation point . |
27,063 | public static String transform ( InputStream svg , List < String > completed , List < String > active ) { return transform ( svg , completed , active , null , COMPLETED_COLOR , COMPLETED_BORDER_COLOR , ACTIVE_BORDER_COLOR ) ; } | Static methods to keep backward compatibility |
27,064 | public TaskInstance findTaskByWorkItemId ( Long workItemId ) { if ( config . isRest ( ) ) { Map < String , Object > valuesMap = new HashMap < String , Object > ( ) ; valuesMap . put ( WORK_ITEM_ID , workItemId ) ; return makeHttpGetRequestAndCreateCustomResponse ( build ( loadBalancer . getUrl ( ) , QUERY_URI + "/" + TASK_BY_WORK_ITEM_ID_GET_URI , valuesMap ) , TaskInstance . class ) ; } else { CommandScript script = new CommandScript ( Collections . singletonList ( ( KieServerCommand ) new DescriptorCommand ( "QueryService" , "getTaskByWorkItemId" , new Object [ ] { workItemId } ) ) ) ; ServiceResponse < TaskInstance > response = ( ServiceResponse < TaskInstance > ) executeJmsCommand ( script , DescriptorCommand . class . getName ( ) , "BPM" ) . getResponses ( ) . get ( 0 ) ; throwExceptionOnFailure ( response ) ; if ( shouldReturnWithNullResponse ( response ) ) { return null ; } return response . getResult ( ) ; } } | task basic queries |
27,065 | protected String initializeURI ( URL url , String servicePrefix ) { if ( url == null ) { throw new IllegalArgumentException ( "The url may not be empty or null." ) ; } try { url . toURI ( ) ; } catch ( URISyntaxException urise ) { throw new IllegalArgumentException ( "URL (" + url . toExternalForm ( ) + ") is incorrectly formatted: " + urise . getMessage ( ) , urise ) ; } String urlString = url . toExternalForm ( ) ; if ( ! urlString . endsWith ( "/" ) ) { urlString += "/" ; } urlString += "services/" + servicePrefix + "/server" ; URL serverPlusServicePrefixUrl ; try { serverPlusServicePrefixUrl = new URL ( urlString ) ; } catch ( MalformedURLException murle ) { throw new IllegalArgumentException ( "URL (" + url . toExternalForm ( ) + ") is incorrectly formatted: " + murle . getMessage ( ) , murle ) ; } return urlString ; } | Initializes the URL that will be used for web service access |
27,066 | public static JaxbDataFormat augmentJaxbDataFormatDefinition ( JaxbDataFormat jaxbDataFormat ) { Set < String > set = new HashSet < String > ( ) ; for ( String clsName : DroolsJaxbHelperProviderImpl . JAXB_ANNOTATED_CMD ) { set . add ( clsName . substring ( 0 , clsName . lastIndexOf ( '.' ) ) ) ; } StringBuilder sb = new StringBuilder ( ) ; String contextPath = jaxbDataFormat . getContextPath ( ) ; if ( contextPath != null ) { sb . append ( contextPath ) ; } sb . append ( ":" ) ; for ( String pkgName : set ) { sb . append ( pkgName ) ; sb . append ( ':' ) ; } jaxbDataFormat . setContextPath ( sb . toString ( ) ) ; return jaxbDataFormat ; } | Clones the passed JaxbDataFormat and then augments it with with Drools related namespaces |
27,067 | protected void registerFastCloners ( ) { this . fastCloners . put ( GregorianCalendar . class , new FastClonerCalendar ( ) ) ; this . fastCloners . put ( ArrayList . class , new FastClonerArrayList ( ) ) ; this . fastCloners . put ( Arrays . asList ( new Object [ ] { "" } ) . getClass ( ) , new FastClonerArrayList ( ) ) ; this . fastCloners . put ( LinkedList . class , new FastClonerLinkedList ( ) ) ; this . fastCloners . put ( HashSet . class , new FastClonerHashSet ( ) ) ; this . fastCloners . put ( HashMap . class , new FastClonerHashMap ( ) ) ; this . fastCloners . put ( TreeMap . class , new FastClonerTreeMap ( ) ) ; } | registers a std set of fast cloners . |
27,068 | protected void registerKnownJdkImmutableClasses ( ) { registerImmutable ( String . class ) ; registerImmutable ( Integer . class ) ; registerImmutable ( Long . class ) ; registerImmutable ( Boolean . class ) ; registerImmutable ( Class . class ) ; registerImmutable ( Float . class ) ; registerImmutable ( Double . class ) ; registerImmutable ( Character . class ) ; registerImmutable ( Byte . class ) ; registerImmutable ( Short . class ) ; registerImmutable ( Void . class ) ; registerImmutable ( BigDecimal . class ) ; registerImmutable ( BigInteger . class ) ; registerImmutable ( URI . class ) ; registerImmutable ( URL . class ) ; registerImmutable ( UUID . class ) ; registerImmutable ( Pattern . class ) ; } | registers some known JDK immutable classes . Override this to register your own list of jdk s immutable classes |
27,069 | public void registerStaticFields ( final Class < ? > ... classes ) { for ( final Class < ? > c : classes ) { final List < Field > fields = allFields ( c ) ; for ( final Field field : fields ) { final int mods = field . getModifiers ( ) ; if ( Modifier . isStatic ( mods ) && ! field . getType ( ) . isPrimitive ( ) ) { registerConstant ( c , field . getName ( ) ) ; } } } } | registers all static fields of these classes . Those static fields won t be cloned when an instance of the class is cloned . This is useful i . e . when a static field object is added into maps or sets . At that point there is no way for the cloner to know that it was static except if it is registered . |
27,070 | public < T > T deepClone ( final T o ) { if ( o == null ) { return null ; } if ( ! this . cloningEnabled ) { return o ; } if ( this . dumpClonedClasses ) { System . out . println ( "start>" + o . getClass ( ) ) ; } final Map < Object , Object > clones = new IdentityHashMap < Object , Object > ( 16 ) ; try { return cloneInternal ( o , clones ) ; } catch ( final IllegalAccessException e ) { throw new RuntimeException ( "error during cloning of " + o , e ) ; } } | deep clones o . |
27,071 | public < T , E extends T > void copyPropertiesOfInheritedClass ( final T src , final E dest ) { if ( src == null ) { throw new IllegalArgumentException ( "src can't be null" ) ; } if ( dest == null ) { throw new IllegalArgumentException ( "dest can't be null" ) ; } final Class < ? extends Object > srcClz = src . getClass ( ) ; final Class < ? extends Object > destClz = dest . getClass ( ) ; if ( srcClz . isArray ( ) ) { if ( ! destClz . isArray ( ) ) { throw new IllegalArgumentException ( "can't copy from array to non-array class " + destClz ) ; } final int length = Array . getLength ( src ) ; for ( int i = 0 ; i < length ; i ++ ) { final Object v = Array . get ( src , i ) ; Array . set ( dest , i , v ) ; } return ; } final List < Field > fields = allFields ( srcClz ) ; for ( final Field field : fields ) { if ( ! Modifier . isStatic ( field . getModifiers ( ) ) ) { try { final Object fieldObject = field . get ( src ) ; field . set ( dest , fieldObject ) ; } catch ( final IllegalArgumentException e ) { throw new RuntimeException ( e ) ; } catch ( final IllegalAccessException e ) { throw new RuntimeException ( e ) ; } } } } | copies all properties from src to dest . Src and dest can be of different class provided they contain same field names |
27,072 | public String forCaseInstance ( String alias , String caseId ) { return registry . getContainerId ( alias , new ByCaseIdContainerLocator ( caseId ) ) ; } | Looks up container id for given alias that is associated with case instance |
27,073 | public String forProcessInstance ( String alias , long processInstanceId ) { return registry . getContainerId ( alias , new ByProcessInstanceIdContainerLocator ( processInstanceId ) ) ; } | Looks up container id for given alias that is associated with process instance |
27,074 | public String forTaskInstance ( String alias , long taskId ) { return registry . getContainerId ( alias , new ByTaskIdContainerLocator ( taskId ) ) ; } | Looks up container id for given alias that is associated with task instance |
27,075 | void validate ( ) throws IOException { validate ( platform , "platform" ) ; validate ( jdk , "JDK" ) ; validate ( executable , "executable name" ) ; validate ( mainClass , "main class" ) ; validate ( outDir , "output folder" ) ; if ( outDir . exists ( ) ) { if ( new File ( "." ) . equals ( outDir ) ) { throw new IOException ( "Output directory equals working directory, aborting" ) ; } if ( new File ( "/" ) . equals ( outDir ) ) { throw new IOException ( "Output directory points to root folder." ) ; } } if ( classpath . isEmpty ( ) ) { throw new IOException ( "Empty class path. Please check your commandline or configuration." ) ; } } | Sanity checks for configuration settings . Because users like to break stuff . |
27,076 | public void commitJob ( JobContext context ) throws IOException { super . commitJob ( context ) ; Configuration conf = context . getConfiguration ( ) ; TableReference destTable = BigQueryOutputConfiguration . getTableReference ( conf ) ; String destProjectId = BigQueryOutputConfiguration . getProjectId ( conf ) ; String writeDisposition = BigQueryOutputConfiguration . getWriteDisposition ( conf ) ; Optional < BigQueryTableSchema > destSchema = BigQueryOutputConfiguration . getTableSchema ( conf ) ; String kmsKeyName = BigQueryOutputConfiguration . getKmsKeyName ( conf ) ; BigQueryFileFormat outputFileFormat = BigQueryOutputConfiguration . getFileFormat ( conf ) ; List < String > sourceUris = getOutputFileURIs ( ) ; try { getBigQueryHelper ( ) . importFromGcs ( destProjectId , destTable , destSchema . isPresent ( ) ? destSchema . get ( ) . get ( ) : null , kmsKeyName , outputFileFormat , writeDisposition , sourceUris , true ) ; } catch ( InterruptedException e ) { throw new IOException ( "Failed to import GCS into BigQuery" , e ) ; } cleanup ( context ) ; } | Runs an import job on BigQuery for the data in the output path in addition to calling the delegate s commitJob . |
27,077 | public void abortJob ( JobContext context , State state ) throws IOException { super . abortJob ( context , state ) ; cleanup ( context ) ; } | Performs a cleanup of the output path in addition to delegating the call to the wrapped OutputCommitter . |
27,078 | protected void cleanup ( JobContext context ) throws IOException { boolean delete = BigQueryOutputConfiguration . getCleanupTemporaryDataFlag ( context . getConfiguration ( ) ) ; if ( delete && outputFileSystem . exists ( outputPath ) ) { logger . atInfo ( ) . log ( "Found GCS output data at '%s', attempting to clean up." , outputPath ) ; if ( outputFileSystem . delete ( outputPath , true ) ) { logger . atInfo ( ) . log ( "Successfully deleted GCS output path '%s'." , outputPath ) ; } else { logger . atWarning ( ) . log ( "Failed to delete GCS output at '%s', retrying on shutdown." , outputPath ) ; outputFileSystem . deleteOnExit ( outputPath ) ; } } } | Attempts to manually delete data in the output path . If this fails another delete attempt is made on JVM shutdown . |
27,079 | public synchronized int read ( ) throws IOException { long startTime = System . nanoTime ( ) ; int numRead = channel . read ( ByteBuffer . wrap ( singleReadBuf ) ) ; if ( numRead == - 1 ) { return - 1 ; } if ( numRead != 1 ) { throw new IOException ( String . format ( "Somehow read %d bytes using single-byte buffer for path %s ending in position %d!" , numRead , gcsPath , channel . position ( ) ) ) ; } byte b = singleReadBuf [ 0 ] ; totalBytesRead ++ ; statistics . incrementBytesRead ( 1 ) ; long duration = System . nanoTime ( ) - startTime ; ghfs . increment ( GoogleHadoopFileSystemBase . Counter . READ1 ) ; ghfs . increment ( GoogleHadoopFileSystemBase . Counter . READ1_TIME , duration ) ; return ( b & 0xff ) ; } | Reads a single byte from the underlying store . |
27,080 | public synchronized int read ( byte [ ] buf , int offset , int length ) throws IOException { long startTime = System . nanoTime ( ) ; Preconditions . checkNotNull ( buf , "buf must not be null" ) ; if ( offset < 0 || length < 0 || length > buf . length - offset ) { throw new IndexOutOfBoundsException ( ) ; } int numRead = channel . read ( ByteBuffer . wrap ( buf , offset , length ) ) ; if ( numRead > 0 ) { statistics . incrementBytesRead ( numRead ) ; totalBytesRead += numRead ; } long duration = System . nanoTime ( ) - startTime ; ghfs . increment ( GoogleHadoopFileSystemBase . Counter . READ ) ; ghfs . increment ( GoogleHadoopFileSystemBase . Counter . READ_TIME , duration ) ; return numRead ; } | Reads up to length bytes from the underlying store and stores them starting at the specified offset in the given buffer . Less than length bytes may be returned . |
27,081 | public synchronized int read ( long position , byte [ ] buf , int offset , int length ) throws IOException { long startTime = System . nanoTime ( ) ; int result = super . read ( position , buf , offset , length ) ; if ( result > 0 ) { statistics . incrementBytesRead ( result ) ; totalBytesRead += result ; } long duration = System . nanoTime ( ) - startTime ; ghfs . increment ( GoogleHadoopFileSystemBase . Counter . READ_POS ) ; ghfs . increment ( GoogleHadoopFileSystemBase . Counter . READ_POS_TIME , duration ) ; return result ; } | Reads up to length bytes from the underlying store and stores them starting at the specified offset in the given buffer . Less than length bytes may be returned . Reading starts at the given position . |
27,082 | public synchronized long getPos ( ) throws IOException { long pos = channel . position ( ) ; logger . atFine ( ) . log ( "getPos: %d" , pos ) ; return pos ; } | Gets the current position within the file being read . |
27,083 | public synchronized void seek ( long pos ) throws IOException { long startTime = System . nanoTime ( ) ; logger . atFine ( ) . log ( "seek: %d" , pos ) ; try { channel . position ( pos ) ; } catch ( IllegalArgumentException e ) { throw new IOException ( e ) ; } long duration = System . nanoTime ( ) - startTime ; ghfs . increment ( GoogleHadoopFileSystemBase . Counter . SEEK ) ; ghfs . increment ( GoogleHadoopFileSystemBase . Counter . SEEK_TIME , duration ) ; } | Sets the current position within the file being read . |
27,084 | public synchronized void close ( ) throws IOException { if ( channel != null ) { long startTime = System . nanoTime ( ) ; logger . atFine ( ) . log ( "close: file: %s, totalBytesRead: %d" , gcsPath , totalBytesRead ) ; channel . close ( ) ; long duration = System . nanoTime ( ) - startTime ; ghfs . increment ( GoogleHadoopFileSystemBase . Counter . READ_CLOSE ) ; ghfs . increment ( GoogleHadoopFileSystemBase . Counter . READ_CLOSE_TIME , duration ) ; long streamDuration = System . nanoTime ( ) - initTime ; ghfs . increment ( GoogleHadoopFileSystemBase . Counter . INPUT_STREAM ) ; ghfs . increment ( GoogleHadoopFileSystemBase . Counter . INPUT_STREAM_TIME , streamDuration ) ; } } | Closes the current stream . |
27,085 | public static void setTemporaryCloudStorageDirectory ( Configuration configuration , String path ) { configuration . set ( BigQueryConfiguration . TEMP_GCS_PATH_KEY , path ) ; } | Configure a directory to which we will export BigQuery data |
27,086 | public static void cleanupJob ( Configuration configuration , JobID jobId ) throws IOException { String exportPathRoot = BigQueryConfiguration . getTemporaryPathRoot ( configuration , jobId ) ; configuration . set ( BigQueryConfiguration . TEMP_GCS_PATH_KEY , exportPathRoot ) ; Bigquery bigquery = null ; try { bigquery = new BigQueryFactory ( ) . getBigQuery ( configuration ) ; } catch ( GeneralSecurityException gse ) { throw new IOException ( "Failed to create Bigquery client" , gse ) ; } cleanupJob ( new BigQueryHelper ( bigquery ) , configuration ) ; } | Cleans up relevant temporary resources associated with a job which used the GsonBigQueryInputFormat ; this should be called explicitly after the completion of the entire job . Possibly cleans up intermediate export tables if configured to use one due to specifying a BigQuery query for the input . Cleans up the GCS directoriy where BigQuery exported its files for reading . |
27,087 | public static String getPropertyOrDefault ( Class < ? > referenceClass , String propertyFile , String key , String defaultValue ) { try ( InputStream stream = referenceClass . getResourceAsStream ( propertyFile ) ) { if ( stream == null ) { logger . atSevere ( ) . log ( "Could not load properties file '%s'" , propertyFile ) ; return defaultValue ; } Properties properties = new Properties ( ) ; properties . load ( stream ) ; String value = properties . getProperty ( key ) ; if ( value == null ) { logger . atSevere ( ) . log ( "Key %s not found in properties file %s." , key , propertyFile ) ; return defaultValue ; } return value ; } catch ( IOException e ) { logger . atSevere ( ) . withCause ( e ) . log ( "Error while trying to get property value for key %s" , key ) ; return defaultValue ; } } | Get the value of a property or a default value if there s an error retrieving the property key . |
27,088 | public void exportBigQueryToGcs ( String projectId , TableReference tableRef , List < String > gcsPaths , boolean awaitCompletion ) throws IOException , InterruptedException { logger . atFine ( ) . log ( "exportBigQueryToGcs(bigquery, '%s', '%s', '%s', '%s')" , projectId , BigQueryStrings . toString ( tableRef ) , gcsPaths , awaitCompletion ) ; logger . atInfo ( ) . log ( "Exporting table '%s' to %s paths; path[0] is '%s'; awaitCompletion: %s" , BigQueryStrings . toString ( tableRef ) , gcsPaths . size ( ) , gcsPaths . isEmpty ( ) ? "(empty)" : gcsPaths . get ( 0 ) , awaitCompletion ) ; JobConfigurationExtract extractConfig = new JobConfigurationExtract ( ) ; extractConfig . setSourceTable ( tableRef ) ; extractConfig . setDestinationUris ( gcsPaths ) ; extractConfig . set ( "destinationFormat" , "NEWLINE_DELIMITED_JSON" ) ; JobConfiguration config = new JobConfiguration ( ) ; config . setExtract ( extractConfig ) ; Table table = getTable ( tableRef ) ; JobReference jobReference = createJobReference ( projectId , "direct-bigqueryhelper-export" , table . getLocation ( ) ) ; Job job = new Job ( ) ; job . setConfiguration ( config ) ; job . setJobReference ( jobReference ) ; insertJobOrFetchDuplicate ( projectId , job ) ; if ( awaitCompletion ) { BigQueryUtils . waitForJobCompletion ( service , projectId , jobReference , NOP_PROGRESSABLE ) ; } } | Exports BigQuery results into GCS polls for completion before returning . |
27,089 | public boolean tableExists ( TableReference tableRef ) throws IOException { try { Table fetchedTable = service . tables ( ) . get ( tableRef . getProjectId ( ) , tableRef . getDatasetId ( ) , tableRef . getTableId ( ) ) . execute ( ) ; logger . atFine ( ) . log ( "Successfully fetched table '%s' for tableRef '%s'" , fetchedTable , tableRef ) ; return true ; } catch ( IOException ioe ) { if ( errorExtractor . itemNotFound ( ioe ) ) { return false ; } else { throw ioe ; } } } | Returns true if the table exists or false if not . |
27,090 | public Table getTable ( TableReference tableRef ) throws IOException { Bigquery . Tables . Get getTablesReply = service . tables ( ) . get ( tableRef . getProjectId ( ) , tableRef . getDatasetId ( ) , tableRef . getTableId ( ) ) ; return getTablesReply . execute ( ) ; } | Gets the specified table resource by table ID . This method does not return the data in the table it only returns the table resource which describes the structure of this table . |
27,091 | public WritableByteChannel create ( URI path , CreateFileOptions options ) throws IOException { logger . atFine ( ) . log ( "create(%s)" , path ) ; Preconditions . checkNotNull ( path , "path could not be null" ) ; if ( FileInfo . isDirectoryPath ( path ) ) { throw new IOException ( String . format ( "Cannot create a file whose name looks like a directory. Got '%s'" , path ) ) ; } if ( options . checkNoDirectoryConflict ( ) ) { URI dirPath = FileInfo . convertToDirectoryPath ( pathCodec , path ) ; if ( exists ( dirPath ) ) { throw new FileAlreadyExistsException ( "A directory with that name exists: " + path ) ; } } if ( options . ensureParentDirectoriesExist ( ) ) { URI parentPath = getParentPath ( path ) ; if ( parentPath != null ) { mkdirs ( parentPath ) ; } } return createInternal ( path , options ) ; } | Creates and opens an object for writing . |
27,092 | public SeekableByteChannel open ( URI path , GoogleCloudStorageReadOptions readOptions ) throws IOException { logger . atFine ( ) . log ( "open(%s, %s)" , path , readOptions ) ; Preconditions . checkNotNull ( path ) ; checkArgument ( ! FileInfo . isDirectoryPath ( path ) , "Cannot open a directory for reading: %s" , path ) ; StorageResourceId resourceId = pathCodec . validatePathAndGetId ( path , false ) ; return gcs . open ( resourceId , readOptions ) ; } | Opens an object for reading . |
27,093 | public void delete ( URI path , boolean recursive ) throws IOException { Preconditions . checkNotNull ( path , "path can not be null" ) ; checkArgument ( ! path . equals ( GCS_ROOT ) , "Cannot delete root path (%s)" , path ) ; logger . atFine ( ) . log ( "delete(%s, %s)" , path , recursive ) ; FileInfo fileInfo = getFileInfo ( path ) ; if ( ! fileInfo . exists ( ) ) { throw new FileNotFoundException ( "Item not found: " + path ) ; } Future < GoogleCloudStorageItemInfo > parentInfoFuture = null ; if ( options . getCloudStorageOptions ( ) . isAutoRepairImplicitDirectoriesEnabled ( ) ) { StorageResourceId parentId = pathCodec . validatePathAndGetId ( getParentPath ( path ) , true ) ; parentInfoFuture = cachedExecutor . submit ( ( ) -> getFileInfoInternal ( parentId , false ) ) ; } List < FileInfo > itemsToDelete = new ArrayList < > ( ) ; List < FileInfo > bucketsToDelete = new ArrayList < > ( ) ; if ( fileInfo . isDirectory ( ) ) { itemsToDelete = recursive ? listAllFileInfoForPrefix ( fileInfo . getPath ( ) ) : listFileInfo ( fileInfo . getPath ( ) ) ; if ( ! itemsToDelete . isEmpty ( ) && ! recursive ) { throw new DirectoryNotEmptyException ( "Cannot delete a non-empty directory." ) ; } } if ( fileInfo . getItemInfo ( ) . isBucket ( ) ) { bucketsToDelete . add ( fileInfo ) ; } else { itemsToDelete . add ( fileInfo ) ; } deleteInternal ( itemsToDelete , bucketsToDelete ) ; repairImplicitDirectory ( parentInfoFuture ) ; if ( bucketsToDelete . isEmpty ( ) ) { List < URI > itemsToDeleteNames = itemsToDelete . stream ( ) . map ( FileInfo :: getPath ) . collect ( toCollection ( ArrayList :: new ) ) ; tryUpdateTimestampsForParentDirectories ( itemsToDeleteNames , itemsToDeleteNames ) ; } } | Deletes one or more items indicated by the given path . |
27,094 | private void deleteInternal ( List < FileInfo > itemsToDelete , List < FileInfo > bucketsToDelete ) throws IOException { itemsToDelete . sort ( FILE_INFO_PATH_COMPARATOR . reversed ( ) ) ; if ( ! itemsToDelete . isEmpty ( ) ) { List < StorageResourceId > objectsToDelete = new ArrayList < > ( itemsToDelete . size ( ) ) ; for ( FileInfo fileInfo : itemsToDelete ) { objectsToDelete . add ( new StorageResourceId ( fileInfo . getItemInfo ( ) . getBucketName ( ) , fileInfo . getItemInfo ( ) . getObjectName ( ) , fileInfo . getItemInfo ( ) . getContentGeneration ( ) ) ) ; } gcs . deleteObjects ( objectsToDelete ) ; } if ( ! bucketsToDelete . isEmpty ( ) ) { List < String > bucketNames = new ArrayList < > ( bucketsToDelete . size ( ) ) ; for ( FileInfo bucketInfo : bucketsToDelete ) { StorageResourceId resourceId = bucketInfo . getItemInfo ( ) . getResourceId ( ) ; gcs . waitForBucketEmpty ( resourceId . getBucketName ( ) ) ; bucketNames . add ( resourceId . getBucketName ( ) ) ; } if ( options . enableBucketDelete ( ) ) { gcs . deleteBuckets ( bucketNames ) ; } else { logger . atInfo ( ) . log ( "Skipping deletion of buckets because enableBucketDelete is false: %s" , bucketNames ) ; } } } | Deletes all items in the given path list followed by all bucket items . |
27,095 | public boolean exists ( URI path ) throws IOException { logger . atFine ( ) . log ( "exists(%s)" , path ) ; return getFileInfo ( path ) . exists ( ) ; } | Indicates whether the given item exists . |
27,096 | public void mkdirs ( URI path ) throws IOException { logger . atFine ( ) . log ( "mkdirs(%s)" , path ) ; Preconditions . checkNotNull ( path ) ; if ( path . equals ( GCS_ROOT ) ) { return ; } StorageResourceId resourceId = pathCodec . validatePathAndGetId ( path , true ) ; resourceId = FileInfo . convertToDirectoryPath ( resourceId ) ; List < String > subdirs = getSubDirs ( resourceId . getObjectName ( ) ) ; List < StorageResourceId > itemIds = new ArrayList < > ( subdirs . size ( ) * 2 + 1 ) ; for ( String subdir : subdirs ) { itemIds . add ( new StorageResourceId ( resourceId . getBucketName ( ) , subdir ) ) ; if ( ! Strings . isNullOrEmpty ( subdir ) ) { itemIds . add ( new StorageResourceId ( resourceId . getBucketName ( ) , FileInfo . convertToFilePath ( subdir ) ) ) ; } } itemIds . add ( new StorageResourceId ( resourceId . getBucketName ( ) ) ) ; logger . atFine ( ) . log ( "mkdirs: items: %s" , itemIds ) ; List < GoogleCloudStorageItemInfo > itemInfos = gcs . getItemInfos ( itemIds ) ; GoogleCloudStorageItemInfo bucketInfo = null ; List < StorageResourceId > subdirsToCreate = new ArrayList < > ( subdirs . size ( ) ) ; for ( GoogleCloudStorageItemInfo info : itemInfos ) { if ( info . isBucket ( ) ) { checkState ( bucketInfo == null , "bucketInfo should be null" ) ; bucketInfo = info ; } else if ( info . getResourceId ( ) . isDirectory ( ) && ! info . exists ( ) ) { subdirsToCreate . add ( info . getResourceId ( ) ) ; } else if ( ! info . getResourceId ( ) . isDirectory ( ) && info . exists ( ) ) { throw new FileAlreadyExistsException ( "Cannot create directories because of existing file: " + info . getResourceId ( ) ) ; } } if ( ! checkNotNull ( bucketInfo , "bucketInfo should not be null" ) . exists ( ) ) { gcs . create ( bucketInfo . getBucketName ( ) ) ; } gcs . createEmptyObjects ( subdirsToCreate ) ; List < URI > createdDirectories = subdirsToCreate . stream ( ) . map ( s -> pathCodec . getPath ( s . getBucketName ( ) , s . 
getObjectName ( ) , false ) ) . collect ( toImmutableList ( ) ) ; tryUpdateTimestampsForParentDirectories ( createdDirectories , createdDirectories ) ; } | Creates a directory at the specified path . Also creates any parent directories as necessary . Similar to mkdir - p command . |
27,097 | public void compose ( List < URI > sources , URI destination , String contentType ) throws IOException { StorageResourceId destResource = StorageResourceId . fromObjectName ( destination . toString ( ) ) ; List < String > sourceObjects = Lists . transform ( sources , uri -> StorageResourceId . fromObjectName ( uri . toString ( ) ) . getObjectName ( ) ) ; gcs . compose ( destResource . getBucketName ( ) , sourceObjects , destResource . getObjectName ( ) , contentType ) ; } | Composes inputs into a single GCS object . This performs a GCS Compose . Objects will be composed according to the order they appear in the input . The destination object if already present will be overwritten . Sources and destination are assumed to be in the same bucket . |
27,098 | private void renameInternal ( FileInfo srcInfo , URI dst ) throws IOException { if ( srcInfo . isDirectory ( ) ) { renameDirectoryInternal ( srcInfo , dst ) ; } else { URI src = srcInfo . getPath ( ) ; StorageResourceId srcResourceId = pathCodec . validatePathAndGetId ( src , true ) ; StorageResourceId dstResourceId = pathCodec . validatePathAndGetId ( dst , true ) ; gcs . copy ( srcResourceId . getBucketName ( ) , ImmutableList . of ( srcResourceId . getObjectName ( ) ) , dstResourceId . getBucketName ( ) , ImmutableList . of ( dstResourceId . getObjectName ( ) ) ) ; tryUpdateTimestampsForParentDirectories ( ImmutableList . of ( dst ) , ImmutableList . < URI > of ( ) ) ; gcs . deleteObjects ( ImmutableList . of ( new StorageResourceId ( srcInfo . getItemInfo ( ) . getBucketName ( ) , srcInfo . getItemInfo ( ) . getObjectName ( ) , srcInfo . getItemInfo ( ) . getContentGeneration ( ) ) ) ) ; tryUpdateTimestampsForParentDirectories ( ImmutableList . of ( src ) , ImmutableList . < URI > of ( ) ) ; } } | Renames the given path without checking any parameters . |
/**
 * Renames the given directory by copying everything under it to {@code dst} and then
 * deleting the originals. Regular items and "marker" items (paths matching the
 * configured marker-file pattern) are tracked separately so their copy/delete order
 * can differ — presumably so markers appear at the destination only after all data
 * has been copied, and vanish from the source first; TODO confirm against the
 * connector's marker-file semantics.
 *
 * @param srcInfo source directory; must satisfy {@code isDirectory()}
 * @param dst destination path (converted to directory form before use)
 * @throws IOException on failure of any underlying list/copy/delete call
 */
private void renameDirectoryInternal ( FileInfo srcInfo , URI dst ) throws IOException {
  checkArgument ( srcInfo . isDirectory ( ) , "'%s' should be a directory" , srcInfo ) ;
  Pattern markerFilePattern = options . getMarkerFilePattern ( ) ;
  // TreeMaps ordered by FILE_INFO_PATH_COMPARATOR keep copy/delete traversal deterministic.
  Map < FileInfo , URI > srcToDstItemNames = new TreeMap < > ( FILE_INFO_PATH_COMPARATOR ) ;
  Map < FileInfo , URI > srcToDstMarkerItemNames = new TreeMap < > ( FILE_INFO_PATH_COMPARATOR ) ;
  List < FileInfo > srcItemInfos = listAllFileInfoForPrefix ( srcInfo . getPath ( ) ) ;
  dst = FileInfo . convertToDirectoryPath ( pathCodec , dst ) ;
  mkdir ( dst ) ;
  String prefix = srcInfo . getPath ( ) . toString ( ) ;
  // Split the listing: marker-pattern matches vs. everything else, each mapped to
  // its destination URI (source path re-rooted under dst).
  for ( FileInfo srcItemInfo : srcItemInfos ) {
    String relativeItemName = srcItemInfo . getPath ( ) . toString ( ) . substring ( prefix . length ( ) ) ;
    URI dstItemName = dst . resolve ( relativeItemName ) ;
    if ( markerFilePattern != null && markerFilePattern . matcher ( relativeItemName ) . matches ( ) ) {
      srcToDstMarkerItemNames . put ( srcItemInfo , dstItemName ) ;
    } else {
      srcToDstItemNames . put ( srcItemInfo , dstItemName ) ;
    }
  }
  // Copy regular items first, marker items last (order is significant).
  copyInternal ( srcToDstItemNames ) ;
  copyInternal ( srcToDstMarkerItemNames ) ;
  // Best-effort refresh of destination parent-directory timestamps, once, for all copies.
  if ( ! srcToDstItemNames . isEmpty ( ) || ! srcToDstMarkerItemNames . isEmpty ( ) ) {
    List < URI > allDestinationUris = new ArrayList < > ( srcToDstItemNames . size ( ) + srcToDstMarkerItemNames . size ( ) ) ;
    allDestinationUris . addAll ( srcToDstItemNames . values ( ) ) ;
    allDestinationUris . addAll ( srcToDstMarkerItemNames . values ( ) ) ;
    tryUpdateTimestampsForParentDirectories ( allDestinationUris , allDestinationUris ) ;
  }
  List < FileInfo > bucketsToDelete = new ArrayList < > ( 1 ) ;
  List < FileInfo > srcItemsToDelete = new ArrayList < > ( srcToDstItemNames . size ( ) + 1 ) ;
  srcItemsToDelete . addAll ( srcToDstItemNames . keySet ( ) ) ;
  // A bucket-rooted source is deleted as a bucket; otherwise the directory
  // placeholder object itself is deleted along with its children.
  if ( srcInfo . getItemInfo ( ) . isBucket ( ) ) {
    bucketsToDelete . add ( srcInfo ) ;
  } else {
    srcItemsToDelete . add ( srcInfo ) ;
  }
  // Delete marker items first, then regular items and (possibly) the bucket —
  // the mirror of the copy order above.
  deleteInternal ( new ArrayList < > ( srcToDstMarkerItemNames . keySet ( ) ) , new ArrayList < > ( ) ) ;
  deleteInternal ( srcItemsToDelete , bucketsToDelete ) ;
  // Source-side timestamp refresh is skipped when the whole bucket was deleted
  // (there are no surviving parents to update).
  if ( bucketsToDelete . isEmpty ( ) ) {
    List < URI > srcItemNames = srcItemInfos . stream ( ) . map ( FileInfo :: getPath ) . collect ( toCollection ( ArrayList :: new ) ) ;
    tryUpdateTimestampsForParentDirectories ( srcItemNames , srcItemNames ) ;
  }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.