idx
int64
0
41.2k
question
stringlengths
73
5.81k
target
stringlengths
5
918
39,100
/**
 * Registers a processor that will be applied to nodes whose name matches
 * {@code nodeName}.
 *
 * @param nodeName  name of the nodes the processor applies to; must not be empty
 * @param processor the processing to run on matching nodes; must not be null
 * @throws IllegalArgumentException if the processor is null or the name is empty
 */
public void addProcessor(String nodeName, NodeProcessor processor) {
    if (processor == null) {
        throw new IllegalArgumentException("Processor should not be null.");
    }
    if (IS_EMPTY.test(nodeName)) {
        throw new IllegalArgumentException("The node name should not be empty.");
    }
    getActionPool().put(nodeName, processor);
}
Add a specific processing that will be applied to nodes having the matching name .
39,101
/**
 * Processes the next available node: runs the processor registered under the
 * node's name, falling back to the default processor when no specific one is
 * registered, then advances the position by one.
 */
private void doProcessNext() {
    final int position = getPosition();
    final Node current = getSource().item(position);
    final String name = current.getNodeName();
    if (getActionPool().containsKey(name)) {
        getActionPool().get(name).execute(current, position);
    } else if (getActionPool().containsKey(NODE_NAME__DEFAULT)) {
        getActionPool().get(NODE_NAME__DEFAULT).execute(current, position);
    }
    setPosition(position + 1);
}
Perform the processing on the next available node .
39,102
/**
 * Builds a scrollable panel with a fluid flow layout: the inner panel's size
 * is kept in sync with the scroll pane's viewport so content reflows.
 *
 * @param flowLayout layout applied to the inner panel
 * @return a model holding the created panel and its enclosing scroll pane
 */
public static final FluidFlowPanelModel createFluidFlowPanel(FlowLayout flowLayout) {
    final JPanel panel = new JPanel(flowLayout);
    final PanelSizeUpdaterOnViewPortResize updater = new PanelSizeUpdaterOnViewPortResize(panel);
    panel.addContainerListener(updater);
    final JScrollPane scroller = new JScrollPane(panel,
            JScrollPane.VERTICAL_SCROLLBAR_ALWAYS,
            JScrollPane.HORIZONTAL_SCROLLBAR_NEVER);
    scroller.addComponentListener(updater);
    return new FluidFlowPanelModel(panel, scroller);
}
The macro that create a scrollable panel with a fluid flow layout .
39,103
/**
 * Loads the icon for the given location.
 * Failures are best-effort: the message is written to stderr and null returned.
 */
private ImageIcon retrieveIcon(IconLocationType location) {
    try {
        return myIconProvider.retrieveIcon(location);
    } catch (Exception exception) {
        // Deliberately swallow: a missing icon must not break the UI.
        System.err.println(exception.getMessage());
        return null;
    }
}
Load the icon file .
39,104
/**
 * Returns the shared loader instance for the given encoding, creating and
 * caching it on first use. A null encoding falls back to the default instance.
 */
public static TextLoader getInstance(Encoding encoding) {
    if (encoding == null) {
        return getInstance();
    }
    final String isoName = encoding.getIsoName();
    if (!INSTANCE_BY_ENCODING.containsKey(isoName)) {
        INSTANCE_BY_ENCODING.put(isoName, new TextLoader(encoding));
    }
    return INSTANCE_BY_ENCODING.get(isoName);
}
Get a default instance for the given encoding .
39,105
/**
 * Reads all text from the given reader and appends it to the provided buffer.
 * NOTE: the reader is wrapped and closed here, which also closes the
 * caller-supplied source.
 *
 * @param source reader to drain
 * @param buffer destination buffer
 * @return the same buffer, for chaining
 * @throws IOException if reading fails
 */
public StringBuffer append(Reader source, StringBuffer buffer) throws IOException {
    final BufferedReader reader = new BufferedReader(source);
    final char[] chunk = new char[getBufferSize()];
    try {
        int read = reader.read(chunk);
        while (read >= 0) {
            buffer.append(chunk, 0, read);
            read = reader.read(chunk);
        }
    } finally {
        reader.close();
    }
    return buffer;
}
Load a text from the specified reader and put it in the provided StringBuffer .
39,106
/**
 * Loads the whole content of a text file into a string.
 *
 * @param source file to read
 * @return the file content
 * @throws IOException if the file cannot be read
 */
public String loadTextFile(File source) throws IOException {
    final StringBuffer content = new StringBuffer();
    append(source, content);
    return content.toString();
}
Load a text file .
39,107
/**
 * Creates a fully configured selector.
 *
 * @param name                   form name of the selector
 * @param idSuffix               suffix used to build the element id
 * @param options                options to offer
 * @param allowMultipleSelection whether several options may be selected
 * @param isDisabled             whether the selector is disabled
 * @return the configured selector
 */
public static Select create(String name, String idSuffix, List<Option> options,
        boolean allowMultipleSelection, boolean isDisabled) {
    final Select result = new Select();
    result.setName(name);
    result.setIdSuffix(idSuffix);
    result.setMultiple(allowMultipleSelection);
    result.setDisabled(isDisabled);
    result.getOptions().addAll(options);
    return result;
}
Create a fully defined selector .
39,108
/**
 * Returns whether the given qualified name denotes an XInclude element.
 * An empty namespace is accepted in addition to the official XInclude one.
 */
public static boolean isXInclude(final QName qname) {
    if (!XINCLUDE_QNAME.getLocalPart().equals(qname.getLocalPart())) {
        return false;
    }
    final String namespace = qname.getNamespaceURI();
    return Strings.isNullOrEmpty(namespace)
            || XINCLUDE_QNAME.getNamespaceURI().equals(namespace);
}
Return if element is a Xinclude element or not
39,109
/**
 * Returns whether the given qualified name denotes an XInclude fallback element.
 * An empty namespace is accepted in addition to the official one.
 */
public static boolean isFallback(final QName qname) {
    if (!FALLBACK_QNAME.getLocalPart().equals(qname.getLocalPart())) {
        return false;
    }
    final String namespace = qname.getNamespaceURI();
    return Strings.isNullOrEmpty(namespace)
            || FALLBACK_QNAME.getNamespaceURI().equals(namespace);
}
Return if element is a Fallback element or not
39,110
/** Formats a date from its individual fields using the short date pattern. */
public static String format(Integer year, Integer month, Integer day) {
    return SHORT__DATE.format(new Object[] { year, month, day });
}
Formatter for a date .
39,111
/** Formats a date and time from individual fields using the short date-time pattern. */
public static String format(Integer year, Integer month, Integer day,
        Integer hour, Integer minute, Integer second) {
    return SHORT__DATE_TIME.format(new Object[] { year, month, day, hour, minute, second });
}
Formatter for a date and time .
39,112
/** Formats a date and time, down to milliseconds, using the long pattern. */
public static String format(Integer year, Integer month, Integer day,
        Integer hour, Integer minute, Integer second, Integer millisecond) {
    return LONG__MS.format(new Object[] { year, month, day, hour, minute, second, millisecond });
}
Formatter for a date and time up to milliseconds .
39,113
/**
 * Appends an opening tag without attributes to the buffer by delegating to the
 * attribute-aware variant with an empty attribute map.
 */
public static StringBuffer appendOpeningTag(StringBuffer buffer, String tag) {
    return appendOpeningTag(buffer, tag, EMPTY_MAP);
}
Add an opening tag to a StringBuffer .
39,114
/**
 * Wraps the given text inside the tag (no attributes) by delegating to the
 * attribute-aware variant with an empty attribute map.
 */
public static StringBuffer appendTextInsideTag(StringBuffer buffer, String text, String tag) {
    return appendTextInsideTag(buffer, text, tag, EMPTY_MAP);
}
Wrap a text inside a tag .
39,115
/**
 * Reverses the escaping of special characters in an attribute value:
 * quote entities first, then ampersand entities.
 */
public static String decodeAttributeValue(String value) {
    return value
            .replaceAll(PATTERN__ENTITY_QUOTE, VALUE__CHAR_QUOTE)
            .replaceAll(PATTERN__ENTITY_AMPERSAND, VALUE__CHAR_AMPERSAND);
}
Reverse the escaping of special chars from a value of an attribute .
39,116
/**
 * Escapes special characters so the value is legal as an XML attribute value.
 * Ampersands are escaped first so already-produced entities are not re-escaped.
 */
public static String encodeAttributeValue(String value) {
    return value
            .replaceAll(PATTERN__CHAR_AMPERSAND, VALUE__ENTITY_AMPERSAND)
            .replaceAll(PATTERN__CHAR_QUOTE, VALUE__ENTITY_QUOTE);
}
Convert special chars so that it is legal as an attribute value .
39,117
/** Builds a string containing the given content wrapped in a CDATA section. */
public static String getCdataSection(String cdataContent) {
    return doAppendCdataSection(new StringBuffer(), cdataContent).toString();
}
Create a string containing a Cdata section .
39,118
/** Builds a string containing the given text wrapped as an XML comment. */
public static String getComment(String comment) {
    return doAppendComment(new StringBuffer(), comment).toString();
}
Create a string containing a comment .
39,119
/**
 * Populates a UserInterfaceAction from message-provider entries (typically a
 * properties file): callback, button/menu icons, keyboard shortcut and labels.
 * Keys are looked up as {@code prefix + "." + token} (or just the token when
 * the prefix is null/blank).
 *
 * Behavior notes grounded in the code below:
 * - action, messageProvider and callbackProvider must be non-null (NPE otherwise);
 *   urlProvider is only dereferenced when an icon location is non-empty.
 * - A failure to resolve the callback functor is fatal and rethrown as
 *   ActionException.
 * - UrlProviderException while resolving either icon URL is deliberately
 *   swallowed: icons are optional, the action stays usable without them.
 *
 * @return the same action instance, populated
 */
public static UserInterfaceAction < URL > retrieveActionDefinitionFromMessageProvider ( UserInterfaceAction < URL > action , MessageProviderInterface messageProvider , String prefix , Locale locale , Object callbackProvider , UrlProvider urlProvider ) { if ( null == action ) { throw new NullPointerException ( "action must not be null" ) ; } if ( null == messageProvider ) { throw new NullPointerException ( "messageProvider must not be null" ) ; } if ( null == callbackProvider ) { throw new NullPointerException ( "callbackProvider must not be null" ) ; } String _prefix = ( null != prefix ) ? prefix . trim ( ) + TOKEN_SEPARATOR : TOKEN_SEPARATOR ; try { action . setCallback ( FunctorFactory . instanciateFunctorAsAnInstanceMethodWrapper ( callbackProvider , messageProvider . getMessage ( _prefix + TOKEN_CALLBACK , locale ) ) ) ; } catch ( Exception _exception ) { throw new ActionException ( _exception ) ; } try { String _location = messageProvider . getMessage ( _prefix + TOKEN_ICON_FOR_BUTTON , locale ) ; if ( IS_NOT_EMPTY . test ( _location ) ) { action . setIconForButton ( urlProvider . getUrl ( _location ) ) ; } } catch ( UrlProviderException _exception ) { } try { String _location = messageProvider . getMessage ( _prefix + TOKEN_ICON_FOR_MENU , locale ) ; if ( IS_NOT_EMPTY . test ( _location ) ) { action . setIconForMenu ( urlProvider . getUrl ( _location ) ) ; } } catch ( UrlProviderException _exception ) { } action . setKeyboardShortcut ( messageProvider . getMessage ( _prefix + TOKEN_KEYBOARD_SHORTCUT , locale ) ) ; action . setLabelDescription ( messageProvider . getMessage ( _prefix + TOKEN_LABEL_DESCRIPTION , locale ) ) ; action . setLabelMessage ( messageProvider . getMessage ( _prefix + TOKEN_LABEL_MESSAGE , locale ) ) ; return action ; }
Read the data of an UserInterfaceAction from a properties file .
39,120
/**
 * Transforms the default icons used by the Maven Site into Font Awesome icons,
 * each wrapped with an {@code sr-only} label for screen readers.
 *
 * @param root root element of the page to transform; must not be null
 */
public final void transformIcons(final Element root) {
    checkNotNull(root, "Received a null pointer as root element");
    final Map<String, String> replacements = new HashMap<>();
    replacements.put("img[src$=images/add.gif]",
            "<span><span class=\"fa fa-plus\" aria-hidden=\"true\"></span><span class=\"sr-only\">Addition</span></span>");
    replacements.put("img[src$=images/remove.gif]",
            "<span><span class=\"fa fa-minus\" aria-hidden=\"true\"></span><span class=\"sr-only\">Remove</span></span>");
    replacements.put("img[src$=images/fix.gif]",
            "<span><span class=\"fa fa-wrench\" aria-hidden=\"true\"></span><span class=\"sr-only\">Fix</span></span>");
    replacements.put("img[src$=images/update.gif]",
            "<span><span class=\"fa fa-refresh\" aria-hidden=\"true\"></span><span class=\"sr-only\">Refresh</span></span>");
    replacements.put("img[src$=images/icon_help_sml.gif]",
            "<span><span class=\"fa fa-question\" aria-hidden=\"true\"></span><span class=\"sr-only\">Question</span></span>");
    replacements.put("img[src$=images/icon_success_sml.gif]",
            "<span><span class=\"navbar-icon fa fa-check\" aria-hidden=\"true\" title=\"Passed\" aria-label=\"Passed\"></span><span class=\"sr-only\">Passed</span></span>");
    // BUG FIX: this entry was missing its final closing </span>, producing
    // unbalanced markup unlike every other replacement.
    replacements.put("img[src$=images/icon_warning_sml.gif]",
            "<span><span class=\"fa fa-exclamation\" aria-hidden=\"true\"></span><span class=\"sr-only\">Warning</span></span>");
    replacements.put("img[src$=images/icon_error_sml.gif]",
            "<span><span class=\"navbar-icon fa fa-close\" aria-hidden=\"true\" title=\"Failed\" aria-label=\"Failed\"></span><span class=\"sr-only\">Failed</span></span>");
    replacements.put("img[src$=images/icon_info_sml.gif]",
            "<span><span class=\"fa fa-info\" aria-hidden=\"true\"></span><span class=\"sr-only\">Info</span></span>");
    replaceAll(root, replacements);
}
Transforms the default icons used by the Maven Site to Font Awesome icons .
39,121
/**
 * Fixes the Checkstyle report page: promotes the first h2 heading to h1 and
 * removes the RSS feed icon.
 */
private final void fixReportCheckstyle(final Element root) {
    final Collection<Element> headings = root.getElementsByTag("h2");
    if (!headings.isEmpty()) {
        headings.iterator().next().tagName("h1");
    }
    root.select("img[src=\"images/rss.png\"]").remove();
}
Fixes the Checkstyle report page .
39,122
/** Fixes the CPD report page: promotes the first h2 heading to h1. */
private final void fixReportCpd(final Element root) {
    final Collection<Element> headings = root.getElementsByTag("h2");
    if (!headings.isEmpty()) {
        headings.iterator().next().tagName("h1");
    }
}
Fixes the CPD report page .
39,123
/** Fixes the dependency analysis report page by shifting heading levels up (h2->h1, h3->h2). */
private final void fixReportDependencyAnalysis(final Element root) {
    for (final Element heading : root.getElementsByTag("h2")) {
        heading.tagName("h1");
    }
    for (final Element heading : root.getElementsByTag("h3")) {
        heading.tagName("h2");
    }
}
Fixes the dependency analysis report page .
39,124
/**
 * Fixes the Failsafe report page: promotes the first h2 heading to an h1
 * titled "Failsafe Report".
 */
private final void fixReportFailsafe(final Element root) {
    final Collection<Element> headings = root.getElementsByTag("h2");
    if (!headings.isEmpty()) {
        final Element heading = headings.iterator().next();
        heading.tagName("h1");
        heading.text("Failsafe Report");
    }
}
Fixes the Failsafe report page .
39,125
/**
 * Fixes the plugin management report page: promotes h2 headings to h1, then
 * flattens the first {@code section} element by moving each of its children
 * up to the root and finally removing the emptied section.
 *
 * NOTE(review): children are detached and re-appended while iterating
 * section.children() — this relies on jsoup's children() returning a
 * snapshot list; confirm before restructuring this loop.
 */
private final void fixReportPluginManagement ( final Element root ) { final Collection < Element > sections ; final Element section ; for ( final Element head : root . getElementsByTag ( "h2" ) ) { head . tagName ( "h1" ) ; } sections = root . getElementsByTag ( "section" ) ; if ( ! sections . isEmpty ( ) ) { section = sections . iterator ( ) . next ( ) ; for ( final Element child : section . children ( ) ) { child . remove ( ) ; root . appendChild ( child ) ; } section . remove ( ) ; } }
Fixes the plugin management report page .
39,126
/** Fixes the project summary report page by shifting heading levels up (h2->h1, h3->h2). */
private final void fixReportProjectSummary(final Element root) {
    for (final Element heading : root.getElementsByTag("h2")) {
        heading.tagName("h1");
    }
    for (final Element heading : root.getElementsByTag("h3")) {
        heading.tagName("h2");
    }
}
Fixes the project summary report page .
39,127
/** Fixes the team list report page by shifting heading levels up (h2->h1, h3->h2). */
private final void fixReportTeamList(final Element root) {
    for (final Element heading : root.getElementsByTag("h2")) {
        heading.tagName("h1");
    }
    for (final Element heading : root.getElementsByTag("h3")) {
        heading.tagName("h2");
    }
}
Fixes the team list report page .
39,128
/**
 * Progress callback invoked from each worker thread: folds the worker's
 * progress and result pointers into the shared cache, then notifies the
 * registered listeners with the aggregated values.
 *
 * The cache is created lazily on the first call; realtime-result tracking is
 * enabled only when a realtime listener is registered at that moment.
 *
 * NOTE(review): no synchronization is visible here although the Javadoc says
 * "call from each worker thread" — confirm thread-safety of ProgressCache.
 */
private void onProgress ( final int workerNumber , final int workerSize , final List < Long > pointerList , final float progress ) { if ( progressCache == null ) { progressCache = new ProgressCache ( workerSize , ( onRealtimeResultListener != null ) ) ; } progressCache . setProgress ( workerNumber , progress , pointerList ) ; if ( onProgressListener != null ) { onProgressListener . onProgress ( progressCache . getProgress ( ) ) ; } if ( onRealtimeResultListener != null ) { onRealtimeResultListener . onRealtimeResultListener ( progressCache . getProgress ( ) , progressCache . getResultPointers ( ) ) ; } }
Call from each worker thread
39,129
/**
 * Derives threading and buffer settings from the machine: at least two
 * workers (half the cores), with per-worker buffer and block sizes capped at
 * 1 MiB and bounded by half the free heap and the file length.
 */
private void optimize(long fileLength) {
    final long oneMebibyte = 1024L * 1024L;
    final int availableProcessors = Runtime.getRuntime().availableProcessors();
    final long halfFreeMemory = Runtime.getRuntime().freeMemory() / 2;
    final int workerSize = Math.max(2, availableProcessors / 2);
    final long blockSize = Math.min(fileLength / workerSize, oneMebibyte);
    long bufferSize = Math.min(halfFreeMemory / workerSize, oneMebibyte);
    // The buffer never needs to exceed one block.
    if (bufferSize > blockSize) {
        bufferSize = blockSize;
    }
    this.setBlockSize((int) blockSize);
    this.setMaxNumOfThreads(workerSize);
    this.setBufferSizePerWorker((int) bufferSize);
    this.setSubBufferSize(256);
}
Optimize threading and memory
39,130
/**
 * Reads a big XML file from the given stream with a SAX parser, forwarding
 * tag-start and tag-finish events to the supplied listener (which may be null).
 *
 * Notes grounded in the code:
 * - Secure processing is explicitly disabled on the parser factory.
 * - BasicSAXHandlerException is swallowed silently — presumably the handler
 *   throws it to force-stop parsing; TODO confirm against BasicSAXHandler.
 * - Parser/SAX/IO failures are only printed via printStackTrace(), never
 *   propagated to the caller.
 */
public void read ( InputStream is , String encoding , final TagListener listener ) { mSaxHandler . initialize ( ) ; Reader reader = null ; try { reader = new InputStreamReader ( is , encoding ) ; SAXParserFactory spf = SAXParserFactory . newInstance ( ) ; spf . setFeature ( XMLConstants . FEATURE_SECURE_PROCESSING , false ) ; XMLReader xmlReader ; xmlReader = spf . newSAXParser ( ) . getXMLReader ( ) ; mSaxHandler . setTagEventListener ( new TagEventListener ( ) { public void onTagStarted ( String fullTagName , String tagName ) { if ( listener != null ) { listener . onTagStarted ( BigXmlReader . this , fullTagName , tagName ) ; } } public void onTagFinished ( String fullTagName , String tagName , String value , Attributes atts ) { if ( listener != null ) { listener . onTagFinished ( BigXmlReader . this , fullTagName , tagName , value , atts ) ; } } } ) ; xmlReader . setContentHandler ( mSaxHandler ) ; InputSource iso = new InputSource ( reader ) ; xmlReader . parse ( iso ) ; } catch ( BasicSAXHandlerException e ) { } catch ( ParserConfigurationException e ) { e . printStackTrace ( ) ; } catch ( SAXException e ) { e . printStackTrace ( ) ; } catch ( IOException e ) { e . printStackTrace ( ) ; } }
Read big XML file from specific stream
39,131
/** Resets all internal parser state so the handler can be reused for a new run. */
public void initialize() {
    mIsForceStop = false;
    mCurrentTagKey = null;
    mTagStack.clear();
    mTagValueCache.clear();
    mTagAttrCache.clear();
}
Initialize internal variables
39,132
/**
 * Pushes the full dotted tag path for the tag currently being opened onto the
 * tag stack, e.g. {@code root.element.child}.
 *
 * @param qName local name of the tag being opened
 */
private void pushTag(String qName) {
    final String currentTag = getCurrentTag();
    if ("".equals(currentTag)) {
        mTagStack.push(qName);
    } else {
        // FIX: reuse the value fetched above instead of calling
        // getCurrentTag() a second time.
        mTagStack.push(currentTag + "." + qName);
    }
}
Make current scanning full tag name like root . element . child
39,133
/**
 * Searches for a byte sequence within the given size range of the file,
 * starting at the given position, dispatching to the NIO or legacy engine
 * according to the USE_NIO flag.
 */
public List<Long> searchPartially(File f, byte[] searchBytes, long startPosition, long maxSizeToRead) {
    return USE_NIO
            ? searchPartiallyUsingNIO(f, searchBytes, startPosition, maxSizeToRead, null)
            : searchPartiallyUsingLegacy(f, searchBytes, startPosition, maxSizeToRead, null);
}
Search for a sequence of bytes from the file within the specified size range starting at the specified position .
39,134
/**
 * Parses the raw autocomplete JSON response into Prediction objects.
 *
 * Per prediction the code reads: place id, description, description terms
 * (value + offset), optional types, and the FIRST matched substring's offset
 * and length. Each prediction is linked back to the given client.
 *
 * @throws org.json.JSONException if a required field is missing
 *         (status is validated first via checkStatus)
 */
public static List < Prediction > parse ( GooglePlaces client , String rawJson ) { JSONObject json = new JSONObject ( rawJson ) ; checkStatus ( json . getString ( STRING_STATUS ) , json . optString ( STRING_ERROR_MESSAGE ) ) ; List < Prediction > predictions = new ArrayList < > ( ) ; JSONArray jsonPredictions = json . getJSONArray ( ARRAY_PREDICTIONS ) ; for ( int i = 0 ; i < jsonPredictions . length ( ) ; i ++ ) { JSONObject jsonPrediction = jsonPredictions . getJSONObject ( i ) ; String placeId = jsonPrediction . getString ( STRING_PLACE_ID ) ; String description = jsonPrediction . getString ( STRING_DESCRIPTION ) ; JSONArray jsonTerms = jsonPrediction . getJSONArray ( ARRAY_TERMS ) ; List < DescriptionTerm > terms = new ArrayList < > ( ) ; for ( int a = 0 ; a < jsonTerms . length ( ) ; a ++ ) { JSONObject jsonTerm = jsonTerms . getJSONObject ( a ) ; String value = jsonTerm . getString ( STRING_VALUE ) ; int offset = jsonTerm . getInt ( INTEGER_OFFSET ) ; terms . add ( new DescriptionTerm ( value , offset ) ) ; } JSONArray jsonTypes = jsonPrediction . optJSONArray ( ARRAY_TYPES ) ; List < String > types = new ArrayList < > ( ) ; if ( jsonTypes != null ) { for ( int b = 0 ; b < jsonTypes . length ( ) ; b ++ ) { types . add ( jsonTypes . getString ( b ) ) ; } } JSONArray substrArray = jsonPrediction . getJSONArray ( ARRAY_MATCHED_SUBSTRINGS ) ; JSONObject substr = substrArray . getJSONObject ( 0 ) ; int substrOffset = substr . getInt ( INTEGER_OFFSET ) ; int substrLength = substr . getInt ( INTEGER_LENGTH ) ; predictions . add ( new Prediction ( ) . setPlaceId ( placeId ) . setDescription ( description ) . addTerms ( terms ) . addTypes ( types ) . setSubstringLength ( substrLength ) . setSubstringOffset ( substrOffset ) . setClient ( client ) ) ; } return predictions ; }
Returns a list of predictions from JSON .
39,135
/**
 * Builds the Google-formatted JSON representation of this place.
 * Optional fields (accuracy, phone number, address, website, language) are
 * only included when set.
 */
public JSONObject toJson() {
    final JSONObject location = new JSONObject().put("lat", lat).put("lng", lng);
    final JSONObject result = new JSONObject();
    result.put(OBJECT_LOCATION, location);
    result.put(STRING_NAME, name);
    result.put(STRING_TYPES, new JSONArray(types));
    if (accuracy != -1) {
        result.put(INTEGER_ACCURACY, accuracy);
    }
    if (phoneNumber != null) {
        result.put(STRING_PHONE_NUMBER, phoneNumber);
    }
    if (address != null) {
        result.put(STRING_ADDRESS, address);
    }
    if (website != null) {
        result.put(STRING_WEBSITE, website);
    }
    if (locale != null) {
        result.put(STRING_LANGUAGE, locale.getLanguage());
    }
    return result;
}
Returns a Google formatted JSON object to be sent to Google's servers .
39,136
/** Downloads the photo, caches it on this instance, and returns this for chaining. */
public Photo download(int maxWidth, int maxHeight, Param... extraParams) {
    this.image = place.getClient().downloadPhoto(this, maxWidth, maxHeight, extraParams);
    return this;
}
Downloads the photo and caches it within the photo .
39,137
/**
 * Returns the exception mapped to the given server status code, or null if
 * the status code is unknown. When an error message is provided, the
 * String-constructor of the mapped class is used.
 *
 * @throws GooglePlacesException if the mapped class cannot be instantiated
 */
public static GooglePlacesException parse(String statusCode, String errorMessage) {
    Class<?> clazz = statusClassMap.get(statusCode);
    if (clazz == null) {
        return null;
    }
    try {
        if (errorMessage == null || errorMessage.isEmpty()) {
            // FIX: Class.newInstance() is deprecated and bypasses checked-
            // exception handling; use the explicit no-arg constructor.
            return (GooglePlacesException) clazz.getDeclaredConstructor().newInstance();
        }
        Constructor<?> constructor = clazz.getConstructor(String.class);
        return (GooglePlacesException) constructor.newInstance(errorMessage);
    } catch (Exception e) {
        throw new GooglePlacesException(e);
    }
}
Returns the correct exception from a server - given status code and error message .
39,138
/**
 * Sets the value of this Param by joining the given values with the
 * URL-encoded pipe separator ({@code %7C}).
 */
public Param value(List<String> values) {
    this.value = String.join("%7C", values);
    return this;
}
Sets the values of the Param .
39,139
/**
 * Sets a Jersey container property. Allows configuring ResourceConfig
 * properties that cannot be set through JAX-RS features.
 */
public JerseyModuleExtender setProperty(String name, Object value) {
    contributeProperties().addBinding(name).toInstance(value);
    return this;
}
Sets Jersey container property . This allows setting ResourceConfig properties that can not be set via JAX RS features .
39,140
/**
 * Logs the crawler start banner: records the start time, then writes version,
 * paths, user agent, load mode (full vs. update), film-list mode, import URLs
 * and the optional sender filter to the system log.
 * (Log messages are intentionally kept in German — they are runtime output.)
 */
public static synchronized void startMsg ( ) { Log . startZeit . setTime ( System . currentTimeMillis ( ) ) ; Log . versionMsg ( Const . PROGRAMMNAME ) ; Log . sysLog ( Log . LILNE ) ; Log . sysLog ( "" ) ; Log . sysLog ( "Programmpfad: " + Functions . getPathJar ( ) ) ; Log . sysLog ( "Filmliste: " + getPathFilmlist_json_akt ( true ) ) ; Log . sysLog ( "Useragent: " + Config . getUserAgent ( ) ) ; Log . sysLog ( "" ) ; Log . sysLog ( Log . LILNE ) ; Log . sysLog ( "" ) ; if ( loadLongMax ( ) ) { Log . sysLog ( "Laden: alles" ) ; } else { Log . sysLog ( "Laden: nur update" ) ; } if ( CrawlerConfig . updateFilmliste ) { Log . sysLog ( "Filmliste: nur updaten" ) ; } else { Log . sysLog ( "Filmliste: neu erstellen" ) ; } Log . sysLog ( "ImportURL 1: " + CrawlerConfig . importUrl_1__anhaengen ) ; Log . sysLog ( "ImportURL 2: " + CrawlerConfig . importUrl_2__anhaengen ) ; Log . sysLog ( "ImportOLD: " + CrawlerConfig . importOld ) ; Log . sysLog ( "ImportAkt: " + CrawlerConfig . importAkt ) ; if ( CrawlerConfig . nurSenderLaden != null ) { Log . sysLog ( "Nur Sender laden: " + StringUtils . join ( CrawlerConfig . nurSenderLaden , ',' ) ) ; } Log . sysLog ( "" ) ; Log . sysLog ( Log . LILNE ) ; }
ist die aktuelle Filmliste xz komprimiert
39,141
/**
 * Prepends the domain when the URL is relative (starts with '/').
 * Null, empty, or already-absolute URLs are returned unchanged.
 */
public static String addDomainIfMissing(final String aUrl, final String aDomain) {
    final boolean isRelative = aUrl != null && !aUrl.isEmpty() && aUrl.startsWith("/");
    return isRelative ? aDomain + aUrl : aUrl;
}
adds the domain if missing .
39,142
/**
 * Prepends the protocol when the URL is protocol-relative (starts with "//").
 * Other URLs (including null) are returned unchanged.
 */
public static String addProtocolIfMissing(final String aUrl, final String aProtocol) {
    final boolean isProtocolRelative = aUrl != null && aUrl.startsWith("//");
    return isProtocolRelative ? aProtocol + aUrl : aUrl;
}
adds the protocol if missing .
39,143
/**
 * Changes or adds an URL parameter: strips any existing occurrence of the
 * parameter from the query string and appends {@code aParameter=aValue}.
 *
 * @param aUrl       the URL to modify
 * @param aParameter the parameter name
 * @param aValue     the parameter value
 * @return the URL with the parameter set
 */
public static String changeOrAddParameter(final String aUrl, final String aParameter, final String aValue) {
    final StringBuilder newUrlBuilder = new StringBuilder();
    final String[] splittedUrl = aUrl.split(REGEX_ESCAPOR + URL_TO_PARAMETERS_SPLITTERATOR);
    newUrlBuilder.append(splittedUrl[0]);
    if (splittedUrl.length == 2) {
        // BUG FIX: the replaceAll chain previously applied only to the
        // splitter constant (method invocation binds tighter than '+'),
        // so the existing parameter was never removed. The concatenation
        // must be parenthesized before cleaning.
        final String cleanedParameters = (splittedUrl[1] + URL_TO_PARAMETERS_SPLITTERATOR)
                .replaceAll(String.format(URL_PARAMETER_REPLACEMENT_REGEX_PATTERN, aParameter), "")
                .replaceAll(REGEX_ESCAPOR + WRONG_PARAMETER_START, URL_TO_PARAMETERS_SPLITTERATOR);
        newUrlBuilder.append(URL_TO_PARAMETERS_SPLITTERATOR);
        newUrlBuilder.append(cleanedParameters);
        if (!cleanedParameters.endsWith(URL_PARAMETER_SEPPERATOR) && !cleanedParameters.isEmpty()) {
            newUrlBuilder.append(URL_PARAMETER_SEPPERATOR);
        }
    } else {
        newUrlBuilder.append(URL_TO_PARAMETERS_SPLITTERATOR);
    }
    newUrlBuilder.append(String.format(PARAMETER_PATTERN, aParameter, aValue));
    return newUrlBuilder.toString();
}
Changes or adds an URL parameter .
39,144
/**
 * Checks whether an URL exists using a HEAD request.
 *
 * @param aUrl the URL to check
 * @return true if the server answered with HTTP 200, false on any other
 *         status or on an I/O error
 */
public static boolean existsUrl(final String aUrl) {
    HttpURLConnection connection = null;
    try {
        final URL url = new URL(aUrl);
        connection = (HttpURLConnection) url.openConnection();
        connection.setRequestMethod("HEAD");
        connection.connect();
        return connection.getResponseCode() == HttpURLConnection.HTTP_OK;
    } catch (IOException e) {
        return false;
    } finally {
        // FIX: release the connection; the original leaked it.
        if (connection != null) {
            connection.disconnect();
        }
    }
}
checks whether an url exists . uses head request to check .
39,145
/**
 * Returns the file name component of the URL — the text after the last '/'
 * — but only when it contains a dot (i.e. looks like a file).
 */
public static Optional<String> getFileName(final String aUrl) {
    if (aUrl == null) {
        return Optional.empty();
    }
    final int lastSlash = aUrl.lastIndexOf('/');
    if (lastSlash <= 0) {
        return Optional.empty();
    }
    final String file = aUrl.substring(lastSlash + 1);
    return file.contains(".") ? Optional.of(file) : Optional.empty();
}
returns the file name of the url .
39,146
/** Returns the file extension of the URL (text after the last dot), if any. */
public static Optional<String> getFileType(final String aUrl) {
    if (aUrl == null) {
        return Optional.empty();
    }
    final int lastDot = aUrl.lastIndexOf('.');
    return lastDot > 0 ? Optional.of(aUrl.substring(lastDot + 1)) : Optional.empty();
}
returns the file type of the url .
39,147
/**
 * Returns the protocol part of the URL — everything before the first "//"
 * (e.g. "http:") — if present.
 */
public static Optional<String> getProtocol(final String aUrl) {
    if (aUrl == null) {
        return Optional.empty();
    }
    final int separatorIndex = aUrl.indexOf("//");
    if (separatorIndex > 0) {
        return Optional.of(aUrl.substring(0, separatorIndex));
    }
    return Optional.empty();
}
returns the protocol of the url .
39,148
/**
 * Returns the value of the given URL parameter, if the URL carries it.
 *
 * @throws UrlParseException if the URL's parameters cannot be parsed
 */
public static Optional<String> getUrlParameterValue(final String aUrl, final String aParameterName)
        throws UrlParseException {
    if (aUrl == null) {
        return Optional.empty();
    }
    final Map<String, String> parameters = getUrlParameters(aUrl);
    if (parameters.containsKey(aParameterName)) {
        return Optional.of(parameters.get(aParameterName));
    }
    return Optional.empty();
}
returns the value of an url parameter .
39,149
/**
 * Determines the broadcast date from the list of broadcasts.
 *
 * Selection order grounded in the code: first broadcast ("first"), then major
 * re-run, then minor re-run — each only when its catch-up rights window allows
 * it (via getBroadcastDateConsideringCatchupRights). When nothing qualifies,
 * falls back to the first broadcast, then the major re-run, ignoring catch-up
 * rights. Unknown broadcast types are logged at debug level and skipped.
 */
private String getBroadcastDate ( JsonArray broadcastArray ) { String broadcastDate = "" ; String broadcastBeginFirst = "" ; String broadcastBeginMajor = "" ; String broadcastBeginMinor = "" ; for ( int i = 0 ; i < broadcastArray . size ( ) ; i ++ ) { JsonObject broadcastObject = broadcastArray . get ( i ) . getAsJsonObject ( ) ; if ( broadcastObject . has ( JSON_ELEMENT_BROADCASTTYPE ) && broadcastObject . has ( JSON_ELEMENT_BROADCAST ) ) { String value = this . getBroadcastDateConsideringCatchupRights ( broadcastObject ) ; if ( ! value . isEmpty ( ) ) { String type = broadcastObject . get ( JSON_ELEMENT_BROADCASTTYPE ) . getAsString ( ) ; switch ( type ) { case BROADCASTTTYPE_FIRST : broadcastBeginFirst = value ; break ; case BROADCASTTTYPE_MAJOR_RE : broadcastBeginMajor = value ; break ; case BROADCASTTTYPE_MINOR_RE : broadcastBeginMinor = value ; break ; default : LOG . debug ( "New broadcasttype: " + type ) ; } } } } if ( ! broadcastBeginFirst . isEmpty ( ) ) { broadcastDate = broadcastBeginFirst ; } else if ( ! broadcastBeginMajor . isEmpty ( ) ) { broadcastDate = broadcastBeginMajor ; } else if ( ! broadcastBeginMinor . isEmpty ( ) ) { broadcastDate = broadcastBeginMinor ; } if ( broadcastDate . isEmpty ( ) ) { broadcastDate = getBroadcastDateIgnoringCatchupRights ( broadcastArray , BROADCASTTTYPE_FIRST ) ; } if ( broadcastDate . isEmpty ( ) ) { broadcastDate = getBroadcastDateIgnoringCatchupRights ( broadcastArray , BROADCASTTTYPE_MAJOR_RE ) ; } return broadcastDate ; }
ermittelt Ausstrahlungsdatum aus der Liste der Ausstrahlungen
39,150
/**
 * Returns the broadcast begin when today lies inside the catch-up rights
 * window, or before it, or when no catch-up rights are set; otherwise "".
 *
 * Notes grounded in the code:
 * - With catch-up rights present: the broadcast value is used when
 *   begin <= today <= end (date-only comparison) OR today < begin.
 * - Without catch-up rights: the broadcast value is parsed purely as a
 *   validity check before being returned; broadcastCal itself is otherwise
 *   unused — presumably intentional validation, TODO confirm.
 * - Parse failures are logged at debug level and yield "".
 */
private String getBroadcastDateConsideringCatchupRights ( JsonObject broadcastObject ) { String broadcastDate = "" ; JsonElement elementBegin = broadcastObject . get ( JSON_ELEMENT_BROADCAST_CATCHUPRIGHTS_BEGIN ) ; JsonElement elementEnd = broadcastObject . get ( JSON_ELEMENT_BROADCAST_CATCHUPRIGHTS_END ) ; if ( ! elementBegin . isJsonNull ( ) && ! elementEnd . isJsonNull ( ) ) { String begin = elementBegin . getAsString ( ) ; String end = elementEnd . getAsString ( ) ; try { Calendar beginDate = Calendar . getInstance ( ) ; beginDate . setTime ( broadcastDateFormat . parse ( begin ) ) ; Calendar endDate = Calendar . getInstance ( ) ; endDate . setTime ( broadcastDateFormat . parse ( end ) ) ; if ( ( DateWithoutTimeComparer . compare ( today , beginDate ) >= 0 && DateWithoutTimeComparer . compare ( today , endDate ) <= 0 ) || ( DateWithoutTimeComparer . compare ( today , beginDate ) < 0 ) ) { broadcastDate = broadcastObject . get ( JSON_ELEMENT_BROADCAST ) . getAsString ( ) ; } } catch ( ParseException ex ) { LOG . debug ( ex ) ; } } else { String broadcast = broadcastObject . get ( JSON_ELEMENT_BROADCAST ) . getAsString ( ) ; try { Calendar broadcastCal = Calendar . getInstance ( ) ; broadcastCal . setTime ( broadcastDateFormat . parse ( broadcast ) ) ; broadcastDate = broadcast ; } catch ( ParseException ex ) { LOG . debug ( ex ) ; } } return broadcastDate ; }
Liefert den Beginn der Ausstrahlung wenn - heute im Zeitraum von CatchUpRights liegt - oder heute vor dem Zeitraum liegt - oder CatchUpRights nicht gesetzt ist
39,151
/** Gets the value of the given attribute as a string, if present and non-null. */
public static Optional<String> getAttributeAsString(final JsonObject aJsonObject, final String aAttributeName) {
    if (!aJsonObject.has(aAttributeName)) {
        return Optional.empty();
    }
    final JsonElement element = aJsonObject.get(aAttributeName);
    return element.isJsonNull() ? Optional.empty() : Optional.of(element.getAsString());
}
Gets the value of an attribute
39,152
/**
 * Re-formats a date-time string into the DatenFilm time format.
 * Returns an empty string when the input cannot be parsed.
 */
public static String formatTime(String dateValue, FastDateFormat sdf) {
    try {
        return FDF_OUT_TIME.format(sdf.parse(dateValue));
    } catch (ParseException ex) {
        LOG.debug(String.format("Fehler beim Parsen des Datums %s: %s", dateValue, ex.getMessage()));
        return "";
    }
}
formats a datetime string to the time format used in DatenFilm
39,153
/**
 * Searches all films from the senders: initializes the search with the given
 * film list, starts the first crawler batch, and — unless a stop was
 * requested — waits for it and starts the second batch before flagging
 * everything as started.
 */
public synchronized void filmeBeimSenderLaden ( ListeFilme listeFilme ) { initStart ( listeFilme ) ; mrStarten ( 0 ) ; if ( ! Config . getStop ( ) ) { mrWarten ( ) ; mrStarten ( 1 ) ; allStarted = true ; } }
es werden alle Filme gesucht
39,154
/**
 * Updates only the given senders: for every reader whose sender matches one
 * of the names, a dedicated thread is started. When no reader matched,
 * completion is reported immediately.
 *
 * NOTE(review): raw Thread creation per reader — presumably acceptable for
 * this crawler's scale; an executor would be the modern choice.
 */
public void updateSender ( String [ ] nameSender , ListeFilme listeFilme ) { boolean starten = false ; initStart ( listeFilme ) ; for ( MediathekReader reader : mediathekListe ) { for ( String s : nameSender ) { if ( reader . checkNameSenderFilmliste ( s ) ) { starten = true ; new Thread ( reader ) . start ( ) ; } } } allStarted = true ; if ( ! starten ) { meldenFertig ( "" ) ; } }
es werden nur einige Sender aktualisiert
39,155
/**
 * Searches the page for an M3U8 URL. The first extracted candidate starting
 * with http is used; WDR M3U8 URLs are expanded into their MP4 quality URLs,
 * any other URL is registered as NORMAL quality.
 */
private Map<Qualities, String> searchForUrlsWithM3U8(final MSStringBuilder aSeiteStringExtractor) {
    final Map<Qualities, String> urls = new EnumMap<>(Qualities.class);
    final ArrayList<String> candidates = new ArrayList<>();
    aSeiteStringExtractor.extractList(M3U8_PATTERN_START, M3U8_PATTERN_END, candidates);
    String m3u8Url = null;
    for (final String candidate : candidates) {
        if (candidate.startsWith(TEXT_START_HTTP)) {
            m3u8Url = candidate;
            break;
        }
    }
    if (m3u8Url == null) {
        return urls;
    }
    m3u8Url = m3u8Url.replaceAll(URL_GET_PARAMETER, "");
    if (m3u8Url.contains(M3U8Utils.M3U8_WDR_URL_BEGIN)) {
        urls.putAll(M3U8Utils.gatherUrlsFromWdrM3U8(m3u8Url));
    } else {
        urls.put(Qualities.NORMAL, m3u8Url);
    }
    return urls;
}
Searches the Seite for a quality auto to get a M3U8 URL . If the URL is from WRD it searches for the URLs of the MP4 files .
39,156
/** Extracts the download URL matching the given pattern from the page XML. */
private String getUrl(MSStringBuilder seiteXml, String pattern) {
    return seiteXml.extract(pattern, PATTERN_DLURL, PATTERN_END);
}
gets the url for the specified pattern
39,157
/**
 * Sorts a found film into the result list: resolves file size, upgrades the
 * URL, records URL history and geo data, then adds it to the new film list.
 * The per-sender film counter is only incremented when the film was actually
 * accepted (addFilmVomSender returned true).
 */
protected void addFilm ( DatenFilm film ) { film . setFileSize ( ) ; upgradeUrl ( film ) ; film . setUrlHistory ( ) ; setGeo ( film ) ; if ( mlibFilmeSuchen . listeFilmeNeu . addFilmVomSender ( film ) ) { FilmeSuchen . listeSenderLaufen . inc ( film . arr [ DatenFilm . FILM_SENDER ] , RunSender . Count . FILME ) ; } }
Es werden die gefundenen Filme in die Liste einsortiert .
39,158
/**
 * Determines the absolute links to the letter overview pages by resolving
 * each matching anchor's href against the ORF base URL.
 */
public static List<String> parseLetterLinks(Document aDocument) {
    final List<String> results = new ArrayList<>();
    for (final Element link : aDocument.select(LETTER_URL_SELECTOR)) {
        if (link.hasAttr(ATTRIBUTE_HREF)) {
            results.add(OrfConstants.URL_BASE + link.attr(ATTRIBUTE_HREF));
        }
    }
    return results;
}
determines the links to the letter pages
39,159
public void writeHash ( String id ) { try ( BufferedWriter fileWriter = Files . newBufferedWriter ( baseDir . resolve ( FILE_NAME ) , StandardCharsets . UTF_8 ) ) { fileWriter . write ( id ) ; } catch ( IOException ioException ) { Log . errorLog ( 494461668 , ioException , "Der Filmlisten Hash konnte nicht geschrieben werden." ) ; } }
Schreibt die gegebene ID in die Filmlist Hash Datei .
39,160
public static void writeJson ( Writer writer , Object object ) throws IOException { ObjectMapper om = ObjectMapperProvider . createCustomMapper ( ) ; om . configure ( SerializationConfig . Feature . INDENT_OUTPUT , true ) ; om . configure ( SerializationConfig . Feature . FAIL_ON_EMPTY_BEANS , false ) ; writer . write ( om . writeValueAsString ( object ) ) ; writer . write ( "\n" ) ; writer . flush ( ) ; }
Writes object to the writer as JSON using Jackson and adds a new - line before flushing .
39,161
public List < HdfsStats > getAllDirs ( String cluster , String pathPrefix , int limit , long runId ) throws IOException { long encodedRunId = getEncodedRunId ( runId ) ; String rowPrefixStr = Long . toString ( encodedRunId ) + HdfsConstants . SEP + cluster ; if ( StringUtils . isNotEmpty ( pathPrefix ) ) { rowPrefixStr += HdfsConstants . SEP + StringUtil . cleanseToken ( pathPrefix ) ; } LOG . info ( " Getting all dirs for cluster " + cluster + " with pathPrefix: " + pathPrefix + " for runId " + runId + " encodedRunId: " + encodedRunId + " limit: " + limit + " row prefix : " + rowPrefixStr ) ; byte [ ] rowPrefix = Bytes . toBytes ( rowPrefixStr ) ; Scan scan = createScanWithAllColumns ( ) ; scan . setStartRow ( rowPrefix ) ; Filter prefixFilter = new WhileMatchFilter ( new PrefixFilter ( rowPrefix ) ) ; scan . setFilter ( prefixFilter ) ; scan . setCaching ( Math . min ( limit , defaultScannerCaching ) ) ; scan . setMaxVersions ( 1 ) ; return createFromScanResults ( cluster , null , scan , limit , Boolean . FALSE , 0l , 0l ) ; }
Gets hdfs stats about all dirs on the given cluster
39,162
private List < HdfsStats > createFromScanResults ( String cluster , String path , Scan scan , int maxCount , boolean checkPath , long starttime , long endtime ) throws IOException { Map < HdfsStatsKey , HdfsStats > hdfsStats = new HashMap < HdfsStatsKey , HdfsStats > ( ) ; ResultScanner scanner = null ; Stopwatch timer = new Stopwatch ( ) . start ( ) ; int rowCount = 0 ; long colCount = 0 ; long resultSize = 0 ; Table hdfsUsageTable = null ; try { hdfsUsageTable = hbaseConnection . getTable ( TableName . valueOf ( HdfsConstants . HDFS_USAGE_TABLE ) ) ; scanner = hdfsUsageTable . getScanner ( scan ) ; for ( Result result : scanner ) { if ( result != null && ! result . isEmpty ( ) ) { colCount += result . size ( ) ; rowCount = populateHdfsStats ( result , hdfsStats , checkPath , path , starttime , endtime , rowCount ) ; if ( rowCount >= maxCount ) { break ; } } } timer . stop ( ) ; LOG . info ( "In createFromScanResults For cluster " + cluster + " Fetched from hbase " + rowCount + " rows, " + colCount + " columns, " + resultSize + " bytes ( " + resultSize / ( 1024 * 1024 ) + ") MB, in total time of " + timer ) ; } finally { try { if ( scanner != null ) { scanner . close ( ) ; } } finally { if ( hdfsUsageTable != null ) { hdfsUsageTable . close ( ) ; } } } List < HdfsStats > values = new ArrayList < HdfsStats > ( hdfsStats . values ( ) ) ; Collections . sort ( values ) ; return values ; }
Scans the hbase table and populates the hdfs stats
39,163
private int populateHdfsStats ( Result result , Map < HdfsStatsKey , HdfsStats > hdfsStats , boolean checkPath , String path , long starttime , long endtime , int rowCount ) { HdfsStatsKey currentFullKey = hdfsStatsKeyConv . fromBytes ( result . getRow ( ) ) ; QualifiedPathKey qpk = currentFullKey . getQualifiedPathKey ( ) ; if ( checkPath ) { if ( ! qpk . getPath ( ) . equalsIgnoreCase ( StringUtil . cleanseToken ( path ) ) ) { return rowCount ; } if ( ( currentFullKey . getRunId ( ) < endtime ) || ( currentFullKey . getRunId ( ) > starttime ) ) { return rowCount ; } } HdfsStatsKey currentKey = new HdfsStatsKey ( qpk . getCluster ( ) , qpk . getPath ( ) , currentFullKey . getEncodedRunId ( ) ) ; HdfsStats currentHdfsStats = hdfsStats . get ( currentKey ) ; if ( currentHdfsStats != null ) { currentHdfsStats . populate ( result ) ; } else { currentHdfsStats = new HdfsStats ( new HdfsStatsKey ( currentKey ) ) ; currentHdfsStats . populate ( result ) ; hdfsStats . put ( currentKey , currentHdfsStats ) ; } return rowCount + 1 ; }
Populates the hdfs stats for a cluster based on the hbase Result
39,164
public byte [ ] toBytes ( TaskKey taskKey ) { return Bytes . add ( jobKeyConv . toBytes ( taskKey ) , Constants . SEP_BYTES , Bytes . toBytes ( taskKey . getTaskId ( ) ) ) ; }
Returns the bytes representation for a TaskKey .
39,165
public TaskKey fromBytes ( byte [ ] bytes ) { byte [ ] [ ] keyComponents = JobKeyConverter . splitJobKey ( bytes ) ; JobKey jobKey = jobKeyConv . parseJobKey ( keyComponents ) ; return new TaskKey ( jobKey , ( keyComponents . length > 5 ? Bytes . toString ( keyComponents [ 5 ] ) : null ) ) ; }
Generates a TaskKey from the byte encoded format .
39,166
public Configuration getRawJobConfiguration ( QualifiedJobId jobId ) throws IOException { Configuration conf = null ; byte [ ] rowKey = idConv . toBytes ( jobId ) ; Get get = new Get ( rowKey ) ; get . addColumn ( Constants . RAW_FAM_BYTES , Constants . JOBCONF_COL_BYTES ) ; Table rawTable = null ; try { rawTable = hbaseConnection . getTable ( TableName . valueOf ( Constants . HISTORY_RAW_TABLE ) ) ; Result result = rawTable . get ( get ) ; if ( result != null && ! result . isEmpty ( ) ) { conf = createConfigurationFromResult ( result ) ; } } catch ( MissingColumnInResultException e ) { LOG . error ( "Failed to retrieve configuration from row returned for " + jobId , e ) ; } finally { if ( rawTable != null ) { rawTable . close ( ) ; } } return conf ; }
Returns the raw job configuration stored for the given cluster and job ID
39,167
public String getRawJobHistory ( QualifiedJobId jobId ) throws IOException { String historyData = null ; byte [ ] rowKey = idConv . toBytes ( jobId ) ; Get get = new Get ( rowKey ) ; get . addColumn ( Constants . RAW_FAM_BYTES , Constants . JOBHISTORY_COL_BYTES ) ; Table rawTable = null ; try { rawTable = hbaseConnection . getTable ( TableName . valueOf ( Constants . HISTORY_RAW_TABLE ) ) ; Result result = rawTable . get ( get ) ; if ( result != null && ! result . isEmpty ( ) ) { historyData = Bytes . toString ( result . getValue ( Constants . RAW_FAM_BYTES , Constants . JOBHISTORY_COL_BYTES ) ) ; } } finally { if ( rawTable != null ) { rawTable . close ( ) ; } } return historyData ; }
Returns the raw job history file stored for the given cluster and job ID .
39,168
public long getApproxSubmitTime ( Result value ) throws MissingColumnInResultException { if ( value == null ) { throw new IllegalArgumentException ( "Cannot get last modification time from " + "a null hbase result" ) ; } Cell cell = value . getColumnLatestCell ( Constants . INFO_FAM_BYTES , Constants . JOBHISTORY_LAST_MODIFIED_COL_BYTES ) ; if ( cell == null ) { throw new MissingColumnInResultException ( Constants . INFO_FAM_BYTES , Constants . JOBHISTORY_LAST_MODIFIED_COL_BYTES ) ; } byte [ ] lastModTimeBytes = CellUtil . cloneValue ( cell ) ; long lastModTime = Bytes . toLong ( lastModTimeBytes ) ; long jobSubmitTimeMillis = lastModTime - Constants . AVERGAE_JOB_DURATION ; LOG . debug ( "Approximate job submit time is " + jobSubmitTimeMillis + " based on " + lastModTime ) ; return jobSubmitTimeMillis ; }
approximates the job submit time based on the last modification time of the job history file
39,169
public void markJobForReprocesssing ( QualifiedJobId jobId ) throws IOException { Put p = new Put ( idConv . toBytes ( jobId ) ) ; p . addColumn ( Constants . INFO_FAM_BYTES , Constants . RAW_COL_REPROCESS_BYTES , Bytes . toBytes ( true ) ) ; Table rawTable = null ; try { rawTable = hbaseConnection . getTable ( TableName . valueOf ( Constants . HISTORY_RAW_TABLE ) ) ; rawTable . put ( p ) ; } finally { if ( rawTable != null ) { rawTable . close ( ) ; } } }
Flags a job's RAW record for reprocessing
39,170
public Put getAggregatedStatusPut ( byte [ ] row , byte [ ] col , Boolean status ) { Put put = new Put ( row ) ; put . addColumn ( Constants . INFO_FAM_BYTES , col , Bytes . toBytes ( status ) ) ; try { LOG . info ( " agg status " + status + " and put " + put . toJSON ( ) ) ; } catch ( IOException e ) { } return put ; }
creates a Put to update the aggregation status in the RAW table
39,171
public boolean getStatusAgg ( byte [ ] row , byte [ ] col ) throws IOException { Get g = new Get ( row ) ; g . addColumn ( Constants . INFO_FAM_BYTES , col ) ; Table rawTable = null ; Cell cell = null ; try { rawTable = hbaseConnection . getTable ( TableName . valueOf ( Constants . HISTORY_RAW_TABLE ) ) ; Result r = rawTable . get ( g ) ; cell = r . getColumnLatestCell ( Constants . INFO_FAM_BYTES , col ) ; } finally { if ( rawTable != null ) { rawTable . close ( ) ; } } boolean status = false ; try { if ( cell != null ) { status = Bytes . toBoolean ( CellUtil . cloneValue ( cell ) ) ; } } catch ( IllegalArgumentException iae ) { LOG . error ( "Caught " + iae ) ; } LOG . info ( "Returning from Raw, " + Bytes . toString ( col ) + " for this job=" + status ) ; return status ; }
creates a Get to fetch the daily aggregation status from the RAW table
39,172
public int compareTo ( Object other ) { if ( other == null ) { return - 1 ; } FlowKey otherKey = ( FlowKey ) other ; return new CompareToBuilder ( ) . appendSuper ( super . compareTo ( other ) ) . append ( getEncodedRunId ( ) , otherKey . getEncodedRunId ( ) ) . toComparison ( ) ; }
Compares two FlowKey objects on the basis of their cluster userName appId and encodedRunId
39,173
public void writeIndexes ( JobKey jobKey ) throws IOException { if ( jobKey != null ) { Table historyByJobIdTable = null ; try { historyByJobIdTable = hbaseConnection . getTable ( TableName . valueOf ( Constants . HISTORY_BY_JOBID_TABLE ) ) ; byte [ ] jobKeyBytes = jobKeyConv . toBytes ( jobKey ) ; byte [ ] rowKeyBytes = jobIdConv . toBytes ( new QualifiedJobId ( jobKey . getCluster ( ) , jobKey . getJobId ( ) ) ) ; Put p = new Put ( rowKeyBytes ) ; p . addColumn ( Constants . INFO_FAM_BYTES , Constants . ROWKEY_COL_BYTES , jobKeyBytes ) ; historyByJobIdTable . put ( p ) ; } finally { if ( historyByJobIdTable != null ) { historyByJobIdTable . close ( ) ; } } } }
Create the secondary indexes records cluster!jobId - > jobKey .
39,174
public static byte [ ] join ( byte [ ] separator , byte [ ] ... components ) { if ( components == null || components . length == 0 ) { return Constants . EMPTY_BYTES ; } int finalSize = 0 ; if ( separator != null ) { finalSize = separator . length * ( components . length - 1 ) ; } for ( byte [ ] comp : components ) { finalSize += comp . length ; } byte [ ] buf = new byte [ finalSize ] ; int offset = 0 ; for ( int i = 0 ; i < components . length ; i ++ ) { System . arraycopy ( components [ i ] , 0 , buf , offset , components [ i ] . length ) ; offset += components [ i ] . length ; if ( i < ( components . length - 1 ) && separator != null && separator . length > 0 ) { System . arraycopy ( separator , 0 , buf , offset , separator . length ) ; offset += separator . length ; } } return buf ; }
Returns a single byte array containing all of the individual component arrays separated by the separator array .
39,175
public static byte [ ] safeCopy ( byte [ ] source , int offset , int length ) { if ( length < 0 || source . length < offset + length ) { return null ; } byte [ ] copy = new byte [ length ] ; System . arraycopy ( source , offset , copy , 0 , length ) ; return copy ; }
Returns a copy of the source byte array starting at offset for the given length . If the offset + length is out of bounds for the array returns null .
39,176
public static long getValueAsLong ( final byte [ ] key , final Map < byte [ ] , byte [ ] > taskValues ) { byte [ ] value = taskValues . get ( key ) ; if ( value != null ) { try { long retValue = Bytes . toLong ( value ) ; return retValue ; } catch ( NumberFormatException nfe ) { LOG . error ( "Caught NFE while converting to long " , nfe ) ; return 0L ; } catch ( IllegalArgumentException iae ) { LOG . error ( "Caught IAE while converting to long " , iae ) ; return 0L ; } } else { return 0L ; } }
returns a value from the Map as a long
39,177
public static String getValueAsString ( final byte [ ] key , final Map < byte [ ] , byte [ ] > taskValues ) { byte [ ] value = taskValues . get ( key ) ; if ( value != null ) { return Bytes . toString ( value ) ; } else { return "" ; } }
returns a value from the Map as a String
39,178
public static double getValueAsDouble ( byte [ ] key , NavigableMap < byte [ ] , byte [ ] > infoValues ) { byte [ ] value = infoValues . get ( key ) ; if ( value != null ) { return Bytes . toDouble ( value ) ; } else { return 0.0 ; } }
return a value from the NavigableMap as a Double
39,179
public static int getValueAsInt ( byte [ ] key , Map < byte [ ] , byte [ ] > infoValues ) { byte [ ] value = infoValues . get ( key ) ; if ( value != null ) { try { int retValue = Bytes . toInt ( value ) ; return retValue ; } catch ( NumberFormatException nfe ) { LOG . error ( "Caught NFE while converting to int " , nfe ) ; return 0 ; } catch ( IllegalArgumentException iae ) { LOG . error ( "Caught IAE while converting to int " , iae ) ; return 0 ; } } else { return 0 ; } }
get value from a map as an int
39,180
public int compareTo ( Object other ) { if ( other == null ) { return - 1 ; } AppKey otherKey = ( AppKey ) other ; return new CompareToBuilder ( ) . append ( this . cluster , otherKey . getCluster ( ) ) . append ( this . userName , otherKey . getUserName ( ) ) . append ( this . appId , otherKey . getAppId ( ) ) . toComparison ( ) ; }
Compares two AppKey objects on the basis of their cluster userName and appId
39,181
public static HadoopVersion getVersion ( byte [ ] historyFileContents ) { if ( historyFileContents . length > HADOOP2_VERSION_LENGTH ) { String version2Part = new String ( historyFileContents , 0 , HADOOP2_VERSION_LENGTH ) ; if ( StringUtils . equalsIgnoreCase ( version2Part , HADOOP2_VERSION_STRING ) ) { return HadoopVersion . TWO ; } } throw new IllegalArgumentException ( " Unknown format of job history file: " + historyFileContents ) ; }
determines the version of hadoop that the history file belongs to
39,182
protected void createPool ( PathFilter ... filters ) { MultiPathFilter multi = new MultiPathFilter ( ) ; for ( PathFilter f : filters ) { multi . add ( f ) ; } pools . add ( multi ) ; }
Create a new pool and add the filters to it . A pathname can satisfy any one of the specified filters . A split cannot have files from different pools .
39,183
private void addCreatedSplit ( List < InputSplit > splitList , List < String > locations , ArrayList < OneBlockInfo > validBlocks ) { Path [ ] fl = new Path [ validBlocks . size ( ) ] ; long [ ] offset = new long [ validBlocks . size ( ) ] ; long [ ] length = new long [ validBlocks . size ( ) ] ; for ( int i = 0 ; i < validBlocks . size ( ) ; i ++ ) { fl [ i ] = validBlocks . get ( i ) . onepath ; offset [ i ] = validBlocks . get ( i ) . offset ; length [ i ] = validBlocks . get ( i ) . length ; } CombineFileSplit thissplit = new CombineFileSplit ( fl , offset , length , locations . toArray ( new String [ 0 ] ) ) ; splitList . add ( thissplit ) ; }
Create a single split from the list of blocks specified in validBlocks Add this new split into splitList .
39,184
public byte [ ] toBytes ( JobKey jobKey ) { if ( jobKey == null ) { return Constants . EMPTY_BYTES ; } else { return ByteUtil . join ( Constants . SEP_BYTES , Bytes . toBytes ( jobKey . getCluster ( ) ) , Bytes . toBytes ( jobKey . getUserName ( ) ) , Bytes . toBytes ( jobKey . getAppId ( ) ) , Bytes . toBytes ( jobKey . getEncodedRunId ( ) ) , idConv . toBytes ( jobKey . getJobId ( ) ) ) ; } }
Returns the byte encoded representation of a JobKey
39,185
public JobKey parseJobKey ( byte [ ] [ ] keyComponents ) { long encodedRunId = keyComponents . length > 3 ? Bytes . toLong ( keyComponents [ 3 ] ) : Long . MAX_VALUE ; JobId jobId = keyComponents . length > 4 ? idConv . fromBytes ( keyComponents [ 4 ] ) : null ; return new JobKey ( Bytes . toString ( keyComponents [ 0 ] ) , ( keyComponents . length > 1 ? Bytes . toString ( keyComponents [ 1 ] ) : null ) , ( keyComponents . length > 2 ? Bytes . toString ( keyComponents [ 2 ] ) : null ) , Long . MAX_VALUE - encodedRunId , jobId ) ; }
Constructs a JobKey instance from the individual byte encoded key components .
39,186
public int compareTo ( VersionInfo other ) { if ( this . timestamp == other . timestamp ) { return 0 ; } if ( this . timestamp < other . timestamp ) { return 1 ; } return - 1 ; }
Compares two VersionInfo timestamps to order them in reverse chronological order
39,187
public int compareTo ( Object other ) { if ( other == null ) { return - 1 ; } TaskKey otherKey = ( TaskKey ) other ; return new CompareToBuilder ( ) . appendSuper ( super . compareTo ( otherKey ) ) . append ( this . taskId , otherKey . getTaskId ( ) ) . toComparison ( ) ; }
Compares two TaskKey objects on the basis of their taskId
39,188
public int compareTo ( Object other ) { if ( other == null ) { return - 1 ; } JobKey otherKey = ( JobKey ) other ; return new CompareToBuilder ( ) . appendSuper ( super . compareTo ( otherKey ) ) . append ( this . jobId , otherKey . getJobId ( ) ) . toComparison ( ) ; }
Compares two JobKey objects on the basis of their jobId
39,189
public Put getHadoopVersionPut ( HadoopVersion historyFileVersion , byte [ ] jobKeyBytes ) { Put pVersion = new Put ( jobKeyBytes ) ; byte [ ] valueBytes = null ; valueBytes = Bytes . toBytes ( historyFileVersion . toString ( ) ) ; byte [ ] qualifier = Bytes . toBytes ( JobHistoryKeys . hadoopversion . toString ( ) . toLowerCase ( ) ) ; pVersion . add ( Constants . INFO_FAM_BYTES , qualifier , valueBytes ) ; return pVersion ; }
generates a put that sets the hadoop version for a record
39,190
public static long getSubmitTimeMillisFromJobHistory ( byte [ ] jobHistoryRaw ) { long submitTimeMillis = 0 ; if ( null == jobHistoryRaw ) { return submitTimeMillis ; } HadoopVersion hv = JobHistoryFileParserFactory . getVersion ( jobHistoryRaw ) ; switch ( hv ) { case TWO : int startIndex = ByteUtil . indexOf ( jobHistoryRaw , Constants . JOB_SUBMIT_EVENT_BYTES , 0 ) ; if ( startIndex != - 1 ) { int secondQuoteIndex = ByteUtil . indexOf ( jobHistoryRaw , Constants . SUBMIT_TIME_PREFIX_HADOOP2_BYTES , startIndex ) ; if ( secondQuoteIndex != - 1 ) { String submitTimeMillisString = Bytes . toString ( jobHistoryRaw , secondQuoteIndex + Constants . EPOCH_TIMESTAMP_STRING_LENGTH , Constants . EPOCH_TIMESTAMP_STRING_LENGTH ) ; try { submitTimeMillis = Long . parseLong ( submitTimeMillisString ) ; } catch ( NumberFormatException nfe ) { LOG . error ( " caught NFE during conversion of submit time " + submitTimeMillisString + " " + nfe . getMessage ( ) ) ; submitTimeMillis = 0 ; } } } break ; case ONE : default : startIndex = ByteUtil . indexOf ( jobHistoryRaw , Constants . SUBMIT_TIME_PREFIX_BYTES , 0 ) ; if ( startIndex != - 1 ) { int prefixEndIndex = startIndex + Constants . SUBMIT_TIME_PREFIX_BYTES . length ; int secondQuoteIndex = ByteUtil . indexOf ( jobHistoryRaw , Constants . QUOTE_BYTES , prefixEndIndex ) ; if ( secondQuoteIndex != - 1 ) { int numberLength = secondQuoteIndex - prefixEndIndex ; String submitTimeMillisString = Bytes . toString ( jobHistoryRaw , prefixEndIndex , numberLength ) ; try { submitTimeMillis = Long . parseLong ( submitTimeMillisString ) ; } catch ( NumberFormatException nfe ) { LOG . error ( " caught NFE during conversion of submit time " + submitTimeMillisString + " " + nfe . getMessage ( ) ) ; submitTimeMillis = 0 ; } } } break ; } return submitTimeMillis ; }
fetches the submit time from a raw job history byte representation
39,191
public void addEvent ( FlowEvent event ) throws IOException { Put p = createPutForEvent ( event ) ; Table eventTable = null ; try { eventTable = hbaseConnection . getTable ( TableName . valueOf ( Constants . FLOW_EVENT_TABLE ) ) ; eventTable . put ( p ) ; } finally { if ( eventTable != null ) { eventTable . close ( ) ; } } }
Stores a single flow event row
39,192
public void addEvents ( List < FlowEvent > events ) throws IOException { List < Put > puts = new ArrayList < Put > ( events . size ( ) ) ; for ( FlowEvent e : events ) { puts . add ( createPutForEvent ( e ) ) ; } Table eventTable = null ; try { eventTable = hbaseConnection . getTable ( TableName . valueOf ( Constants . FLOW_EVENT_TABLE ) ) ; eventTable . put ( puts ) ; } finally { if ( eventTable != null ) { eventTable . close ( ) ; } } }
Stores a batch of events
39,193
public byte [ ] toBytes ( HdfsStatsKey hdfsStatsKey ) { if ( hdfsStatsKey == null || hdfsStatsKey . getQualifiedPathKey ( ) == null ) { return HdfsConstants . EMPTY_BYTES ; } else { if ( StringUtils . isBlank ( hdfsStatsKey . getQualifiedPathKey ( ) . getNamespace ( ) ) ) { return ByteUtil . join ( HdfsConstants . SEP_BYTES , Bytes . toBytes ( Long . toString ( hdfsStatsKey . getEncodedRunId ( ) ) ) , Bytes . toBytes ( hdfsStatsKey . getQualifiedPathKey ( ) . getCluster ( ) ) , Bytes . toBytes ( hdfsStatsKey . getQualifiedPathKey ( ) . getPath ( ) ) ) ; } else { return ByteUtil . join ( HdfsConstants . SEP_BYTES , Bytes . toBytes ( Long . toString ( hdfsStatsKey . getEncodedRunId ( ) ) ) , Bytes . toBytes ( hdfsStatsKey . getQualifiedPathKey ( ) . getCluster ( ) ) , Bytes . toBytes ( hdfsStatsKey . getQualifiedPathKey ( ) . getPath ( ) ) , Bytes . toBytes ( hdfsStatsKey . getQualifiedPathKey ( ) . getNamespace ( ) ) ) ; } } }
Returns the byte encoded representation of a HdfsStatsKey
39,194
public static HdfsStatsKey parseHdfsStatsKey ( byte [ ] [ ] keyComponents ) { return new HdfsStatsKey ( new QualifiedPathKey ( ( keyComponents . length > 1 ? Bytes . toString ( keyComponents [ 1 ] ) : null ) , ( keyComponents . length > 2 ? Bytes . toString ( keyComponents [ 2 ] ) : null ) , ( keyComponents . length > 3 ? Bytes . toString ( keyComponents [ 3 ] ) : null ) ) , ( keyComponents . length > 0 ? Long . parseLong ( Bytes . toString ( keyComponents [ 0 ] ) ) : null ) ) ; }
Constructs a HdfsStatsKey instance from the individual byte encoded key components .
39,195
static byte [ ] [ ] splitHdfsStatsKey ( byte [ ] rawKey ) { byte [ ] [ ] splits = ByteUtil . split ( rawKey , HdfsConstants . SEP_BYTES , HdfsConstants . NUM_HDFS_USAGE_ROWKEY_COMPONENTS ) ; return splits ; }
Handles splitting the encoded hdfsStats key
39,196
public int compareTo ( TaskDetails otherTask ) { if ( otherTask == null ) { return - 1 ; } return new CompareToBuilder ( ) . append ( this . taskKey , otherTask . getTaskKey ( ) ) . toComparison ( ) ; }
Compares two TaskDetails objects on the basis of their TaskKey
39,197
private void parseFilename ( ) { if ( ( filename != null ) && ( filename . length ( ) > 0 ) && ( ! filename . endsWith ( ".crc" ) ) ) { Matcher matcher = PATTERN . matcher ( filename ) ; if ( matcher . matches ( ) ) { jobid = matcher . group ( 1 ) ; Matcher confMatcher = CONF_PATTERN . matcher ( filename ) ; if ( confMatcher . matches ( ) ) { isJobConfFile = true ; LOG . debug ( "Job Conf file " + filename + " with job id: " + jobid ) ; } else { isJobHistoryFile = true ; LOG . debug ( "Job History file " + filename + " with job id: " + jobid ) ; } } else { LOG . info ( " file does not match any format: " + filename ) ; } } }
Parse the filename and pull the jobid out of it .
39,198
public static boolean shouldRetain ( int i , int maxRetention , int length ) { int retentionCutoff = length - maxRetention ; boolean retain = ( i >= retentionCutoff ) ? true : false ; return retain ; }
Method that can be used when iterating over an array and you want to retain only maxRetention items .
39,199
private Put getJobStatusPut ( ) { Put pStatus = new Put ( jobKeyBytes ) ; byte [ ] valueBytes = Bytes . toBytes ( this . jobStatus ) ; byte [ ] qualifier = Bytes . toBytes ( JobHistoryKeys . JOB_STATUS . toString ( ) . toLowerCase ( ) ) ; pStatus . addColumn ( Constants . INFO_FAM_BYTES , qualifier , valueBytes ) ; return pStatus ; }
generates a put for job status