idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
52,500
def open_resource ( name ) : name_parts = name . lstrip ( '/' ) . split ( '/' ) for part in name_parts : if part == os . path . pardir or os . path . sep in part : raise ValueError ( 'Bad path segment: %r' % part ) filename = os . path . join ( os . path . dirname ( __file__ ) , 'zoneinfo' , * name_parts ) if not os . ...
Open a resource from the zoneinfo subdir for reading .
52,501
def FixedOffset ( offset , _tzinfos = { } ) : if offset == 0 : return UTC info = _tzinfos . get ( offset ) if info is None : info = _tzinfos . setdefault ( offset , _FixedOffset ( offset ) ) return info
return a fixed - offset timezone based off a number of minutes .
52,502
def boolean_or_list ( config_name , args , configs , alternative_names = [ ] ) : for key in alternative_names + [ config_name ] : if hasattr ( args , key ) and getattr ( args , key ) : setattr ( args , config_name , [ '.*' ] ) return setattr ( args , config_name , [ ] ) option = None alternative_names . insert ( 0 , co...
Get a boolean or list of regexes from args and configs .
52,503
def get_style_defs ( self , arg = '' ) : cp = self . commandprefix styles = [ ] for name , definition in iteritems ( self . cmd2def ) : styles . append ( r'\expandafter\def\csname %s@tok@%s\endcsname{%s}' % ( cp , name , definition ) ) return STYLE_TEMPLATE % { 'cp' : self . commandprefix , 'styles' : '\n' . join ( sty...
Return the command sequences needed to define the commands used to format text in the verbatim environment . arg is ignored .
52,504
def _fn_matches ( fn , glob ) : if glob not in _pattern_cache : pattern = _pattern_cache [ glob ] = re . compile ( fnmatch . translate ( glob ) ) return pattern . match ( fn ) return _pattern_cache [ glob ] . match ( fn )
Return whether the supplied file name fn matches pattern filename .
52,505
def get_all_formatters ( ) : for info in itervalues ( FORMATTERS ) : if info [ 1 ] not in _formatter_cache : _load_formatters ( info [ 0 ] ) yield _formatter_cache [ info [ 1 ] ] for _ , formatter in find_plugin_formatters ( ) : yield formatter
Return a generator for all formatter classes .
52,506
def find_formatter_class ( alias ) : for module_name , name , aliases , _ , _ in itervalues ( FORMATTERS ) : if alias in aliases : if name not in _formatter_cache : _load_formatters ( module_name ) return _formatter_cache [ name ] for _ , cls in find_plugin_formatters ( ) : if alias in cls . aliases : return cls
Lookup a formatter by alias .
52,507
def get_formatter_by_name ( _alias , ** options ) : cls = find_formatter_class ( _alias ) if cls is None : raise ClassNotFound ( "no formatter found for name %r" % _alias ) return cls ( ** options )
Lookup and instantiate a formatter by alias .
52,508
def load_formatter_from_file ( filename , formattername = "CustomFormatter" , ** options ) : try : custom_namespace = { } exec ( open ( filename , 'rb' ) . read ( ) , custom_namespace ) if formattername not in custom_namespace : raise ClassNotFound ( 'no valid %s class found in %s' % ( formattername , filename ) ) form...
Load a formatter from a file .
52,509
def get_formatter_for_filename ( fn , ** options ) : fn = basename ( fn ) for modname , name , _ , filenames , _ in itervalues ( FORMATTERS ) : for filename in filenames : if _fn_matches ( fn , filename ) : if name not in _formatter_cache : _load_formatters ( modname ) return _formatter_cache [ name ] ( ** options ) fo...
Lookup and instantiate a formatter by filename pattern .
52,510
def get_tokens_unprocessed ( self , text , stack = ( 'root' , ) ) : self . content_type = None return RegexLexer . get_tokens_unprocessed ( self , text , stack )
Reset the content - type state .
52,511
def find_lexer_class ( name ) : if name in _lexer_cache : return _lexer_cache [ name ] for module_name , lname , aliases , _ , _ in itervalues ( LEXERS ) : if name == lname : _load_lexers ( module_name ) return _lexer_cache [ name ] for cls in find_plugin_lexers ( ) : if cls . name == name : return cls
Lookup a lexer class by name .
52,512
def find_lexer_class_by_name ( _alias ) : if not _alias : raise ClassNotFound ( 'no lexer for alias %r found' % _alias ) for module_name , name , aliases , _ , _ in itervalues ( LEXERS ) : if _alias . lower ( ) in aliases : if name not in _lexer_cache : _load_lexers ( module_name ) return _lexer_cache [ name ] for cls ...
Lookup a lexer class by alias .
52,513
def load_lexer_from_file ( filename , lexername = "CustomLexer" , ** options ) : try : custom_namespace = { } exec ( open ( filename , 'rb' ) . read ( ) , custom_namespace ) if lexername not in custom_namespace : raise ClassNotFound ( 'no valid %s class found in %s' % ( lexername , filename ) ) lexer_class = custom_nam...
Load a lexer from a file .
52,514
def get_lexer_for_mimetype ( _mime , ** options ) : for modname , name , _ , _ , mimetypes in itervalues ( LEXERS ) : if _mime in mimetypes : if name not in _lexer_cache : _load_lexers ( modname ) return _lexer_cache [ name ] ( ** options ) for cls in find_plugin_lexers ( ) : if _mime in cls . mimetypes : return cls ( ...
Get a lexer for a mimetype .
52,515
def _iter_lexerclasses ( plugins = True ) : for key in sorted ( LEXERS ) : module_name , name = LEXERS [ key ] [ : 2 ] if name not in _lexer_cache : _load_lexers ( module_name ) yield _lexer_cache [ name ] if plugins : for lexer in find_plugin_lexers ( ) : yield lexer
Return an iterator over all lexer classes .
52,516
def get_file_stats ( file_name , entity_type = 'file' , lineno = None , cursorpos = None , plugin = None , language = None , local_file = None ) : language = standardize_language ( language , plugin ) stats = { 'language' : language , 'dependencies' : [ ] , 'lines' : None , 'lineno' : lineno , 'cursorpos' : cursorpos ,...
Returns a hash of information about the entity .
52,517
def guess_language ( file_name , local_file ) : lexer = None language = get_language_from_extension ( file_name ) if language : lexer = get_lexer ( language ) else : lexer = smart_guess_lexer ( file_name , local_file ) if lexer : language = u ( lexer . name ) return language , lexer
Guess lexer and language for a file .
52,518
def smart_guess_lexer ( file_name , local_file ) : lexer = None text = get_file_head ( file_name ) lexer1 , accuracy1 = guess_lexer_using_filename ( local_file or file_name , text ) lexer2 , accuracy2 = guess_lexer_using_modeline ( text ) if lexer1 : lexer = lexer1 if ( lexer2 and accuracy2 and ( not accuracy1 or accur...
Guess Pygments lexer for a file .
52,519
def guess_lexer_using_filename ( file_name , text ) : lexer , accuracy = None , None try : lexer = custom_pygments_guess_lexer_for_filename ( file_name , text ) except SkipHeartbeat as ex : raise SkipHeartbeat ( u ( ex ) ) except : log . traceback ( logging . DEBUG ) if lexer is not None : try : accuracy = lexer . anal...
Guess lexer for given text limited to lexers for this file s extension .
52,520
def guess_lexer_using_modeline ( text ) : lexer , accuracy = None , None file_type = None try : file_type = get_filetype_from_buffer ( text ) except : log . traceback ( logging . DEBUG ) if file_type is not None : try : lexer = get_lexer_by_name ( file_type ) except ClassNotFound : log . traceback ( logging . DEBUG ) i...
Guess lexer for given text using Vim modeline .
52,521
def get_language_from_extension ( file_name ) : filepart , extension = os . path . splitext ( file_name ) pathpart , filename = os . path . split ( file_name ) if filename == 'go.mod' : return 'Go' if re . match ( r'\.h.*$' , extension , re . IGNORECASE ) or re . match ( r'\.c.*$' , extension , re . IGNORECASE ) : if o...
Returns a matching language for the given file extension .
52,522
def standardize_language ( language , plugin ) : if not language : return None if plugin : plugin = plugin . split ( ' ' ) [ - 1 ] . split ( '/' ) [ 0 ] . split ( '-' ) [ 0 ] standardized = get_language_from_json ( language , plugin ) if standardized is not None : return standardized return get_language_from_json ( lan...
Maps a string to the equivalent Pygments language .
52,523
def get_language_from_json ( language , key ) : file_name = os . path . join ( os . path . dirname ( __file__ ) , 'languages' , '{0}.json' ) . format ( key . lower ( ) ) if os . path . exists ( file_name ) : try : with open ( file_name , 'r' , encoding = 'utf-8' ) as fh : languages = json . loads ( fh . read ( ) ) if l...
Finds the given language in a json file .
52,524
def get_file_head ( file_name ) : text = None try : with open ( file_name , 'r' , encoding = 'utf-8' ) as fh : text = fh . read ( 512000 ) except : try : with open ( file_name , 'r' , encoding = sys . getfilesystemencoding ( ) ) as fh : text = fh . read ( 512000 ) except : log . traceback ( logging . DEBUG ) return tex...
Returns the first 512000 bytes of the file s contents .
52,525
def custom_pygments_guess_lexer_for_filename ( _fn , _text , ** options ) : fn = basename ( _fn ) primary = { } matching_lexers = set ( ) for lexer in _iter_lexerclasses ( ) : for filename in lexer . filenames : if _fn_matches ( fn , filename ) : matching_lexers . add ( lexer ) primary [ lexer ] = True for filename in ...
Overwrite pygments . lexers . guess_lexer_for_filename to customize the priority of different lexers based on popularity of languages .
52,526
def customize_lexer_priority ( file_name , accuracy , lexer ) : priority = lexer . priority lexer_name = lexer . name . lower ( ) . replace ( 'sharp' , '#' ) if lexer_name in LANGUAGES : priority = LANGUAGES [ lexer_name ] elif lexer_name == 'matlab' : available_extensions = extensions_in_same_folder ( file_name ) if '...
Customize lexer priority
52,527
def extensions_in_same_folder ( file_name ) : directory = os . path . dirname ( file_name ) files = os . listdir ( directory ) extensions = list ( zip ( * map ( os . path . splitext , files ) ) ) [ 1 ] extensions = set ( [ ext . lower ( ) for ext in extensions ] ) return extensions
Returns a list of file extensions from the same folder as file_name .
52,528
def analyse_text ( text ) : if re . search ( r'/\*\**\s*rexx' , text , re . IGNORECASE ) : return 1.0 elif text . startswith ( '/*' ) : lowerText = text . lower ( ) result = sum ( weight for ( pattern , weight ) in RexxLexer . PATTERNS_AND_WEIGHTS if pattern . search ( lowerText ) ) + 0.01 return min ( result , 1.0 )
Check for inital comment and patterns that distinguish Rexx from other C - like languages .
52,529
def analyse_text ( text ) : result = 0.0 lines = text . split ( '\n' ) hasEndProc = False hasHeaderComment = False hasFile = False hasJob = False hasProc = False hasParm = False hasReport = False def isCommentLine ( line ) : return EasytrieveLexer . _COMMENT_LINE_REGEX . match ( lines [ 0 ] ) is not None def isEmptyLin...
Perform a structural analysis for basic Easytrieve constructs .
52,530
def analyse_text ( text ) : result = 0.0 lines = text . split ( '\n' ) if len ( lines ) > 0 : if JclLexer . _JOB_HEADER_PATTERN . match ( lines [ 0 ] ) : result = 1.0 assert 0.0 <= result <= 1.0 return result
Recognize JCL job by header .
52,531
def _objdump_lexer_tokens ( asm_lexer ) : hex_re = r'[0-9A-Za-z]' return { 'root' : [ ( '(.*?)(:)( +file format )(.*?)$' , bygroups ( Name . Label , Punctuation , Text , String ) ) , ( '(Disassembly of section )(.*?)(:)$' , bygroups ( Text , Name . Label , Punctuation ) ) , ( '(' + hex_re + '+)( )(<)(.*?)([-+])(0[xX][A...
Common objdump lexer tokens to wrap an ASM lexer .
52,532
def get_key ( self , fileobj ) : mapping = self . get_map ( ) if mapping is None : raise RuntimeError ( "Selector is closed" ) try : return mapping [ fileobj ] except KeyError : raise KeyError ( "{0!r} is not registered" . format ( fileobj ) )
Return the key associated with a registered file object .
52,533
def get_filetype_from_buffer ( buf , max_lines = 5 ) : lines = buf . splitlines ( ) for l in lines [ - 1 : - max_lines - 1 : - 1 ] : ret = get_filetype_from_line ( l ) if ret : return ret for i in range ( max_lines , - 1 , - 1 ) : if i < len ( lines ) : ret = get_filetype_from_line ( lines [ i ] ) if ret : return ret r...
Scan the buffer for modelines and return filetype if one is found .
52,534
def get_font ( self , bold , oblique ) : if bold and oblique : return self . fonts [ 'BOLDITALIC' ] elif bold : return self . fonts [ 'BOLD' ] elif oblique : return self . fonts [ 'ITALIC' ] else : return self . fonts [ 'NORMAL' ]
Get the font based on bold and italic flags .
52,535
def _get_char_x ( self , charno ) : return charno * self . fontw + self . image_pad + self . line_number_width
Get the X coordinate of a character position .
52,536
def _get_text_pos ( self , charno , lineno ) : return self . _get_char_x ( charno ) , self . _get_line_y ( lineno )
Get the actual position for a character and line position .
52,537
def _get_image_size ( self , maxcharno , maxlineno ) : return ( self . _get_char_x ( maxcharno ) + self . image_pad , self . _get_line_y ( maxlineno + 0 ) + self . image_pad )
Get the required image size .
52,538
def _draw_linenumber ( self , posno , lineno ) : self . _draw_text ( self . _get_linenumber_pos ( posno ) , str ( lineno ) . rjust ( self . line_number_chars ) , font = self . fonts . get_font ( self . line_number_bold , self . line_number_italic ) , fill = self . line_number_fg , )
Remember a line number drawable to paint later .
52,539
def _draw_text ( self , pos , text , font , ** kw ) : self . drawables . append ( ( pos , text , font , kw ) )
Remember a single drawable tuple to paint later .
52,540
def _create_drawables ( self , tokensource ) : lineno = charno = maxcharno = 0 for ttype , value in tokensource : while ttype not in self . styles : ttype = ttype . parent style = self . styles [ ttype ] value = value . expandtabs ( 4 ) lines = value . splitlines ( True ) for i , line in enumerate ( lines ) : temp = li...
Create drawables for the token content .
52,541
def _draw_line_numbers ( self ) : if not self . line_numbers : return for p in xrange ( self . maxlineno ) : n = p + self . line_number_start if ( n % self . line_number_step ) == 0 : self . _draw_linenumber ( p , n )
Create drawables for the line numbers .
52,542
def _paint_line_number_bg ( self , im ) : if not self . line_numbers : return if self . line_number_fg is None : return draw = ImageDraw . Draw ( im ) recth = im . size [ - 1 ] rectw = self . image_pad + self . line_number_width - self . line_number_pad draw . rectangle ( [ ( 0 , 0 ) , ( rectw , recth ) ] , fill = self...
Paint the line number background on the image .
52,543
def update ( self , attrs ) : data = self . dict ( ) data . update ( attrs ) heartbeat = Heartbeat ( data , self . args , self . configs , _clone = True ) return heartbeat
Return a copy of the current Heartbeat with updated attributes .
52,544
def sanitize ( self ) : if not self . args . hide_file_names : return self if self . entity is None : return self if self . type != 'file' : return self if self . should_obfuscate_filename ( ) : self . _sanitize_metadata ( ) extension = u ( os . path . splitext ( self . entity ) [ 1 ] ) self . entity = u ( 'HIDDEN{0}' ...
Removes sensitive data including file names and dependencies .
52,545
def should_obfuscate_filename ( self ) : for pattern in self . args . hide_file_names : try : compiled = re . compile ( pattern , re . IGNORECASE ) if compiled . search ( self . entity ) : return True except re . error as ex : log . warning ( u ( 'Regex error ({msg}) for hide_file_names pattern: {pattern}' ) . format (...
Returns True if hide_file_names is true or the entity file path matches one in the list of obfuscated file paths .
52,546
def _format_local_file ( self ) : if self . type != 'file' : return if not self . entity : return if not is_win : return if self . _file_exists ( ) : return self . args . local_file = self . _to_unc_path ( self . entity )
When args . local_file empty on Windows tries to map args . entity to a unc path .
52,547
def create_negotiate_message ( self , domain_name = None , workstation = None ) : self . negotiate_message = NegotiateMessage ( self . negotiate_flags , domain_name , workstation ) return base64 . b64encode ( self . negotiate_message . get_data ( ) )
Create an NTLM NEGOTIATE_MESSAGE
52,548
def parse_challenge_message ( self , msg2 ) : msg2 = base64 . b64decode ( msg2 ) self . challenge_message = ChallengeMessage ( msg2 )
Parse the NTLM CHALLENGE_MESSAGE from the server and add it to the Ntlm context fields
52,549
def create_authenticate_message ( self , user_name , password , domain_name = None , workstation = None , server_certificate_hash = None ) : self . authenticate_message = AuthenticateMessage ( user_name , password , domain_name , workstation , self . challenge_message , self . ntlm_compatibility , server_certificate_ha...
Create an NTLM AUTHENTICATE_MESSAGE based on the Ntlm context and the previous messages sent and received
52,550
def lex ( code , lexer ) : try : return lexer . get_tokens ( code ) except TypeError as err : if ( isinstance ( err . args [ 0 ] , str ) and ( 'unbound method get_tokens' in err . args [ 0 ] or 'missing 1 required positional argument' in err . args [ 0 ] ) ) : raise TypeError ( 'lex() argument must be a lexer instance,...
Lex code with lexer and return an iterable of tokens .
52,551
def format ( tokens , formatter , outfile = None ) : try : if not outfile : realoutfile = getattr ( formatter , 'encoding' , None ) and BytesIO ( ) or StringIO ( ) formatter . format ( tokens , realoutfile ) return realoutfile . getvalue ( ) else : formatter . format ( tokens , outfile ) except TypeError as err : if ( ...
Format a tokenlist tokens with the formatter formatter .
52,552
def highlight ( code , lexer , formatter , outfile = None ) : return format ( lex ( code , lexer ) , formatter , outfile )
Lex code with lexer and format it with the formatter formatter .
52,553
def getConfigFile ( ) : fileName = '.wakatime.cfg' home = os . environ . get ( 'WAKATIME_HOME' ) if home : return os . path . join ( os . path . expanduser ( home ) , fileName ) return os . path . join ( os . path . expanduser ( '~' ) , fileName )
Returns the config file location .
52,554
def find_filter_class ( filtername ) : if filtername in FILTERS : return FILTERS [ filtername ] for name , cls in find_plugin_filters ( ) : if name == filtername : return cls return None
Lookup a filter by name . Return None if not found .
52,555
def get_filter_by_name ( filtername , ** options ) : cls = find_filter_class ( filtername ) if cls : return cls ( ** options ) else : raise ClassNotFound ( 'filter %r not found' % filtername )
Return an instantiated filter .
52,556
def get_tokens_unprocessed ( self , text ) : tokens = self . _block_re . split ( text ) tokens . reverse ( ) state = idx = 0 try : while True : if state == 0 : val = tokens . pop ( ) yield idx , Other , val idx += len ( val ) state = 1 elif state == 1 : tag = tokens . pop ( ) if tag in ( '<%%' , '%%>' ) : yield idx , O...
Since ERB doesn t allow <% and other tags inside of ruby blocks we have to use a split approach here that fails for that too .
52,557
def format_file_path ( filepath ) : try : is_windows_network_mount = WINDOWS_NETWORK_MOUNT_PATTERN . match ( filepath ) filepath = os . path . realpath ( os . path . abspath ( filepath ) ) filepath = re . sub ( BACKSLASH_REPLACE_PATTERN , '/' , filepath ) is_windows_drive = WINDOWS_DRIVE_PATTERN . match ( filepath ) if...
Formats a path as absolute and with the correct platform separator .
52,558
def close ( self ) : old_pool , self . pool = self . pool , None try : while True : conn = old_pool . get ( block = False ) if conn : conn . close ( ) except queue . Empty : pass
Close all pooled connections and disable the pool .
52,559
def is_same_host ( self , url ) : if url . startswith ( '/' ) : return True scheme , host , port = get_host ( url ) host = _ipv6_host ( host ) . lower ( ) if self . port and not port : port = port_by_scheme . get ( scheme ) elif not self . port and port == port_by_scheme . get ( scheme ) : port = None return ( scheme ,...
Check if the given url is a member of the same host as this connection pool .
52,560
def filename ( self , value ) : warnings . warn ( "The 'filename' attribute will be removed in future versions. " "Use 'source' instead." , DeprecationWarning , stacklevel = 2 ) self . source = value
Deprecated user source .
52,561
def options ( self , section ) : try : opts = self . _sections [ section ] . copy ( ) except KeyError : raise from_none ( NoSectionError ( section ) ) opts . update ( self . _defaults ) return list ( opts . keys ( ) )
Return a list of option names for the given section name .
52,562
def read_string ( self , string , source = '<string>' ) : sfile = io . StringIO ( string ) self . read_file ( sfile , source )
Read configuration from a given string .
52,563
def read_dict ( self , dictionary , source = '<dict>' ) : elements_added = set ( ) for section , keys in dictionary . items ( ) : section = str ( section ) try : self . add_section ( section ) except ( DuplicateSectionError , ValueError ) : if self . _strict and section in elements_added : raise elements_added . add ( ...
Read configuration from a dictionary .
52,564
def readfp ( self , fp , filename = None ) : warnings . warn ( "This method will be removed in future versions. " "Use 'parser.read_file()' instead." , DeprecationWarning , stacklevel = 2 ) self . read_file ( fp , source = filename )
Deprecated use read_file instead .
52,565
def has_option ( self , section , option ) : if not section or section == self . default_section : option = self . optionxform ( option ) return option in self . _defaults elif section not in self . _sections : return False else : option = self . optionxform ( option ) return ( option in self . _sections [ section ] or...
Check for the existence of a given option in a given section . If the specified section is None or an empty string DEFAULT is assumed . If the specified section does not exist returns False .
52,566
def _write_section ( self , fp , section_name , section_items , delimiter ) : fp . write ( "[{0}]\n" . format ( section_name ) ) for key , value in section_items : value = self . _interpolation . before_write ( self , section_name , key , value ) if value is not None or not self . _allow_no_value : value = delimiter + ...
Write a single section to the specified fp .
52,567
def _unify_values ( self , section , vars ) : sectiondict = { } try : sectiondict = self . _sections [ section ] except KeyError : if section != self . default_section : raise NoSectionError ( section ) vardict = { } if vars : for key , value in vars . items ( ) : if value is not None : value = str ( value ) vardict [ ...
Create a sequence of lookups with vars taking priority over the section which takes priority over the DEFAULTSECT .
52,568
def _convert_to_boolean ( self , value ) : if value . lower ( ) not in self . BOOLEAN_STATES : raise ValueError ( 'Not a boolean: %s' % value ) return self . BOOLEAN_STATES [ value . lower ( ) ]
Return a boolean value translating from other types if necessary .
52,569
def _validate_value_types ( self , ** kwargs ) : section = kwargs . get ( 'section' , "" ) option = kwargs . get ( 'option' , "" ) value = kwargs . get ( 'value' , "" ) if PY2 and bytes in ( type ( section ) , type ( option ) , type ( value ) ) : warnings . warn ( "You passed a bytestring. Implicitly decoding as UTF-8 ...
Raises a TypeError for non - string values .
52,570
def set ( self , section , option , value = None ) : _ , option , value = self . _validate_value_types ( option = option , value = value ) super ( ConfigParser , self ) . set ( section , option , value )
Set an option . Extends RawConfigParser . set by validating type and interpolation syntax on the value .
52,571
def add_section ( self , section ) : section , _ , _ = self . _validate_value_types ( section = section ) super ( ConfigParser , self ) . add_section ( section )
Create a new section in the configuration . Extends RawConfigParser . add_section by validating if the section name is a string .
52,572
def get ( self , option , fallback = None , ** kwargs ) : kwargs . setdefault ( 'raw' , False ) kwargs . setdefault ( 'vars' , None ) _impl = kwargs . pop ( '_impl' , None ) if not _impl : _impl = self . _parser . get return _impl ( self . _name , option , fallback = fallback , ** kwargs )
Get an option value .
52,573
def analyse_text ( text ) : if re . match ( r'^\s*REBOL\s*\[' , text , re . IGNORECASE ) : return 1.0 elif re . search ( r'\s*REBOL\s*[' , text , re . IGNORECASE ) : return 0.5
Check if code contains REBOL header and so it probably not R code
52,574
def get_sign_key ( exported_session_key , magic_constant ) : sign_key = hashlib . md5 ( exported_session_key + magic_constant ) . digest ( ) return sign_key
3 . 4 . 5 . 2 SIGNKEY
52,575
def _wait_for_io_events ( socks , events , timeout = None ) : if not HAS_SELECT : raise ValueError ( 'Platform does not have a selector' ) if not isinstance ( socks , list ) : if hasattr ( socks , "fileno" ) : socks = [ socks ] else : socks = list ( socks ) with DefaultSelector ( ) as selector : for sock in socks : sel...
Waits for IO events to be available from a list of sockets or optionally a single socket if passed in . Returns a list of sockets that can be interacted with immediately .
52,576
def make_analysator ( f ) : def text_analyse ( text ) : try : rv = f ( text ) except Exception : return 0.0 if not rv : return 0.0 try : return min ( 1.0 , max ( 0.0 , float ( rv ) ) ) except ( ValueError , TypeError ) : return 0.0 text_analyse . __doc__ = f . __doc__ return staticmethod ( text_analyse )
Return a static text analyser function that returns float values .
52,577
def shebang_matches ( text , regex ) : r index = text . find ( '\n' ) if index >= 0 : first_line = text [ : index ] . lower ( ) else : first_line = text . lower ( ) if first_line . startswith ( '#!' ) : try : found = [ x for x in split_path_re . split ( first_line [ 2 : ] . strip ( ) ) if x and not x . startswith ( '-'...
r Check if the given regular expression matches the last part of the shebang if one exists .
52,578
def looks_like_xml ( text ) : if xml_decl_re . match ( text ) : return True key = hash ( text ) try : return _looks_like_xml_cache [ key ] except KeyError : m = doctype_lookup_re . match ( text ) if m is not None : return True rv = tag_re . search ( text [ : 1000 ] ) is not None _looks_like_xml_cache [ key ] = rv retur...
Check if a doctype exists or if we have some tags .
52,579
def unirange ( a , b ) : if b < a : raise ValueError ( "Bad character range" ) if a < 0x10000 or b < 0x10000 : raise ValueError ( "unirange is only defined for non-BMP ranges" ) if sys . maxunicode > 0xffff : return u'[%s-%s]' % ( unichr ( a ) , unichr ( b ) ) else : ah , al = _surrogatepair ( a ) bh , bl = _surrogatep...
Returns a regular expression string to match the given non - BMP range .
52,580
def format_lines ( var_name , seq , raw = False , indent_level = 0 ) : lines = [ ] base_indent = ' ' * indent_level * 4 inner_indent = ' ' * ( indent_level + 1 ) * 4 lines . append ( base_indent + var_name + ' = (' ) if raw : for i in seq : lines . append ( inner_indent + i + ',' ) else : for i in seq : r = repr ( i + ...
Formats a sequence of strings for output .
52,581
def duplicates_removed ( it , already_seen = ( ) ) : lst = [ ] seen = set ( ) for i in it : if i in seen or i in already_seen : continue lst . append ( i ) seen . add ( i ) return lst
Returns a list with duplicates removed from the iterable it .
52,582
def _lex_fortran ( self , match , ctx = None ) : lexer = FortranLexer ( ) text = match . group ( 0 ) + "\n" for index , token , value in lexer . get_tokens_unprocessed ( text ) : value = value . replace ( '\n' , '' ) if value != '' : yield index , token , value
Lex a line just as free form fortran without line break .
52,583
def get_project_info ( configs , heartbeat , data ) : project_name , branch_name = heartbeat . project , heartbeat . branch if heartbeat . type != 'file' : project_name = project_name or heartbeat . args . project or heartbeat . args . alternate_project return project_name , branch_name if project_name is None or branc...
Find the current project and branch .
52,584
def generate_project_name ( ) : adjectives = [ 'aged' , 'ancient' , 'autumn' , 'billowing' , 'bitter' , 'black' , 'blue' , 'bold' , 'broad' , 'broken' , 'calm' , 'cold' , 'cool' , 'crimson' , 'curly' , 'damp' , 'dark' , 'dawn' , 'delicate' , 'divine' , 'dry' , 'empty' , 'falling' , 'fancy' , 'flat' , 'floral' , 'fragra...
Generates a random project name .
52,585
def save ( self , session ) : if not HAS_SQL : return try : conn , c = self . connect ( ) c . execute ( 'DELETE FROM {0}' . format ( self . table_name ) ) values = { 'value' : sqlite3 . Binary ( pickle . dumps ( session , protocol = 2 ) ) , } c . execute ( 'INSERT INTO {0} VALUES (:value)' . format ( self . table_name ...
Saves a requests . Session object for the next heartbeat process .
52,586
def get ( self ) : if not HAS_SQL : return requests . session ( ) try : conn , c = self . connect ( ) except : log . traceback ( logging . DEBUG ) return requests . session ( ) session = None try : c . execute ( 'BEGIN IMMEDIATE' ) c . execute ( 'SELECT value FROM {0} LIMIT 1' . format ( self . table_name ) ) row = c ....
Returns a requests . Session object .
52,587
def delete ( self ) : if not HAS_SQL : return try : conn , c = self . connect ( ) c . execute ( 'DELETE FROM {0}' . format ( self . table_name ) ) conn . commit ( ) conn . close ( ) except : log . traceback ( logging . DEBUG )
Clears all cached Session objects .
52,588
def resolve_redirects ( self , resp , req , stream = False , timeout = None , verify = True , cert = None , proxies = None , yield_requests = False , ** adapter_kwargs ) : hist = [ ] url = self . get_redirect_target ( resp ) while url : prepared_request = req . copy ( ) hist . append ( resp ) resp . history = hist [ 1 ...
Receives a Response . Returns a generator of Responses or Requests .
52,589
def rebuild_method ( self , prepared_request , response ) : method = prepared_request . method if response . status_code == codes . see_other and method != 'HEAD' : method = 'GET' if response . status_code == codes . found and method != 'HEAD' : method = 'GET' if response . status_code == codes . moved and method == 'P...
When being redirected we may want to change the method of the request based on certain specs or browser behavior .
52,590
def apply_filters ( stream , filters , lexer = None ) : def _apply ( filter_ , stream ) : for token in filter_ . filter ( lexer , stream ) : yield token for filter_ in filters : stream = _apply ( filter_ , stream ) return stream
Use this method to apply an iterable of filters to a stream . If lexer is given it s forwarded to the filter otherwise the filter receives None .
52,591
def reset_indent ( token_class ) : def callback ( lexer , match , context ) : text = match . group ( ) context . indent_stack = [ ] context . indent = - 1 context . next_indent = 0 context . block_scalar_indent = None yield match . start ( ) , token_class , text context . pos = match . end ( ) return callback
Reset the indentation levels .
52,592
def save_indent ( token_class , start = False ) : def callback ( lexer , match , context ) : text = match . group ( ) extra = '' if start : context . next_indent = len ( text ) if context . next_indent < context . indent : while context . next_indent < context . indent : context . indent = context . indent_stack . pop ...
Save a possible indentation level .
52,593
def set_block_scalar_indent ( token_class ) : def callback ( lexer , match , context ) : text = match . group ( ) context . block_scalar_indent = None if not text : return increment = match . group ( 1 ) if increment : current_indent = max ( context . indent , 0 ) increment = int ( increment ) context . block_scalar_in...
Set an explicit indentation level for a block scalar .
52,594
def parse_block_scalar_indent ( token_class ) : def callback ( lexer , match , context ) : text = match . group ( ) if context . block_scalar_indent is None : if len ( text ) <= max ( context . indent , 0 ) : context . stack . pop ( ) context . stack . pop ( ) return context . block_scalar_indent = len ( text ) else : ...
Process indentation spaces in a block scalar .
52,595
def content ( self ) : if self . _content is False : if self . _content_consumed : raise RuntimeError ( 'The content for this response was already consumed' ) if self . status_code == 0 or self . raw is None : self . _content = None else : self . _content = bytes ( ) . join ( self . iter_content ( CONTENT_CHUNK_SIZE ) ...
Content of the response in bytes .
52,596
def py_scanstring ( s , end , encoding = None , strict = True , _b = BACKSLASH , _m = STRINGCHUNK . match , _join = u ( '' ) . join , _PY3 = PY3 , _maxunicode = sys . maxunicode ) : if encoding is None : encoding = DEFAULT_ENCODING chunks = [ ] _append = chunks . append begin = end - 1 while 1 : chunk = _m ( s , end ) ...
Scan the string s for a JSON string . End is the index of the character in s after the quote that started the JSON string . Unescapes all valid JSON string escape sequences and raises ValueError on attempt to decode an invalid string . If strict is False then literal control characters are allowed in the string .
52,597
def _get_css_class ( self , ttype ) : ttypeclass = _get_ttype_class ( ttype ) if ttypeclass : return self . classprefix + ttypeclass return ''
Return the css class of this token type prefixed with the classprefix option .
52,598
def _get_css_classes ( self , ttype ) : cls = self . _get_css_class ( ttype ) while ttype not in STANDARD_TYPES : ttype = ttype . parent cls = self . _get_css_class ( ttype ) + ' ' + cls return cls
Return the css classes of this token type prefixed with the classprefix option .
52,599
def get_style_defs ( self , arg = None ) : if arg is None : arg = ( 'cssclass' in self . options and '.' + self . cssclass or '' ) if isinstance ( arg , string_types ) : args = [ arg ] else : args = list ( arg ) def prefix ( cls ) : if cls : cls = '.' + cls tmp = [ ] for arg in args : tmp . append ( ( arg and arg + ' '...
Return CSS style definitions for the classes produced by the current highlighting style . arg can be a string or list of selectors to insert before the token type classes .