idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
52,700
def generate_aliases(fieldfile, **kwargs):
    """saved_file signal handler: generate thumbnails for all field, model
    and app specific aliases matching the saved file's field (project-wide
    aliases are excluded)."""
    from easy_thumbnails.files import generate_all_aliases
    generate_all_aliases(fieldfile, include_global=False)
A saved_file signal handler which generates thumbnails for all field, model and app specific aliases matching the saved file's field.
52,701
def generate_aliases_global(fieldfile, **kwargs):
    """saved_file signal handler: generate thumbnails for all field, model
    and app specific aliases matching the saved file's field, also
    including every project-wide alias."""
    from easy_thumbnails.files import generate_all_aliases
    generate_all_aliases(fieldfile, include_global=True)
A saved_file signal handler which generates thumbnails for all field, model and app specific aliases matching the saved file's field, also generating thumbnails for each project-wide alias.
52,702
def colorspace(im, bw=False, replace_alpha=False, **kwargs):
    """Convert an image to the correct color space.

    Mode 'I' (32-bit integer) images are first scaled down to 8-bit 'L'.
    Grayscale input (or ``bw=True``) keeps/forces mode 'L'; everything
    else becomes 'RGB'.  For transparent images, ``replace_alpha``
    flattens the alpha channel onto that background color instead of
    preserving it.
    """
    if im.mode == 'I':
        # Map wide integer intensities down to the 0-255 range.
        im = im.point(list(_points_table()), 'L')
    is_transparent = utils.is_transparent(im)
    is_grayscale = im.mode in ('L', 'LA')
    new_mode = im.mode
    if is_grayscale or bw:
        new_mode = 'L'
    else:
        new_mode = 'RGB'
    if is_transparent:
        if replace_alpha:
            if im.mode != 'RGBA':
                im = im.convert('RGBA')
            base = Image.new('RGBA', im.size, replace_alpha)
            base.paste(im, mask=im)
            im = base
        else:
            # Keep the alpha channel ('LA' or 'RGBA').
            new_mode = new_mode + 'A'
    if im.mode != new_mode:
        im = im.convert(new_mode)
    return im
Convert images to the correct color space .
52,703
def autocrop(im, autocrop=False, **kwargs):
    """Remove any unnecessary whitespace from the edges of the source image.

    The image is flattened to grayscale (transparent areas composited
    onto white) and cropped to the bounding box of pixels that differ
    from pure white.  No-op unless ``autocrop`` is true.
    """
    if autocrop:
        if utils.is_transparent(im):
            # Composite onto white so transparent areas count as blank.
            # Note: the original passed (255), which is just the int 255.
            no_alpha = Image.new('L', im.size, 255)
            no_alpha.paste(im, mask=im.split()[-1])
        else:
            no_alpha = im.convert('L')
        # no_alpha is already mode 'L'; the former extra convert('L')
        # copy was redundant and has been removed.
        bg = Image.new('L', im.size, 255)
        bbox = ImageChops.difference(no_alpha, bg).getbbox()
        if bbox:
            im = im.crop(bbox)
    return im
Remove any unnecessary whitespace from the edges of the source image .
52,704
def filters(im, detail=False, sharpen=False, **kwargs):
    """Pass the source image through PIL post-processing filters."""
    if detail:
        im = im.filter(ImageFilter.DETAIL)
    if sharpen:
        im = im.filter(ImageFilter.SHARPEN)
    return im
Pass the source image through post - processing filters .
52,705
def background(im, size, background=None, **kwargs):
    """Add borders of *background* color so the resized image fits *size*
    exactly (the image is centered on the new canvas)."""
    if not background:
        return im
    if not size[0] or not size[1]:
        # A zero dimension means "unconstrained"; nothing to pad.
        return im
    x, y = im.size
    if x >= size[0] and y >= size[1]:
        # Image already covers the target box.
        return im
    im = colorspace(im, replace_alpha=background, **kwargs)
    new_im = Image.new('RGB', size, background)
    if new_im.mode != im.mode:
        new_im = new_im.convert(im.mode)
    # Center the source on the new canvas.
    offset = (size[0] - x) // 2, (size[1] - y) // 2
    new_im.paste(im, offset)
    return new_im
Add borders of a certain color to make the resized image fit exactly within the dimensions given .
52,706
def generate_all_aliases(fieldfile, include_global):
    """Generate all of a file's alias thumbnails.

    The matched alias name is passed through to the thumbnail options
    under the 'ALIAS' key.
    """
    all_options = aliases.all(fieldfile, include_global=include_global)
    if all_options:
        thumbnailer = get_thumbnailer(fieldfile)
        for key, options in six.iteritems(all_options):
            options['ALIAS'] = key
            thumbnailer.get_thumbnail(options)
Generate all of a file's aliases.
52,707
def _get_image(self):
    """Get a PIL Image instance of this file (lazily generated, cached).

    NOTE(review): assigning ``self.image`` presumably routes through the
    ``image`` property setter which populates ``_image_cache`` — confirm;
    otherwise the cache read below would raise AttributeError.
    """
    if not hasattr(self, '_image_cache'):
        from easy_thumbnails.source_generators import pil_image
        self.image = pil_image(self)
    return self._image_cache
Get a PIL Image instance of this file .
52,708
def _set_image ( self , image ) : if image : self . _image_cache = image self . _dimensions_cache = image . size else : if hasattr ( self , '_image_cache' ) : del self . _cached_image if hasattr ( self , '_dimensions_cache' ) : del self . _dimensions_cache
Set the image for this file .
52,709
def set_image_dimensions(self, thumbnail):
    """Cache image dimensions from a Thumbnail model instance.

    Returns the cached size, or False when the thumbnail has no stored
    dimensions.
    """
    try:
        dimensions = getattr(thumbnail, 'dimensions', None)
    except models.ThumbnailDimensions.DoesNotExist:
        # The related dimensions row may have been deleted.
        dimensions = None
    if not dimensions:
        return False
    self._dimensions_cache = dimensions.size
    return self._dimensions_cache
Set image dimensions from the cached dimensions of a Thumbnail model instance .
52,710
def generate_thumbnail(self, thumbnail_options, high_resolution=False,
                       silent_template_exception=False):
    """Return an unsaved ThumbnailFile containing a thumbnail image.

    Raises EasyThumbnailsError for an invalid requested size and
    InvalidImageFormatError when the source cannot be read as an image.
    """
    thumbnail_options = self.get_options(thumbnail_options)
    orig_size = thumbnail_options['size']
    min_dim, max_dim = 0, 0
    for dim in orig_size:
        try:
            dim = int(dim)
        except (TypeError, ValueError):
            continue
        min_dim, max_dim = min(min_dim, dim), max(max_dim, dim)
    if max_dim == 0 or min_dim < 0:
        raise exceptions.EasyThumbnailsError(
            "The source image is an invalid size (%sx%s)" % orig_size)
    if high_resolution:
        # Render at 2x, then restore the requested size for naming below.
        thumbnail_options['size'] = (orig_size[0] * 2, orig_size[1] * 2)
    image = engine.generate_source_image(
        self, thumbnail_options, self.source_generators,
        fail_silently=silent_template_exception)
    if image is None:
        raise exceptions.InvalidImageFormatError(
            "The source file does not appear to be an image")
    thumbnail_image = engine.process_image(image, thumbnail_options,
                                           self.thumbnail_processors)
    if high_resolution:
        thumbnail_options['size'] = orig_size
    filename = self.get_thumbnail_name(
        thumbnail_options,
        transparent=utils.is_transparent(thumbnail_image),
        high_resolution=high_resolution)
    quality = thumbnail_options['quality']
    subsampling = thumbnail_options['subsampling']
    img = engine.save_image(
        thumbnail_image, filename=filename, quality=quality,
        subsampling=subsampling)
    data = img.read()
    thumbnail = ThumbnailFile(
        filename, file=ContentFile(data), storage=self.thumbnail_storage,
        thumbnail_options=thumbnail_options)
    thumbnail.image = thumbnail_image
    # Caller decides when (and whether) to persist.
    thumbnail._committed = False
    return thumbnail
Return an unsaved ThumbnailFile containing a thumbnail image .
52,711
def get_existing_thumbnail(self, thumbnail_options, high_resolution=False):
    """Return a ThumbnailFile for an existing thumbnail matching the
    options, or None if no matching file exists in storage."""
    thumbnail_options = self.get_options(thumbnail_options)
    # The thumbnail may have been saved under either the opaque or the
    # transparent name, so check both.
    names = [
        self.get_thumbnail_name(thumbnail_options, transparent=False,
                                high_resolution=high_resolution)]
    transparent_name = self.get_thumbnail_name(
        thumbnail_options, transparent=True,
        high_resolution=high_resolution)
    if transparent_name not in names:
        names.append(transparent_name)
    for filename in names:
        exists = self.thumbnail_exists(filename)
        if exists:
            thumbnail_file = ThumbnailFile(
                name=filename, storage=self.thumbnail_storage,
                thumbnail_options=thumbnail_options)
            if settings.THUMBNAIL_CACHE_DIMENSIONS:
                # For remote storage thumbnail_exists returns the cached
                # Thumbnail model; reuse its stored dimensions.
                thumbnail_file.set_image_dimensions(exists)
            return thumbnail_file
Return a ThumbnailFile containing an existing thumbnail for a set of thumbnail options or None if not found .
52,712
def get_thumbnail(self, thumbnail_options, save=True, generate=None,
                  silent_template_exception=False):
    """Return a ThumbnailFile containing a thumbnail.

    An existing thumbnail is reused when possible; otherwise one is
    generated (if *generate* allows it) and optionally saved.  When
    generation is suppressed, a ``thumbnail_missed`` signal is sent.
    """
    thumbnail_options = self.get_options(thumbnail_options)
    if generate is None:
        generate = self.generate
    thumbnail = self.get_existing_thumbnail(thumbnail_options)
    if not thumbnail:
        if generate:
            thumbnail = self.generate_thumbnail(
                thumbnail_options,
                silent_template_exception=silent_template_exception)
            if save:
                self.save_thumbnail(thumbnail)
        else:
            signals.thumbnail_missed.send(
                sender=self, options=thumbnail_options,
                high_resolution=False)
    if 'HIGH_RESOLUTION' in thumbnail_options:
        generate_high_resolution = thumbnail_options.get('HIGH_RESOLUTION')
    else:
        generate_high_resolution = self.thumbnail_high_resolution
    if generate_high_resolution:
        thumbnail.high_resolution = self.get_existing_thumbnail(
            thumbnail_options, high_resolution=True)
        if not thumbnail.high_resolution:
            if generate:
                thumbnail.high_resolution = self.generate_thumbnail(
                    thumbnail_options, high_resolution=True,
                    silent_template_exception=silent_template_exception)
                if save:
                    self.save_thumbnail(thumbnail.high_resolution)
            else:
                # Bug fix: this branch concerns the high-resolution
                # variant, so report high_resolution=True (was False).
                signals.thumbnail_missed.send(
                    sender=self, options=thumbnail_options,
                    high_resolution=True)
    return thumbnail
Return a ThumbnailFile containing a thumbnail .
52,713
def save_thumbnail(self, thumbnail):
    """Save a thumbnail to the thumbnail_storage.

    Also refreshes the cache rows (and cached dimensions, when enabled)
    and emits the ``thumbnail_created`` signal.
    """
    filename = thumbnail.name
    try:
        # Best effort: remove any stale file under the same name first.
        self.thumbnail_storage.delete(filename)
    except Exception:
        pass
    self.thumbnail_storage.save(filename, thumbnail)
    thumb_cache = self.get_thumbnail_cache(
        thumbnail.name, create=True, update=True)
    if settings.THUMBNAIL_CACHE_DIMENSIONS:
        dimensions_cache, created = (
            models.ThumbnailDimensions.objects.get_or_create(
                thumbnail=thumb_cache,
                defaults={'width': thumbnail.width,
                          'height': thumbnail.height}))
        if not created:
            dimensions_cache.width = thumbnail.width
            dimensions_cache.height = thumbnail.height
            dimensions_cache.save()
    signals.thumbnail_created.send(sender=thumbnail)
Save a thumbnail to the thumbnail_storage .
52,714
def thumbnail_exists(self, thumbnail_name):
    """Check whether the thumbnail exists and is newer than its source.

    For a non-local thumbnail storage the cached Thumbnail model is
    returned (truthy) instead of True; returns False otherwise.
    """
    if self.remote_source:
        return False
    if utils.is_storage_local(self.source_storage):
        source_modtime = utils.get_modified_time(
            self.source_storage, self.name)
    else:
        source = self.get_source_cache()
        if not source:
            return False
        source_modtime = source.modified
    if not source_modtime:
        return False
    local_thumbnails = utils.is_storage_local(self.thumbnail_storage)
    if local_thumbnails:
        thumbnail_modtime = utils.get_modified_time(
            self.thumbnail_storage, thumbnail_name)
        if not thumbnail_modtime:
            return False
        return source_modtime <= thumbnail_modtime
    thumbnail = self.get_thumbnail_cache(thumbnail_name)
    if not thumbnail:
        return False
    thumbnail_modtime = thumbnail.modified
    if thumbnail.modified and source_modtime <= thumbnail.modified:
        return thumbnail
    return False
Calculate whether the thumbnail already exists and that the source is not newer than the thumbnail .
52,715
def save(self, name, content, *args, **kwargs):
    """Save the file, also saving a reference in the thumbnail-cache
    Source model."""
    super(ThumbnailerFieldFile, self).save(name, content, *args, **kwargs)
    self.get_source_cache(create=True, update=True)
Save the file also saving a reference to the thumbnail cache Source model .
52,716
def delete ( self , * args , ** kwargs ) : source_cache = self . get_source_cache ( ) self . delete_thumbnails ( source_cache ) super ( ThumbnailerFieldFile , self ) . delete ( * args , ** kwargs ) if source_cache and source_cache . pk is not None : source_cache . delete ( )
Delete the image along with any generated thumbnails .
52,717
def delete_thumbnails(self, source_cache=None):
    """Delete any thumbnails generated from the source image.

    :param source_cache: optional, already-fetched Source cache row;
        looked up lazily when not provided.
    :returns: the number of thumbnails deleted.
    """
    # Bug fix: the passed-in source_cache used to be ignored and
    # unconditionally re-fetched.
    if source_cache is None:
        source_cache = self.get_source_cache()
    deleted = 0
    if source_cache:
        thumbnail_storage_hash = utils.get_storage_hash(
            self.thumbnail_storage)
        for thumbnail_cache in source_cache.thumbnails.all():
            # Only touch thumbnails stored in *our* thumbnail storage.
            if thumbnail_cache.storage_hash == thumbnail_storage_hash:
                self.thumbnail_storage.delete(thumbnail_cache.name)
                thumbnail_cache.delete()
                deleted += 1
    return deleted
Delete any thumbnails generated from the source image .
52,718
def get_thumbnails(self, *args, **kwargs):
    """Iterate over ThumbnailFile instances generated from this source."""
    source_cache = self.get_source_cache()
    if source_cache:
        thumbnail_storage_hash = utils.get_storage_hash(
            self.thumbnail_storage)
        for thumbnail_cache in source_cache.thumbnails.all():
            # Skip thumbnails that live in a different storage backend.
            if thumbnail_cache.storage_hash == thumbnail_storage_hash:
                yield ThumbnailFile(name=thumbnail_cache.name,
                                    storage=self.thumbnail_storage)
Return an iterator which returns ThumbnailFile instances .
52,719
def save(self, name, content, *args, **kwargs):
    """Save the image, optionally resizing the source first.

    When the field defines ``resize_source`` options, the uploaded
    content is replaced by a generated thumbnail and the stored name's
    extension follows the generated image's format.
    """
    options = getattr(self.field, 'resize_source', None)
    if options:
        if 'quality' not in options:
            options['quality'] = self.thumbnail_quality
        content = Thumbnailer(content, name).generate_thumbnail(options)
        # The generated extension may differ (e.g. forced format); keep
        # the stored name consistent with the actual content.
        orig_name, ext = os.path.splitext(name)
        generated_ext = os.path.splitext(content.name)[1]
        if generated_ext.lower() != ext.lower():
            name = orig_name + generated_ext
    super(ThumbnailerImageFieldFile, self).save(name, content, *args,
                                                **kwargs)
Save the image .
52,720
def queryset_iterator(queryset, chunksize=1000):
    """Iterate a queryset in pk-ordered chunks to keep memory usage low.

    Rows are fetched ``chunksize`` at a time with a gc pass between
    chunks.  Assumes ordered primary keys all greater than 0 — TODO
    confirm against the models this is used with.
    """
    if queryset.exists():
        primary_key = 0
        last_pk = queryset.order_by('-pk')[0].pk
        queryset = queryset.order_by('pk')
        while primary_key < last_pk:
            for row in queryset.filter(pk__gt=primary_key)[:chunksize]:
                primary_key = row.pk
                yield row
            gc.collect()
The queryset iterator helps to keep memory consumption down, and also makes processing easier for weaker computers.
52,721
def print_stats(self):
    """Print statistics about the performed cleanup to stdout."""
    print("{0:-<48}".format(str(datetime.now().strftime('%Y-%m-%d %H:%M '))))
    print("{0:<40} {1:>7}".format("Sources checked:", self.sources))
    print("{0:<40} {1:>7}".format("Source references deleted from DB:",
                                  self.source_refs_deleted))
    print("{0:<40} {1:>7}".format("Thumbnails deleted from disk:",
                                  self.thumbnails_deleted))
    print("(Completed in %s seconds)\n" % self.execution_time)
Print statistics about the cleanup performed .
52,722
def populate_from_settings(self):
    """Populate the aliases from the THUMBNAIL_ALIASES setting."""
    settings_aliases = settings.THUMBNAIL_ALIASES
    if settings_aliases:
        for target, aliases in settings_aliases.items():
            target_aliases = self._aliases.setdefault(target, {})
            target_aliases.update(aliases)
Populate the aliases from the THUMBNAIL_ALIASES setting .
52,723
def set(self, alias, options, target=None):
    """Add an alias, scoped to *target* ('' means global)."""
    target = self._coerce_target(target) or ''
    target_aliases = self._aliases.setdefault(target, {})
    target_aliases[alias] = options
Add an alias .
52,724
def get(self, alias, target=None):
    """Get a dictionary of aliased options (None when the alias is unknown).

    More specific targets take precedence over more generic ones.
    """
    for target_part in reversed(list(self._get_targets(target))):
        options = self._get(target_part, alias)
        if options:
            return options
Get a dictionary of aliased options .
52,725
def all(self, target=None, include_global=True):
    """Get a dictionary of all aliases and their options for *target*.

    Generic targets are merged first so specific ones override them.
    """
    aliases = {}
    for target_part in self._get_targets(target, include_global):
        aliases.update(self._aliases.get(target_part, {}))
    return aliases
Get a dictionary of all aliases and their options .
52,726
def _get ( self , target , alias ) : if target not in self . _aliases : return return self . _aliases [ target ] . get ( alias )
Internal method to get a specific alias .
52,727
def _get_targets ( self , target , include_global = True ) : target = self . _coerce_target ( target ) if include_global : yield '' if not target : return target_bits = target . split ( '.' ) for i in range ( len ( target_bits ) ) : yield '.' . join ( target_bits [ : i + 1 ] )
Internal iterator to split up a complete target into the possible parts it may match .
52,728
def _coerce_target(self, target):
    """Coerce a target (string or field file) to an 'app.Model.field'
    string.

    Strings and falsy values pass through unchanged; objects without an
    ``instance`` attribute coerce to None.
    """
    if not target or isinstance(target, six.string_types):
        return target
    if not hasattr(target, 'instance'):
        return None
    if getattr(target.instance, '_deferred', False):
        # Deferred model instances proxy the real model class.
        model = target.instance._meta.proxy_for_model
    else:
        model = target.instance.__class__
    return '%s.%s.%s' % (
        model._meta.app_label,
        model.__name__,
        target.field.name,
    )
Internal method to coerce a target to a string .
52,729
def image_entropy(im):
    """Calculate the Shannon entropy of an image (used for smart
    cropping); non-image input yields 0."""
    if not isinstance(im, Image.Image):
        return 0
    hist = im.histogram()
    hist_size = float(sum(hist))
    # Normalize to a probability distribution.
    hist = [h / hist_size for h in hist]
    return -sum([p * math.log(p, 2) for p in hist if p != 0])
Calculate the entropy of an image . Used for smart cropping .
52,730
def dynamic_import(import_string):
    """Import and return the module or attribute named by a dotted path."""
    module_name, dot, attr = import_string.rpartition('.')
    if not dot:
        # No dot at all: a plain top-level module name.
        return __import__(import_string, {}, {}, [])
    parent_module = __import__(module_name, {}, {}, [attr])
    return getattr(parent_module, attr)
Dynamically import a module or object .
52,731
def is_transparent(image):
    """Check whether a PIL image has (or may have) transparency."""
    if not isinstance(image, Image.Image):
        # Can only deal with PIL images; anything else is opaque.
        return False
    return (image.mode in ('RGBA', 'LA') or
            (image.mode == 'P' and 'transparency' in image.info))
Check to see if an image is transparent .
52,732
def is_progressive(image):
    """Check whether a PIL image is saved as progressive/interlaced."""
    if not isinstance(image, Image.Image):
        return False
    return ('progressive' in image.info) or ('progression' in image.info)
Check to see if an image is progressive .
52,733
def get_modified_time(storage, name):
    """Get a file's modified time from storage as a timezone-aware
    datetime.

    Returns 0 when the file is missing (OSError) and None when the
    storage backend does not implement modification times.
    """
    try:
        try:
            # Newer Django storage API, falling back to the old spelling.
            modified_time = storage.get_modified_time(name)
        except AttributeError:
            modified_time = storage.modified_time(name)
    except OSError:
        return 0
    except NotImplementedError:
        return None
    if modified_time and timezone.is_naive(modified_time):
        if getattr(settings, 'USE_TZ', False):
            default_timezone = timezone.get_default_timezone()
            return timezone.make_aware(modified_time, default_timezone)
    return modified_time
Get modified time from storage ensuring the result is a timezone - aware datetime .
52,734
def namedtuple(typename, field_names, verbose=False, rename=False):
    """Return a new subclass of tuple with named fields.

    A patched version of collections.namedtuple: unlike the stdlib's it
    accepts non-identifier strings as field names (needed because SDMX
    allows '-' in field names).  All values are accessible through dict
    syntax; identifier-named fields also get attribute access.
    """
    if isinstance(field_names, str):
        field_names = field_names.replace(',', ' ').split()
    field_names = list(map(str, field_names))
    typename = str(typename)
    # Bug fix: this check only concerns the type name but used to run
    # (redundantly) on every loop iteration below, interpolating the
    # loop variable into the error message.
    if not _isidentifier(typename):
        raise ValueError('Type names must be valid '
                         'identifiers: %r' % typename)
    for name in [typename] + field_names:
        if type(name) != str:
            raise TypeError('Type names and field names must be strings')
        if _iskeyword(name):
            raise ValueError('Type names and field names cannot be a '
                             'keyword: %r' % name)
    seen = set()
    for name in field_names:
        if name.startswith('_') and not rename:
            raise ValueError('Field names cannot start with an underscore: '
                             '%r' % name)
        if name in seen:
            raise ValueError('Encountered duplicate field name: %r' % name)
        seen.add(name)
    # Positional argument names; real field names may not be identifiers.
    arg_names = ['_' + str(i) for i in range(len(field_names))]
    class_definition = _class_template.format(
        typename=typename,
        field_names=tuple(field_names),
        num_fields=len(field_names),
        arg_list=repr(tuple(arg_names)).replace("'", "")[1:-1],
        repr_fmt=', '.join(_repr_template.format(name=name)
                           for name in field_names),
        field_defs='\n'.join(_field_template.format(index=index, name=name)
                             for index, name in enumerate(field_names)
                             if _isidentifier(name))
    )
    namespace = dict(__name__='namedtuple_%s' % typename)
    exec(class_definition, namespace)
    result = namespace[typename]
    result._source = class_definition
    if verbose:
        print(result._source)
    # Set __module__ to the caller's module so pickling can work.
    try:
        result.__module__ = _sys._getframe(1).f_globals.get(
            '__name__', '__main__')
    except (AttributeError, ValueError):
        pass
    return result
Returns a new subclass of tuple with named fields. This is a patched version of collections.namedtuple from the stdlib. Unlike the latter, it accepts non-identifier strings as field names. All values are accessible through dict syntax. Fields whose names are identifiers are also accessible via attribute syntax as in ordinary namedtuples, alongside traditional indexing. This feature is needed as SDMX allows field names to contain '-'.
52,735
def write_source(self, filename):
    """Save the JSON source to *filename* (pretty-printed, sorted keys)."""
    with open(filename, 'w') as fp:
        return json.dump(self.message._elem, fp, indent=4, sort_keys=True)
Save source to file by calling write on the root element .
52,736
def write_source(self, filename):
    """Save the XML source to *filename* by writing out the root tree."""
    return self.message._elem.getroottree().write(filename, encoding='utf8')
Save XML source to file by calling write on the root element .
52,737
def group_attrib(self):
    """Return a namedtuple of all attributes attached to groups of which
    this series is a member, or None when it belongs to no group."""
    group_attributes = [g.attrib for g in self.dataset.groups if self in g]
    if group_attributes:
        return concat_namedtuples(*group_attributes)
Return a namedtuple containing all attributes attached to each group of which the given series is a member.
52,738
def read_instance(self, cls, sdmxobj, offset=None, first_only=True):
    """Instantiate *cls* from matching XML elements.

    Returns an instance built from the first match, a list of instances
    when *first_only* is False, or None when nothing matches.
    """
    if offset:
        try:
            # Rebase the search on the offset path's first hit.
            base = self._paths[offset](sdmxobj._elem)[0]
        except IndexError:
            return None
    else:
        base = sdmxobj._elem
    result = self._paths[cls](base)
    if result:
        if first_only:
            return cls(self, result[0])
        else:
            return [cls(self, i) for i in result]
If cls in _paths and matches return an instance of cls with the first XML element or if first_only is False a list of cls instances for all elements found If no matches were found return None .
52,739
def load_agency_profile(cls, source):
    """Class method loading metadata on data providers.

    *source* must be a JSON-formatted string or file-like object; the
    parsed content is merged into ``cls._agencies``.
    """
    if not isinstance(source, str_type):
        source = source.read()
    new_agencies = json.loads(source)
    cls._agencies.update(new_agencies)
Classmethod loading metadata on a data provider. source must be a JSON-formatted string or file-like object describing one or more data providers (URL of the SDMX web API, resource types, etc.). The dict Request._agencies is updated with the metadata from the source.
52,740
def series_keys(self, flow_id, cache=True):
    """Get a DataFrame of all possible series keys for *flow_id*.

    Results are memoized in ``self.cache`` under 'series_keys_<flow_id>'.
    """
    cache_id = 'series_keys_' + flow_id
    if cache_id in self.cache:
        return self.cache[cache_id]
    else:
        # Request only the key set, not the observations.
        resp = self.data(flow_id, params={'detail': 'serieskeysonly'})
        l = list(s.key for s in resp.data.series)
        df = PD.DataFrame(l, columns=l[0]._fields, dtype='category')
        if cache:
            self.cache[cache_id] = df
        return df
Get an empty dataset with all possible series keys .
52,741
def preview_data(self, flow_id, key=None, count=True, total=True):
    """Preview keys or series counts for a prospective dataset query.

    Downloads the complete series-key list for *flow_id* (supported by
    e.g. ECB and UNSD, not all providers) and filters it client-side,
    allowing multiple values per dimension in *key*.
    """
    all_keys = self.series_keys(flow_id)
    if not key:
        if count:
            return all_keys.shape[0]
        else:
            return all_keys
    # Normalize: every dimension maps to a list of accepted values.
    key_l = {k: [v] if isinstance(v, str_type) else v
             for k, v in key.items()}
    dim_names = [k for k in all_keys if k in key]
    key_df = all_keys.loc[:, dim_names]
    if total:
        # One boolean mask combining all dimension constraints.
        bool_series = reduce(and_, (key_df.isin(key_l)[col]
                                    for col in dim_names))
        if count:
            return bool_series.value_counts()[True]
        else:
            return all_keys[bool_series]
    else:
        # Break the result down per combination of requested values.
        key_product = product(*(key_l[k] for k in dim_names))
        PartialKey = namedtuple_factory('PartialKey', dim_names)
        matches = {PartialKey(k): reduce(
            and_, (key_df.isin({k1: [v1] for k1, v1 in
                                zip(dim_names, k)})[col]
                   for col in dim_names))
            for k in key_product}
        if not count:
            return {k: all_keys[v] for k, v in matches.items()}
        else:
            return {k: v.value_counts()[True] for k, v in matches.items()}
Get keys or number of series for a prospective dataset query allowing for keys with multiple values per dimension . It downloads the complete list of series keys for a dataflow rather than using constraints and DSD . This feature is however not supported by all data providers . ECB and UNSD are known to work .
52,742
def write(self, source=None, **kwargs):
    """Wrapper to call the writer's write method, defaulting to self.msg."""
    if not source:
        source = self.msg
    return self._writer.write(source=source, **kwargs)
Wrapper to call the writer's write method if present.
52,743
def parse_json(path):
    """Deserialize a JSON file containing runtime-collected types.

    Returns a list of FunctionInfo items; raises AssertionError with a
    path-qualified message when the file's structure is unexpected.
    """
    # Use a context manager so the handle is closed promptly.
    with open(path) as f:
        data = json.load(f)
    result = []

    def assert_type(value, typ):
        # Structural validation helper with a path-qualified message.
        assert isinstance(value, typ), '%s: Unexpected type %r' % (
            path, type(value).__name__)

    def assert_dict_item(dictionary, key, typ):
        assert key in dictionary, '%s: Missing dictionary key %r' % (
            path, key)
        value = dictionary[key]
        assert isinstance(value, typ), (
            '%s: Unexpected type %r for key %r' % (
                path, type(value).__name__, key))

    assert_type(data, list)
    for item in data:
        assert_type(item, dict)
        assert_dict_item(item, 'path', Text)
        assert_dict_item(item, 'line', int)
        assert_dict_item(item, 'func_name', Text)
        assert_dict_item(item, 'type_comments', list)
        for comment in item['type_comments']:
            assert_type(comment, Text)
        # Bug fix: validate presence of 'samples' too; previously a
        # missing key raised a bare KeyError instead of the helpful
        # assertion message.
        assert_dict_item(item, 'samples', int)
        info = FunctionInfo(encode(item['path']),
                            item['line'],
                            encode(item['func_name']),
                            [encode(comment)
                             for comment in item['type_comments']],
                            item['samples'])
        result.append(info)
    return result
Deserialize a JSON file containing runtime collected types .
52,744
def tokenize(s):
    """Translate a type comment string into a list of tokens (terminated
    by an End token).

    Raises ParseError when the remaining text matches no token.
    """
    original = s
    tokens = []
    while True:
        if not s:
            tokens.append(End())
            return tokens
        elif s[0] == ' ':
            s = s[1:]
        elif s[0] in '()[],*':
            tokens.append(Separator(s[0]))
            s = s[1:]
        elif s[:2] == '->':
            tokens.append(Separator('->'))
            s = s[2:]
        else:
            # Dotted/colon-separated name, possibly with spaces around
            # the dots.
            m = re.match(r'[-\w]+(\s*(\.|:)\s*[-/\w]*)*', s)
            if not m:
                raise ParseError(original)
            fullname = m.group(0)
            fullname = fullname.replace(' ', '')
            if fullname in TYPE_FIXUPS:
                fullname = TYPE_FIXUPS[fullname]
            # Pytz tzfile classes are an implementation detail.
            if fullname.startswith('pytz.tzfile.'):
                fullname = 'datetime.tzinfo'
            # Names with '-' or '/' aren't valid Python; fall back to Any.
            if '-' in fullname or '/' in fullname:
                fullname = 'Any'
            tokens.append(DottedName(fullname))
            s = s[len(m.group(0)):]
Translate a type comment into a list of tokens .
52,745
def generate_annotations_json_string(source_path, only_simple=False):
    """Produce annotation data from a JSON file of runtime-collected
    types.

    Returns a list of dicts (path, line, func_name, signature, samples);
    with *only_simple*, functions with complex signatures are skipped.
    """
    items = parse_json(source_path)
    results = []
    for item in items:
        signature = unify_type_comments(item.type_comments)
        if is_signature_simple(signature) or not only_simple:
            data = {
                'path': item.path,
                'line': item.line,
                'func_name': item.func_name,
                'signature': signature,
                'samples': item.samples
            }
            results.append(data)
    return results
Produce annotation data JSON file from a JSON file with runtime - collected types .
52,746
def _my_hash ( arg_list ) : res = 0 for arg in arg_list : res = res * 31 + hash ( arg ) return res
Simple helper hash function
52,747
def name_from_type(type_):
    """Return a PEP 484 compatible string representation of an internal
    type.

    Internal collection types use their repr; NoneType becomes 'None';
    builtin/unknown-module types use the bare name; everything else is
    qualified as 'module.Name' (or 'module:Name' for dotted qualnames).
    """
    if isinstance(type_, (DictType, ListType, TupleType, SetType,
                          IteratorType)):
        return repr(type_)
    else:
        if type_.__name__ != 'NoneType':
            module = type_.__module__
            if module in BUILTIN_MODULES or module == '<unknown>':
                return type_.__name__
            else:
                name = getattr(type_, '__qualname__', None) or type_.__name__
                # ':' delimits the module from a dotted (nested) qualname.
                delim = '.' if '.' not in name else ':'
                return '%s%s%s' % (module, delim, name)
        else:
            return 'None'
Helper function to get PEP - 484 compatible string representation of our internal types .
52,748
def resolve_type(arg):
    """Resolve an object to an internal collection type or its builtin
    type.

    Containers are sampled (only the first few items) to keep tracing
    cheap.
    """
    arg_type = type(arg)
    if arg_type == list:
        assert isinstance(arg, list)
        # Sample at most 4 items.
        sample = arg[:min(4, len(arg))]
        tentative_type = TentativeType()
        for sample_item in sample:
            tentative_type.add(resolve_type(sample_item))
        return ListType(tentative_type)
    elif arg_type == set:
        assert isinstance(arg, set)
        sample = []
        iterator = iter(arg)
        for i in range(0, min(4, len(arg))):
            sample.append(next(iterator))
        tentative_type = TentativeType()
        for sample_item in sample:
            tentative_type.add(resolve_type(sample_item))
        return SetType(tentative_type)
    elif arg_type == FakeIterator:
        assert isinstance(arg, FakeIterator)
        sample = []
        iterator = iter(arg)
        for i in range(0, min(4, len(arg))):
            sample.append(next(iterator))
        tentative_type = TentativeType()
        for sample_item in sample:
            tentative_type.add(resolve_type(sample_item))
        return IteratorType(tentative_type)
    elif arg_type == tuple:
        assert isinstance(arg, tuple)
        # Tuples are positional; keep up to 10 element types.
        sample = list(arg[:min(10, len(arg))])
        return TupleType([resolve_type(sample_item)
                          for sample_item in sample])
    elif arg_type == dict:
        assert isinstance(arg, dict)
        key_tt = TentativeType()
        val_tt = TentativeType()
        for i, (k, v) in enumerate(iteritems(arg)):
            if i > 4:
                break
            key_tt.add(resolve_type(k))
            val_tt.add(resolve_type(v))
        return DictType(key_tt, val_tt)
    else:
        return type(arg)
Resolve object to one of our internal collection types or generic built - in type .
52,749
def prep_args(arg_info):
    """Resolve types from an ArgInfo into ResolvedTypes.

    The varargs name and a leading self/cls are skipped; arguments whose
    values cannot be seen become UnknownType placeholders.
    """
    filtered_args = [a for a in arg_info.args
                     if getattr(arg_info, 'varargs', None) != a]
    if filtered_args and (filtered_args[0] in ('self', 'cls')):
        filtered_args = filtered_args[1:]
    pos_args = []
    if filtered_args:
        for arg in filtered_args:
            if isinstance(arg, str) and arg in arg_info.locals:
                resolved_type = resolve_type(arg_info.locals[arg])
                pos_args.append(resolved_type)
            else:
                # Can't see the value; record an unknown placeholder.
                pos_args.append(type(UnknownType()))
    varargs = None
    if arg_info.varargs:
        varargs_tuple = arg_info.locals[arg_info.varargs]
        if isinstance(varargs_tuple, tuple):
            # Sample only the first few varargs.
            varargs = [resolve_type(arg) for arg in varargs_tuple[:4]]
    return ResolvedTypes(pos_args=pos_args, varargs=varargs)
Resolve types from ArgInfo
52,750
def _flush_signature(key, return_type):
    """Store a signature for a function, capping stored variants.

    Consumes the pending argument info for *key* and bumps its sample
    count.
    """
    signatures = collected_signatures.setdefault(key, set())
    args_info = collected_args.pop(key)
    if len(signatures) < MAX_ITEMS_PER_FUNCTION:
        signatures.add((args_info, return_type))
    num_samples[key] = num_samples.get(key, 0) + 1
Store signature for a function .
52,751
def type_consumer():
    """Infinite loop of the type-consumer thread; drains the task queue.

    Call events stash argument types; return events flush them as a full
    signature.
    """
    while True:
        item = _task_queue.get()
        if isinstance(item, KeyAndTypes):
            if item.key in collected_args:
                # A previous call never reported a return (e.g. it
                # raised); flush it with an unknown return type.
                _flush_signature(item.key, UnknownType)
            collected_args[item.key] = ArgTypes(item.types)
        else:
            assert isinstance(item, KeyAndReturn)
            if item.key in collected_args:
                _flush_signature(item.key, item.return_type)
        _task_queue.task_done()
Infinite loop of the type consumer thread . It gets types to process from the task query .
52,752
def _make_sampling_sequence ( n ) : seq = list ( range ( 5 ) ) i = 50 while len ( seq ) < n : seq . append ( i ) i += 50 return seq
Return a list containing the proposed call event sampling sequence .
52,753
def default_filter_filename(filename):
    """Default filename filter: keep project files, relativized to
    TOP_DIR.

    Returns None for files to ignore (hidden top-level entries and
    absolute paths outside the project tree).
    """
    if filename is None:
        return None
    elif filename.startswith(TOP_DIR):
        if filename.startswith(TOP_DIR_DOT):
            # Skip hidden directories such as .git, .tox, ...
            return None
        else:
            return filename[TOP_DIR_LEN:].lstrip(os.sep)
    elif filename.startswith(os.sep):
        # Absolute path outside the project tree.
        return None
    else:
        return filename
Default filter for filenames .
52,754
def _filter_types(types_dict):
    """Filter type info before dumping: drop synthetic paths and
    module-level entries."""
    def exclude(k):
        # '<stdin>', '<string>' etc., plus module bodies.
        return k.path.startswith('<') or k.func_name == '<module>'
    return {k: v for k, v in iteritems(types_dict) if not exclude(k)}
Filter type info before dumping it to the file .
52,755
def _dump_impl():
    """Internal implementation for dump_stats and dumps_stats.

    Returns a list of dicts sorted by (path, line, func_name), each with
    the collected type comments and sample count.
    """
    filtered_signatures = _filter_types(collected_signatures)
    sorted_by_file = sorted(iteritems(filtered_signatures),
                            key=(lambda p: (p[0].path, p[0].line,
                                            p[0].func_name)))
    res = []
    for function_key, signatures in sorted_by_file:
        comments = [_make_type_comment(args, ret_type)
                    for args, ret_type in signatures]
        res.append({
            'path': function_key.path,
            'line': function_key.line,
            'func_name': function_key.func_name,
            'type_comments': comments,
            'samples': num_samples.get(function_key, 0),
        })
    return res
Internal implementation for dump_stats and dumps_stats
52,756
def dump_stats(filename):
    """Write collected type information to *filename* as indented JSON."""
    res = _dump_impl()
    # Context manager guarantees the file is closed even if json.dump
    # raises (the old open/close pair leaked the handle on error).
    with open(filename, 'w') as f:
        json.dump(res, f, indent=4)
Write collected information to file .
52,757
def init_types_collection(filter_filename=default_filter_filename):
    """Set up profiler hooks to enable type collection.

    Call this once from the main thread; the dispatcher is installed for
    the current thread and all threads started afterwards.
    """
    global _filter_filename
    _filter_filename = filter_filename
    sys.setprofile(_trace_dispatch)
    threading.setprofile(_trace_dispatch)
Setup profiler hooks to enable type collection . Call this one time from the main thread .
52,758
def add ( self , type ) : try : if isinstance ( type , SetType ) : if EMPTY_SET_TYPE in self . types_hashable : self . types_hashable . remove ( EMPTY_SET_TYPE ) elif isinstance ( type , ListType ) : if EMPTY_LIST_TYPE in self . types_hashable : self . types_hashable . remove ( EMPTY_LIST_TYPE ) elif isinstance ( type , IteratorType ) : if EMPTY_ITERATOR_TYPE in self . types_hashable : self . types_hashable . remove ( EMPTY_ITERATOR_TYPE ) elif isinstance ( type , DictType ) : if EMPTY_DICT_TYPE in self . types_hashable : self . types_hashable . remove ( EMPTY_DICT_TYPE ) for item in self . types_hashable : if isinstance ( item , DictType ) : if item . key_type == type . key_type : item . val_type . merge ( type . val_type ) return self . types_hashable . add ( type ) except ( TypeError , AttributeError ) : try : if type not in self . types : self . types . append ( type ) except AttributeError : if TypeWasIncomparable not in self . types : self . types . append ( TypeWasIncomparable )
Add type to the runtime type samples .
52,759
def merge ( self , other ) : for hashables in other . types_hashable : self . add ( hashables ) for non_hashbles in other . types : self . add ( non_hashbles )
Merge two TentativeType instances
52,760
def infer_annotation ( type_comments ) : assert type_comments args = { } returns = set ( ) for comment in type_comments : arg_types , return_type = parse_type_comment ( comment ) for i , arg_type in enumerate ( arg_types ) : args . setdefault ( i , set ( ) ) . add ( arg_type ) returns . add ( return_type ) combined_args = [ ] for i in sorted ( args ) : arg_infos = list ( args [ i ] ) kind = argument_kind ( arg_infos ) if kind is None : raise InferError ( 'Ambiguous argument kinds:\n' + '\n' . join ( type_comments ) ) types = [ arg . type for arg in arg_infos ] combined = combine_types ( types ) if str ( combined ) == 'None' : combined = UnionType ( [ ClassType ( 'None' ) , AnyType ( ) ] ) if kind != ARG_POS and ( len ( str ( combined ) ) > 120 or isinstance ( combined , UnionType ) ) : combined = AnyType ( ) combined_args . append ( Argument ( combined , kind ) ) combined_return = combine_types ( returns ) return combined_args , combined_return
Given some type comments return a single inferred signature .
52,761
def argument_kind ( args ) : kinds = set ( arg . kind for arg in args ) if len ( kinds ) != 1 : return None return kinds . pop ( )
Return the kind of an argument based on one or more descriptions of the argument .
52,762
def combine_types ( types ) : items = simplify_types ( types ) if len ( items ) == 1 : return items [ 0 ] else : return UnionType ( items )
Given some types return a combined and simplified type .
52,763
def simplify_types ( types ) : flattened = flatten_types ( types ) items = filter_ignored_items ( flattened ) items = [ simplify_recursive ( item ) for item in items ] items = merge_items ( items ) items = dedupe_types ( items ) items = remove_redundant_items ( items ) if len ( items ) > 3 : return [ AnyType ( ) ] else : return items
Given some types give simplified types representing the union of types .
52,764
def simplify_recursive ( typ ) : if isinstance ( typ , UnionType ) : return combine_types ( typ . items ) elif isinstance ( typ , ClassType ) : simplified = ClassType ( typ . name , [ simplify_recursive ( arg ) for arg in typ . args ] ) args = simplified . args if ( simplified . name == 'Dict' and len ( args ) == 2 and isinstance ( args [ 0 ] , ClassType ) and args [ 0 ] . name in ( 'str' , 'Text' ) and isinstance ( args [ 1 ] , UnionType ) and not is_optional ( args [ 1 ] ) ) : return ClassType ( 'Dict' , [ args [ 0 ] , AnyType ( ) ] ) return simplified elif isinstance ( typ , TupleType ) : return TupleType ( [ simplify_recursive ( item ) for item in typ . items ] ) return typ
Simplify all components of a type .
52,765
def remove_redundant_items ( items ) : result = [ ] for item in items : for other in items : if item is not other and is_redundant_union_item ( item , other ) : break else : result . append ( item ) return result
Filter out redundant union items .
52,766
def is_redundant_union_item ( first , other ) : if isinstance ( first , ClassType ) and isinstance ( other , ClassType ) : if first . name == 'str' and other . name == 'Text' : return True elif first . name == 'bool' and other . name == 'int' : return True elif first . name == 'int' and other . name == 'float' : return True elif ( first . name in ( 'List' , 'Dict' , 'Set' ) and other . name == first . name ) : if not first . args and other . args : return True elif len ( first . args ) == len ( other . args ) and first . args : result = all ( first_arg == other_arg or other_arg == AnyType ( ) for first_arg , other_arg in zip ( first . args , other . args ) ) return result return False
If union has both items is the first one redundant?
52,767
def merge_items ( items ) : result = [ ] while items : item = items . pop ( ) merged = None for i , other in enumerate ( items ) : merged = merged_type ( item , other ) if merged : break if merged : del items [ i ] items . append ( merged ) else : result . append ( item ) return list ( reversed ( result ) )
Merge union items that can be merged .
52,768
def merged_type ( t , s ) : if isinstance ( t , TupleType ) and isinstance ( s , TupleType ) : if len ( t . items ) == len ( s . items ) : return TupleType ( [ combine_types ( [ ti , si ] ) for ti , si in zip ( t . items , s . items ) ] ) all_items = t . items + s . items if all_items and all ( item == all_items [ 0 ] for item in all_items [ 1 : ] ) : return ClassType ( 'Tuple' , [ all_items [ 0 ] ] ) elif ( isinstance ( t , TupleType ) and isinstance ( s , ClassType ) and s . name == 'Tuple' and len ( s . args ) == 1 ) : if all ( item == s . args [ 0 ] for item in t . items ) : return s elif isinstance ( s , TupleType ) and isinstance ( t , ClassType ) and t . name == 'Tuple' : return merged_type ( s , t ) elif isinstance ( s , NoReturnType ) : return t elif isinstance ( t , NoReturnType ) : return s elif isinstance ( s , AnyType ) : return t elif isinstance ( t , AnyType ) : return s return None
Return merged type if two items can be merged in to a different more general type .
52,769
def dump_annotations ( type_info , files ) : with open ( type_info ) as f : data = json . load ( f ) for item in data : path , line , func_name = item [ 'path' ] , item [ 'line' ] , item [ 'func_name' ] if files and path not in files : for f in files : if path . startswith ( os . path . join ( f , '' ) ) : break else : continue print ( "%s:%d: in %s:" % ( path , line , func_name ) ) type_comments = item [ 'type_comments' ] signature = unify_type_comments ( type_comments ) arg_types = signature [ 'arg_types' ] return_type = signature [ 'return_type' ] print ( " # type: (%s) -> %s" % ( ", " . join ( arg_types ) , return_type ) )
Dump annotations out of type_info filtered by files .
52,770
def strip_py ( arg ) : for ext in PY_EXTENSIONS : if arg . endswith ( ext ) : return arg [ : - len ( ext ) ] return None
Strip a trailing . py or . pyi suffix . Return None if no such suffix is found .
52,771
def get_decorators ( self , node ) : if node . parent is None : return [ ] results = { } if not self . decorated . match ( node . parent , results ) : return [ ] decorators = results . get ( 'dd' ) or [ results [ 'd' ] ] decs = [ ] for d in decorators : for child in d . children : if isinstance ( child , Leaf ) and child . type == token . NAME : decs . append ( child . value ) return decs
Return a list of decorators found on a function definition .
52,772
def has_return_exprs ( self , node ) : results = { } if self . return_expr . match ( node , results ) : return True for child in node . children : if child . type not in ( syms . funcdef , syms . classdef ) : if self . has_return_exprs ( child ) : return True return False
Traverse the tree below node looking for return expr .
52,773
def inform_if_paths_invalid ( egrc_path , examples_dir , custom_dir , debug = True ) : if ( not debug ) : return if ( egrc_path ) : _inform_if_path_does_not_exist ( egrc_path ) if ( examples_dir ) : _inform_if_path_does_not_exist ( examples_dir ) if ( custom_dir ) : _inform_if_path_does_not_exist ( custom_dir )
If egrc_path examples_dir or custom_dir is truthy and debug is True informs the user that a path is not set .
52,774
def get_egrc_config ( cli_egrc_path ) : resolved_path = get_priority ( cli_egrc_path , DEFAULT_EGRC_PATH , None ) expanded_path = get_expanded_path ( resolved_path ) egrc_config = get_empty_config ( ) if os . path . isfile ( expanded_path ) : egrc_config = get_config_tuple_from_egrc ( expanded_path ) return egrc_config
Return a Config namedtuple based on the contents of the egrc .
52,775
def get_resolved_config ( egrc_path , examples_dir , custom_dir , use_color , pager_cmd , squeeze , debug = True , ) : inform_if_paths_invalid ( egrc_path , examples_dir , custom_dir ) examples_dir = get_expanded_path ( examples_dir ) custom_dir = get_expanded_path ( custom_dir ) egrc_config = get_egrc_config ( egrc_path ) resolved_examples_dir = get_priority ( examples_dir , egrc_config . examples_dir , DEFAULT_EXAMPLES_DIR ) resolved_examples_dir = get_expanded_path ( resolved_examples_dir ) resolved_custom_dir = get_priority ( custom_dir , egrc_config . custom_dir , DEFAULT_CUSTOM_DIR ) resolved_custom_dir = get_expanded_path ( resolved_custom_dir ) resolved_use_color = get_priority ( use_color , egrc_config . use_color , DEFAULT_USE_COLOR ) resolved_pager_cmd = get_priority ( pager_cmd , egrc_config . pager_cmd , DEFAULT_PAGER_CMD ) environment_editor_cmd = get_editor_cmd_from_environment ( ) resolved_editor_cmd = get_priority ( egrc_config . editor_cmd , environment_editor_cmd , DEFAULT_EDITOR_CMD ) color_config = None if resolved_use_color : default_color_config = get_default_color_config ( ) color_config = merge_color_configs ( egrc_config . color_config , default_color_config ) resolved_squeeze = get_priority ( squeeze , egrc_config . squeeze , DEFAULT_SQUEEZE ) resolved_subs = get_priority ( None , egrc_config . subs , get_default_subs ( ) ) result = Config ( examples_dir = resolved_examples_dir , custom_dir = resolved_custom_dir , color_config = color_config , use_color = resolved_use_color , pager_cmd = resolved_pager_cmd , editor_cmd = resolved_editor_cmd , squeeze = resolved_squeeze , subs = resolved_subs , ) return result
Create a Config namedtuple . Passed in values will override defaults .
52,776
def get_config_tuple_from_egrc ( egrc_path ) : with open ( egrc_path , 'r' ) as egrc : try : config = ConfigParser . RawConfigParser ( ) except AttributeError : config = ConfigParser ( ) config . readfp ( egrc ) examples_dir = None custom_dir = None use_color = None pager_cmd = None squeeze = None subs = None editor_cmd = None if config . has_option ( DEFAULT_SECTION , EG_EXAMPLES_DIR ) : examples_dir = config . get ( DEFAULT_SECTION , EG_EXAMPLES_DIR ) examples_dir = get_expanded_path ( examples_dir ) if config . has_option ( DEFAULT_SECTION , CUSTOM_EXAMPLES_DIR ) : custom_dir = config . get ( DEFAULT_SECTION , CUSTOM_EXAMPLES_DIR ) custom_dir = get_expanded_path ( custom_dir ) if config . has_option ( DEFAULT_SECTION , USE_COLOR ) : use_color_raw = config . get ( DEFAULT_SECTION , USE_COLOR ) use_color = _parse_bool_from_raw_egrc_value ( use_color_raw ) if config . has_option ( DEFAULT_SECTION , PAGER_CMD ) : pager_cmd_raw = config . get ( DEFAULT_SECTION , PAGER_CMD ) pager_cmd = ast . literal_eval ( pager_cmd_raw ) if config . has_option ( DEFAULT_SECTION , EDITOR_CMD ) : editor_cmd_raw = config . get ( DEFAULT_SECTION , EDITOR_CMD ) editor_cmd = ast . literal_eval ( editor_cmd_raw ) color_config = get_custom_color_config_from_egrc ( config ) if config . has_option ( DEFAULT_SECTION , SQUEEZE ) : squeeze_raw = config . get ( DEFAULT_SECTION , SQUEEZE ) squeeze = _parse_bool_from_raw_egrc_value ( squeeze_raw ) if config . has_section ( SUBSTITUTION_SECTION ) : subs = get_substitutions_from_config ( config ) return Config ( examples_dir = examples_dir , custom_dir = custom_dir , color_config = color_config , use_color = use_color , pager_cmd = pager_cmd , editor_cmd = editor_cmd , squeeze = squeeze , subs = subs , )
Create a Config named tuple from the values specified in the . egrc . Expands any paths as necessary .
52,777
def get_expanded_path ( path ) : if path : result = path result = os . path . expanduser ( result ) result = os . path . expandvars ( result ) return result else : return None
Expand ~ and variables in a path . If path is not truthy return None .
52,778
def get_editor_cmd_from_environment ( ) : result = os . getenv ( ENV_VISUAL ) if ( not result ) : result = os . getenv ( ENV_EDITOR ) return result
Gets an editor command from environment variables .
52,779
def _inform_if_path_does_not_exist ( path ) : expanded_path = get_expanded_path ( path ) if not os . path . exists ( expanded_path ) : print ( 'Could not find custom path at: {}' . format ( expanded_path ) )
If the path does not exist print a message saying so . This is intended to be helpful to users if they specify a custom path that eg cannot find .
52,780
def get_custom_color_config_from_egrc ( config ) : pound = _get_color_from_config ( config , CONFIG_NAMES . pound ) heading = _get_color_from_config ( config , CONFIG_NAMES . heading ) code = _get_color_from_config ( config , CONFIG_NAMES . code ) backticks = _get_color_from_config ( config , CONFIG_NAMES . backticks ) prompt = _get_color_from_config ( config , CONFIG_NAMES . prompt ) pound_reset = _get_color_from_config ( config , CONFIG_NAMES . pound_reset ) heading_reset = _get_color_from_config ( config , CONFIG_NAMES . heading_reset ) code_reset = _get_color_from_config ( config , CONFIG_NAMES . code_reset ) backticks_reset = _get_color_from_config ( config , CONFIG_NAMES . backticks_reset ) prompt_reset = _get_color_from_config ( config , CONFIG_NAMES . prompt_reset ) result = ColorConfig ( pound = pound , heading = heading , code = code , backticks = backticks , prompt = prompt , pound_reset = pound_reset , heading_reset = heading_reset , code_reset = code_reset , backticks_reset = backticks_reset , prompt_reset = prompt_reset ) return result
Get the ColorConfig from the egrc config object . Any colors not defined will be None .
52,781
def _get_color_from_config ( config , option ) : if not config . has_option ( COLOR_SECTION , option ) : return None else : return ast . literal_eval ( config . get ( COLOR_SECTION , option ) )
Helper method to get an option from the COLOR_SECTION of the config .
52,782
def parse_substitution_from_list ( list_rep ) : if type ( list_rep ) is not list : raise SyntaxError ( 'Substitution must be a list' ) if len ( list_rep ) < 2 : raise SyntaxError ( 'Substitution must be a list of size 2' ) pattern = list_rep [ 0 ] replacement = list_rep [ 1 ] is_multiline = False if ( len ( list_rep ) > 2 ) : is_multiline = list_rep [ 2 ] if type ( is_multiline ) is not bool : raise SyntaxError ( 'is_multiline must be a boolean' ) result = substitute . Substitution ( pattern , replacement , is_multiline ) return result
Parse a substitution from the list representation in the config file .
52,783
def get_substitutions_from_config ( config ) : result = [ ] pattern_names = config . options ( SUBSTITUTION_SECTION ) pattern_names . sort ( ) for name in pattern_names : pattern_val = config . get ( SUBSTITUTION_SECTION , name ) list_rep = ast . literal_eval ( pattern_val ) substitution = parse_substitution_from_list ( list_rep ) result . append ( substitution ) return result
Return a list of Substitution objects from the config sorted alphabetically by pattern name . Returns an empty list if no Substitutions are specified . If there are problems parsing the values a help message will be printed and an error will be thrown .
52,784
def get_default_color_config ( ) : result = ColorConfig ( pound = DEFAULT_COLOR_POUND , heading = DEFAULT_COLOR_HEADING , code = DEFAULT_COLOR_CODE , backticks = DEFAULT_COLOR_BACKTICKS , prompt = DEFAULT_COLOR_PROMPT , pound_reset = DEFAULT_COLOR_POUND_RESET , heading_reset = DEFAULT_COLOR_HEADING_RESET , code_reset = DEFAULT_COLOR_CODE_RESET , backticks_reset = DEFAULT_COLOR_BACKTICKS_RESET , prompt_reset = DEFAULT_COLOR_PROMPT_RESET ) return result
Get a color config object with all the defaults .
52,785
def get_empty_config ( ) : empty_color_config = get_empty_color_config ( ) result = Config ( examples_dir = None , custom_dir = None , color_config = empty_color_config , use_color = None , pager_cmd = None , editor_cmd = None , squeeze = None , subs = None ) return result
Return an empty Config object with no options set .
52,786
def get_empty_color_config ( ) : empty_color_config = ColorConfig ( pound = None , heading = None , code = None , backticks = None , prompt = None , pound_reset = None , heading_reset = None , code_reset = None , backticks_reset = None , prompt_reset = None ) return empty_color_config
Return a color_config with all values set to None .
52,787
def merge_color_configs ( first , second ) : pound = get_priority ( first . pound , second . pound , None ) heading = get_priority ( first . heading , second . heading , None ) code = get_priority ( first . code , second . code , None ) backticks = get_priority ( first . backticks , second . backticks , None ) prompt = get_priority ( first . prompt , second . prompt , None ) pound_reset = get_priority ( first . pound_reset , second . pound_reset , None ) heading_reset = get_priority ( first . heading_reset , second . heading_reset , None ) code_reset = get_priority ( first . code_reset , second . code_reset , None ) backticks_reset = get_priority ( first . backticks_reset , second . backticks_reset , None ) prompt_reset = get_priority ( first . prompt_reset , second . prompt_reset , None ) result = ColorConfig ( pound = pound , heading = heading , code = code , backticks = backticks , prompt = prompt , pound_reset = pound_reset , heading_reset = heading_reset , code_reset = code_reset , backticks_reset = backticks_reset , prompt_reset = prompt_reset ) return result
Merge the color configs .
52,788
def apply_and_get_result ( self , string ) : if self . is_multiline : compiled_pattern = re . compile ( self . pattern , re . MULTILINE ) else : compiled_pattern = re . compile ( self . pattern ) result = re . sub ( compiled_pattern , self . repl , string ) return result
Perform the substitution represented by this object on string and return the result .
52,789
def colorize_text ( self , text ) : result = text result = self . colorize_heading ( result ) result = self . colorize_block_indent ( result ) result = self . colorize_backticks ( result ) return result
Colorize the text .
52,790
def _recursive_get_all_file_names ( dir ) : if not dir : return [ ] result = [ ] for basedir , dirs , files in os . walk ( dir ) : result . extend ( files ) return result
Get all the file names in the directory . Gets all the top level file names only not the full path .
52,791
def edit_custom_examples ( program , config ) : if ( not config . custom_dir ) or ( not os . path . exists ( config . custom_dir ) ) : _inform_cannot_edit_no_custom_dir ( ) return resolved_program = get_resolved_program ( program , config ) custom_file_paths = get_file_paths_for_program ( resolved_program , config . custom_dir ) if ( len ( custom_file_paths ) > 0 ) : path_to_edit = custom_file_paths [ 0 ] else : path_to_edit = os . path . join ( config . custom_dir , resolved_program + '.md' ) subprocess . call ( [ config . editor_cmd , path_to_edit ] )
Edit custom examples for the given program creating the file if it does not exist .
52,792
def get_file_paths_for_program ( program , dir_to_search ) : if dir_to_search is None : return [ ] else : wanted_file_name = program + EXAMPLE_FILE_SUFFIX result = [ ] for basedir , dirs , file_names in os . walk ( dir_to_search ) : for file_name in file_names : if file_name == wanted_file_name : result . append ( os . path . join ( basedir , file_name ) ) return result
Return an array of full paths matching the given program . If no directory is present returns an empty list .
52,793
def page_string ( str_to_page , pager_cmd ) : use_fallback_page_function = False if pager_cmd is None : use_fallback_page_function = True elif pager_cmd == FLAG_FALLBACK : use_fallback_page_function = True try : if use_fallback_page_function : pydoc . pager ( str_to_page ) else : pydoc . pipepager ( str_to_page , cmd = pager_cmd ) except KeyboardInterrupt : pass
Page str_to_page via the pager .
52,794
def get_list_of_all_supported_commands ( config ) : default_files = _recursive_get_all_file_names ( config . examples_dir ) custom_files = _recursive_get_all_file_names ( config . custom_dir ) default_files = [ path for path in default_files if _is_example_file ( path ) ] custom_files = [ path for path in custom_files if _is_example_file ( path ) ] def get_without_suffix ( file_name ) : return file_name . split ( EXAMPLE_FILE_SUFFIX ) [ 0 ] default_files = [ get_without_suffix ( f ) for f in default_files ] custom_files = [ get_without_suffix ( f ) for f in custom_files ] set_default_commands = set ( default_files ) set_custom_commands = set ( custom_files ) alias_dict = get_alias_dict ( config ) both_defined = set_default_commands & set_custom_commands only_default = set_default_commands - set_custom_commands only_custom = set_custom_commands - set_default_commands all_commands = both_defined | only_default | only_custom command_to_rep = { } for command in all_commands : rep = None if command in both_defined : rep = command + ' ' + FLAG_CUSTOM_AND_DEFAULT elif command in only_default : rep = command elif command in only_custom : rep = command + ' ' + FLAG_ONLY_CUSTOM else : raise NameError ( 'command not in known set: ' + str ( command ) ) command_to_rep [ command ] = rep result = [ ] all_commands_and_aliases = all_commands . union ( alias_dict . keys ( ) ) for command in all_commands_and_aliases : if command in alias_dict : target = alias_dict [ command ] rep_of_target = command_to_rep [ target ] result . append ( command + ' -> ' + rep_of_target ) else : rep = command_to_rep [ command ] result . append ( rep ) result . sort ( ) return result
Generate a list of all the commands that have examples known to eg . The format of the list is the command names . The fact that there are examples for cp for example would mean that cp was in the list .
52,795
def get_squeezed_contents ( contents ) : line_between_example_code = substitute . Substitution ( '\n\n ' , '\n ' , True ) lines_between_examples = substitute . Substitution ( '\n\n\n' , '\n\n' , True ) lines_between_sections = substitute . Substitution ( '\n\n\n\n' , '\n\n\n' , True ) result = contents result = line_between_example_code . apply_and_get_result ( result ) result = lines_between_examples . apply_and_get_result ( result ) result = lines_between_sections . apply_and_get_result ( result ) return result
Squeeze the contents by removing blank lines between definition and example and remove duplicate blank lines except between sections .
52,796
def get_colorized_contents ( contents , color_config ) : colorizer = color . EgColorizer ( color_config ) result = colorizer . colorize_text ( contents ) return result
Colorize the contents based on the color_config .
52,797
def get_substituted_contents ( contents , substitutions ) : result = contents for sub in substitutions : result = sub . apply_and_get_result ( result ) return result
Perform a list of substitutions and return the result .
52,798
def get_resolved_program ( program , config_obj ) : alias_dict = get_alias_dict ( config_obj ) if program in alias_dict : return alias_dict [ program ] else : return program
Take a program that may be an alias for another program and return the resolved program .
52,799
def get_alias_dict ( config_obj ) : if not config_obj . examples_dir : return { } alias_file_path = _get_alias_file_path ( config_obj ) if not os . path . isfile ( alias_file_path ) : return { } alias_file_contents = _get_contents_of_file ( alias_file_path ) result = json . loads ( alias_file_contents ) return result
Return a dictionary consisting of all aliases known to eg .