idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
10,800 | def refreshWidgets ( self ) : if hasattr ( self , '_widgets' ) : for w in self . _widgets : w . refresh ( isauto = 0 ) else : raise RuntimeError ( "No widgets found" ) | This function manually refreshed all widgets attached to this simulation . You want to call this function if any particle data has been manually changed . |
10,801 | def status ( self ) : from rebound import __version__ , __build__ s = "" s += "---------------------------------\n" s += "REBOUND version: \t%s\n" % __version__ s += "REBOUND built on: \t%s\n" % __build__ s += "Number of particles: \t%d\n" % self . N s += "Selected integrator: \t" + self . integrator + "\n" s += "Simulation time: \t%.16e\n" % self . t s += "Current timestep: \t%f\n" % self . dt if self . N > 0 : s += "---------------------------------\n" for p in self . particles : s += str ( p ) + "\n" s += "---------------------------------" print ( s ) | Prints a summary of the current status of the simulation . |
10,802 | def integrator ( self ) : i = self . _integrator for name , _i in INTEGRATORS . items ( ) : if i == _i : return name return i | Get or set the integrator module . |
10,803 | def boundary ( self ) : i = self . _boundary for name , _i in BOUNDARIES . items ( ) : if i == _i : return name return i | Get or set the boundary module . |
10,804 | def gravity ( self ) : i = self . _gravity for name , _i in GRAVITIES . items ( ) : if i == _i : return name return i | Get or set the gravity module . |
10,805 | def collision ( self ) : i = self . _collision for name , _i in COLLISIONS . items ( ) : if i == _i : return name return i | Get or set the collision module . |
10,806 | def add_variation ( self , order = 1 , first_order = None , first_order_2 = None , testparticle = - 1 ) : cur_var_config_N = self . var_config_N if order == 1 : index = clibrebound . reb_add_var_1st_order ( byref ( self ) , c_int ( testparticle ) ) elif order == 2 : if first_order is None : raise AttributeError ( "Please specify corresponding first order variational equations when initializing second order variational equations." ) if first_order_2 is None : first_order_2 = first_order index = clibrebound . reb_add_var_2nd_order ( byref ( self ) , c_int ( testparticle ) , c_int ( first_order . index ) , c_int ( first_order_2 . index ) ) else : raise AttributeError ( "Only variational equations of first and second order are supported." ) s = Variation . from_buffer_copy ( self . var_config [ cur_var_config_N ] ) return s | This function adds a set of variational particles to the simulation . |
10,807 | def init_megno ( self , seed = None ) : if seed is None : clibrebound . reb_tools_megno_init ( byref ( self ) ) else : clibrebound . reb_tools_megno_init_seed ( byref ( self ) , c_uint ( seed ) ) | This function initialises the chaos indicator MEGNO particles and enables their integration . |
10,808 | def remove ( self , index = None , hash = None , keepSorted = True ) : if index is not None : clibrebound . reb_remove ( byref ( self ) , index , keepSorted ) if hash is not None : hash_types = c_uint32 , c_uint , c_ulong PY3 = sys . version_info [ 0 ] == 3 if PY3 : string_types = str , int_types = int , else : string_types = basestring , int_types = int , long if isinstance ( hash , string_types ) : clibrebound . reb_remove_by_hash ( byref ( self ) , rebhash ( hash ) , keepSorted ) elif isinstance ( hash , int_types ) : clibrebound . reb_remove_by_hash ( byref ( self ) , c_uint32 ( hash ) , keepSorted ) elif isinstance ( hash , hash_types ) : clibrebound . reb_remove_by_hash ( byref ( self ) , hash , keepSorted ) if hasattr ( self , '_widgets' ) : self . _display_heartbeat ( pointer ( self ) ) self . process_messages ( ) | Removes a particle from the simulation . |
10,809 | def particles_ascii ( self , prec = 8 ) : s = "" for p in self . particles : s += ( ( "%%.%de " % prec ) * 8 ) % ( p . m , p . r , p . x , p . y , p . z , p . vx , p . vy , p . vz ) + "\n" if len ( s ) : s = s [ : - 1 ] return s | Returns an ASCII string with all particles masses radii positions and velocities . |
10,810 | def add_particles_ascii ( self , s ) : for l in s . split ( "\n" ) : r = l . split ( ) if len ( r ) : try : r = [ float ( x ) for x in r ] p = Particle ( simulation = self , m = r [ 0 ] , r = r [ 1 ] , x = r [ 2 ] , y = r [ 3 ] , z = r [ 4 ] , vx = r [ 5 ] , vy = r [ 6 ] , vz = r [ 7 ] ) self . add ( p ) except : raise AttributeError ( "Each line requires 8 floats corresponding to mass, radius, position (x,y,z) and velocity (x,y,z)." ) | Adds particles from an ASCII string . |
10,811 | def calculate_orbits ( self , primary = None , jacobi_masses = False , heliocentric = None , barycentric = None ) : orbits = [ ] if heliocentric is not None or barycentric is not None : raise AttributeError ( 'heliocentric and barycentric keywords in calculate_orbits are deprecated. Pass primary keyword instead (sim.particles[0] for heliocentric and sim.calculate_com() for barycentric)' ) if primary is None : jacobi = True primary = self . particles [ 0 ] clibrebound . reb_get_com_of_pair . restype = Particle else : jacobi = False for p in self . particles [ 1 : self . N_real ] : if jacobi_masses is True : interior_mass = primary . m primary . m = self . particles [ 0 ] . m * ( p . m + interior_mass ) / interior_mass - p . m orbits . append ( p . calculate_orbit ( primary = primary ) ) primary . m = interior_mass else : orbits . append ( p . calculate_orbit ( primary = primary ) ) if jacobi is True : primary = clibrebound . reb_get_com_of_pair ( primary , p ) return orbits | Calculate orbital parameters for all particles in the simulation . By default this function returns the orbits in Jacobi coordinates . |
10,812 | def calculate_com ( self , first = 0 , last = None ) : if last is None : last = self . N_real clibrebound . reb_get_com_range . restype = Particle return clibrebound . reb_get_com_range ( byref ( self ) , c_int ( first ) , c_int ( last ) ) | Returns the center of momentum for all particles in the simulation . |
10,813 | def serialize_particle_data ( self , ** kwargs ) : N = self . N possible_keys = [ "hash" , "m" , "r" , "xyz" , "vxvyvz" , "xyzvxvyvz" ] d = { x : None for x in possible_keys } for k , v in kwargs . items ( ) : if k in d : if k == "hash" : if v . dtype != "uint32" : raise AttributeError ( "Expected 'uint32' data type for '%s' array." % k ) if v . size < N : raise AttributeError ( "Array '%s' is not large enough." % k ) d [ k ] = v . ctypes . data_as ( ctypes . POINTER ( ctypes . c_uint32 ) ) else : if v . dtype != "float64" : raise AttributeError ( "Expected 'float64' data type for %s array." % k ) if k in [ "xyz" , "vxvyvz" ] : minsize = 3 * N elif k in [ "xyzvxvyvz" ] : minsize = 6 * N else : minsize = N if v . size < minsize : raise AttributeError ( "Array '%s' is not large enough." % k ) d [ k ] = v . ctypes . data_as ( ctypes . POINTER ( ctypes . c_double ) ) else : raise AttributeError ( "Only '%s' are currently supported attributes for serialization." % "', '" . join ( d . keys ( ) ) ) clibrebound . reb_serialize_particle_data ( byref ( self ) , d [ "hash" ] , d [ "m" ] , d [ "r" ] , d [ "xyz" ] , d [ "vxvyvz" ] , d [ "xyzvxvyvz" ] ) | Fast way to access serialized particle data via numpy arrays . |
10,814 | def calculate_energy ( self ) : clibrebound . reb_tools_energy . restype = c_double return clibrebound . reb_tools_energy ( byref ( self ) ) | Returns the sum of potential and kinetic energy of all particles in the simulation . |
10,815 | def configure_box ( self , boxsize , root_nx = 1 , root_ny = 1 , root_nz = 1 ) : clibrebound . reb_configure_box ( byref ( self ) , c_double ( boxsize ) , c_int ( root_nx ) , c_int ( root_ny ) , c_int ( root_nz ) ) return | Initialize the simulation box . |
10,816 | def configure_ghostboxes ( self , nghostx = 0 , nghosty = 0 , nghostz = 0 ) : clibrebound . nghostx = c_int ( nghostx ) clibrebound . nghosty = c_int ( nghosty ) clibrebound . nghostz = c_int ( nghostz ) return | Initialize the ghost boxes . |
10,817 | def save ( self , filename ) : clibrebound . reb_output_binary ( byref ( self ) , c_char_p ( filename . encode ( "ascii" ) ) ) | Save the entire REBOUND simulation to a binary file . |
10,818 | def particles ( self ) : sim = self . _sim . contents ps = [ ] if self . testparticle >= 0 : N = 1 else : N = sim . N - sim . N_var ParticleList = Particle * N ps = ParticleList . from_address ( ctypes . addressof ( sim . _particles . contents ) + self . index * ctypes . sizeof ( Particle ) ) return ps | Access the variational particles corresponding to this set of variational equations . |
10,819 | def merge_link_object ( serializer , data , instance ) : link_object = { } if not getattr ( instance , 'pk' , None ) : return data link_fields = serializer . get_link_fields ( ) for name , field in six . iteritems ( link_fields ) : if name in data and not data [ name ] : continue link = getattr ( field , 'link' , None ) if link is None : base_url = '' if settings . ENABLE_HOST_RELATIVE_LINKS : base_url = DynamicRouter . get_canonical_path ( serializer . get_resource_key ( ) , instance . pk ) or '' link = '%s%s/' % ( base_url , name ) elif callable ( link ) : link = link ( name , field , data , instance ) link_object [ name ] = link if link_object : data [ 'links' ] = link_object return data | Add a links attribute to the data that maps field names to URLs . |
10,820 | def register_post_processor ( func ) : global POST_PROCESSORS key = func . __name__ POST_PROCESSORS [ key ] = func return func | Register a post processor function to be run as the final step in serialization . The data passed in will already have gone through the sideloading processor . |
10,821 | def process ( self , obj , parent = None , parent_key = None , depth = 0 ) : if isinstance ( obj , list ) : for key , o in enumerate ( obj ) : self . process ( o , parent = obj , parent_key = key , depth = depth ) elif isinstance ( obj , dict ) : dynamic = self . is_dynamic ( obj ) returned = isinstance ( obj , ReturnDict ) if dynamic or returned : for key , o in six . iteritems ( obj ) : if isinstance ( o , list ) or isinstance ( o , dict ) : self . process ( o , parent = obj , parent_key = key , depth = depth + 1 ) if not dynamic or getattr ( obj , 'embed' , False ) : return serializer = obj . serializer name = serializer . get_plural_name ( ) instance = getattr ( obj , 'instance' , serializer . instance ) instance_pk = instance . pk if instance else None pk = getattr ( obj , 'pk_value' , instance_pk ) or instance_pk pk_key = repr ( pk ) seen = True if pk_key not in self . seen [ name ] : seen = False self . seen [ name ] . add ( pk_key ) if depth == 0 : return if name == self . plural_name : name = '%s%s' % ( settings . ADDITIONAL_PRIMARY_RESOURCE_PREFIX , name ) if not seen : if name not in self . data : self . data [ name ] = [ ] self . data [ name ] . append ( obj ) else : for o in self . data . get ( name , [ ] ) : if o . instance . pk == pk : o . update ( obj ) break if parent is not None and parent_key is not None : parent [ parent_key ] = pk | Recursively process the data for sideloading . |
10,822 | def _get_request_fields_from_parent ( self ) : if not self . parent : return None if not getattr ( self . parent , 'request_fields' ) : return None if not isinstance ( self . parent . request_fields , dict ) : return None return self . parent . request_fields . get ( self . field_name ) | Get request fields from the parent serializer . |
10,823 | def determine_metadata ( self , request , view ) : metadata = super ( DynamicMetadata , self ) . determine_metadata ( request , view ) metadata [ 'features' ] = getattr ( view , 'features' , [ ] ) if hasattr ( view , 'get_serializer' ) : serializer = view . get_serializer ( dynamic = False ) if hasattr ( serializer , 'get_name' ) : metadata [ 'resource_name' ] = serializer . get_name ( ) if hasattr ( serializer , 'get_plural_name' ) : metadata [ 'resource_name_plural' ] = serializer . get_plural_name ( ) metadata [ 'properties' ] = self . get_serializer_info ( serializer ) return metadata | Adds properties and features to the metadata response . |
10,824 | def get_field_info ( self , field ) : field_info = OrderedDict ( ) for attr in ( 'required' , 'read_only' , 'default' , 'label' ) : field_info [ attr ] = getattr ( field , attr ) if field_info [ 'default' ] is empty : field_info [ 'default' ] = None if hasattr ( field , 'immutable' ) : field_info [ 'immutable' ] = field . immutable field_info [ 'nullable' ] = field . allow_null if hasattr ( field , 'choices' ) : field_info [ 'choices' ] = [ { 'value' : choice_value , 'display_name' : force_text ( choice_name , strings_only = True ) } for choice_value , choice_name in field . choices . items ( ) ] many = False if isinstance ( field , DynamicRelationField ) : field = field . serializer if isinstance ( field , ListSerializer ) : field = field . child many = True if isinstance ( field , ModelSerializer ) : type = 'many' if many else 'one' field_info [ 'related_to' ] = field . get_plural_name ( ) else : type = self . label_lookup [ field ] field_info [ 'type' ] = type return field_info | Adds related_to and nullable to the metadata response . |
10,825 | def get_model_field ( model , field_name ) : meta = model . _meta try : if DJANGO19 : field = meta . get_field ( field_name ) else : field = meta . get_field_by_name ( field_name ) [ 0 ] return field except : if DJANGO19 : related_objs = ( f for f in meta . get_fields ( ) if ( f . one_to_many or f . one_to_one ) and f . auto_created and not f . concrete ) related_m2m_objs = ( f for f in meta . get_fields ( include_hidden = True ) if f . many_to_many and f . auto_created ) else : related_objs = meta . get_all_related_objects ( ) related_m2m_objs = meta . get_all_related_many_to_many_objects ( ) related_objects = { o . get_accessor_name ( ) : o for o in chain ( related_objs , related_m2m_objs ) } if field_name in related_objects : return related_objects [ field_name ] else : if hasattr ( meta , 'virtual_fields' ) : for field in meta . virtual_fields : if field . name == field_name : return field raise AttributeError ( '%s is not a valid field for %s' % ( field_name , model ) ) | Return a field given a model and field name . |
10,826 | def is_field_remote ( model , field_name ) : if not hasattr ( model , '_meta' ) : return False model_field = get_model_field ( model , field_name ) return isinstance ( model_field , ( ManyToManyField , RelatedObject ) ) | Check whether a given model field is a remote field . |
10,827 | def resettable_cached_property ( func ) : def wrapper ( self ) : if not hasattr ( self , '_resettable_cached_properties' ) : self . _resettable_cached_properties = { } if func . __name__ not in self . _resettable_cached_properties : self . _resettable_cached_properties [ func . __name__ ] = func ( self ) return self . _resettable_cached_properties [ func . __name__ ] return property ( wrapper ) | Decorator to add cached computed properties to an object . Similar to Django s cached_property decorator except stores all the data under a single well - known key so that it can easily be blown away . |
10,828 | def _settings_changed ( self , * args , ** kwargs ) : setting , value = kwargs [ 'setting' ] , kwargs [ 'value' ] if setting == self . name : self . _reload ( value ) | Handle changes to core settings . |
10,829 | def bind ( self , * args , ** kwargs ) : if self . bound : return super ( DynamicRelationField , self ) . bind ( * args , ** kwargs ) self . bound = True parent_model = getattr ( self . parent . Meta , 'model' , None ) remote = is_field_remote ( parent_model , self . source ) try : model_field = get_model_field ( parent_model , self . source ) except : model_field = None if 'required' not in self . kwargs and ( remote or ( model_field and ( model_field . has_default ( ) or model_field . null ) ) ) : self . required = False if 'allow_null' not in self . kwargs and getattr ( model_field , 'null' , False ) : self . allow_null = True self . model_field = model_field | Bind to the parent serializer . |
10,830 | def _inherit_parent_kwargs ( self , kwargs ) : if not self . parent or not self . _is_dynamic : return kwargs if 'request_fields' not in kwargs : request_fields = self . _get_request_fields_from_parent ( ) if request_fields is None : request_fields = True kwargs [ 'request_fields' ] = request_fields if self . embed and kwargs . get ( 'request_fields' ) is True : kwargs [ 'request_fields' ] = { } if hasattr ( self . parent , 'sideloading' ) : kwargs [ 'sideloading' ] = self . parent . sideloading if hasattr ( self . parent , 'debug' ) : kwargs [ 'debug' ] = self . parent . debug return kwargs | Extract any necessary attributes from parent serializer to propagate down to child serializer . |
10,831 | def get_serializer ( self , * args , ** kwargs ) : init_args = { k : v for k , v in six . iteritems ( self . kwargs ) if k in self . SERIALIZER_KWARGS } kwargs = self . _inherit_parent_kwargs ( kwargs ) init_args . update ( kwargs ) if self . embed and self . _is_dynamic : init_args [ 'embed' ] = True return self . _get_cached_serializer ( args , init_args ) | Get an instance of the child serializer . |
10,832 | def to_representation ( self , instance ) : serializer = self . serializer model = serializer . get_model ( ) source = self . source if not self . kwargs [ 'many' ] and serializer . id_only ( ) : source_id = '%s_id' % source if hasattr ( instance , source_id ) : return getattr ( instance , source_id ) elif model is not None : try : instance = getattr ( instance , source ) except model . DoesNotExist : instance = None if model is None : instance = getattr ( instance , source ) if instance is None : return None return serializer . to_representation ( instance ) | Represent the relationship either as an ID or object . |
10,833 | def to_internal_value_single ( self , data , serializer ) : related_model = serializer . Meta . model if isinstance ( data , related_model ) : return data try : instance = related_model . objects . get ( pk = data ) except related_model . DoesNotExist : raise ValidationError ( "Invalid value for '%s': %s object with ID=%s not found" % ( self . field_name , related_model . __name__ , data ) ) return instance | Return the underlying object given the serialized form . |
10,834 | def serializer_class ( self ) : serializer_class = self . _serializer_class if not isinstance ( serializer_class , six . string_types ) : return serializer_class parts = serializer_class . split ( '.' ) module_path = '.' . join ( parts [ : - 1 ] ) if not module_path : if getattr ( self , 'parent' , None ) is None : raise Exception ( "Can not load serializer '%s'" % serializer_class + ' before binding or without specifying full path' ) module_path = self . parent . __module__ module = importlib . import_module ( module_path ) serializer_class = getattr ( module , parts [ - 1 ] ) self . _serializer_class = serializer_class return serializer_class | Get the class of the child serializer . |
10,835 | def _get_django_queryset ( self ) : prefetches = [ ] for field , fprefetch in self . prefetches . items ( ) : has_query = hasattr ( fprefetch , 'query' ) qs = fprefetch . query . queryset if has_query else None prefetches . append ( Prefetch ( field , queryset = qs ) ) queryset = self . queryset if prefetches : queryset = queryset . prefetch_related ( * prefetches ) return queryset | Return Django QuerySet with prefetches properly configured . |
10,836 | def get_renderers ( self ) : renderers = super ( WithDynamicViewSetMixin , self ) . get_renderers ( ) if settings . ENABLE_BROWSABLE_API is False : return [ r for r in renderers if not isinstance ( r , BrowsableAPIRenderer ) ] else : return renderers | Optionally block Browsable API rendering . |
10,837 | def get_request_feature ( self , name ) : if '[]' in name : return self . request . query_params . getlist ( name ) if name in self . features else None elif '{}' in name : return self . _extract_object_params ( name ) if name in self . features else { } else : return self . request . query_params . get ( name ) if name in self . features else None | Parses the request for a particular feature . |
10,838 | def _extract_object_params ( self , name ) : params = self . request . query_params . lists ( ) params_map = { } prefix = name [ : - 1 ] offset = len ( prefix ) for name , value in params : if name . startswith ( prefix ) : if name . endswith ( '}' ) : name = name [ offset : - 1 ] elif name . endswith ( '}[]' ) : name = name [ offset : - 3 ] else : raise exceptions . ParseError ( '"%s" is not a well-formed filter key.' % name ) else : continue params_map [ name ] = value return params_map | Extract object params return as dict |
10,839 | def get_queryset ( self , queryset = None ) : serializer = self . get_serializer ( ) return getattr ( self , 'queryset' , serializer . Meta . model . objects . all ( ) ) | Returns a queryset for this request . |
10,840 | def get_request_fields ( self ) : if hasattr ( self , '_request_fields' ) : return self . _request_fields include_fields = self . get_request_feature ( self . INCLUDE ) exclude_fields = self . get_request_feature ( self . EXCLUDE ) request_fields = { } for fields , include in ( ( include_fields , True ) , ( exclude_fields , False ) ) : if fields is None : continue for field in fields : field_segments = field . split ( '.' ) num_segments = len ( field_segments ) current_fields = request_fields for i , segment in enumerate ( field_segments ) : last = i == num_segments - 1 if segment : if last : current_fields [ segment ] = include else : if segment not in current_fields : current_fields [ segment ] = { } current_fields = current_fields [ segment ] elif not last : raise exceptions . ParseError ( '"%s" is not a valid field.' % field ) self . _request_fields = request_fields return request_fields | Parses the INCLUDE and EXCLUDE features . |
10,841 | def update ( self , request , * args , ** kwargs ) : if self . ENABLE_BULK_UPDATE : patch_all = self . get_request_patch_all ( ) if self . ENABLE_PATCH_ALL and patch_all : data = request . data return self . _patch_all ( data , query = ( patch_all == 'query' ) ) else : partial = 'partial' in kwargs bulk_payload = self . _get_bulk_payload ( request ) if bulk_payload : return self . _bulk_update ( bulk_payload , partial ) try : return super ( DynamicModelViewSet , self ) . update ( request , * args , ** kwargs ) except AssertionError as e : err = str ( e ) if 'Fix your URL conf' in err : raise exceptions . ValidationError ( err ) else : raise | Update one or more model instances . |
10,842 | def create ( self , request , * args , ** kwargs ) : bulk_payload = self . _get_bulk_payload ( request ) if bulk_payload : return self . _create_many ( bulk_payload ) return super ( DynamicModelViewSet , self ) . create ( request , * args , ** kwargs ) | Either create a single or many model instances in bulk using the Serializer s many = True ability from Django REST > = 2 . 2 . 5 . |
10,843 | def destroy ( self , request , * args , ** kwargs ) : bulk_payload = self . _get_bulk_payload ( request ) if bulk_payload : return self . _destroy_many ( bulk_payload ) lookup_url_kwarg = self . lookup_url_kwarg or self . lookup_field if lookup_url_kwarg not in kwargs : return Response ( status = status . HTTP_405_METHOD_NOT_ALLOWED ) return super ( DynamicModelViewSet , self ) . destroy ( request , * args , ** kwargs ) | Either delete a single or many model instances in bulk |
10,844 | def get_resource_key ( self ) : model = self . get_model ( ) if model : return get_model_table ( model ) else : return self . get_name ( ) | Return canonical resource key usually the DB table name . |
10,845 | def data ( self ) : data = super ( DynamicListSerializer , self ) . data processed_data = ReturnDict ( SideloadingProcessor ( self , data ) . data , serializer = self ) if self . child . envelope else ReturnList ( data , serializer = self ) processed_data = post_process ( processed_data ) return processed_data | Get the data after performing post - processing if necessary . |
10,846 | def _dynamic_init ( self , only_fields , include_fields , exclude_fields ) : if not self . dynamic : return if ( isinstance ( self . request_fields , dict ) and self . request_fields . pop ( '*' , None ) is False ) : exclude_fields = '*' only_fields = set ( only_fields or [ ] ) include_fields = include_fields or [ ] exclude_fields = exclude_fields or [ ] if only_fields : exclude_fields = '*' include_fields = only_fields if exclude_fields == '*' : include_fields = set ( list ( include_fields ) + [ field for field , val in six . iteritems ( self . request_fields ) if val or val == { } ] ) all_fields = set ( self . get_all_fields ( ) . keys ( ) ) exclude_fields = all_fields - include_fields elif include_fields == '*' : all_fields = set ( self . get_all_fields ( ) . keys ( ) ) include_fields = all_fields for name in exclude_fields : self . request_fields [ name ] = False for name in include_fields : if not isinstance ( self . request_fields . get ( name ) , dict ) : self . request_fields [ name ] = True | Modifies request_fields via higher - level dynamic field interfaces . |
10,847 | def get_name ( cls ) : if not hasattr ( cls . Meta , 'name' ) : class_name = getattr ( cls . get_model ( ) , '__name__' , None ) setattr ( cls . Meta , 'name' , inflection . underscore ( class_name ) if class_name else None ) return cls . Meta . name | Get the serializer name . |
10,848 | def get_plural_name ( cls ) : if not hasattr ( cls . Meta , 'plural_name' ) : setattr ( cls . Meta , 'plural_name' , inflection . pluralize ( cls . get_name ( ) ) ) return cls . Meta . plural_name | Get the serializer s plural name . |
10,849 | def _all_fields ( self ) : if ( not settings . ENABLE_FIELDS_CACHE or not self . ENABLE_FIELDS_CACHE or self . __class__ not in FIELDS_CACHE ) : all_fields = super ( WithDynamicSerializerMixin , self ) . get_fields ( ) if ( settings . ENABLE_FIELDS_CACHE and self . ENABLE_FIELDS_CACHE ) : FIELDS_CACHE [ self . __class__ ] = all_fields else : all_fields = copy . copy ( FIELDS_CACHE [ self . __class__ ] ) for k , field in six . iteritems ( all_fields ) : if hasattr ( field , 'reset' ) : field . reset ( ) for k , field in six . iteritems ( all_fields ) : field . field_name = k field . parent = self return all_fields | Returns the entire serializer field set . |
10,850 | def get_fields ( self ) : all_fields = self . get_all_fields ( ) if self . dynamic is False : return all_fields if self . id_only ( ) : return { } serializer_fields = copy . deepcopy ( all_fields ) request_fields = self . request_fields deferred = self . _get_deferred_field_names ( serializer_fields ) if request_fields : for name , include in six . iteritems ( request_fields ) : if name not in serializer_fields : raise exceptions . ParseError ( '"%s" is not a valid field name for "%s".' % ( name , self . get_name ( ) ) ) if include is not False and name in deferred : deferred . remove ( name ) elif include is False : deferred . add ( name ) for name in deferred : serializer_fields . pop ( name ) ro_fields = getattr ( self . Meta , 'read_only_fields' , [ ] ) self . flag_fields ( serializer_fields , ro_fields , 'read_only' , True ) pw_fields = getattr ( self . Meta , 'untrimmed_fields' , [ ] ) self . flag_fields ( serializer_fields , pw_fields , 'trim_whitespace' , False , ) immutable_field_names = self . _get_flagged_field_names ( serializer_fields , 'immutable' ) self . flag_fields ( serializer_fields , immutable_field_names , 'read_only' , value = False if self . get_request_method ( ) == 'POST' else True ) return serializer_fields | Returns the serializer s field set . |
10,851 | def _faster_to_representation ( self , instance ) : ret = { } fields = self . _readable_fields is_fast = isinstance ( instance , prefetch . FastObject ) id_fields = self . _readable_id_fields for field in fields : attribute = None if ( is_fast and not isinstance ( field , ( DynamicGenericRelationField , DynamicRelationField ) ) ) : if field in id_fields and field . source not in instance : attribute = instance . get ( field . source + '_id' ) ret [ field . field_name ] = attribute continue else : try : attribute = instance [ field . source ] except KeyError : if hasattr ( instance , field . source ) : attribute = getattr ( instance , field . source ) else : attribute = field . get_attribute ( instance ) print ( 'Missing %s from %s' % ( field . field_name , self . __class__ . __name__ ) ) else : try : attribute = field . get_attribute ( instance ) except SkipField : continue if attribute is None : ret [ field . field_name ] = None else : ret [ field . field_name ] = field . to_representation ( attribute ) return ret | Modified to_representation with optimizations . |
10,852 | def _to_representation ( self , instance ) : if self . enable_optimization : representation = self . _faster_to_representation ( instance ) else : representation = super ( WithDynamicSerializerMixin , self ) . to_representation ( instance ) if settings . ENABLE_LINKS : representation = merge_link_object ( self , representation , instance ) if self . debug : representation [ '_meta' ] = { 'id' : instance . pk , 'type' : self . get_plural_name ( ) } return tag_dict ( representation , serializer = self , instance = instance , embed = self . embed ) | Uncached to_representation . |
10,853 | def to_representation ( self , instance ) : if self . id_only ( ) : return instance . pk pk = getattr ( instance , 'pk' , None ) if not settings . ENABLE_SERIALIZER_OBJECT_CACHE or pk is None : return self . _to_representation ( instance ) else : if pk not in self . obj_cache : self . obj_cache [ pk ] = self . _to_representation ( instance ) return self . obj_cache [ pk ] | Modified to_representation method . Optionally may cache objects . |
10,854 | def save ( self , * args , ** kwargs ) : update = getattr ( self , 'instance' , None ) is not None instance = super ( WithDynamicSerializerMixin , self ) . save ( * args , ** kwargs ) view = self . _context . get ( 'view' ) if view and update : if int ( DRF_VERSION [ 0 ] ) <= 3 and int ( DRF_VERSION [ 1 ] ) < 5 : instance = self . instance = view . get_object ( ) return instance | Serializer save that address prefetch issues . |
10,855 | def to_representation ( self , instance ) : if not isinstance ( instance , dict ) : data = super ( DynamicEphemeralSerializer , self ) . to_representation ( instance ) else : data = instance instance = EphemeralObject ( data ) if self . id_only ( ) : return data else : return tag_dict ( data , serializer = self , instance = instance ) | Provides post processing . Sub - classes should implement their own to_representation method but pass the resulting dict through this function to get tagging and field selection . |
10,856 | def get_directory ( request ) : def get_url ( url ) : return reverse ( url , request = request ) if url else url def is_active_url ( path , url ) : return path . startswith ( url ) if url and path else False path = request . path directory_list = [ ] def sort_key ( r ) : return r [ 0 ] for group_name , endpoints in sorted ( six . iteritems ( directory ) , key = sort_key ) : endpoints_list = [ ] for endpoint_name , endpoint in sorted ( six . iteritems ( endpoints ) , key = sort_key ) : if endpoint_name [ : 1 ] == '_' : continue endpoint_url = get_url ( endpoint . get ( '_url' , None ) ) active = is_active_url ( path , endpoint_url ) endpoints_list . append ( ( endpoint_name , endpoint_url , [ ] , active ) ) url = get_url ( endpoints . get ( '_url' , None ) ) active = is_active_url ( path , url ) directory_list . append ( ( group_name , url , endpoints_list , active ) ) return directory_list | Get API directory as a nested list of lists . |
10,857 | def get_api_root_view ( self , ** kwargs ) : class API ( views . APIView ) : _ignore_model_permissions = True def get ( self , request , * args , ** kwargs ) : directory_list = get_directory ( request ) result = OrderedDict ( ) for group_name , url , endpoints , _ in directory_list : if url : result [ group_name ] = url else : group = OrderedDict ( ) for endpoint_name , url , _ , _ in endpoints : group [ endpoint_name ] = url result [ group_name ] = group return Response ( result ) return API . as_view ( ) | Return API root view using the global directory . |
10,858 | def register ( self , prefix , viewset , base_name = None ) : if base_name is None : base_name = prefix super ( DynamicRouter , self ) . register ( prefix , viewset , base_name ) prefix_parts = prefix . split ( '/' ) if len ( prefix_parts ) > 1 : prefix = prefix_parts [ 0 ] endpoint = '/' . join ( prefix_parts [ 1 : ] ) else : endpoint = prefix prefix = None if prefix and prefix not in directory : current = directory [ prefix ] = { } else : current = directory . get ( prefix , directory ) list_name = self . routes [ 0 ] . name url_name = list_name . format ( basename = base_name ) if endpoint not in current : current [ endpoint ] = { } current [ endpoint ] [ '_url' ] = url_name current [ endpoint ] [ '_viewset' ] = viewset | Add any registered route into a global API directory . |
10,859 | def register_resource ( self , viewset , namespace = None ) : try : serializer = viewset . serializer_class ( ) resource_key = serializer . get_resource_key ( ) resource_name = serializer . get_name ( ) path_name = serializer . get_plural_name ( ) except : import traceback traceback . print_exc ( ) raise Exception ( "Failed to extract resource name from viewset: '%s'." " It, or its serializer, may not be DREST-compatible." % ( viewset ) ) if namespace : namespace = namespace . rstrip ( '/' ) + '/' base_path = namespace or '' base_path = r'%s' % base_path + path_name self . register ( base_path , viewset ) if resource_key in resource_map : raise Exception ( "The resource '%s' has already been mapped to '%s'." " Each resource can only be mapped to one canonical" " path. " % ( resource_key , resource_map [ resource_key ] [ 'path' ] ) ) resource_map [ resource_key ] = { 'path' : base_path , 'viewset' : viewset } if resource_name in resource_name_map : resource_key = resource_name_map [ resource_name ] raise Exception ( "The resource name '%s' has already been mapped to '%s'." " A resource name can only be used once." % ( resource_name , resource_map [ resource_key ] [ 'path' ] ) ) resource_name_map [ resource_name ] = resource_key | Register a viewset that should be considered the canonical endpoint for a particular resource . In addition to generating and registering the route it adds the route in a reverse map to allow DREST to build the canonical URL for a given resource . |
10,860 | def get_canonical_path ( resource_key , pk = None ) : if resource_key not in resource_map : return None base_path = get_script_prefix ( ) + resource_map [ resource_key ] [ 'path' ] if pk : return '%s/%s/' % ( base_path , pk ) else : return base_path | Return canonical resource path . |
10,861 | def get_canonical_serializer ( resource_key , model = None , instance = None , resource_name = None ) : if model : resource_key = get_model_table ( model ) elif instance : resource_key = instance . _meta . db_table elif resource_name : resource_key = resource_name_map [ resource_name ] if resource_key not in resource_map : return None return resource_map [ resource_key ] [ 'viewset' ] . serializer_class | Return canonical serializer for a given resource name . |
10,862 | def get_relation_routes ( self , viewset ) : routes = [ ] if not hasattr ( viewset , 'serializer_class' ) : return routes if not hasattr ( viewset , 'list_related' ) : return routes serializer = viewset . serializer_class ( ) fields = getattr ( serializer , 'get_link_fields' , lambda : [ ] ) ( ) route_name = '{basename}-{methodnamehyphen}' for field_name , field in six . iteritems ( fields ) : methodname = 'list_related' url = ( r'^{prefix}/{lookup}/(?P<field_name>%s)' '{trailing_slash}$' % field_name ) routes . append ( Route ( url = url , mapping = { 'get' : methodname } , name = replace_methodname ( route_name , field_name ) , initkwargs = { } ) ) return routes | Generate routes to serve relational objects . This method will add a sub - URL for each relational field . |
10,863 | def get_paths ( self ) : paths = [ ] for key , child in six . iteritems ( self ) : if isinstance ( child , TreeMap ) and child : for path in child . get_paths ( ) : path . insert ( 0 , key ) paths . append ( path ) else : paths . append ( [ key ] ) return paths | Get all paths from the root to the leaves . |
10,864 | def insert ( self , parts , leaf_value , update = False ) : tree = self if not parts : return tree cur = tree last = len ( parts ) - 1 for i , part in enumerate ( parts ) : if part not in cur : cur [ part ] = TreeMap ( ) if i != last else leaf_value elif i == last : if update : cur [ part ] . update ( leaf_value ) else : cur [ part ] = leaf_value cur = cur [ part ] return self | Add a list of nodes into the tree . |
10,865 | def tag_dict ( obj , * args , ** kwargs ) : if isinstance ( obj , OrderedDict ) : return _TaggedOrderedDict ( obj , * args , ** kwargs ) else : return _TaggedPlainDict ( obj , * args , ** kwargs ) | Create a TaggedDict instance . Will either be a TaggedOrderedDict or TaggedPlainDict depending on the type of obj . |
10,866 | def has_joins ( queryset ) : for join in six . itervalues ( queryset . query . alias_map ) : if join . join_type : return True return False | Return True iff . a queryset includes joins . |
10,867 | def generate_query_key ( self , serializer ) : rewritten = [ ] last = len ( self . field ) - 1 s = serializer field = None for i , field_name in enumerate ( self . field ) : fields = s . fields if field_name not in fields : fields = getattr ( s , 'get_all_fields' , lambda : { } ) ( ) if field_name == 'pk' : rewritten . append ( 'pk' ) continue if field_name not in fields : raise ValidationError ( "Invalid filter field: %s" % field_name ) field = fields [ field_name ] model_field_name = field . source or field_name model_field = get_model_field ( s . get_model ( ) , model_field_name ) if isinstance ( model_field , RelatedObject ) : model_field_name = model_field . field . related_query_name ( ) rewritten . append ( model_field_name ) if i == last : break s = getattr ( field , 'serializer' , None ) if isinstance ( s , serializers . ListSerializer ) : s = s . child if not s : raise ValidationError ( "Invalid nested filter field: %s" % field_name ) if self . operator : rewritten . append ( self . operator ) return ( '__' . join ( rewritten ) , field ) | Get the key that can be passed to Django s filter method . |
10,868 | def filter_queryset ( self , request , queryset , view ) : self . request = request self . view = view extra_filters = self . view . get_extra_filters ( request ) disable_prefetches = self . view . is_update ( ) self . DEBUG = settings . DEBUG return self . _build_queryset ( queryset = queryset , extra_filters = extra_filters , disable_prefetches = disable_prefetches , ) | Filter the queryset . |
10,869 | def _build_implicit_prefetches ( self , model , prefetches , requirements ) : for source , remainder in six . iteritems ( requirements ) : if not remainder or isinstance ( remainder , six . string_types ) : continue related_field = get_model_field ( model , source ) related_model = get_related_model ( related_field ) queryset = self . _build_implicit_queryset ( related_model , remainder ) if related_model else None prefetches [ source ] = self . _create_prefetch ( source , queryset ) return prefetches | Build a prefetch dictionary based on internal requirements . |
10,870 | def _build_implicit_queryset ( self , model , requirements ) : queryset = self . _make_model_queryset ( model ) prefetches = { } self . _build_implicit_prefetches ( model , prefetches , requirements ) prefetch = prefetches . values ( ) queryset = queryset . prefetch_related ( * prefetch ) . distinct ( ) if self . DEBUG : queryset . _using_prefetches = prefetches return queryset | Build a queryset based on implicit requirements . |
10,871 | def _build_requested_prefetches ( self , prefetches , requirements , model , fields , filters ) : for name , field in six . iteritems ( fields ) : original_field = field if isinstance ( field , DynamicRelationField ) : field = field . serializer if isinstance ( field , serializers . ListSerializer ) : field = field . child if not isinstance ( field , serializers . ModelSerializer ) : continue source = field . source or name if '.' in source : raise ValidationError ( 'nested relationship values ' 'are not supported' ) if source in prefetches : continue is_remote = is_field_remote ( model , source ) is_id_only = getattr ( field , 'id_only' , lambda : False ) ( ) if is_id_only and not is_remote : continue related_queryset = getattr ( original_field , 'queryset' , None ) if callable ( related_queryset ) : related_queryset = related_queryset ( field ) source = field . source or name required = requirements . pop ( source , None ) prefetch_queryset = self . _build_queryset ( serializer = field , filters = filters . get ( name , { } ) , queryset = related_queryset , requirements = required ) prefetches [ source ] = self . _create_prefetch ( source , prefetch_queryset ) return prefetches | Build a prefetch dictionary based on request requirements . |
10,872 | def _get_implicit_requirements ( self , fields , requirements ) : for name , field in six . iteritems ( fields ) : source = field . source requires = getattr ( field , 'requires' , None ) or [ source ] for require in requires : if not require : continue requirement = require . split ( '.' ) if requirement [ - 1 ] == '' : requirement [ - 1 ] = '*' requirements . insert ( requirement , TreeMap ( ) , update = True ) | Extract internal prefetch requirements from serializer fields . |
10,873 | def _build_queryset ( self , serializer = None , filters = None , queryset = None , requirements = None , extra_filters = None , disable_prefetches = False , ) : is_root_level = False if not serializer : serializer = self . view . get_serializer ( ) is_root_level = True queryset = self . _get_queryset ( queryset = queryset , serializer = serializer ) model = getattr ( serializer . Meta , 'model' , None ) if not model : return queryset prefetches = { } fields = serializer . fields if requirements is None : requirements = TreeMap ( ) self . _get_implicit_requirements ( fields , requirements ) if filters is None : filters = self . _get_requested_filters ( ) self . _build_requested_prefetches ( prefetches , requirements , model , fields , filters ) self . _build_implicit_prefetches ( model , prefetches , requirements ) if ( '*' not in requirements and not self . view . is_update ( ) and not self . view . is_delete ( ) ) : id_fields = getattr ( serializer , 'get_id_fields' , lambda : [ ] ) ( ) only = [ field for field in set ( id_fields + list ( requirements . keys ( ) ) ) if is_model_field ( model , field ) and not is_field_remote ( model , field ) ] queryset = queryset . only ( * only ) query = self . _filters_to_query ( includes = filters . get ( '_include' ) , excludes = filters . get ( '_exclude' ) , serializer = serializer ) if extra_filters : query = extra_filters if not query else extra_filters & query if query : try : queryset = queryset . filter ( query ) except InternalValidationError as e : raise ValidationError ( dict ( e ) if hasattr ( e , 'error_dict' ) else list ( e ) ) except Exception as e : err_msg = getattr ( e , 'message' , '' ) raise ValidationError ( err_msg ) if hasattr ( serializer , 'filter_queryset' ) : queryset = self . _serializer_filter ( serializer = serializer , queryset = queryset ) prefetch = prefetches . values ( ) if prefetch and not disable_prefetches : queryset = queryset . prefetch_related ( * prefetch ) elif isinstance ( queryset , Manager ) : queryset = queryset . all ( ) if has_joins ( queryset ) or not is_root_level : queryset = queryset . distinct ( ) if self . DEBUG : queryset . _using_prefetches = prefetches return queryset | Build a queryset that pulls in all data required by this request .
10,874 | def filter_queryset ( self , request , queryset , view ) : self . ordering_param = view . SORT ordering = self . get_ordering ( request , queryset , view ) if ordering : return queryset . order_by ( * ordering ) return queryset | Filter the queryset applying the ordering . |
10,875 | def get_ordering ( self , request , queryset , view ) : params = view . get_request_feature ( view . SORT ) if params : fields = [ param . strip ( ) for param in params ] valid_ordering , invalid_ordering = self . remove_invalid_fields ( queryset , fields , view ) if invalid_ordering : raise ValidationError ( "Invalid filter field: %s" % invalid_ordering ) else : return valid_ordering return self . get_default_ordering ( view ) | Return an ordering for a given request . |
10,876 | def remove_invalid_fields ( self , queryset , fields , view ) : valid_orderings = [ ] invalid_orderings = [ ] for term in fields : stripped_term = term . lstrip ( '-' ) reverse_sort_term = '' if len ( stripped_term ) is len ( term ) else '-' ordering = self . ordering_for ( stripped_term , view ) if ordering : valid_orderings . append ( reverse_sort_term + ordering ) else : invalid_orderings . append ( term ) return valid_orderings , invalid_orderings | Remove invalid fields from an ordering . |
10,877 | def combine ( line , left , intersect , right ) : if left : yield left if intersect : try : for j , i in enumerate ( line , start = - len ( line ) + 1 ) : yield i if j : yield intersect except TypeError : try : item = next ( line ) except StopIteration : pass else : while True : yield item try : peek = next ( line ) except StopIteration : break yield intersect item = peek else : for i in line : yield i if right : yield right | Zip borders between items in line . |
10,878 | def build_row ( row , left , center , right ) : if not row or not row [ 0 ] : yield combine ( ( ) , left , center , right ) return for row_index in range ( len ( row [ 0 ] ) ) : yield combine ( ( c [ row_index ] for c in row ) , left , center , right ) | Combine single or multi - lined cells into a single row of list of lists including borders . |
10,879 | def run ( cls ) : project = __import__ ( IMPORT , fromlist = [ '' ] ) for expected , var in [ ( '@Robpol86' , '__author__' ) , ( LICENSE , '__license__' ) , ( VERSION , '__version__' ) ] : if getattr ( project , var ) != expected : raise SystemExit ( 'Mismatch: {0}' . format ( var ) ) if not re . compile ( r'^%s - \d{4}-\d{2}-\d{2}[\r\n]' % VERSION , re . MULTILINE ) . search ( readme ( ) ) : raise SystemExit ( 'Version not found in readme/changelog file.' ) if INSTALL_REQUIRES : contents = readme ( 'tox.ini' ) section = re . compile ( r'[\r\n]+install_requires =[\r\n]+(.+?)[\r\n]+\w' , re . DOTALL ) . findall ( contents ) if not section : raise SystemExit ( 'Missing install_requires section in tox.ini.' ) in_tox = re . findall ( r' ([^=]+)==[\w\d.-]+' , section [ 0 ] ) if INSTALL_REQUIRES != in_tox : raise SystemExit ( 'Missing/unordered pinned dependencies in tox.ini.' ) | Check variables . |
10,880 | def terminal_size ( kernel32 = None ) : if IS_WINDOWS : kernel32 = kernel32 or ctypes . windll . kernel32 try : return get_console_info ( kernel32 , kernel32 . GetStdHandle ( STD_ERROR_HANDLE ) ) except OSError : try : return get_console_info ( kernel32 , kernel32 . GetStdHandle ( STD_OUTPUT_HANDLE ) ) except OSError : return DEFAULT_WIDTH , DEFAULT_HEIGHT try : device = __import__ ( 'fcntl' ) . ioctl ( 0 , __import__ ( 'termios' ) . TIOCGWINSZ , '\0\0\0\0\0\0\0\0' ) except IOError : return DEFAULT_WIDTH , DEFAULT_HEIGHT height , width = struct . unpack ( 'hhhh' , device ) [ : 2 ] return width , height | Get the width and height of the terminal . |
10,881 | def set_terminal_title ( title , kernel32 = None ) : try : title_bytes = title . encode ( 'utf-8' ) except AttributeError : title_bytes = title if IS_WINDOWS : kernel32 = kernel32 or ctypes . windll . kernel32 try : is_ascii = all ( ord ( c ) < 128 for c in title ) except TypeError : is_ascii = all ( c < 128 for c in title ) if is_ascii : return kernel32 . SetConsoleTitleA ( title_bytes ) != 0 else : return kernel32 . SetConsoleTitleW ( title ) != 0 sys . stdout . write ( b'\033]0;' + title_bytes + b'\007' ) return True | Set the terminal title . |
10,882 | def table_abcd ( ) : table_instance = SingleTable ( [ [ 'A' , 'B' ] , [ 'C' , 'D' ] ] ) table_instance . outer_border = False table_inner_borders = table_instance . table . splitlines ( ) table_instance . outer_border = True table_instance . inner_heading_row_border = False table_instance . inner_column_border = False table_outer_borders = table_instance . table . splitlines ( ) smallest , largest = sorted ( [ table_inner_borders , table_outer_borders ] , key = len ) smallest += [ '' ] * ( len ( largest ) - len ( smallest ) ) combined = list ( ) for i , row in enumerate ( largest ) : combined . append ( row . ljust ( 10 ) + ' ' + smallest [ i ] ) return '\n' . join ( combined ) | Return table string to be printed . Two tables on one line . |
10,883 | def column_max_width ( self , column_number ) : inner_widths = max_dimensions ( self . table_data ) [ 0 ] outer_border = 2 if self . outer_border else 0 inner_border = 1 if self . inner_column_border else 0 padding = self . padding_left + self . padding_right return column_max_width ( inner_widths , column_number , outer_border , inner_border , padding ) | Return the maximum width of a column based on the current terminal width . |
10,884 | def table_width ( self ) : outer_widths = max_dimensions ( self . table_data , self . padding_left , self . padding_right ) [ 2 ] outer_border = 2 if self . outer_border else 0 inner_border = 1 if self . inner_column_border else 0 return table_width ( outer_widths , outer_border , inner_border ) | Return the width of the table including padding and borders . |
10,885 | def horizontal_border ( self , _ , outer_widths ) : horizontal = str ( self . CHAR_INNER_HORIZONTAL ) left = self . CHAR_OUTER_LEFT_VERTICAL intersect = self . CHAR_INNER_VERTICAL right = self . CHAR_OUTER_RIGHT_VERTICAL columns = list ( ) for i , width in enumerate ( outer_widths ) : justify = self . justify_columns . get ( i ) width = max ( 3 , width ) if justify == 'left' : columns . append ( ':' + horizontal * ( width - 1 ) ) elif justify == 'right' : columns . append ( horizontal * ( width - 1 ) + ':' ) elif justify == 'center' : columns . append ( ':' + horizontal * ( width - 2 ) + ':' ) else : columns . append ( horizontal * width ) return combine ( columns , left , intersect , right ) | Handle the GitHub heading border . |
10,886 | def visible_width ( string ) : if '\033' in string : string = RE_COLOR_ANSI . sub ( '' , string ) try : string = string . decode ( 'u8' ) except ( AttributeError , UnicodeEncodeError ) : pass width = 0 for char in string : if unicodedata . east_asian_width ( char ) in ( 'F' , 'W' ) : width += 2 else : width += 1 return width | Get the visible width of a unicode string . |
10,887 | def align_and_pad_cell ( string , align , inner_dimensions , padding , space = ' ' ) : if not hasattr ( string , 'splitlines' ) : string = str ( string ) lines = string . splitlines ( ) or [ '' ] if string . endswith ( '\n' ) : lines . append ( '' ) if 'bottom' in align : lines = ( [ '' ] * ( inner_dimensions [ 1 ] - len ( lines ) + padding [ 2 ] ) ) + lines + ( [ '' ] * padding [ 3 ] ) elif 'middle' in align : delta = inner_dimensions [ 1 ] - len ( lines ) lines = ( [ '' ] * ( delta // 2 + delta % 2 + padding [ 2 ] ) ) + lines + ( [ '' ] * ( delta // 2 + padding [ 3 ] ) ) else : lines = ( [ '' ] * padding [ 2 ] ) + lines + ( [ '' ] * ( inner_dimensions [ 1 ] - len ( lines ) + padding [ 3 ] ) ) for i , line in enumerate ( lines ) : new_width = inner_dimensions [ 0 ] + len ( line ) - visible_width ( line ) if 'right' in align : lines [ i ] = line . rjust ( padding [ 0 ] + new_width , space ) + ( space * padding [ 1 ] ) elif 'center' in align : lines [ i ] = ( space * padding [ 0 ] ) + line . center ( new_width , space ) + ( space * padding [ 1 ] ) else : lines [ i ] = ( space * padding [ 0 ] ) + line . ljust ( new_width + padding [ 1 ] , space ) return lines | Align a string horizontally and vertically . Also add additional padding in both dimensions . |
10,888 | def max_dimensions ( table_data , padding_left = 0 , padding_right = 0 , padding_top = 0 , padding_bottom = 0 ) : inner_widths = [ 0 ] * ( max ( len ( r ) for r in table_data ) if table_data else 0 ) inner_heights = [ 0 ] * len ( table_data ) for j , row in enumerate ( table_data ) : for i , cell in enumerate ( row ) : if not hasattr ( cell , 'count' ) or not hasattr ( cell , 'splitlines' ) : cell = str ( cell ) if not cell : continue inner_heights [ j ] = max ( inner_heights [ j ] , cell . count ( '\n' ) + 1 ) inner_widths [ i ] = max ( inner_widths [ i ] , * [ visible_width ( l ) for l in cell . splitlines ( ) ] ) outer_widths = [ padding_left + i + padding_right for i in inner_widths ] outer_heights = [ padding_top + i + padding_bottom for i in inner_heights ] return inner_widths , inner_heights , outer_widths , outer_heights | Get maximum widths of each column and maximum height of each row . |
10,889 | def column_max_width ( inner_widths , column_number , outer_border , inner_border , padding ) : column_count = len ( inner_widths ) terminal_width = terminal_size ( ) [ 0 ] non_data_space = outer_border non_data_space += inner_border * ( column_count - 1 ) non_data_space += column_count * padding data_space = sum ( inner_widths ) - inner_widths [ column_number ] return terminal_width - data_space - non_data_space | Determine the maximum width of a column based on the current terminal width . |
10,890 | def table_width ( outer_widths , outer_border , inner_border ) : column_count = len ( outer_widths ) non_data_space = outer_border if column_count : non_data_space += inner_border * ( column_count - 1 ) data_space = sum ( outer_widths ) return data_space + non_data_space | Determine the width of the entire table including borders and padding . |
10,891 | def horizontal_border ( self , style , outer_widths ) : if style == 'top' : horizontal = self . CHAR_OUTER_TOP_HORIZONTAL left = self . CHAR_OUTER_TOP_LEFT intersect = self . CHAR_OUTER_TOP_INTERSECT if self . inner_column_border else '' right = self . CHAR_OUTER_TOP_RIGHT title = self . title elif style == 'bottom' : horizontal = self . CHAR_OUTER_BOTTOM_HORIZONTAL left = self . CHAR_OUTER_BOTTOM_LEFT intersect = self . CHAR_OUTER_BOTTOM_INTERSECT if self . inner_column_border else '' right = self . CHAR_OUTER_BOTTOM_RIGHT title = None elif style == 'heading' : horizontal = self . CHAR_H_INNER_HORIZONTAL left = self . CHAR_H_OUTER_LEFT_INTERSECT if self . outer_border else '' intersect = self . CHAR_H_INNER_INTERSECT if self . inner_column_border else '' right = self . CHAR_H_OUTER_RIGHT_INTERSECT if self . outer_border else '' title = None elif style == 'footing' : horizontal = self . CHAR_F_INNER_HORIZONTAL left = self . CHAR_F_OUTER_LEFT_INTERSECT if self . outer_border else '' intersect = self . CHAR_F_INNER_INTERSECT if self . inner_column_border else '' right = self . CHAR_F_OUTER_RIGHT_INTERSECT if self . outer_border else '' title = None else : horizontal = self . CHAR_INNER_HORIZONTAL left = self . CHAR_OUTER_LEFT_INTERSECT if self . outer_border else '' intersect = self . CHAR_INNER_INTERSECT if self . inner_column_border else '' right = self . CHAR_OUTER_RIGHT_INTERSECT if self . outer_border else '' title = None return build_border ( outer_widths , horizontal , left , intersect , right , title ) | Build any kind of horizontal border for the table . |
10,892 | def gen_row_lines ( self , row , style , inner_widths , height ) : r cells_in_row = list ( ) if len ( row ) != len ( inner_widths ) : row = row + [ '' ] * ( len ( inner_widths ) - len ( row ) ) for i , cell in enumerate ( row ) : align = ( self . justify_columns . get ( i ) , ) inner_dimensions = ( inner_widths [ i ] , height ) padding = ( self . padding_left , self . padding_right , 0 , 0 ) cells_in_row . append ( align_and_pad_cell ( cell , align , inner_dimensions , padding ) ) if style == 'heading' : left = self . CHAR_H_OUTER_LEFT_VERTICAL if self . outer_border else '' center = self . CHAR_H_INNER_VERTICAL if self . inner_column_border else '' right = self . CHAR_H_OUTER_RIGHT_VERTICAL if self . outer_border else '' elif style == 'footing' : left = self . CHAR_F_OUTER_LEFT_VERTICAL if self . outer_border else '' center = self . CHAR_F_INNER_VERTICAL if self . inner_column_border else '' right = self . CHAR_F_OUTER_RIGHT_VERTICAL if self . outer_border else '' else : left = self . CHAR_OUTER_LEFT_VERTICAL if self . outer_border else '' center = self . CHAR_INNER_VERTICAL if self . inner_column_border else '' right = self . CHAR_OUTER_RIGHT_VERTICAL if self . outer_border else '' for line in build_row ( cells_in_row , left , center , right ) : yield line | r Combine cells in row and group them into lines with vertical borders . |
10,893 | def simple_atmo_opstring ( haze , contrast , bias ) : gamma_b = 1 - haze gamma_g = 1 - ( haze / 3.0 ) ops = ( "gamma g {gamma_g}, " "gamma b {gamma_b}, " "sigmoidal rgb {contrast} {bias}" ) . format ( gamma_g = gamma_g , gamma_b = gamma_b , contrast = contrast , bias = bias ) return ops | Make a simple atmospheric correction formula . |
10,894 | def _op_factory ( func , kwargs , opname , bands , rgb_op = False ) : def f ( arr ) : newarr = arr . copy ( ) if rgb_op : newarr [ 0 : 3 ] = func ( newarr [ 0 : 3 ] , ** kwargs ) else : for b in bands : newarr [ b - 1 ] = func ( arr [ b - 1 ] , ** kwargs ) return newarr f . __name__ = str ( opname ) return f | create an operation function closure don t call directly use parse_operations returns a function which itself takes and returns ndarrays |
10,895 | def parse_operations ( ops_string ) : band_lookup = { "r" : 1 , "g" : 2 , "b" : 3 } count = len ( band_lookup ) opfuncs = { "saturation" : saturation , "sigmoidal" : sigmoidal , "gamma" : gamma } opkwargs = { "saturation" : ( "proportion" , ) , "sigmoidal" : ( "contrast" , "bias" ) , "gamma" : ( "g" , ) , } rgb_ops = ( "saturation" , ) tokens = [ x . strip ( ) for x in ops_string . replace ( "," , "" ) . split ( " " ) ] operations = [ ] current = [ ] for token in tokens : if token . lower ( ) in opfuncs . keys ( ) : if len ( current ) > 0 : operations . append ( current ) current = [ ] current . append ( token . lower ( ) ) if len ( current ) > 0 : operations . append ( current ) result = [ ] for parts in operations : opname = parts [ 0 ] bandstr = parts [ 1 ] args = parts [ 2 : ] try : func = opfuncs [ opname ] except KeyError : raise ValueError ( "{} is not a valid operation" . format ( opname ) ) if opname in rgb_ops : args = [ bandstr ] + args bands = ( 1 , 2 , 3 ) else : bands = set ( ) for bs in bandstr : try : band = int ( bs ) except ValueError : band = band_lookup [ bs . lower ( ) ] if band < 1 or band > count : raise ValueError ( "{} BAND must be between 1 and {}" . format ( opname , count ) ) bands . add ( band ) args = [ float ( arg ) for arg in args ] kwargs = dict ( zip ( opkwargs [ opname ] , args ) ) f = _op_factory ( func = func , kwargs = kwargs , opname = opname , bands = bands , rgb_op = ( opname in rgb_ops ) , ) result . append ( f ) return result | Takes a string of operations written with a handy DSL |
10,896 | def check_jobs ( jobs ) : if jobs == 0 : raise click . UsageError ( "Jobs must be >= 1 or == -1" ) elif jobs < 0 : import multiprocessing jobs = multiprocessing . cpu_count ( ) return jobs | Validate number of jobs . |
10,897 | def to_math_type ( arr ) : max_int = np . iinfo ( arr . dtype ) . max return arr . astype ( math_type ) / max_int | Convert an array from native integer dtype range to 0 .. 1 scaling down linearly |
10,898 | def scale_dtype ( arr , dtype ) : max_int = np . iinfo ( dtype ) . max return ( arr * max_int ) . astype ( dtype ) | Convert an array from 0 .. 1 to dtype scaling up linearly |
10,899 | def magick_to_rio ( convert_opts ) : ops = [ ] bands = None def set_band ( x ) : global bands if x . upper ( ) == "RGB" : x = "RGB" bands = x . upper ( ) set_band ( "RGB" ) def append_sig ( arg ) : global bands args = list ( filter ( None , re . split ( "[,x]+" , arg ) ) ) if len ( args ) == 1 : args . append ( 0.5 ) elif len ( args ) == 2 : args [ 1 ] = float ( args [ 1 ] . replace ( "%" , "" ) ) / 100.0 ops . append ( "sigmoidal {} {} {}" . format ( bands , * args ) ) def append_gamma ( arg ) : global bands ops . append ( "gamma {} {}" . format ( bands , arg ) ) def append_sat ( arg ) : args = list ( filter ( None , re . split ( "[,x]+" , arg ) ) ) prop = float ( args [ 1 ] ) / 100 ops . append ( "saturation {}" . format ( prop ) ) nextf = None for part in convert_opts . strip ( ) . split ( " " ) : if part == "-channel" : nextf = set_band elif part == "+channel" : set_band ( "RGB" ) nextf = None elif part == "-sigmoidal-contrast" : nextf = append_sig elif part == "-gamma" : nextf = append_gamma elif part == "-modulate" : nextf = append_sat else : if nextf : nextf ( part ) nextf = None return " " . join ( ops ) | Translate a limited subset of imagemagick convert commands to rio color operations |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.