idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
56,600 | def update ( self , ** kwargs ) : u for key , value in kwargs . items ( ) : helper = helpers . CAST_DICT . get ( type ( value ) , str ) tag = self . _get_aliases ( ) . get ( key , key ) elements = list ( self . _xml . iterchildren ( tag = tag ) ) if elements : for element in elements : element . text = helper ( value ) else : element = etree . Element ( key ) element . text = helper ( value ) self . _xml . append ( element ) self . _aliases = None | u Updating or creation of new simple nodes . |
56,601 | def sget ( self , path , default = NONE_NODE ) : u attrs = str ( path ) . split ( "." ) text_or_attr = None last_attr = attrs [ - 1 ] if last_attr == '#text' or last_attr . startswith ( '@' ) : text_or_attr = last_attr [ 1 : ] attrs = attrs [ : - 1 ] if default is NONE_NODE : default = None my_object = self for attr in attrs : try : if isinstance ( my_object , ( list , tuple ) ) and re . match ( '^\-?\d+$' , attr ) : my_object_next = my_object [ int ( attr ) ] else : my_object_next = getattr ( my_object , attr ) my_object = my_object_next except ( AttributeError , KeyError , IndexError ) : return default if text_or_attr : try : return my_object . getattr ( text_or_attr ) except AttributeError : return None else : return my_object | u Enables access to nodes if one or more of them don t exist . |
56,602 | def create ( self , tag , value ) : u child_tags = { child . tag for child in self . _xml } if tag in child_tags : raise KeyError ( 'Node {} already exists in XML tree.' . format ( tag ) ) self . set ( tag , value ) | u Creates a node if it doesn t exist yet . |
56,603 | def set ( self , name , value ) : u try : element = next ( self . _xml . iterchildren ( tag = name ) ) except StopIteration : element = etree . SubElement ( self . _xml , name ) if isinstance ( value , dict ) : self . assign_dict ( element , value ) elif isinstance ( value , ( list , tuple , set ) ) : self . assign_sequence_or_set ( element , value ) else : self . assign_literal ( element , value ) self . _aliases = None | u Assigns a new XML structure to the node . |
56,604 | def assign_dict ( self , node , xml_dict ) : new_node = etree . Element ( node . tag ) self . _xml . replace ( node , new_node ) helpers . dict_to_etree ( xml_dict , new_node ) | Assigns a Python dict to a lxml node . |
56,605 | def assign_literal ( element , value ) : u helper = helpers . CAST_DICT . get ( type ( value ) , str ) element . clear ( ) element . text = helper ( value ) | u Assigns a literal . |
56,606 | def to_dict ( self , ** kw ) : u _ , value = helpers . etree_to_dict ( self . _xml , ** kw ) . popitem ( ) return value | u Converts the lxml object to a dict . |
56,607 | def _get_aliases ( self ) : u if self . _aliases is None : self . _aliases = { } if self . _xml is not None : for child in self . _xml . iterchildren ( ) : self . _aliases [ helpers . normalize_tag ( child . tag ) ] = child . tag return self . _aliases | u Creates a dict with aliases . |
56,608 | def xpath ( self , path , namespaces = None , regexp = False , smart_strings = True , single_use = False , ) : u if ( namespaces in [ 'exslt' , 're' ] or ( regexp and not namespaces ) ) : namespaces = { 're' : "http://exslt.org/regular-expressions" } if single_use : node = self . _xml . xpath ( path ) else : xpe = self . xpath_evaluator ( namespaces = namespaces , regexp = regexp , smart_strings = smart_strings ) node = xpe ( path ) if len ( node ) == 1 : node = node [ 0 ] if len ( node ) : return self . __class__ ( node ) else : return Literal ( node ) return node | u Executes XPath query on the lxml object and returns a correct object . |
56,609 | def xpath_evaluator ( self , namespaces = None , regexp = False , smart_strings = True ) : u return etree . XPathEvaluator ( self . _xml , namespaces = namespaces , regexp = regexp , smart_strings = smart_strings ) | u Creates an XPathEvaluator instance for an ElementTree or an Element . |
56,610 | def get_last_modified_date ( * args , ** kwargs ) : try : latest_note = Note . objects . latest ( ) latest_release = Release . objects . latest ( ) except ObjectDoesNotExist : return None return max ( latest_note . modified , latest_release . modified ) | Returns the date of the last modified Note or Release . |
56,611 | def using_ios_stash ( ) : print ( 'detected install path:' ) print ( os . path . dirname ( __file__ ) ) module_names = set ( sys . modules . keys ( ) ) return 'stash' in module_names or 'stash.system' in module_names | returns true if sys path hints the install is running on ios |
56,612 | def get_partition_scores ( image , min_w = 1 , min_h = 1 ) : h , w = image . shape [ : 2 ] if w == 0 or h == 0 : return [ ] area = h * w cnz = numpy . count_nonzero total = cnz ( image ) if total == 0 or area == total : return [ ] if h < min_h * 2 : y_c = [ ] else : y_c = [ ( - abs ( ( count / ( ( h - y ) * w ) ) - ( ( total - count ) / ( y * w ) ) ) , y , 0 ) for count , y in ( ( cnz ( image [ y : ] ) , y ) for y in range ( min_h , image . shape [ 0 ] - min_h ) ) ] if w < min_w * 2 : x_c = [ ] else : x_c = [ ( - abs ( ( count / ( h * ( w - x ) ) ) - ( ( total - count ) / ( h * x ) ) ) , x , 1 ) for count , x in ( ( cnz ( image [ : , x : ] ) , x ) for x in range ( min_w , image . shape [ 1 ] - min_w ) ) ] return sorted ( x_c + y_c ) | Return list of best to worst binary splits along the x and y axis . |
56,613 | def __init_defaults ( self , config ) : provider = self . __provider if provider == 'sqlite' : config . setdefault ( 'dbname' , ':memory:' ) config . setdefault ( 'create_db' , True ) elif provider == 'mysql' : config . setdefault ( 'port' , 3306 ) config . setdefault ( 'charset' , 'utf8' ) elif provider == 'postgres' : config . setdefault ( 'port' , 5432 ) elif provider == 'oracle' : config . setdefault ( 'port' , 1521 ) else : raise ValueError ( 'Unsupported provider "{}"' . format ( provider ) ) if provider != 'sqlite' : config . setdefault ( 'host' , 'localhost' ) config . setdefault ( 'user' , None ) config . setdefault ( 'password' , None ) config . setdefault ( 'dbname' , None ) | Initializes the default connection settings . |
56,614 | async def newnations ( self , root ) : return [ aionationstates . Nation ( n ) for n in root . find ( 'NEWNATIONS' ) . text . split ( ',' ) ] | Most recently founded nations from newest . |
56,615 | async def regions ( self , root ) : return [ aionationstates . Region ( r ) for r in root . find ( 'REGIONS' ) . text . split ( ',' ) ] | List of all the regions seemingly in order of creation . |
56,616 | def regionsbytag ( self , * tags ) : if len ( tags ) > 10 : raise ValueError ( 'You can specify up to 10 tags' ) if not tags : raise ValueError ( 'No tags specified' ) @ api_query ( 'regionsbytag' , tags = ',' . join ( tags ) ) async def result ( _ , root ) : text = root . find ( 'REGIONS' ) . text return ( [ aionationstates . Region ( r ) for r in text . split ( ',' ) ] if text else [ ] ) return result ( self ) | All regions with any of the named tags . |
56,617 | def dispatch ( self , id ) : @ api_query ( 'dispatch' , dispatchid = str ( id ) ) async def result ( _ , root ) : elem = root . find ( 'DISPATCH' ) if not elem : raise NotFound ( f'No dispatch found with id {id}' ) return Dispatch ( elem ) return result ( self ) | Dispatch by id . |
56,618 | def dispatchlist ( self , * , author = None , category = None , subcategory = None , sort = 'new' ) : params = { 'sort' : sort } if author : params [ 'dispatchauthor' ] = author if category and subcategory : if ( category not in dispatch_categories or subcategory not in dispatch_categories [ category ] ) : raise ValueError ( 'Invalid category/subcategory' ) params [ 'dispatchcategory' ] = f'{category}:{subcategory}' elif category : if category not in dispatch_categories : raise ValueError ( 'Invalid category' ) params [ 'dispatchcategory' ] = category else : raise ValueError ( 'Cannot request subcategory without category' ) @ api_query ( 'dispatchlist' , ** params ) async def result ( _ , root ) : return [ DispatchThumbnail . _from_elem ( elem ) for elem in root . find ( 'DISPATCHLIST' ) ] return result ( self ) | Find dispatches by certain criteria . |
56,619 | def poll ( self , id ) : @ api_query ( 'poll' , pollid = str ( id ) ) async def result ( _ , root ) : elem = root . find ( 'POLL' ) if not elem : raise NotFound ( f'No poll found with id {id}' ) return Poll ( elem ) return result ( self ) | Poll with a given id . |
56,620 | def banner ( self , * ids , _expand_macros = None ) : async def noop ( s ) : return s _expand_macros = _expand_macros or noop @ api_query ( 'banner' , banner = ',' . join ( ids ) ) async def result ( _ , root ) : banners = [ await Banner ( elem , _expand_macros ) for elem in root . find ( 'BANNERS' ) ] if not len ( banners ) == len ( ids ) : raise NotFound ( 'one of the banner ids provided is invalid' ) return banners return result ( self ) | Get data about banners by their ids . |
56,621 | async def send_telegram ( self , * , client_key , telegram_id , telegram_key , recepient ) : params = { 'a' : 'sendTG' , 'client' : client_key , 'tgid' : str ( telegram_id ) , 'key' : telegram_key , 'to' : recepient } return await self . _call_api ( params ) | A basic interface to the Telegrams API . |
56,622 | async def happenings ( self , * , nations = None , regions = None , filters = None , beforeid = None , beforetime = None ) : while True : happening_bunch = await self . _get_happenings ( nations = nations , regions = regions , filters = filters , beforeid = beforeid , beforetime = beforetime ) for happening in happening_bunch : yield happening if len ( happening_bunch ) < 100 : break beforeid = happening_bunch [ - 1 ] . id | Iterate through happenings from newest to oldest . |
56,623 | def find_potential_match_regions ( template , transformed_array , method = 'correlation' , raw_tolerance = 0.666 ) : if method == 'correlation' : match_value = np . sum ( template ** 2 ) elif method == 'squared difference' : match_value = 0 elif method == 'correlation coefficient' : temp_minus_mean = template - np . mean ( template ) match_value = np . sum ( temp_minus_mean ** 2 ) else : raise ValueError ( 'Matching method not implemented' ) condition = ( ( np . round ( transformed_array , decimals = 3 ) >= match_value * raw_tolerance ) & ( np . round ( transformed_array , decimals = 3 ) <= match_value * ( 1. / raw_tolerance ) ) ) return np . transpose ( condition . nonzero ( ) ) | To prevent prohibitively slow calculation of normalisation coefficient at each point in image find potential match points and normalise these only these . This function uses the definitions of the matching functions to calculate the expected match value and finds positions in the transformed array matching these - normalisation will then eliminate false positives |
56,624 | def normalise_correlation ( image_tile_dict , transformed_array , template , normed_tolerance = 1 ) : template_norm = np . linalg . norm ( template ) image_norms = { ( x , y ) : np . linalg . norm ( image_tile_dict [ ( x , y ) ] ) * template_norm for ( x , y ) in image_tile_dict . keys ( ) } match_points = image_tile_dict . keys ( ) h , w = template . shape image_matches_normalised = { match_points [ i ] : transformed_array [ match_points [ i ] [ 0 ] , match_points [ i ] [ 1 ] ] / image_norms [ match_points [ i ] ] for i in range ( len ( match_points ) ) } result = { key : value for key , value in image_matches_normalised . items ( ) if np . round ( value , decimals = 3 ) >= normed_tolerance } return result . keys ( ) | Calculates the normalisation coefficients of potential match positions Then normalises the correlation at these positions and returns them if they do indeed constitute a match |
56,625 | def normalise_correlation_coefficient ( image_tile_dict , transformed_array , template , normed_tolerance = 1 ) : template_mean = np . mean ( template ) template_minus_mean = template - template_mean template_norm = np . linalg . norm ( template_minus_mean ) image_norms = { ( x , y ) : np . linalg . norm ( image_tile_dict [ ( x , y ) ] - np . mean ( image_tile_dict [ ( x , y ) ] ) ) * template_norm for ( x , y ) in image_tile_dict . keys ( ) } match_points = image_tile_dict . keys ( ) h , w = template . shape image_matches_normalised = { match_points [ i ] : transformed_array [ match_points [ i ] [ 0 ] , match_points [ i ] [ 1 ] ] / image_norms [ match_points [ i ] ] for i in range ( len ( match_points ) ) } normalised_matches = { key : value for key , value in image_matches_normalised . items ( ) if np . round ( value , decimals = 3 ) >= normed_tolerance } return normalised_matches . keys ( ) | As above but for when the correlation coefficient matching method is used |
56,626 | def calculate_squared_differences ( image_tile_dict , transformed_array , template , sq_diff_tolerance = 0.1 ) : template_norm_squared = np . sum ( template ** 2 ) image_norms_squared = { ( x , y ) : np . sum ( image_tile_dict [ ( x , y ) ] ** 2 ) for ( x , y ) in image_tile_dict . keys ( ) } match_points = image_tile_dict . keys ( ) h , w = template . shape image_matches_normalised = { match_points [ i ] : - 2 * transformed_array [ match_points [ i ] [ 0 ] , match_points [ i ] [ 1 ] ] + image_norms_squared [ match_points [ i ] ] + template_norm_squared for i in range ( len ( match_points ) ) } cutoff = h * w * 255 ** 2 * sq_diff_tolerance normalised_matches = { key : value for key , value in image_matches_normalised . items ( ) if np . round ( value , decimals = 3 ) <= cutoff } return normalised_matches . keys ( ) | As above but for when the squared differences matching method is used |
56,627 | async def post ( self ) : post = ( await self . region . _get_messages ( fromid = self . _post_id , limit = 1 ) ) [ 0 ] assert post . id == self . _post_id return post | Get the message lodged . |
56,628 | async def resolution ( self ) : resolutions = await asyncio . gather ( aionationstates . ga . resolution_at_vote , aionationstates . sc . resolution_at_vote , ) for resolution in resolutions : if ( resolution is not None and resolution . name == self . resolution_name ) : return resolution raise aionationstates . NotFound | Get the resolution voted on . |
56,629 | async def proposal ( self ) : proposals = await aionationstates . wa . proposals ( ) for proposal in proposals : if ( proposal . name == self . proposal_name ) : return proposal raise aionationstates . NotFound | Get the proposal in question . |
56,630 | def append ( self , electrode_id ) : do_append = False if not self . electrode_ids : do_append = True elif self . device . shape_indexes . shape [ 0 ] > 0 : source = self . electrode_ids [ - 1 ] target = electrode_id if not ( source == target ) : source_id , target_id = self . device . shape_indexes [ [ source , target ] ] try : if self . device . adjacency_matrix [ source_id , target_id ] : do_append = True except IndexError : logger . warning ( 'Electrodes `%s` and `%s` are not ' 'connected.' , source , target ) if do_append : self . electrode_ids . append ( electrode_id ) return do_append | Append the specified electrode to the route . |
56,631 | def insert_surface ( self , position , name , surface , alpha = 1. ) : if name in self . df_surfaces . index : raise NameError ( 'Surface already exists with `name="{}"`.' . format ( name ) ) self . df_surfaces . loc [ name ] = surface , alpha surfaces_order = self . df_surfaces . index . values . tolist ( ) surfaces_order . remove ( name ) base_index = surfaces_order . index ( 'background' ) + 1 if position < 0 : position = len ( surfaces_order ) + position surfaces_order . insert ( base_index + position , name ) self . reorder_surfaces ( surfaces_order ) | Insert Cairo surface as new layer . |
56,632 | def append_surface ( self , name , surface , alpha = 1. ) : self . insert_surface ( position = self . df_surfaces . index . shape [ 0 ] , name = name , surface = surface , alpha = alpha ) | Append Cairo surface as new layer on top of existing layers . |
56,633 | def remove_surface ( self , name ) : self . df_surfaces . drop ( name , axis = 0 , inplace = True ) self . reorder_surfaces ( self . df_surfaces . index ) | Remove layer from rendering stack and flatten remaining layers . |
56,634 | def clone_surface ( self , source_name , target_name , target_position = - 1 , alpha = 1. ) : source_surface = self . df_surfaces . surface . ix [ source_name ] source_width = source_surface . get_width ( ) source_height = source_surface . get_height ( ) source_format = source_surface . get_format ( ) target_surface = cairo . ImageSurface ( source_format , source_width , source_height ) target_cairo_context = cairo . Context ( target_surface ) target_cairo_context . set_source_surface ( source_surface , 0 , 0 ) target_cairo_context . paint ( ) self . insert_surface ( target_position , target_name , target_surface , alpha ) | Clone surface from existing layer to a new name inserting new surface at specified position . |
56,635 | def render_electrode_shapes ( self , df_shapes = None , shape_scale = 0.8 , fill = ( 1 , 1 , 1 ) ) : surface = self . get_surface ( ) if df_shapes is None : if hasattr ( self . canvas , 'df_canvas_shapes' ) : df_shapes = self . canvas . df_canvas_shapes else : return surface if 'x_center' not in df_shapes or 'y_center' not in df_shapes : return surface cairo_context = cairo . Context ( surface ) df_shapes = df_shapes . copy ( ) df_shapes [ [ 'x' , 'y' ] ] = ( df_shapes [ [ 'x_center' , 'y_center' ] ] + df_shapes [ [ 'x_center_offset' , 'y_center_offset' ] ] . values * shape_scale ) for path_id , df_path_i in ( df_shapes . groupby ( self . canvas . shape_i_columns ) [ [ 'x' , 'y' ] ] ) : vertices_x = df_path_i . x . values vertices_y = df_path_i . y . values cairo_context . move_to ( vertices_x [ 0 ] , vertices_y [ 0 ] ) for x , y in itertools . izip ( vertices_x [ 1 : ] , vertices_y [ 1 : ] ) : cairo_context . line_to ( x , y ) cairo_context . close_path ( ) cairo_context . set_source_rgba ( * fill ) cairo_context . fill ( ) return surface | Render electrode state shapes . |
56,636 | def render_registration ( self ) : surface = self . get_surface ( ) if self . canvas is None or self . df_canvas_corners . shape [ 0 ] == 0 : return surface corners = self . df_canvas_corners . copy ( ) corners [ 'w' ] = 1 transform = self . canvas . shapes_to_canvas_transform canvas_corners = corners . values . dot ( transform . T . values ) . T points_x = canvas_corners [ 0 ] points_y = canvas_corners [ 1 ] cairo_context = cairo . Context ( surface ) cairo_context . move_to ( points_x [ 0 ] , points_y [ 0 ] ) for x , y in zip ( points_x [ 1 : ] , points_y [ 1 : ] ) : cairo_context . line_to ( x , y ) cairo_context . line_to ( points_x [ 0 ] , points_y [ 0 ] ) cairo_context . set_source_rgb ( 1 , 0 , 0 ) cairo_context . stroke ( ) return surface | Render pinned points on video frame as red rectangle . |
56,637 | def draw_route ( self , df_route , cr , color = None , line_width = None ) : df_route_centers = ( self . canvas . df_shape_centers . ix [ df_route . electrode_i ] [ [ 'x_center' , 'y_center' ] ] ) df_endpoint_marker = ( .6 * self . get_endpoint_marker ( df_route_centers ) + df_route_centers . iloc [ - 1 ] . values ) cr . save ( ) if color is None : color_rgb_255 = np . array ( [ 96 , 189 , 104 , .8 * 255 ] ) color = ( color_rgb_255 / 255. ) . tolist ( ) if len ( color ) < 4 : color += [ 1. ] * ( 4 - len ( color ) ) cr . set_source_rgba ( * color ) cr . move_to ( * df_route_centers . iloc [ 0 ] ) for electrode_i , center_i in df_route_centers . iloc [ 1 : ] . iterrows ( ) : cr . line_to ( * center_i ) if line_width is None : line_width = np . sqrt ( ( df_endpoint_marker . max ( ) . values - df_endpoint_marker . min ( ) . values ) . prod ( ) ) * .1 cr . set_line_width ( 4 ) cr . stroke ( ) cr . move_to ( * df_endpoint_marker . iloc [ 0 ] ) for electrode_i , center_i in df_endpoint_marker . iloc [ 1 : ] . iterrows ( ) : cr . line_to ( * center_i ) cr . close_path ( ) cr . set_source_rgba ( * color ) cr . fill ( ) cr . restore ( ) | Draw a line between electrodes listed in a route . |
56,638 | def on_widget__button_press_event ( self , widget , event ) : if self . mode == 'register_video' and event . button == 1 : self . start_event = event . copy ( ) return elif self . mode == 'control' : shape = self . canvas . find_shape ( event . x , event . y ) if shape is None : return state = event . get_state ( ) if event . button == 1 : self . _route = Route ( self . device ) self . _route . append ( shape ) self . last_pressed = shape if not ( state & gtk . gdk . MOD1_MASK ) : self . emit ( 'route-electrode-added' , shape ) | Called when any mouse button is pressed . |
56,639 | def on_widget__button_release_event ( self , widget , event ) : event = event . copy ( ) if self . mode == 'register_video' and ( event . button == 1 and self . start_event is not None ) : self . emit ( 'point-pair-selected' , { 'start_event' : self . start_event , 'end_event' : event . copy ( ) } ) self . start_event = None return elif self . mode == 'control' : self . df_routes = self . df_routes . loc [ self . df_routes . route_i >= 0 ] . copy ( ) shape = self . canvas . find_shape ( event . x , event . y ) if shape is not None : electrode_data = { 'electrode_id' : shape , 'event' : event . copy ( ) } if event . button == 1 : if gtk . gdk . BUTTON1_MASK == event . get_state ( ) : if self . _route . append ( shape ) : self . emit ( 'route-electrode-added' , shape ) if len ( self . _route . electrode_ids ) == 1 : self . emit ( 'electrode-selected' , electrode_data ) else : route = self . _route self . emit ( 'route-selected' , route ) elif ( event . get_state ( ) == ( gtk . gdk . MOD1_MASK | gtk . gdk . BUTTON1_MASK ) and self . last_pressed != shape ) : self . emit ( 'electrode-pair-selected' , { 'source_id' : self . last_pressed , 'target_id' : shape , 'event' : event . copy ( ) } ) self . last_pressed = None elif event . button == 3 : menu = self . create_context_menu ( event , shape ) menu . popup ( None , None , None , event . button , event . time ) self . _route = None | Called when any mouse button is released . |
56,640 | def on_widget__motion_notify_event ( self , widget , event ) : if self . canvas is None : return elif event . is_hint : pointer = event . window . get_pointer ( ) x , y , mod_type = pointer else : x = event . x y = event . y shape = self . canvas . find_shape ( x , y ) self . widget . grab_focus ( ) if shape != self . last_hovered : if self . last_hovered is not None : self . emit ( 'electrode-mouseout' , { 'electrode_id' : self . last_hovered , 'event' : event . copy ( ) } ) self . last_hovered = None elif shape is not None : self . last_hovered = shape if self . _route is not None : if self . _route . append ( shape ) and not ( event . get_state ( ) & gtk . gdk . MOD1_MASK ) : self . emit ( 'route-electrode-added' , shape ) self . emit ( 'electrode-mouseover' , { 'electrode_id' : self . last_hovered , 'event' : event . copy ( ) } ) | Called when mouse pointer is moved within drawing area . |
56,641 | def register_electrode_command ( self , command , title = None , group = None ) : commands = self . electrode_commands . setdefault ( group , OrderedDict ( ) ) if title is None : title = ( command [ : 1 ] . upper ( ) + command [ 1 : ] ) . replace ( '_' , ' ' ) commands [ command ] = title | Register electrode command . |
56,642 | def register_route_command ( self , command , title = None , group = None ) : commands = self . route_commands . setdefault ( group , OrderedDict ( ) ) if title is None : title = ( command [ : 1 ] . upper ( ) + command [ 1 : ] ) . replace ( '_' , ' ' ) commands [ command ] = title | Register route command . |
56,643 | def after ( self ) : d = Deferred ( ) self . _after_deferreds . append ( d ) return d . chain | Return a deferred that will fire after the request is finished . |
56,644 | def after_response ( self , request , fn , * args , ** kwargs ) : self . _requests [ id ( request ) ] [ "callbacks" ] . append ( ( fn , args , kwargs ) ) | Call the given callable after the given request has its response . |
56,645 | def plot_degbandshalffill ( ) : ulim = [ 3.45 , 5.15 , 6.85 , 8.55 ] bands = range ( 1 , 5 ) for band , u_int in zip ( bands , ulim ) : name = 'Z_half_' + str ( band ) + 'band' dop = [ 0.5 ] data = ssplt . calc_z ( band , dop , np . arange ( 0 , u_int , 0.1 ) , 0. , name ) plt . plot ( data [ 'u_int' ] , data [ 'zeta' ] [ 0 , : , 0 ] , label = '$N={}$' . format ( str ( band ) ) ) ssplt . label_saves ( 'Z_half_multiorb.png' ) | Plot of Quasiparticle weight for degenerate half - filled bands showing the Mott transition |
56,646 | def plot_dop ( bands , int_max , dop , hund_cu , name ) : data = ssplt . calc_z ( bands , dop , np . arange ( 0 , int_max , 0.1 ) , hund_cu , name ) ssplt . plot_curves_z ( data , name ) | Plot of Quasiparticle weight for N degenerate bands under selected doping shows transition only at half - fill the rest are metallic states |
56,647 | def plot_dop_phase ( bands , int_max , hund_cu ) : name = 'Z_dop_phase_' + str ( bands ) + 'bands_U' + str ( int_max ) + 'J' + str ( hund_cu ) dop = np . sort ( np . hstack ( ( np . linspace ( 0.01 , 0.99 , 50 ) , np . arange ( 1. / 2. / bands , 1 , 1 / 2 / bands ) ) ) ) data = ssplt . calc_z ( bands , dop , np . arange ( 0 , int_max , 0.1 ) , hund_cu , name ) ssplt . imshow_z ( data , name ) ssplt . surf_z ( data , name ) | Phase plot of Quasiparticle weight for N degenerate bands under doping shows transition only at interger filling the rest are metallic states |
56,648 | def _register_extensions ( self , namespace ) : extmanager = ExtensionManager ( 'extensions.classes.{}' . format ( namespace ) , propagate_map_exceptions = True ) if extmanager . extensions : extmanager . map ( util . register_extension_class , base = self ) extmanager = ExtensionManager ( 'extensions.methods.{}' . format ( namespace ) , propagate_map_exceptions = True ) if extmanager . extensions : extmanager . map ( util . register_extension_method , base = self ) | Register any extensions under the given namespace . |
56,649 | def acls ( self ) : if self . _acls is None : self . _acls = InstanceAcls ( instance = self ) return self . _acls | The instance bound ACLs operations layer . |
56,650 | def all ( self ) : return self . _instance . _client . acls . all ( self . _instance . name ) | Get all ACLs for this instance . |
56,651 | def create ( self , cidr_mask , description , ** kwargs ) : return self . _instance . _client . acls . create ( self . _instance . name , cidr_mask , description , ** kwargs ) | Create an ACL for this instance . |
56,652 | def get ( self , acl ) : return self . _instance . _client . acls . get ( self . _instance . name , acl ) | Get the ACL specified by ID belonging to this instance . |
56,653 | def _VarintEncoder ( ) : local_chr = chr def EncodeVarint ( write , value ) : bits = value & 0x7f value >>= 7 while value : write ( 0x80 | bits ) bits = value & 0x7f value >>= 7 return write ( bits ) return EncodeVarint | Return an encoder for a basic varint value . |
56,654 | def _SignedVarintEncoder ( ) : local_chr = chr def EncodeSignedVarint ( write , value ) : if value < 0 : value += ( 1 << 64 ) bits = value & 0x7f value >>= 7 while value : write ( 0x80 | bits ) bits = value & 0x7f value >>= 7 return write ( bits ) return EncodeSignedVarint | Return an encoder for a basic signed varint value . |
56,655 | def match ( value , query ) : if type ( query ) in [ str , int , float , type ( None ) ] : return value == query elif type ( query ) == dict and len ( query . keys ( ) ) == 1 : for op in query : if op == "$eq" : return value == query [ op ] elif op == "$lt" : return value < query [ op ] elif op == "$lte" : return value <= query [ op ] elif op == "$gt" : return value > query [ op ] elif op == "$gte" : return value >= query [ op ] elif op == "$ne" : return value != query [ op ] elif op == "$in" : return value in query [ op ] elif op == "$nin" : return value not in query [ op ] else : GeoQLError ( "Not a valid query operator: " + op ) else : raise GeoQLError ( "Not a valid query: " + str ( query ) ) | Determine whether a value satisfies a query . |
56,656 | def features_tags_parse_str_to_dict ( obj ) : features = obj [ 'features' ] for i in tqdm ( range ( len ( features ) ) ) : tags = features [ i ] [ 'properties' ] . get ( 'tags' ) if tags is not None : try : tags = json . loads ( "{" + tags . replace ( "=>" , ":" ) + "}" ) except : try : tags = eval ( "{" + tags . replace ( "=>" , ":" ) + "}" ) except : tags = None if type ( tags ) == dict : features [ i ] [ 'properties' ] [ 'tags' ] = { k : tags [ k ] for k in tags } elif tags is None and 'tags' in features [ i ] [ 'properties' ] : del features [ i ] [ 'properties' ] [ 'tags' ] return obj | Parse tag strings of all features in the collection into a Python dictionary if possible . |
56,657 | def features_keep_by_property ( obj , query ) : features_keep = [ ] for feature in tqdm ( obj [ 'features' ] ) : if all ( [ match ( feature [ 'properties' ] . get ( prop ) , qry ) for ( prop , qry ) in query . items ( ) ] ) : features_keep . append ( feature ) obj [ 'features' ] = features_keep return obj | Filter all features in a collection by retaining only those that satisfy the provided query . |
56,658 | def features_keep_within_radius ( obj , center , radius , units ) : features_keep = [ ] for feature in tqdm ( obj [ 'features' ] ) : if all ( [ getattr ( geopy . distance . vincenty ( ( lat , lon ) , center ) , units ) < radius for ( lon , lat ) in geojson . utils . coords ( feature ) ] ) : features_keep . append ( feature ) obj [ 'features' ] = features_keep return obj | Filter all features in a collection by retaining only those that fall within the specified radius . |
56,659 | def features_keep_using_features ( obj , bounds ) : bounds_shapes = [ ( feature , shapely . geometry . shape ( feature [ 'geometry' ] ) ) for feature in tqdm ( bounds [ 'features' ] ) if feature [ 'geometry' ] is not None ] index = rtree . index . Index ( ) for i in tqdm ( range ( len ( bounds_shapes ) ) ) : ( feature , shape ) = bounds_shapes [ i ] index . insert ( i , shape . bounds ) features_keep = [ ] for feature in tqdm ( obj [ 'features' ] ) : if 'geometry' in feature and 'coordinates' in feature [ 'geometry' ] : coordinates = feature [ 'geometry' ] [ 'coordinates' ] if any ( [ shape . contains ( shapely . geometry . Point ( lon , lat ) ) for ( lon , lat ) in coordinates for ( feature , shape ) in [ bounds_shapes [ i ] for i in index . nearest ( ( lon , lat , lon , lat ) , 1 ) ] ] ) : features_keep . append ( feature ) continue obj [ 'features' ] = features_keep return obj | Filter all features in a collection by retaining only those that fall within the features in the second collection . |
56,660 | def features_node_edge_graph ( obj ) : points = { } features = obj [ 'features' ] for feature in tqdm ( obj [ 'features' ] ) : for ( lon , lat ) in geojson . utils . coords ( feature ) : points . setdefault ( ( lon , lat ) , 0 ) points [ ( lon , lat ) ] += 1 points = [ p for ( p , c ) in points . items ( ) if c > 1 ] features = [ geojson . Point ( p ) for p in points ] for f in tqdm ( obj [ 'features' ] ) : seqs = [ ] seq = [ ] for point in geojson . utils . coords ( f ) : if len ( seq ) > 0 : seq . append ( point ) if point in points : seq . append ( point ) if len ( seq ) > 1 and seq [ 0 ] in points : seqs . append ( seq ) seq = [ point ] for seq in seqs : features . append ( geojson . Feature ( geometry = { "coordinates" : seq , "type" : f [ 'geometry' ] [ 'type' ] } , properties = f [ 'properties' ] , type = f [ 'type' ] ) ) obj [ 'features' ] = features return obj | Transform the features into a more graph - like structure by appropriately splitting LineString features into two - point edges that connect Point nodes . |
56,661 | def get_table_info ( conn , tablename ) : r = conn . execute ( "pragma table_info('{}')" . format ( tablename ) ) ret = TableInfo ( ( ( row [ "name" ] , row ) for row in r ) ) return ret | Returns TableInfo object |
def early_warning(iterable, name='this generator'):
    """Yield every item of *iterable*, logging a warning as soon as
    exhaustion is detected (i.e. just before the final item is yielded).

    :param iterable: an iterator to wrap.
    :param name: human-readable name used in the warning message.
    :yields: the items of *iterable*, unchanged.
    """
    try:
        prev = next(iterable)
    except StopIteration:
        # BUG FIX: on empty input, letting StopIteration escape a generator
        # raises RuntimeError under PEP 479 — return cleanly instead.
        return
    while True:
        try:
            nxt = next(iterable)
        except StopIteration:
            # BUG FIX: narrowed from a bare `except:` so that genuine errors
            # raised by the underlying iterator still propagate.
            warning(' {} is now empty'.format(name))
            yield prev
            break
        else:
            yield prev
            prev = nxt
def post(self, data, request, id):
    """Create a new resource via POST.

    Raises MethodNotAllowed when an id is supplied (POST targets the
    collection, not an individual resource).
    """
    if id:
        raise errors.MethodNotAllowed()
    user = self._dict_to_model(data)
    user.save()
    # Point the client at the freshly created resource.
    location = '%s%d' % (reverse('user'), user.pk)
    return Response(201, None, {'Location': location})
def get(self, request, id):
    """Return a single user when *id* is given, otherwise all users."""
    return self._get_one(id) if id else self._get_all()
def put(self, data, request, id):
    """Update a single existing user.

    Raises MethodNotAllowed without an id, NotFound when the row is missing.
    """
    if not id:
        raise errors.MethodNotAllowed()
    user = self._dict_to_model(data)
    user.pk = id
    try:
        # force_update guarantees an UPDATE (never an INSERT) is issued.
        user.save(force_update=True)
    except DatabaseError:
        raise errors.NotFound()
def delete(self, request, id):
    """Delete a single user; deleting a nonexistent user is a no-op."""
    if not id:
        raise errors.MethodNotAllowed()
    try:
        user = models.User.objects.get(pk=id)
    except models.User.DoesNotExist:
        return
    user.delete()
def _get_one(self, id):
    """Fetch one user by primary key and serialize it to a dict.

    Raises NotFound when no user with that pk exists.
    """
    try:
        user = models.User.objects.get(pk=id)
    except models.User.DoesNotExist:
        raise errors.NotFound()
    return self._to_dict(user)
def _get_all(self):
    """Serialize every user in the database into a list of dicts."""
    return list(map(self._to_dict, models.User.objects.all()))
def _dict_to_model(self, data):
    """Build an unsaved User instance from request data.

    Raises BadRequest when *data* contains keys the model does not accept.
    """
    try:
        return models.User(**data)
    except TypeError:
        raise errors.BadRequest()
def captures(self, uuid, withTitles=False):
    """Return the captures for a given uuid.

    :param withTitles: when true, ask the service to include titles.
    """
    def extract(payload):
        # Default to an empty list when the response has no captures.
        return payload.get('capture', [])
    titles_flag = 'yes' if withTitles else 'no'
    return self._get((uuid,), extract, withTitles=titles_flag)
def uuid(self, type, val):
    """Return the item uuid for the given identifier type/value pair."""
    def extract(payload):
        # Fall back to the whole payload when no 'uuid' key is present.
        return payload.get('uuid', payload)
    return self._get((type, val), extract)
def mods(self, uuid):
    """Return the MODS metadata record for a given uuid."""
    def extract(payload):
        return payload.get('mods', {})
    return self._get(('mods', uuid), extract)
def get_i18n_day_name(day_nb, display='short', ln=None):
    """Return the internationalized name of a weekday.

    :param day_nb: weekday index, 0 = Sunday ... 6 = Saturday.
    :param display: 'short' for abbreviations, anything else for full names.
    :param ln: language code; defaults via default_ln().
    :raises KeyError: for an index outside 0-6.
    """
    ln = default_ln(ln)
    _ = gettext_set_language(ln)
    if display == 'short':
        labels = [_("Sun"), _("Mon"), _("Tue"), _("Wed"),
                  _("Thu"), _("Fri"), _("Sat")]
    else:
        labels = [_("Sunday"), _("Monday"), _("Tuesday"), _("Wednesday"),
                  _("Thursday"), _("Friday"), _("Saturday")]
    # dict lookup keeps the original KeyError behavior on bad indexes.
    return dict(enumerate(labels))[day_nb]
def get_i18n_month_name(month_nb, display='short', ln=None):
    """Return the internationalized name of a month.

    :param month_nb: 1-12 for months; 0 yields the generic "Month" label.
    :param display: 'short' for abbreviations, anything else for full names.
    :param ln: language code; defaults via default_ln().
    :raises KeyError: for an index outside 0-12.
    """
    ln = default_ln(ln)
    _ = gettext_set_language(ln)
    if display == 'short':
        labels = [_("Month"), _("Jan"), _("Feb"), _("Mar"), _("Apr"),
                  _("May"), _("Jun"), _("Jul"), _("Aug"), _("Sep"),
                  _("Oct"), _("Nov"), _("Dec")]
    else:
        labels = [_("Month"), _("January"), _("February"), _("March"),
                  _("April"), _("May "), _("June"), _("July"),
                  _("August"), _("September"), _("October"),
                  _("November"), _("December")]
    # The long form of May uses the msgid "May " (trailing space, kept to
    # match existing translation catalogs) — hence the strip().
    return dict(enumerate(labels))[month_nb].strip()
def create_month_selectbox(name, selected_month=0, ln=None):
    """Create an HTML <select> menu for month selection.

    :param name: value for the <select> element's name attribute.
    :param selected_month: numeric month (0-12) to pre-select.
    :param ln: language code for the month labels.
    :return: the HTML markup as a string.
    """
    ln = default_ln(ln)
    out = "<select name=\"%s\">\n" % name
    for i in range(0, 13):
        out += "<option value=\"%i\"" % i
        if i == selected_month:
            out += " selected=\"selected\""
        # BUG FIX: `ln` was previously passed positionally, landing in the
        # `display` parameter of get_i18n_month_name(); pass it by keyword.
        out += ">%s</option>\n" % get_i18n_month_name(i, ln=ln)
    out += "</select>\n"
    return out
def parse_runtime_limit(value, now=None):
    """Parse a CLI runtime-limit option of the form
    '[Wee[kday]] [hh[:mm][-hh[:mm]]]'.

    :param value: the textual limit, e.g. 'Sunday 01:00-05:00'.
    :param now: reference datetime; defaults to datetime.now().
    :return: ((current_start, current_end), (future_start, future_end))
        datetime pairs for the current and the next allowed window.
    :raises ValueError: when *value* does not match the expected format
        or names an unknown weekday.
    """
    def extract_time(value):
        # Parse 'hh:mm' into a timedelta offset from midnight.
        value = _RE_RUNTIMELIMIT_HOUR.search(value).groupdict()
        return timedelta(hours=int(value['hours']),
                         minutes=int(value['minutes']))

    def extract_weekday(value):
        # Accept full or abbreviated English weekday names (Mon=0 .. Sun=6).
        key = value[:3].lower()
        try:
            return {'mon': 0, 'tue': 1, 'wed': 2, 'thu': 3,
                    'fri': 4, 'sat': 5, 'sun': 6, }[key]
        except KeyError:
            raise ValueError("%s is not a good weekday name." % value)

    if now is None:
        now = datetime.now()
    today = now.date()
    g = _RE_RUNTIMELIMIT_FULL.search(value)
    if not g:
        raise ValueError('"%s" does not seem to be correct format for '
                         'parse_runtime_limit() '
                         '[Wee[kday]] [hh[:mm][-hh[:mm]]]).' % value)
    pieces = g.groupdict()
    if pieces['weekday_begin'] is None:
        # No weekday constraint: the window recurs daily.
        first_occasion_day = timedelta(days=0)
        next_occasion_delta = timedelta(days=1)
    else:
        # A single weekday means a one-day window (begin == end).
        if pieces['weekday_end'] is None:
            pieces['weekday_end'] = pieces['weekday_begin']
        weekday_begin = extract_weekday(pieces['weekday_begin'])
        weekday_end = extract_weekday(pieces['weekday_end'])
        if weekday_begin <= today.weekday() <= weekday_end:
            # Today is inside the weekday span: window starts today.
            first_occasion_day = timedelta(days=0)
        else:
            # Otherwise jump forward to the next begin weekday.
            days = (weekday_begin - today.weekday()) % 7
            first_occasion_day = timedelta(days=days)
        weekday = (now + first_occasion_day).weekday()
        if weekday < weekday_end:
            # Still inside the span tomorrow: next occasion is the next day.
            next_occasion_delta = timedelta(days=1)
        else:
            # Past the span: wrap around to the begin weekday next week.
            days = weekday_begin - weekday + 7
            next_occasion_delta = timedelta(days=days)
    # Missing hours default to midnight.
    if pieces['hour_begin'] is None:
        pieces['hour_begin'] = '00:00'
    if pieces['hour_end'] is None:
        pieces['hour_end'] = '00:00'
    beginning_time = extract_time(pieces['hour_begin'])
    ending_time = extract_time(pieces['hour_end'])
    if not ending_time:
        # 00:00 end means "until midnight", i.e. a full day after the start.
        ending_time = beginning_time + timedelta(days=1)
    elif beginning_time and ending_time and beginning_time > ending_time:
        # A range like 22:00-02:00 crosses midnight.
        ending_time += timedelta(days=1)
    # NOTE(review): real_datetime/real_time are presumably aliases of
    # datetime.datetime / datetime.time imported elsewhere — confirm.
    start_time = real_datetime.combine(today, real_time(hour=0, minute=0))
    current_range = (start_time + first_occasion_day + beginning_time,
                     start_time + first_occasion_day + ending_time)
    if now > current_range[1]:
        # Current window already over: roll forward to the next occasion.
        current_range = tuple(t + next_occasion_delta for t in current_range)
    future_range = (current_range[0] + next_occasion_delta,
                    current_range[1] + next_occasion_delta)
    return current_range, future_range
def guess_datetime(datetime_string):
    """Try to guess the datetime contained in a string of unknown format.

    :param datetime_string: the text to parse.
    :return: a broken-down time (time.struct_time / timetuple).
    :raises ValueError: when no known format matches.
    """
    if CFG_HAS_EGENIX_DATETIME:
        try:
            return Parser.DateTimeFromString(datetime_string).timetuple()
        except ValueError:
            pass
    else:
        # BUG FIX: the first candidate used to be None, which makes
        # time.strptime raise an uncaught TypeError; spell out strptime's
        # documented default format instead. BUG FIX: the ISO-8601 candidate
        # was '%Y-%M-%dT%h:%m:%sZ' (minute/invalid directives) and could
        # never match; corrected to the proper directives.
        for fmt in ("%a %b %d %H:%M:%S %Y", '%x %X', '%X %x',
                    '%Y-%m-%dT%H:%M:%SZ'):
            try:
                return time.strptime(datetime_string, fmt)
            except ValueError:
                pass
    raise ValueError("It is not possible to guess the datetime format of %s" % datetime_string)
def get_dst(date_obj):
    """Return 1 if daylight-saving time is locally in effect at *date_obj*,
    else 0.

    Dates before 1900 cannot go through time.mktime, so 0 is returned.
    """
    if date_obj.year < 1900:
        return 0
    stamp = time.mktime(date_obj.timetuple())
    # tm_isdst is the last field of the localtime struct.
    return time.localtime(stamp)[-1]
def utc_to_localtime(date_str, fmt="%Y-%m-%d %H:%M:%S", input_fmt="%Y-%m-%dT%H:%M:%SZ"):
    """Convert a UTC timestamp string into a local-time string.

    :param date_str: timestamp in *input_fmt* (UTC).
    :param fmt: strftime format of the returned local-time string.
    :param input_fmt: strptime format used to parse *date_str*.
    """
    parsed = datetime.strptime(date_str, input_fmt)
    # Apply the DST offset (if active) and the local timezone offset.
    local = parsed + timedelta(hours=get_dst(parsed))
    local -= timedelta(seconds=time.timezone)
    return strftime(fmt, local)
56,680 | def _handle_call ( self , actual_call , stubbed_call ) : self . _actual_calls . append ( actual_call ) use_call = stubbed_call or actual_call return use_call . return_value | Extends Stub call handling behavior to be callable by default . |
def formatted_args(self):
    """Format the recorded call arguments as a '(pos, ..., kw=val)' string."""
    parts = [repr(arg) for arg in self.args]
    parts += ['%s=%s' % (key, repr(val)) for key, val in self.kwargs.items()]
    return '(%s)' % ', '.join(parts)
def check(self):
    """Verify that every required third-party executable is available.

    :raises RuntimeError: naming the first missing dependency.
    """
    required = ('cd-hit', 'prank', 'hmmbuild', 'hmmpress',
                'hmmscan', 'phmmer', 'mafft', 'meme')
    for tool in required:
        if not self.pathfinder.exists(tool):
            raise RuntimeError("Dependency {} is missing".format(tool))
def generate_non_rabs(self):
    """Shrink the non-Rab DB by clustering away redundant sequences.

    Runs CD-HIT at the configured identity threshold and discards the
    cluster file it produces, keeping only the representative sequences.
    """
    logging.info('Building non-Rab DB')
    threshold = str(config['param']['non_rab_db_identity_threshold'])
    cmd = [self.pathfinder['cd-hit'],
           '-i', self.path['non_rab_db'],
           '-o', self.output['non_rab_db'],
           '-d', '100',
           '-c', threshold,
           '-g', '1',
           '-T', self.cpu]
    run_cmd(cmd)
    # The .clstr side file is not needed downstream.
    os.remove(self.output['non_rab_db'] + '.clstr')
def parse_duration(duration):
    """Attempt to parse an ISO 8601 formatted duration.

    :param duration: duration string such as 'P1Y2M3DT4H5M6S'.
    :return: a datetime.timedelta (months/years converted to days).
    :raises ParseError: when neither duration pattern matches.
    """
    duration = str(duration).upper().strip()
    elements = ELEMENTS.copy()
    for pattern in (SIMPLE_DURATION, COMBINED_DURATION):
        # Match once and reuse the result (was matched twice before).
        match = pattern.match(duration)
        if not match:
            continue
        found = match.groupdict()
        del found['time']
        elements.update(dict((k, int(v or 0)) for k, v in found.items()))
        return datetime.timedelta(
            days=(elements['days']
                  + _months_to_days(elements['months'])
                  + _years_to_days(elements['years'])),
            hours=elements['hours'],
            minutes=elements['minutes'],
            seconds=elements['seconds'])
    # BUG FIX: the original *returned* a ParseError instance instead of
    # raising it, so callers never saw an exception on malformed input.
    raise ParseError()
def skin_details(skin_id, lang="en"):
    """Return details about a single skin (responses are cached)."""
    params = dict(skin_id=skin_id, lang=lang)
    # Per-skin, per-language cache file name.
    cache_name = "skin_details.%(skin_id)s.%(lang)s.json" % params
    return get_cached("skin_details.json", cache_name, params=params)
def bubble_to_gexf(bblfile: str, gexffile: str = None, oriented: bool = False):
    """Write to *gexffile* a GEXF graph equivalent to the one depicted in
    the given bubble file.

    :return: the output file name, unchanged.
    """
    bubble = BubbleTree.from_bubble_file(bblfile, oriented=bool(oriented))
    gexf_converter.tree_to_file(bubble, gexffile)
    return gexffile
def bubble_to_js(bblfile: str, jsdir: str = None, oriented: bool = False, **style):
    """Write into *jsdir* a JS rendering of the graph depicted in the
    given bubble file.

    :param style: extra styling options forwarded to the converter.
    :return: the output directory, unchanged.
    """
    js_converter.bubble_to_dir(bblfile, jsdir, oriented=bool(oriented), **style)
    return jsdir
def tree_to_graph(bbltree: BubbleTree) -> Graph or Digraph:
    # NOTE(review): the annotation `Graph or Digraph` evaluates to just
    # Graph; Union[Graph, Digraph] is probably what was meant.
    """Compute, as a graphviz Graph/Digraph instance, the given bubble tree.

    Powernodes become invisible-anchored clusters; plain nodes stay nodes;
    edges touching a powernode are clipped at the cluster border.
    """
    GraphObject = Digraph if bbltree.oriented else Graph

    def create(name: str):
        # Build a cluster subgraph for a powernode, with an invisible
        # anchor node so edges can attach to the cluster itself.
        ret = GraphObject('cluster_' + name)
        ret.node(name, style='invis', shape='point')
        ret.body.append('color=lightgrey')
        ret.body.append('label=""')
        ret.body.append('shape=ellipse')
        ret.body.append('penwidth=2')
        ret.body.append('pencolor=black')
        return ret

    nodes = frozenset(bbltree.nodes())
    subgraphs = {}
    # Create one cluster per powernode; plain nodes are added to their
    # containing cluster directly.
    for powernode in bbltree.powernodes():
        if powernode not in subgraphs:
            subgraphs[powernode] = create(powernode)
        for succ in bbltree.inclusions[powernode]:
            if succ not in subgraphs:
                if succ not in nodes:
                    subgraphs[succ] = create(succ)
                else:
                    subgraphs[powernode].node(succ)
    # Nest child clusters inside their parents (done after creation so
    # every cluster exists before it is embedded).
    for powernode, succs in bbltree.inclusions.items():
        for succ in succs:
            if succ not in nodes:
                subgraphs[powernode].subgraph(subgraphs[succ])
    # compound=true lets edges be clipped at cluster borders (ltail/lhead).
    graph = GraphObject('graph', graph_attr={'compound': 'true'})
    for root in bbltree.roots:
        if root in subgraphs:
            graph.subgraph(subgraphs[root])
    for source, targets in bbltree.edges.items():
        for target in targets:
            # Emit each undirected edge once (lexicographic ordering).
            if source <= target:
                attrs = {}
                if source not in nodes:
                    attrs.update({'ltail': 'cluster_' + source})
                if target not in nodes:
                    attrs.update({'lhead': 'cluster_' + target})
                graph.edge(source, target, **attrs)
    return graph
def to_dict(self):
    """Return an OrderedDict whose keys are the names in self.attrs."""
    pairs = ((name, self.__getattribute__(name)) for name in self.attrs)
    return OrderedDict(pairs)
def to_list(self):
    """Return a list containing the values of the attributes listed in
    self.attrs, in that order.

    BUG FIX: this was a copy-paste of to_dict() and returned an
    OrderedDict, contradicting both its name and its documentation; it
    now returns the list of attribute values.
    """
    return [self.__getattribute__(name) for name in self.attrs]
def uniq(pipe):
    """Collapse consecutive duplicates, like bash's ``uniq`` command:
    an item is yielded only when it differs from the previous item.

    :param pipe: any iterable; consumed lazily.
    :yields: the items of *pipe* with consecutive duplicates removed.
    """
    pipe = iter(pipe)
    try:
        previous = next(pipe)
    except StopIteration:
        # BUG FIX: on empty input, letting StopIteration escape a generator
        # raises RuntimeError under PEP 479 — return cleanly instead.
        return
    yield previous
    for item in pipe:
        # BUG FIX: compare by equality, not identity (`is not`), which only
        # worked by accident for interned small ints/strings.
        if item != previous:
            previous = item
            yield item
def send_request(url, method, data, args, params, headers, cookies, timeout, is_json, verify_cert):
    """Forge and send an HTTP request.

    :param url: target URL; ':name' path placeholders are substituted
        from *args*.
    :param method: HTTP verb (case-insensitive).
    :param data: request body; JSON-encoded when *is_json* is true.
    :param args: mapping of path-placeholder names to values.
    :param params: query-string parameters.
    :param headers: request headers. NOTE(review): mutated in place when
        *is_json* — the caller's dict gains a Content-Type key.
    :param cookies: cookies to send.
    :param timeout: socket timeout in seconds.
    :param is_json: whether to serialize *data* as JSON.
    :param verify_cert: whether to verify the TLS certificate.
    :return: dict with keys data, cookies, content_type, status, is_json,
        timeout (timeout=True and status=0 when the request timed out).
    """
    # Substitute ':name' placeholders in the URL path.
    for p in args:
        url = url.replace(':' + p, str(args[p]))
    try:
        if data:
            if is_json:
                headers['Content-Type'] = 'application/json'
                data = json.dumps(data)
            request = requests.Request(method.upper(), url, data=data,
                                       params=params, headers=headers,
                                       cookies=cookies)
        else:
            request = requests.Request(method.upper(), url, params=params,
                                       headers=headers, cookies=cookies)
        session = requests.Session()
        session.verify = verify_cert
        r = session.send(request.prepare(), timeout=timeout)
        # NOTE(review): the session is only closed on success or timeout;
        # other exceptions propagate with the session left open.
        session.close()
    except requests.exceptions.Timeout:
        # Timeouts are reported in-band rather than raised.
        return {'data': {}, 'cookies': CookieJar(), 'content_type': '',
                'status': 0, 'is_json': False, 'timeout': True}
    try:
        content_type = r.headers.get('Content-Type', 'application/json')
        response = r.json()
        isjson = True
    except json.decoder.JSONDecodeError:
        # Non-JSON body: fall back to raw text.
        content_type = r.headers.get('Content-Type', 'text/html')
        response = r.text
        isjson = False
    return {'data': response, 'cookies': r.cookies,
            'content_type': content_type, 'status': r.status_code,
            'is_json': isjson, 'timeout': False}
def neighbors(self) -> List['Node']:
    """Return the list of neighbors of the node, loading them first if
    they are not yet cached."""
    self._load_neighbors()
    result = []
    for edge in self._neighbors.values():
        # Pick whichever endpoint of the edge is not this node.
        if edge.source != self:
            result.append(edge.source)
        else:
            result.append(edge.target)
    return result
def _load_neighbors(self) -> None:
    """Load all neighbors of the node from the local database, fetching
    from the external data source first when they are not cached yet.
    """
    if not self.are_neighbors_cached:
        # Fetch neighbors remotely, then persist the "cached" flag so the
        # external source is not queried again for this node.
        self._load_neighbors_from_external_source()
        db: GraphDatabaseInterface = self._graph.database
        db_node: DBNode = db.Node.find_by_name(self.name)
        db_node.are_neighbors_cached = True
        db.session.commit()
        self.are_neighbors_cached = True
    # Materialize the neighbors into the in-memory graph exactly once.
    if not self._are_neighbors_loaded:
        self._load_neighbors_from_database()
def _load_neighbors_from_database(self) -> None:
    """Load the neighbors of the node from the local database into the
    in-memory graph, adding the corresponding nodes and edges."""
    # Mark as loaded first so re-entrant calls do not repeat the work.
    self._are_neighbors_loaded = True
    graph: Graph = self._graph
    records: List[DBNode] = graph.database.Node.find_by_name(self.name).neighbors
    node_list: NodeList = graph.nodes
    for record in records:
        graph.add_node(record.name, record.external_id)
        neighbor: Node = node_list.get_node_by_name(record.name)
        graph.add_edge(self, neighbor, 1, False)
def key(self) -> Tuple[int, int]:
    """Unique identifier of the edge: the (source, target) node indexes."""
    return (self._source.index, self._target.index)
def edge_list(self) -> List[Edge]:
    """Return the edges of the container ordered by their keys."""
    # sorted() already produces a list; no extra comprehension needed.
    return sorted(self._edges.values(), key=attrgetter("key"))
def beforeSummaryReport(self, event):
    """Stop profiling and write the collected results to the report stream."""
    self.prof.disable()
    report = pstats.Stats(self.prof, stream=event.stream)
    report.sort_stats(self.sort)
    event.stream.writeln(nose2.util.ln('Profiling results'))
    report.print_stats()
    if self.pfile:
        # Persist the raw stats for later offline inspection.
        report.dump_stats(self.pfile)
    if self.cachegrind:
        # Hand the raw profile to the cachegrind visualizer.
        visualize(self.prof.getstats())
def separate(text):
    """Take text and separate it into a list of lowercase words.

    Each whitespace-separated token is stripped of every character that is
    not an ASCII letter; tokens left empty are dropped.

    BUG FIX: previously returned a lazy ``map`` object even though the
    documentation promised a list; a real list is now returned (backward
    compatible for callers that only iterate).
    """
    # Build the ASCII-letter set once instead of scanning a string per char.
    letters = set('abcdefghijklmnopqrstuvwxyz')
    letters |= {c.upper() for c in letters}
    words = []
    for token in text.split():
        cleaned = ''.join(c for c in token if c in letters)
        if cleaned:
            words.append(cleaned.lower())
    return words
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.