idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
13,500
def _update_all_devices ( self ) : self . all_devices = [ ] self . all_devices . extend ( self . keyboards ) self . all_devices . extend ( self . mice ) self . all_devices . extend ( self . gamepads ) self . all_devices . extend ( self . other_devices )
Update the all_devices list .
13,501
def _parse_device_path ( self , device_path , char_path_override = None ) : try : device_type = device_path . rsplit ( '-' , 1 ) [ 1 ] except IndexError : warn ( "The following device path was skipped as it could " "not be parsed: %s" % device_path , RuntimeWarning ) return realpath = os . path . realpath ( device_path...
Parse each device and add to the approriate list .
13,502
def _find_xinput ( self ) : for dll in XINPUT_DLL_NAMES : try : self . xinput = getattr ( ctypes . windll , dll ) except OSError : pass else : self . xinput_dll = dll break else : warn ( "No xinput driver dll found, gamepads not supported." , RuntimeWarning )
Find most recent xinput library .
13,503
def _find_devices_win ( self ) : self . _find_xinput ( ) self . _detect_gamepads ( ) self . _count_devices ( ) if self . _raw_device_counts [ 'keyboards' ] > 0 : self . keyboards . append ( Keyboard ( self , "/dev/input/by-id/usb-A_Nice_Keyboard-event-kbd" ) ) if self . _raw_device_counts [ 'mice' ] > 0 : self . mice ....
Find devices on Windows .
13,504
def _find_devices_mac ( self ) : self . keyboards . append ( Keyboard ( self ) ) self . mice . append ( MightyMouse ( self ) ) self . mice . append ( Mouse ( self ) )
Find devices on Mac .
13,505
def _detect_gamepads ( self ) : state = XinputState ( ) for device_number in range ( 4 ) : res = self . xinput . XInputGetState ( device_number , ctypes . byref ( state ) ) if res == XINPUT_ERROR_SUCCESS : device_path = ( "/dev/input/by_id/" + "usb-Microsoft_Corporation_Controller_%s-event-joystick" % device_number ) s...
Find gamepads .
13,506
def _count_devices ( self ) : number_of_devices = ctypes . c_uint ( ) if ctypes . windll . user32 . GetRawInputDeviceList ( ctypes . POINTER ( ctypes . c_int ) ( ) , ctypes . byref ( number_of_devices ) , ctypes . sizeof ( RawInputDeviceList ) ) == - 1 : warn ( "Call to GetRawInputDeviceList was unsuccessful." "We have...
See what Windows GetRawInputDeviceList wants to tell us .
13,507
def _find_by ( self , key ) : by_path = glob . glob ( '/dev/input/by-{key}/*-event-*' . format ( key = key ) ) for device_path in by_path : self . _parse_device_path ( device_path )
Find devices .
13,508
def _find_special ( self ) : charnames = self . _get_char_names ( ) for eventdir in glob . glob ( '/sys/class/input/event*' ) : char_name = os . path . split ( eventdir ) [ 1 ] if char_name in charnames : continue name_file = os . path . join ( eventdir , 'device' , 'name' ) with open ( name_file ) as name_file : devic...
Look for special devices .
13,509
def get_event_string ( self , evtype , code ) : if WIN and evtype == 'Key' : try : code = self . codes [ 'wincodes' ] [ code ] except KeyError : pass try : return self . codes [ evtype ] [ code ] except KeyError : raise UnknownEventCode ( "We don't know this event." , evtype , code )
Get the string name of the event .
13,510
def detect_microbit ( self ) : try : gpad = MicroBitPad ( self ) except ModuleNotFoundError : warn ( "The microbit library could not be found in the pythonpath. \n" "For more information, please visit \n" "https://inputs.readthedocs.io/en/latest/user/microbit.html" , RuntimeWarning ) else : self . microbits . append ( ...
Detect a microbit .
13,511
def set_display ( self , index = None ) : if index : image = self . microbit . Image . STD_IMAGES [ index ] else : image = self . default_image self . microbit . display . show ( image )
Show an image on the display .
13,512
def _setup_rumble ( self ) : self . left_rumble = self . _get_ready_to ( '99500' ) self . right_rumble = self . _get_ready_to ( '00599' ) self . double_rumble = self . _get_ready_to ( '99599' )
Setup the three animations which simulate a rumble .
13,513
def _get_ready_to ( self , rumble ) : return [ self . microbit . Image ( ':' . join ( [ rumble if char == '1' else '00500' for char in code ] ) ) for code in SPIN_UP_MOTOR ]
Watch us wreck the mike! PSYCHE!
13,514
def _full_speed_rumble ( self , images , duration ) : while duration > 0 : self . microbit . display . show ( images [ 0 ] ) time . sleep ( 0.04 ) self . microbit . display . show ( images [ 1 ] ) time . sleep ( 0.04 ) duration -= 0.08
Simulate the motors running at full .
13,515
def _spin_up ( self , images , duration ) : total = 0 for image in images : self . microbit . display . show ( image ) time . sleep ( 0.05 ) total += 0.05 if total >= duration : return remaining = duration - total self . _full_speed_rumble ( images [ - 2 : ] , remaining ) self . set_display ( )
Simulate the motors getting warmed up .
13,516
def handle_new_events ( self , events ) : for event in events : self . events . append ( self . create_event_object ( event [ 0 ] , event [ 1 ] , int ( event [ 2 ] ) ) )
Add each new events to the event queue .
13,517
def handle_abs ( self ) : x_raw = self . microbit . accelerometer . get_x ( ) y_raw = self . microbit . accelerometer . get_y ( ) x_abs = ( 'Absolute' , 0x00 , x_raw ) y_abs = ( 'Absolute' , 0x01 , y_raw ) return x_abs , y_abs
Gets the state as the raw abolute numbers .
13,518
def handle_dpad ( self ) : x_raw = self . microbit . accelerometer . get_x ( ) y_raw = self . microbit . accelerometer . get_y ( ) minus_sens = self . sensitivity * - 1 if x_raw < minus_sens : x_state = ( 'Absolute' , 0x10 , - 1 ) elif x_raw > self . sensitivity : x_state = ( 'Absolute' , 0x10 , 1 ) else : x_state = ( ...
Gets the state of the virtual dpad .
13,519
def check_state ( self ) : if self . dpad : x_state , y_state = self . handle_dpad ( ) else : x_state , y_state = self . handle_abs ( ) new_state = set ( ( x_state , y_state , ( 'Key' , 0x130 , int ( self . microbit . button_a . is_pressed ( ) ) ) , ( 'Key' , 0x131 , int ( self . microbit . button_b . is_pressed ( ) ) ...
Tracks differences in the device state .
13,520
def handle_input ( self ) : difference = self . check_state ( ) if not difference : return self . events = [ ] self . handle_new_events ( difference ) self . update_timeval ( ) self . events . append ( self . sync_marker ( self . timeval ) ) self . write_to_pipe ( self . events )
Sends differences in the device state to the MicroBitPad as events .
13,521
def main ( ) : while 1 : events = get_mouse ( ) for event in events : print ( event . ev_type , event . code , event . state )
Just print out some event infomation when the mouse is used .
13,522
def main ( ) : while 1 : events = get_key ( ) if events : for event in events : print ( event . ev_type , event . code , event . state )
Just print out some event infomation when keys are pressed .
13,523
def main ( ) : while 1 : events = get_gamepad ( ) for event in events : print ( event . ev_type , event . code , event . state )
Just print out some event infomation when the gamepad is used .
13,524
def main ( gamepad = None ) : if not gamepad : gamepad = inputs . devices . gamepads [ 0 ] gamepad . set_vibration ( 1 , 0 , 1000 ) time . sleep ( 2 ) gamepad . set_vibration ( 0 , 1 , 1000 ) time . sleep ( 2 ) gamepad . set_vibration ( 1 , 1 , 2000 ) time . sleep ( 2 )
Vibrate the gamepad .
13,525
def validate ( self , value ) : errors = [ ] valid = self . _is_valid ( value ) if not valid : errors . append ( self . fail ( value ) ) return errors for constraint in self . _constraints_inst : error = constraint . is_valid ( value ) if error : errors . append ( error ) return errors
Check if value is valid .
13,526
def _process_schema ( self , schema_dict , validators ) : schema_flat = util . flatten ( schema_dict ) for key , expression in schema_flat . items ( ) : try : schema_flat [ key ] = syntax . parse ( expression , validators ) except SyntaxError as e : error = str ( e ) + ' at node \'%s\'' % key raise SyntaxError ( error ...
Go through a schema and construct validators .
13,527
def _validate ( self , validator , data , key , position = None , includes = None ) : errors = [ ] if position : position = '%s.%s' % ( position , key ) else : position = key try : data_item = util . get_value ( data , key ) except KeyError : if validator . is_optional : return errors errors . append ( '%s: Required fi...
Run through a schema and a data structure validating along the way .
13,528
def _validate_item ( self , validator , data_item , position , includes ) : errors = [ ] if data_item is None and validator . is_optional and validator . can_be_none : return errors errors += self . _validate_primitive ( validator , data_item , position ) if errors : return errors if isinstance ( validator , val . Incl...
Validates a single data item against validator .
13,529
def _find_data_path_schema ( data_path , schema_name ) : if not data_path or data_path == '/' or data_path == '.' : return None directory = os . path . dirname ( data_path ) path = glob . glob ( os . path . join ( directory , schema_name ) ) if not path : return _find_schema ( directory , schema_name ) return path [ 0 ...
Starts in the data file folder and recursively looks in parents for schema_name
13,530
def _find_schema ( data_path , schema_name ) : path = glob . glob ( schema_name ) for p in path : if os . path . isfile ( p ) : return p return _find_data_path_schema ( data_path , schema_name )
Checks if schema_name is a valid file if not searches in data_path for it .
13,531
def flatten ( dic , keep_iter = False , position = None ) : child = { } if not dic : return { } for k , v in get_iter ( dic ) : if isstr ( k ) : k = k . replace ( '.' , '_' ) if position : item_position = '%s.%s' % ( position , k ) else : item_position = '%s' % k if is_iter ( v ) : child . update ( flatten ( dic [ k ] ...
Returns a flattened dictionary from a dictionary of nested dictionaries and lists . keep_iter will treat iterables as valid values while also flattening them .
13,532
def to_representation ( self , obj ) : value = self . model_field . __get__ ( obj , None ) return smart_text ( value , strings_only = True )
convert value to representation .
13,533
def run_validators ( self , value ) : try : self . model_field . validate ( value ) except MongoValidationError as e : raise ValidationError ( e . message ) super ( DocumentField , self ) . run_validators ( value )
validate value .
13,534
def get_object_or_404 ( queryset , * args , ** kwargs ) : try : return queryset . get ( * args , ** kwargs ) except ( ValueError , TypeError , DoesNotExist , ValidationError ) : raise Http404 ( )
replacement of rest_framework . generics and django . shrtcuts analogues
13,535
def recursive_save ( self , validated_data , instance = None ) : me_data = dict ( ) for key , value in validated_data . items ( ) : try : field = self . fields [ key ] if isinstance ( field , EmbeddedDocumentSerializer ) : me_data [ key ] = field . recursive_save ( value ) elif ( ( isinstance ( field , serializers . Li...
Recursively traverses validated_data and creates EmbeddedDocuments of the appropriate subtype from them .
13,536
def apply_customization ( self , serializer , customization ) : if customization . fields is not None : if len ( customization . fields ) == 0 : serializer . Meta . fields = ALL_FIELDS else : serializer . Meta . fields = customization . fields if customization . exclude is not None : serializer . Meta . exclude = custo...
Applies fields customization to a nested or embedded DocumentSerializer .
13,537
def to_internal_value ( self , data ) : ret = super ( DynamicDocumentSerializer , self ) . to_internal_value ( data ) dynamic_data = self . _get_dynamic_data ( ret ) ret . update ( dynamic_data ) return ret
Updates _validated_data with dynamic data i . e . data not listed in fields .
13,538
def get_field_kwargs ( field_name , model_field ) : kwargs = { } kwargs [ 'model_field' ] = model_field if hasattr ( model_field , 'verbose_name' ) and needs_label ( model_field , field_name ) : kwargs [ 'label' ] = capfirst ( model_field . verbose_name ) if hasattr ( model_field , 'help_text' ) : kwargs [ 'help_text' ...
Creating a default instance of a basic non - relational field .
13,539
def get_relation_kwargs ( field_name , relation_info ) : model_field , related_model = relation_info kwargs = { } if related_model and not issubclass ( related_model , EmbeddedDocument ) : kwargs [ 'queryset' ] = related_model . objects if model_field : if hasattr ( model_field , 'verbose_name' ) and needs_label ( mode...
Creating a default instance of a flat relational field .
13,540
def get_nested_relation_kwargs ( field_name , relation_info ) : kwargs = get_relation_kwargs ( field_name , relation_info ) kwargs . pop ( 'queryset' ) kwargs . pop ( 'required' ) kwargs [ 'read_only' ] = True return kwargs
Creating a default instance of a nested serializer
13,541
def degrees_dir ( CIJ ) : CIJ = binarize ( CIJ , copy = True ) id = np . sum ( CIJ , axis = 0 ) od = np . sum ( CIJ , axis = 1 ) deg = id + od return id , od , deg
Node degree is the number of links connected to the node . The indegree is the number of inward links and the outdegree is the number of outward links .
13,542
def degrees_und ( CIJ ) : CIJ = binarize ( CIJ , copy = True ) return np . sum ( CIJ , axis = 0 )
Node degree is the number of links connected to the node .
13,543
def strengths_dir ( CIJ ) : istr = np . sum ( CIJ , axis = 0 ) ostr = np . sum ( CIJ , axis = 1 ) return istr + ostr
Node strength is the sum of weights of links connected to the node . The instrength is the sum of inward link weights and the outstrength is the sum of outward link weights .
13,544
def strengths_und_sign ( W ) : W = W . copy ( ) n = len ( W ) np . fill_diagonal ( W , 0 ) Spos = np . sum ( W * ( W > 0 ) , axis = 0 ) Sneg = np . sum ( W * ( W < 0 ) , axis = 0 ) vpos = np . sum ( W [ W > 0 ] ) vneg = np . sum ( W [ W < 0 ] ) return Spos , Sneg , vpos , vneg
Node strength is the sum of weights of links connected to the node .
13,545
def edge_nei_overlap_bu ( CIJ ) : ik , jk = np . where ( CIJ ) lel = len ( CIJ [ ik , jk ] ) n = len ( CIJ ) deg = degrees_und ( CIJ ) ec = np . zeros ( ( lel , ) ) degij = np . zeros ( ( 2 , lel ) ) for e in range ( lel ) : neiik = np . setdiff1d ( np . union1d ( np . where ( CIJ [ ik [ e ] , : ] ) , np . where ( CIJ ...
This function determines the neighbors of two nodes that are linked by an edge and then computes their overlap . Connection matrix must be binary and directed . Entries of EC that are inf indicate that no edge is present . Entries of EC that are 0 denote local bridges i . e . edges that link completely non - overlappin...
13,546
def matching_ind ( CIJ ) : n = len ( CIJ ) Min = np . zeros ( ( n , n ) ) Mout = np . zeros ( ( n , n ) ) Mall = np . zeros ( ( n , n ) ) for i in range ( n - 1 ) : for j in range ( i + 1 , n ) : c1i = CIJ [ : , i ] c2i = CIJ [ : , j ] usei = np . logical_or ( c1i , c2i ) usei [ i ] = 0 usei [ j ] = 0 nconi = np . sum ...
For any two nodes u and v the matching index computes the amount of overlap in the connection patterns of u and v . Self - connections and u - v connections are ignored . The matching index is a symmetric quantity similar to a correlation or a dot product .
13,547
def dice_pairwise_und ( a1 , a2 ) : a1 = binarize ( a1 , copy = True ) a2 = binarize ( a2 , copy = True ) n = len ( a1 ) np . fill_diagonal ( a1 , 0 ) np . fill_diagonal ( a2 , 0 ) d = np . zeros ( ( n , ) ) for i in range ( n ) : d [ i ] = 2 * ( np . sum ( np . logical_and ( a1 [ : , i ] , a2 [ : , i ] ) ) / ( np . su...
Calculates pairwise dice similarity for each vertex between two matrices . Treats the matrices as binary and undirected .
13,548
def corr_flat_und ( a1 , a2 ) : n = len ( a1 ) if len ( a2 ) != n : raise BCTParamError ( "Cannot calculate flattened correlation on " "matrices of different size" ) triu_ix = np . where ( np . triu ( np . ones ( ( n , n ) ) , 1 ) ) return np . corrcoef ( a1 [ triu_ix ] . flat , a2 [ triu_ix ] . flat ) [ 0 ] [ 1 ]
Returns the correlation coefficient between two flattened adjacency matrices . Only the upper triangular part is used to avoid double counting undirected matrices . Similarity metric for weighted matrices .
13,549
def corr_flat_dir ( a1 , a2 ) : n = len ( a1 ) if len ( a2 ) != n : raise BCTParamError ( "Cannot calculate flattened correlation on " "matrices of different size" ) ix = np . logical_not ( np . eye ( n ) ) return np . corrcoef ( a1 [ ix ] . flat , a2 [ ix ] . flat ) [ 0 ] [ 1 ]
Returns the correlation coefficient between two flattened adjacency matrices . Similarity metric for weighted matrices .
13,550
def adjacency_plot_und ( A , coor , tube = False ) : from mayavi import mlab n = len ( A ) nr_edges = ( n * n - 1 ) // 2 ixes , = np . where ( np . triu ( np . ones ( ( n , n ) ) , 1 ) . flat ) adjdat = A . flat [ ixes ] A_r = np . tile ( coor , ( n , 1 , 1 ) ) starts = np . reshape ( A_r , ( n * n , 3 ) ) [ ixes , : ]...
This function in matlab is a visualization helper which translates an adjacency matrix and an Nx3 matrix of spatial coordinates and plots a 3D isometric network connecting the undirected unweighted nodes using a specific plotting format . Including the formatted output is not useful at all for bctpy since matplotlib wi...
13,551
def backbone_wu ( CIJ , avgdeg ) : n = len ( CIJ ) if not np . all ( CIJ == CIJ . T ) : raise BCTParamError ( 'backbone_wu can only be computed for undirected ' 'matrices. If your matrix is has noise, correct it with np.around' ) CIJtree = np . zeros ( ( n , n ) ) i , j = np . where ( np . max ( CIJ ) == CIJ ) im = [ ...
The network backbone contains the dominant connections in the network and may be used to aid network visualization . This function computes the backbone of a given weighted and undirected connection matrix CIJ using a minimum - spanning - tree based algorithm .
13,552
def reorderMAT ( m , H = 5000 , cost = 'line' ) : from scipy import linalg , stats m = m . copy ( ) n = len ( m ) np . fill_diagonal ( m , 0 ) if cost == 'line' : profile = stats . norm . pdf ( range ( 1 , n + 1 ) , 0 , n / 2 ) [ : : - 1 ] elif cost == 'circ' : profile = stats . norm . pdf ( range ( 1 , n + 1 ) , n / 2...
This function reorders the connectivity matrix in order to place more edges closer to the diagonal . This often helps in displaying community structure clusters etc .
13,553
def reorder_matrix ( m1 , cost = 'line' , verbose = False , H = 1e4 , Texp = 10 , T0 = 1e-3 , Hbrk = 10 ) : from scipy import linalg , stats n = len ( m1 ) if n < 2 : raise BCTParamError ( "align_matrix will infinite loop on a singleton " "or null matrix." ) if cost == 'line' : profile = stats . norm . pdf ( range ( 1 ...
This function rearranges the nodes in matrix M1 such that the matrix elements are squeezed along the main diagonal . The function uses a version of simulated annealing .
13,554
def writetoPAJ ( CIJ , fname , directed ) : n = np . size ( CIJ , axis = 0 ) with open ( fname , 'w' ) as fd : fd . write ( '*vertices %i \r' % n ) for i in range ( 1 , n + 1 ) : fd . write ( '%i "%i" \r' % ( i , i ) ) if directed : fd . write ( '*arcs \r' ) else : fd . write ( '*edges \r' ) for i in range ( n ) : for ...
This function writes a Pajek . net file from a numpy matrix
13,555
def makeevenCIJ ( n , k , sz_cl , seed = None ) : rng = get_rng ( seed ) mx_lvl = int ( np . floor ( np . log2 ( n ) ) ) sz_cl -= 1 t = np . ones ( ( 2 , 2 ) ) * 2 Nlvl = 2 ** mx_lvl if Nlvl != n : print ( "Warning: n must be a power of 2" ) n = Nlvl for lvl in range ( 1 , mx_lvl ) : s = 2 ** ( lvl + 1 ) CIJ = np . one...
This function generates a random directed network with a specified number of fully connected modules linked together by evenly distributed remaining random connections .
13,556
def makerandCIJdegreesfixed ( inv , outv , seed = None ) : rng = get_rng ( seed ) n = len ( inv ) k = np . sum ( inv ) in_inv = np . zeros ( ( k , ) ) out_inv = np . zeros ( ( k , ) ) i_in = 0 i_out = 0 for i in range ( n ) : in_inv [ i_in : i_in + inv [ i ] ] = i out_inv [ i_out : i_out + outv [ i ] ] = i i_in += inv ...
This function generates a directed random network with a specified in - degree and out - degree sequence .
13,557
def makerandCIJ_dir ( n , k , seed = None ) : rng = get_rng ( seed ) ix , = np . where ( np . logical_not ( np . eye ( n ) ) . flat ) rp = rng . permutation ( np . size ( ix ) ) CIJ = np . zeros ( ( n , n ) ) CIJ . flat [ ix [ rp ] [ : k ] ] = 1 return CIJ
This function generates a directed random network
13,558
def randmio_dir ( R , itr , seed = None ) : rng = get_rng ( seed ) R = R . copy ( ) n = len ( R ) i , j = np . where ( R ) k = len ( i ) itr *= k max_attempts = np . round ( n * k / ( n * ( n - 1 ) ) ) eff = 0 for it in range ( int ( itr ) ) : att = 0 while att <= max_attempts : while True : e1 = rng . randint ( k ) e2...
This function randomizes a directed network while preserving the in - and out - degree distributions . In weighted networks the function preserves the out - strength but not the in - strength distributions .
13,559
def randmio_und ( R , itr , seed = None ) : if not np . all ( R == R . T ) : raise BCTParamError ( "Input must be undirected" ) rng = get_rng ( seed ) R = R . copy ( ) n = len ( R ) i , j = np . where ( np . tril ( R ) ) k = len ( i ) itr *= k max_attempts = np . round ( n * k / ( n * ( n - 1 ) ) ) eff = 0 for it in ra...
This function randomizes an undirected network while preserving the degree distribution . The function does not preserve the strength distribution in weighted networks .
13,560
def randmio_und_signed ( R , itr , seed = None ) : rng = get_rng ( seed ) R = R . copy ( ) n = len ( R ) itr *= int ( n * ( n - 1 ) / 2 ) max_attempts = int ( np . round ( n / 2 ) ) eff = 0 for it in range ( int ( itr ) ) : att = 0 while att <= max_attempts : a , b , c , d = pick_four_unique_nodes_quickly ( n , rng ) r...
This function randomizes an undirected weighted network with positive and negative weights while simultaneously preserving the degree distribution of positive and negative weights . The function does not preserve the strength distribution in weighted networks .
13,561
def evaluate_generative_model ( A , Atgt , D , eta , gamma = None , model_type = 'matching' , model_var = 'powerlaw' , epsilon = 1e-6 , seed = None ) : m = np . size ( np . where ( Atgt . flat ) ) // 2 n = len ( Atgt ) xk = np . sum ( Atgt , axis = 1 ) xc = clustering_coef_bu ( Atgt ) xb = betweenness_bin ( Atgt ) xe =...
Generates synthetic networks with parameters provided and evaluates their energy function . The energy function is defined as in Betzel et al . 2016 . Basically it takes the Kolmogorov - Smirnov statistics of 4 network measures ; comparing the degree distributions clustering coefficients betweenness centrality and Eucl...
13,562
def diversity_coef_sign ( W , ci ) : n = len ( W ) _ , ci = np . unique ( ci , return_inverse = True ) ci += 1 m = np . max ( ci ) def entropy ( w_ ) : S = np . sum ( w_ , axis = 1 ) Snm = np . zeros ( ( n , m ) ) for i in range ( m ) : Snm [ : , i ] = np . sum ( w_ [ : , ci == i + 1 ] , axis = 1 ) pnm = Snm / ( np . t...
The Shannon - entropy based diversity coefficient measures the diversity of intermodular connections of individual nodes and ranges from 0 to 1 .
13,563
def edge_betweenness_bin ( G ) : n = len ( G ) BC = np . zeros ( ( n , ) ) EBC = np . zeros ( ( n , n ) ) for u in range ( n ) : D = np . zeros ( ( n , ) ) D [ u ] = 1 NP = np . zeros ( ( n , ) ) NP [ u ] = 1 P = np . zeros ( ( n , n ) ) Q = np . zeros ( ( n , ) , dtype = int ) q = n - 1 Gu = G . copy ( ) V = np . arra...
Edge betweenness centrality is the fraction of all shortest paths in the network that contain a given edge . Edges with high values of betweenness centrality participate in a large number of shortest paths .
13,564
def erange ( CIJ ) : N = len ( CIJ ) K = np . size ( np . where ( CIJ ) [ 1 ] ) Erange = np . zeros ( ( N , N ) ) i , j = np . where ( CIJ ) for c in range ( len ( i ) ) : CIJcut = CIJ . copy ( ) CIJcut [ i [ c ] , j [ c ] ] = 0 R , D = reachdist ( CIJcut ) Erange [ i [ c ] , j [ c ] ] = D [ i [ c ] , j [ c ] ] eta = (...
Shortcuts are central edges which significantly reduce the characteristic path length in the network .
13,565
def module_degree_zscore ( W , ci , flag = 0 ) : _ , ci = np . unique ( ci , return_inverse = True ) ci += 1 if flag == 2 : W = W . copy ( ) W = W . T elif flag == 3 : W = W . copy ( ) W = W + W . T n = len ( W ) Z = np . zeros ( ( n , ) ) for i in range ( 1 , int ( np . max ( ci ) + 1 ) ) : Koi = np . sum ( W [ np . i...
The within - module degree z - score is a within - module version of degree centrality .
13,566
def pagerank_centrality ( A , d , falff = None ) : from scipy import linalg N = len ( A ) if falff is None : norm_falff = np . ones ( ( N , ) ) / N else : norm_falff = falff / np . sum ( falff ) deg = np . sum ( A , axis = 0 ) deg [ deg == 0 ] = 1 D1 = np . diag ( 1 / deg ) B = np . eye ( N ) - d * np . dot ( A , D1 ) ...
The PageRank centrality is a variant of eigenvector centrality . This function computes the PageRank centrality of each vertex in a graph .
13,567
def subgraph_centrality ( CIJ ) : from scipy import linalg vals , vecs = linalg . eig ( CIJ ) Cs = np . real ( np . dot ( vecs * vecs , np . exp ( vals ) ) ) return Cs
The subgraph centrality of a node is a weighted sum of closed walks of different lengths in the network starting and ending at the node . This function returns a vector of subgraph centralities for each node of the network .
13,568
def invert ( W , copy = True ) : if copy : W = W . copy ( ) E = np . where ( W ) W [ E ] = 1. / W [ E ] return W
Inverts elementwise the weights in an input connection matrix . In other words change the from the matrix of internode strengths to the matrix of internode distances .
13,569
def ci2ls ( ci ) : if not np . size ( ci ) : return ci _ , ci = np . unique ( ci , return_inverse = True ) ci += 1 nr_indices = int ( max ( ci ) ) ls = [ ] for c in range ( nr_indices ) : ls . append ( [ ] ) for i , x in enumerate ( ci ) : ls [ ci [ i ] - 1 ] . append ( i ) return ls
Convert from a community index vector to a 2D python list of modules The list is a pure python list not requiring numpy .
13,570
def ls2ci ( ls , zeroindexed = False ) : if ls is None or np . size ( ls ) == 0 : return ( ) nr_indices = sum ( map ( len , ls ) ) ci = np . zeros ( ( nr_indices , ) , dtype = int ) z = int ( not zeroindexed ) for i , x in enumerate ( ls ) : for j , y in enumerate ( ls [ i ] ) : ci [ ls [ i ] [ j ] ] = i + z return ci
Convert from a 2D python list of modules to a community index vector . The list is a pure python list not requiring numpy .
13,571
def _safe_squeeze ( arr , * args , ** kwargs ) : out = np . squeeze ( arr , * args , ** kwargs ) if np . ndim ( out ) == 0 : out = out . reshape ( ( 1 , ) ) return out
numpy . squeeze will reduce a 1 - item array down to a zero - dimensional array which is not necessarily desirable . This function does the squeeze operation but ensures that there is at least 1 dimension in the output .
13,572
def modularity_und_sign ( W , ci , qtype = 'sta' ) : n = len ( W ) _ , ci = np . unique ( ci , return_inverse = True ) ci += 1 W0 = W * ( W > 0 ) W1 = - W * ( W < 0 ) s0 = np . sum ( W0 ) s1 = np . sum ( W1 ) Knm0 = np . zeros ( ( n , n ) ) Knm1 = np . zeros ( ( n , n ) ) for m in range ( int ( np . max ( ci ) ) ) : Kn...
This function simply calculates the signed modularity for a given partition . It does not do automatic partition generation right now .
13,573
def partition_distance ( cx , cy ) : n = np . size ( cx ) _ , cx = np . unique ( cx , return_inverse = True ) _ , cy = np . unique ( cy , return_inverse = True ) _ , cxy = np . unique ( cx + cy * 1j , return_inverse = True ) cx += 1 cy += 1 cxy += 1 Px = np . histogram ( cx , bins = np . max ( cx ) ) [ 0 ] / n Py = np ...
This function quantifies the distance between pairs of community partitions with information theoretic measures .
13,574
def breadth ( CIJ , source ) : n = len ( CIJ ) white = 0 gray = 1 black = 2 color = np . zeros ( ( n , ) ) distance = np . inf * np . ones ( ( n , ) ) branch = np . zeros ( ( n , ) ) color [ source ] = gray distance [ source ] = 0 branch [ source ] = - 1 Q = [ source ] while Q : u = Q [ 0 ] ns , = np . where ( CIJ [ u ...
Implementation of breadth - first search .
13,575
def charpath ( D , include_diagonal = False , include_infinite = True ) : D = D . copy ( ) if not include_diagonal : np . fill_diagonal ( D , np . nan ) if not include_infinite : D [ np . isinf ( D ) ] = np . nan Dv = D [ np . logical_not ( np . isnan ( D ) ) ] . ravel ( ) lambda_ = np . mean ( Dv ) efficiency = np . m...
The characteristic path length is the average shortest path length in the network . The global efficiency is the average inverse shortest path length in the network .
13,576
def cycprob ( Pq ) : fcyc = np . zeros ( np . size ( Pq , axis = 2 ) ) for q in range ( np . size ( Pq , axis = 2 ) ) : if np . sum ( Pq [ : , : , q ] ) > 0 : fcyc [ q ] = np . sum ( np . diag ( Pq [ : , : , q ] ) ) / np . sum ( Pq [ : , : , q ] ) else : fcyc [ q ] = 0 pcyc = np . zeros ( np . size ( Pq , axis = 2 ) ) ...
Cycles are paths which begin and end at the same node . Cycle probability for path length d is the fraction of all paths of length d - 1 that may be extended to form cycles of length d .
13,577
def distance_wei_floyd ( adjacency , transform = None ) : if transform is not None : if transform == 'log' : if np . logical_or ( adjacency > 1 , adjacency < 0 ) . any ( ) : raise ValueError ( "Connection strengths must be in the " + "interval [0,1) to use the transform " + "-log(w_ij)." ) SPL = - np . log ( adjacency ...
Computes the topological length of the shortest possible path connecting every pair of nodes in the network .
13,578
def findwalks ( CIJ ) : CIJ = binarize ( CIJ , copy = True ) n = len ( CIJ ) Wq = np . zeros ( ( n , n , n ) ) CIJpwr = CIJ . copy ( ) Wq [ : , : , 1 ] = CIJ for q in range ( n ) : CIJpwr = np . dot ( CIJpwr , CIJ ) Wq [ : , : , q ] = CIJpwr twalk = np . sum ( Wq ) wlq = np . sum ( np . sum ( Wq , axis = 0 ) , axis = 0...
Walks are sequences of linked nodes that may visit a single node more than once . This function finds the number of walks of a given length between any two nodes .
13,579
def mean_first_passage_time ( adjacency ) : P = np . linalg . solve ( np . diag ( np . sum ( adjacency , axis = 1 ) ) , adjacency ) n = len ( P ) D , V = np . linalg . eig ( P . T ) aux = np . abs ( D - 1 ) index = np . where ( aux == aux . min ( ) ) [ 0 ] if aux [ index ] > 10e-3 : raise ValueError ( "Cannot find eige...
Calculates mean first passage time of adjacency
13,580
def teachers_round ( x ) : if ( ( x > 0 ) and ( x % 1 >= 0.5 ) ) or ( ( x < 0 ) and ( x % 1 > 0.5 ) ) : return int ( np . ceil ( x ) ) else : return int ( np . floor ( x ) )
Do rounding such that . 5 always rounds to 1 and not bankers rounding . This is for compatibility with matlab functions and ease of testing .
13,581
def dummyvar ( cis , return_sparse = False ) : n = np . size ( cis , axis = 0 ) m = np . size ( cis , axis = 1 ) r = np . sum ( ( np . max ( len ( np . unique ( cis [ : , i ] ) ) ) ) for i in range ( m ) ) nnz = np . prod ( cis . shape ) ix = np . argsort ( cis , axis = 0 ) s_cis = cis [ ix ] [ : , range ( m ) , range ...
This is an efficient implementation of matlab s dummyvar command using sparse matrices .
13,582
def assortativity_bin ( CIJ , flag = 0 ) : if flag == 0 : deg = degrees_und ( CIJ ) i , j = np . where ( np . triu ( CIJ , 1 ) > 0 ) K = len ( i ) degi = deg [ i ] degj = deg [ j ] else : id , od , deg = degrees_dir ( CIJ ) i , j = np . where ( CIJ > 0 ) K = len ( i ) if flag == 1 : degi = od [ i ] degj = id [ j ] elif...
The assortativity coefficient is a correlation coefficient between the degrees of all nodes on two opposite ends of a link . A positive assortativity coefficient indicates that nodes tend to link to other nodes with the same or similar degree .
13,583
def kcore_bd ( CIJ , k , peel = False ) : if peel : peelorder , peellevel = ( [ ] , [ ] ) iter = 0 CIJkcore = CIJ . copy ( ) while True : id , od , deg = degrees_dir ( CIJkcore ) ff , = np . where ( np . logical_and ( deg < k , deg > 0 ) ) if ff . size == 0 : break iter += 1 CIJkcore [ ff , : ] = 0 CIJkcore [ : , ff ] ...
The k - core is the largest subnetwork comprising nodes of degree at least k . This function computes the k - core for a given binary directed connection matrix by recursively peeling off nodes with degree lower than k until no such nodes remain .
13,584
def kcore_bu ( CIJ , k , peel = False ) : if peel : peelorder , peellevel = ( [ ] , [ ] ) iter = 0 CIJkcore = CIJ . copy ( ) while True : deg = degrees_und ( CIJkcore ) ff , = np . where ( np . logical_and ( deg < k , deg > 0 ) ) if ff . size == 0 : break iter += 1 CIJkcore [ ff , : ] = 0 CIJkcore [ : , ff ] = 0 if pee...
The k - core is the largest subnetwork comprising nodes of degree at least k . This function computes the k - core for a given binary undirected connection matrix by recursively peeling off nodes with degree lower than k until no such nodes remain .
13,585
def score_wu(CIJ, s):
    """Compute the s-core of a weighted undirected connection matrix.

    The s-core is the largest subnetwork whose nodes all have strength
    of at least ``s``.  Nodes with strength below ``s`` are peeled off
    iteratively — strengths are recomputed after every peel — until no
    such node remains.  Analogous to the k-core, but strength-based.

    Parameters
    ----------
    CIJ : NxN np.ndarray
        Weighted undirected connection matrix.  Not modified; the
        peeling operates on a copy.
    s : float
        Strength threshold.

    Returns
    -------
    CIJscore : NxN np.ndarray
        Connection matrix of the s-core; rows and columns of peeled
        nodes are zeroed out.
    sn : int
        Size of the s-core (number of nodes with nonzero strength).
    """
    CIJscore = CIJ.copy()
    while True:
        # Recompute strengths on the current (partially peeled) matrix.
        # Renamed from `str` to avoid shadowing the builtin.
        strengths = strengths_und(CIJscore)
        # Nodes still connected but below the threshold get peeled.
        ff, = np.where(np.logical_and(strengths < s, strengths > 0))
        if ff.size == 0:
            # Every remaining connected node meets the threshold.
            break
        CIJscore[ff, :] = 0
        CIJscore[:, ff] = 0
    # `strengths` holds the strengths of the final matrix here, so this
    # counts exactly the surviving s-core nodes.
    sn = np.sum(strengths > 0)
    return CIJscore, sn
The s - core is the largest subnetwork comprising nodes of strength at least s . This function computes the s - core for a given weighted undirected connection matrix . Computation is analogous to the more widely used k - core but is based on node strengths instead of node degrees .
13,586
def find_pad_index(self, array):
    """Locate the first occurrence of the padding value.

    Parameters
    ----------
    array : sequence
        A single (possibly padded) label sequence.

    Returns
    -------
    int
        Index of the first ``self.pad_value`` entry, or the sequence
        length when no padding is present.
    """
    seq = list(array)
    if self.pad_value in seq:
        return seq.index(self.pad_value)
    return len(seq)
Find the index of the first padding value in the array, or the array length if no padding is present.
13,587
def get_length(self, y):
    """Return the unpadded length of every sequence in ``y``.

    Parameters
    ----------
    y : iterable of sequences
        Batch of (possibly padded) label sequences.

    Returns
    -------
    list of int
        True length of each sequence, as determined by
        ``self.find_pad_index``.
    """
    return [self.find_pad_index(sequence) for sequence in y]
Get the true (unpadded) length of each sequence in y.
13,588
def convert_idx_to_name(self, y, lens):
    """Translate label indices into label names.

    Parameters
    ----------
    y : iterable of sequences
        Batch of label-index sequences.
    lens : iterable of int
        True (unpadded) length of each sequence; entries beyond this
        length are dropped.

    Returns
    -------
    list of list
        Label names looked up in ``self.id2label``, one inner list per
        sequence.
    """
    named = []
    for row, true_len in zip(y, lens):
        # Truncate the row to its real length before mapping ids.
        named.append([self.id2label[idx] for idx in row[:true_len]])
    return named
Convert label index to name .
13,589
def predict(self, X, y):
    """Run the model on one batch and decode gold and predicted labels.

    Parameters
    ----------
    X : array-like
        Model input batch, passed straight to
        ``self.model.predict_on_batch``.
    y : np.ndarray
        One-hot (or probability) encoded gold labels.

    Returns
    -------
    tuple of (list, list)
        ``(y_true, y_pred)`` as lists of label-name sequences,
        truncated to the unpadded length of each gold sequence.
    """
    raw_pred = self.model.predict_on_batch(X)
    # Collapse the class axis to label indices for gold and prediction.
    true_idx = np.argmax(y, -1)
    pred_idx = np.argmax(raw_pred, -1)
    # Lengths come from the gold labels and are applied to both sides.
    lens = self.get_length(true_idx)
    return (self.convert_idx_to_name(true_idx, lens),
            self.convert_idx_to_name(pred_idx, lens))
Predict sequences .
13,590
def score(self, y_true, y_pred):
    """Compute, print and return the F1 score.

    Also prints a per-label classification report (4 digits).

    Parameters
    ----------
    y_true, y_pred : list of list of str
        Gold and predicted label-name sequences.

    Returns
    -------
    float
        F1 score in [0, 1] as computed by ``f1_score``.
    """
    # Renamed local from `score` so it no longer shadows the method name.
    f1 = f1_score(y_true, y_pred)
    print(' - f1: {:04.2f}'.format(f1 * 100))
    print(classification_report(y_true, y_pred, digits=4))
    return f1
Calculate f1 score .
13,591
def get_entities ( seq , suffix = False ) : if any ( isinstance ( s , list ) for s in seq ) : seq = [ item for sublist in seq for item in sublist + [ 'O' ] ] prev_tag = 'O' prev_type = '' begin_offset = 0 chunks = [ ] for i , chunk in enumerate ( seq + [ 'O' ] ) : if suffix : tag = chunk [ - 1 ] type_ = chunk . split (...
Gets entities from sequence .
13,592
def end_of_chunk ( prev_tag , tag , prev_type , type_ ) : chunk_end = False if prev_tag == 'E' : chunk_end = True if prev_tag == 'S' : chunk_end = True if prev_tag == 'B' and tag == 'B' : chunk_end = True if prev_tag == 'B' and tag == 'S' : chunk_end = True if prev_tag == 'B' and tag == 'O' : chunk_end = True if prev_t...
Checks if a chunk ended between the previous and current word .
13,593
def start_of_chunk ( prev_tag , tag , prev_type , type_ ) : chunk_start = False if tag == 'B' : chunk_start = True if tag == 'S' : chunk_start = True if prev_tag == 'E' and tag == 'E' : chunk_start = True if prev_tag == 'E' and tag == 'I' : chunk_start = True if prev_tag == 'S' and tag == 'E' : chunk_start = True if pr...
Checks if a chunk started between the previous and current word .
13,594
def f1_score ( y_true , y_pred , average = 'micro' , suffix = False ) : true_entities = set ( get_entities ( y_true , suffix ) ) pred_entities = set ( get_entities ( y_pred , suffix ) ) nb_correct = len ( true_entities & pred_entities ) nb_pred = len ( pred_entities ) nb_true = len ( true_entities ) p = nb_correct / nb...
Compute the F1 score .
13,595
def precision_score ( y_true , y_pred , average = 'micro' , suffix = False ) : true_entities = set ( get_entities ( y_true , suffix ) ) pred_entities = set ( get_entities ( y_pred , suffix ) ) nb_correct = len ( true_entities & pred_entities ) nb_pred = len ( pred_entities ) score = nb_correct / nb_pred if nb_pred > 0 ...
Compute the precision .
13,596
def recall_score ( y_true , y_pred , average = 'micro' , suffix = False ) : true_entities = set ( get_entities ( y_true , suffix ) ) pred_entities = set ( get_entities ( y_pred , suffix ) ) nb_correct = len ( true_entities & pred_entities ) nb_true = len ( true_entities ) score = nb_correct / nb_true if nb_true > 0 els...
Compute the recall .
13,597
def classification_report ( y_true , y_pred , digits = 2 , suffix = False ) : true_entities = set ( get_entities ( y_true , suffix ) ) pred_entities = set ( get_entities ( y_pred , suffix ) ) name_width = 0 d1 = defaultdict ( set ) d2 = defaultdict ( set ) for e in true_entities : d1 [ e [ 0 ] ] . add ( ( e [ 1 ] , e [...
Build a text report showing the main classification metrics .
13,598
def _timedelta_to_seconds ( td ) : if isinstance ( td , numbers . Real ) : td = datetime . timedelta ( seconds = td ) return td . total_seconds ( )
Convert a datetime.timedelta object (or a plain number of seconds) into a seconds interval for rotating file output.
13,599
def getLogger(name=None, **kwargs):
    """Return a cached ``KeywordArgumentAdapter`` for ``name``.

    The first call for a given name wraps the stdlib logger in a
    ``KeywordArgumentAdapter`` and caches it in ``_LOGGERS``; later
    calls return the cached adapter (their ``kwargs`` are not applied,
    since the adapter is only built once).

    Parameters
    ----------
    name : str or None
        Logger name; ``None`` selects the root logger.
    **kwargs
        Keyword context bound into the adapter on first creation.

    Returns
    -------
    KeywordArgumentAdapter
        The adapter wrapping ``logging.getLogger(name)``.
    """
    cached = _LOGGERS.get(name)
    if not cached:
        cached = KeywordArgumentAdapter(logging.getLogger(name), kwargs)
        _LOGGERS[name] = cached
    return cached
Build a logger with the given name .