idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
18,300
def starting_at(self, datetime_or_str):
    """Set the starting time for the cron job.

    Accepts either a parseable datetime string or a ``datetime`` object.
    If never called, the job starts at the beginning of the current interval.
    Returns ``self`` so calls can be chained.
    """
    if isinstance(datetime_or_str, datetime.datetime):
        self._starting_at = datetime_or_str
    elif isinstance(datetime_or_str, str):
        # `parse` is the dateutil parser imported at module level
        self._starting_at = parse(datetime_or_str)
    else:
        raise ValueError('.starting_at() method can only take strings or datetime objects')
    return self
Set the starting time for the cron job . If not specified the starting time will always be the beginning of the interval that is current when the cron is started .
18,301
def run(self, func, *func_args, **func_kwargs):
    """Specify the function to run at the scheduled times.

    :param func: the callable to schedule
    :param func_args: positional arguments forwarded to ``func``
    :param func_kwargs: keyword arguments forwarded to ``func``
    :returns: ``self`` (fluent interface)
    """
    # Fix: parameter was misspelled ``func__kwargs`` (double underscore);
    # purely internal, so renaming is invisible to callers.
    self._func = func
    self._func_args = func_args
    self._func_kwargs = func_kwargs
    return self
Specify the function to run at the scheduled times
18,302
def _get_target ( self ) : if None in [ self . _func , self . _func_kwargs , self . _func_kwargs , self . _every_kwargs ] : raise ValueError ( 'You must call the .every() and .run() methods on every tab.' ) return self . _loop
returns a callable with no arguments designed to be the target of a Subprocess
18,303
def wrapped_target(target, q_stdout, q_stderr, q_error, robust, name, *args, **kwargs):
    """Wrap ``target`` so its stdout/stderr are redirected into queues.

    Runs ``target(*args, **kwargs)`` with ``sys.stdout``/``sys.stderr``
    replaced by queue-backed writers so a parent process can collect output.
    When ``robust`` is falsy, any exception is logged, the tab's ``name`` is
    pushed onto ``q_error`` so the parent can react, and the exception is
    re-raised; when ``robust`` is truthy the exception propagates untouched.
    """
    import sys
    # Redirect this process's output streams into the parent-visible queues.
    sys.stdout = IOQueue(q_stdout)
    sys.stderr = IOQueue(q_stderr)
    try:
        target(*args, **kwargs)
    except:
        if not robust:
            # Log the full traceback under the tab's name before signalling.
            s = 'Error in tab\n' + traceback.format_exc()
            logger = daiquiri.getLogger(name)
            logger.error(s)
        else:
            raise
        if not robust:
            # Tell the parent which tab failed.
            q_error.put(name)
        raise
Wraps a target with queues replacing stdout and stderr
18,304
def loop(self, max_seconds=None):
    """Main loop for the process; runs until stopped or ``max_seconds`` elapses.

    Each iteration: drains the error queue, optionally checks the time
    budget, (re)starts any subprocess that is not alive, and forwards the
    children's stdout/stderr queues to this process's streams.

    :param max_seconds: optional wall-clock budget; loop breaks once exceeded
    """
    loop_started = datetime.datetime.now()
    self._is_running = True
    while self._is_running:
        # Propagate errors reported by child processes first.
        self.process_error_queue(self.q_error)
        if max_seconds is not None:
            if (datetime.datetime.now() - loop_started).total_seconds() > max_seconds:
                break
        for subprocess in self._subprocesses:
            if not subprocess.is_alive():
                # NOTE(review): assumes these subprocess objects can be
                # (re)started when not alive — TODO confirm against the
                # subprocess wrapper used elsewhere in this project.
                subprocess.start()
        self.process_io_queue(self.q_stdout, sys.stdout)
        self.process_io_queue(self.q_stderr, sys.stderr)
Main loop for the process. This will run continuously until stopped or until `max_seconds` has elapsed.
18,305
def escape(string, escape_pattern):
    """Assistant function for string escaping.

    Translate *string* using *escape_pattern* (a ``str.maketrans``-style
    table). Non-string input is converted to ``str`` first, with a warning.
    """
    try:
        return string.translate(escape_pattern)
    except AttributeError:
        warnings.warn("Non-string-like data passed. "
                      "Attempting to convert to 'str'.")
        # Fix: the fallback previously translated with the module-level
        # ``tag_escape`` table instead of the caller-supplied pattern.
        return str(string).translate(escape_pattern)
Assistant function for string escaping
18,306
def _make_serializer(meas, schema, rm_none, extra_tags, placeholder):
    """Factory of line-protocol serializers.

    Builds (via ``eval``) a one-argument lambda that formats an instance of a
    user-defined class into an InfluxDB line-protocol ``bytes`` record,
    according to the attribute types declared in ``schema``.
    """
    _validate_schema(schema, placeholder)
    tags = []
    fields = []
    ts = None
    meas = meas
    # Translate each schema entry into an f-string fragment referencing
    # attribute ``i.<k>`` of the instance being serialized.
    for k, t in schema.items():
        if t is MEASUREMENT:
            meas = f"{{i.{k}}}"
        elif t is TIMEINT:
            ts = f"{{i.{k}}}"
        elif t is TIMESTR:
            # Prefer pandas timestamp parsing when pandas is available.
            if pd:
                ts = f"{{pd.Timestamp(i.{k} or 0).value}}"
            else:
                ts = f"{{dt_to_int(str_to_dt(i.{k}))}}"
        elif t is TIMEDT:
            if pd:
                ts = f"{{pd.Timestamp(i.{k} or 0).value}}"
            else:
                ts = f"{{dt_to_int(i.{k})}}"
        elif t is TAG:
            tags.append(f"{k}={{str(i.{k}).translate(tag_escape)}}")
        elif t is TAGENUM:
            tags.append(f"{k}={{getattr(i.{k}, 'name', i.{k} or None)}}")
        elif t in (FLOAT, BOOL):
            fields.append(f"{k}={{i.{k}}}")
        elif t is INT:
            # Integer fields carry a trailing 'i' in line protocol.
            fields.append(f"{k}={{i.{k}}}i")
        elif t is STR:
            fields.append(f"{k}=\\\"{{str(i.{k}).translate(str_escape)}}\\\"")
        elif t is ENUM:
            fields.append(f"{k}=\\\"{{getattr(i.{k}, 'name', i.{k} or None)}}\\\"")
        else:
            raise SchemaError(f"Invalid attribute type {k!r}: {t!r}")
    extra_tags = extra_tags or {}
    for k, v in extra_tags.items():
        tags.append(f"{k}={v}")
    if placeholder:
        # Dummy field so records with no real fields are still valid.
        fields.insert(0, f"_=true")
    sep = ',' if tags else ''
    ts = f' {ts}' if ts else ''
    fmt = f"{meas}{sep}{','.join(tags)} {','.join(fields)}{ts}"
    if rm_none:
        # Post-process the rendered record to strip fields whose value is None.
        pat = r',\w+="?None"?i?'
        f = eval('lambda i: re.sub(r\'{}\', "", f"{}").encode()'.format(pat, fmt))
    else:
        f = eval('lambda i: f"{}".encode()'.format(fmt))
    f.__doc__ = "Returns InfluxDB line protocol representation of user-defined class"
    # Keep construction arguments around for introspection/debugging.
    f._args = dict(meas=meas, schema=schema, rm_none=rm_none,
                   extra_tags=extra_tags, placeholder=placeholder)
    return f
Factory of line protocol parsers
18,307
def lineprotocol(cls=None, *,
                 schema: Optional[Mapping[str, type]] = None,
                 rm_none: bool = False,
                 extra_tags: Optional[Mapping[str, str]] = None,
                 placeholder: bool = False):
    """Class decorator adding a ``to_lineprotocol`` method to user classes.

    Usable bare (``@lineprotocol``) or with keyword options. The schema
    defaults to the class's ``__annotations__``.
    """
    def _lineprotocol(cls):
        attr_schema = schema or getattr(cls, '__annotations__', {})
        serializer = _make_serializer(cls.__name__, attr_schema,
                                      rm_none, extra_tags, placeholder)
        cls.to_lineprotocol = serializer
        return cls

    if cls:
        return _lineprotocol(cls)
    return _lineprotocol
Adds to_lineprotocol method to arbitrary user - defined classes
18,308
def _serialize_fields(point):
    """Serialize the ``fields`` of a point dict into line-protocol pairs.

    Field values can be floats, integers, strings or booleans; ``None``
    values are skipped entirely.
    """
    parts = []
    for key, value in point['fields'].items():
        key = escape(key, key_escape)
        # bool must be tested before int (bool is an int subclass).
        if isinstance(value, bool):
            parts.append(f'{key}={value}')
        elif isinstance(value, int):
            parts.append(f'{key}={value}i')
        elif isinstance(value, str):
            parts.append(f'{key}="{value.translate(str_escape)}"')
        elif value is None:
            continue
        else:
            parts.append(f'{key}={value}')
    return ','.join(parts)
Field values can be floats integers strings or Booleans .
18,309
def serialize(data, measurement=None, tag_columns=None, **extra_tags):
    """Convert supported input types into line-protocol ``bytes``.

    Accepts bytes/str as-is, objects exposing ``to_lineprotocol``,
    DataFrames, mappings, and iterables of any of the above (joined
    with newlines via recursion).
    """
    if isinstance(data, bytes):
        return data
    if isinstance(data, str):
        return data.encode('utf-8')
    if hasattr(data, 'to_lineprotocol'):
        return data.to_lineprotocol()
    if pd is not None and isinstance(data, pd.DataFrame):
        return dataframe.serialize(data, measurement, tag_columns, **extra_tags)
    if isinstance(data, dict):
        return mapping.serialize(data, measurement, **extra_tags)
    if hasattr(data, '__iter__'):
        return b'\n'.join(serialize(item, measurement, tag_columns, **extra_tags)
                          for item in data)
    raise ValueError('Invalid input', data)
Converts input data into line protocol format
18,310
def iterpoints(resp: dict, parser: Optional[Callable] = None) -> Iterator[Any]:
    """Iterate a response JSON, yielding data point by point.

    NOTE: returns a generator over the FIRST series of the first statement
    that has one — subsequent series are not yielded (the ``return`` inside
    the loops exits immediately). If ``parser`` accepts a ``meta`` keyword,
    the series metadata (plus ``statement_id``) is passed to it per row.
    Returns an empty iterator when no series is present.
    """
    for statement in resp['results']:
        if 'series' not in statement:
            continue
        for series in statement['series']:
            if parser is None:
                return (x for x in series['values'])
            elif 'meta' in inspect.signature(parser).parameters:
                # Everything except the raw values is considered metadata.
                meta = {k: series[k] for k in series if k != 'values'}
                meta['statement_id'] = statement['statement_id']
                return (parser(*x, meta=meta) for x in series['values'])
            else:
                return (parser(*x) for x in series['values'])
    return iter([])
Iterates a response JSON yielding data point by point .
18,311
def parse(resp) -> DataFrameType:
    """Make a dictionary of DataFrames from a response object.

    Unwraps aggressively: a single statement with a single series returns
    the bare DataFrame; a single statement with several series returns a
    dict keyed by series name; multiple statements return a list of dicts.
    """
    statements = []
    for statement in resp['results']:
        series = {}
        for s in statement.get('series', []):
            series[_get_name(s)] = _drop_zero_index(_serializer(s))
        statements.append(series)
    if len(statements) == 1:
        series: dict = statements[0]
        if len(series) == 1:
            # Single series -> return the DataFrame directly.
            return list(series.values())[0]
        else:
            return series
    return statements
Makes a dictionary of DataFrames from a response object
18,312
def _itertuples ( df ) : cols = [ df . iloc [ : , k ] for k in range ( len ( df . columns ) ) ] return zip ( df . index , * cols )
Custom implementation of DataFrame . itertuples that returns plain tuples instead of namedtuples . About 50% faster .
18,313
def serialize(df, measurement, tag_columns=None, **extra_tags) -> bytes:
    """Convert a Pandas DataFrame into InfluxDB line protocol format.

    The DataFrame must have a DatetimeIndex (used as the timestamp).
    Columns named in ``tag_columns`` become tags; remaining columns become
    typed fields. Rows containing NaNs are rendered then scrubbed via
    regex replacements from ``_replace``.

    :raises ValueError: if ``measurement`` is None or index is not datetime
    """
    if measurement is None:
        raise ValueError("Missing 'measurement'")
    if not isinstance(df.index, pd.DatetimeIndex):
        raise ValueError('DataFrame index is not DatetimeIndex')
    tag_columns = set(tag_columns or [])
    isnull = df.isnull().any(axis=1)
    tags = []
    fields = []
    for k, v in extra_tags.items():
        tags.append(f"{k}={escape(v, key_escape)}")
    for i, (k, v) in enumerate(df.dtypes.items()):
        k = k.translate(key_escape)
        if k in tag_columns:
            tags.append(f"{k}={{p[{i+1}]}}")
        elif issubclass(v.type, np.integer):
            fields.append(f"{k}={{p[{i+1}]}}i")
        elif issubclass(v.type, (np.floating, np.bool_)):
            # Fix: ``np.float`` (a deprecated alias of builtin float) was
            # removed in NumPy 1.24; ``np.floating`` covers all float dtypes.
            fields.append(f"{k}={{p[{i+1}]}}")
        else:
            fields.append(f"{k}=\"{{p[{i+1}]}}\"")
    # Build one f-string template; p[0] is the timestamp from the index.
    fmt = (f'{measurement}', f'{"," if tags else ""}', ','.join(tags),
           ' ', ','.join(fields), ' {p[0].value}')
    f = eval("lambda p: f'{}'".format(''.join(fmt)))
    if isnull.any():
        # Render clean rows fast, then regex-scrub rows containing NaNs.
        lp = map(f, _itertuples(df[~isnull]))
        rep = _replace(df)
        lp_nan = (reduce(lambda a, b: re.sub(*b, a), rep, f(p))
                  for p in _itertuples(df[isnull]))
        return '\n'.join(chain(lp, lp_nan)).encode('utf-8')
    else:
        return '\n'.join(map(f, _itertuples(df))).encode('utf-8')
Converts a Pandas DataFrame into line protocol format
18,314
def runner(coro):
    """Function execution decorator.

    Wraps a coroutine method so that it is returned as-is in ``async`` mode
    and executed to completion on ``self._loop`` otherwise.
    """
    @wraps(coro)
    def inner(self, *args, **kwargs):
        pending = coro(self, *args, **kwargs)
        if self.mode == 'async':
            return pending
        return self._loop.run_until_complete(pending)
    return inner
Function execution decorator .
18,315
def _check_error ( response ) : if 'error' in response : raise InfluxDBError ( response [ 'error' ] ) elif 'results' in response : for statement in response [ 'results' ] : if 'error' in statement : msg = '{d[error]} (statement {d[statement_id]})' raise InfluxDBError ( msg . format ( d = statement ) )
Checks for JSON error messages and raises Python exception
18,316
def create_magic_packet(macaddress):
    """Create a Wake-on-LAN magic packet for the given MAC address.

    Accepts a bare 12-hex-digit MAC or a 17-character separated form
    (e.g. ``aa:bb:cc:dd:ee:ff``). The packet is 6 bytes of 0xFF followed
    by the MAC repeated 16 times.

    :raises ValueError: for any other address length
    """
    if len(macaddress) == 17:
        separator = macaddress[2]
        macaddress = macaddress.replace(separator, '')
    elif len(macaddress) != 12:
        raise ValueError('Incorrect MAC address format')
    payload = b'FFFFFFFFFFFF' + (macaddress * 16).encode()
    return b''.join(
        struct.pack(b'B', int(payload[i:i + 2], 16))
        for i in range(0, len(payload), 2)
    )
Create a magic packet .
18,317
def send_magic_packet(*macs, **kwargs):
    """Wake up computers having any of the given MAC addresses.

    Keyword-only options (taken from ``kwargs`` for py2 compatibility):
    ``ip_address`` (default ``BROADCAST_IP``) and ``port``
    (default ``DEFAULT_PORT``). Any other keyword raises ``TypeError``.
    """
    ip = kwargs.pop('ip_address', BROADCAST_IP)
    port = kwargs.pop('port', DEFAULT_PORT)
    for unexpected in kwargs:
        raise TypeError('send_magic_packet() got an unexpected keyword '
                        'argument {!r}'.format(unexpected))
    packets = [create_magic_packet(mac) for mac in macs]
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
    sock.connect((ip, port))
    for packet in packets:
        sock.send(packet)
    sock.close()
Wake up computers having any of the given mac addresses .
18,318
def main(argv=None):
    """Run wake on lan as a CLI application.

    :param argv: argument list (defaults to ``sys.argv[1:]``)
    """
    parser = argparse.ArgumentParser(
        description='Wake one or more computers using the wake on lan protocol.')
    parser.add_argument(
        'macs', metavar='mac address', nargs='+',
        help='The mac addresses or of the computers you are trying to wake.')
    parser.add_argument(
        '-i', metavar='ip', default=BROADCAST_IP,
        help='The ip address of the host to send the magic packet to.'
             ' (default {})'.format(BROADCAST_IP))
    parser.add_argument(
        '-p', metavar='port', type=int, default=DEFAULT_PORT,
        help='The port of the host to send the magic packet to (default 9)')
    args = parser.parse_args(argv)
    send_magic_packet(*args.macs, ip_address=args.i, port=args.p)
Run wake on lan as a CLI application .
18,319
def mjml(parser, token):
    """Django template tag: compile MJML after rendering the inner template.

    Collects everything up to ``{% endmjml %}`` and wraps it in an
    ``MJMLRenderNode``. The tag itself accepts no arguments.
    """
    nodelist = parser.parse(('endmjml',))
    parser.delete_first_token()
    tokens = token.split_contents()
    if len(tokens) != 1:
        raise template.TemplateSyntaxError("'%r' tag doesn't receive any arguments." % tokens[0])
    return MJMLRenderNode(nodelist)
Compile MJML template after render django template .
18,320
def parse_header_line(self, line):
    """Parse the ``#CHROM`` header line of a VCF and store column names.

    Falls back to whitespace splitting when the line is not tab-delimited.
    Sample (individual) IDs are the columns after the 9 fixed VCF fields.
    """
    columns = line[1:].rstrip().split('\t')
    if len(columns) < 9:
        # Not tab-delimited; retry on arbitrary whitespace.
        columns = line[1:].rstrip().split()
    self.header = columns
    self.individuals = columns[9:]
Parse the #CHROM header line of a VCF file, storing the column names and the sample (individual) IDs.
18,321
def print_header(self):
    """Return a list with the header lines in proper VCF format.

    Emits ``##fileformat``, optional ``##fileDate``, then all FILTER/
    FORMAT/INFO/contig/ALT/other metadata lines, and finally the
    ``#``-prefixed column header row.
    """
    lines_to_print = []
    lines_to_print.append('##fileformat=' + self.fileformat)
    if self.filedate:
        # Fix: this previously appended the fileformat line a second time
        # instead of the file date.
        lines_to_print.append('##fileDate=' + self.filedate)
    for filt in self.filter_dict:
        lines_to_print.append(self.filter_dict[filt])
    for form in self.format_dict:
        lines_to_print.append(self.format_dict[form])
    for info in self.info_dict:
        lines_to_print.append(self.info_dict[info])
    for contig in self.contig_dict:
        lines_to_print.append(self.contig_dict[contig])
    for alt in self.alt_dict:
        lines_to_print.append(self.alt_dict[alt])
    for other in self.other_dict:
        lines_to_print.append(self.other_dict[other])
    lines_to_print.append('#' + '\t'.join(self.header))
    return lines_to_print
Returns a list with the header lines if proper format
18,322
def add_variant(self, chrom, pos, rs_id, ref, alt, qual, filt, info,
                form=None, genotypes=None):
    """Add a variant to the parser (used when building a VCF).

    Joins the supplied fields into a tab-separated variant line, formats it
    with the parser's metadata, and appends it to ``self.variants``;
    multi-allelic variants are split first when ``self.split_variants`` is
    set.

    :param genotypes: optional list of per-individual genotype strings
    """
    # Fix: ``genotypes`` used a mutable default argument ([]).
    if genotypes is None:
        genotypes = []
    variant_info = [chrom, pos, rs_id, ref, alt, qual, filt, info]
    if form:
        variant_info.append(form)
    for individual in genotypes:
        variant_info.append(individual)
    variant_line = '\t'.join(variant_info)
    variant = format_variant(line=variant_line,
                             header_parser=self.metadata,
                             check_info=self.check_info)
    if not (self.split_variants and len(variant['ALT'].split(',')) > 1):
        self.variants.append(variant)
    else:
        for splitted_variant in split_variants(variant_dict=variant,
                                               header_parser=self.metadata,
                                               allele_symbol=self.allele_symbol):
            self.variants.append(splitted_variant)
Add a variant to the parser . This function is for building a vcf . It takes the relevant parameters and make a vcf variant in the proper format .
18,323
def content_get(self, cid, nid=None):
    """Get data for post ``cid`` in network ``nid``."""
    response = self.request(method="content.get", data={"cid": cid}, nid=nid)
    return self._handle_error(response, "Could not get post {}.".format(cid))
Get data from post cid in network nid
18,324
def content_create(self, params):
    """Create a post or followup from ``params``."""
    response = self.request(method="content.create", data=params)
    return self._handle_error(response,
                              "Could not create object {}.".format(repr(params)))
Create a post or followup .
18,325
def add_students(self, student_emails, nid=None):
    """Enroll the given student e-mail addresses in network ``nid``."""
    payload = {"from": "ClassSettingsPage", "add_students": student_emails}
    response = self.request(method="network.update", data=payload,
                            nid=nid, nid_key="id")
    return self._handle_error(response, "Could not add users.")
Enroll students in a network nid .
18,326
def get_all_users(self, nid=None):
    """Get a listing of data for every user in network ``nid``."""
    response = self.request(method="network.get_all_users", nid=nid)
    return self._handle_error(response, "Could not get users.")
Get a listing of data for each user in a network nid
18,327
def get_users(self, user_ids, nid=None):
    """Get a listing of data for the specific users ``user_ids`` in ``nid``."""
    response = self.request(method="network.get_users",
                            data={"ids": user_ids}, nid=nid)
    return self._handle_error(response, "Could not get users.")
Get a listing of data for specific users user_ids in a network nid
18,328
def remove_users(self, user_ids, nid=None):
    """Remove the users ``user_ids`` from network ``nid``."""
    response = self.request(method="network.update",
                            data={"remove_users": user_ids},
                            nid=nid, nid_key="id")
    return self._handle_error(response, "Could not remove users.")
Remove users from a network nid
18,329
def get_my_feed(self, limit=150, offset=20, sort="updated", nid=None):
    """Get the current user's feed for network ``nid``."""
    payload = dict(limit=limit, offset=offset, sort=sort)
    response = self.request(method="network.get_my_feed", nid=nid, data=payload)
    return self._handle_error(response, "Could not retrieve your feed.")
Get my feed
18,330
def filter_feed(self, updated=False, following=False, folder=False,
                filter_folder="", sort="updated", nid=None):
    """Get a filtered feed.

    Exactly one of ``updated``/``following``/``folder`` must be truthy;
    when ``folder`` is chosen a non-empty ``filter_folder`` is required.
    """
    assert sum([updated, following, folder]) == 1
    if folder:
        assert filter_folder
    if updated:
        selector = dict(updated=1)
    elif following:
        selector = dict(following=1)
    else:
        selector = dict(folder=1, filter_folder=filter_folder)
    response = self.request(nid=nid, method="network.filter_feed",
                            data=dict(sort=sort, **selector))
    return self._handle_error(response, "Could not retrieve filtered feed.")
Get filtered feed
18,331
def search(self, query, nid=None):
    """Search network ``nid`` for posts matching ``query``."""
    response = self.request(method="network.search", nid=nid,
                            data=dict(query=query))
    return self._handle_error(response,
                              "Search with query '{}' failed.".format(query))
Search for posts with query
18,332
def get_stats(self, nid=None):
    """Get statistics for the class/network ``nid``."""
    response = self.request(api_type="main", method="network.get_stats", nid=nid)
    return self._handle_error(response, "Could not retrieve stats for class.")
Get statistics for class
18,333
def request(self, method, data=None, nid=None, nid_key='nid',
            api_type="logic", return_response=False):
    """Call an arbitrary Piazza API endpoint ``method`` in network ``nid``.

    :param nid_key: key name under which the network id is sent in params
    :param api_type: selects the base URL ("logic" adds method/nonce params)
    :param return_response: return the raw response instead of parsed JSON
    """
    self._check_authenticated()
    nid = nid if nid else self._nid
    if data is None:
        data = {}
    headers = {}
    if "session_id" in self.session.cookies:
        # Piazza uses the session cookie value as a CSRF token header.
        headers["CSRF-Token"] = self.session.cookies["session_id"]
    endpoint = self.base_api_urls[api_type]
    if api_type == "logic":
        endpoint += "?method={}&aid={}".format(method, _piazza_nonce())
    response = self.session.post(
        endpoint,
        data=json.dumps({"method": method,
                         "params": dict({nid_key: nid}, **data)}),
        headers=headers)
    return response if return_response else response.json()
Get data from arbitrary Piazza API endpoint method in network nid
18,334
def _handle_error ( self , result , err_msg ) : if result . get ( u'error' ) : raise RequestError ( "{}\nResponse: {}" . format ( err_msg , json . dumps ( result , indent = 2 ) ) ) else : return result . get ( u'result' )
Check result for error
18,335
def get_user_classes(self):
    """Return a list of the current user's classes.

    Each entry has ``name``, ``term``, ``num`` (course number), ``nid``
    and ``is_ta`` — a condensed view of ``get_user_status()``.
    """
    status = self.get_user_status()
    uid = status['id']
    classes = []
    for network in status.get('networks', []):
        entry = {key: network[key] for key in ['name', 'term']}
        entry['num'] = network.get('course_number', '')
        entry['nid'] = network['id']
        entry['is_ta'] = uid in network['prof_hash']
        classes.append(entry)
    return classes
Get a list of the current user's classes. This is a subset of the information returned by the call to get_user_status.
18,336
def nonce():
    """Return a new nonce for the Piazza API.

    Concatenates the current millisecond timestamp and a random value,
    both encoded in base 36.
    """
    timestamp_part = _int2base(int(_time() * 1000), 36)
    random_part = _int2base(round(_random() * 1679616), 36)
    return "{}{}".format(timestamp_part, random_part)
Returns a new nonce to be used with the Piazza API .
18,337
def iter_all_posts(self, limit=None):
    """Yield every post visible to the current user.

    :param limit: maximum number of posts to yield (``None`` = all)
    """
    feed = self.get_feed(limit=999999, offset=0)
    cids = [entry['id'] for entry in feed["feed"]]
    if limit is not None:
        cids = cids[:limit]
    for cid in cids:
        yield self.get_post(cid)
Get all posts visible to the current user
18,338
def create_post(self, post_type, post_folders, post_subject, post_content,
                is_announcement=0, bypass_email=0, anonymous=False):
    """Create a post in the network."""
    params = {
        "anonymous": "yes" if anonymous else "no",
        "subject": post_subject,
        "content": post_content,
        "folders": post_folders,
        "type": post_type,
        "config": {
            "bypass_email": bypass_email,
            "is_announcement": is_announcement,
        },
    }
    return self._rpc.content_create(params)
Create a post
18,339
def create_followup(self, post, content, anonymous=False):
    """Create a follow-up on ``post`` (a post dict or a cid)."""
    try:
        cid = post["id"]
    except KeyError:
        cid = post
    params = {
        "cid": cid,
        "type": "followup",
        # Followups store their text in "subject"; "content" stays empty.
        "subject": content,
        "content": "",
        "anonymous": "yes" if anonymous else "no",
    }
    return self._rpc.content_create(params)
Create a follow - up on a post post .
18,340
def create_instructor_answer(self, post, content, revision, anonymous=False):
    """Create an instructor's answer to ``post`` (a post dict or a cid)."""
    try:
        cid = post["id"]
    except KeyError:
        cid = post
    params = {
        "cid": cid,
        "type": "i_answer",
        "content": content,
        "revision": revision,
        "anonymous": "yes" if anonymous else "no",
    }
    return self._rpc.content_instructor_answer(params)
Create an instructor's answer to the post `post`.
18,341
def mark_as_duplicate(self, duplicated_cid, master_cid, msg=''):
    """Mark the post at ``duplicated_cid`` as a duplicate of ``master_cid``."""
    content_id_from = self.get_post(duplicated_cid)["id"]
    content_id_to = self.get_post(master_cid)["id"]
    return self._rpc.content_mark_duplicate({
        "cid_dupe": content_id_from,
        "cid_to": content_id_to,
        "msg": msg,
    })
Mark the post at duplicated_cid as a duplicate of master_cid
18,342
def resolve_post(self, post):
    """Mark ``post`` (a post dict or a cid) as resolved."""
    try:
        cid = post["id"]
    except KeyError:
        cid = post
    return self._rpc.content_mark_resolved({"cid": cid, "resolved": "true"})
Mark post as resolved
18,343
def delete_post(self, post):
    """Delete ``post``, given as a post dict or a cid.

    A ``TypeError`` (non-subscriptable cid such as an int) triggers a
    lookup via ``get_post`` first.
    """
    try:
        cid = post['id']
    except KeyError:
        cid = post
    except TypeError:
        post = self.get_post(post)
        cid = post['id']
    return self._rpc.content_delete({"cid": cid})
Deletes post by cid
18,344
def get_feed(self, limit=100, offset=0):
    """Get your feed for this network (delegates to the RPC layer)."""
    return self._rpc.get_my_feed(limit=limit, offset=offset)
Get your feed for this network
18,345
def get_filtered_feed(self, feed_filter):
    """Get your feed containing only posts matching ``feed_filter``.

    ``feed_filter`` must be an Unread/Following/Folder filter instance.
    """
    valid_filters = (UnreadFilter, FollowingFilter, FolderFilter)
    assert isinstance(feed_filter, valid_filters)
    return self._rpc.filter_feed(**feed_filter.to_kwargs())
Get your feed containing only posts filtered by feed_filter
18,346
def get_dataset(self, dataset):
    """Ensure ``dataset`` is present locally, downloading/unzipping if needed.

    Retries the download-and-extract sequence up to 5 times on error.
    Returns True on success, False if an error occurred (after printing
    guidance for manual download).
    """
    success = True
    dataset_path = self.base_dataset_path + dataset
    if not isdir(dataset_path):
        was_error = False
        # Up to 5 attempts; after the first attempt, only retry on error.
        for iteration in range(5):
            if iteration == 0 or was_error is True:
                zip_path = dataset_path + ".zip"
                if not isfile(zip_path):
                    try:
                        with DLProgress(unit='B', unit_scale=True, miniters=1,
                                        desc=dataset) as pbar:
                            urlretrieve(self.datasets[dataset]["url"],
                                        zip_path, pbar.hook)
                    except Exception as ex:
                        print("Error downloading %s: %s" % (dataset, ex))
                        was_error = True
                if not isdir(dataset_path):
                    try:
                        with zipfile.ZipFile(zip_path) as zip_archive:
                            zip_archive.extractall(path=dataset_path)
                            zip_archive.close()
                    except Exception as ex:
                        print("Error unzipping %s: %s" % (zip_path, ex))
                        # Remove the (possibly corrupt) archive so the next
                        # iteration re-downloads it.
                        try:
                            remove(zip_path)
                        except FileNotFoundError:
                            pass
                        was_error = True
        if was_error:
            print("\nThis recognizer is trained by the CASIA handwriting database.")
            print("If the download doesn't work, you can get the files at %s"
                  % self.datasets[dataset]["url"])
            print("If you have download problems, "
                  "wget may be effective at downloading because of download resuming.")
            success = False
    return success
Checks to see if the dataset is present . If not it downloads and unzips it .
18,347
def first_plugin_context(self):
    """Return the context of the first app this plugin was registered on."""
    first_registration = next(iter(self.registrations))
    return self.get_context_from_spf(first_registration)
Returns the context is associated with the first app this plugin was registered on
18,348
async def route_wrapper(self, route, request, context, request_args, request_kw,
                        *decorator_args, with_context=None, **decorator_kw):
    """Invoke a plugin-decorated route, awaiting the result when needed.

    When ``with_context`` is truthy the plugin context is passed as the
    route's second argument; otherwise the route only receives the request.
    """
    if with_context:
        result = route(request, context, *request_args, **request_kw)
    else:
        result = route(request, *request_args, **request_kw)
    if isawaitable(result):
        result = await result
    return result
This is the function that is called when a route is decorated with your plugin decorator . Context will normally be None but the user can pass use_context = True so the route will get the plugin context
18,349
def check_credentials(client):
    """Check peer credentials of a Unix-socket client.

    Only root (uid 0) or a peer with the same effective uid as this process
    is accepted; anything else raises ``SuspiciousClient``.

    :returns: (pid, uid, gid) of the peer
    """
    pid, uid, gid = get_peercred(client)
    euid = os.geteuid()
    client_name = "PID:%s UID:%s GID:%s" % (pid, uid, gid)
    if uid not in (0, euid):
        raise SuspiciousClient("Can't accept client with %s. It doesn't match the current EUID:%s or ROOT." % (client_name, euid))
    _LOG("Accepted connection on fd:%s from %s" % (client.fileno(), client_name))
    return pid, uid, gid
Checks credentials for given socket .
18,350
def handle_connection_exec(client):
    """Alternate connection handler: exec incoming lines, no output redirection.

    Reads the socket line by line and exec's each line against the manhole
    locals. The injected ``exit`` callable raises a private exception to
    leave the loop cleanly.
    """
    class ExitExecLoop(Exception):
        pass

    def exit():
        raise ExitExecLoop()

    client.settimeout(None)
    # Take ownership of the fd (detach when available) so fdopen's close
    # doesn't double-close the socket's fd.
    fh = os.fdopen(client.detach() if hasattr(client, 'detach') else client.fileno())
    with closing(client):
        with closing(fh):
            try:
                payload = fh.readline()
                while payload:
                    _LOG("Running: %r." % payload)
                    eval(compile(payload, '<manhole>', 'exec'), {'exit': exit}, _MANHOLE.locals)
                    payload = fh.readline()
            except ExitExecLoop:
                _LOG("Exiting exec loop.")
Alternate connection handler . No output redirection .
18,351
def handle_connection_repl(client):
    """Handle a manhole connection by running a REPL over the socket.

    Temporarily rebinds sys.stdin/stdout (and optionally stderr) to the
    client's file descriptor, runs the REPL, then restores the originals
    and closes everything — the cleanup ordering here is deliberate.
    """
    client.settimeout(None)
    backup = []
    old_interval = getinterval()
    patches = [('r', ('stdin', '__stdin__')), ('w', ('stdout', '__stdout__'))]
    if _MANHOLE.redirect_stderr:
        patches.append(('w', ('stderr', '__stderr__')))
    try:
        client_fd = client.fileno()
        for mode, names in patches:
            for name in names:
                backup.append((name, getattr(sys, name)))
                # Line-buffered on PY3; unbuffered otherwise.
                setattr(sys, name, _ORIGINAL_FDOPEN(client_fd, mode, 1 if PY3 else 0))
        try:
            handle_repl(_MANHOLE.locals)
        except Exception as exc:
            _LOG("REPL failed with %r." % exc)
        _LOG("DONE.")
    finally:
        try:
            # Block thread switching while we swap the streams back.
            setinterval(2147483647)
            try:
                client.close()
            except IOError:
                pass
            junk = []
            for name, fh in backup:
                junk.append(getattr(sys, name))
                setattr(sys, name, fh)
            del backup
            for fh in junk:
                try:
                    if hasattr(fh, 'detach'):
                        fh.detach()
                    else:
                        fh.close()
                except IOError:
                    pass
            del fh
            del junk
        finally:
            setinterval(old_interval)
            _LOG("Cleaned up.")
Handles connection .
18,352
def install(verbose=True,
            verbose_destination=sys.__stderr__.fileno() if hasattr(sys.__stderr__, 'fileno') else sys.__stderr__,
            strict=True, **kwargs):
    """Install the manhole (module-level singleton).

    :param strict: raise ``AlreadyInstalled`` if one exists; otherwise the
        existing instance is released and reconfigured
    :returns: the (possibly pre-existing) ``Manhole`` instance
    """
    global _MANHOLE
    with _LOCK:
        if _MANHOLE is None:
            _MANHOLE = Manhole()
        else:
            if strict:
                raise AlreadyInstalled("Manhole already installed!")
            else:
                _LOG.release()
                _MANHOLE.release()
        _LOG.configure(verbose, verbose_destination)
        _MANHOLE.configure(**kwargs)
    return _MANHOLE
Installs the manhole .
18,353
def dump_stacktraces():
    """Dump thread ids and tracebacks of every live thread.

    Output goes to stderr when the manhole redirects stderr, else stdout.
    """
    lines = []
    for thread_id, stack in sys._current_frames().items():
        lines.append("\n######### ProcessID=%s, ThreadID=%s #########" % (os.getpid(), thread_id))
        for filename, lineno, name, line in traceback.extract_stack(stack):
            lines.append('File: "%s", line %d, in %s' % (filename, lineno, name))
            if line:
                lines.append("  %s" % (line.strip()))
    lines.append("#############################################\n\n")
    print('\n'.join(lines), file=sys.stderr if _MANHOLE.redirect_stderr else sys.stdout)
Dumps thread ids and tracebacks to stdout .
18,354
def clone(self, **kwargs):
    """Make a fresh thread with the same options (used to replace dead threads).

    Extra ``kwargs`` (e.g. ``bind_delay``) are forwarded to the new
    ``ManholeThread``; passing a key that duplicates one of the explicit
    keywords below raises ``TypeError``, as with any Python call.
    """
    return ManholeThread(
        self.get_socket, self.sigmask, self.start_timeout,
        connection_handler=self.connection_handler,
        daemon_connection=self.daemon_connection,
        **kwargs
    )
Make a fresh thread with the same options . This is usually used on dead threads .
18,355
def reinstall(self):
    """Reinstall the manhole: if its thread is dead, clone and restart it.

    A thread still present in the interpreter's active-thread registry is
    left alone; otherwise it is replaced (and started when
    ``should_restart`` is set).
    """
    with _LOCK:
        if not (self.thread.is_alive() and self.thread in _ORIGINAL__ACTIVE):
            self.thread = self.thread.clone(bind_delay=self.reinstall_delay)
            if self.should_restart:
                self.thread.start()
Reinstalls the manhole . Checks if the thread is running . If not it starts it again .
18,356
def patched_forkpty(self):
    """Fork with a new pseudo-terminal, reinstalling Manhole in the child.

    Wraps the original ``os.forkpty``; the manhole thread does not survive
    fork, so the child (pid == 0) reinstalls it.
    """
    pid, master_fd = self.original_os_forkpty()
    if not pid:
        _LOG('Fork detected. Reinstalling Manhole.')
        self.reinstall()
    return pid, master_fd
Fork a new process with a new pseudo - terminal as controlling tty .
18,357
def update(self, alert_condition_nrql_id, policy_id, name=None, threshold_type=None,
           query=None, since_value=None, terms=None, expected_groups=None,
           value_function=None, runbook_url=None, ignore_overlap=None, enabled=True):
    """Update any of the optional parameters of an NRQL alert condition.

    Existing values from the current condition are kept for every parameter
    left as None.

    :raises NoEntityException: when the condition is not in the policy
    :raises ConfigurationException: when type-specific settings are missing
    """
    conditions_nrql_dict = self.list(policy_id)
    target_condition_nrql = None
    for condition in conditions_nrql_dict['nrql_conditions']:
        if int(condition['id']) == alert_condition_nrql_id:
            target_condition_nrql = condition
            break
    if target_condition_nrql is None:
        raise NoEntityException(
            'Target alert condition nrql is not included in that policy.'
            'policy_id: {}, alert_condition_nrql_id {}'.format(
                policy_id, alert_condition_nrql_id))
    # Start from the existing condition, overriding only supplied values.
    data = {
        'nrql_condition': {
            'type': threshold_type or target_condition_nrql['type'],
            'enabled': target_condition_nrql['enabled'],
            'name': name or target_condition_nrql['name'],
            'terms': terms or target_condition_nrql['terms'],
            'nrql': {
                'query': query or target_condition_nrql['nrql']['query'],
                'since_value': since_value or target_condition_nrql['nrql']['since_value'],
            }
        }
    }
    if enabled is not None:
        # The API expects 'true'/'false' strings rather than booleans.
        data['nrql_condition']['enabled'] = str(enabled).lower()
    if runbook_url is not None:
        data['nrql_condition']['runbook_url'] = runbook_url
    elif 'runbook_url' in target_condition_nrql:
        data['nrql_condition']['runbook_url'] = target_condition_nrql['runbook_url']
    if expected_groups is not None:
        data['nrql_condition']['expected_groups'] = expected_groups
    elif 'expected_groups' in target_condition_nrql:
        data['nrql_condition']['expected_groups'] = target_condition_nrql['expected_groups']
    if ignore_overlap is not None:
        data['nrql_condition']['ignore_overlap'] = ignore_overlap
    elif 'ignore_overlap' in target_condition_nrql:
        data['nrql_condition']['ignore_overlap'] = target_condition_nrql['ignore_overlap']
    if value_function is not None:
        data['nrql_condition']['value_function'] = value_function
    elif 'value_function' in target_condition_nrql:
        data['nrql_condition']['value_function'] = target_condition_nrql['value_function']
    # 'static' conditions require value_function; 'outlier' conditions
    # require expected_groups/ignore_overlap. Strip the settings that do
    # not apply to the chosen type.
    if data['nrql_condition']['type'] == 'static':
        if 'value_function' not in data['nrql_condition']:
            raise ConfigurationException(
                'Alert is set as static but no value_function config specified')
        data['nrql_condition'].pop('expected_groups', None)
        data['nrql_condition'].pop('ignore_overlap', None)
    elif data['nrql_condition']['type'] == 'outlier':
        if 'expected_groups' not in data['nrql_condition']:
            raise ConfigurationException(
                'Alert is set as outlier but expected_groups config is not specified')
        if 'ignore_overlap' not in data['nrql_condition']:
            raise ConfigurationException(
                'Alert is set as outlier but ignore_overlap config is not specified')
        data['nrql_condition'].pop('value_function', None)
    return self._put(
        url='{0}alerts_nrql_conditions/{1}.json'.format(self.URL, alert_condition_nrql_id),
        headers=self.headers,
        data=data)
Updates any of the optional parameters of the alert condition nrql
18,358
def create ( self , policy_id , name , threshold_type , query , since_value , terms , expected_groups = None , value_function = None , runbook_url = None , ignore_overlap = None , enabled = True ) : data = { 'nrql_condition' : { 'type' : threshold_type , 'name' : name , 'enabled' : enabled , 'terms' : terms , 'nrql' : { 'query' : query , 'since_value' : since_value } } } if runbook_url is not None : data [ 'nrql_condition' ] [ 'runbook_url' ] = runbook_url if expected_groups is not None : data [ 'nrql_condition' ] [ 'expected_groups' ] = expected_groups if ignore_overlap is not None : data [ 'nrql_condition' ] [ 'ignore_overlap' ] = ignore_overlap if value_function is not None : data [ 'nrql_condition' ] [ 'value_function' ] = value_function if data [ 'nrql_condition' ] [ 'type' ] == 'static' : if 'value_function' not in data [ 'nrql_condition' ] : raise ConfigurationException ( 'Alert is set as static but no value_function config specified' ) data [ 'nrql_condition' ] . pop ( 'expected_groups' , None ) data [ 'nrql_condition' ] . pop ( 'ignore_overlap' , None ) elif data [ 'nrql_condition' ] [ 'type' ] == 'outlier' : if 'expected_groups' not in data [ 'nrql_condition' ] : raise ConfigurationException ( 'Alert is set as outlier but expected_groups config is not specified' ) if 'ignore_overlap' not in data [ 'nrql_condition' ] : raise ConfigurationException ( 'Alert is set as outlier but ignore_overlap config is not specified' ) data [ 'nrql_condition' ] . pop ( 'value_function' , None ) return self . _post ( url = '{0}alerts_nrql_conditions/policies/{1}.json' . format ( self . URL , policy_id ) , headers = self . headers , data = data )
Creates an alert condition nrql
18,359
def delete ( self , alert_condition_nrql_id ) : return self . _delete ( url = '{0}alerts_nrql_conditions/{1}.json' . format ( self . URL , alert_condition_nrql_id ) , headers = self . headers )
This API endpoint allows you to delete an alert condition nrql
18,360
def list ( self , filter_name = None , filter_ids = None , filter_labels = None , page = None ) : label_param = '' if filter_labels : label_param = ';' . join ( [ '{}:{}' . format ( label , value ) for label , value in filter_labels . items ( ) ] ) filters = [ 'filter[name]={0}' . format ( filter_name ) if filter_name else None , 'filter[ids]={0}' . format ( ',' . join ( [ str ( app_id ) for app_id in filter_ids ] ) ) if filter_ids else None , 'filter[labels]={0}' . format ( label_param ) if filter_labels else None , 'page={0}' . format ( page ) if page else None ] return self . _get ( url = '{0}servers.json' . format ( self . URL ) , headers = self . headers , params = self . build_param_string ( filters ) )
This API endpoint returns a paginated list of the Servers associated with your New Relic account . Servers can be filtered by their name or by a list of server IDs .
18,361
def update ( self , id , name = None ) : nr_data = self . show ( id ) [ 'server' ] data = { 'server' : { 'name' : name or nr_data [ 'name' ] , } } return self . _put ( url = '{0}servers/{1}.json' . format ( self . URL , id ) , headers = self . headers , data = data )
Updates any of the optional parameters of the server
18,362
def create ( self , name , incident_preference ) : data = { "policy" : { "name" : name , "incident_preference" : incident_preference } } return self . _post ( url = '{0}alerts_policies.json' . format ( self . URL ) , headers = self . headers , data = data )
This API endpoint allows you to create an alert policy
18,363
def update ( self , id , name , incident_preference ) : data = { "policy" : { "name" : name , "incident_preference" : incident_preference } } return self . _put ( url = '{0}alerts_policies/{1}.json' . format ( self . URL , id ) , headers = self . headers , data = data )
This API endpoint allows you to update an alert policy
18,364
def delete ( self , id ) : return self . _delete ( url = '{0}alerts_policies/{1}.json' . format ( self . URL , id ) , headers = self . headers )
This API endpoint allows you to delete an alert policy
18,365
def associate_with_notification_channel ( self , id , channel_id ) : return self . _put ( url = '{0}alerts_policy_channels.json?policy_id={1}&channel_ids={2}' . format ( self . URL , id , channel_id ) , headers = self . headers )
This API endpoint allows you to associate an alert policy with a notification channel
18,366
def dissociate_from_notification_channel ( self , id , channel_id ) : return self . _delete ( url = '{0}alerts_policy_channels.json?policy_id={1}&channel_id={2}' . format ( self . URL , id , channel_id ) , headers = self . headers )
This API endpoint allows you to dissociate an alert policy from a notification channel
18,367
def list ( self , policy_id , page = None ) : filters = [ 'policy_id={0}' . format ( policy_id ) , 'page={0}' . format ( page ) if page else None ] return self . _get ( url = '{0}alerts_conditions.json' . format ( self . URL ) , headers = self . headers , params = self . build_param_string ( filters ) )
This API endpoint returns a paginated list of alert conditions associated with the given policy_id .
18,368
def update ( self , alert_condition_id , policy_id , type = None , condition_scope = None , name = None , entities = None , metric = None , runbook_url = None , terms = None , user_defined = None , enabled = None ) : conditions_dict = self . list ( policy_id ) target_condition = None for condition in conditions_dict [ 'conditions' ] : if int ( condition [ 'id' ] ) == alert_condition_id : target_condition = condition break if target_condition is None : raise NoEntityException ( 'Target alert condition is not included in that policy.' 'policy_id: {}, alert_condition_id {}' . format ( policy_id , alert_condition_id ) ) data = { 'condition' : { 'type' : type or target_condition [ 'type' ] , 'name' : name or target_condition [ 'name' ] , 'entities' : entities or target_condition [ 'entities' ] , 'condition_scope' : condition_scope or target_condition [ 'condition_scope' ] , 'terms' : terms or target_condition [ 'terms' ] , 'metric' : metric or target_condition [ 'metric' ] , 'runbook_url' : runbook_url or target_condition [ 'runbook_url' ] , } } if enabled is not None : data [ 'condition' ] [ 'enabled' ] = str ( enabled ) . lower ( ) if data [ 'condition' ] [ 'metric' ] == 'user_defined' : if user_defined : data [ 'condition' ] [ 'user_defined' ] = user_defined elif 'user_defined' in target_condition : data [ 'condition' ] [ 'user_defined' ] = target_condition [ 'user_defined' ] else : raise ConfigurationException ( 'Metric is set as user_defined but no user_defined config specified' ) return self . _put ( url = '{0}alerts_conditions/{1}.json' . format ( self . URL , alert_condition_id ) , headers = self . headers , data = data )
Updates any of the optional parameters of the alert condition
18,369
def create ( self , policy_id , type , condition_scope , name , entities , metric , terms , runbook_url = None , user_defined = None , enabled = True ) : data = { 'condition' : { 'type' : type , 'name' : name , 'enabled' : enabled , 'entities' : entities , 'condition_scope' : condition_scope , 'terms' : terms , 'metric' : metric , 'runbook_url' : runbook_url , } } if metric == 'user_defined' : if user_defined : data [ 'condition' ] [ 'user_defined' ] = user_defined else : raise ConfigurationException ( 'Metric is set as user_defined but no user_defined config specified' ) return self . _post ( url = '{0}alerts_conditions/policies/{1}.json' . format ( self . URL , policy_id ) , headers = self . headers , data = data )
Creates an alert condition
18,370
def delete ( self , alert_condition_id ) : return self . _delete ( url = '{0}alerts_conditions/{1}.json' . format ( self . URL , alert_condition_id ) , headers = self . headers )
This API endpoint allows you to delete an alert condition
18,371
def list ( self , policy_id , limit = None , offset = None ) : filters = [ 'policy_id={0}' . format ( policy_id ) , 'limit={0}' . format ( limit ) if limit else '50' , 'offset={0}' . format ( offset ) if offset else '0' ] return self . _get ( url = '{0}alerts/conditions' . format ( self . URL ) , headers = self . headers , params = self . build_param_string ( filters ) )
This API endpoint returns a paginated list of alert conditions for infrastructure metrics associated with the given policy_id .
18,372
def show ( self , alert_condition_infra_id ) : return self . _get ( url = '{0}alerts/conditions/{1}' . format ( self . URL , alert_condition_infra_id ) , headers = self . headers , )
This API endpoint returns an alert condition for infrastructure identified by its ID .
18,373
def create ( self , policy_id , name , condition_type , alert_condition_configuration , enabled = True ) : data = { "data" : alert_condition_configuration } data [ 'data' ] [ 'type' ] = condition_type data [ 'data' ] [ 'policy_id' ] = policy_id data [ 'data' ] [ 'name' ] = name data [ 'data' ] [ 'enabled' ] = enabled return self . _post ( url = '{0}alerts/conditions' . format ( self . URL ) , headers = self . headers , data = data )
This API endpoint allows you to create an alert condition for infrastructure
18,374
def update ( self , alert_condition_infra_id , policy_id , name , condition_type , alert_condition_configuration , enabled = True ) : data = { "data" : alert_condition_configuration } data [ 'data' ] [ 'type' ] = condition_type data [ 'data' ] [ 'policy_id' ] = policy_id data [ 'data' ] [ 'name' ] = name data [ 'data' ] [ 'enabled' ] = enabled return self . _put ( url = '{0}alerts/conditions/{1}' . format ( self . URL , alert_condition_infra_id ) , headers = self . headers , data = data )
This API endpoint allows you to update an alert condition for infrastructure
18,375
def delete ( self , alert_condition_infra_id ) : return self . _delete ( url = '{0}alerts/conditions/{1}' . format ( self . URL , alert_condition_infra_id ) , headers = self . headers )
This API endpoint allows you to delete an alert condition for infrastructure
18,376
def create ( self , name , category , applications = None , servers = None ) : data = { "label" : { "category" : category , "name" : name , "links" : { "applications" : applications or [ ] , "servers" : servers or [ ] } } } return self . _put ( url = '{0}labels.json' . format ( self . URL ) , headers = self . headers , data = data )
This API endpoint will create a new label with the provided name and category
18,377
def delete ( self , key ) : return self . _delete ( url = '{url}labels/labels/{key}.json' . format ( url = self . URL , key = key ) , headers = self . headers , )
When applications are provided this endpoint will remove those applications from the label .
18,378
def list ( self , filter_guid = None , filter_ids = None , detailed = None , page = None ) : filters = [ 'filter[guid]={0}' . format ( filter_guid ) if filter_guid else None , 'filter[ids]={0}' . format ( ',' . join ( [ str ( app_id ) for app_id in filter_ids ] ) ) if filter_ids else None , 'detailed={0}' . format ( detailed ) if detailed is not None else None , 'page={0}' . format ( page ) if page else None ] return self . _get ( url = '{0}plugins.json' . format ( self . URL ) , headers = self . headers , params = self . build_param_string ( filters ) )
This API endpoint returns a paginated list of the plugins associated with your New Relic account .
18,379
def list ( self , application_id , filter_hostname = None , filter_ids = None , page = None ) : filters = [ 'filter[hostname]={0}' . format ( filter_hostname ) if filter_hostname else None , 'filter[ids]={0}' . format ( ',' . join ( [ str ( app_id ) for app_id in filter_ids ] ) ) if filter_ids else None , 'page={0}' . format ( page ) if page else None ] return self . _get ( url = '{root}applications/{application_id}/instances.json' . format ( root = self . URL , application_id = application_id ) , headers = self . headers , params = self . build_param_string ( filters ) )
This API endpoint returns a paginated list of instances associated with the given application .
18,380
def show ( self , application_id , host_id ) : return self . _get ( url = '{root}applications/{application_id}/hosts/{host_id}.json' . format ( root = self . URL , application_id = application_id , host_id = host_id ) , headers = self . headers , )
This API endpoint returns a single application host identified by its ID .
18,381
def metric_data ( self , id , names , values = None , from_dt = None , to_dt = None , summarize = False ) : params = [ 'from={0}' . format ( from_dt ) if from_dt else None , 'to={0}' . format ( to_dt ) if to_dt else None , 'summarize=true' if summarize else None ] params += [ 'names[]={0}' . format ( name ) for name in names ] if values : params += [ 'values[]={0}' . format ( value ) for value in values ] return self . _get ( url = '{0}components/{1}/metrics/data.json' . format ( self . URL , id ) , headers = self . headers , params = self . build_param_string ( params ) )
This API endpoint returns a list of values for each of the requested metrics . The list of available metrics can be returned using the Metric Name API endpoint .
18,382
def create ( self , dashboard_data ) : return self . _post ( url = '{0}dashboards.json' . format ( self . URL ) , headers = self . headers , data = dashboard_data , )
This API endpoint creates a dashboard and all defined widgets .
18,383
def update ( self , id , dashboard_data ) : return self . _put ( url = '{0}dashboards/{1}.json' . format ( self . URL , id ) , headers = self . headers , data = dashboard_data , )
This API endpoint updates a dashboard and all defined widgets .
18,384
def operatorPrecedence ( base , operators ) : expression = Forward ( ) last = base | Suppress ( '(' ) + expression + Suppress ( ')' ) def parse_operator ( expr , arity , association , action = None , extra = None ) : return expr , arity , association , action , extra for op in operators : expr , arity , association , action , extra = parse_operator ( * op ) if arity < 1 or arity > 2 : raise Exception ( "Arity must be unary (1) or binary (2)" ) if association not in ( opAssoc . LEFT , opAssoc . RIGHT ) : raise Exception ( "Association must be LEFT or RIGHT" ) this = Forward ( ) if association is opAssoc . LEFT : new_last = ( last | extra ) if extra else last if arity == 1 : operator_expression = new_last + OneOrMore ( expr ) elif arity == 2 : operator_expression = last + OneOrMore ( expr + new_last ) elif association is opAssoc . RIGHT : new_this = ( this | extra ) if extra else this if arity == 1 : operator_expression = expr + new_this elif arity == 2 : operator_expression = last + OneOrMore ( new_this ) if action is not None : operator_expression . setParseAction ( action ) this <<= ( operator_expression | last ) last = this expression <<= last return expression
This re - implements pyparsing s operatorPrecedence function .
18,385
def set_parse_attributes ( self , string , location , tokens ) : "Fluent API for setting parsed location" self . string = string self . location = location self . tokens = tokens return self
Fluent API for setting parsed location
18,386
def evaluate_object ( obj , cls = None , cache = False , ** kwargs ) : old_obj = obj if isinstance ( obj , Element ) : if cache : obj = obj . evaluate_cached ( ** kwargs ) else : obj = obj . evaluate ( cache = cache , ** kwargs ) if cls is not None and type ( obj ) != cls : obj = cls ( obj ) for attr in ( 'string' , 'location' , 'tokens' ) : if hasattr ( old_obj , attr ) : setattr ( obj , attr , getattr ( old_obj , attr ) ) return obj
Evaluates elements and coerces objects to a class if needed
18,387
def readGraph ( edgeList , nodeList = None , directed = False , idKey = 'ID' , eSource = 'From' , eDest = 'To' ) : progArgs = ( 0 , "Starting to reading graphs" ) if metaknowledge . VERBOSE_MODE : progKwargs = { 'dummy' : False } else : progKwargs = { 'dummy' : True } with _ProgressBar ( * progArgs , ** progKwargs ) as PBar : if directed : grph = nx . DiGraph ( ) else : grph = nx . Graph ( ) if nodeList : PBar . updateVal ( 0 , "Reading " + nodeList ) f = open ( os . path . expanduser ( os . path . abspath ( nodeList ) ) ) nFile = csv . DictReader ( f ) for line in nFile : vals = line ndID = vals [ idKey ] del vals [ idKey ] if len ( vals ) > 0 : grph . add_node ( ndID , ** vals ) else : grph . add_node ( ndID ) f . close ( ) PBar . updateVal ( .25 , "Reading " + edgeList ) f = open ( os . path . expanduser ( os . path . abspath ( edgeList ) ) ) eFile = csv . DictReader ( f ) for line in eFile : vals = line eFrom = vals [ eSource ] eTo = vals [ eDest ] del vals [ eSource ] del vals [ eDest ] if len ( vals ) > 0 : grph . add_edge ( eFrom , eTo , ** vals ) else : grph . add_edge ( eFrom , eTo ) PBar . finish ( "{} nodes and {} edges found" . format ( len ( grph . nodes ( ) ) , len ( grph . edges ( ) ) ) ) f . close ( ) return grph
Reads the files given by _edgeList_ and _nodeList_ and creates a networkx graph for the files .
18,388
def writeGraph(grph, name, edgeInfo=True, typing=False, suffix='csv', overwrite=True, allSameAttribute=False):
    """Write *grph* as two files rooted at *name*: an edge list and a node-attribute list.

    When *typing* is True the file names are tagged '_directed'/'_undirected';
    when *overwrite* is False an OSError is raised if either file exists.
    """
    progArgs = (0, "Writing the graph to files starting with: {}".format(name))
    # Progress bar is a no-op ('dummy') unless verbose mode is enabled.
    if metaknowledge.VERBOSE_MODE:
        progKwargs = {'dummy': False}
    else:
        progKwargs = {'dummy': True}
    with _ProgressBar(*progArgs, **progKwargs) as PBar:
        if typing:
            if isinstance(grph, nx.classes.digraph.DiGraph) or isinstance(grph, nx.classes.multidigraph.MultiDiGraph):
                grphType = "_directed"
            else:
                grphType = "_undirected"
        else:
            grphType = ''
        # Split *name* into (directory, file-prefix) and build both output
        # paths, handling the cases where either component is empty.
        nameCompts = os.path.split(os.path.expanduser(os.path.normpath(name)))
        if nameCompts[0] == '' and nameCompts[1] == '':
            edgeListName = "edgeList" + grphType + '.' + suffix
            nodesAtrName = "nodeAttributes" + grphType + '.' + suffix
        elif nameCompts[0] == '':
            edgeListName = nameCompts[1] + "_edgeList" + grphType + '.' + suffix
            nodesAtrName = nameCompts[1] + "_nodeAttributes" + grphType + '.' + suffix
        elif nameCompts[1] == '':
            edgeListName = os.path.join(nameCompts[0], "edgeList" + grphType + '.' + suffix)
            nodesAtrName = os.path.join(nameCompts[0], "nodeAttributes" + grphType + '.' + suffix)
        else:
            edgeListName = os.path.join(nameCompts[0], nameCompts[1] + "_edgeList" + grphType + '.' + suffix)
            nodesAtrName = os.path.join(nameCompts[0], nameCompts[1] + "_nodeAttributes" + grphType + '.' + suffix)
        if not overwrite:
            if os.path.isfile(edgeListName):
                raise OSError(edgeListName + " already exists")
            if os.path.isfile(nodesAtrName):
                raise OSError(nodesAtrName + " already exists")
        writeEdgeList(grph, edgeListName, extraInfo=edgeInfo, allSameAttribute=allSameAttribute, _progBar=PBar)
        writeNodeAttributeFile(grph, nodesAtrName, allSameAttribute=allSameAttribute, _progBar=PBar)
        PBar.finish("{} nodes and {} edges written to file".format(len(grph.nodes()), len(grph.edges())))
Writes both the edge list and the node attribute list of _grph_ to files starting with _name_ .
18,389
def getNodeDegrees ( grph , weightString = "weight" , strictMode = False , returnType = int , edgeType = 'bi' ) : ndsDict = { } for nd in grph . nodes ( ) : ndsDict [ nd ] = returnType ( 0 ) for e in grph . edges ( data = True ) : if weightString : try : edgVal = returnType ( e [ 2 ] [ weightString ] ) except KeyError : if strictMode : raise KeyError ( "The edge from " + str ( e [ 0 ] ) + " to " + str ( e [ 1 ] ) + " does not have the attribute: '" + str ( weightString ) + "'" ) else : edgVal = returnType ( 1 ) else : edgVal = returnType ( 1 ) if edgeType == 'bi' : ndsDict [ e [ 0 ] ] += edgVal ndsDict [ e [ 1 ] ] += edgVal elif edgeType == 'in' : ndsDict [ e [ 1 ] ] += edgVal elif edgeType == 'out' : ndsDict [ e [ 0 ] ] += edgVal else : raise ValueError ( "edgeType must be 'bi', 'in', or 'out'" ) return ndsDict
Returns a dictionary mapping nodes to their degrees, where the degree is determined by summing the weights of a node's edges. The string weightString gives the name of the attribute of each edge containing its weight. The weights are then converted to the type returnType. If weightString is given as False, each edge is instead counted as 1.
18,390
def mergeGraphs ( targetGraph , addedGraph , incrementedNodeVal = 'count' , incrementedEdgeVal = 'weight' ) : for addedNode , attribs in addedGraph . nodes ( data = True ) : if incrementedNodeVal : try : targetGraph . node [ addedNode ] [ incrementedNodeVal ] += attribs [ incrementedNodeVal ] except KeyError : targetGraph . add_node ( addedNode , ** attribs ) else : if not targetGraph . has_node ( addedNode ) : targetGraph . add_node ( addedNode , ** attribs ) for edgeNode1 , edgeNode2 , attribs in addedGraph . edges ( data = True ) : if incrementedEdgeVal : try : targetGraph . edges [ edgeNode1 , edgeNode2 ] [ incrementedEdgeVal ] += attribs [ incrementedEdgeVal ] except KeyError : targetGraph . add_edge ( edgeNode1 , edgeNode2 , ** attribs ) else : if not targetGraph . Graph . has_edge ( edgeNode1 , edgeNode2 ) : targetGraph . add_edge ( edgeNode1 , edgeNode2 , ** attribs )
A quick way of merging graphs this is meant to be quick and is only intended for graphs generated by metaknowledge . This does not check anything and as such may cause unexpected results if the source and target were not generated by the same method .
18,391
def AD ( val ) : retDict = { } for v in val : split = v . split ( ' : ' ) retDict [ split [ 0 ] ] = [ s for s in ' : ' . join ( split [ 1 : ] ) . replace ( '\n' , '' ) . split ( ';' ) if s != '' ] return retDict
Affiliation. Undoes what the parser does, then splits at the semicolons and drops newlines; extra filtering is required because some AD entries end with a semicolon.
18,392
def AUID ( val ) : retDict = { } for v in val : split = v . split ( ' : ' ) retDict [ split [ 0 ] ] = ' : ' . join ( split [ 1 : ] ) return retDict
AuthorIdentifier. One line only; just need to undo the parser's effects.
18,393
def isInteractive ( ) : if sys . stdout . isatty ( ) and os . name != 'nt' : try : import threading except ImportError : return False else : return True else : return False
A basic check of whether the program is running in interactive mode
18,394
def getInstitutions ( self , tags = None , seperator = ";" , _getTag = False ) : if tags is None : tags = [ ] elif isinstance ( tags , str ) : tags = [ tags ] for k in self . keys ( ) : if 'institution' in k . lower ( ) and k not in tags : tags . append ( k ) return super ( ) . getInvestigators ( tags = tags , seperator = seperator , _getTag = _getTag )
Returns a list with the names of the institution . The optional arguments are ignored
18,395
def writeRecord ( self , f ) : if self . bad : raise BadPubmedRecord ( "This record cannot be converted to a file as the input was malformed.\nThe original line number (if any) is: {} and the original file is: '{}'" . format ( self . _sourceLine , self . _sourceFile ) ) else : authTags = { } for tag in authorBasedTags : for val in self . _fieldDict . get ( tag , [ ] ) : split = val . split ( ' : ' ) try : authTags [ split [ 0 ] ] . append ( "{0}{1}- {2}\n" . format ( tag , ' ' * ( 4 - len ( tag ) ) , ' : ' . join ( split [ 1 : ] ) . replace ( '\n' , '\n ' ) ) ) except KeyError : authTags [ split [ 0 ] ] = [ "{0}{1}- {2}\n" . format ( tag , ' ' * ( 4 - len ( tag ) ) , ' : ' . join ( split [ 1 : ] ) . replace ( '\n' , '\n ' ) ) ] for tag , value in self . _fieldDict . items ( ) : if tag in authorBasedTags : continue else : for v in value : f . write ( "{0}{1}- {2}\n" . format ( tag , ' ' * ( 4 - len ( tag ) ) , v . replace ( '\n' , '\n ' ) ) ) if tag == 'AU' : for authVal in authTags . get ( v , [ ] ) : f . write ( authVal )
This is nearly identical to the original; the FAU tag is the only tag not written in the same place, as doing so would require changing the parser and lots of extra logic.
18,396
def quickVisual ( G , showLabel = False ) : colours = "brcmykwg" f = plt . figure ( 1 ) ax = f . add_subplot ( 1 , 1 , 1 ) ndTypes = [ ] ndColours = [ ] layout = nx . spring_layout ( G , k = 4 / math . sqrt ( len ( G . nodes ( ) ) ) ) for nd in G . nodes ( data = True ) : if 'type' in nd [ 1 ] : if nd [ 1 ] [ 'type' ] not in ndTypes : ndTypes . append ( nd [ 1 ] [ 'type' ] ) ndColours . append ( colours [ ndTypes . index ( nd [ 1 ] [ 'type' ] ) % len ( colours ) ] ) elif len ( ndColours ) > 1 : raise RuntimeError ( "Some nodes do not have a type" ) if len ( ndColours ) < 1 : nx . draw_networkx_nodes ( G , pos = layout , node_color = colours [ 0 ] , node_shape = '8' , node_size = 100 , ax = ax ) else : nx . draw_networkx_nodes ( G , pos = layout , node_color = ndColours , node_shape = '8' , node_size = 100 , ax = ax ) nx . draw_networkx_edges ( G , pos = layout , width = .7 , ax = ax ) if showLabel : nx . draw_networkx_labels ( G , pos = layout , font_size = 8 , ax = ax ) plt . axis ( 'off' ) f . set_facecolor ( 'w' )
Just makes a simple _matplotlib_ figure and displays it, with each node coloured by its type. You can add labels with _showLabel_. This looks a bit nicer than the one provided by _networkx_'s defaults.
18,397
def graphDensityContourPlot ( G , iters = 50 , layout = None , layoutScaleFactor = 1 , overlay = False , nodeSize = 10 , axisSamples = 100 , blurringFactor = .1 , contours = 15 , graphType = 'coloured' ) : from mpl_toolkits . mplot3d import Axes3D if not isinstance ( G , nx . classes . digraph . DiGraph ) and not isinstance ( G , nx . classes . graph . Graph ) : raise TypeError ( "{} is not a valid input." . format ( type ( G ) ) ) if layout is None : layout = nx . spring_layout ( G , scale = axisSamples - 1 , iterations = iters ) grid = np . zeros ( [ axisSamples , axisSamples ] , dtype = np . float32 ) for v in layout . values ( ) : x , y = tuple ( int ( x ) for x in v . round ( 0 ) ) grid [ y ] [ x ] += 1 elif isinstance ( layout , dict ) : layout = layout . copy ( ) grid = np . zeros ( [ axisSamples , axisSamples ] , dtype = np . float32 ) multFactor = ( axisSamples - 1 ) / layoutScaleFactor for k in layout . keys ( ) : tmpPos = layout [ k ] * multFactor layout [ k ] = tmpPos x , y = tuple ( int ( x ) for x in tmpPos . round ( 0 ) ) grid [ y ] [ x ] += 1 else : raise TypeError ( "{} is not a valid input." . format ( type ( layout ) ) ) fig = plt . figure ( ) axis = fig . gca ( projection = '3d' ) if overlay : nx . draw_networkx ( G , pos = layout , ax = axis , node_size = nodeSize , with_labels = False , edgelist = [ ] ) grid = ndi . gaussian_filter ( grid , ( blurringFactor * axisSamples , blurringFactor * axisSamples ) ) X = Y = np . arange ( 0 , axisSamples , 1 ) X , Y = np . meshgrid ( X , Y ) if graphType == "solid" : CS = axis . plot_surface ( X , Y , grid ) else : CS = axis . contourf ( X , Y , grid , contours ) axis . set_xlabel ( 'X' ) axis . set_ylabel ( 'Y' ) axis . set_zlabel ( 'Node Density' )
Creates a 3D plot giving the density of nodes on a 2D plane as a surface in 3D .
18,398
def makeBiDirectional ( d ) : dTmp = d . copy ( ) for k in d : dTmp [ d [ k ] ] = k return dTmp
Helper for generating tagNameConverter. Makes a dict that maps from key to value and back.
18,399
def reverseDict ( d ) : retD = { } for k in d : retD [ d [ k ] ] = k return retD
Helper for generating fullToTag. Makes a dict mapping each value to its key.