idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
56,400
def union_join(left, right, left_as='left', right_as='right'):
    """Join two objects SQL-style into a sum type.

    Attribute conflicts resolve in favour of ``left``; references to the two
    parents are stored under the ``left_as`` / ``right_as`` keys.
    """
    merged = {}
    merged.update(get_object_attrs(right))
    merged.update(get_object_attrs(left))
    merged[left_as] = left
    merged[right_as] = right
    # Two plain dicts need no synthetic class; otherwise build a joined type.
    if isinstance(left, dict) and isinstance(right, dict):
        return merged
    joined_name = left.__class__.__name__ + right.__class__.__name__
    joined_class = type(joined_name, (Union,), {})
    return joined_class(merged)
Join function truest to the SQL-style join. Merges both objects together into a sum-type, saving references to each parent in the left and right attributes.
56,401
def configKeyButtons(self, enableButtons=None, bounceTime=DEF_BOUNCE_TIME_NORMAL,
                     pullUpDown=GPIO.PUD_UP, event=GPIO.BOTH):
    """Configure multiple key-button GPIO inputs and their events at once.

    Each entry of *enableButtons* is a dict with "id" (pin) and "callback"
    keys; *bounceTime*, *pullUpDown* and *event* apply to every button.
    """
    # ``None`` instead of a mutable ``[]`` default: a shared default list
    # would persist between calls and could be mutated by callers.
    if enableButtons is None:
        enableButtons = []
    for key in enableButtons:
        self.setKeyButton(key["id"], key["callback"], bounceTime, pullUpDown, event)
! \ ~english Config multi key buttons IO and event on same time
56,402
def best_item_from_list(item, options, fuzzy=90, fname_match=True,
                        fuzzy_fragment=None, guess=False):
    """Return only the best-matching option for *item*, or None when nothing matches."""
    found = best_match_from_list(item, options, fuzzy, fname_match,
                                 fuzzy_fragment, guess)
    return found[0] if found else None
Returns just the best item or None
56,403
async def create_hlk_sw16_connection(port=None, host=None, disconnect_callback=None,
                                     reconnect_callback=None, loop=None, logger=None,
                                     timeout=None, reconnect_interval=None):
    """Create, connect and return an HLK-SW16 client instance."""
    client = SW16Client(
        host,
        port=port,
        disconnect_callback=disconnect_callback,
        reconnect_callback=reconnect_callback,
        loop=loop,
        logger=logger,
        timeout=timeout,
        reconnect_interval=reconnect_interval,
    )
    # Establish the connection (with retry) before handing the client back.
    await client.setup()
    return client
Create HLK - SW16 Client class .
56,404
def _reset_timeout(self):
    """Restart the keep-alive (date packet) watchdog timer."""
    pending = self._timeout
    if pending:
        pending.cancel()
    # Close the transport if no keep-alive arrives within the client timeout.
    self._timeout = self.loop.call_later(self.client.timeout,
                                         self.transport.close)
Reset timeout for date keep alive .
56,405
def reset_cmd_timeout(self):
    """Restart the command-execution watchdog timer."""
    pending = self._cmd_timeout
    if pending:
        pending.cancel()
    # Close the transport if the command gets no answer within the timeout.
    self._cmd_timeout = self.loop.call_later(self.client.timeout,
                                             self.transport.close)
Reset timeout for command execution .
56,406
def _valid_packet ( raw_packet ) : if raw_packet [ 0 : 1 ] != b'\xcc' : return False if len ( raw_packet ) != 19 : return False checksum = 0 for i in range ( 1 , 17 ) : checksum += raw_packet [ i ] if checksum != raw_packet [ 18 ] : return False return True
Validate incoming packet .
56,407
def _handle_raw_packet(self, raw_packet):
    """Parse one incoming packet and dispatch on its type byte (offset 1)."""
    if raw_packet[1:2] == b'\x1f':
        # Date / keep-alive packet: restart the watchdog and log the clock.
        self._reset_timeout()
        year = raw_packet[2]
        month = raw_packet[3]
        day = raw_packet[4]
        hour = raw_packet[5]
        minute = raw_packet[6]
        sec = raw_packet[7]
        week = raw_packet[8]
        self.logger.debug('received date: Year: %s, Month: %s, Day: %s, Hour: %s, '
                          'Minute: %s, Sec: %s, Week %s',
                          year, month, day, hour, minute, sec, week)
    elif raw_packet[1:2] == b'\x0c':
        # Relay state packet: one byte per switch, 0x01 = on, 0x02 = off.
        states = {}
        changes = []
        for switch in range(0, 16):
            key = format(switch, 'x')
            if raw_packet[2 + switch:3 + switch] == b'\x01':
                states[key] = True
                if self.client.states.get(key, None) is not True:
                    changes.append(key)
                    self.client.states[key] = True
            elif raw_packet[2 + switch:3 + switch] == b'\x02':
                states[key] = False
                if self.client.states.get(key, None) is not False:
                    changes.append(key)
                    self.client.states[key] = False
        # Notify registered callbacks only for switches that changed.
        for switch in changes:
            for status_cb in self.client.status_callbacks.get(switch, []):
                status_cb(states[switch])
        self.logger.debug(states)
        if self.client.in_transaction:
            self.client.in_transaction = False
            self.client.active_packet = False
            self.client.active_transaction.set_result(states)
            # Everyone waiting for a status snapshot gets this result too.
            while self.client.status_waiters:
                waiter = self.client.status_waiters.popleft()
                waiter.set_result(states)
            if self.client.waiters:
                self.send_packet()
            else:
                self._cmd_timeout.cancel()
        elif self._cmd_timeout:
            self._cmd_timeout.cancel()
    else:
        self.logger.warning('received unknown packet: %s',
                            binascii.hexlify(raw_packet))
Parse incoming packet .
56,408
def send_packet(self):
    """Pop the next queued (waiter, packet) pair and transmit the packet."""
    waiter, packet = self.client.waiters.popleft()
    self.logger.debug('sending packet: %s', binascii.hexlify(packet))
    # Record the in-flight transaction before touching the wire.
    self.client.in_transaction = True
    self.client.active_transaction = waiter
    self.client.active_packet = packet
    self.reset_cmd_timeout()
    self.transport.write(packet)
Write next packet in send queue .
56,409
def format_packet(command):
    """Frame *command* for the wire: 0xAA header, payload zero-padded to
    17 bytes, then the 0x0B verify byte and 0xBB trailer (20 bytes total)."""
    padded = command.ljust(17, b"\x00")
    return b"".join((b"\xaa", padded, b"\x0b", b"\xbb"))
Format packet to be sent .
56,410
async def setup(self):
    """Open the connection, retrying forever until one attempt succeeds."""
    while True:
        conn_fut = self.loop.create_connection(
            lambda: SW16Protocol(self,
                                 disconnect_callback=self.handle_disconnect_callback,
                                 loop=self.loop,
                                 logger=self.logger),
            host=self.host,
            port=self.port)
        try:
            self.transport, self.protocol = await asyncio.wait_for(
                conn_fut, timeout=self.timeout)
        except asyncio.TimeoutError:
            self.logger.warning("Could not connect due to timeout error.")
        except OSError as exc:
            self.logger.warning("Could not connect due to error: %s", str(exc))
        else:
            # Connected: flag it, notify, and stop retrying.
            self.is_connected = True
            if self.reconnect_callback:
                self.reconnect_callback()
            break
        # Failed attempt: back off before the next try.
        await asyncio.sleep(self.reconnect_interval)
Set up the connection with automatic retry .
56,411
def stop(self):
    """Stop reconnect attempts and close the transport."""
    # Clearing the flag first prevents the disconnect callback from retrying.
    self.reconnect = False
    self.logger.debug("Shutting down.")
    if self.transport:
        self.transport.close()
Shut down transport .
56,412
async def handle_disconnect_callback(self):
    """React to a dropped connection: notify, then reconnect unless stopping."""
    self.is_connected = False
    if self.disconnect_callback:
        self.disconnect_callback()
    if self.reconnect:
        self.logger.debug("Protocol disconnected...reconnecting")
        await self.setup()
        self.protocol.reset_cmd_timeout()
        if self.in_transaction:
            # Re-send the packet that was in flight when the link dropped.
            self.protocol.transport.write(self.active_packet)
        else:
            # Otherwise poll the relay states to resynchronise.
            packet = self.protocol.format_packet(b"\x1e")
            self.protocol.transport.write(packet)
Reconnect automatically unless stopping .
56,413
def register_status_callback(self, callback, switch):
    """Register *callback* to fire whenever *switch* changes state."""
    callbacks = self.status_callbacks
    if callbacks.get(switch, None) is None:
        callbacks[switch] = []
    callbacks[switch].append(callback)
Register a callback which will fire when state changes .
56,414
def _send(self, packet):
    """Queue *packet* and return a future that resolves with the reply states."""
    result_fut = self.loop.create_future()
    self.waiters.append((result_fut, packet))
    # Kick off transmission straight away when no transaction is in flight.
    if self.waiters and self.in_transaction is False:
        self.protocol.send_packet()
    return result_fut
Add packet to send queue .
56,415
async def turn_on(self, switch=None):
    """Turn one relay on (hex-string *switch*), or all relays when None."""
    if switch is None:
        packet = self.protocol.format_packet(b"\x0a")
    else:
        relay = codecs.decode(switch.rjust(2, '0'), 'hex')
        packet = self.protocol.format_packet(b"\x10" + relay + b"\x01")
    return await self._send(packet)
Turn on relay .
56,416
async def turn_off(self, switch=None):
    """Turn one relay off (hex-string *switch*), or all relays when None."""
    if switch is None:
        packet = self.protocol.format_packet(b"\x0b")
    else:
        relay = codecs.decode(switch.rjust(2, '0'), 'hex')
        packet = self.protocol.format_packet(b"\x10" + relay + b"\x02")
    return await self._send(packet)
Turn off relay .
56,417
async def status(self, switch=None):
    """Return the current relay status.

    With *switch* given, returns that relay's bool; otherwise the full
    state dict. Piggy-backs on an in-flight poll when one exists.
    """
    if self.waiters or self.in_transaction:
        # A request is already queued/in flight; wait for its result.
        fut = self.loop.create_future()
        self.status_waiters.append(fut)
        result = await fut
    else:
        packet = self.protocol.format_packet(b"\x1e")
        result = await self._send(packet)
    if switch is not None:
        return result[switch]
    return result
Get current relay status .
56,418
def _delta_dir():
    """Return the current directory's path relative to the git root.

    The result ends with '/' so it can be prefixed to file names; it is the
    empty string when the current directory is the repository root itself.
    """
    repo = Repo()
    repo_root = repo.tree().abspath
    relative = os.getcwd().replace(repo_root, '')
    return relative + '/' if relative else ''
Returns the relative path of the current directory to the git repository. This path will be prepended to the filename path to find the file. If current_dir is the git root, this function returns an empty string.
56,419
def add_file_to_repo(filename):
    """Stage *filename* (relative to the current directory) in the git index."""
    try:
        repo = Repo()
        index = repo.index
        index.add([_delta_dir() + filename])
    except Exception as e:
        # str(e) instead of e.message: ``.message`` does not exist on
        # Python 3 exceptions and is not guaranteed even on Python 2 ones.
        print("exception while gitadding file: %s" % e)
Add a file to the git repo
56,420
def reset_to_last_commit():
    """Hard-reset the working tree to the last commit, ignoring any failure."""
    try:
        Repo().git.reset(hard=True)
    except Exception:
        # Best-effort by design: e.g. not inside a git repository.
        pass
Reset a modified file to its last commit status.
56,421
def commit_history(filename):
    """Return [{'date': datetime, 'hexsha': str}, ...] for commits touching *filename*."""
    path = _delta_dir() + filename
    repo = Repo()
    history = []
    for commit in repo.head.commit.iter_parents(paths=path):
        # committed_date is UTC epoch seconds; apply the committer's offset.
        stamp = commit.committed_date + commit.committer_tz_offset
        history.append({'date': datetime.fromtimestamp(stamp),
                        'hexsha': commit.hexsha})
    return history
Retrieve the commit history for a given filename .
56,422
def read_committed_file(gitref, filename):
    """Return the content (bytes) of *filename* as stored at commit *gitref*."""
    repo = Repo()
    commit = repo.commit(gitref)
    blob = commit.tree[_delta_dir() + filename]
    return blob.data_stream.read()
Retrieve the content of a file in an old commit and returns it .
56,423
def get(self, key, _else=None):
    """Return the value stored under *key*, or *_else* when missing or expired."""
    with self._lock:
        # Purge anything past its expiry before looking up.
        self.expired()
        try:
            return self._dict[key].get()
        except (KeyError, ValueError):
            # KeyError: unknown key; ValueError: the stored moment has expired.
            return _else
The method to get an assets value
56,424
def set(self, key, value, expires=None, future=None):
    """Store *value* under *key* (creating the entry on first use); returns *value*."""
    with self._lock:
        try:
            self._dict[key].set(value, expires=expires, future=future)
        except KeyError:
            # First write for this key: create the backing moment.
            self._dict[key] = moment(value, expires=expires, future=future,
                                     lock=self._lock)
        return value
Set a value
56,425
def values(self):
    """Return the values of all entries that are currently retrievable."""
    self.expired()
    current = []
    for key in self._dict.keys():
        try:
            current.append(self._dict[key].get())
        except Exception:
            # ``except Exception`` instead of a bare ``except:`` so that
            # KeyboardInterrupt/SystemExit are not swallowed; entries that
            # raise (e.g. expired between calls) are simply skipped.
            continue
    return current
Will only return the current values
56,426
def has_key(self, key):
    """Return True when *key* exists and has not expired."""
    if key not in self._dict:
        return False
    try:
        # Indexing triggers the expiry check in __getitem__.
        self[key]
    except (ValueError, KeyError):
        return False
    return True
Does the key exist? This method will check to see if it has expired too .
56,427
def dicom2db(file_path, file_type, is_copy, step_id, db_conn, sid_by_patient=False,
             pid_in_vid=False, visit_in_path=False, rep_in_path=False):
    """Extract meta-data from a DICOM file and record it in the DB.

    Returns a dict of the DB ids that were found/created
    ('participant_id', 'visit_id', 'session_id', ...); the dict is partial
    (or empty) when parsing fails or the DB rejects the insert.
    """
    global conn
    conn = db_conn
    tags = dict()
    logging.info("Extracting DICOM headers from '%s'" % file_path)
    try:
        dcm = dicom.read_file(file_path)
        dataset = db_conn.get_dataset(step_id)
        tags['participant_id'] = _extract_participant(dcm, dataset, pid_in_vid)
        # Visit id can come either from the file path or from the headers.
        if visit_in_path:
            tags['visit_id'] = _extract_visit_from_path(
                dcm, file_path, pid_in_vid, sid_by_patient, dataset,
                tags['participant_id'])
        else:
            tags['visit_id'] = _extract_visit(
                dcm, dataset, tags['participant_id'], sid_by_patient, pid_in_vid)
        tags['session_id'] = _extract_session(dcm, tags['visit_id'])
        tags['sequence_type_id'] = _extract_sequence_type(dcm)
        tags['sequence_id'] = _extract_sequence(tags['session_id'],
                                                tags['sequence_type_id'])
        # Same path-vs-header choice for the repetition id.
        if rep_in_path:
            tags['repetition_id'] = _extract_repetition_from_path(
                dcm, file_path, tags['sequence_id'])
        else:
            tags['repetition_id'] = _extract_repetition(dcm, tags['sequence_id'])
        tags['file_id'] = extract_dicom(file_path, file_type, is_copy,
                                        tags['repetition_id'], step_id)
    except InvalidDicomError:
        logging.warning("%s is not a DICOM file !" % step_id)
    except IntegrityError:
        logging.warning("A problem occurred with the DB ! A rollback will be performed...")
        conn.db_session.rollback()
    return tags
Extract some meta - data from a DICOM file and store in a DB .
56,428
def remaining_bytes(self, meta=True):
    """Return all unread bytes and mark the buffer as fully consumed.

    NOTE(review): *meta* is accepted for interface compatibility but is
    unused in this implementation — confirm against callers.
    """
    start, self._pos = self._pos, len(self.buffer)
    return self.buffer[start:]
Returns the remaining unread bytes from the buffer .
56,429
def decode(self, bytes):
    """Decode one packet from *bytes*; returns None for unknown packet ids."""
    self.buffer = bytes
    self._pos = 0
    # The leading varint identifies which protobuf message follows.
    packet_cls = identifier.get_packet_from_id(self._read_variunt())
    if packet_cls is None:
        return None
    packet = packet_cls()
    packet.ParseFromString(self.remaining_bytes())
    return packet
Decodes the packet off the byte string .
56,430
def encode(self, packet):
    """Encode *packet* (id varint + serialized body) and return the buffer bytes.

    Raises EncoderException when the packet type is not registered.
    """
    packet_id = identifier.get_packet_id(packet)
    if packet_id is None:
        raise EncoderException('unknown packet')
    self._write_variunt(packet_id)
    self._write(packet.SerializeToString())
    return bytes(self.buffer)
Pushes a packet to the writer encoding it on the internal buffer .
56,431
def create(self, data):
    """Create and validate an object (or plain dict) from *data*.

    Collects per-field errors and raises ValidationError with a
    field -> messages mapping when anything fails; otherwise returns an
    instance of ``self.klass`` (or the prototype dict when no class is set).
    """
    if data is None:
        return None
    prototype = {}
    errors = {}
    for field_name, field_spec in self.spec.fields.items():
        try:
            value = self._create_value(data, field_name, self.spec)
        # ``except ... as e`` (not the Python-2-only ``except ..., e``)
        # keeps this valid on Python 2.6+ and Python 3.
        except ValidationError as e:
            if field_name not in self.default_create_values:
                if hasattr(e, 'message_dict'):
                    # Flatten nested errors under "field.subfield" keys.
                    errors.update(dict(zip(
                        [field_name + '.' + key for key in e.message_dict.keys()],
                        e.message_dict.values())))
                else:
                    errors[field_name] = e.messages
        else:
            key_name = self.property_name_map[field_name]
            prototype[key_name] = value
    if self.prevent_extra_fields:
        extras = set(data.keys()) - set(self.property_name_map.keys())
        if extras:
            errors[', '.join(extras)] = ['field(s) not allowed']
    if errors:
        raise ValidationError(errors)
    # NOTE(review): _data merges defaults with the prototype but only
    # ``prototype`` is applied/returned below — confirm intent upstream.
    _data = deepcopy(self.default_create_values)
    _data.update(prototype)
    if self.klass:
        instance = self.klass()
        instance.__dict__.update(prototype)
        return instance
    else:
        return prototype
Create object from the given data .
56,432
def serialize(self, entity, request=None):
    """Serialize *entity* into a dict, or None when nothing was serialized.

    Raises ValidationError with a field -> messages mapping when any
    field-level serializer fails.
    """
    def should_we_insert(value, field_spec):
        # Missing optional values are omitted; required ones always appear.
        return value not in self.missing or field_spec.required

    errors = {}
    ret = {}
    for field_name, field_spec in self.spec.fields.items():
        value = self._get_value_for_serialization(entity, field_name, field_spec)
        func = self._get_serialize_func(field_name, self.spec)
        try:
            value = func(value, entity, request)
            if should_we_insert(value, field_spec):
                ret[field_name] = value
        # ``except ... as e`` keeps this valid on Python 2.6+ and Python 3
        # (the original ``except ..., e`` form is Python-2-only).
        except ValidationError as e:
            if hasattr(e, 'message_dict'):
                # Flatten nested errors under "field.subfield" keys.
                errors.update(dict(zip(
                    [field_name + '.' + key for key in e.message_dict.keys()],
                    e.message_dict.values())))
            else:
                errors[field_name] = e.messages
    if errors:
        raise ValidationError(errors)
    return None if ret == {} else ret
Serialize entity into dictionary .
56,433
def _create_value ( self , data , name , spec ) : field = getattr ( self , 'create_' + name , None ) if field : return field ( data , name , spec ) value = data . get ( name ) return spec . fields [ name ] . clean ( value )
Create the value for a field .
56,434
def _get_serialize_func ( self , name , spec ) : func = getattr ( self , 'serialize_' + name , None ) if func : return func func = getattr ( spec . fields [ name ] , 'serialize' , None ) if func : return func return lambda value , entity , request : value
Return the function that is used for serialization .
56,435
def _create_mappings ( self , spec ) : ret = dict ( zip ( set ( spec . fields ) , set ( spec . fields ) ) ) ret . update ( dict ( [ ( n , s . alias ) for n , s in spec . fields . items ( ) if s . alias ] ) ) return ret
Create property name map based on aliases .
56,436
def all_substrings(s):
    """Yield every contiguous substring of *s*, shortest first."""
    join = ''.join
    for size in range(1, len(s) + 1):
        # ``window`` yields all length-``size`` slices of the sequence.
        for chunk in window(s, size):
            yield join(chunk)
yields all substrings of a string
56,437
def equivalent_release_for_product(self, product):
    """Return the release for *product* with the same channel and major
    version and the highest minor version, or None when none exist."""
    releases = self._default_manager.filter(
        version__startswith=self.major_version() + '.',
        channel=self.channel,
        product=product).order_by('-version')
    if not getattr(settings, 'DEV', False):
        # Outside dev mode only public releases are eligible.
        releases = releases.filter(is_public=True)
    if releases:
        # Two-pass stable sort: longest dotted version first, then by the
        # minor component; the first element is the best match.
        return sorted(
            sorted(releases, reverse=True,
                   key=lambda r: len(r.version.split('.'))),
            reverse=True, key=lambda r: r.version.split('.')[1])[0]
Returns the release for a specified product with the same channel and major version with the highest minor version or None if no such releases exist
56,438
def notes(self, public_only=False):
    """Split this release's notes into (new_features, known_issues).

    Both groups keep the -sort_num/created ordering; new features are
    additionally ordered by Note.TAGS position, with 'Fixed' notes that
    start with this release's version (dot fixes) floated to the top.
    """
    tag_order = dict((tag, i) for i, tag in enumerate(Note.TAGS))
    release_notes = self.note_set.order_by('-sort_num', 'created')
    if public_only:
        release_notes = release_notes.filter(is_public=True)
    known_issues = [n for n in release_notes if n.is_known_issue_for(self)]
    new_features = sorted(
        sorted((n for n in release_notes if not n.is_known_issue_for(self)),
               key=lambda note: tag_order.get(note.tag, 0)),
        key=lambda n: n.tag == 'Fixed' and n.note.startswith(self.version),
        reverse=True)
    return new_features, known_issues
Retrieve a list of Note instances that should be shown for this release grouped as either new features or known issues and sorted first by sort_num highest to lowest and then by created date which is applied to both groups and then for new features we also sort by tag in the order specified by Note . TAGS with untagged notes coming first then finally moving any note with the fixed tag that starts with the release version to the top for what we call dot fixes .
56,439
def to_dict(self):
    """Return a dict of all data about the release, notes included."""
    data = model_to_dict(self, exclude=['id'])
    data['title'] = unicode(self)
    data['slug'] = self.slug
    data['release_date'] = self.release_date.date().isoformat()
    data['created'] = self.created.isoformat()
    data['modified'] = self.modified.isoformat()
    new_features, known_issues = self.notes(public_only=False)
    # Known issues are re-tagged so consumers can distinguish them.
    for note in known_issues:
        note.tag = 'Known'
    data['notes'] = [n.to_dict(self) for n in chain(new_features, known_issues)]
    return data
Return a dict of all data about the release.
56,440
def to_simple_dict(self):
    """Return a dict of only the basic data about the release."""
    return dict(version=self.version,
                product=self.product,
                channel=self.channel,
                is_public=self.is_public,
                slug=self.slug,
                title=unicode(self))
Return a dict of only the basic data about the release
56,441
def playToneList(self, playList=None):
    """Play every tone dict in *playList*, then stop the output.

    Each entry needs "freq", "reps", "delay" and "muteDelay" keys.
    Returns True on completion, False when no list was given.
    """
    # ``is None`` instead of ``== None``: identity is the correct check
    # and is immune to odd __eq__ overloads.
    if playList is None:
        return False
    for tone in playList:
        self.playTone(tone["freq"], tone["reps"], tone["delay"], tone["muteDelay"])
    self.stopTone()
    return True
! \ ~english Play tone from a tone list
56,442
def all(self, instance):
    """Return all ACL objects attached to the named *instance*."""
    url = self._url.format(instance=instance)
    response = requests.get(url, **self._default_request_kwargs)
    data = self._get_response_data(response)
    return self._concrete_acl_list(data)
Get all ACLs associated with the instance specified by name .
56,443
def create(self, instance, cidr_mask, description, **kwargs):
    """Create an ACL entry (cidr_mask + description) for *instance*.

    Extra keyword arguments are forwarded in the request payload.
    """
    url = self._url.format(instance=instance)
    request_data = {'cidr_mask': cidr_mask, 'description': description}
    request_data.update(kwargs)
    response = requests.post(url, data=json.dumps(request_data),
                             **self._default_request_kwargs)
    # Outcome is logged either way; the response body decides the return.
    if response.status_code == 200:
        logger.info('Successfully created a new ACL for instance {} with: {}.'.format(instance, request_data))
    else:
        logger.info('Failed to create a new ACL for instance {} with: {}.'.format(instance, request_data))
    data = self._get_response_data(response)
    return self._concrete_acl(data)
Create an ACL entry for the specified instance .
56,444
def get(self, instance, acl):
    """Fetch a single ACL by id for the instance named *instance*."""
    base_url = self._url.format(instance=instance)
    url = '{base}{aclid}/'.format(base=base_url, aclid=acl)
    response = requests.get(url, **self._default_request_kwargs)
    return self._concrete_acl(self._get_response_data(response))
Get an ACL by ID belonging to the instance specified by name .
56,445
def delete(self, instance, acl):
    """Delete an ACL by id; raises ObjectRocketException when it fails."""
    base_url = self._url.format(instance=instance)
    url = '{base}{aclid}/'.format(base=base_url, aclid=acl)
    response = requests.delete(url, **self._default_request_kwargs)
    if response.status_code == 200:
        logger.info('Successfully deleted ACL {}'.format(acl))
        return
    logger.info('Failed to delete ACL {}'.format(acl))
    logger.info('Response: [{0}] {1}'.format(response.status_code, response.content))
    raise errors.ObjectRocketException('Failed to delete ACL.')
Delete an ACL by ID belonging to the instance specified by name .
56,446
def _concrete_acl(self, acl_doc):
    """Turn a raw ACL document (dict) into an Acl object; None when invalid."""
    if not isinstance(acl_doc, dict):
        return None
    try:
        return Acl(document=acl_doc, acls=self)
    except Exception as ex:
        # An unparseable document usually means a client/server schema skew.
        logger.exception(ex)
        logger.error('Could not instantiate ACL document. You probably need to upgrade to a '
                     'recent version of the client. Document which caused this error: {}'
                     .format(acl_doc))
        return None
Concretize an ACL document .
56,447
def _concrete_acl_list ( self , acl_docs ) : if not acl_docs : return [ ] return list ( filter ( None , [ self . _concrete_acl ( acl_doc = doc ) for doc in acl_docs ] ) )
Concretize a list of ACL documents .
56,448
def _default_request_kwargs(self):
    """Default kwargs for requests calls, with the auth-token header merged in."""
    defaults = copy.deepcopy(super(Acls, self)._default_request_kwargs)
    auth_header = {'X-Auth-Token': self._client.auth._token}
    defaults.setdefault('headers', {}).update(auth_header)
    return defaults
The default request keyword arguments to be passed to the requests library .
56,449
def _url ( self ) : base_url = self . _client . _url . rstrip ( '/' ) return '{}/instances/{}/acls/{}/' . format ( base_url , self . instance_name , self . id )
The URL of this ACL object .
56,450
def insert_list_of_dictionaries_into_database_tables(
        dbConn, log, dictList, dbTableName, uniqueKeyList=None, dateModified=False,
        dateCreated=True, batchSize=2500, replace=False, dbSettings=False):
    """Insert a list of dictionaries into a database table in batches.

    The first dict seeds table creation; the rest are inserted in
    *batchSize* chunks via multiprocessing. Returns None.
    """
    global count
    global totalCount
    global globalDbConn
    global sharedList
    log.debug('completed the ````insert_list_of_dictionaries_into_database_tables`` function')
    # ``None`` default instead of a mutable ``[]`` shared across calls.
    if uniqueKeyList is None:
        uniqueKeyList = []
    reDate = re.compile('^[0-9]{4}-[0-9]{2}-[0-9]{2}T')
    if dbSettings:
        globalDbConn = dbSettings
    else:
        globalDbConn = dbConn
    if len(dictList) == 0:
        log.warning('the dictionary to be added to the database is empty' % locals())
        return None
    if len(dictList):
        # First row defines/creates the table schema.
        convert_dictionary_to_mysql_table(
            dbConn=dbConn, log=log, dictionary=dictList[0],
            dbTableName=dbTableName, uniqueKeyList=uniqueKeyList,
            dateModified=dateModified, reDatetime=reDate, replace=replace,
            dateCreated=dateCreated)
        dictList = dictList[1:]
    dbConn.autocommit(False)
    if len(dictList):
        total = len(dictList)
        batches = int(total / batchSize)
        start = 0
        end = 0
        sharedList = []
        # Slice the remaining rows into batchSize-sized chunks.
        for i in range(batches + 1):
            end = end + batchSize
            start = i * batchSize
            thisBatch = dictList[start:end]
            sharedList.append((thisBatch, end))
        totalCount = total + 1
        ltotalCount = totalCount
        # print(...) form works on both Python 2 and 3, unlike the
        # Python-2-only ``print "..."`` statement.
        print("Starting to insert %(ltotalCount)s rows into %(dbTableName)s" % locals())
        print(dbSettings)
        if dbSettings == False:
            fmultiprocess(log=log, function=_insert_single_batch_into_database,
                          inputArray=range(len(sharedList)), dbTableName=dbTableName,
                          uniqueKeyList=uniqueKeyList, dateModified=dateModified,
                          replace=replace, batchSize=batchSize, reDatetime=reDate,
                          dateCreated=dateCreated)
        else:
            fmultiprocess(log=log, function=_add_dictlist_to_database_via_load_in_file,
                          inputArray=range(len(sharedList)), dbTablename=dbTableName,
                          dbSettings=dbSettings, dateModified=dateModified)
        # Move the cursor up one line and clear it before the final report.
        sys.stdout.write("\x1b[1A\x1b[2K")
        print("%(ltotalCount)s / %(ltotalCount)s rows inserted into %(dbTableName)s" % locals())
    log.debug('completed the ``insert_list_of_dictionaries_into_database_tables`` function')
    return None
insert list of dictionaries into database tables
56,451
def make_directory(path):
    """Create *path* (including parents), ignoring it if it already exists."""
    try:
        makedirs(path)
        logging.debug('Directory created: {0}'.format(path))
    except OSError as e:
        # EEXIST is the expected already-there/race case; anything else is real.
        if e.errno != errno.EEXIST:
            raise
Create directory if that not exists .
56,452
def copy_file(self, from_path, to_path):
    """Copy *from_path* to *to_path*, creating the target directory if needed."""
    target_dir = op.dirname(to_path)
    if not op.exists(target_dir):
        self.make_directory(target_dir)
    shutil.copy(from_path, to_path)
    logging.debug('File copied: {0}'.format(to_path))
Copy file .
56,453
def params(self):
    """Parse the 'params' section of self.configuration into a plain dict."""
    namespace = JinjaInterpolationNamespace()
    namespace.read(self.configuration)
    # A missing/empty section yields an empty dict rather than None.
    return dict(namespace['params'] or {})
Read self params from configuration .
56,454
def scan(cls, path):
    """Collect a Template for every entry in *path*; [] when unreadable."""
    found = []
    try:
        for entry in listdir(path):
            try:
                found.append(Template(entry, op.join(path, entry)))
            except ValueError:
                # Entry is not a valid template; skip it.
                continue
    except OSError:
        # Missing or unreadable directory: fall through with what we have.
        pass
    return found
Scan directory for templates .
56,455
def copy(self):
    """Interactively collect template params, then paste all prepared templates.

    Returns the list of results from each template's ``paste``.
    """
    templates = self.prepare_templates()
    if self.params.interactive:
        keys = list(self.parser.default)
        for key in keys:
            # Keys starting with '_' are internal and never prompted for.
            if key.startswith('_'):
                continue
            prompt = "{0} (default is \"{1}\")? ".format(key, self.parser.default[key])
            if _compat.PY2:
                value = raw_input(prompt.encode('utf-8')).decode('utf-8')
            else:
                # BUG FIX: input() must receive text; the original passed
                # prompt.encode('utf-8'), which displays b'...' on Python 3.
                value = input(prompt)
            value = value.strip()
            if value:
                self.parser.default[key] = value
    self.parser.default['templates'] = tt = ','.join(t.name for t in templates)
    logging.warning("Paste templates: {0}".format(tt))
    self.make_directory(self.params.TARGET)
    logging.debug("\nDefault context:\n----------------")
    logging.debug(''.join('{0:<15} {1}\n'.format(*v)
                          for v in self.parser.default.items()))
    return [t.paste(**dict(self.parser.default.items())) for t in templates]
Prepare and paste self templates .
56,456
def iterate_templates(self):
    """Return every starter template found across self.dirs."""
    found = []
    for directory in self.dirs:
        found.extend(Template.scan(directory))
    return found
Iterate self starter templates .
56,457
def on_canvas_slave__electrode_pair_selected(self, slave, data):
    """Plan and dispatch a droplet route between the selected electrode pair."""
    import networkx as nx

    source_id = data['source_id']
    target_id = data['target_id']
    if self.canvas_slave.device is None or self.plugin is None:
        return
    # Drop any provisional routes (route_i < 0) before planning a new one.
    slave.df_routes = slave.df_routes.loc[slave.df_routes.route_i >= 0].copy()
    try:
        shortest_path = self.canvas_slave.device.find_path(source_id, target_id)
        self.plugin.execute_async('droplet_planning_plugin', 'add_route',
                                  drop_route=shortest_path)
    except nx.NetworkXNoPath:
        logger.error('No path found between %s and %s.', source_id, target_id)
Process pair of selected electrodes .
56,458
def ping_hub(self):
    """Ping the ZeroMQ hub; returns True when it answered in time."""
    if self.plugin is not None:
        try:
            self.plugin.execute(self.plugin.hub_name, 'ping', timeout_s=1,
                                silent=True)
        except IOError:
            self.on_heartbeat_error()
        else:
            self.heartbeat_alive_timestamp = datetime.now()
            logger.debug('Hub connection alive as of %s',
                         self.heartbeat_alive_timestamp)
            return True
Attempt to ping the ZeroMQ plugin hub to verify connection is alive .
56,459
def get_string_version(name, default=DEFAULT_STRING_NOT_FOUND, allow_ambiguous=True):
    """Return the installed version string of package *name*, or *default*."""
    caller = inspect.getouterframes(inspect.currentframe())[1][1]
    if caller.startswith('<doctest'):
        # Inside a doctest the direct caller frame is synthetic; use the outermost.
        caller = inspect.getouterframes(inspect.currentframe())[-1][1]
    try:
        di = get_distribution(name)
        installed_directory = os.path.join(di.location, name)
        if not caller.startswith(installed_directory) and not allow_ambiguous:
            # Caller does not live inside the installed package: treat as not found.
            raise DistributionNotFound
    except DistributionNotFound:
        return default
    else:
        return di.version
Get string version from installed package information .
56,460
def get_tuple_version(name, default=DEFAULT_TUPLE_NOT_FOUND, allow_ambiguous=True):
    """Return the installed version of *name* as a tuple (ints where possible)."""
    def _prefer_int(x):
        try:
            return int(x)
        except ValueError:
            return x

    version = get_string_version(name, default=default,
                                 allow_ambiguous=allow_ambiguous)
    if isinstance(version, tuple):
        # Already the not-found default tuple; pass it straight through.
        return version
    return tuple(_prefer_int(part) for part in version.split('.'))
Get tuple version from installed package information for easy handling .
56,461
def get_versions(name, default_string=DEFAULT_STRING_NOT_FOUND,
                 default_tuple=DEFAULT_TUPLE_NOT_FOUND, allow_ambiguous=True):
    """Return (version_string, version_tuple) for the installed package *name*."""
    return (get_string_version(name, default_string, allow_ambiguous),
            get_tuple_version(name, default_tuple, allow_ambiguous))
Get string and tuple versions from installed package information
56,462
def no_empty_value(func):
    """Decorator: reject falsy arguments before invoking *func*."""
    @wraps(func)
    def guarded(value):
        if not value:
            raise Exception("Empty value not allowed")
        return func(value)
    return guarded
Raises an exception if function argument is empty .
56,463
def to_bool(value):
    """Convert human boolean-like values ('yes', '0', 'true', ...) to bool.

    Unknown values fall back to plain truthiness.
    """
    cases = {
        '0': False, 'false': False, 'no': False,
        '1': True, 'true': True, 'yes': True,
    }
    if isinstance(value, basestring):
        value = value.lower()
    return cases.get(value, bool(value))
Converts human boolean - like values to Python boolean .
56,464
def etree_to_dict(t, trim=True, **kw):
    u"""Convert lxml.etree node *t* to a dict ('@' attrs, '#text', '#comments')."""
    d = {t.tag: {} if t.attrib else None}
    children = list(t)
    etree_to_dict_w_args = partial(etree_to_dict, trim=trim, **kw)
    if children:
        # Group converted children by tag; single occurrences stay scalar.
        dd = defaultdict(list)
        d = {t.tag: {}}
        for dc in map(etree_to_dict_w_args, children):
            for k, v in dc.iteritems():
                if k is not etree.Comment:
                    dd[k].append(v)
        d[t.tag] = {k: v[0] if len(v) == 1 else v for k, v in dd.iteritems()}
    if t.attrib:
        d[t.tag].update(('@' + k, v) for k, v in t.attrib.iteritems())
    if trim and t.text:
        t.text = t.text.strip()
    if t.text:
        if t.tag is etree.Comment and not kw.get('without_comments'):
            d['#comments'] = t.text
        elif children or t.attrib:
            d[t.tag]['#text'] = t.text
        else:
            # Leaf with no attrs/children collapses to its text.
            d[t.tag] = t.text
    return d
u Converts an lxml . etree object to Python dict .
56,465
def dict_to_etree(d, root):
    u"""Convert dict *d* into children/text/attributes of lxml element *root*."""
    def _to_etree(d, node):
        if d is None or len(d) == 0:
            return
        elif isinstance(d, basestring):
            node.text = d
        elif isinstance(d, dict):
            for k, v in d.items():
                assert isinstance(k, basestring)
                if k.startswith('#'):
                    # Only '#text' pseudo-keys are supported.
                    assert k == '#text'
                    assert isinstance(v, basestring)
                    node.text = v
                elif k.startswith('@'):
                    assert isinstance(v, basestring)
                    node.set(k[1:], v)
                elif isinstance(v, list):
                    sub_elem = etree.SubElement(node, k)
                    for child_num, e in enumerate(v):
                        if e is None:
                            if child_num == 0:
                                continue
                            # NOTE(review): argument order (node, k) differs
                            # from the other recursive calls — preserved as-is.
                            _to_etree(node, k)
                        else:
                            if child_num != 0 and not (isinstance(e, dict)
                                                       and not all(e.values())):
                                sub_elem = etree.SubElement(node, k)
                            _to_etree(e, sub_elem)
                else:
                    _to_etree(v, etree.SubElement(node, k))
        elif etree.iselement(d):
            etree.SubElement(d, node)
        else:
            raise AttributeError('Argument is neither dict nor basestring.')

    _to_etree(d, root)
    return root
u Converts a dict to lxml . etree object .
56,466
def objwalk(self, obj, path=(), memo=None):
    """Recursively walk *obj*, yielding (path, leaf) pairs.

    Mappings are walked by key, sequences/sets by index; strings are
    treated as leaves. ``memo`` holds ids of containers on the current
    walk to avoid infinite recursion through cycles.
    """
    text_types = (str, unicode) if str is bytes else (str, bytes)
    items_of = lambda mapping: getattr(mapping, 'iteritems', mapping.items)()
    if memo is None:
        memo = set()
    if isinstance(obj, Mapping):
        walker = items_of
    elif isinstance(obj, (Sequence, Set)) and not isinstance(obj, text_types):
        walker = enumerate
    else:
        walker = None
    if walker is None:
        # Leaf value: emit the accumulated path with it.
        yield path, obj
    elif id(obj) not in memo:
        memo.add(id(obj))
        for key, value in walker(obj):
            for found in self.objwalk(value, path + (key,), memo):
                yield found
        memo.remove(id(obj))
Traverse a dictionary recursively and save path
56,467
def set_cache_dir(directory):
    """Set the directory used to cache JSON API responses.

    Passing None disables caching. The directory is created if missing;
    ValueError is raised when the path exists but is not a directory.
    """
    global cache_dir
    if directory is None:
        cache_dir = None
    else:
        if not os.path.exists(directory):
            os.makedirs(directory)
        if not os.path.isdir(directory):
            raise ValueError("not a directory")
        cache_dir = directory
Set the directory to cache JSON responses from most API endpoints .
56,468
def element_is_empty(elem_to_parse, element_path=None):
    """Return True if the element is None, or has no text, tail, children
    or attributes.

    Whitespace-only text and tail count as empty.

    :param elem_to_parse: element, or object get_element can resolve.
    :param element_path: optional path to a sub-element to test instead.
    """
    element = get_element(elem_to_parse, element_path)
    if element is None:
        return True
    return (
        (element.text is None or not element.text.strip()) and
        (element.tail is None or not element.tail.strip()) and
        (element.attrib is None or not len(element.attrib)) and
        # getchildren() was deprecated and removed in Python 3.9;
        # len(element) counts direct children on all versions.
        (not len(element))
    )
Returns true if the element is None or has no text tail children or attributes . Whitespace in the element is stripped from text and tail before making the determination .
56,469
def insert_element(elem_to_parse, elem_idx, elem_path, elem_txt=u'', **attrib_kwargs):
    """Creates an element named after elem_path, containing elem_txt, with
    attrib_kwargs as attributes, inserts it into elem_to_parse at elem_idx,
    and returns it.

    If elem_path is an XPATH (XPATH_DELIM-delimited), missing intermediate
    elements are created along the way; elem_idx and elem_txt apply to the
    final tag only, while the attributes are applied at each created level.
    """
    element = get_element(elem_to_parse)
    if element is None or not elem_path:
        return None
    if not elem_idx:
        elem_idx = 0
    if elem_path and XPATH_DELIM in elem_path:
        tags = elem_path.split(XPATH_DELIM)
        if element_exists(element, elem_path):
            # Full path already present: add a new leaf under the existing parent.
            parent = get_element(element, XPATH_DELIM.join(tags[:-1]))
            return insert_element(parent, elem_idx, tags[-1], elem_txt, **attrib_kwargs)
        else:
            # Walk the path, creating whatever is missing as we go.
            this_elem = element
            last_idx = len(tags) - 1
            for idx, tag in enumerate(tags):
                next_elem = get_element(this_elem, tag)
                if next_elem is None:
                    if idx == last_idx:
                        # Leaf: honor the caller's index and text.
                        next_elem = insert_element(this_elem, elem_idx, tag, elem_txt, **attrib_kwargs)
                    else:
                        # Intermediate: always inserted at position 0 with no text.
                        next_elem = insert_element(this_elem, 0, tag, u'', **attrib_kwargs)
                this_elem = next_elem
            return this_elem
    # Single-tag case: build and insert directly.
    subelem = Element(elem_path, attrib_kwargs)
    subelem.text = elem_txt
    element.insert(elem_idx, subelem)
    return subelem
Creates an element named after elem_path containing elem_txt with kwargs as attributes inserts it into elem_to_parse at elem_idx and returns it .
56,470
def remove_empty_element(parent_to_parse, element_path, target_element=None):
    """Searches for all empty sub-elements named after element_path (or
    target_element beneath it) in the parsed parent, removes them all, and
    returns them as a list.

    Parents left empty by a removal are themselves removed recursively.
    """
    element = get_element(parent_to_parse)
    removed = []
    if element is None or not element_path:
        return removed
    if target_element:
        # Fold target_element into the path so one code path handles both forms.
        if not element_path.endswith(target_element):
            element_path = XPATH_DELIM.join([element_path, target_element])
        target_element = None
    if XPATH_DELIM not in element_path:
        # Direct children: drop any that are empty.
        for subelem in get_elements(element, element_path):
            if element_is_empty(subelem):
                removed.append(subelem)
                element.remove(subelem)
    else:
        # Nested path: remove empty leaves, then climb back up removing
        # ancestors that became empty as a result.
        xpath_segments = element_path.split(XPATH_DELIM)
        element_path = XPATH_DELIM.join(xpath_segments[:-1])
        target_element = xpath_segments[-1]
        for parent in get_elements(element, element_path):
            for child in get_elements(parent, target_element):
                if element_is_empty(child):
                    removed.append(child)
                    parent.remove(child)
            if element_is_empty(parent):
                if len(xpath_segments) == 2:
                    # Parent is a direct child of the root element.
                    removed.extend(remove_empty_element(element, xpath_segments[0]))
                else:
                    # Recurse one level up the path.
                    next_element_path = XPATH_DELIM.join(xpath_segments[:-2])
                    next_target_element = parent.tag
                    removed.extend(remove_empty_element(element, next_element_path, next_target_element))
    return removed
Searches for all empty sub-elements named after element_name in the parsed element; if any exist, removes them all and returns them as a list.
56,471
def remove_element_attributes(elem_to_parse, *args):
    """Remove the named keys from the element's attributes.

    :returns: dict of the attributes actually removed; None when the
        element cannot be resolved.
    """
    element = get_element(elem_to_parse)
    if element is None:
        return None
    if not args:
        return {}
    attributes = element.attrib
    stripped = {}
    for key in args:
        if key in attributes:
            stripped[key] = attributes.pop(key)
    return stripped
Removes the specified keys from the element s attributes and returns a dict containing the attributes that have been removed .
56,472
def _get_elements_property(parent_to_parse, element_path, prop_name):
    """Collect the named property from the parent (or each element matched
    by element_path), stripping string values and dropping empties.

    :returns: list of non-empty property values.
    """
    parent_element = get_element(parent_to_parse)
    if parent_element is None:
        return []
    if element_path and not element_exists(parent_element, element_path):
        return []
    if not element_path:
        # No path: read the property off the parent itself.
        value = getattr(parent_element, prop_name)
        if isinstance(value, string_types):
            value = value.strip()
        return [value] if value else []
    collected = []
    for node in parent_element.findall(element_path):
        prop = getattr(node, prop_name)
        if not prop:
            continue
        if isinstance(prop, string_types):
            prop = prop.strip()
        if prop:
            collected.append(prop)
    return collected
A helper to construct a list of values from the named property of the parent element, or of each element matched by the given path.
56,473
def _set_element_property(parent_to_parse, element_path, prop_name, value):
    """Assign *value* to the named property of the parsed element,
    creating the sub-element at element_path if needed.

    Non-string values are replaced with an empty string.
    :returns: the element written to, or None if the parent is unresolvable.
    """
    target = get_element(parent_to_parse)
    if target is None:
        return None
    if element_path and not element_exists(target, element_path):
        target = insert_element(target, 0, element_path)
    text = value if isinstance(value, string_types) else u''
    setattr(target, prop_name, text)
    return target
Assigns the value to the parsed parent element and then returns it
56,474
def set_elements_tail(parent_to_parse, element_path=None, tail_values=None):
    """Assign tail_values, in order, to the elements matched under the
    parent; extra values cause new elements to be inserted.

    :returns: result of the underlying _set_elements_property call.
    """
    values = [] if tail_values is None else tail_values
    return _set_elements_property(parent_to_parse, element_path, _ELEM_TAIL, values)
Assigns an array of tail values to each of the elements parsed from the parent. The tail values are assigned in the same order they are provided. If there are fewer values than elements, the remaining elements are skipped; but if there are more, new elements will be inserted for each of the remaining tail values.
56,475
def set_elements_text(parent_to_parse, element_path=None, text_values=None):
    """Assign text_values, in order, to the elements matched under the
    parent; extra values cause new elements to be inserted.

    :returns: result of the underlying _set_elements_property call.
    """
    values = [] if text_values is None else text_values
    return _set_elements_property(parent_to_parse, element_path, _ELEM_TEXT, values)
Assigns an array of text values to each of the elements parsed from the parent. The text values are assigned in the same order they are provided. If there are fewer values than elements, the remaining elements are skipped; but if there are more, new elements will be inserted for each of the remaining text values.
56,476
def strip_namespaces(file_or_xml):
    """Remove all namespaces from the XML file or string passed in.

    Non-file, non-string input is returned unchanged.
    """
    content = _xml_content_to_string(file_or_xml)
    if not isinstance(content, string_types):
        return content
    # Namespace declarations may nest/repeat: keep substituting until gone.
    while _NAMESPACES_FROM_DEC_REGEX.search(content) is not None:
        content = _NAMESPACES_FROM_DEC_REGEX.sub(r'\1', content)
    content = _NAMESPACES_FROM_TAG_REGEX.sub(r'\1', content)
    return _NAMESPACES_FROM_ATTR_REGEX.sub(r'\1\3', content)
Removes all namespaces from the XML file or string passed in . If file_or_xml is not a file or string it is returned as is .
56,477
def strip_xml_declaration(file_or_xml):
    """Remove the XML declaration line from the file or string passed in.

    Non-file, non-string input is returned unchanged; only the first
    declaration occurrence is stripped.
    """
    content = _xml_content_to_string(file_or_xml)
    if not isinstance(content, string_types):
        return content
    return _XML_DECLARATION_REGEX.sub(r'', content, 1)
Removes XML declaration line from file or string passed in . If file_or_xml is not a file or string it is returned as is .
56,478
def floating_point_to_datetime(day, fp_time):
    """Convert a fractional-hour time on *day* into a datetime.

    The time is rounded up to the next whole minute (ceil of fp_time * 60).
    """
    minutes = math.ceil(60 * fp_time)
    midnight = datetime(year=day.year, month=day.month, day=day.day)
    return midnight + timedelta(minutes=minutes)
Convert a floating point time to a datetime .
56,479
def adhan(day, location, parameters, timezone_offset=0):
    """Calculate adhan (prayer) times for *day* at *location*.

    :param day: date for which to compute the times.
    :param location: (latitude, longitude) pair in degrees.
    :param parameters: dict with 'fajr_angle' and either 'isha_angle' or
        'isha_delay'; optional 'asr_multiplier' (defaults to ASR_STANDARD).
    :param timezone_offset: hours to shift the computed times by.
    :returns: dict mapping prayer names to datetime objects.
    """
    latitude, longitude = location
    # Bind the shared day/latitude arguments; each call supplies the angle.
    time_at_sun_angle = partial(compute_time_at_sun_angle, day=day, latitude=latitude)
    zuhr_time = compute_zuhr_utc(day, longitude)
    # Sunrise and sunset are symmetric offsets around solar noon (zuhr).
    shuruq_time = zuhr_time - time_at_sun_angle(angle=SUNRISE_ANGLE)
    maghrib_time = zuhr_time + time_at_sun_angle(angle=SUNSET_ANGLE)
    fajr_time = zuhr_time - time_at_sun_angle(angle=parameters['fajr_angle'])
    # Isha: either a fixed delay after maghrib, or a sun-angle computation.
    if parameters.get('isha_delay', None):
        isha_time = maghrib_time + parameters['isha_delay']
    else:
        isha_time = (zuhr_time + time_at_sun_angle(angle=parameters['isha_angle']))
    asr_multiplier = parameters.get('asr_multiplier', ASR_STANDARD)
    asr_time = zuhr_time + time_at_shadow_length(day=day, latitude=latitude, multiplier=asr_multiplier)
    # timezone_offset is in hours; timedelta wants minutes here.
    offset = timedelta(minutes=60 * timezone_offset)
    return {
        'fajr': floating_point_to_datetime(day, fajr_time) + offset,
        'zuhr': floating_point_to_datetime(day, zuhr_time) + offset,
        'shuruq': floating_point_to_datetime(day, shuruq_time) + offset,
        'asr': floating_point_to_datetime(day, asr_time) + offset,
        'maghrib': floating_point_to_datetime(day, maghrib_time) + offset,
        'isha': floating_point_to_datetime(day, isha_time) + offset,
    }
Calculate adhan times given the parameters .
56,480
def _make_fn_text ( self ) : if not self . _f : text = "(not loaded)" elif self . _f . filename : text = os . path . relpath ( self . _f . filename , "." ) else : text = "(filename not set)" return text
Makes filename text
56,481
def format_BLB():
    """Apply the project's standard Matplotlib formatting options."""
    rc("figure", facecolor="white")
    rc('font', family='serif', size=10)
    rc('xtick', labelsize=10)
    rc('ytick', labelsize=10)
    rc('axes', linewidth=1)
    # Identical tick geometry on both axes.
    for axis in ('xtick', 'ytick'):
        rc(axis + '.major', size=4, width=1)
        rc(axis + '.minor', size=2, width=1)
Sets some formatting options in Matplotlib .
56,482
def get_declared_fields(bases, attrs):
    """Find all declared fields on the class and its bases; return as a dict.

    Field attributes are popped out of ``attrs`` (so a metaclass can move
    them aside); base-class fields are prepended so subclass declarations
    override them.

    NOTE(review): ``base.base_fields.items() + fields`` relies on
    dict.items() returning a list -- Python 2 only. Likewise, popping from
    ``attrs`` while iterating items() is only safe on Python 2.
    """
    def is_field(prop):
        # Both plain form fields and nested representations count as fields.
        return isinstance(prop, forms.Field) or isinstance(prop, BaseRepresentation)
    fields = [(field_name, attrs.pop(field_name)) for field_name, obj in attrs.items() if is_field(obj)]
    for base in bases[::-1]:
        if hasattr(base, 'base_fields'):
            fields = base.base_fields.items() + fields
    return dict(fields)
Find all fields and return them as a dictionary .
56,483
def validate(self, data=None):
    """Validate *data* against this form's declared fields.

    Collects per-field ValidationError messages, flags any keys not
    declared as fields, and raises a single ValidationError with all
    collected errors if anything failed.

    :param data: dict of input values (checked by _getData).
    :raises ValidationError: on any field error or unexpected key.
    """
    errors = {}
    data = self._getData(data)
    for name, field in self.fields.items():
        try:
            field.clean(data.get(name))
        # 'except ValidationError, e' is a Python-3 syntax error;
        # 'as e' works on Python 2.6+ and 3.
        except ValidationError as e:
            errors[name] = e.messages
        except AttributeError:
            raise ValidationError('data should be of type dict but is %s' % (type(data),))
    # Reject keys that do not correspond to any declared field.
    extras = set(data.keys()) - set(self.fields.keys())
    if extras:
        errors[', '.join(extras)] = ['field(s) not allowed']
    if errors:
        raise ValidationError(errors)
Validate the data
56,484
def _getData(self, data):
    """Return *data* unchanged if it is a dict; otherwise raise
    ValidationError."""
    if isinstance(data, dict):
        return data
    raise ValidationError('data is not a valid dictionary: %s' % (str(type(data)),))
Check that data is acceptable and return it .
56,485
def main():
    """Demo driver: parse a list of sample utterances with megaparsex.

    Each string is parsed with the bundled parser plus the local
    parse_networking parser; confirmations prompt the user in a loop,
    commands ask for and run shell input, and plain responses are printed.
    """
    for text in ["how are you", "ip address", "restart", "run command", "rain EGPF", "reverse SSH"]:
        print("\nparse text: " + text + "\nWait 3 seconds, then parse.")
        time.sleep(3)
        response = megaparsex.multiparse(
            text=text,
            parsers=[megaparsex.parse, parse_networking],
            help_message="Does not compute. I can report my IP address and I "
            "can restart my script."
        )
        if type(response) is megaparsex.confirmation:
            # Keep prompting until the user explicitly confirms or denies.
            while response.confirmed() is None:
                response.test(text=megaparsex.get_input(prompt=response.prompt() + " "))
            if response.confirmed():
                print(response.feedback())
                response.run()
            else:
                print(response.feedback())
        elif type(response) is megaparsex.command:
            # A triggered command: gather the command text and run it in the foreground.
            output = response.engage_command(
                command=megaparsex.get_input(prompt=response.prompt() + " "),
                background=False
            )
            if output:
                print("output:\n{output}".format(output=output))
        else:
            print(response)
Loop over a list of input text strings . Parse each string using a list of parsers one included in megaparsex and one defined in this script . If a confirmation is requested seek confirmation otherwise display any response text and engage any triggered functions .
56,486
def get_packet_id(self, packet):
    """Return the registered ID for *packet*'s protocol-buffer class.

    The first registry entry whose class matches (by isinstance) wins;
    None is returned when nothing matches.
    """
    for entry in self._packets:
        if isinstance(packet, entry['cls']):
            return entry['id']
    return None
Returns the ID of a protocol buffer packet . Returns None if no ID was found .
56,487
def main(*args):
    """Command-line entry point.

    Parses arguments (falling back to sys.argv), configures logging,
    and either lists available templates or copies the selected ones.
    Exits with status 1 on any copy failure.
    """
    args = args or sys.argv[1:]
    params = PARSER.parse_args(args)
    from .log import setup_logging
    setup_logging(params.level.upper())
    from .core import Starter
    starter = Starter(params)
    if not starter.params.TEMPLATES or starter.params.list:
        # Listing mode: quiet the logger, then emit one line per template.
        setup_logging('WARN')
        for t in sorted(starter.iterate_templates()):
            # logging.warn is a deprecated alias; warning() is the real API.
            logging.warning("%s -- %s", t.name, t.params.get('description', 'no description'))
        return True
    try:
        starter.copy()
    except Exception as e:
        logging.error(e)
        sys.exit(1)
Enter point .
56,488
def summarize(df, preview_rows=8, display_max_cols=None, display_width=None,
              output_path=None, output_safe=True, to_folder=False):
    """Print information about the DataFrame to a file or to the prompt.

    Gathers a preview, numeric/categorical describes, percentiles,
    potential outliers and a correlation matrix, then prints them,
    writes them to a single file, or writes one CSV per section into a
    folder. Pandas display settings are restored before returning.

    :param df: DataFrame to summarize.
    :param preview_rows: rows to show in the preview section.
    :param display_max_cols: pandas max-columns display override.
    :param display_width: pandas display-width override.
    :param output_path: file (or folder, with to_folder=True) to write to;
        None prints to stdout.
    :param output_safe: forwarded to _io.output_to_file for the single-file case.
    :param to_folder: write each section as a separate CSV in output_path.
    """
    assert type(df) is pd.DataFrame
    # pd_settings returns the previous values so they can be restored at the end.
    initial_settings = pd_settings(display_max_cols, None, display_width)
    df_preview = _io.preview(df, preview_rows)
    df_desc_num, df_desc_cat = detailed_desc(df)
    percent_values = stats.percentiles(df)
    potential_outliers = stats.df_outliers(df).dropna(axis=1, how='all')
    # Treat "no outlier rows at all" as a skipped section (None).
    potential_outliers = potential_outliers if _utils.rows(potential_outliers) else None
    corr_values = regstats.corr_matrix(df)
    title_list = ['Preview', 'Describe (Numerical)', 'Describe (Categorical)',
                  'Percentile Details', 'Potential Outliers', 'Correlation Matrix']
    info_list = [df_preview, df_desc_num, df_desc_cat, percent_values,
                 potential_outliers, corr_values]
    error_list = [None, 'No numerical data.', 'All numerical data.',
                  'No numerical data.', 'No potential outliers.',
                  'No categorical, bool, or numerical data.']
    output = ''
    for title, value, error_text in zip(title_list, info_list, error_list):
        if value is None:
            value = "{} skipped: {}".format(title, error_text)
        # Avoid double blank lines when the section already ends with one.
        if str(value).endswith('\n'):
            value = value[:-1]
        output += '{}\n{}\n\n'.format(_io.title_line(title), value)
    if output_path is None:
        print(output)
    else:
        if not to_folder:
            print('Outputting to file...')
            _io.output_to_file(output, output_path, output_safe)
        else:
            print('Outputting to folder...')
            if not os.path.exists(output_path):
                os.mkdir(output_path)
            for title, value, error_text in zip(title_list, info_list, error_list):
                if value is None:
                    print("{} skipped: {}".format(title, error_text))
                else:
                    file_dir = os.path.join(output_path, "{}.csv".format(title))
                    if type(value) is pd.DataFrame:
                        value.to_csv(file_dir)
                    else:
                        _io.output_to_file(value, file_dir, False)
        # NOTE(review): statement placement reconstructed from a flattened
        # source -- 'Done!' is assumed to follow both output branches.
        print('Done!')
    pd_settings(*initial_settings)
Prints information about the DataFrame to a file or to the prompt .
56,489
def timed_pipe(generator, seconds=3):
    """Yield items from *generator* until *seconds* have elapsed.

    Useful for capping an infinite pipeline: the clock is checked before
    each item is yielded, and iteration stops once the deadline passes.
    """
    deadline = ts() + seconds
    for item in generator:
        if ts() >= deadline:
            break
        yield item
This is a time limited pipeline . If you have a infinite pipeline and want it to stop yielding after a certain amount of time use this!
56,490
def destruct(particles, index):
    """Fermion annihilation operator, in matrix representation, for the
    indexed particle in a bounded N-particle fermion Fock space.

    :param particles: number of single-particle states N (space dim 2**N).
    :param index: which particle's annihilation operator to build.
    :returns: scipy.sparse CSR matrix of shape (2**N, 2**N), float dtype.
    """
    dim = 2 ** particles
    flipper = 2 ** index
    # Build only the nonzero entries: a dense 2**N x 2**N intermediate
    # costs O(4**N) memory, while the operator has at most 2**(N-1) entries.
    rows, cols, vals = [], [], []
    for state in range(dim):
        if btest(state, index) == 1:
            rows.append(state ^ flipper)
            cols.append(state)
            vals.append(phase(state, index))
    # float64 matches the dtype of the original np.zeros-based construction.
    return csr_matrix((np.array(vals, dtype=np.float64), (rows, cols)), shape=(dim, dim))
Fermion annihilation operator in matrix representation for a indexed particle in a bounded N - particles fermion fock space
56,491
def json_unicode_to_utf8(data):
    """Recursively encode every unicode string in a JSON structure to UTF-8.

    Dicts and lists are rebuilt with converted keys/values; any other
    value is returned unchanged. (Python 2 only: relies on ``unicode``.)
    """
    if isinstance(data, unicode):
        return data.encode('utf-8')
    if isinstance(data, dict):
        return dict((json_unicode_to_utf8(key), json_unicode_to_utf8(val))
                    for key, val in data.items())
    if isinstance(data, list):
        return [json_unicode_to_utf8(item) for item in data]
    return data
Change all strings in a JSON structure to UTF - 8 .
56,492
def json_decode_file(filename):
    """Parse a text file with json to build a Python object representation.

    Comments are stripped first and all unicode strings are converted to
    UTF-8.
    """
    # Use a context manager so the handle is closed instead of leaked.
    with open(filename) as f:
        seq = f.read()
    seq = json_remove_comments(seq)
    return json_unicode_to_utf8(json.loads(seq))
Parses a textfile using json to build a python object representation
56,493
def _post_init(self):
    """Run the post-init hook, converting any exception into an error
    Result routed through _onerror."""
    try:
        return self.postinit()
    except Exception as err:
        failure = Result.from_exception(err, uuid=self.uuid)
        return self._onerror(failure)
A post init trigger
56,494
def _postrun(self, result):
    """Log the post-run event, then invoke the postrun hook with *result*."""
    cls = self.__class__
    logger.debug(
        "{}.PostRun: {}[{}]".format(cls.__name__, cls.path, self.uuid),
        extra=dict(kmsg=Message(
            self.uuid, entrypoint=cls.path, params=self.params,
            metadata=self.metadata).dump()))
    return self.postrun(result)
To execute after exection
56,495
def execute(self, result=None):
    """Execution wrapper guaranteeing a Result: any exception raised by
    unsafe_execute is turned into an error Result via _onerror."""
    try:
        return self.unsafe_execute(result=result)
    except Exception as err:
        return self._onerror(Result.from_exception(err, uuid=self.uuid))
Execution wrapper to make sure that it return a result
56,496
def to_Message(self, result=None):
    """Entrypoint -> Message conversion mirroring this object's state."""
    # NOTE(review): a falsy (not just None) result falls back to
    # self.result -- confirm that is intended.
    payload = result if result else self.result
    return Message(uuid=self.uuid,
                   entrypoint=self.__class__.path,
                   params=self.params,
                   result=payload,
                   metadata=self.metadata)
Entrypoint - > Message
56,497
def from_Message(cls, kmsg):
    """Message -> Entrypoint: build an instance from the message fields."""
    return cls(
        uuid=kmsg.uuid,
        params=kmsg.params,
        result=kmsg.result,
        metadata=kmsg.metadata,
    )
Message - > Entrypoint
56,498
def save_as(self, filename=None):
    """Dump object contents into a file on disk.

    Falls back to the current filename, then the class default; raises
    RuntimeError when no name is available. The chosen name is recorded
    on self.filename after a successful save.
    """
    target = filename
    if target is None:
        target = self.filename
    if target is None:
        target = self.default_filename
    if target is None:
        raise RuntimeError("Class '{}' has no default filename".format(self.__class__.__name__))
    self._do_save_as(target)
    self.filename = target
Dumps object contents into file on disk .
56,499
def load(self, filename=None):
    """Load the file and record its name on the instance.

    A file may be loaded only once; empty files raise RuntimeError.
    Falls back to the class default filename when none is given.
    """
    assert not self.__flag_loaded, "File can be loaded only once"
    target = self.default_filename if filename is None else filename
    assert target is not None, "{0!s} class has no default filename".format(self.__class__.__name__)
    if os.path.getsize(target) == 0:
        raise RuntimeError("Empty file: '{0!s}'".format(target))
    self._test_magic(target)
    self._do_load(target)
    self.filename = target
    self.__flag_loaded = True
Loads file and registers filename as attribute .