| idx (int64, 0–63k) | question (string, 53–5.28k chars) | target (string, 5–805 chars) |
|---|---|---|
2,000
|
def continue_prompt(message=""):
    answer = False
    message = message + "\n'Yes' or 'No' to continue: "
    while answer not in ('Yes', 'No'):
        answer = prompt(message, eventloop=eventloop())
        if answer == "Yes":
            answer = True
            break
        if answer == "No":
            answer = False
            break
    return answer
|
Prompt the user to continue or not
|
2,001
|
def printo(msg, encoding=None, errors='replace', std_type='stdout'):
    std = getattr(sys, std_type, sys.stdout)
    if encoding is None:
        try:
            encoding = std.encoding
        except AttributeError:
            encoding = None
    if encoding is None:
        encoding = 'ascii'
    if hasattr(std, 'buffer'):
        std = std.buffer
    std.write(msg.encode(encoding, errors=errors))
    std.write(b'\n')
    std.flush()
|
Write msg on stdout. If no encoding is specified, the detected encoding of stdout is used. If the encoding can't encode some chars, they are replaced by '?'.
|
2,002
|
def format_tree(tree):
    def _traverse_tree(tree, parents=None):
        tree['parents'] = parents
        childs = tree.get('childs', [])
        nb_childs = len(childs)
        for index, child in enumerate(childs):
            child_parents = list(parents) + [index == nb_childs - 1]
            tree['childs'][index] = _traverse_tree(tree['childs'][index],
                                                   parents=child_parents)
        return tree

    tree = _traverse_tree(tree, parents=[])

    def _get_rows_data(tree, rows):
        prefix = ''
        for p in tree['parents'][:-1]:
            if p is False:
                prefix += '│ '
            else:
                prefix += ' '
        if not tree['parents']:
            pass
        elif tree['parents'][-1] is True:
            prefix += '└── '
        else:
            prefix += '├── '
        if isinstance(tree['node'], string_types):
            tree['node'] = [tree['node']]
        rows.append([prefix + tree['node'][0]] + tree['node'][1:])
        for child in tree.get('childs', []):
            rows = _get_rows_data(child, rows)
        return rows

    rows = _get_rows_data(tree, [])
    return format_table(rows)
|
Format a python tree structure
|
2,003
|
def parallel_map(func, iterable, args=None, kwargs=None, workers=None):
    if args is None:
        args = ()
    if kwargs is None:
        kwargs = {}
    if workers is not None:
        pool = Pool(workers)
    else:
        pool = Group()
    iterable = [pool.spawn(func, i, *args, **kwargs) for i in iterable]
    pool.join(raise_error=True)
    for idx, i in enumerate(iterable):
        i_type = type(i.get())
        i_value = i.get()
        if issubclass(i_type, BaseException):
            raise i_value
        iterable[idx] = i_value
    return iterable
|
Map func on a list using gevent greenlets.
|
2,004
|
def parse(self, words):
    def exact(words):
        try:
            return float(words)
        except:
            return None

    guess = exact(words)
    if guess is not None:
        return guess

    split = words.split(' ')
    if split[-1] in self.__fractions__:
        split[-1] = self.__fractions__[split[-1]]
    elif split[-1] in self.__ordinals__:
        split[-1] = self.__ordinals__[split[-1]]
    parsed_ordinals = ' '.join(split)
    return self.parseFloat(parsed_ordinals)
|
A general method for parsing word-representations of numbers. Supports floats and integers.
|
2,005
|
def parseFloat(self, words):
    def pointFloat(words):
        m = re.search(r'(.*) point (.*)', words)
        if m:
            whole = m.group(1)
            frac = m.group(2)
            total = 0.0
            coeff = 0.10
            for digit in frac.split(' '):
                total += coeff * self.parse(digit)
                coeff /= 10.0
            return self.parseInt(whole) + total
        return None

    def fractionFloat(words):
        m = re.search(r'(.*) and (.*)', words)
        if m:
            whole = self.parseInt(m.group(1))
            frac = m.group(2)
            frac = re.sub(r'(\w+)s(\b)', '\g<1>\g<2>', frac)
            frac = re.sub(r'(\b)a(\b)', '\g<1>one\g<2>', frac)
            split = frac.split(' ')
            num = split[:1]
            denom = split[1:]
            while denom:
                try:
                    num_value = self.parse(' '.join(num))
                    denom_value = self.parse(' '.join(denom))
                    return whole + float(num_value) / denom_value
                except:
                    num += denom[:1]
                    denom = denom[1:]
        return None

    result = pointFloat(words)
    if result:
        return result
    result = fractionFloat(words)
    if result:
        return result
    return self.parseInt(words)
|
Convert a floating-point number described in words to a double.
|
2,006
|
def parseInt(self, words):
    words = words.replace(" and ", " ").lower()
    words = re.sub(r'(\b)a(\b)', '\g<1>one\g<2>', words)

    def textToNumber(s):
        a = re.split(r"[\s-]+", s)
        n = 0
        g = 0
        for w in a:
            x = NumberService.__small__.get(w, None)
            if x is not None:
                g += x
            elif w == "hundred":
                g *= 100
            else:
                x = NumberService.__magnitude__.get(w, None)
                if x is not None:
                    n += g * x
                    g = 0
                else:
                    raise NumberService.NumberException("Unknown number: " + w)
        return n + g

    return textToNumber(words)
|
Parses words to the integer they describe.
|
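The accumulator logic above (a running group g scaled by "hundred", flushed into the total on a magnitude word) is easy to check in isolation. A minimal standalone sketch, with stand-in lookup tables in place of the class's __small__ and __magnitude__ dictionaries:

```python
import re

# Stand-in tables; the class above is assumed to hold fuller versions.
SMALL = {"one": 1, "two": 2, "six": 6, "twenty": 20, "ninety": 90}
MAGNITUDE = {"thousand": 1000, "million": 10**6}

def text_to_number(s):
    n = g = 0
    for w in re.split(r"[\s-]+", s.lower()):
        if w in SMALL:
            g += SMALL[w]            # accumulate into the current group
        elif w == "hundred":
            g *= 100                 # scale the group
        elif w in MAGNITUDE:
            n += g * MAGNITUDE[w]    # flush the group at a magnitude boundary
            g = 0
        else:
            raise ValueError("Unknown number: " + w)
    return n + g

print(text_to_number("two hundred twenty six"))  # 226
print(text_to_number("six thousand ninety"))     # 6090
```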
2,007
|
def parseMagnitude(m):
    m = NumberService().parse(m)

    def toDecimalPrecision(n, k):
        return float("%.*f" % (k, round(n, k)))

    digits = 2
    magnitude = toDecimalPrecision(m, digits)
    while not magnitude:
        digits += 1
        magnitude = toDecimalPrecision(m, digits)
    if m < 1.0:
        magnitude = toDecimalPrecision(m, digits + 1)
    if int(magnitude) == magnitude:
        magnitude = int(magnitude)
    magString = str(magnitude)
    magString = re.sub(r'(\d)e-(\d+)',
                       '\g<1> times ten to the negative \g<2>', magString)
    magString = re.sub(r'(\d)e\+(\d+)',
                       '\g<1> times ten to the \g<2>', magString)
    magString = re.sub(r'-(\d+)', 'negative \g<1>', magString)
    magString = re.sub(r'\b0(\d+)', '\g<1>', magString)
    return magString
|
Parses a number m into a human-ready string representation. For example, crops off floats if they're too accurate.
|
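A hedged usage sketch, assuming parseMagnitude is importable alongside NumberService from its defining module; the comments show the kind of output the regex rewrites produce:

```python
# Hypothetical usage of the function above.
print(parseMagnitude("one hundred and fifty"))  # e.g. '150'
print(parseMagnitude("7e-05"))                  # e.g. '7 times ten to the negative 5'
```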
2,008
|
def serialize(self, raw=False):
    if raw:
        return self._key.encode()
    return self._key.encode(nacl.encoding.Base64Encoder)
|
Encode the private part of the key in base64 format by default, but when raw is True it will return hex-encoded bytes.
|
2,009
|
def _do_get(self, url, **kwargs):
    scaleioapi_post_headers = {'Content-type': 'application/json',
                               'Version': '1.0'}  # defined but unused here
    try:
        response = self._session.get(url)
        if response.status_code == requests.codes.ok:
            self.conn.logger.debug('_do_get() - HTTP response OK, data: %s',
                                   response.text)
            return response
        else:
            self.conn.logger.error('_do_get() - HTTP response error: %s',
                                   response.status_code)
            self.conn.logger.error('_do_get() - HTTP response error, data: %s',
                                   response.text)
            # str() added: concatenating the int status code raised TypeError
            raise RuntimeError("_do_get() - HTTP response error "
                               + str(response.status_code))
    except Exception as e:
        self.conn.logger.error("_do_get() - Unhandled Error Occurred: %s" % str(e))
        raise RuntimeError("_do_get() - Communication error with ScaleIO gateway")
|
Convenience method for GET requests. Returns the HTTP response from a GET request.
|
2,010
|
def _do_post(self, url, **kwargs):
    scaleioapi_post_headers = {'Content-type': 'application/json',
                               'Version': '1.0'}
    try:
        response = self._session.post(url, headers=scaleioapi_post_headers,
                                      **kwargs)
        self.conn.logger.debug('_do_post() - HTTP response: %s', response.text)
        if response.status_code == requests.codes.ok:
            self.conn.logger.debug('_do_post() - HTTP response OK, data: %s',
                                   response.text)
            return response
        else:
            self.conn.logger.error('_do_post() - HTTP response error: %s',
                                   response.status_code)
            self.conn.logger.error('_do_post() - HTTP response error, data: %s',
                                   response.text)
            # str() added: concatenating the int status code raised TypeError
            raise RuntimeError("_do_post() - HTTP response error "
                               + str(response.status_code))
    except Exception as e:
        self.conn.logger.error("_do_post() - Unhandled Error Occurred: %s" % str(e))
        raise RuntimeError("_do_post() - Communication error with ScaleIO gateway")
|
Convenience method for POST requests. Returns the HTTP response from a POST request.
|
2,011
|
def discharge_required_response(macaroon, path, cookie_suffix_name, message=None):
    if message is None:
        message = 'discharge required'
    content = json.dumps({
        'Code': 'macaroon discharge required',
        'Message': message,
        'Info': {
            'Macaroon': macaroon.to_dict(),
            'MacaroonPath': path,
            'CookieNameSuffix': cookie_suffix_name,
        },
    }).encode('utf-8')
    return content, {
        'WWW-Authenticate': 'Macaroon',
        'Content-Type': 'application/json',
    }
|
Get response content and headers from a discharge macaroons error.
|
2,012
|
def request_version(req_headers):
    vs = req_headers.get(BAKERY_PROTOCOL_HEADER)
    if vs is None:
        return bakery.VERSION_1
    try:
        x = int(vs)
    except ValueError:
        return bakery.VERSION_1
    if x > bakery.LATEST_VERSION:
        return bakery.LATEST_VERSION
    return x
|
Determines the bakery protocol version from a client request. If the protocol cannot be determined or is invalid, the original version of the protocol is used. If a later version is found, the latest known version is used, which is OK because versions are backward compatible.
|
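A hedged sketch of the clamping behavior, assuming the BAKERY_PROTOCOL_HEADER constant and bakery version constants referenced above:

```python
# Hypothetical usage of the function above.
print(request_version({}))                               # -> bakery.VERSION_1 (no header)
print(request_version({BAKERY_PROTOCOL_HEADER: "2"}))    # -> 2
print(request_version({BAKERY_PROTOCOL_HEADER: "999"}))  # -> bakery.LATEST_VERSION (clamped)
print(request_version({BAKERY_PROTOCOL_HEADER: "abc"}))  # -> bakery.VERSION_1 (unparseable)
```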
2,013
|
def from_dict(cls, serialized):
    def field(name):
        return serialized.get(name) or serialized.get(name.lower())

    return Error(
        code=field('Code'),
        message=field('Message'),
        info=ErrorInfo.from_dict(field('Info')),
        version=bakery.LATEST_VERSION,
    )
|
Create an error from a JSON-deserialized object
|
2,014
|
def interaction_method(self, kind, x):
    if self.info is None or self.code != ERR_INTERACTION_REQUIRED:
        raise InteractionError(
            'not an interaction-required error (code {})'.format(self.code))
    entry = self.info.interaction_methods.get(kind)
    if entry is None:
        raise InteractionMethodNotFound(
            'interaction method {} not found'.format(kind))
    return x.from_dict(entry)
|
Checks whether the error is an InteractionRequired error that implements the method with the given name, and JSON-unmarshals the method-specific data into x by calling its from_dict method with the deserialized JSON object.
|
2,015
|
def from_dict(cls, serialized):
    if serialized is None:
        return None
    macaroon = serialized.get('Macaroon')
    if macaroon is not None:
        macaroon = bakery.Macaroon.from_dict(macaroon)
    path = serialized.get('MacaroonPath')
    cookie_name_suffix = serialized.get('CookieNameSuffix')
    visit_url = serialized.get('VisitURL')
    wait_url = serialized.get('WaitURL')
    interaction_methods = serialized.get('InteractionMethods')
    return ErrorInfo(macaroon=macaroon, macaroon_path=path,
                     cookie_name_suffix=cookie_name_suffix,
                     visit_url=visit_url, wait_url=wait_url,
                     interaction_methods=interaction_methods)
|
Create a new ErrorInfo object from a JSON-deserialized dictionary
|
2,016
|
async def dump_blob(elem, elem_type=None):
    elem_is_blob = isinstance(elem, x.BlobType)
    data = getattr(elem, x.BlobType.DATA_ATTR) if elem_is_blob else elem
    if data is None or len(data) == 0:
        return b''
    if isinstance(data, (bytes, bytearray, list)):
        return base64.b16encode(bytes(data))
    else:
        raise ValueError('Unknown blob type')
|
Dumps blob message. Supports both blob and raw value.
|
2,017
|
async def dump_container(obj, container, container_type, params=None,
                         field_archiver=None):
    field_archiver = field_archiver if field_archiver else dump_field
    elem_type = params[0] if params else None
    if elem_type is None:
        elem_type = container_type.ELEM_TYPE
    obj = [] if obj is None else get_elem(obj)
    if container is None:
        return None
    for elem in container:
        fvalue = await field_archiver(None, elem, elem_type,
                                      params[1:] if params else None)
        obj.append(fvalue)
    return obj
|
Serializes container as popo
|
2,018
|
async def load_container(obj, container_type, params=None, container=None,
                         field_archiver=None):
    field_archiver = field_archiver if field_archiver else load_field
    if obj is None:
        return None
    c_len = len(obj)
    elem_type = params[0] if params else None
    if elem_type is None:
        elem_type = container_type.ELEM_TYPE
    res = container if container else []
    for i in range(c_len):
        fvalue = await field_archiver(obj[i], elem_type,
                                      params[1:] if params else None,
                                      eref(res, i) if container else None)
        if not container:
            res.append(fvalue)
    return res
|
Loads container of elements from the object representation. Supports the container ref. Returns loaded container.
|
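Since dump_container and load_container are symmetric, a round trip should reproduce the original values. A hedged sketch, where SomeContainer stands in for a hypothetical container type exposing ELEM_TYPE as the helpers above assume:

```python
import asyncio

# Hedged round-trip sketch over the dump/load helpers above.
async def round_trip(values, container_type):
    dumped = await dump_container(None, values, container_type)
    return await load_container(dumped, container_type)

# asyncio.run(round_trip([1, 2, 3], SomeContainer))  # SomeContainer is hypothetical
```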
2,019
|
async def dump_message_field(obj, msg, field, field_archiver=None):
    fname, ftype, params = field[0], field[1], field[2:]
    fvalue = getattr(msg, fname, None)
    field_archiver = field_archiver if field_archiver else dump_field
    return await field_archiver(eref(obj, fname, True), fvalue, ftype, params)
|
Dumps a message field to the object. Field is defined by the message field specification.
|
2,020
|
async def load_message_field(obj, msg, field, field_archiver=None):
    fname, ftype, params = field[0], field[1], field[2:]
    field_archiver = field_archiver if field_archiver else load_field
    await field_archiver(obj[fname], ftype, params, eref(msg, fname))
|
Loads message field from the object. Field is defined by the message field specification. Returns the loaded value; supports field reference.
|
2,021
|
async def dump_message(obj, msg, field_archiver=None):
    mtype = msg.__class__
    fields = mtype.f_specs()
    obj = collections.OrderedDict() if obj is None else get_elem(obj)
    for field in fields:
        await dump_message_field(obj, msg=msg, field=field,
                                 field_archiver=field_archiver)
    return obj
|
Dumps message to the object. Returns message popo representation.
|
2,022
|
async def load_message(obj, msg_type, msg=None, field_archiver=None):
    msg = msg_type() if msg is None else msg
    fields = msg_type.f_specs() if msg_type else msg.__class__.f_specs()
    for field in fields:
        await load_message_field(obj, msg, field, field_archiver=field_archiver)
    return msg
|
Loads a message of the given type from the object. Supports reading directly to existing message.
|
2,023
|
async def dump_variant(obj, elem, elem_type=None, params=None,
                       field_archiver=None):
    field_archiver = field_archiver if field_archiver else dump_field
    if isinstance(elem, x.VariantType) or elem_type.WRAPS_VALUE:
        return {
            elem.variant_elem: await field_archiver(
                None, getattr(elem, elem.variant_elem), elem.variant_elem_type)
        }
    else:
        fdef = elem_type.find_fdef(elem_type.f_specs(), elem)
        return {fdef[0]: await field_archiver(None, elem, fdef[1])}
|
Transform variant to the popo object representation.
|
2,024
|
async def dump_field(obj, elem, elem_type, params=None):
    if isinstance(elem, (int, bool)) or issubclass(elem_type, x.UVarintType) \
            or issubclass(elem_type, x.IntType):
        return set_elem(obj, elem)
    elif issubclass(elem_type, x.BlobType) or isinstance(obj, bytes) \
            or isinstance(obj, bytearray):
        return set_elem(obj, await dump_blob(elem))
    elif issubclass(elem_type, x.UnicodeType) or isinstance(elem, str):
        return set_elem(obj, elem)
    elif issubclass(elem_type, x.VariantType):
        return set_elem(obj, await dump_variant(None, elem, elem_type, params))
    elif issubclass(elem_type, x.ContainerType):
        return set_elem(obj, await dump_container(None, elem, elem_type, params))
    elif issubclass(elem_type, x.MessageType):
        return set_elem(obj, await dump_message(None, elem))
    else:
        raise TypeError
|
Dumps generic field to the popo object representation according to the element specification. General multiplexer.
|
2,025
|
async def load_field(obj, elem_type, params=None, elem=None):
    if issubclass(elem_type, x.UVarintType) or issubclass(elem_type, x.IntType) \
            or isinstance(obj, (int, bool)):
        return set_elem(elem, obj)
    elif issubclass(elem_type, x.BlobType):
        fvalue = await load_blob(obj, elem_type)
        return set_elem(elem, fvalue)
    elif issubclass(elem_type, x.UnicodeType) or isinstance(elem, str):
        return set_elem(elem, obj)
    elif issubclass(elem_type, x.VariantType):
        fvalue = await load_variant(obj, elem=get_elem(elem),
                                    elem_type=elem_type, params=params)
        return set_elem(elem, fvalue)
    elif issubclass(elem_type, x.ContainerType):
        fvalue = await load_container(obj, elem_type, params=params,
                                      container=get_elem(elem))
        return set_elem(elem, fvalue)
    elif issubclass(elem_type, x.MessageType):
        fvalue = await load_message(obj, msg_type=elem_type, msg=get_elem(elem))
        return set_elem(elem, fvalue)
    else:
        raise TypeError
|
Loads a field from the reader based on the field type specification. Demultiplexer.
|
2,026
|
def instantiate(data, blueprint):
    Validator = jsonschema.validators.validator_for(blueprint)
    blueprinter = extend(Validator)(blueprint)
    return blueprinter.instantiate(data)
|
Instantiate the given data using the blueprinter.
|
2,027
|
def main(argv=None):
    from vsgen import VSGSuite
    from vsgen import VSGLogger
    if argv is None:
        argv = sys.argv
    pylogger = VSGLogger()
    args = VSGSuite.make_parser(
        description='Executes the vsgen package as an application.'
    ).parse_args(argv[1:])
    for s in VSGSuite.from_args(**vars(args)):
        s.write(False)
    return 0
|
The entry point of the script.
|
2,028
|
def parse_fields(attributes):
    return tuple(field.bind_name(name)
                 for name, field in six.iteritems(attributes)
                 if isinstance(field, fields.Field))
|
Parse model fields.
|
2,029
|
def prepare_fields_attribute(attribute_name, attributes, class_name):
    attribute = attributes.get(attribute_name)
    if not attribute:
        attribute = tuple()
    elif isinstance(attribute, std_collections.Iterable):
        attribute = tuple(attribute)
    else:
        raise errors.Error(
            '{0}.{1} is supposed to be a list of {2}, instead {3} given',
            class_name, attribute_name, fields.Field, attribute)
    return attribute
|
Prepare model fields attribute.
|
2,030
|
def bind_fields_to_model_cls(cls, model_fields):
    return dict((field.name, field.bind_model_cls(cls))
                for field in model_fields)
|
Bind fields to model class.
|
2,031
|
def bind_collection_to_model_cls(cls):
    cls.Collection = type('{0}.Collection'.format(cls.__name__),
                          (cls.Collection,), {'value_type': cls})
    cls.Collection.__module__ = cls.__module__
|
Bind collection to model's class.
|
2,032
|
def checklist(ctx):
    # NOTE: 'checklist' is assigned the same dict as 'steps' here; the original
    # source presumably bound 'checklist' to a template string that was lost in
    # this dump, since dict.format(**answers) would fail as written.
    checklist = steps = dict(x1=None, x2=None, x3=None, x4=None, x5=None, x6=None)
    yesno_map = {True: "x", False: "_", None: " "}
    answers = {name: yesno_map[value] for name, value in steps.items()}
    print(checklist.format(**answers))
|
Checklist for releasing this project.
|
2,033
|
def build_packages(ctx, hide=False):
    print("build_packages:")
    ctx.run("python setup.py sdist bdist_wheel", echo=True, hide=hide)
|
Build packages for this release.
|
2,034
|
def register(self, name, function, description=None):
    return self.__app.threads.register(name, function, self._plugin, description)
|
Register a new thread.
|
2,035
|
def unregister(self, thread):
    if thread not in self.threads.keys():
        self.log.warning("Can not unregister thread %s" % thread)
    else:
        del self.threads[thread]
        self.__log.debug("Thread %s got unregistered" % thread)
|
Unregisters an existing thread so that this thread is no longer available.
|
2,036
|
def get(self, thread=None, plugin=None):
    if plugin is not None:
        if thread is None:
            threads_list = {}
            for key in self.threads.keys():
                if self.threads[key].plugin == plugin:
                    threads_list[key] = self.threads[key]
            return threads_list
        else:
            if thread in self.threads.keys():
                if self.threads[thread].plugin == plugin:
                    return self.threads[thread]
                else:
                    return None
            else:
                return None
    else:
        if thread is None:
            return self.threads
        else:
            if thread in self.threads.keys():
                return self.threads[thread]
            else:
                return None
|
Get one or more threads.
|
2,037
|
def create_schema_from_xsd_directory(directory, version):
    schema = Schema(version)
    for f in _get_xsd_from_directory(directory):
        logger.info("Loading schema %s" % f)
        fill_schema_from_xsd_file(f, schema)
    return schema
|
Create and fill the schema from a directory which contains XSD files. It calls fill_schema_from_xsd_file for each XSD file found.
|
2,038
|
def fill_schema_from_xsd_file(filename, schema):
    ifmap_statements = _parse_xsd_file(filename)
    properties_all = []
    for v in ifmap_statements.values():
        if isinstance(v[0], IDLParser.Link):
            src_name = v[1]
            target_name = v[2]
            src = schema._get_or_add_resource(src_name)
            target = schema._get_or_add_resource(target_name)
            if "has" in v[3]:
                src.children.append(target_name)
                target.parent = src_name
            if "ref" in v[3]:
                src.refs.append(target_name)
                target.back_refs.append(src_name)
        elif isinstance(v[0], IDLParser.Property):
            target_name = v[1][0]
            prop = ResourceProperty(v[0].name, is_list=v[0].is_list,
                                    is_map=v[0].is_map)
            if target_name != 'all':
                target = schema._get_or_add_resource(target_name)
                target.properties.append(prop)
            else:
                properties_all.append(prop)
    for r in schema.all_resources():
        schema.resource(r).properties += properties_all
|
From an XSD file it fills the schema by creating the needed Resources. The generateDS idl_parser is used to parse ifmap statements in the XSD file.
|
2,039
|
def split_ls(func):
    @wraps(func)
    def wrapper(self, files, silent=True, exclude_deleted=False):
        if not isinstance(files, (tuple, list)):
            files = [files]
        counter = 0
        index = 0
        results = []
        while files:
            if index >= len(files):
                results += func(self, files, silent, exclude_deleted)
                break
            length = len(str(files[index]))
            if length + counter > CHAR_LIMIT:
                runfiles = files[:index]
                files = files[index:]
                counter = 0
                index = 0
                results += func(self, runfiles, silent, exclude_deleted)
                runfiles = None
                del runfiles
            else:
                index += 1
                counter += length
        return results
    return wrapper
|
Decorator to split files into manageable chunks so as not to exceed the Windows cmd limit
|
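The core idea is greedy batching: accumulate argument lengths until the next one would pass the limit, then flush a batch. A minimal standalone sketch of the same idea, with a small stand-in limit instead of the module's CHAR_LIMIT:

```python
CHAR_LIMIT = 20  # stand-in value; the real module's limit is much larger

def chunks(files, limit=CHAR_LIMIT):
    batch, size = [], 0
    for f in files:
        # flush the current batch before it would exceed the limit
        if batch and size + len(str(f)) > limit:
            yield batch
            batch, size = [], 0
        batch.append(f)
        size += len(str(f))
    if batch:
        yield batch

print(list(chunks(["aaaa", "bbbb", "cccccccc", "dd", "eeeeeeee"])))
# [['aaaa', 'bbbb', 'cccccccc', 'dd'], ['eeeeeeee']]
```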
2,040
|
def __getVariables(self):
    try:
        startupinfo = None
        if os.name == 'nt':
            startupinfo = subprocess.STARTUPINFO()
            startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        output = subprocess.check_output(['p4', 'set'], startupinfo=startupinfo)
        if six.PY3:
            output = str(output, 'utf8')
    except subprocess.CalledProcessError as err:
        LOGGER.error(err)
        return
    p4vars = {}
    for line in output.splitlines():
        if not line:
            continue
        try:
            k, v = line.split('=', 1)
        except ValueError:
            continue
        p4vars[k.strip()] = v.strip().split(' (')[0]
        if p4vars[k.strip()].startswith('(config'):
            del p4vars[k.strip()]
    self._port = self._port or os.getenv('P4PORT', p4vars.get('P4PORT'))
    self._user = self._user or os.getenv('P4USER', p4vars.get('P4USER'))
    self._client = self._client or os.getenv('P4CLIENT', p4vars.get('P4CLIENT'))
|
Parses the P4 environment variables using p4 set
|
2,041
|
def client(self):
    if isinstance(self._client, six.string_types):
        self._client = Client(self._client, self)
    return self._client
|
The client used in perforce queries
|
2,042
|
def status(self):
    try:
        res = self.run(['info'])
        if res[0]['clientName'] == '*unknown*':
            return ConnectionStatus.INVALID_CLIENT
        self.run(['user', '-o'])
    except errors.CommandError as err:
        if 'password (P4PASSWD) invalid or unset' in str(err.args[0]):
            return ConnectionStatus.NO_AUTH
        if 'Connect to server failed' in str(err.args[0]):
            return ConnectionStatus.OFFLINE
    return ConnectionStatus.OK
|
The status of the connection to perforce
|
2,043
|
def run(self, cmd, stdin=None, marshal_output=True, **kwargs):
    records = []
    args = [self._executable, "-u", self._user, "-p", self._port]
    if self._client:
        args += ["-c", str(self._client)]
    if marshal_output:
        args.append('-G')
    if isinstance(cmd, six.string_types):
        raise ValueError('String commands are not supported, please use a list')
    args += cmd
    command = ' '.join(args)
    startupinfo = None
    if os.name == 'nt':
        startupinfo = subprocess.STARTUPINFO()
        startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
    proc = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE, startupinfo=startupinfo,
                            **kwargs)
    if stdin:
        proc.stdin.write(six.b(stdin))
    if marshal_output:
        try:
            while True:
                record = marshal.load(proc.stdout)
                if record.get(b'code', '') == b'error' \
                        and record[b'severity'] >= self._level:
                    proc.stdin.close()
                    proc.stdout.close()
                    raise errors.CommandError(record[b'data'], record, command)
                if isinstance(record, dict):
                    if six.PY2:
                        records.append(record)
                    else:
                        records.append({
                            str(k, 'utf8'): str(v) if isinstance(v, int)
                            else str(v, 'utf8', errors='ignore')
                            for k, v in record.items()
                        })
        except EOFError:
            pass
        stdout, stderr = proc.communicate()
    else:
        records, stderr = proc.communicate()
    if stderr:
        raise errors.CommandError(stderr, command)
    return records
|
Runs a p4 command and returns a list of dictionary objects
|
2,044
|
def findChangelist(self, description=None):
    if description is None:
        change = Default(self)
    else:
        if isinstance(description, six.integer_types):
            change = Changelist(description, self)
        else:
            pending = self.run(['changes', '-l', '-s', 'pending',
                                '-c', str(self._client), '-u', self._user])
            for cl in pending:
                if cl['desc'].strip() == description.strip():
                    LOGGER.debug('Changelist found: {}'.format(cl['change']))
                    change = Changelist(int(cl['change']), self)
                    break
            else:
                LOGGER.debug('No changelist found, creating one')
                change = Changelist.create(description, self)
                change.client = self._client
                change.save()
    return change
|
Gets or creates a Changelist object with a description
|
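A hedged usage sketch, assuming a configured Connection class from this library with the defaults picked up from the P4 environment:

```python
# Hypothetical usage of findChangelist on a Connection instance.
p4 = Connection()
cl = p4.findChangelist("Fix shader compile errors")  # reuses a pending CL or creates one
cl = p4.findChangelist(142984)                       # wraps an existing CL by number
cl = p4.findChangelist()                             # the default changelist
```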
2,045
|
def add(self, filename, change=None):
    try:
        if not self.canAdd(filename):
            raise errors.RevisionError('File is not under client path')
        if change is None:
            self.run(['add', filename])
        else:
            self.run(['add', '-c', str(change.change), filename])
        data = self.run(['fstat', filename])[0]
    except errors.CommandError as err:
        LOGGER.debug(err)
        raise errors.RevisionError('File is not under client path')
    rev = Revision(data, self)
    if isinstance(change, Changelist):
        change.append(rev)
    return rev
|
Adds a new file to a changelist
|
2,046
|
def canAdd(self, filename):
    try:
        result = self.run(['add', '-n', '-t', 'text', filename])[0]
    except errors.CommandError as err:
        LOGGER.debug(err)
        return False
    if result.get('code') not in ('error', 'info'):
        return True
    LOGGER.warn('Unable to add {}: {}'.format(filename, result['data']))
    return False
|
Determines if a filename can be added to the depot under the current client
|
2,047
|
def query(self, files=True):
    if self._change:
        cl = str(self._change)
        self._p4dict = {
            camel_case(k): v
            for k, v in six.iteritems(self._connection.run(['change', '-o', cl])[0])
        }
    if files:
        self._files = []
        if self._p4dict.get('status') == 'pending' or self._change == 0:
            change = self._change or 'default'
            data = self._connection.run(['opened', '-c', str(change)])
            self._files = [Revision(r, self._connection) for r in data]
        else:
            data = self._connection.run(['describe', str(self._change)])[0]
            depotfiles = []
            for k, v in six.iteritems(data):
                if k.startswith('depotFile'):
                    depotfiles.append(v)
            self._files = self._connection.ls(depotfiles)
|
Queries the depot to get the current status of the changelist
|
2,048
|
def remove(self, rev, permanent=False):
    if not isinstance(rev, Revision):
        raise TypeError('argument needs to be an instance of Revision')
    if rev not in self:
        raise ValueError('{} not in changelist'.format(rev))
    self._files.remove(rev)
    if not permanent:
        rev.changelist = self._connection.default
|
Removes a revision from this changelist
|
2,049
|
def revert(self, unchanged_only=False):
    if self._reverted:
        raise errors.ChangelistError('This changelist has been reverted')
    change = self._change
    if self._change == 0:
        change = 'default'
    cmd = ['revert', '-c', str(change)]
    if unchanged_only:
        cmd.append('-a')
    files = [f.depotFile for f in self._files]
    if files:
        cmd += files
    self._connection.run(cmd)
    self._files = []
    self._reverted = True
|
Revert all files in this changelist
|
2,050
|
def submit(self):
    if self._dirty:
        self.save()
    self._connection.run(['submit', '-c', str(self._change)],
                         marshal_output=False)
|
Submits a changelist to the depot
|
2,051
|
def delete(self):
    try:
        self.revert()
    except errors.ChangelistError:
        pass
    self._connection.run(['change', '-d', str(self._change)])
|
Reverts all files in this changelist then deletes the changelist from perforce
|
2,052
|
def create(description='<Created by Python>', connection=None):
    connection = connection or Connection()
    description = description.replace('\n', '\n\t')
    form = NEW_FORMAT.format(client=str(connection.client),
                             description=description)
    result = connection.run(['change', '-i'], stdin=form, marshal_output=False)
    return Changelist(int(result.split()[1]), connection)
|
Creates a new changelist
|
2,053
|
def query(self):
    self._p4dict = self._connection.run(
        ['fstat', '-m', '1', self._p4dict['depotFile']])[0]
    self._head = HeadRevision(self._p4dict)
    self._filename = self.depotFile
|
Runs an fstat for this file and repopulates the data
|
2,054
|
def edit(self, changelist=0):
    command = 'reopen' if self.action in ('add', 'edit') else 'edit'
    if int(changelist):
        self._connection.run([command, '-c', str(changelist.change),
                              self.depotFile])
    else:
        self._connection.run([command, self.depotFile])
    self.query()
|
Checks out the file
|
2,055
|
def lock(self, lock=True, changelist=0):
    cmd = 'lock' if lock else 'unlock'
    if changelist:
        self._connection.run([cmd, '-c', changelist, self.depotFile])
    else:
        self._connection.run([cmd, self.depotFile])
    self.query()
|
Locks or unlocks the file
|
2,056
|
def sync(self, force=False, safe=True, revision=0, changelist=0):
    cmd = ['sync']
    if force:
        cmd.append('-f')
    if safe:
        cmd.append('-s')
    if revision:
        cmd.append('{}#{}'.format(self.depotFile, revision))
    elif changelist:
        cmd.append('{}@{}'.format(self.depotFile, changelist))
    else:
        cmd.append(self.depotFile)
    self._connection.run(cmd)
    self.query()
|
Syncs the file at the current revision
|
2,057
|
def revert(self, unchanged=False):
    cmd = ['revert']
    if unchanged:
        cmd.append('-a')
    wasadd = self.action == 'add'
    cmd.append(self.depotFile)
    self._connection.run(cmd)
    if 'movedFile' in self._p4dict:
        self._p4dict['depotFile'] = self._p4dict['movedFile']
    if not wasadd:
        self.query()
    if self._changelist:
        self._changelist.remove(self, permanent=True)
|
Reverts any file changes
|
2,058
|
def shelve(self, changelist=None):
    if changelist is None and self.changelist.description == 'default':
        raise errors.ShelveError('Unable to shelve files in the default changelist')
    cmd = ['shelve']
    if changelist:
        cmd += ['-c', str(changelist)]
    cmd.append(self.depotFile)
    self._connection.run(cmd)
    self.query()
|
Shelves the file if it is in a changelist
|
2,059
|
def delete(self, changelist=0):
    cmd = ['delete']
    if changelist:
        cmd += ['-c', str(changelist)]
    cmd.append(self.depotFile)
    self._connection.run(cmd)
    self.query()
|
Marks the file for delete
|
2,060
|
def hash(self):
    if 'digest' not in self._p4dict:
        self._p4dict = self._connection.run(
            ['fstat', '-m', '1', '-Ol', self.depotFile])[0]
    return self._p4dict['digest']
|
The hash value of the current revision
|
2,061
|
def view(self):
    spec = []
    for k, v in six.iteritems(self._p4dict):
        if k.startswith('view'):
            match = RE_FILESPEC.search(v)
            if match:
                spec.append(FileSpec(v[:match.end() - 1], v[match.end():]))
    return spec
|
A list of view specs
|
2,062
|
def stream(self):
    stream = self._p4dict.get('stream')
    if stream:
        return Stream(stream, self._connection)
|
Which stream, if any, the client is under
|
2,063
|
async def set_version(self, tp, params, version=None, elem=None):
    self.registry.set_tr(None)
    tw = TypeWrapper(tp, params)
    if not tw.is_versioned():
        return TypeWrapper.ELEMENTARY_RES
    if not self.version_db.is_versioned(tw):
        if version is None:
            version = self._cur_version(tw, elem)
        await dump_uvarint(self.iobj, 0)
        await dump_uvarint(self.iobj, version)
        self.version_db.set_version(tw, 0, version)
    return self.version_db.get_version(tw)[1]
|
Stores version to the stream if not stored yet
|
2,064
|
async def version(self, tp, params, version=None, elem=None):
    if self.writing:
        return await self.set_version(tp, params, version, elem)
    else:
        return await self.get_version(tp, params)
|
Symmetric version management
|
2,065
|
async def root_message(self, msg, msg_type=None):
    await self.root()
    await self.message(msg, msg_type)
|
Root-level message; the first entry in the archive. Handles archive header processing.
|
2,066
|
async def dump_message(self, msg, msg_type=None):
    mtype = msg.__class__ if msg_type is None else msg_type
    fields = mtype.f_specs()
    for field in fields:
        await self.message_field(msg=msg, field=field)
|
Dumps message to the writer.
|
2,067
|
async def load_message(self, msg_type, msg=None):
    msg = msg_type() if msg is None else msg
    fields = msg_type.f_specs() if msg_type else msg.__class__.f_specs()
    for field in fields:
        await self.message_field(msg, field)
    return msg
|
Loads a message of the given type from the reader. Supports reading directly to existing message.
|
2,068
|
def contrail_error_handler(f):
    @wraps(f)
    def wrapper(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except HttpError as e:
            if e.details:
                e.message, e.details = e.details, e.message
            e.args = ("%s (HTTP %s)" % (e.message, e.http_status),)
            raise
    return wrapper
|
Handle HTTP errors returned by the API server
|
2,069
|
def make(self, host="localhost", port=8082, protocol="http", base_uri="",
         os_auth_type="http", **kwargs):
    loader = loading.base.get_plugin_loader(os_auth_type)
    plugin_options = {opt.dest: kwargs.pop("os_%s" % opt.dest)
                      for opt in loader.get_options()
                      if 'os_%s' % opt.dest in kwargs}
    plugin = loader.load_from_options(**plugin_options)
    return self.load_from_argparse_arguments(Namespace(**kwargs), host=host,
                                             port=port, protocol=protocol,
                                             base_uri=base_uri, auth=plugin)
|
Initialize a session to Contrail API server
|
2,070
|
def post_json(self, url, data, cls=None, **kwargs):
    kwargs['data'] = to_json(data, cls=cls)
    kwargs['headers'] = self.default_headers
    return self.post(url, **kwargs).json()
|
POST data to the API server
|
2,071
|
def put_json(self, url, data, cls=None, **kwargs):
    kwargs['data'] = to_json(data, cls=cls)
    kwargs['headers'] = self.default_headers
    return self.put(url, **kwargs).json()
|
PUT data to the API server
|
2,072
|
def fqname_to_id(self, fq_name, type):
    data = {"type": type, "fq_name": list(fq_name)}
    return self.post_json(self.make_url("/fqname-to-id"), data)["uuid"]
|
Return uuid for fq_name
|
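A hedged usage sketch for the two lookups, assuming an authenticated Contrail API session object from this library (names and values below are illustrative only):

```python
# Hypothetical usage; 'session' and the fq_name values are assumptions.
uuid = session.fqname_to_id(["default-domain", "project1", "vn1"],
                            "virtual-network")
result = session.id_to_fqname(uuid)  # -> {'fq_name': FQName(...), 'type': ...}
```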
2,073
|
def id_to_fqname(self, uuid, type=None):
    data = {"uuid": uuid}
    result = self.post_json(self.make_url("/id-to-fqname"), data)
    result['fq_name'] = FQName(result['fq_name'])
    if type is not None and not result['type'].replace('_', '-') == type:
        raise HttpError('uuid %s not found for type %s' % (uuid, type),
                        http_status=404)
    return result
|
Return fq_name and type for uuid
|
2,074
|
def add_kv_store(self, key, value):
    data = {'operation': 'STORE', 'key': key, 'value': value}
    return self.post(self.make_url("/useragent-kv"), data=to_json(data),
                     headers=self.default_headers).text
|
Add a key-value store entry.
|
2,075
|
def remove_kv_store(self, key):
    data = {'operation': 'DELETE', 'key': key}
    return self.post(self.make_url("/useragent-kv"), data=to_json(data),
                     headers=self.default_headers).text
|
Remove a key-value store entry.
|
2,076
|
def canonical_ops(ops):
    new_ops = sorted(set(ops), key=lambda x: (x.entity, x.action))
    return new_ops
|
Returns the given operations array sorted, with duplicates removed.
|
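Since canonical_ops only needs hashable items with entity and action attributes, it can be exercised with a stand-in Op type; a hedged sketch assuming the function above is importable:

```python
from collections import namedtuple

# Stand-in Op type with the attributes canonical_ops relies on.
Op = namedtuple("Op", ["entity", "action"])

ops = [Op("photo", "write"), Op("album", "read"), Op("photo", "write")]
print(canonical_ops(ops))
# [Op(entity='album', action='read'), Op(entity='photo', action='write')]
```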
2,077
|
def _macaroon_id_ops(ops):
    id_ops = []
    for entity, entity_ops in itertools.groupby(ops, lambda x: x.entity):
        actions = map(lambda x: x.action, entity_ops)
        id_ops.append(id_pb2.Op(entity=entity, actions=actions))
    return id_ops
|
Return operations suitable for serializing as part of a MacaroonId.
|
2,078
|
def macaroon(self, version, expiry, caveats, ops):
    if len(ops) == 0:
        raise ValueError('cannot mint a macaroon associated '
                         'with no operations')
    ops = canonical_ops(ops)
    root_key, storage_id = self.root_keystore_for_ops(ops).root_key()
    id = self._new_macaroon_id(storage_id, expiry, ops)
    id_bytes = six.int2byte(LATEST_VERSION) + id.SerializeToString()
    if macaroon_version(version) < MACAROON_V2:
        id_bytes = raw_urlsafe_b64encode(id_bytes)
    m = Macaroon(root_key, id_bytes, self.location, version, self.namespace)
    m.add_caveat(checkers.time_before_caveat(expiry), self.key, self.locator)
    m.add_caveats(caveats, self.key, self.locator)
    return m
|
Takes a macaroon with the given version from the oven, associates it with the given operations and attaches the given caveats. There must be at least one operation specified. The macaroon will expire at the given time; a time_before first-party caveat will be added with that time.
|
2,079
|
def ops_entity(self, ops):
    hash_entity = hashlib.sha256()
    for op in ops:
        hash_entity.update('{}\n{}\n'.format(op.action, op.entity).encode())
    hash_encoded = base64.urlsafe_b64encode(hash_entity.digest())
    return 'multi-' + hash_encoded.decode('utf-8').rstrip('=')
|
Returns a new multi-op entity name string that represents all the given operations and caveats. It returns the same value regardless of the ordering of the operations. It assumes that the operations have been canonicalized and that there's at least one operation.
|
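The order-independence claim follows from hashing canonicalized operations. A hedged sketch, assuming a hypothetical Oven instance and the Op stand-in shown after canonical_ops above:

```python
# 'oven' is a hypothetical Oven instance; Op is the namedtuple stand-in.
a = oven.ops_entity(canonical_ops([Op("photo", "write"), Op("album", "read")]))
b = oven.ops_entity(canonical_ops([Op("album", "read"), Op("photo", "write")]))
assert a == b and a.startswith("multi-")
```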
2,080
|
def macaroon_ops(self, macaroons):
    if len(macaroons) == 0:
        raise ValueError('no macaroons provided')
    storage_id, ops = _decode_macaroon_id(macaroons[0].identifier_bytes)
    root_key = self.root_keystore_for_ops(ops).get(storage_id)
    if root_key is None:
        raise VerificationError('macaroon key not found in storage')
    v = Verifier()
    conditions = []

    def validator(condition):
        conditions.append(condition)
        return True

    v.satisfy_general(validator)
    try:
        v.verify(macaroons[0], root_key, macaroons[1:])
    except Exception as exc:
        raise six.raise_from(
            VerificationError('verification failed: {}'.format(str(exc))), exc)
    if (self.ops_store is not None and len(ops) == 1
            and ops[0].entity.startswith('multi-')):
        ops = self.ops_store.get_ops(ops[0].entity)
    return ops, conditions
|
This method makes the oven satisfy the MacaroonOpStore protocol required by the Checker class.
|
2,081
|
def extend(self, iterable):
    return super(Collection, self).extend(
        self._ensure_iterable_is_valid(iterable))
|
Extend the list by appending all the items in the given list.
|
2,082
|
def insert(self, index, value):
    return super(Collection, self).insert(
        index, self._ensure_value_is_valid(value))
|
Insert an item at a given position.
|
2,083
|
def _ensure_value_is_valid(self, value):
    if not isinstance(value, self.__class__.value_type):
        raise TypeError('{0} is not valid collection value, instance '
                        'of {1} required'.format(value,
                                                 self.__class__.value_type))
    return value
|
Ensure that value is a valid collection's value.
|
2,084
|
def container_elem_type(container_type, params):
    elem_type = params[0] if params else None
    if elem_type is None:
        elem_type = container_type.ELEM_TYPE
    return elem_type
|
Returns container element type
|
2,085
|
def is_valid(doi):
    match = REGEX.match(doi)
    return (match is not None) and (match.group(0) == doi)
|
Check that a given DOI is a valid canonical DOI.
|
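A hedged pair of examples, assuming REGEX is this module's canonical-DOI pattern; the full-match check is what rejects strings that merely contain a DOI:

```python
# Hypothetical usage of the function above.
print(is_valid("10.1000/xyz123"))      # True: canonical form
print(is_valid("doi:10.1000/xyz123"))  # False: prefix is not part of a canonical DOI
```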
2,086
|
def get_oa_version(doi):
    try:
        request = requests.get("%s%s" % (DISSEMIN_API, doi))
        request.raise_for_status()
        result = request.json()
        assert result["status"] == "ok"
        return result["paper"]["pdf_url"]
    except (AssertionError, ValueError, KeyError, RequestException):
        return None
|
Get an OA version for a given DOI.
|
2,087
|
def get_oa_policy(doi):
    try:
        request = requests.get("%s%s" % (DISSEMIN_API, doi))
        request.raise_for_status()
        result = request.json()
        assert result["status"] == "ok"
        return ([i for i in result["paper"]["publications"]
                 if i["doi"] == doi][0])["policy"]
    except (AssertionError, ValueError, KeyError, RequestException, IndexError):
        return None
|
Get OA policy for a given DOI.
|
2,088
|
def get_linked_version(doi):
    try:
        request = requests.head(to_url(doi))
        return request.headers.get("location")
    except RequestException:
        return None
|
Get the original link behind the DOI.
|
2,089
|
def get_bibtex(doi):
    try:
        request = requests.get(to_url(doi),
                               headers={"accept": "application/x-bibtex"})
        request.raise_for_status()
        assert request.headers.get("content-type") == "application/x-bibtex"
        return request.text
    except (RequestException, AssertionError):
        return None
|
Get a BibTeX entry for a given DOI.
|
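A hedged usage sketch; this performs a live HTTP request via DOI content negotiation, and the example DOI is just an illustration:

```python
# Hypothetical usage of the function above.
entry = get_bibtex("10.1103/PhysRevLett.116.061102")
if entry is not None:
    print(entry)  # BibTeX text; None on any request or content-type failure
```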
2,090
|
def _configure_logging(self, logger_dict=None):
    self.log.debug("Configure logging")
    for handler in self.log.handlers:
        self.log.removeHandler(handler)
    if logger_dict is None:
        self.log.debug("No logger dictionary defined. "
                       "Doing default logger configuration")
        formatter = logging.Formatter(
            "%(name)s - %(asctime)s - [%(levelname)s] - %(module)s - %(message)s")
        stream_handler = logging.StreamHandler(sys.stdout)
        stream_handler.setLevel(logging.WARNING)
        stream_handler.setFormatter(formatter)
        self.log.addHandler(stream_handler)
        self.log.setLevel(logging.WARNING)
    else:
        self.log.debug("Logger dictionary defined. Loading dictConfig for logging")
        logging.config.dictConfig(logger_dict)
        self.log.debug("dictConfig loaded")
|
Configures the logging module with a given dictionary, which in most cases was loaded from a configuration file.
|
2,091
|
async def _dump_message_field(self, writer, msg, field, fvalue=None):
    fname, ftype, params = field[0], field[1], field[2:]
    fvalue = getattr(msg, fname, None) if fvalue is None else fvalue
    await self.dump_field(writer, fvalue, ftype, params)
|
Dumps a message field to the writer. Field is defined by the message field specification.
|
2,092
|
async def _load_message_field(self, reader, msg, field):
    fname, ftype, params = field[0], field[1], field[2:]
    await self.load_field(reader, ftype, params, eref(msg, fname))
|
Loads message field from the reader. Field is defined by the message field specification. Returns the loaded value; supports field reference.
|
2,093
|
def start(self, message):
    self._start = time.clock()
    VSGLogger.info("{0:<20} - Started".format(message))
|
Manually starts timer with the message.
|
2,094
|
def stop(self, message):
    self._stop = time.clock()
    VSGLogger.info("{0:<20} - Finished [{1}s]".format(
        message, self.pprint(self._stop - self._start)))
|
Manually stops timer with the message.
|
2,095
|
def get_bibtex(identifier):
    identifier_type, identifier_id = identifier
    if identifier_type not in __valid_identifiers__:
        return None
    module = sys.modules.get("libbmc.%s" % (identifier_type,), None)
    if module is None:
        return None
    return getattr(module, "get_bibtex")(identifier_id)
|
Try to fetch BibTeX from a found identifier.
|
2,096
|
def initialize(self, *args, **kwargs):
    super(JSONHandler, self).initialize(*args, **kwargs)
    content_type = self.request.headers.get('Content-Type', '')
    if 'application/json' in content_type.lower():
        self._parse_json_body_arguments()
|
Only try to parse as JSON if the JSON content type header is set .
|
2,097
|
def get_plaintext_citations(bibtex):
    parser = BibTexParser()
    parser.customization = convert_to_unicode
    if os.path.isfile(bibtex):
        with open(bibtex) as fh:
            bib_database = bibtexparser.load(fh, parser=parser)
    else:
        bib_database = bibtexparser.loads(bibtex, parser=parser)
    bibentries = [bibentry_as_plaintext(bibentry)
                  for bibentry in bib_database.entries]
    return bibentries
|
Parse a BibTeX file to get a clean list of plaintext citations.
|
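A hedged usage sketch: the function accepts either a path to a .bib file or raw BibTeX text, so it can be exercised with an inline string (the entry below is illustrative):

```python
# Hypothetical usage of the function above with inline BibTeX.
bib = "@article{doe2020, title={A title}, author={Doe, John}, year={2020}}"
for citation in get_plaintext_citations(bib):
    print(citation)
```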
2,098
|
def init(filename=ConfigPath):
    section, parts = "DEFAULT", filename.rsplit(":", 1)
    if len(parts) > 1 and os.path.isfile(parts[0]):
        filename, section = parts
    if not os.path.isfile(filename):
        return
    vardict, parser = globals(), configparser.RawConfigParser()
    parser.optionxform = str
    try:
        def parse_value(raw):
            try:
                return json.loads(raw)
            except ValueError:
                return raw

        txt = open(filename).read()
        if not re.search("\\[\\w+\\]", txt):
            txt = "[DEFAULT]\n" + txt
        parser.readfp(StringIO.StringIO(txt), filename)
        for k, v in parser.items(section):
            vardict[k] = parse_value(v)
    except Exception:
        logging.warn("Error reading config from %s.", filename, exc_info=True)
|
Loads INI configuration into this module's attributes.
|
2,099
|
def save(filename=ConfigPath):
    default_values = defaults()
    parser = configparser.RawConfigParser()
    parser.optionxform = str
    try:
        save_types = basestring, int, float, tuple, list, dict, type(None)
        for k, v in sorted(globals().items()):
            if not isinstance(v, save_types) or k.startswith("_") \
                    or default_values.get(k, parser) == v:
                continue
            try:
                parser.set("DEFAULT", k, json.dumps(v))
            except Exception:
                pass
        if parser.defaults():
            with open(filename, "wb") as f:
                f.write("# %s %s configuration written on %s.\n" % (
                    Title, Version,
                    datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")))
                parser.write(f)
        else:
            try:
                os.unlink(filename)
            except Exception:
                pass
    except Exception:
        logging.warn("Error writing config to %s.", filename, exc_info=True)
|
Saves this module's changed attributes to INI configuration.
|