idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
60,100 | def load_json ( self , path ) : with open ( self . profile_path ( path , must_exist = True ) , encoding = 'utf-8' ) as f : data = json . load ( f ) return data | Load a JSON file from the user profile . |
60,101 | def load_mozlz4 ( self , path ) : with open ( self . profile_path ( path , must_exist = True ) , 'rb' ) as f : if f . read ( 8 ) != b'mozLz40\0' : raise NotMozLz4Error ( 'Not Mozilla LZ4 format.' ) data = lz4 . block . decompress ( f . read ( ) ) return data | Load a Mozilla LZ4 file from the user profile . |
60,102 | def csv_from_items ( items , stream = None ) : items = iter ( items ) first = next ( items ) cls = first . __class__ if stream is None : stream = sys . stdout fields = [ f . name for f in attr . fields ( cls ) ] writer = csv . DictWriter ( stream , fieldnames = fields ) writer . writeheader ( ) writer . writerow ( attr . asdict ( first ) ) writer . writerows ( ( attr . asdict ( x ) for x in items ) ) | Write a list of items to stream in CSV format . |
60,103 | def profile_path ( self , path , must_exist = False ) : full_path = self . session . profile / path if must_exist and not full_path . exists ( ) : raise FileNotFoundError ( errno . ENOENT , os . strerror ( errno . ENOENT ) , PurePath ( full_path ) . name , ) return full_path | Return path from current profile . |
60,104 | def make_rpc_call ( self , rpc_command ) : if not self . is_alive ( ) : self . close ( ) self . open ( ) result = self . _execute_rpc ( rpc_command ) return ET . tostring ( result ) | Allow a user to query a device directly using XML - requests . |
60,105 | def open ( self ) : try : self . device = ConnectHandler ( device_type = 'cisco_xr' , ip = self . hostname , port = self . port , username = self . username , password = self . password , ** self . netmiko_kwargs ) self . device . timeout = self . timeout self . _xml_agent_alive = True except NetMikoTimeoutException as t_err : raise ConnectError ( t_err . args [ 0 ] ) except NetMikoAuthenticationException as au_err : raise ConnectError ( au_err . args [ 0 ] ) self . _cli_prompt = self . device . find_prompt ( ) self . _enter_xml_mode ( ) | Open a connection to an IOS - XR device . |
60,106 | def _execute_show ( self , show_command ) : rpc_command = '<CLI><Exec>{show_command}</Exec></CLI>' . format ( show_command = escape_xml ( show_command ) ) response = self . _execute_rpc ( rpc_command ) raw_response = response . xpath ( './/CLI/Exec' ) [ 0 ] . text return raw_response . strip ( ) if raw_response else '' | Executes an operational show - type command . |
60,107 | def _execute_config_show ( self , show_command , delay_factor = .1 ) : rpc_command = '<CLI><Configuration>{show_command}</Configuration></CLI>' . format ( show_command = escape_xml ( show_command ) ) response = self . _execute_rpc ( rpc_command , delay_factor = delay_factor ) raw_response = response . xpath ( './/CLI/Configuration' ) [ 0 ] . text return raw_response . strip ( ) if raw_response else '' | Executes a configuration show - type command . |
60,108 | def close ( self ) : if self . lock_on_connect or self . locked : self . unlock ( ) self . _unlock_xml_agent ( ) if hasattr ( self . device , 'remote_conn' ) : self . device . remote_conn . close ( ) | Close the connection to the IOS - XR device . |
60,109 | def lock ( self ) : if not self . locked : rpc_command = '<Lock/>' try : self . _execute_rpc ( rpc_command ) except XMLCLIError : raise LockError ( 'Unable to enter in configure exclusive mode!' , self ) self . locked = True | Lock the config database . |
60,110 | def unlock ( self ) : if self . locked : rpc_command = '<Unlock/>' try : self . _execute_rpc ( rpc_command ) except XMLCLIError : raise UnlockError ( 'Unable to unlock the config!' , self ) self . locked = False | Unlock the IOS - XR device config . |
60,111 | def load_candidate_config ( self , filename = None , config = None ) : configuration = '' if filename is None : configuration = config else : with open ( filename ) as f : configuration = f . read ( ) rpc_command = '<CLI><Configuration>{configuration}</Configuration></CLI>' . format ( configuration = escape_xml ( configuration ) ) try : self . _execute_rpc ( rpc_command ) except InvalidInputError as e : self . discard_config ( ) raise InvalidInputError ( e . args [ 0 ] , self ) | Load candidate configuration . |
60,112 | def get_candidate_config ( self , merge = False , formal = False ) : command = "show configuration" if merge : command += " merge" if formal : command += " formal" response = self . _execute_config_show ( command ) match = re . search ( ".*(!! IOS XR Configuration.*)$" , response , re . DOTALL ) if match is not None : response = match . group ( 1 ) return response | Retrieve the configuration loaded as candidate config in your configuration session . |
60,113 | def compare_config ( self ) : _show_merge = self . _execute_config_show ( 'show configuration merge' ) _show_run = self . _execute_config_show ( 'show running-config' ) diff = difflib . unified_diff ( _show_run . splitlines ( 1 ) [ 2 : - 2 ] , _show_merge . splitlines ( 1 ) [ 2 : - 2 ] ) return '' . join ( [ x . replace ( '\r' , '' ) for x in diff ] ) | Compare configuration to be merged with the one on the device . |
60,114 | def commit_config ( self , label = None , comment = None , confirmed = None ) : rpc_command = '<Commit' if label : rpc_command += ' Label="%s"' % label if comment : rpc_command += ' Comment="%s"' % comment [ : 60 ] if confirmed : if 30 <= int ( confirmed ) <= 300 : rpc_command += ' Confirmed="%d"' % int ( confirmed ) else : raise InvalidInputError ( 'confirmed needs to be between 30 and 300 seconds' , self ) rpc_command += '/>' self . _execute_rpc ( rpc_command ) | Commit the candidate config . |
60,115 | def rollback ( self , rb_id = 1 ) : rpc_command = '<Unlock/><Rollback><Previous>{rb_id}</Previous></Rollback><Lock/>' . format ( rb_id = rb_id ) self . _execute_rpc ( rpc_command ) | Rollback the last committed configuration . |
60,116 | def _main ( ) : import sys def log ( message ) : print ( message ) def print_usage ( ) : log ( 'usage: %s <application key> <application secret> send <number> <message> <from_number>' % sys . argv [ 0 ] ) log ( ' %s <application key> <application secret> status <message_id>' % sys . argv [ 0 ] ) if len ( sys . argv ) > 4 and sys . argv [ 3 ] == 'send' : key , secret , number , message = sys . argv [ 1 ] , sys . argv [ 2 ] , sys . argv [ 4 ] , sys . argv [ 5 ] client = SinchSMS ( key , secret ) if len ( sys . argv ) > 6 : log ( client . send_message ( number , message , sys . argv [ 6 ] ) ) else : log ( client . send_message ( number , message ) ) elif len ( sys . argv ) > 3 and sys . argv [ 3 ] == 'status' : key , secret , message_id = sys . argv [ 1 ] , sys . argv [ 2 ] , sys . argv [ 4 ] client = SinchSMS ( key , secret ) log ( client . check_status ( message_id ) ) else : print_usage ( ) sys . exit ( 1 ) sys . exit ( 0 ) | A simple demo to be used from command line . |
60,117 | def _request ( self , url , values = None ) : if values : json_data = json . dumps ( values ) request = urllib2 . Request ( url , json_data . encode ( ) ) request . add_header ( 'content-type' , 'application/json' ) request . add_header ( 'authorization' , self . _auth ) connection = urllib2 . urlopen ( request ) response = connection . read ( ) connection . close ( ) else : request = urllib2 . Request ( url ) request . add_header ( 'authorization' , self . _auth ) connection = urllib2 . urlopen ( request ) response = connection . read ( ) connection . close ( ) try : result = json . loads ( response . decode ( ) ) except ValueError as exception : return { 'errorCode' : 1 , 'message' : str ( exception ) } return result | Send a request and read response . |
60,118 | def send_message ( self , to_number , message , from_number = None ) : values = { 'Message' : message } if from_number is not None : values [ 'From' ] = from_number return self . _request ( self . SEND_SMS_URL + to_number , values ) | Send a message to the specified number and return a response dictionary . |
60,119 | def convert_descriptor ( self , descriptor ) : fields = [ ] fallbacks = [ ] schema = tableschema . Schema ( descriptor ) for index , field in enumerate ( schema . fields ) : converted_type = self . convert_type ( field . type ) if not converted_type : converted_type = 'STRING' fallbacks . append ( index ) mode = 'NULLABLE' if field . required : mode = 'REQUIRED' fields . append ( { 'name' : _slugify_field_name ( field . name ) , 'type' : converted_type , 'mode' : mode , } ) converted_descriptor = { 'fields' : fields , } return ( converted_descriptor , fallbacks ) | Convert descriptor to BigQuery |
60,120 | def convert_row ( self , row , schema , fallbacks ) : for index , field in enumerate ( schema . fields ) : value = row [ index ] if index in fallbacks : value = _uncast_value ( value , field = field ) else : value = field . cast_value ( value ) row [ index ] = value return row | Convert row to BigQuery |
60,121 | def convert_type ( self , type ) : mapping = { 'any' : 'STRING' , 'array' : None , 'boolean' : 'BOOLEAN' , 'date' : 'DATE' , 'datetime' : 'DATETIME' , 'duration' : None , 'geojson' : None , 'geopoint' : None , 'integer' : 'INTEGER' , 'number' : 'FLOAT' , 'object' : None , 'string' : 'STRING' , 'time' : 'TIME' , 'year' : 'INTEGER' , 'yearmonth' : None , } if type not in mapping : message = 'Type %s is not supported' % type raise tableschema . exceptions . StorageError ( message ) return mapping [ type ] | Convert type to BigQuery |
60,122 | def restore_descriptor ( self , converted_descriptor ) : fields = [ ] for field in converted_descriptor [ 'fields' ] : field_type = self . restore_type ( field [ 'type' ] ) resfield = { 'name' : field [ 'name' ] , 'type' : field_type , } if field . get ( 'mode' , 'NULLABLE' ) != 'NULLABLE' : resfield [ 'constraints' ] = { 'required' : True } fields . append ( resfield ) descriptor = { 'fields' : fields } return descriptor | Restore descriptor from BigQuery |
60,123 | def restore_row ( self , row , schema ) : for index , field in enumerate ( schema . fields ) : if field . type == 'datetime' : row [ index ] = parse ( row [ index ] ) if field . type == 'date' : row [ index ] = parse ( row [ index ] ) . date ( ) if field . type == 'time' : row [ index ] = parse ( row [ index ] ) . time ( ) return schema . cast_row ( row ) | Restore row from BigQuery |
60,124 | def restore_type ( self , type ) : mapping = { 'BOOLEAN' : 'boolean' , 'DATE' : 'date' , 'DATETIME' : 'datetime' , 'INTEGER' : 'integer' , 'FLOAT' : 'number' , 'STRING' : 'string' , 'TIME' : 'time' , } if type not in mapping : message = 'Type %s is not supported' % type raise tableschema . exceptions . StorageError ( message ) return mapping [ type ] | Restore type from BigQuery |
60,125 | def _start_operation ( self , ast , operation , precedence ) : if TRACE_PARSE : print ( ' start_operation:' , repr ( operation ) , 'AST:' , ast ) op_prec = precedence [ operation ] while True : if ast [ 1 ] is None : if TRACE_PARSE : print ( ' start_op: ast[1] is None:' , repr ( ast ) ) ast [ 1 ] = operation if TRACE_PARSE : print ( ' , repr ( ast ) ) return ast prec = precedence [ ast [ 1 ] ] if prec > op_prec : if TRACE_PARSE : print ( ' start_op: prec > op_prec:' , repr ( ast ) ) ast = [ ast , operation , ast . pop ( - 1 ) ] if TRACE_PARSE : print ( ' , repr ( ast ) ) return ast if prec == op_prec : if TRACE_PARSE : print ( ' start_op: prec == op_prec:' , repr ( ast ) ) return ast if not ( inspect . isclass ( ast [ 1 ] ) and issubclass ( ast [ 1 ] , Function ) ) : raise ParseError ( error_code = PARSE_INVALID_NESTING ) if ast [ 0 ] is None : if TRACE_PARSE : print ( ' start_op: ast[0] is None:' , repr ( ast ) ) subexp = ast [ 1 ] ( * ast [ 2 : ] ) new_ast = [ ast [ 0 ] , operation , subexp ] if TRACE_PARSE : print ( ' , repr ( new_ast ) ) return new_ast else : if TRACE_PARSE : print ( ' start_op: else:' , repr ( ast ) ) ast [ 0 ] . append ( ast [ 1 ] ( * ast [ 2 : ] ) ) ast = ast [ 0 ] if TRACE_PARSE : print ( ' , repr ( ast ) ) | Returns an AST where all operations of lower precedence are finalized . |
60,126 | def tokenize ( self , expr ) : if not isinstance ( expr , basestring ) : raise TypeError ( 'expr must be string but it is %s.' % type ( expr ) ) TOKENS = { '*' : TOKEN_AND , '&' : TOKEN_AND , 'and' : TOKEN_AND , '+' : TOKEN_OR , '|' : TOKEN_OR , 'or' : TOKEN_OR , '~' : TOKEN_NOT , '!' : TOKEN_NOT , 'not' : TOKEN_NOT , '(' : TOKEN_LPAR , ')' : TOKEN_RPAR , '[' : TOKEN_LPAR , ']' : TOKEN_RPAR , 'true' : TOKEN_TRUE , '1' : TOKEN_TRUE , 'false' : TOKEN_FALSE , '0' : TOKEN_FALSE , 'none' : TOKEN_FALSE } position = 0 length = len ( expr ) while position < length : tok = expr [ position ] sym = tok . isalpha ( ) or tok == '_' if sym : position += 1 while position < length : char = expr [ position ] if char . isalnum ( ) or char in ( '.' , ':' , '_' ) : position += 1 tok += char else : break position -= 1 try : yield TOKENS [ tok . lower ( ) ] , tok , position except KeyError : if sym : yield TOKEN_SYMBOL , tok , position elif tok not in ( ' ' , '\t' , '\r' , '\n' ) : raise ParseError ( token_string = tok , position = position , error_code = PARSE_UNKNOWN_TOKEN ) position += 1 | Return an iterable of 3 - tuple describing each token given an expression unicode string . |
60,127 | def _rdistributive ( self , expr , op_example ) : if expr . isliteral : return expr expr_class = expr . __class__ args = ( self . _rdistributive ( arg , op_example ) for arg in expr . args ) args = tuple ( arg . simplify ( ) for arg in args ) if len ( args ) == 1 : return args [ 0 ] expr = expr_class ( * args ) dualoperation = op_example . dual if isinstance ( expr , dualoperation ) : expr = expr . distributive ( ) return expr | Recursively flatten the expr expression for the op_example AND or OR operation instance example . |
60,128 | def normalize ( self , expr , operation ) : assert operation in ( self . AND , self . OR , ) expr = expr . literalize ( ) expr = expr . simplify ( ) operation_example = operation ( self . TRUE , self . FALSE ) expr = self . _rdistributive ( expr , operation_example ) expr = expr . simplify ( ) return expr | Return a normalized expression transformed to its normal form in the given AND or OR operation . |
60,129 | def get_literals ( self ) : if self . isliteral : return [ self ] if not self . args : return [ ] return list ( itertools . chain . from_iterable ( arg . get_literals ( ) for arg in self . args ) ) | Return a list of all the literals contained in this expression . Include recursively subexpressions symbols . This includes duplicates . |
60,130 | def literalize ( self ) : if self . isliteral : return self args = tuple ( arg . literalize ( ) for arg in self . args ) if all ( arg is self . args [ i ] for i , arg in enumerate ( args ) ) : return self return self . __class__ ( * args ) | Return an expression where NOTs are only occurring as literals . Applied recursively to subexpressions . |
60,131 | def get_symbols ( self ) : return [ s if isinstance ( s , Symbol ) else s . args [ 0 ] for s in self . get_literals ( ) ] | Return a list of all the symbols contained in this expression . Include recursively subexpressions symbols . This includes duplicates . |
60,132 | def pretty ( self , indent = 0 , debug = False ) : debug_details = '' if debug : debug_details += '<isliteral=%r, iscanonical=%r>' % ( self . isliteral , self . iscanonical ) obj = "'%s'" % self . obj if isinstance ( self . obj , basestring ) else repr ( self . obj ) return ( ' ' * indent ) + ( '%s(%s%s)' % ( self . __class__ . __name__ , debug_details , obj ) ) | Return a pretty formatted representation of self . |
60,133 | def pretty ( self , indent = 0 , debug = False ) : debug_details = '' if debug : debug_details += '<isliteral=%r, iscanonical=%r' % ( self . isliteral , self . iscanonical ) identity = getattr ( self , 'identity' , None ) if identity is not None : debug_details += ', identity=%r' % ( identity ) annihilator = getattr ( self , 'annihilator' , None ) if annihilator is not None : debug_details += ', annihilator=%r' % ( annihilator ) dual = getattr ( self , 'dual' , None ) if dual is not None : debug_details += ', dual=%r' % ( dual ) debug_details += '>' cls = self . __class__ . __name__ args = [ a . pretty ( indent = indent + 2 , debug = debug ) for a in self . args ] pfargs = ',\n' . join ( args ) cur_indent = ' ' * indent new_line = '' if self . isliteral else '\n' return '{cur_indent}{cls}({debug_details}{new_line}{pfargs}\n{cur_indent})' . format ( ** locals ( ) ) | Return a pretty formatted representation of self as an indented tree . |
60,134 | def literalize ( self ) : expr = self . demorgan ( ) if isinstance ( expr , self . __class__ ) : return expr return expr . literalize ( ) | Return an expression where NOTs are only occurring as literals . |
60,135 | def simplify ( self ) : if self . iscanonical : return self expr = self . cancel ( ) if not isinstance ( expr , self . __class__ ) : return expr . simplify ( ) if expr . args [ 0 ] in ( self . TRUE , self . FALSE , ) : return expr . args [ 0 ] . dual expr = self . __class__ ( expr . args [ 0 ] . simplify ( ) ) expr . iscanonical = True return expr | Return a simplified expr in canonical form . |
60,136 | def cancel ( self ) : expr = self while True : arg = expr . args [ 0 ] if not isinstance ( arg , self . __class__ ) : return expr expr = arg . args [ 0 ] if not isinstance ( expr , self . __class__ ) : return expr | Cancel itself and following NOTs as far as possible . Returns the simplified expression . |
60,137 | def demorgan ( self ) : expr = self . cancel ( ) if expr . isliteral or not isinstance ( expr , self . NOT ) : return expr op = expr . args [ 0 ] return op . dual ( * ( self . __class__ ( arg ) . cancel ( ) for arg in op . args ) ) | Return an expr where the NOT function is moved inward . This is achieved by canceling double NOTs and using De Morgan laws . |
60,138 | def pretty ( self , indent = 1 , debug = False ) : debug_details = '' if debug : debug_details += '<isliteral=%r, iscanonical=%r>' % ( self . isliteral , self . iscanonical ) if self . isliteral : pretty_literal = self . args [ 0 ] . pretty ( indent = 0 , debug = debug ) return ( ' ' * indent ) + '%s(%s%s)' % ( self . __class__ . __name__ , debug_details , pretty_literal ) else : return super ( NOT , self ) . pretty ( indent = indent , debug = debug ) | Return a pretty formatted representation of self . Include additional debug details if debug is True . |
60,139 | def simplify ( self ) : if self . iscanonical : return self args = [ arg . simplify ( ) for arg in self . args ] expr = self . __class__ ( * args ) expr = expr . literalize ( ) expr = expr . flatten ( ) if self . annihilator in expr . args : return self . annihilator args = [ ] for arg in expr . args : if arg not in args : args . append ( arg ) if len ( args ) == 1 : return args [ 0 ] if self . identity in args : args . remove ( self . identity ) if len ( args ) == 1 : return args [ 0 ] for arg in args : if self . NOT ( arg ) in args : return self . annihilator i = 0 while i < len ( args ) - 1 : j = i + 1 ai = args [ i ] if not isinstance ( ai , self . dual ) : i += 1 continue while j < len ( args ) : aj = args [ j ] if not isinstance ( aj , self . dual ) or len ( ai . args ) != len ( aj . args ) : j += 1 continue negated = None for arg in ai . args : if arg in aj . args : pass elif self . NOT ( arg ) . cancel ( ) in aj . args : if negated is None : negated = arg else : negated = None break else : negated = None break if negated is not None : del args [ j ] aiargs = list ( ai . args ) aiargs . remove ( negated ) if len ( aiargs ) == 1 : args [ i ] = aiargs [ 0 ] else : args [ i ] = self . dual ( * aiargs ) if len ( args ) == 1 : return args [ 0 ] else : return self . __class__ ( * args ) . simplify ( ) j += 1 i += 1 args = self . absorb ( args ) if len ( args ) == 1 : return args [ 0 ] args . sort ( ) expr = self . __class__ ( * args ) expr . iscanonical = True return expr | Return a new simplified expression in canonical form from this expression . |
60,140 | def flatten ( self ) : args = list ( self . args ) i = 0 for arg in self . args : if isinstance ( arg , self . __class__ ) : args [ i : i + 1 ] = arg . args i += len ( arg . args ) else : i += 1 return self . __class__ ( * args ) | Return a new expression where nested terms of this expression are flattened as far as possible . |
60,141 | def absorb ( self , args ) : args = list ( args ) if not args : args = list ( self . args ) i = 0 while i < len ( args ) : absorber = args [ i ] j = 0 while j < len ( args ) : if j == i : j += 1 continue target = args [ j ] if not isinstance ( target , self . dual ) : j += 1 continue if absorber in target : del args [ j ] if j < i : i -= 1 continue neg_absorber = self . NOT ( absorber ) . cancel ( ) if neg_absorber in target : b = target . subtract ( neg_absorber , simplify = False ) if b is None : del args [ j ] if j < i : i -= 1 continue else : args [ j ] = b j += 1 continue if isinstance ( absorber , self . dual ) : remove = None for arg in absorber . args : narg = self . NOT ( arg ) . cancel ( ) if arg in target . args : pass elif narg in target . args : if remove is None : remove = narg else : remove = None break else : remove = None break if remove is not None : args [ j ] = target . subtract ( remove , simplify = True ) j += 1 i += 1 return args | Given an args sequence of expressions return a new list of expression applying absorption and negative absorption . |
60,142 | def subtract ( self , expr , simplify ) : args = self . args if expr in self . args : args = list ( self . args ) args . remove ( expr ) elif isinstance ( expr , self . __class__ ) : if all ( arg in self . args for arg in expr . args ) : args = tuple ( arg for arg in self . args if arg not in expr ) if len ( args ) == 0 : return None if len ( args ) == 1 : return args [ 0 ] newexpr = self . __class__ ( * args ) if simplify : newexpr = newexpr . simplify ( ) return newexpr | Return a new expression where the expr expression has been removed from this expression if it exists . |
60,143 | def distributive ( self ) : dual = self . dual args = list ( self . args ) for i , arg in enumerate ( args ) : if isinstance ( arg , dual ) : args [ i ] = arg . args else : args [ i ] = ( arg , ) prod = itertools . product ( * args ) args = tuple ( self . __class__ ( * arg ) . simplify ( ) for arg in prod ) if len ( args ) == 1 : return args [ 0 ] else : return dual ( * args ) | Return a term where the leading AND or OR terms are switched . |
60,144 | def ages ( self ) : match = self . _ages_re . match ( self . raw_fields . get ( 'ages' ) ) if not match : match = self . _ages_re2 . match ( self . raw_fields . get ( 'ages' ) ) return self . Ages ( int ( match . group ( 1 ) ) , int ( match . group ( 1 ) ) ) return self . Ages ( int ( match . group ( 1 ) ) , int ( match . group ( 2 ) ) ) | The age range that the user is interested in . |
60,145 | def single ( self ) : return 'display: none;' not in self . _looking_for_xpb . li ( id = 'ajax_single' ) . one_ ( self . _profile . profile_tree ) . attrib [ 'style' ] | Whether or not the user is only interested in people that are single . |
60,146 | def update ( self , ages = None , single = None , near_me = None , kinds = None , gentation = None ) : ages = ages or self . ages single = single if single is not None else self . single near_me = near_me if near_me is not None else self . near_me kinds = kinds or self . kinds gentation = gentation or self . gentation data = { 'okc_api' : '1' , 'searchprefs.submit' : '1' , 'update_prefs' : '1' , 'lquery' : '' , 'locid' : '0' , 'filter5' : '1, 1' } if kinds : kinds_numbers = self . _build_kinds_numbers ( kinds ) if kinds_numbers : data [ 'lookingfor' ] = kinds_numbers age_min , age_max = ages data . update ( looking_for_filters . legacy_build ( status = single , gentation = gentation , radius = 25 if near_me else 0 , age_min = age_min , age_max = age_max ) ) log . info ( simplejson . dumps ( { 'looking_for_update' : data } ) ) util . cached_property . bust_caches ( self ) response = self . _profile . authcode_post ( 'profileedit2' , data = data ) self . _profile . refresh ( reload = False ) return response . content | Update the looking for attributes of the logged in user . |
60,147 | def upload_and_confirm ( self , incoming , ** kwargs ) : response_dict = self . upload ( incoming ) if 'error' in response_dict : log . warning ( 'Failed to upload photo' ) return response_dict if isinstance ( incoming , Info ) : kwargs . setdefault ( 'thumb_nail_left' , incoming . thumb_nail_left ) kwargs . setdefault ( 'thumb_nail_top' , incoming . thumb_nail_top ) kwargs . setdefault ( 'thumb_nail_right' , incoming . thumb_nail_right ) kwargs . setdefault ( 'thumb_nail_bottom' , incoming . thumb_nail_bottom ) kwargs [ 'height' ] = response_dict . get ( 'height' ) kwargs [ 'width' ] = response_dict . get ( 'width' ) self . confirm ( response_dict [ 'id' ] , ** kwargs ) return response_dict | Upload the file to okcupid and confirm among other things its thumbnail position . |
60,148 | def delete ( self , photo_id , album_id = 0 ) : if isinstance ( photo_id , Info ) : photo_id = photo_id . id return self . _session . okc_post ( 'photoupload' , data = { 'albumid' : album_id , 'picid' : photo_id , 'authcode' : self . _authcode , 'picture.delete_ajax' : 1 } ) | Delete a photo from the logged in users account . |
60,149 | def __get_dbms_version ( self , make_connection = True ) : if not self . connection and make_connection : self . connect ( ) with self . connection . cursor ( ) as cursor : cursor . execute ( "SELECT SERVERPROPERTY('productversion')" ) return cursor . fetchone ( ) [ 0 ] | Returns the DBMS Version string or . If a connection to the database has not already been established a connection will be made when make_connection is True . |
60,150 | def respond_from_user_question ( self , user_question , importance ) : user_response_ids = [ option . id for option in user_question . answer_options if option . is_users ] match_response_ids = [ option . id for option in user_question . answer_options if option . is_match ] if len ( match_response_ids ) == len ( user_question . answer_options ) : match_response_ids = 'irrelevant' return self . respond ( user_question . id , user_response_ids , match_response_ids , importance , note = user_question . explanation or '' ) | Respond to a question in exactly the way that is described by the given user_question . |
60,151 | def respond_from_question ( self , question , user_question , importance ) : option_index = user_question . answer_text_to_option [ question . their_answer ] . id self . respond ( question . id , [ option_index ] , [ option_index ] , importance ) | Copy the answer given in question to the logged in user s profile . |
60,152 | def message ( self , username , message_text ) : if not isinstance ( username , six . string_types ) : username = username . username for mailbox in ( self . inbox , self . outbox ) : for thread in mailbox : if thread . correspondent . lower ( ) == username . lower ( ) : thread . reply ( message_text ) return return self . _message_sender . send ( username , message_text ) | Message an okcupid user . If an existing conversation between the logged in user and the target user can be found reply to that thread instead of starting a new one . |
60,153 | def get_question_answer_id ( self , question , fast = False , bust_questions_cache = False ) : if hasattr ( question , 'answer_id' ) : return question . answer_id user_question = self . get_user_question ( question , fast = fast , bust_questions_cache = bust_questions_cache ) return user_question . get_answer_id_for_question ( question ) | Get the index of the answer that was given to question |
60,154 | def update_looking_for ( profile_tree , looking_for ) : div = profile_tree . xpath ( "//div[@id = 'what_i_want']" ) [ 0 ] looking_for [ 'gentation' ] = div . xpath ( ".//li[@id = 'ajax_gentation']/text()" ) [ 0 ] . strip ( ) looking_for [ 'ages' ] = replace_chars ( div . xpath ( ".//li[@id = 'ajax_ages']/text()" ) [ 0 ] . strip ( ) ) looking_for [ 'near' ] = div . xpath ( ".//li[@id = 'ajax_near']/text()" ) [ 0 ] . strip ( ) looking_for [ 'single' ] = div . xpath ( ".//li[@id = 'ajax_single']/text()" ) [ 0 ] . strip ( ) try : looking_for [ 'seeking' ] = div . xpath ( ".//li[@id = 'ajax_lookingfor']/text()" ) [ 0 ] . strip ( ) except : pass | Update looking_for attribute of a Profile . |
60,155 | def update_details ( profile_tree , details ) : div = profile_tree . xpath ( "//div[@id = 'profile_details']" ) [ 0 ] for dl in div . iter ( 'dl' ) : title = dl . find ( 'dt' ) . text item = dl . find ( 'dd' ) if title == 'Last Online' and item . find ( 'span' ) is not None : details [ title . lower ( ) ] = item . find ( 'span' ) . text . strip ( ) elif title . lower ( ) in details and len ( item . text ) : details [ title . lower ( ) ] = item . text . strip ( ) else : continue details [ title . lower ( ) ] = replace_chars ( details [ title . lower ( ) ] ) | Update details attribute of a Profile . |
60,156 | def get_default_gentation ( gender , orientation ) : gender = gender . lower ( ) [ 0 ] orientation = orientation . lower ( ) return gender_to_orientation_to_gentation [ gender ] [ orientation ] | Return the default gentation for the given gender and orientation . |
60,157 | def update_mailbox ( self , mailbox_name = 'inbox' ) : with txn ( ) as session : last_updated_name = '{0}_last_updated' . format ( mailbox_name ) okcupyd_user = session . query ( model . OKCupydUser ) . join ( model . User ) . filter ( model . User . okc_id == self . _user . profile . id ) . with_for_update ( ) . one ( ) log . info ( simplejson . dumps ( { '{0}_last_updated' . format ( mailbox_name ) : helpers . datetime_to_string ( getattr ( okcupyd_user , last_updated_name ) ) } ) ) res = self . _sync_mailbox_until ( getattr ( self . _user , mailbox_name ) ( ) , getattr ( okcupyd_user , last_updated_name ) ) if not res : return None , None last_updated , threads , new_messages = res if last_updated : setattr ( okcupyd_user , last_updated_name , last_updated ) return threads , new_messages | Update the mailbox associated with the given mailbox name . |
60,158 | def photos ( self ) : for photo_info in self . dest_user . profile . photo_infos : self . dest_user . photo . delete ( photo_info ) return [ self . dest_user . photo . upload_and_confirm ( info ) for info in reversed ( self . source_profile . photo_infos ) ] | Copy photos to the destination user . |
60,159 | def essays ( self ) : for essay_name in self . dest_user . profile . essays . essay_names : setattr ( self . dest_user . profile . essays , essay_name , getattr ( self . source_profile . essays , essay_name ) ) | Copy essays from the source profile to the destination profile . |
60,160 | def looking_for ( self ) : looking_for = self . source_profile . looking_for return self . dest_user . profile . looking_for . update ( gentation = looking_for . gentation , single = looking_for . single , near_me = looking_for . near_me , kinds = looking_for . kinds , ages = looking_for . ages ) | Copy looking for attributes from the source profile to the destination profile . |
60,161 | def details ( self ) : return self . dest_user . profile . details . convert_and_update ( self . source_profile . details . as_dict ) | Copy details from the source profile to the destination profile . |
60,162 | def message ( self , message , thread_id = None ) : return_value = helpers . Messager ( self . _session ) . send ( self . username , message , self . authcode , thread_id ) self . refresh ( reload = False ) return return_value | Message the user associated with this profile . |
60,163 | def rate ( self , rating ) : parameters = { 'voterid' : self . _current_user_id , 'target_userid' : self . id , 'type' : 'vote' , 'cf' : 'profile2' , 'target_objectid' : 0 , 'vote_type' : 'personality' , 'score' : rating , } response = self . _session . okc_post ( 'vote_handler' , data = parameters ) response_json = response . json ( ) log_function = log . info if response_json . get ( 'status' , False ) else log . error log_function ( simplejson . dumps ( { 'rate_response' : response_json , 'sent_parameters' : parameters , 'headers' : dict ( self . _session . headers ) } ) ) self . refresh ( reload = False ) | Rate this profile as the user that was logged in with the session that this object was instantiated with . |
60,164 | def arity_evaluation_checker ( function ) : is_class = inspect . isclass ( function ) if is_class : function = function . __init__ function_info = inspect . getargspec ( function ) function_args = function_info . args if is_class : function_args = function_args [ 1 : ] def evaluation_checker ( * args , ** kwargs ) : kwarg_keys = set ( kwargs . keys ( ) ) if function_info . keywords is None : acceptable_kwargs = function_args [ len ( args ) : ] if not kwarg_keys . issubset ( acceptable_kwargs ) : TypeError ( "Unrecognized Arguments: {0}" . format ( [ key for key in kwarg_keys if key not in acceptable_kwargs ] ) ) needed_args = function_args [ len ( args ) : ] if function_info . defaults : needed_args = needed_args [ : - len ( function_info . defaults ) ] return not needed_args or kwarg_keys . issuperset ( needed_args ) return evaluation_checker | Build an evaluation checker that will return True when it is guaranteed that all positional arguments have been accounted for . |
60,165 | def rerecord ( ctx , rest ) : run ( 'tox -e py27 -- --cassette-mode all --record --credentials {0} -s' . format ( rest ) , pty = True ) run ( 'tox -e py27 -- --resave --scrub --credentials test_credentials {0} -s' . format ( rest ) , pty = True ) | Rerecord tests . |
60,166 | def runSavedQueryByUrl ( self , saved_query_url , returned_properties = None ) : try : if "=" not in saved_query_url : raise exception . BadValue ( ) saved_query_id = saved_query_url . split ( "=" ) [ - 1 ] if not saved_query_id : raise exception . BadValue ( ) except : error_msg = "No saved query id is found in the url" self . log . error ( error_msg ) raise exception . BadValue ( error_msg ) return self . _runSavedQuery ( saved_query_id , returned_properties = returned_properties ) | Query workitems using the saved query url |
60,167 | def runSavedQueryByID ( self , saved_query_id , returned_properties = None ) : if not isinstance ( saved_query_id , six . string_types ) or not saved_query_id : excp_msg = "Please specify a valid saved query id" self . log . error ( excp_msg ) raise exception . BadValue ( excp_msg ) return self . _runSavedQuery ( saved_query_id , returned_properties = returned_properties ) | Query workitems using the saved query id |
60,168 | def put ( self , url , data = None , verify = False , headers = None , proxies = None , timeout = 60 , ** kwargs ) : self . log . debug ( "Put a request to %s with data: %s" , url , data ) response = requests . put ( url , data = data , verify = verify , headers = headers , proxies = proxies , timeout = timeout , ** kwargs ) if response . status_code not in [ 200 , 201 ] : self . log . error ( 'Failed PUT request at <%s> with response: %s' , url , response . content ) response . raise_for_status ( ) return response | Sends a PUT request . Refactor from requests module |
60,169 | def validate_url ( cls , url ) : if url is None : return None url = url . strip ( ) while url . endswith ( '/' ) : url = url [ : - 1 ] return url | Strip and trailing slash to validate a url |
60,170 | def _initialize ( self ) : self . log . debug ( "Start initializing data from %s" , self . url ) resp = self . get ( self . url , verify = False , proxies = self . rtc_obj . proxies , headers = self . rtc_obj . headers ) self . __initialize ( resp ) self . log . info ( "Finish the initialization for <%s %s>" , self . __class__ . __name__ , self ) | Initialize the object from the request |
60,171 | def __initialize ( self , resp ) : raw_data = xmltodict . parse ( resp . content ) root_key = list ( raw_data . keys ( ) ) [ 0 ] self . raw_data = raw_data . get ( root_key ) self . __initializeFromRaw ( ) | Initialize from the response |
60,172 | def getTemplate ( self , copied_from , template_name = None , template_folder = None , keep = False , encoding = "UTF-8" ) : return self . templater . getTemplate ( copied_from , template_name = template_name , template_folder = template_folder , keep = keep , encoding = encoding ) | Get template from some to - be - copied workitems |
60,173 | def getTemplates ( self , workitems , template_folder = None , template_names = None , keep = False , encoding = "UTF-8" ) : self . templater . getTemplates ( workitems , template_folder = template_folder , template_names = template_names , keep = keep , encoding = encoding ) | Get templates from a group of to - be - copied workitems and write them to files named after the names in template_names respectively . |
60,174 | def listFieldsFromWorkitem ( self , copied_from , keep = False ) : return self . templater . listFieldsFromWorkitem ( copied_from , keep = keep ) | List all the attributes to be rendered directly from some to - be - copied workitems |
60,175 | def createWorkitem ( self , item_type , title , description = None , projectarea_id = None , projectarea_name = None , template = None , copied_from = None , keep = False , ** kwargs ) : if not isinstance ( projectarea_id , six . string_types ) or not projectarea_id : projectarea = self . getProjectArea ( projectarea_name ) projectarea_id = projectarea . id else : projectarea = self . getProjectAreaByID ( projectarea_id ) itemtype = projectarea . getItemType ( item_type ) if not template : if not copied_from : self . log . error ( "Please choose either-or between " "template and copied_from" ) raise exception . EmptyAttrib ( "At least choose either-or " "between template and copied_from" ) self . _checkMissingParamsFromWorkitem ( copied_from , keep = keep , ** kwargs ) kwargs = self . _retrieveValidInfo ( projectarea_id , ** kwargs ) wi_raw = self . templater . renderFromWorkitem ( copied_from , keep = keep , encoding = "UTF-8" , title = title , description = description , ** kwargs ) else : self . _checkMissingParams ( template , ** kwargs ) kwargs = self . _retrieveValidInfo ( projectarea_id , ** kwargs ) wi_raw = self . templater . render ( template , title = title , description = description , ** kwargs ) self . log . info ( "Start to create a new <%s> with raw data: %s" , item_type , wi_raw ) wi_url_post = "/" . join ( [ self . url , "oslc/contexts" , projectarea_id , "workitems/%s" % itemtype . identifier ] ) return self . _createWorkitem ( wi_url_post , wi_raw ) | Create a workitem |
60,176 | def copyWorkitem ( self , copied_from , title = None , description = None , prefix = None ) : copied_wi = self . getWorkitem ( copied_from ) if title is None : title = copied_wi . title if prefix is not None : title = prefix + title if description is None : description = copied_wi . description if prefix is not None : description = prefix + description self . log . info ( "Start to create a new <Workitem>, copied from " , "<Workitem %s>" , copied_from ) wi_url_post = "/" . join ( [ self . url , "oslc/contexts/%s" % copied_wi . contextId , "workitems" , "%s" % copied_wi . type . split ( "/" ) [ - 1 ] ] ) wi_raw = self . templater . renderFromWorkitem ( copied_from , keep = True , encoding = "UTF-8" , title = title , description = description ) return self . _createWorkitem ( wi_url_post , wi_raw ) | Create a workitem by copying from an existing one |
60,177 | def _checkMissingParams ( self , template , ** kwargs ) : parameters = self . listFields ( template ) self . _findMissingParams ( parameters , ** kwargs ) | Check the missing parameters for rendering from the template file |
60,178 | def _checkMissingParamsFromWorkitem ( self , copied_from , keep = False , ** kwargs ) : parameters = self . listFieldsFromWorkitem ( copied_from , keep = keep ) self . _findMissingParams ( parameters , ** kwargs ) | Check the missing parameters for rendering directly from the copied workitem |
60,179 | def queryWorkitems ( self , query_str , projectarea_id = None , projectarea_name = None , returned_properties = None , archived = False ) : rp = returned_properties return self . query . queryWorkitems ( query_str = query_str , projectarea_id = projectarea_id , projectarea_name = projectarea_name , returned_properties = rp , archived = archived ) | Query workitems with the query string in a certain project area |
60,180 | def addComment ( self , msg = None ) : origin_comment = comments_url = "/" . join ( [ self . url , "rtc_cm:comments" ] ) headers = copy . deepcopy ( self . rtc_obj . headers ) resp = self . get ( comments_url , verify = False , proxies = self . rtc_obj . proxies , headers = headers ) raw_data = xmltodict . parse ( resp . content ) total_cnt = raw_data [ "oslc_cm:Collection" ] [ "@oslc_cm:totalCount" ] comment_url = "/" . join ( [ comments_url , total_cnt ] ) comment_msg = origin_comment . format ( comment_url , msg ) headers [ "Content-Type" ] = self . OSLC_CR_RDF headers [ "Accept" ] = self . OSLC_CR_RDF headers [ "OSLC-Core-Version" ] = "2.0" headers [ "If-Match" ] = resp . headers . get ( "etag" ) req_url = "/" . join ( [ comments_url , "oslc:comment" ] ) resp = self . post ( req_url , verify = False , headers = headers , proxies = self . rtc_obj . proxies , data = comment_msg ) self . log . info ( "Successfully add comment: [%s] for <Workitem %s>" , msg , self ) raw_data = xmltodict . parse ( resp . content ) return Comment ( comment_url , self . rtc_obj , raw_data = raw_data [ "rdf:RDF" ] [ "rdf:Description" ] ) | Add a comment to this workitem |
60,181 | def addSubscriber ( self , email ) : headers , raw_data = self . _perform_subscribe ( ) existed_flag , raw_data = self . _add_subscriber ( email , raw_data ) if existed_flag : return self . _update_subscribe ( headers , raw_data ) self . log . info ( "Successfully add a subscriber: %s for <Workitem %s>" , email , self ) | Add a subscriber to this workitem |
60,182 | def addSubscribers ( self , emails_list ) : if not hasattr ( emails_list , "__iter__" ) : error_msg = "Input parameter 'emails_list' is not iterable" self . log . error ( error_msg ) raise exception . BadValue ( error_msg ) existed_flags = False headers , raw_data = self . _perform_subscribe ( ) for email in emails_list : existed_flag , raw_data = self . _add_subscriber ( email , raw_data ) existed_flags = existed_flags and existed_flag if existed_flags : return self . _update_subscribe ( headers , raw_data ) self . log . info ( "Successfully add subscribers: %s for <Workitem %s>" , emails_list , self ) | Add subscribers to this workitem |
60,183 | def removeSubscriber ( self , email ) : headers , raw_data = self . _perform_subscribe ( ) missing_flag , raw_data = self . _remove_subscriber ( email , raw_data ) if missing_flag : return self . _update_subscribe ( headers , raw_data ) self . log . info ( "Successfully remove a subscriber: %s for <Workitem %s>" , email , self ) | Remove a subscriber from this workitem |
60,184 | def removeSubscribers ( self , emails_list ) : if not hasattr ( emails_list , "__iter__" ) : error_msg = "Input parameter 'emails_list' is not iterable" self . log . error ( error_msg ) raise exception . BadValue ( error_msg ) missing_flags = True headers , raw_data = self . _perform_subscribe ( ) for email in emails_list : missing_flag , raw_data = self . _remove_subscriber ( email , raw_data ) missing_flags = missing_flags and missing_flag if missing_flags : return self . _update_subscribe ( headers , raw_data ) self . log . info ( "Successfully remove subscribers: %s for <Workitem %s>" , emails_list , self ) | Remove subscribers from this workitem |
60,185 | def getParent ( self , returned_properties = None ) : parent_tag = ( "rtc_cm:com.ibm.team.workitem.linktype." "parentworkitem.parent" ) rp = returned_properties parent = ( self . rtc_obj . _get_paged_resources ( "Parent" , workitem_id = self . identifier , customized_attr = parent_tag , page_size = "5" , returned_properties = rp ) ) if parent : return parent [ 0 ] return None | Get the parent workitem of this workitem |
60,186 | def getChildren ( self , returned_properties = None ) : children_tag = ( "rtc_cm:com.ibm.team.workitem.linktype." "parentworkitem.children" ) rp = returned_properties return ( self . rtc_obj . _get_paged_resources ( "Children" , workitem_id = self . identifier , customized_attr = children_tag , page_size = "10" , returned_properties = rp ) ) | Get all the children workitems of this workitem |
60,187 | def getChangeSets ( self ) : changeset_tag = ( "rtc_cm:com.ibm.team.filesystem.workitems." "change_set.com.ibm.team.scm.ChangeSet" ) return ( self . rtc_obj . _get_paged_resources ( "ChangeSet" , workitem_id = self . identifier , customized_attr = changeset_tag , page_size = "10" ) ) | Get all the ChangeSets of this workitem |
60,188 | def addParent ( self , parent_id ) : if isinstance ( parent_id , bool ) : raise exception . BadValue ( "Please input a valid workitem id" ) if isinstance ( parent_id , six . string_types ) : parent_id = int ( parent_id ) if not isinstance ( parent_id , int ) : raise exception . BadValue ( "Please input a valid workitem id" ) self . log . debug ( "Try to add a parent <Workitem %s> to current " "<Workitem %s>" , parent_id , self ) headers = copy . deepcopy ( self . rtc_obj . headers ) headers [ "Content-Type" ] = self . OSLC_CR_JSON req_url = "" . join ( [ self . url , "?oslc_cm.properties=com.ibm.team.workitem." , "linktype.parentworkitem.parent" ] ) parent_tag = ( "rtc_cm:com.ibm.team.workitem.linktype." "parentworkitem.parent" ) parent_url = ( "{0}/resource/itemName/com.ibm.team." "workitem.WorkItem/{1}" . format ( self . rtc_obj . url , parent_id ) ) parent_original = { parent_tag : [ { "rdf:resource" : parent_url } ] } self . put ( req_url , verify = False , proxies = self . rtc_obj . proxies , headers = headers , data = json . dumps ( parent_original ) ) self . log . info ( "Successfully add a parent <Workitem %s> to current " "<Workitem %s>" , parent_id , self ) | Add a parent to current workitem |
60,189 | def addChild ( self , child_id ) : self . log . debug ( "Try to add a child <Workitem %s> to current " "<Workitem %s>" , child_id , self ) self . _addChildren ( [ child_id ] ) self . log . info ( "Successfully add a child <Workitem %s> to current " "<Workitem %s>" , child_id , self ) | Add a child to current workitem |
60,190 | def addChildren ( self , child_ids ) : if not hasattr ( child_ids , "__iter__" ) : error_msg = "Input parameter 'child_ids' is not iterable" self . log . error ( error_msg ) raise exception . BadValue ( error_msg ) self . log . debug ( "Try to add children <Workitem %s> to current " "<Workitem %s>" , child_ids , self ) self . _addChildren ( child_ids ) self . log . info ( "Successfully add children <Workitem %s> to current " "<Workitem %s>" , child_ids , self ) | Add children to current workitem |
60,191 | def removeParent ( self ) : self . log . debug ( "Try to remove the parent workitem from current " "<Workitem %s>" , self ) headers = copy . deepcopy ( self . rtc_obj . headers ) headers [ "Content-Type" ] = self . OSLC_CR_JSON req_url = "" . join ( [ self . url , "?oslc_cm.properties=com.ibm.team.workitem." , "linktype.parentworkitem.parent" ] ) parent_tag = ( "rtc_cm:com.ibm.team.workitem.linktype." "parentworkitem.parent" ) parent_original = { parent_tag : [ ] } self . put ( req_url , verify = False , proxies = self . rtc_obj . proxies , headers = headers , data = json . dumps ( parent_original ) ) self . log . info ( "Successfully remove the parent workitem of current " "<Workitem %s>" , self ) | Remove the parent workitem from current workitem |
60,192 | def removeChild ( self , child_id ) : self . log . debug ( "Try to remove a child <Workitem %s> from current " "<Workitem %s>" , child_id , self ) self . _removeChildren ( [ child_id ] ) self . log . info ( "Successfully remove a child <Workitem %s> from " "current <Workitem %s>" , child_id , self ) | Remove a child from current workitem |
60,193 | def removeChildren ( self , child_ids ) : if not hasattr ( child_ids , "__iter__" ) : error_msg = "Input parameter 'child_ids' is not iterable" self . log . error ( error_msg ) raise exception . BadValue ( error_msg ) self . log . debug ( "Try to remove children <Workitem %s> from current " "<Workitem %s>" , child_ids , self ) self . _removeChildren ( child_ids ) self . log . info ( "Successfully remove children <Workitem %s> from " "current <Workitem %s>" , child_ids , self ) | Remove children from current workitem |
60,194 | def addAttachment ( self , filepath ) : proj_id = self . contextId fa = self . rtc_obj . getFiledAgainst ( self . filedAgainst , projectarea_id = proj_id ) fa_id = fa . url . split ( "/" ) [ - 1 ] headers = copy . deepcopy ( self . rtc_obj . headers ) if headers . __contains__ ( "Content-Type" ) : headers . __delitem__ ( "Content-Type" ) filename = os . path . basename ( filepath ) fileh = open ( filepath , "rb" ) files = { "attach" : ( filename , fileh , "application/octet-stream" ) } params = { "projectId" : proj_id , "multiple" : "true" , "category" : fa_id } req_url = "" . join ( [ self . rtc_obj . url , "/service/com.ibm.team.workitem.service." , "internal.rest.IAttachmentRestService/" ] ) resp = self . post ( req_url , verify = False , headers = headers , proxies = self . rtc_obj . proxies , params = params , files = files ) raw_data = xmltodict . parse ( resp . content ) json_body = json . loads ( raw_data [ "html" ] [ "body" ] [ "textarea" ] ) attachment_info = json_body [ "files" ] [ 0 ] return self . _add_attachment_link ( attachment_info ) | Upload attachment to a workitem |
60,195 | def list_containers ( active = True , defined = True , as_object = False , config_path = None ) : if config_path : if not os . path . exists ( config_path ) : return tuple ( ) try : entries = _lxc . list_containers ( active = active , defined = defined , config_path = config_path ) except ValueError : return tuple ( ) else : try : entries = _lxc . list_containers ( active = active , defined = defined ) except ValueError : return tuple ( ) if as_object : return tuple ( [ Container ( name , config_path ) for name in entries ] ) else : return entries | List the containers on the system . |
60,196 | def attach_run_command ( cmd ) : if isinstance ( cmd , tuple ) : return _lxc . attach_run_command ( cmd ) elif isinstance ( cmd , list ) : return _lxc . attach_run_command ( ( cmd [ 0 ] , cmd ) ) else : return _lxc . attach_run_command ( ( cmd , [ cmd ] ) ) | Run a command when attaching |
60,197 | def arch_to_personality ( arch ) : if isinstance ( arch , bytes ) : arch = unicode ( arch ) return _lxc . arch_to_personality ( arch ) | Determine the process personality corresponding to the architecture |
60,198 | def add_device_net ( self , name , destname = None ) : if not self . running : return False if os . path . exists ( "/sys/class/net/%s/phy80211/name" % name ) : with open ( "/sys/class/net/%s/phy80211/name" % name ) as fd : phy = fd . read ( ) . strip ( ) if subprocess . call ( [ 'iw' , 'phy' , phy , 'set' , 'netns' , str ( self . init_pid ) ] ) != 0 : return False if destname : def rename_interface ( args ) : old , new = args return subprocess . call ( [ 'ip' , 'link' , 'set' , 'dev' , old , 'name' , new ] ) return self . attach_wait ( rename_interface , ( name , destname ) , namespaces = ( CLONE_NEWNET ) ) == 0 return True if not destname : destname = name if not os . path . exists ( "/sys/class/net/%s/" % name ) : return False return subprocess . call ( [ 'ip' , 'link' , 'set' , 'dev' , name , 'netns' , str ( self . init_pid ) , 'name' , destname ] ) == 0 | Add network device to running container . |
60,199 | def append_config_item ( self , key , value ) : return _lxc . Container . set_config_item ( self , key , value ) | Append value to key assuming key is a list . If key isn t a list value will be set as the value of key . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.