idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
1,200
def model_to_dict(instance, **options):
    "Takes a model instance and converts it into a dict."
    options = _defaults(options)
    attrs = {}

    # Optional prehook may replace the instance, or veto it by returning None.
    if options['prehook']:
        if isinstance(options['prehook'], collections.Callable):
            instance = options['prehook'](instance)
            if instance is None:
                return attrs

    for alias in options['fields']:
        accessor = options['aliases'].get(alias, alias)
        key = options['prefix'] + alias
        if options['camelcase']:
            key = convert_to_camel(key)
        value = get_field_value(instance, accessor,
                                allow_missing=options['allow_missing'])

        # Related model instances / querysets are serialized recursively
        # with their own per-accessor option set.
        if isinstance(value, (models.Model, QuerySet)):
            _options = _defaults(options['related'].get(accessor, {}))
            if '%(accessor)s' in _options['prefix']:
                _options['prefix'] = _options['prefix'] % {'accessor': alias}
            if isinstance(value, models.Model):
                if (len(_options['fields']) == 1 and _options['flat']
                        and not _options['merge']):
                    # single flat field: collapse the dict to its lone value
                    value = list(serialize(value, **_options).values())[0]
                else:
                    _attrs = serialize(value, **_options)
                    if _options['merge']:
                        # merged relations fold into the parent dict
                        attrs.update(_attrs)
                        continue
                    value = _attrs
            else:
                value = serialize(value, **_options)
        attrs[key] = value

    if options['posthook']:
        attrs = options['posthook'](instance, attrs)
    return attrs
Takes a model instance and converts it into a dict .
1,201
def set_save_directory(base, source):
    """Set the root directory under which screenshots are saved.

    Creates ``base/source`` if needed and records it in
    ``world.screenshot_root``.  Uses EAFP makedirs to avoid the
    check-then-create race of ``isdir`` + ``makedirs``.
    """
    root = os.path.join(base, source)
    try:
        os.makedirs(root)
    except OSError:
        # directory may already exist (or another process created it);
        # only re-raise when it genuinely is not a directory
        if not os.path.isdir(root):
            raise
    world.screenshot_root = root
Sets the root save directory for saving screenshots . Screenshots will be saved in subdirectories under this directory by browser window size .
1,202
def change_password(self, id, new, old=None, change_token=True):
    """Change a user's password."""
    schema = UserSchema(exclude=('password', 'password_confirm'))
    body = {'old': old, 'new': new, 'new_confirm': new}
    resp = self.service.post(
        self.base + str(id) + '/password/',
        params={'change_token': change_token},
        json=body,
    )
    return self.service.decode(schema, resp)
Change a user's password.
1,203
def change_token(self, id):
    """Change a user's token."""
    schema = UserSchema(exclude=('password', 'password_confirm'))
    resp = self.service.post(self.base + str(id) + '/token/')
    return self.service.decode(schema, resp)
Change a user's token.
1,204
def bulk_copy(self, ids):
    """Bulk copy a set of users."""
    return self.service.bulk_copy(self.base, self.RESOURCE, ids, UserSchema())
Bulk copy a set of users .
1,205
def list(self, id, filter=None, type=None, sort=None, limit=None, page=None):
    """Get a list of a device's attachments.

    Returns a :class:`Page` of decoded attachments plus pagination links.
    """
    # BUG FIX: exclude must be a tuple; ('path') is just the string 'path',
    # which marshmallow would iterate as 'p', 'a', 't', 'h'.
    schema = AttachmentSchema(exclude=('path',))
    resp = self.service.list(self._base(id), filter, type, sort, limit, page)
    attachments, links = self.service.decode(schema, resp, many=True,
                                             links=True)
    return Page(attachments, links)
Get a list of a device s attachments .
1,206
def iter_list(self, id, *args, **kwargs):
    """Iterate all attachments by repeatedly calling :meth:`list`.

    Unlike ``list`` (one page), this yields every attachment across pages.
    """
    lister = partial(self.list, id)
    return self.service.iter_list(lister, *args, **kwargs)
Get a list of attachments . Whereas list fetches a single page of attachments according to its limit and page arguments iter_list returns all attachments by internally making successive calls to list .
1,207
def get(self, id, attid):
    """Get a device's attachment."""
    resp = self.service.get_id(self._base(id), attid)
    return self.service.decode(AttachmentSchema(), resp)
Get a device s attachment .
1,208
def create(self, id, fd, filename='attachment-name'):
    """Add an attachment to a device, uploaded from file-like *fd*."""
    # server-assigned fields are excluded from the decoded schema
    schema = AttachmentSchema(
        exclude=('id', 'created', 'updated', 'size', 'path', 'device_id'))
    resp = self.service.post(self._base(id),
                             files={'file': (filename, fd)})
    return self.service.decode(schema, resp)
Add an attachment to a device .
1,209
def download(self, id, attid):
    """Download a device's attachment.

    Returns a ``(BytesIO, filename)`` tuple with the buffer rewound to 0.
    """
    resp = self.service.get_id(self._base(id), attid,
                               params={'format': 'download'}, stream=True)
    buf = io.BytesIO()
    stream.stream_response_to_file(resp, path=buf)
    resp.close()
    buf.seek(0)
    return (buf, self.service.filename(resp))
Download a device s attachment .
1,210
def edit(self, resource):
    """Edit a device's attachment."""
    # encode without the read-only fields, decode the full representation
    out_schema = AttachmentSchema(
        exclude=('id', 'created', 'updated', 'size', 'path', 'device_id'))
    json = self.service.encode(out_schema, resource)
    resp = self.service.edit(self._base(resource.device_id),
                             resource.id, json)
    return self.service.decode(AttachmentSchema(), resp)
Edit a device s attachment .
1,211
def delete(self, id, attid):
    """Delete a device's attachment."""
    # BUG FIX: this previously called self.service.edit(base, attid), which
    # edits (and is missing its json payload argument); deletion goes
    # through delete_id, mirroring get_id in the get() method.
    return self.service.delete_id(self._base(id), attid)
Delete a device s attachment .
1,212
def get_by_name(self, name):
    """Get a device by name.

    Raises:
        CDRouterError: if no device with that name exists.
    """
    rs, _ = self.list(filter=field('name').eq(name), limit=1)
    # BUG FIX: `len(rs) is 0` compared object identity, not value;
    # use truthiness instead.
    if not rs:
        raise CDRouterError('no such device')
    return rs[0]
Get a device by name .
1,213
def edit(self, resource):
    """Edit a device."""
    # encode without server-managed fields, decode the full representation
    out_schema = DeviceSchema(
        exclude=('id', 'created', 'updated', 'result_id', 'attachments_dir'))
    json = self.service.encode(out_schema, resource)
    resp = self.service.edit(self.base, resource.id, json)
    return self.service.decode(DeviceSchema(), resp)
Edit a device .
1,214
def get_connection(self, id):
    """Get information on the proxy connection to a device's management interface."""
    resp = self.service.get(self.base + str(id) + '/connect/')
    return self.service.decode(ConnectionSchema(), resp)
Get information on proxy connection to a device s management interface .
1,215
def connect(self, id):
    """Open a proxy connection to a device's management interface."""
    resp = self.service.post(self.base + str(id) + '/connect/')
    return self.service.decode(ConnectionSchema(), resp)
Open proxy connection to a device s management interface .
1,216
def disconnect(self, id):
    """Close the proxy connection to a device's management interface."""
    return self.service.post(self.base + str(id) + '/disconnect/')
Close proxy connection to a device s management interface .
1,217
def power_on(self, id):
    """Power on a device using its power-on command."""
    resp = self.service.post(self.base + str(id) + '/power/on/')
    return self.service.decode(PowerCmdSchema(), resp)
Power on a device using its power-on command.
1,218
def bulk_copy(self, ids):
    """Bulk copy a set of devices."""
    return self.service.bulk_copy(self.base, self.RESOURCE, ids,
                                  DeviceSchema())
Bulk copy a set of devices .
1,219
def ensure_list(value: "Optional[Union[T, Sequence[T]]]") -> "Sequence[T]":
    """Wrap *value* in a list if it is not one already.

    ``None`` maps to an empty list; an existing list is returned unchanged;
    any other value (including tuples) is wrapped in a one-element list.
    """
    # FIX: annotation now reflects that None is accepted (and quoted so it
    # is not evaluated at definition time).
    if value is None:
        return []
    return value if isinstance(value, list) else [value]
Wrap value in list if it is not one .
1,220
def color(out_string, color='grn'):
    """Highlight *out_string* with a bright ANSI color for the terminal."""
    palette = {
        'blk': Fore.BLACK,
        'blu': Fore.BLUE,
        'cyn': Fore.CYAN,
        'grn': Fore.GREEN,
        'mag': Fore.MAGENTA,
        'red': Fore.RED,
        'wht': Fore.WHITE,
        'yel': Fore.YELLOW,
    }
    try:
        init()
        return (palette[color] + Style.BRIGHT + out_string
                + Fore.RESET + Style.NORMAL)
    except AttributeError:
        # colorama could not provide styling; fall back to plain text
        return out_string
Highlight string for terminal color coding .
1,221
def color_diffs(string):
    """Add ANSI color codes to unified-diff marker lines."""
    # order matters: headers first, then per-line +/- markers, then hunks
    replacements = (
        ('--- ', color('--- ', 'red')),
        ('\n+++ ', color('\n+++ ')),
        ('\n-', color('\n-', 'red')),
        ('\n+', color('\n+')),
        ('\n@@ ', color('\n@@ ', 'yel')),
    )
    for old, new in replacements:
        string = string.replace(old, new)
    return string
Add color ANSI codes for diff lines .
1,222
def index():
    """List linked OAuth accounts for the current user."""
    oauth = current_app.extensions['oauthlib.client']
    services = []
    service_map = {}
    position = 0
    for appid, conf in six.iteritems(
            current_app.config['OAUTHCLIENT_REMOTE_APPS']):
        if not conf.get('hide', False):
            services.append(dict(
                appid=appid,
                title=conf['title'],
                icon=conf.get('icon', None),
                description=conf.get('description', None),
                account=None,
            ))
            # remember where each consumer key landed so accounts can be
            # attached below
            service_map[oauth.remote_apps[appid].consumer_key] = position
            position += 1
    accounts = RemoteAccount.query.filter_by(
        user_id=current_user.get_id()).all()
    for account in accounts:
        if account.client_id in service_map:
            services[service_map[account.client_id]]['account'] = account
    services.sort(key=itemgetter('title'))
    return render_template('invenio_oauthclient/settings/index.html',
                           services=services)
List linked accounts .
1,223
def element_id_by_label(browser, label):
    """Return the id referenced by a label's ``for`` attribute (False if absent)."""
    selector = XPathSelector(
        browser, unicode('//label[contains(., "%s")]' % label))
    if not selector:
        return False
    return selector.get_attribute('for')
Return the id referenced by a label's "for" attribute.
1,224
def find_field(browser, field, value):
    """Locate an input field of a given value, trying id, then name, then label."""
    by_id = find_field_by_id(browser, field, value)
    by_name = find_field_by_name(browser, field, value)
    by_label = find_field_by_label(browser, field, value)
    return by_id + by_name + by_label
Locate an input field of a given value
1,225
def find_any_field(browser, field_types, field_name):
    """Find a field matching any of the specified types, concatenating results."""
    matches = (find_field(browser, field_type, field_name)
               for field_type in field_types)
    return reduce(operator.add, matches)
Find a field of any of the specified types .
1,226
def find_field_by_label(browser, field, label):
    """Locate the control input that a <label> points to via its ``for`` attribute."""
    xpath_template = field_xpath(field, 'id', escape=False)
    return XPathSelector(
        browser,
        xpath_template % u'//label[contains(., "{0}")]/@for'.format(label))
Locate the control input that has a label pointing to it
1,227
def wait_for(func):
    """Decorator: poll *func* every 0.2s until truthy or timeout.

    The wrapped callable accepts an extra ``timeout`` keyword (seconds,
    default 15) and returns the last value produced by *func* (which may
    be falsy if the timeout expired).
    """
    import functools

    # FIX: preserve the wrapped function's name/docstring for debugging
    @functools.wraps(func)
    def wrapped(*args, **kwargs):
        timeout = kwargs.pop('timeout', 15)
        start = time()
        result = None
        while time() - start < timeout:
            result = func(*args, **kwargs)
            if result:
                break
            sleep(0.2)
        return result

    return wrapped
A decorator to invoke a function periodically until it returns a truthy value .
1,228
def get_defaults():
    """Return a dict of possibly OS-dependent defaults (buffer sizes, URLs)."""
    DEFAULTS = {}
    if 'PC_PIPE_BUF' in os.pathconf_names:
        # FIX: the pipe is only needed to query PIPE_BUF; close both fds
        # (they used to leak)
        read_fd, write_fd = os.pipe()
        try:
            DEFAULTS['PIPE_BUF'] = os.fpathconf(read_fd, "PC_PIPE_BUF")
        finally:
            os.close(read_fd)
            os.close(write_fd)
    else:
        DEFAULTS['PIPE_BUF'] = 512
    # FIX: close the probe sockets once their buffer sizes are read
    tcp_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        DEFAULTS['TCP_SNDBUF'] = tcp_sock.getsockopt(socket.SOL_SOCKET,
                                                     socket.SO_SNDBUF)
        DEFAULTS['TCP_RCVBUF'] = tcp_sock.getsockopt(socket.SOL_SOCKET,
                                                     socket.SO_RCVBUF)
    finally:
        tcp_sock.close()
    udp_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        DEFAULTS['UDP_SNDBUF'] = udp_sock.getsockopt(socket.SOL_SOCKET,
                                                     socket.SO_SNDBUF)
        DEFAULTS['UDP_RCVBUF'] = udp_sock.getsockopt(socket.SOL_SOCKET,
                                                     socket.SO_RCVBUF)
    finally:
        udp_sock.close()
    DEFAULTS['WHATS_MYIP_URL'] = \
        'http://www.whatismyip.com/automation/n09230945.asp'
    return DEFAULTS
Returns a dictionary of variables and their possibly os - dependent defaults .
1,229
def site_url(url):
    """Determine the absolute server URL for *url*.

    Joins the relative *url* onto ``http://<hostname>[:port]``, omitting
    the port when it is the HTTP default 80.
    """
    base_url = 'http://%s' % socket.gethostname()
    # BUG FIX: `is not 80` compared object identity, not value
    if server.port != 80:
        base_url += ':%d' % server.port
    return urlparse.urljoin(base_url, url)
Determine the server URL .
1,230
def _get_external_id ( account_info ) : if all ( k in account_info for k in ( 'external_id' , 'external_method' ) ) : return dict ( id = account_info [ 'external_id' ] , method = account_info [ 'external_method' ] ) return None
Get external id from account info .
1,231
def oauth_get_user(client_id, account_info=None, access_token=None):
    """Retrieve the user for this request via token, external id, or email."""
    # 1) a known remote token maps straight to its account's user
    if access_token:
        token = RemoteToken.get_by_token(client_id, access_token)
        if token:
            return token.remote_account.user
    if account_info:
        # 2) a linked external identity
        external_id = _get_external_id(account_info)
        if external_id:
            identity = UserIdentity.query.filter_by(
                id=external_id['id'],
                method=external_id['method']).first()
            if identity:
                return identity.user
        # 3) fall back to email lookup
        email = account_info.get('user', {}).get('email')
        if email:
            return User.query.filter_by(email=email).one_or_none()
    return None
Retrieve user object for the given request .
1,232
def oauth_authenticate(client_id, user, require_existing_link=False):
    """Authenticate an OAuth authorized callback; return True on success."""
    if requires_confirmation(user):
        return False
    after_this_request(_commit)
    if not login_user(user, remember=False):
        return False
    if require_existing_link:
        # the user must already have this client linked; otherwise undo login
        account = RemoteAccount.get(user.id, client_id)
        if account is None:
            logout_user()
            return False
    return True
Authenticate an oauth authorized callback .
1,233
def oauth_register(form):
    """Register a user from *form* if it validates; return the user or None."""
    if not form.validate():
        return None
    data = form.to_dict()
    # register_user requires a password field; substitute an empty one and
    # null it out afterwards so the account has no usable password
    if not data.get('password'):
        data['password'] = ''
    user = register_user(**data)
    if not data['password']:
        user.password = None
    _datastore.commit()
    return user
Register user if possible .
1,234
def oauth_link_external_id(user, external_id=None):
    """Link *user* to an external id; raise AlreadyLinkedError on duplicates."""
    try:
        with db.session.begin_nested():
            identity = UserIdentity(id=external_id['id'],
                                    method=external_id['method'],
                                    id_user=user.id)
            db.session.add(identity)
    except IntegrityError:
        # the (id, method) pair is already linked to some user
        raise AlreadyLinkedError(user, external_id)
Link a user to an external id .
1,235
def oauth_unlink_external_id(external_id):
    """Unlink a user from an external id."""
    with db.session.begin_nested():
        query = UserIdentity.query.filter_by(id=external_id['id'],
                                             method=external_id['method'])
        query.delete()
Unlink a user from an external id .
1,236
def create_registrationform(*args, **kwargs):
    """Make a registration form with password and recaptcha fields removed."""
    class RegistrationForm(_security.confirm_register_form):
        # neither field applies to OAuth-driven registration
        password = None
        recaptcha = None

    return RegistrationForm(*args, **kwargs)
Make a registration form .
1,237
def fill_form(form, data):
    """Prefill *form* recursively from the mapping *data*; return the form."""
    for key, value in data.items():
        if not hasattr(form, key):
            continue
        field = getattr(form, key)
        if isinstance(value, dict):
            # nested mapping -> recurse into the sub-form
            fill_form(field, value)
        else:
            field.data = value
    return form
Prefill form with data .
1,238
def _get_csrf_disabled_param():
    """Return the kwarg disabling CSRF for the installed Flask-WTF version."""
    import flask_wtf
    from pkg_resources import parse_version

    # Flask-WTF >= 0.14 moved csrf control into the `meta` dict
    supports_meta = (parse_version(flask_wtf.__version__)
                     >= parse_version("0.14.0"))
    if supports_meta:
        return dict(meta={'csrf': False})
    return dict(csrf_enabled=False)
Return the right param to disable CSRF depending on WTF - Form version .
1,239
def run ( self ) : try : self . loader . find_and_load_step_definitions ( ) except StepLoadingError , e : print "Error loading step definitions:\n" , e return results = [ ] if self . explicit_features : features_files = self . explicit_features else : features_files = self . loader . find_feature_files ( ) if self . random : random . shuffle ( features_files ) if not features_files : self . output . print_no_features_found ( self . loader . base_dir ) return processes = Pool ( processes = self . parallelization ) test_results_it = processes . imap_unordered ( worker_process , [ ( self , filename ) for filename in features_files ] ) all_total = ParallelTotalResult ( ) for result in test_results_it : all_total += result [ 'total' ] sys . stdout . write ( result [ 'stdout' ] ) sys . stderr . write ( result [ 'stderr' ] ) return all_total
Find and load step definitions, and then find and load features under the base_path specified on the constructor.
1,240
def open_connection(ip, username, password, function, args, write=False,
                    conn_timeout=5, sess_timeout=300, port=22):
    """Open a Jaide session with the device and run *function* against it.

    On any connection/auth failure the error text is appended to the output
    banner instead of raising.  When *write* is not False the return value
    is a ``(write, output)`` tuple for the file-writing callback.
    """
    output = color('=' * 50 + '\nResults from device: %s\n' % ip, 'yel')
    try:
        conn = Jaide(ip, username, password, connect_timeout=conn_timeout,
                     session_timeout=sess_timeout, port=port)
        if write is not False:
            return write, output + function(conn, *args)
        else:
            return output + function(conn, *args)
    except errors.SSHError:
        output += color('Unable to connect to port %s on device: %s\n'
                        % (str(port), ip), 'red')
    except errors.AuthenticationError:
        output += color('Authentication failed for device: %s' % ip, 'red')
    except AuthenticationException:
        output += color('Authentication failed for device: %s' % ip, 'red')
    except SSHException as e:
        output += color('Error connecting to device: %s\nError: %s'
                        % (ip, str(e)), 'red')
    except socket.timeout:
        output += color('Timeout exceeded connecting to device: %s' % ip,
                        'red')
    except socket.gaierror:
        output += color('No route to host, or invalid hostname: %s' % ip,
                        'red')
    except socket.error:
        output += color('The device refused the connection on port %s, or '
                        'no route to host.' % port, 'red')
    if write is not False:
        return write, output
    return output
Open a Jaide session with the device .
1,241
def command(jaide, commands, format="text", xpath=False):
    """Run operational command(s), optionally xpath-filtering XML output."""
    output = ""
    for cmd in clean_lines(commands):
        expression = ""
        output += color('> ' + cmd + '\n', 'yel')
        # an inline "command % xpath" overrides any xpath argument
        if len(cmd.split('%')) == 2:
            expression = cmd.split('%')[1].strip()
            cmd = cmd.split('%')[0] + '\n'
        elif xpath is not False:
            expression = xpath
        if expression:
            try:
                output += jaide.op_cmd(command=cmd, req_format='xml',
                                       xpath_expr=expression) + '\n'
            except lxml.etree.XMLSyntaxError:
                output += color('Xpath expression resulted in no response.\n',
                                'red')
        else:
            output += jaide.op_cmd(cmd, req_format=format) + '\n'
    return output
Run an operational command .
1,242
def shell(jaide, commands):
    """Send shell commands to a device, echoing each command before its output."""
    out = ""
    for cmd in clean_lines(commands):
        out += color('> %s\n' % cmd, 'yel')
        out += jaide.shell_cmd(cmd) + '\n'
    return out
Send shell commands to a device .
1,243
def get_all_keys(reactor, key_type, value_type, etcd_address):
    """Return all keys/values from etcd, decoded per *key_type*/*value_type*.

    Twisted inlineCallbacks-style generator: yields the etcd range query
    and delivers the result via ``returnValue``.
    """
    etcd = Client(reactor, etcd_address)
    # b'\x00' .. b'\x00' is the etcd idiom for "the entire keyspace"
    result = yield etcd.get(b'\x00', range_end=b'\x00')
    decoded = {}
    for item in result.kvs:
        if key_type == u'utf8':
            key = item.key.decode('utf8')
        elif key_type == u'binary':
            key = binascii.b2a_base64(item.key).decode().strip()
        else:
            raise Exception('logic error')
        if value_type == u'json':
            value = json.loads(item.value.decode('utf8'))
        elif value_type == u'binary':
            value = binascii.b2a_base64(item.value).decode().strip()
        elif value_type == u'utf8':
            value = item.value.decode('utf8')
        else:
            raise Exception('logic error')
        decoded[key] = value
    returnValue(decoded)
Returns all keys from etcd .
1,244
def stop(self, timeout=None):
    """Send the GET request required to stop the scan.

    With no *timeout* the stop request is fire-and-forget.  Otherwise the
    stop request is sent and the scan status polled once per second,
    raising ScanStopTimeoutException if it is still running after
    *timeout* seconds.
    """
    assert self.scan_id is not None, 'No scan_id has been set'
    if timeout is None:
        url = '/scans/%s/stop' % self.scan_id
        self.conn.send_request(url, method='GET')
        return
    # fire the stop request, then wait for the scan to wind down
    self.stop()
    for _ in xrange(timeout):
        time.sleep(1)
        is_running = self.get_status()['is_running']
        if not is_running:
            return
    msg = 'Failed to stop the scan in %s seconds'
    raise ScanStopTimeoutException(msg % timeout)
Send the GET request required to stop the scan
1,245
def xrb_address_to_public_key(address):
    """Convert an ``xrb_`` address to its public key as bytes.

    Raises ValueError on a bad prefix, wrong length, or checksum mismatch.
    """
    address = bytearray(address, 'ascii')
    if not address.startswith(b'xrb_'):
        raise ValueError('address does not start with xrb_: %s' % address)
    if len(address) != 64:
        raise ValueError('address must be 64 chars long: %s' % address)
    address = bytes(address)
    # prepend '1111' padding so the base32 payload aligns, then drop the
    # three pad bytes after decoding
    key_b32xrb = b'1111' + address[4:56]
    key_bytes = b32xrb_decode(key_b32xrb)[3:]
    checksum = address[56:]
    if b32xrb_encode(address_checksum(key_bytes)) != checksum:
        raise ValueError('invalid address, invalid checksum: %s' % address)
    return key_bytes
Convert an xrb address to public key in bytes
1,246
def generate_account(seed=None, index=0):
    """Generate an ad-hoc account and keypair.

    When no *seed* is supplied a 32-byte seed is drawn from the OS CSPRNG.
    Returns a dict with the address plus private/public keys in both bytes
    and hex forms.
    """
    if not seed:
        # SECURITY FIX: the previous code built the seed with
        # random.choice, which is not suitable for key material;
        # os.urandom is a CSPRNG.
        import os
        seed = os.urandom(32)
    pair = keypair_from_seed(seed, index=index)
    result = {
        'address': public_key_to_xrb_address(pair['public']),
        'private_key_bytes': pair['private'],
        'public_key_bytes': pair['public'],
        'private_key_hex': hexlify(pair['private']),
        'public_key_hex': hexlify(pair['public']),
    }
    return result
Generates an adhoc account and keypair
1,247
def spasser(inbox, s=None):
    """Pass through inputs whose indices are in *s*.

    By default (``s=None``) the whole inbox is passed.  An explicit empty
    selection passes nothing.
    """
    # BUG FIX: `s or range(...)` treated an explicit empty selection like
    # None and passed the entire inbox; only default when s is None.
    seq = range(len(inbox)) if s is None else s
    return [item for i, item in enumerate(inbox) if i in seq]
Passes inputs with indices in s. By default passes the whole inbox.
1,248
def sjoiner(inbox, s=None, join=""):
    """String-join the inputs whose indices are in *s* using *join*."""
    selected = [item for i, item in enumerate(inbox) if i in s]
    return join.join(selected)
String joins input with indices in s .
1,249
def load_item(inbox, type="string", remove=True, buffer=None):
    """Load data from a file, FIFO or socket inbox item.

    The item kind is taken from the item itself ('file', 'fifo' or
    'socket'); *type* selects the in-memory representation: 'string' or
    'mmap' (memory-mapped access, regular files only).  Returns the data
    as a str or mmap object; with *remove* the source fd is closed and the
    file unlinked (mandatory for FIFOs and sockets).
    """
    is_file, is_fifo, is_socket = False, False, False
    file = inbox[0]
    try:
        file_type = file[0]
    except:
        raise ValueError("invalid inbox item")
    if file_type == "file":
        is_file = os.path.exists(file[1])
    elif file_type == "fifo":
        is_fifo = stat.S_ISFIFO(os.stat(file[1]).st_mode)
    elif file_type == "socket":
        is_socket = True
    else:
        raise ValueError("type: %s not undertood" % file_type)
    if (is_fifo or is_socket) and (type == 'mmap'):
        raise ValueError("mmap is not supported for FIFOs and sockets")
    if (is_fifo or is_socket) and not remove:
        raise ValueError("FIFOs and sockets have to be removed")
    start = 0
    if is_fifo or is_file:
        stop = os.stat(file[1]).st_size - 1
        fd = os.open(file[1], os.O_RDONLY)
        BUFFER = (buffer or PAPY_DEFAULTS['PIPE_BUF'])
    elif is_socket:
        host, port = socket.gethostbyname(file[1]), file[2]
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.connect((host, port))
        stop = -1  # unknown length; read until EOF
        fd = sock.fileno()
        BUFFER = (buffer or PAPY_DEFAULTS['TCP_RCVBUF'])
    else:
        raise ValueError("got unknown inbox: %s" % (repr(inbox)))
    if type == 'mmap':
        # align the mapping offset to the platform allocation granularity
        offset = start - (start % (getattr(mmap, 'ALLOCATIONGRANULARITY',
                                           None)
                                   or getattr(mmap, 'PAGESIZE')))
        start = start - offset
        stop = stop - offset + 1
        try:
            data = mmap.mmap(fd, stop, access=mmap.ACCESS_READ,
                             offset=offset)
        except TypeError:
            # older mmap without offset support
            data = mmap.mmap(fd, stop, access=mmap.ACCESS_READ)
        data.seek(start)
    elif type == 'string':
        data = []
        if stop == -1:
            # stream until EOF (FIFO/socket)
            while True:
                buffer_ = os.read(fd, BUFFER)
                if not buffer_:
                    break
                data.append(buffer_)
            data = "".join(data)
        else:
            os.lseek(fd, start, 0)
            data = os.read(fd, stop - start + 1)
    else:
        raise ValueError('type: %s not understood.' % type)
    if remove:
        if is_socket:
            sock.close()
        else:
            os.close(fd)
            os.unlink(file[1])
    else:
        os.close(fd)
    return data
Loads data from a file . Determines the file type automatically file fifo socket but allows to specify the representation type string or mmap for memory mapped access to the file . Returns the loaded item as a str or mmap object . Internally creates an item from a file .
1,250
def pickle_dumps(inbox):
    """Serialize the first input element with the fastest binary pickle protocol."""
    # GC is disabled around the dump purely as a speed optimization
    gc.disable()
    serialized = cPickle.dumps(inbox[0], cPickle.HIGHEST_PROTOCOL)
    gc.enable()
    return serialized
Serializes the first element of the input using the fastest binary pickle protocol.
1,251
def pickle_loads(inbox):
    """Deserialize the first input element using the pickle protocol."""
    gc.disable()  # GC off during the load, matching pickle_dumps
    deserialized = cPickle.loads(inbox[0])
    gc.enable()
    return deserialized
Deserializes the first element of the input using the pickle protocol .
1,252
def json_dumps(inbox):
    """Serialize the first input element as a JSON string."""
    gc.disable()  # GC off during the dump, matching the pickle workers
    serialized = json.dumps(inbox[0])
    gc.enable()
    return serialized
Serializes the first element of the input using the JSON protocol as implemented by the json Python 2 . 6 library .
1,253
def json_loads(inbox):
    """Deserialize the first input element from a JSON string."""
    gc.disable()  # GC off during the load, matching the pickle workers
    deserialized = json.loads(inbox[0])
    gc.enable()
    return deserialized
Deserializes the first element of the input using the JSON protocol as implemented by the json Python 2 . 6 library .
1,254
def at_time_validate(ctx, param, value):
    """Click callback validating the at_time commit option."""
    if value is not None:
        time_only = re.search(r'([0-2]\d)(:[0-5]\d){1,2}', value)
        dated = re.search(
            r'\d{4}-[01]\d-[0-3]\d [0-2]\d:[0-5]\d(:[0-5]\d)?', value)
        if time_only is None and dated is None:
            raise click.BadParameter("A commit at time must be in one of the "
                                     "two formats: 'hh:mm[:ss]' or "
                                     "'yyyy-mm-dd hh:mm[:ss]' (seconds are "
                                     "optional).")
        ctx.obj['at_time'] = value
    return value
Callback validating the at_time commit option .
1,255
def write_validate(ctx, param, value):
    """Click callback validating the -w/--write option."""
    if value == ("default", "default"):
        # option not supplied: no file output configured
        ctx.obj['out'] = None
        return
    try:
        mode, dest_file = (value[0], value[1])
    except IndexError:
        raise click.BadParameter('Expecting two arguments, one for how to '
                                 'output (s, single, m, multiple), and '
                                 'the second is a filepath where to put'
                                 ' the output.')
    if mode.lower() not in ['s', 'single', 'm', 'multiple']:
        raise click.BadParameter('The first argument of the -w/--write '
                                 'option must specifies whether to write'
                                 ' to one file per device, or all device'
                                 ' output to a single file. Valid options'
                                 ' are "s", "single", "m", and "multiple"')
    ctx.obj['out'] = (mode.lower(), dest_file)
Validate the - w option .
1,256
def write_out(input):
    """Callback writing script output to stdout or to per-device files.

    *input* is a ``(to_file, output)`` pair; ``to_file`` is "quiet", a
    ``(mode, dest_file)`` tuple, or anything non-iterable (prints to
    stdout).
    """
    to_file, output = input
    if to_file == "quiet":
        return
    try:
        mode, dest_file = to_file
    except TypeError:
        # not a (mode, file) pair -> just echo to the terminal
        click.echo(output)
    else:
        ip = output.split('device: ')[1].split('\n')[0].strip()
        if mode in ['m', 'multiple']:
            # one file per device: prefix the filename with the device IP
            dest_file = path.join(path.split(dest_file)[0],
                                  ip + "_" + path.split(dest_file)[1])
        try:
            out_file = open(dest_file, 'a+b')
        except IOError as e:
            print(color("Could not open output file '%s' for writing. "
                        "Output would have been:\n%s" % (dest_file, output),
                        'red'))
            print(color('Here is the error for opening the output file:'
                        + str(e), 'red'))
        else:
            click.echo(output, nl=False, file=out_file)
            print(color('%s output appended to: %s' % (ip, dest_file)))
            out_file.close()
Callback function to write the output from the script .
1,257
def main(ctx, host, password, port, quiet, session_timeout, connect_timeout,
         username):
    """Manipulate one or more Junos devices (CLI entry point)."""
    ctx.obj['hosts'] = list(clean_lines(host))
    ctx.obj['conn'] = {
        "username": username,
        "password": password,
        "port": port,
        "session_timeout": session_timeout,
        "connect_timeout": connect_timeout,
    }
    if quiet:
        ctx.obj['out'] = "quiet"
Manipulate one or more Junos devices .
1,258
def compare(ctx, commands):
    """Run 'show | compare' for set commands on each host in parallel."""
    pool = multiprocessing.Pool(multiprocessing.cpu_count() * 2)
    conn = ctx.obj['conn']
    for ip in ctx.obj['hosts']:
        pool.apply_async(
            wrap.open_connection,
            args=(ip, conn['username'], conn['password'], wrap.compare,
                  [commands], ctx.obj['out'], conn['connect_timeout'],
                  conn['session_timeout'], conn['port']),
            callback=write_out)
    pool.close()
    pool.join()
Run show | compare for set commands .
1,259
def diff_config(ctx, second_host, mode):
    """Compare configuration between each host and *second_host* in parallel."""
    pool = multiprocessing.Pool(multiprocessing.cpu_count() * 2)
    conn = ctx.obj['conn']
    for ip in ctx.obj['hosts']:
        pool.apply_async(
            wrap.open_connection,
            args=(ip, conn['username'], conn['password'], wrap.diff_config,
                  [second_host, mode], ctx.obj['out'],
                  conn['connect_timeout'], conn['session_timeout'],
                  conn['port']),
            callback=write_out)
    pool.close()
    pool.join()
Config comparison between two devices .
1,260
def get_command(self, ctx, cmd_name):
    """Resolve *cmd_name*, allowing unambiguous prefixes of command names."""
    exact = click.Group.get_command(self, ctx, cmd_name)
    if exact is not None:
        return exact
    matches = [name for name in self.list_commands(ctx)
               if name.startswith(cmd_name)]
    if not matches:
        return None
    if len(matches) == 1:
        return click.Group.get_command(self, ctx, matches[0])
    # more than one prefix match: abort with the candidates listed
    ctx.fail('Command ambiguous, could be: %s' % ', '.join(sorted(matches)))
Allow for partial commands .
1,261
def convert(value, from_unit, to_unit):
    """Convert *value* from *from_unit* to *to_unit*.

    Floats are rejected to avoid precision loss; *value* must be a Decimal,
    int, or numeric string.  Returns a normalized Decimal.
    """
    if isinstance(value, float):
        raise ValueError(
            "float values can lead to unexpected precision loss, please use a"
            " Decimal or string eg."
            " convert('%s', %r, %r)" % (value, from_unit, to_unit))
    for unit in (from_unit, to_unit):
        if unit not in UNITS_TO_RAW:
            raise ValueError('unknown unit: %r' % unit)
    try:
        value = Decimal(value)
    except Exception:
        raise ValueError('not a number: %r' % value)
    ratio = UNITS_TO_RAW[from_unit] / UNITS_TO_RAW[to_unit]
    return (value * ratio).normalize()
Converts a value from from_unit units to to_unit units
1,262
def endpoint(request):
    """SNS endpoint view: verify the request, then dispatch the notification.

    Validates the HTTP method, topic ARN, JSON body, required keys,
    notification type, signing-certificate domain and (optionally) the
    signature before emitting the `notification` signal and handing the
    payload to the appropriate handler.
    """
    if request.method != 'POST':
        raise Http404
    if hasattr(settings, 'BOUNCY_TOPIC_ARN'):
        if 'HTTP_X_AMZ_SNS_TOPIC_ARN' not in request.META:
            return HttpResponseBadRequest('No TopicArn Header')
        if (not request.META['HTTP_X_AMZ_SNS_TOPIC_ARN']
                in settings.BOUNCY_TOPIC_ARN):
            return HttpResponseBadRequest('Bad Topic')
    if isinstance(request.body, str):
        request_body = request.body
    else:
        request_body = request.body.decode()
    try:
        data = json.loads(request_body)
    except ValueError:
        logger.warning('Notification Not Valid JSON: {}'.format(request_body))
        return HttpResponseBadRequest('Not Valid JSON')
    if not set(VITAL_NOTIFICATION_FIELDS) <= set(data):
        logger.warning('Request Missing Necessary Keys')
        return HttpResponseBadRequest('Request Missing Necessary Keys')
    if not data['Type'] in ALLOWED_TYPES:
        logger.info('Notification Type Not Known %s', data['Type'])
        return HttpResponseBadRequest('Unknown Notification Type')
    # the signing certificate must come from an AWS SNS domain
    domain = urlparse(data['SigningCertURL']).netloc
    pattern = getattr(settings, 'BOUNCY_CERT_DOMAIN_REGEX',
                      r"sns.[a-z0-9\-]+.amazonaws.com$")
    if not re.search(pattern, domain):
        logger.warning('Improper Certificate Location %s',
                       data['SigningCertURL'])
        return HttpResponseBadRequest('Improper Certificate Location')
    if (getattr(settings, 'BOUNCY_VERIFY_CERTIFICATE', True)
            and not verify_notification(data)):
        # BUG FIX: the log call had a dangling '%s' with no argument
        logger.error('Verification Failure %s', data.get('MessageId'))
        return HttpResponseBadRequest('Improper Signature')
    signals.notification.send(sender='bouncy_endpoint', notification=data,
                              request=request)
    if data['Type'] == 'SubscriptionConfirmation':
        if not getattr(settings, 'BOUNCY_AUTO_SUBSCRIBE', True):
            raise Http404
        return approve_subscription(data)
    elif data['Type'] == 'UnsubscribeConfirmation':
        logger.info('UnsubscribeConfirmation Not Handled')
        return HttpResponse('UnsubscribeConfirmation Not Handled')
    try:
        message = json.loads(data['Message'])
    except ValueError:
        logger.info('Non-Valid JSON Message Received')
        return HttpResponse('Message is not valid JSON')
    return process_message(message, data)
Endpoint that SNS accesses . Includes logic verifying request
1,263
def process_message(message, notification):
    """Dispatch a decoded SNS message to the handler for its notification type."""
    if not set(VITAL_MESSAGE_FIELDS) <= set(message):
        logger.info('JSON Message Missing Vital Fields')
        return HttpResponse('Missing Vital Fields')
    handlers = {
        'Complaint': process_complaint,
        'Bounce': process_bounce,
        'Delivery': process_delivery,
    }
    handler = handlers.get(message['notificationType'])
    if handler is not None:
        return handler(message, notification)
    return HttpResponse('Unknown Notification Type')
Function to process a JSON message delivered from Amazon
1,264
def process_bounce(message, notification):
    """Function to process a bounce notification.

    Creates one Bounce row per bounced recipient, emits the feedback
    signal for each created row, then returns an HTTP 200 response.
    """
    mail = message['mail']
    bounce = message['bounce']
    bounces = []
    for recipient in bounce['bouncedRecipients']:
        bounces += [Bounce.objects.create(
            sns_topic=notification['TopicArn'],
            sns_messageid=notification['MessageId'],
            mail_timestamp=clean_time(mail['timestamp']),
            mail_id=mail['messageId'],
            mail_from=mail['source'],
            address=recipient['emailAddress'],
            feedback_id=bounce['feedbackId'],
            feedback_timestamp=clean_time(bounce['timestamp']),
            # SES marks permanent bounces with bounceType == 'Permanent'
            hard=bool(bounce['bounceType'] == 'Permanent'),
            bounce_type=bounce['bounceType'],
            bounce_subtype=bounce['bounceSubType'],
            # per-message/per-recipient details may be absent, hence .get()
            reporting_mta=bounce.get('reportingMTA'),
            action=recipient.get('action'),
            status=recipient.get('status'),
            diagnostic_code=recipient.get('diagnosticCode'))]
    # NOTE(review): this loop rebinds `bounce`, shadowing the message dict
    # above; harmless here because that dict is no longer read.
    for bounce in bounces:
        signals.feedback.send(
            sender=Bounce,
            instance=bounce,
            message=message,
            notification=notification)
    logger.info('Logged %s Bounce(s)', str(len(bounces)))
    return HttpResponse('Bounce Processed')
Function to process a bounce notification
1,265
def process_complaint(message, notification):
    """Function to process a complaint notification.

    Creates one Complaint row per complained recipient, emits the feedback
    signal for each created row, then returns an HTTP 200 response.
    """
    mail = message['mail']
    complaint = message['complaint']
    # arrivalDate is optional in the SNS payload
    if 'arrivalDate' in complaint:
        arrival_date = clean_time(complaint['arrivalDate'])
    else:
        arrival_date = None
    complaints = []
    for recipient in complaint['complainedRecipients']:
        complaints += [Complaint.objects.create(
            sns_topic=notification['TopicArn'],
            sns_messageid=notification['MessageId'],
            mail_timestamp=clean_time(mail['timestamp']),
            mail_id=mail['messageId'],
            mail_from=mail['source'],
            address=recipient['emailAddress'],
            feedback_id=complaint['feedbackId'],
            feedback_timestamp=clean_time(complaint['timestamp']),
            # optional fields, hence .get()
            useragent=complaint.get('userAgent'),
            feedback_type=complaint.get('complaintFeedbackType'),
            arrival_date=arrival_date)]
    # NOTE(review): this loop rebinds `complaint`, shadowing the message
    # dict above; harmless here because that dict is no longer read.
    for complaint in complaints:
        signals.feedback.send(
            sender=Complaint,
            instance=complaint,
            message=message,
            notification=notification)
    logger.info('Logged %s Complaint(s)', str(len(complaints)))
    return HttpResponse('Complaint Processed')
Function to process a complaint notification
1,266
def process_delivery(message, notification):
    """Function to process a delivery notification.

    Creates one Delivery row per recipient, emits the feedback signal for
    each created row, then returns an HTTP 200 response.
    """
    mail = message['mail']
    delivery = message['delivery']
    # the delivery timestamp is optional in the SNS payload
    if 'timestamp' in delivery:
        delivered_datetime = clean_time(delivery['timestamp'])
    else:
        delivered_datetime = None
    deliveries = []
    for eachrecipient in delivery['recipients']:
        deliveries += [Delivery.objects.create(
            sns_topic=notification['TopicArn'],
            sns_messageid=notification['MessageId'],
            mail_timestamp=clean_time(mail['timestamp']),
            mail_id=mail['messageId'],
            mail_from=mail['source'],
            address=eachrecipient,
            delivered_time=delivered_datetime,
            processing_time=int(delivery['processingTimeMillis']),
            smtp_response=delivery['smtpResponse'])]
    for eachdelivery in deliveries:
        signals.feedback.send(
            sender=Delivery,
            instance=eachdelivery,
            message=message,
            notification=notification)
    logger.info('Logged %s Deliveries(s)', str(len(deliveries)))
    return HttpResponse('Delivery Processed')
Function to process a delivery notification
1,267
def click_on_label(step, label):
    """Click on the <label> element whose normalized text equals *label*."""
    with AssertContextManager(step):
        xpath = '//label[normalize-space(text()) = "%s"]' % label
        world.browser.find_element_by_xpath(str(xpath)).click()
Click on a label
1,268
def element_focused(step, id):
    """Check that the element with the given id currently has focus."""
    target = world.browser.find_element_by_xpath(str('id("{id}")'.format(id=id)))
    active = world.browser.switch_to_active_element()
    assert_true(step, target == active)
Check if the element is focused
1,269
def element_not_focused(step, id):
    """Check that the element with the given id does NOT have focus."""
    target = world.browser.find_element_by_xpath(str('id("{id}")'.format(id=id)))
    active = world.browser.switch_to_active_element()
    assert_false(step, target == active)
Check if the element is not focused
1,270
def input_has_value(step, field_name, value):
    """Check that the form input element named *field_name* holds *value*."""
    with AssertContextManager(step):
        field = find_any_field(world.browser, DATE_FIELDS + TEXT_FIELDS, field_name)
        # find_any_field returns False when no matching field exists
        assert_false(step, field is False,
                     'Can not find a field named "%s"' % field_name)
        assert_equals(field.get_attribute('value'), value)
Check that the form input element has given value .
1,271
def submit_form_id(step, id):
    """Submit the form having the given id."""
    xpath = 'id("{id}")'.format(id=id)
    world.browser.find_element_by_xpath(str(xpath)).submit()
Submit the form having given id .
1,272
def submit_form_action(step, url):
    """Submit the form whose action attribute equals *url*."""
    world.browser.find_element_by_xpath(str('//form[@action="%s"]' % url)).submit()
Submit the form having given action URL .
1,273
def check_alert(step, text):
    """Check the alert text."""
    try:
        assert_equals(Alert(world.browser).text, text)
    except WebDriverException:
        # best-effort: some drivers raise when no alert is present —
        # deliberately swallowed, matching the original behaviour
        pass
Check the alert text
1,274
def page_title(step, title):
    """Check that the page title matches the given one."""
    with AssertContextManager(step):
        actual_title = world.browser.title
        assert_equals(actual_title, title)
Check that the page title matches the given one .
1,275
def get(self, name):
    """Get a tag by name, decoded through TagSchema."""
    response = self.service.get_id(self.base, name)
    return self.service.decode(TagSchema(), response)
Get a tag .
1,276
def edit(self, resource):
    """Edit a tag: encode the mutable fields, send, decode the reply."""
    body = self.service.encode(
        TagSchema(only=('name', 'configs', 'devices', 'packages', 'results')),
        resource)
    response = self.service.edit(self.base, resource.name, body)
    return self.service.decode(TagSchema(), response)
Edit a tag .
1,277
def remaining(self):
    """Get the remaining time-to-live of this lease.

    Raises ``Expired`` when the lease is already known (or discovered)
    to be expired; otherwise fires the Deferred with the TTL in seconds.
    """
    if self._expired:
        raise Expired()
    obj = {
        u'ID': self.lease_id,
    }
    data = json.dumps(obj).encode('utf8')
    # etcd v3alpha gRPC-gateway endpoint for lease TTL queries
    url = u'{}/v3alpha/kv/lease/timetolive'.format(self._client._url).encode()
    response = yield treq.post(url, data, headers=self._client._REQ_HEADERS)
    obj = yield treq.json_content(response)
    ttl = obj.get(u'TTL', None)
    if not ttl:
        # no TTL in the reply: the server no longer knows this lease
        self._expired = True
        raise Expired()
    returnValue(ttl)
Get the remaining time - to - live of this lease .
1,278
def revoke(self):
    """Revokes a lease.  All keys attached to the lease will expire and
    be deleted.

    Fires the Deferred with the parsed response header (or ``None``);
    raises ``Expired`` when the lease is already expired.
    """
    if self._expired:
        raise Expired()
    obj = {
        u'ID': self.lease_id,
    }
    data = json.dumps(obj).encode('utf8')
    url = u'{}/v3alpha/kv/lease/revoke'.format(self._client._url).encode()
    response = yield treq.post(url, data, headers=self._client._REQ_HEADERS)
    obj = yield treq.json_content(response)
    header = Header._parse(obj[u'header']) if u'header' in obj else None
    # once revoked, mark locally so later calls on this lease fail fast
    self._expired = True
    returnValue(header)
Revokes a lease . All keys attached to the lease will expire and be deleted .
1,279
def refresh(self):
    """Keeps the lease alive by streaming keep alive requests from the
    client to the server and streaming keep alive responses from the
    server to the client.

    Fires the Deferred with the parsed response header (or ``None``);
    raises ``Expired`` when the lease is already (or newly found to be)
    expired.
    """
    if self._expired:
        raise Expired()
    obj = {
        u'ID': self.lease_id,
    }
    data = json.dumps(obj).encode('utf8')
    url = u'{}/v3alpha/lease/keepalive'.format(self._client._url).encode()
    response = yield treq.post(url, data, headers=self._client._REQ_HEADERS)
    obj = yield treq.json_content(response)
    # the keepalive gateway wraps its payload in a 'result' object
    if u'result' not in obj:
        raise Exception('bogus lease refresh response (missing "result") in {}'.format(obj))
    ttl = obj[u'result'].get(u'TTL', None)
    if not ttl:
        # server did not renew: the lease is gone
        self._expired = True
        raise Expired()
    header = Header._parse(obj[u'result'][u'header']) if u'header' in obj[u'result'] else None
    self._expired = False
    returnValue(header)
Keeps the lease alive by streaming keep alive requests from the client to the server and streaming keep alive responses from the server to the client .
1,280
def read_links_file(self, file_path):
    """Read links and associated categories for specified articles from a
    text file, one ``<link> <category>`` pair per line.

    Blank lines are skipped.  Returns a list of ``(category, link)``
    tuples in file order.
    """
    articles = []
    with open(file_path) as f:
        for line in f:
            line = line.strip()
            if not line:
                continue
            # maxsplit with None separator tolerates runs of whitespace
            # between link and category (the original single-space split
            # raised ValueError on e.g. double spaces)
            link, category = line.split(None, 1)
            articles.append((category.rstrip(), link.strip()))
    return articles
Read links and associated categories for specified articles from a text file, separated by a space
1,281
def call(self, action, params=None):
    """Makes an RPC call to the server and returns the json response.

    :param action: RPC action name, sent as the 'action' key.
    :param params: optional dict of extra request parameters; copied, so
        the caller's dict is never mutated (the original wrote 'action'
        into the caller-supplied dict in place).
    :raises RPCException: when the server reply contains an 'error' key.
    """
    payload = dict(params) if params else {}
    payload['action'] = action
    resp = self.session.post(self.host, json=payload, timeout=self.timeout)
    result = resp.json()
    if 'error' in result:
        raise RPCException(result['error'])
    return result
Makes an RPC call to the server and returns the json response
1,282
def _process_value(self, value, type):
    """Process a value that will be sent to backend.

    Strings and lists pass through unchanged; everything else is
    JSON-encoded.  (``type`` is accepted for interface compatibility but
    currently unused; it also shadows the builtin — kept for callers.)
    """
    if isinstance(value, six.string_types + (list,)):
        return value
    return json.dumps(value)
Process a value that will be sent to backend
1,283
def block_account(self, hash):
    """Returns the account containing block."""
    response = self.call(
        'block_account',
        {"hash": self._process_value(hash, 'block')})
    return response['account']
Returns the account containing block
1,284
def block_count(self):
    """Reports the number of blocks in the ledger and unchecked
    synchronizing blocks, with all counts converted to int."""
    counts = self.call('block_count')
    return {key: int(value) for key, value in counts.items()}
Reports the number of blocks in the ledger and unchecked synchronizing blocks
1,285
def mrai_from_raw(self, amount):
    """Divide a raw amount down by the Mrai ratio."""
    response = self.call(
        'mrai_from_raw',
        {"amount": self._process_value(amount, 'int')})
    return int(response['amount'])
Divide a raw amount down by the Mrai ratio .
1,286
def mrai_to_raw(self, amount):
    """Multiply an Mrai amount by the Mrai ratio."""
    response = self.call(
        'mrai_to_raw',
        {"amount": self._process_value(amount, 'int')})
    return int(response['amount'])
Multiply an Mrai amount by the Mrai ratio .
1,287
def krai_from_raw(self, amount):
    """Divide a raw amount down by the krai ratio."""
    response = self.call(
        'krai_from_raw',
        {"amount": self._process_value(amount, 'int')})
    return int(response['amount'])
Divide a raw amount down by the krai ratio .
1,288
def krai_to_raw(self, amount):
    """Multiply a krai amount by the krai ratio."""
    response = self.call(
        'krai_to_raw',
        {"amount": self._process_value(amount, 'int')})
    return int(response['amount'])
Multiply an krai amount by the krai ratio .
1,289
def rai_from_raw(self, amount):
    """Divide a raw amount down by the rai ratio."""
    response = self.call(
        'rai_from_raw',
        {"amount": self._process_value(amount, 'int')})
    return int(response['amount'])
Divide a raw amount down by the rai ratio .
1,290
def rai_to_raw(self, amount):
    """Multiply a rai amount by the rai ratio."""
    response = self.call(
        'rai_to_raw',
        {"amount": self._process_value(amount, 'int')})
    return int(response['amount'])
Multiply an rai amount by the rai ratio .
1,291
def payment_begin(self, wallet):
    """Begin a new payment session.

    Searches the wallet for an account marked available with a zero
    balance; returns that account (marking it unavailable), or creates a
    new account in the wallet and returns it.
    """
    response = self.call(
        'payment_begin',
        {"wallet": self._process_value(wallet, 'wallet')})
    return response['account']
Begin a new payment session. Searches the wallet for an account that's marked as available and has a 0 balance. If one is found, the account number is returned and marked as unavailable. If no account is found, a new account is created, placed in the wallet, and returned.
1,292
def payment_init(self, wallet):
    """Marks all accounts in wallet as available for being used as a
    payment session.  Returns True when the node reports 'Ready'."""
    response = self.call(
        'payment_init',
        {"wallet": self._process_value(wallet, 'wallet')})
    return response['status'] == 'Ready'
Marks all accounts in wallet as available for being used as a payment session .
1,293
def payment_end(self, account, wallet):
    """End a payment session, marking the account as available again.

    Returns True when the node replies with an empty object (success).
    """
    payload = {
        "account": self._process_value(account, 'account'),
        "wallet": self._process_value(wallet, 'wallet'),
    }
    return self.call('payment_end', payload) == {}
End a payment session . Marks the account as available for use in a payment session .
1,294
def representatives(self, count=None, sorting=False):
    """Returns a mapping of representative account to its voting weight
    (as int).  ``count`` limits the result; ``sorting`` asks the node to
    sort it."""
    payload = {}
    if count is not None:
        payload['count'] = self._process_value(count, 'int')
    if sorting:
        payload['sorting'] = self._process_value(sorting, 'strbool')
    response = self.call('representatives', payload)
    reps = response.get('representatives') or {}
    return {account: int(weight) for account, weight in reps.items()}
Returns a list of pairs of representative and its voting weight
1,295
def version(self):
    """Returns the node's RPC version info with the numeric fields
    converted to int."""
    resp = self.call('version')
    resp['rpc_version'] = int(resp['rpc_version'])
    resp['store_version'] = int(resp['store_version'])
    return resp
Returns the node s RPC version
1,296
def _extract_email ( gh ) : return next ( ( x . email for x in gh . emails ( ) if x . verified and x . primary ) , None )
Get user email from github .
1,297
def authorized(resp, remote):
    """Authorized callback handler for GitHub.

    Handles known OAuth error replies before delegating the normal
    sign-up flow to ``authorized_signup_handler``.
    """
    if resp and 'error' in resp:
        if resp['error'] == 'bad_verification_code':
            # the one-time code was already used/expired: restart login
            return redirect(url_for(
                'invenio_oauthclient.login', remote_app='github'))
        elif resp['error'] in ['incorrect_client_credentials',
                               'redirect_uri_mismatch']:
            # these indicate a server-side misconfiguration, not user error
            raise OAuthResponseError(
                'Application mis-configuration in GitHub', remote, resp)
    return authorized_signup_handler(resp, remote)
Authorized callback handler for GitHub .
1,298
def initial_variant_sequences_from_reads(
        variant_reads,
        max_nucleotides_before_variant=None,
        max_nucleotides_after_variant=None):
    """Get all unique sequences from reads spanning a variant locus.

    This will include partial sequences, since reads may start in the
    middle of the region around the variant.  Returns one VariantSequence
    per unique (prefix, alt, suffix) triple.
    """
    groups = group_unique_sequences(
        variant_reads,
        max_prefix_size=max_nucleotides_before_variant,
        max_suffix_size=max_nucleotides_after_variant)
    sequences = []
    for (prefix, alt, suffix), reads in groups.items():
        sequences.append(VariantSequence(
            prefix=prefix,
            alt=alt,
            suffix=suffix,
            reads=reads))
    return sequences
Get all unique sequences from reads spanning a variant locus . This will include partial sequences due to reads starting in the middle of the sequence around a variant .
1,299
def trim_variant_sequences(variant_sequences, min_variant_sequence_coverage):
    """Trim VariantSequences to the desired coverage and then combine any
    subsequences which get generated.

    Returns the trimmed-and-collapsed list; logs how many survived.
    """
    n_total = len(variant_sequences)
    # presumably trim_by_coverage drops bases supported by fewer than
    # min_variant_sequence_coverage reads — TODO confirm in VariantSequence
    trimmed_variant_sequences = [
        variant_sequence.trim_by_coverage(min_variant_sequence_coverage)
        for variant_sequence in variant_sequences
    ]
    # trimming can turn one sequence into a substring of another;
    # collapse_substrings merges such pairs
    collapsed_variant_sequences = collapse_substrings(trimmed_variant_sequences)
    n_after_trimming = len(collapsed_variant_sequences)
    logger.info(
        "Kept %d/%d variant sequences after read coverage trimming to >=%dx",
        n_after_trimming,
        n_total,
        min_variant_sequence_coverage)
    return collapsed_variant_sequences
Trim VariantSequences to desired coverage and then combine any subsequences which get generated .