Columns: idx (int64, 0 to 63k), question (string, 53 to 5.28k chars), target (string, 5 to 805 chars)
1,100
def account_groups_and_extra_data ( account , resource , refresh_timedelta = None ) : updated = datetime . utcnow ( ) modified_since = updated if refresh_timedelta is not None : modified_since += refresh_timedelta modified_since = modified_since . isoformat ( ) last_update = account . extra_data . get ( 'updated' , modified_since ) if last_update > modified_since : return account . extra_data . get ( 'groups' , [ ] ) groups = fetch_groups ( resource [ 'Group' ] ) extra_data = current_app . config . get ( 'OAUTHCLIENT_CERN_EXTRA_DATA_SERIALIZER' , fetch_extra_data ) ( resource ) account . extra_data . update ( groups = groups , updated = updated . isoformat ( ) , ** extra_data ) return groups
Fetch account groups and extra data from resource if necessary .
1,101
def extend_identity ( identity , groups ) : provides = set ( [ UserNeed ( current_user . email ) ] + [ RoleNeed ( '{0}@cern.ch' . format ( name ) ) for name in groups ] ) identity . provides |= provides session [ OAUTHCLIENT_CERN_SESSION_KEY ] = provides
Extend identity with roles based on CERN groups .
1,102
def get_dict_from_response ( response ) : result = { } if getattr ( response , '_resp' ) and response . _resp . code > 400 : return result for i in response . data : k = i [ 'Type' ] . replace ( REMOTE_APP_RESOURCE_SCHEMA , '' ) result . setdefault ( k , list ( ) ) result [ k ] . append ( i [ 'Value' ] ) return result
Prepare a new mapping with Values grouped by Type.
1,103
def get_resource ( remote ) : cached_resource = session . pop ( 'cern_resource' , None ) if cached_resource : return cached_resource response = remote . get ( REMOTE_APP_RESOURCE_API_URL ) dict_response = get_dict_from_response ( response ) session [ 'cern_resource' ] = dict_response return dict_response
Query CERN Resources to get user info and groups .
1,104
def on_identity_changed ( sender , identity ) : if isinstance ( identity , AnonymousIdentity ) : return client_id = current_app . config [ 'CERN_APP_CREDENTIALS' ] [ 'consumer_key' ] account = RemoteAccount . get ( user_id = current_user . get_id ( ) , client_id = client_id , ) groups = [ ] if account : remote = find_remote_by_client_id ( client_id ) resource = get_resource ( remote ) refresh = current_app . config . get ( 'OAUTHCLIENT_CERN_REFRESH_TIMEDELTA' , OAUTHCLIENT_CERN_REFRESH_TIMEDELTA ) groups . extend ( account_groups_and_extra_data ( account , resource , refresh_timedelta = refresh ) ) extend_identity ( identity , groups )
Store groups in session whenever identity changes .
1,105
def get(cls, user_id, client_id):
    return cls.query.filter_by(
        user_id=user_id,
        client_id=client_id,
    ).first()
Get RemoteAccount object for user .
1,106
def create(cls, user_id, client_id, extra_data):
    with db.session.begin_nested():
        account = cls(
            user_id=user_id,
            client_id=client_id,
            extra_data=extra_data or dict(),
        )
        db.session.add(account)
    return account
Create new remote account for user .
1,107
def update_token(self, token, secret):
    if self.access_token != token or self.secret != secret:
        with db.session.begin_nested():
            self.access_token = token
            self.secret = secret
            db.session.add(self)
Update token with new values .
1,108
def get ( cls , user_id , client_id , token_type = '' , access_token = None ) : args = [ RemoteAccount . id == RemoteToken . id_remote_account , RemoteAccount . user_id == user_id , RemoteAccount . client_id == client_id , RemoteToken . token_type == token_type , ] if access_token : args . append ( RemoteToken . access_token == access_token ) return cls . query . options ( db . joinedload ( 'remote_account' ) ) . filter ( * args ) . first ( )
Get RemoteToken for user .
1,109
def get_by_token ( cls , client_id , access_token , token_type = '' ) : return cls . query . options ( db . joinedload ( 'remote_account' ) ) . filter ( RemoteAccount . id == RemoteToken . id_remote_account , RemoteAccount . client_id == client_id , RemoteToken . token_type == token_type , RemoteToken . access_token == access_token , ) . first ( )
Get RemoteAccount object for token .
1,110
def bulk_export ( self , config_ids = None , device_ids = None , package_ids = None , result_ids = None , exclude_captures = False ) : if config_ids is None : config_ids = [ ] if device_ids is None : device_ids = [ ] if package_ids is None : package_ids = [ ] if result_ids is None : result_ids = [ ] json = { 'configs' : map ( int , config_ids ) , 'devices' : map ( int , device_ids ) , 'packages' : map ( int , package_ids ) , 'results' : map ( int , result_ids ) , 'options' : { 'exclude_captures' : exclude_captures } } resp = self . service . post ( self . base , json = json , stream = True ) b = io . BytesIO ( ) stream . stream_response_to_file ( resp , path = b ) resp . close ( ) b . seek ( 0 ) return ( b , self . service . filename ( resp ) )
Bulk export a set of configs, devices, packages and results.
1,111
def _init_report ( self ) : self . sections = [ ] self . section_names = [ ] try : if os . path . isdir ( self . directory ) is False : if self . verbose : print ( "Created directory {}" . format ( self . directory ) ) os . mkdir ( self . directory ) for this in self . _to_create : try : os . mkdir ( self . directory + os . sep + this ) except : pass except Exception : pass finally : temp_path = easydev . get_package_location ( "reports" ) temp_path += os . sep + "reports" + os . sep + "resources" filenames = glob . glob ( os . sep . join ( [ temp_path , "css" , "*.css" ] ) ) filenames += glob . glob ( os . sep . join ( [ self . searchpath , '*.css' ] ) ) filenames += self . extra_css_list for filename in filenames : target = os . sep . join ( [ self . directory , 'css' ] ) if os . path . isfile ( target ) is False : shutil . copy ( filename , target ) for filename in [ 'sorttable.js' , 'highlight.pack.js' , "jquery-1.12.3.min.js" ] : target = os . sep . join ( [ self . directory , 'js' , filename ] ) if os . path . isfile ( target ) is False : filename = os . sep . join ( [ temp_path , "javascript" , filename ] ) shutil . copy ( filename , target ) for filename in self . extra_js_list : basename = os . path . basename ( filename ) target = os . sep . join ( [ self . directory , 'js' , basename ] ) if os . path . isfile ( target ) is False : shutil . copy ( filename , target )
Create the report directory and return the directory name.
1,112
def get_time_now ( self ) : import datetime import getpass username = getpass . getuser ( ) timenow = str ( datetime . datetime . now ( ) ) timenow = timenow . split ( '.' ) [ 0 ] msg = '<div class="date">Created on ' + timenow msg += " by " + username + '</div>' return msg
Returns a time stamp
1,113
def _track_class_related_field ( cls , field ) : ( field , related_field ) = field . split ( '__' , 1 ) field_obj = cls . _meta . get_field ( field ) related_cls = field_obj . remote_field . model related_name = field_obj . remote_field . get_accessor_name ( ) if not hasattr ( related_cls , '_tracked_related_fields' ) : setattr ( related_cls , '_tracked_related_fields' , { } ) if related_field not in related_cls . _tracked_related_fields . keys ( ) : related_cls . _tracked_related_fields [ related_field ] = [ ] related_cls . _tracked_related_fields [ related_field ] . append ( ( field , related_name ) ) _add_signals_to_cls ( related_cls ) if isinstance ( related_cls . _meta . get_field ( related_field ) , ManyToManyField ) : m2m_changed . connect ( tracking_m2m , sender = getattr ( related_cls , related_field ) . through , dispatch_uid = repr ( related_cls ) , )
Track a field on a related model
1,114
def _track_class_field(cls, field):
    if '__' in field:
        _track_class_related_field(cls, field)
        return
    # Raises FieldDoesNotExist if the field is unknown.
    cls._meta.get_field(field)
    if isinstance(cls._meta.get_field(field), ManyToManyField):
        m2m_changed.connect(
            tracking_m2m,
            sender=getattr(cls, field).through,
            dispatch_uid=repr(cls),
        )
Track a field on the current model
1,115
def _track_class(cls, fields):
    assert not getattr(cls, '_is_tracked', False)
    for field in fields:
        _track_class_field(cls, field)
    _add_signals_to_cls(cls)
    cls._is_tracked = True
    cls._tracked_fields = [field for field in fields if '__' not in field]
Track fields on the specified model
1,116
def _add_get_tracking_url(cls):
    def get_tracking_url(self):
        url = reverse('admin:tracking_fields_trackingevent_changelist')
        object_id = '{0}%3A{1}'.format(
            ContentType.objects.get_for_model(self).pk, self.pk
        )
        return '{0}?object={1}'.format(url, object_id)

    if not hasattr(cls, 'get_tracking_url'):
        setattr(cls, 'get_tracking_url', get_tracking_url)
Add a method to get the tracking url of an object .
1,117
def track(*fields):
    def inner(cls):
        _track_class(cls, fields)
        _add_get_tracking_url(cls)
        return cls
    return inner
Decorator used to track changes on a Model's fields.
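For orientation, a minimal usage sketch of the decorator above, assuming django-tracking-fields' import location; the Book and Author models and their fields are hypothetical:

from django.db import models
from tracking_fields.decorators import track  # assumed import path

@track('name', 'author__email')
class Book(models.Model):
    # Changes to "name" and to the related author's "email" are recorded.
    name = models.CharField(max_length=100)
    author = models.ForeignKey('Author', on_delete=models.CASCADE)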
1,118
def indent(value, n=2, character=' '):
    prefix = n * character
    return '\n'.join(prefix + line for line in value.splitlines())
Indent a value by n characters.
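A quick usage sketch of the helper above:

>>> indent('first\nsecond', n=4)
'    first\n    second'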
1,119
def check_instance ( function ) : def wrapper ( self , * args , ** kwargs ) : func_trans = { "commit" : manager . Manager , "compare_config" : manager . Manager , "commit_check" : manager . Manager , "device_info" : manager . Manager , "diff_config" : manager . Manager , "health_check" : manager . Manager , "interface_errors" : manager . Manager , "op_cmd" : paramiko . client . SSHClient , "shell_cmd" : paramiko . client . SSHClient , "scp_pull" : paramiko . client . SSHClient , "scp_push" : paramiko . client . SSHClient } if self . username == "root" and function . __name__ == "op_cmd" : if not self . _session : self . conn_type = "paramiko" self . connect ( ) if not self . _shell : self . conn_type = "root" self . connect ( ) self . shell_to_cli ( ) elif function . __name__ == 'shell_cmd' : if not self . _shell : self . conn_type = "shell" self . connect ( ) self . cli_to_shell ( ) if isinstance ( self . _session , func_trans [ function . __name__ ] ) : if function . __name__ in [ 'scp_pull' , 'scp_push' ] : if not isinstance ( self . _scp , SCPClient ) : self . conn_type = "scp" self . connect ( ) else : self . disconnect ( ) if function . __name__ == "op_cmd" : self . conn_type = "paramiko" elif function . __name__ in [ "scp_pull" , "scp_push" ] : self . conn_type = "scp" else : self . conn_type = "ncclient" self . connect ( ) return function ( self , * args , ** kwargs ) return wrapper
Wrapper that tests the type of _session .
1,120
def commit ( self , commands = "" , confirmed = None , comment = None , at_time = None , synchronize = False , req_format = 'text' ) : if not commands : commands = 'annotate system ""' clean_cmds = [ ] for cmd in clean_lines ( commands ) : clean_cmds . append ( cmd ) self . lock ( ) self . _session . load_configuration ( action = 'set' , config = commands ) results = "" if confirmed : results = self . _session . commit ( confirmed = True , timeout = str ( confirmed ) , comment = comment , synchronize = synchronize ) else : results = self . _session . commit ( comment = comment , at_time = at_time , synchronize = synchronize ) self . unlock ( ) if results : if req_format == 'xml' : return results results = ET . fromstring ( results . tostring ) out = '' for i in results . iter ( ) : if i . tag == 'commit-check-success' : out += 'configuration check succeeds\n' elif i . tag == 'commit-success' : out += 'commit complete\n' elif i . tag == 'ok' : out += 'commit complete\n' elif i . text is not None : if i . text . strip ( ) + '\n' != '\n' : out += i . text . strip ( ) + '\n' elif i . text is None : if i . tag + '\n' != '\n' : out += i . tag + '\n' return out return False
Perform a commit operation .
1,121
def commit_check ( self , commands = "" , req_format = "text" ) : if not commands : raise InvalidCommandError ( 'No commands specified' ) clean_cmds = [ ] for cmd in clean_lines ( commands ) : clean_cmds . append ( cmd ) self . lock ( ) self . _session . load_configuration ( action = 'set' , config = clean_cmds ) results = ET . fromstring ( self . _session . validate ( source = 'candidate' ) . tostring ) self . unlock ( ) if req_format == "xml" : return ET . tostring ( results ) out = "" for i in results . iter ( ) : if i . tag == 'commit-check-success' : out += 'configuration check succeeds\n' elif i . text is not None : if i . text . strip ( ) + '\n' != '\n' : out += i . text . strip ( ) + '\n' elif i . text is None : if i . tag + '\n' != '\n' : out += i . tag + '\n' return out
Execute a commit check operation .
1,122
def compare_config ( self , commands = "" , req_format = "text" ) : if not commands : raise InvalidCommandError ( 'No commands specified' ) clean_cmds = [ cmd for cmd in clean_lines ( commands ) ] self . lock ( ) self . _session . load_configuration ( action = 'set' , config = clean_cmds ) out = self . _session . compare_configuration ( ) self . unlock ( ) if req_format . lower ( ) == "xml" : return out return out . xpath ( 'configuration-information/configuration-output' ) [ 0 ] . text
Execute a show | compare against the specified commands .
1,123
def connect ( self ) : if self . conn_type == 'paramiko' : self . _session = paramiko . SSHClient ( ) logger = logging . Logger . manager . getLogger ( 'paramiko.transport' ) logger . setLevel ( logging . CRITICAL ) self . _session . set_missing_host_key_policy ( paramiko . AutoAddPolicy ( ) ) self . _session . connect ( hostname = self . host , username = self . username , password = self . password , port = self . port , timeout = self . connect_timeout ) if self . conn_type == 'scp' : self . _scp_session = paramiko . SSHClient ( ) logger = logging . Logger . manager . getLogger ( 'paramiko.transport' ) logger . setLevel ( logging . CRITICAL ) self . _scp_session . set_missing_host_key_policy ( paramiko . AutoAddPolicy ( ) ) self . _scp_session . connect ( hostname = self . host , username = self . username , password = self . password , port = self . port , timeout = self . connect_timeout ) self . _scp = SCPClient ( self . _scp_session . get_transport ( ) ) elif self . conn_type == "ncclient" : self . _session = manager . connect ( host = self . host , port = self . port , username = self . username , password = self . password , timeout = self . connect_timeout , device_params = { 'name' : 'junos' } , hostkey_verify = False ) elif self . conn_type == 'shell' : if not self . _session : self . conn_type = 'paramiko' self . connect ( ) self . conn_type = 'shell' if not self . _shell : self . _shell = self . _session . invoke_shell ( ) time . sleep ( 2 ) if self . username != 'root' and not self . _in_cli : self . _in_cli = True if not self . cli_to_shell ( ) : self . _shell . recv ( 9999 ) elif self . conn_type == 'root' : if not self . _shell : self . _shell = self . _session . invoke_shell ( ) time . sleep ( 2 ) if not self . shell_to_cli ( ) : self . _shell . recv ( 9999 ) self . _update_timeout ( self . session_timeout )
Establish a connection to the device .
1,124
def _copy_status ( self , filename , size , sent ) : output = "Transferred %.0f%% of the file %s" % ( ( float ( sent ) / float ( size ) * 100 ) , path . normpath ( filename ) ) output += ( ' ' * ( 120 - len ( output ) ) ) if filename != self . _filename : if self . _filename is not None : print ( '' ) self . _filename = filename print ( output , end = '\r' )
Echo status of an SCP operation .
1,125
def device_info ( self ) : resp = self . _session . get_software_information ( format = 'xml' ) hostname = resp . xpath ( '//software-information/host-name' ) [ 0 ] . text model = resp . xpath ( '//software-information/product-model' ) [ 0 ] . text version = 'Unknown' if resp . xpath ( '//junos-version' ) : try : version = resp . xpath ( '//junos-version' ) [ 0 ] . text except IndexError : pass elif resp . xpath ( "//package-information[name = 'junos-version']" ) : try : version = ( resp . xpath ( "//package-information[name = 'junos-version']/comment" ) [ 0 ] . text ) . split ( ) [ 1 ] except IndexError : pass else : try : version = ( ( resp . xpath ( '//software-information/package-information/comment' ) [ 0 ] . text . split ( '[' ) [ 1 ] . split ( ']' ) [ 0 ] ) ) except IndexError : pass resp = self . _session . get_system_uptime_information ( format = 'xml' ) try : current_time = resp . xpath ( '//current-time/date-time' ) [ 0 ] . text except IndexError : current_time = 'Unknown' try : uptime = resp . xpath ( '//uptime-information/up-time' ) [ 0 ] . text except IndexError : uptime = 'Unknown' show_hardware = self . _session . get_chassis_inventory ( format = 'xml' ) try : chassis_module = show_hardware . xpath ( '//chassis-inventory/chassis/chassis-module/description' ) [ 0 ] . text except IndexError : chassis_module = 'Unknown' if ( 'EX' or 'ex' or 'Ex' ) in chassis_module : serial_num = '' for eng in show_hardware . xpath ( '//chassis-inventory/chassis/chassis-module' ) : if 'Routing Engine' in eng . xpath ( 'name' ) [ 0 ] . text : serial_num += ( eng . xpath ( 'name' ) [ 0 ] . text + ' Serial #: ' + eng . xpath ( 'serial-number' ) [ 0 ] . text ) else : try : serial_num = ( 'Chassis Serial Number: ' + show_hardware . xpath ( '//chassis-inventory/chassis/serial-number' ) [ 0 ] . text ) except IndexError : serial_num = 'Chassis Serial Number: ' + 'Unknown (virtual machine?)' return ( 'Hostname: %s\nModel: %s\nJunos Version: %s\n%s\nCurrent Time:' ' %s\nUptime: %s\n' % ( hostname , model , version , serial_num , current_time , uptime ) )
Pull basic device information .
1,126
def diff_config ( self , second_host , mode = 'stanza' ) : second_conn = manager . connect ( host = second_host , port = self . port , username = self . username , password = self . password , timeout = self . connect_timeout , device_params = { 'name' : 'junos' } , hostkey_verify = False ) command = 'show configuration' if mode == 'set' : command += ' | display set' config1 = self . _session . command ( command , format = 'text' ) config1 = '' . join ( [ snippet . text . lstrip ( '\n' ) for snippet in config1 . xpath ( '//configuration-output' ) ] ) config2 = second_conn . command ( command , format = 'text' ) config2 = '' . join ( [ snippet . text . lstrip ( '\n' ) for snippet in config2 . xpath ( '//configuration-output' ) ] ) return difflib . unified_diff ( config1 . splitlines ( ) , config2 . splitlines ( ) , self . host , second_host )
Generate configuration differences with a second device .
1,127
def _error_parse ( self , interface , face ) : try : error_list = interface . xpath ( face + '-error-list' ) [ 0 ] . getchildren ( ) except IndexError : pass else : for x in range ( len ( error_list ) ) : if error_list [ x ] . tag == "carrier-transitions" : if int ( error_list [ x ] . text . strip ( ) ) > 50 : yield " has greater than 50 flaps." elif int ( error_list [ x ] . text . strip ( ) ) > 0 : yield " has %s of %s." % ( error_list [ x ] . text . strip ( ) , error_list [ x ] . tag . strip ( ) )
Parse the extensive xml output of an interface and yield errors .
1,128
def health_check ( self ) : output = 'Chassis Alarms:\n\t' chassis_alarms = self . _session . command ( "show chassis alarms" ) chassis_alarms = chassis_alarms . xpath ( '//alarm-detail' ) system_alarms = self . _session . command ( "show system alarms" ) system_alarms = system_alarms . xpath ( '//alarm-detail' ) chass = self . _session . command ( command = "show chassis routing-engine" , format = 'text' ) . xpath ( '//output' ) [ 0 ] . text proc = self . _session . command ( "show system processes extensive" ) proc = proc . xpath ( 'output' ) [ 0 ] . text . split ( '\n' ) if chassis_alarms == [ ] : output += 'No chassis alarms active.\n' else : for i in chassis_alarms : output += ( i . xpath ( 'alarm-class' ) [ 0 ] . text . strip ( ) + ' Alarm \t' '\t' + i . xpath ( 'alarm-time' ) [ 0 ] . text . strip ( ) + '\n\t' + i . xpath ( 'alarm-description' ) [ 0 ] . text . strip ( ) + '\n' ) output += '\nSystem Alarms: \n\t' if system_alarms == [ ] : output += 'No system alarms active.\n' else : for i in system_alarms : output += ( i . xpath ( 'alarm-class' ) [ 0 ] . text . strip ( ) + ' Alarm ' '\t\t' + i . xpath ( 'alarm-time' ) [ 0 ] . text . strip ( ) + '\n\t' + i . xpath ( 'alarm-description' ) [ 0 ] . text . strip ( ) + '\n' ) output += '\n' + chass output += ( '\n\nTop 5 busiest processes (high mgd values likely from ' 'script execution):\n' ) for line_number in range ( 8 , 14 ) : output += proc [ line_number ] + '\n' return output
Pull health and alarm information from the device .
1,129
def interface_errors ( self ) : output = [ ] dev_response = self . _session . command ( 'sh interfaces extensive' ) ints = dev_response . xpath ( '//physical-interface' ) ints += dev_response . xpath ( '//logical-interface' ) for i in ints : int_name = i . xpath ( 'name' ) [ 0 ] . text . strip ( ) if ( ( 'ge' or 'fe' or 'ae' or 'xe' or 'so' or 'et' or 'vlan' or 'lo0' or 'irb' ) in int_name ) : try : status = ( i . xpath ( 'admin-status' ) [ 0 ] . text . strip ( ) + '/' + i . xpath ( 'oper-status' ) [ 0 ] . text . strip ( ) ) except IndexError : pass else : for error in self . _error_parse ( i , "input" ) : output . append ( "%s (%s)%s" % ( int_name , status , error ) ) for error in self . _error_parse ( i , "output" ) : output . append ( "%s (%s)%s" % ( int_name , status , error ) ) if output == [ ] : output . append ( 'No interface errors were detected on this device.' ) return '\n' . join ( output ) + '\n'
Parse show interfaces extensive and return interfaces with errors .
1,130
def lock(self):
    if isinstance(self._session, manager.Manager):
        self._session.lock()
Lock the candidate config. Requires ncclient.manager.Manager.
1,131
def op_cmd ( self , command , req_format = 'text' , xpath_expr = "" ) : if not command : raise InvalidCommandError ( "Parameter 'command' cannot be empty" ) if req_format . lower ( ) == 'xml' or xpath_expr : command = command . strip ( ) + ' | display xml' command = command . strip ( ) + ' | no-more\n' out = '' if self . username == 'root' : self . _shell . send ( command ) time . sleep ( 3 ) while self . _shell . recv_ready ( ) : out += self . _shell . recv ( 999999 ) time . sleep ( .75 ) out = '\n' . join ( out . split ( '\n' ) [ 1 : - 2 ] ) else : stdin , stdout , stderr = self . _session . exec_command ( command = command , timeout = float ( self . session_timeout ) ) stdin . close ( ) while not stdout . channel . exit_status_ready ( ) : out += stdout . read ( ) stdout . close ( ) while not stderr . channel . exit_status_ready ( ) : out += stderr . read ( ) stderr . close ( ) return out if not xpath_expr else xpath ( out , xpath_expr )
Execute an operational mode command .
1,132
def unlock(self):
    if isinstance(self._session, manager.Manager):
        self._session.unlock()
Unlock the candidate config .
1,133
def intercept(obj, methodname, wrapper):
    original = getattr(obj, methodname)

    def replacement(*args, **kwargs):
        wrapfn = wrapper(*args, **kwargs)
        wrapfn.send(None)
        result = original(*args, **kwargs)
        try:
            wrapfn.send(result)
        except StopIteration:
            return result
        else:
            raise AssertionError('Generator did not stop')

    def unwrap():
        setattr(obj, methodname, original)

    replacement.unwrap = unwrap
    setattr(obj, methodname, replacement)
Wrap an existing method on an object with the provided generator; when the generator yields control, it will be sent the original method's return value.
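A minimal sketch of how such a wrapper generator is used; obj and greet are hypothetical names:

import time

def timing_wrapper(*args, **kwargs):
    # Runs up to the first yield before the original method is called.
    start = time.time()
    result = yield  # receives the original method's return value
    print('call took %.3fs, returned %r' % (time.time() - start, result))

intercept(obj, 'greet', timing_wrapper)  # wrap obj.greet
obj.greet('world')                       # wrapper observes the call and its result
obj.greet.unwrap()                       # restore the original method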
1,134
def next ( self ) : if self . r == self . repeats : self . i = ( self . i + 1 ) % self . lenght self . r = 0 self . r += 1 if self . stopping and self . i == 0 and self . r == 1 : self . stopped = True if self . i == 0 and self . stopped : raise StopIteration else : iterator = self . iterators [ self . i ] return iterator . next ( )
Returns the next element or raises StopIteration if stopped .
1,135
def next(self):
    return self.iterator.next(task=self.task, timeout=self.timeout, block=self.block)
Returns a result if available within timeout, else raises a TimeoutError exception. See documentation for NuMap.next.
1,136
def write_template(fn, lang="python"):
    with open(fn, "wb") as fh:
        if lang == "python":
            fh.write(PY_TEMPLATE)
        elif lang == "bash":
            fh.write(SH_TEMPLATE)
Write a language-specific script template to a file.
1,137
def script ( inbox , cfg ) : script_name = cfg [ "id" ] script_id = str ( abs ( hash ( ( cfg [ "id" ] , ) + tuple ( inbox [ 0 ] . values ( ) ) ) ) ) [ 0 : 8 ] args = { } args [ "params" ] = dict ( cfg [ "params" ] ) args [ "in" ] = { } for in_port in cfg [ "in" ] : for inin_ports in inbox : in_path = inin_ports . get ( in_port , None ) if ( in_path is not None ) : args [ "in" ] [ in_port ] = in_path break if len ( args [ "in" ] ) < len ( cfg [ "in" ] ) : raise Exception ( "not all in_ports connected, got: %s" % ( args [ "in" ] , ) ) args [ "out" ] = { } out = { } for i , ( out_port , out_ext ) in enumerate ( cfg [ "out" ] ) : if cfg [ "in" ] == tuple ( out_port_ for out_port_ , _ in cfg [ "out" ] ) : pfx = args [ "in" ] [ cfg [ "in" ] [ i ] ] . split ( "/" ) [ - 1 ] . split ( "." ) [ 0 ] + "_" base = cfg [ "id" ] else : pfx = args [ "in" ] [ cfg [ "in" ] [ 0 ] ] . split ( "/" ) [ - 1 ] . split ( "." ) [ 0 ] + "_" base = cfg [ "id" ] + "-" + out_port if out_ext : out_path = cfg [ "dir" ] + "/" + pfx + base + "." + out_ext else : out_path = cfg [ "dir" ] + "/" + pfx + base args [ "out" ] [ out_port ] = out_path out [ out_port ] = out_path ret = _eval_script ( cfg [ "evaluator" ] , cfg [ "preamble" ] , cfg [ "dir" ] , cfg [ "executable" ] , cfg [ "script" ] , args ) if ret [ 0 ] != 0 : raise Exception ( ret [ 0 ] , cfg [ "script" ] , ret [ 1 ] , ret [ 2 ] ) return out
Execute arbitrary scripts .
1,138
def edit ( self , resource ) : schema = JobSchema ( exclude = ( 'id' , 'status' , 'options' , 'package_name' , 'config_name' , 'device_name' , 'result_id' , 'user_id' , 'created' , 'updated' , 'automatic' , 'run_at' ) ) json = self . service . encode ( schema , resource ) schema = JobSchema ( ) resp = self . service . edit ( self . base , resource . name , json ) return self . service . decode ( schema , resp )
Edit a job .
1,139
def launch ( self , resource ) : schema = JobSchema ( exclude = ( 'id' , 'status' , 'package_name' , 'config_name' , 'device_name' , 'result_id' , 'user_id' , 'created' , 'updated' , 'automatic' ) ) json = self . service . encode ( schema , resource ) schema = JobSchema ( ) resp = self . service . create ( self . base , json ) return self . service . decode ( schema , resp )
Launch a new job .
1,140
def bulk_launch ( self , jobs = None , filter = None , all = False ) : json = None if jobs is not None : schema = JobSchema ( exclude = ( 'id' , 'status' , 'package_name' , 'config_name' , 'device_name' , 'result_id' , 'user_id' , 'created' , 'updated' , 'automatic' ) ) jobs_json = self . service . encode ( schema , jobs , many = True ) json = { self . RESOURCE : jobs_json } schema = JobSchema ( ) resp = self . service . post ( self . base , params = { 'bulk' : 'launch' , 'filter' : filter , 'all' : all } , json = json ) return self . service . decode ( schema , resp , many = True )
Bulk launch a set of jobs .
1,141
def get ( self , id , seq , line ) : schema = HighlightSchema ( ) resp = self . service . get_id ( self . _base ( id , seq ) , line ) return self . service . decode ( schema , resp )
Get a highlight .
1,142
def create_or_edit ( self , id , seq , resource ) : schema = HighlightSchema ( exclude = ( 'id' , 'seq' ) ) json = self . service . encode ( schema , resource ) schema = HighlightSchema ( ) resp = self . service . edit ( self . _base ( id , seq ) , resource . line , json ) return self . service . decode ( schema , resp )
Create or edit a highlight .
1,143
def create(self, id, seq, resource):
    return self.create_or_edit(id, seq, resource)
Create a highlight .
1,144
def edit(self, id, seq, resource):
    return self.create_or_edit(id, seq, resource)
Edit a highlight .
1,145
def delete(self, id, seq, line):
    return self.service.delete_id(self._base(id, seq), line)
Delete a highlight .
1,146
def post_ext_init ( state ) : app = state . app app . config . setdefault ( 'OAUTHCLIENT_SITENAME' , app . config . get ( 'THEME_SITENAME' , 'Invenio' ) ) app . config . setdefault ( 'OAUTHCLIENT_BASE_TEMPLATE' , app . config . get ( 'BASE_TEMPLATE' , 'invenio_oauthclient/base.html' ) ) app . config . setdefault ( 'OAUTHCLIENT_COVER_TEMPLATE' , app . config . get ( 'COVER_TEMPLATE' , 'invenio_oauthclient/base_cover.html' ) ) app . config . setdefault ( 'OAUTHCLIENT_SETTINGS_TEMPLATE' , app . config . get ( 'SETTINGS_TEMPLATE' , 'invenio_oauthclient/settings/base.html' ) )
Setup blueprint .
1,147
def login ( remote_app ) : oauth = current_app . extensions [ 'oauthlib.client' ] if remote_app not in oauth . remote_apps : return abort ( 404 ) next_param = get_safe_redirect_target ( arg = 'next' ) callback_url = url_for ( '.authorized' , remote_app = remote_app , _external = True , ) state_token = serializer . dumps ( { 'app' : remote_app , 'next' : next_param , 'sid' : _create_identifier ( ) , } ) return oauth . remote_apps [ remote_app ] . authorize ( callback = callback_url , state = state_token , )
Send user to remote application for authentication .
1,148
def authorized ( remote_app = None ) : if remote_app not in current_oauthclient . handlers : return abort ( 404 ) state_token = request . args . get ( 'state' ) try : assert state_token state = serializer . loads ( state_token ) assert state [ 'sid' ] == _create_identifier ( ) assert state [ 'app' ] == remote_app set_session_next_url ( remote_app , state [ 'next' ] ) except ( AssertionError , BadData ) : if current_app . config . get ( 'OAUTHCLIENT_STATE_ENABLED' , True ) or ( not ( current_app . debug or current_app . testing ) ) : abort ( 403 ) try : handler = current_oauthclient . handlers [ remote_app ] ( ) except OAuthException as e : if e . type == 'invalid_response' : abort ( 500 ) else : raise return handler
Authorized handler callback .
1,149
def signup ( remote_app ) : if remote_app not in current_oauthclient . signup_handlers : return abort ( 404 ) res = current_oauthclient . signup_handlers [ remote_app ] [ 'view' ] ( ) return abort ( 404 ) if res is None else res
Extra signup step .
1,150
def disconnect ( remote_app ) : if remote_app not in current_oauthclient . disconnect_handlers : return abort ( 404 ) ret = current_oauthclient . disconnect_handlers [ remote_app ] ( ) db . session . commit ( ) return ret
Disconnect user from remote application .
1,151
def address_checksum(address):
    address_bytes = address
    h = blake2b(digest_size=5)
    h.update(address_bytes)
    checksum = bytearray(h.digest())
    checksum.reverse()
    return checksum
Returns the checksum, in bytes, for an address given in bytes.
1,152
def keypair_from_seed(seed, index=0):
    h = blake2b(digest_size=32)
    h.update(seed + struct.pack(">L", index))
    priv_key = h.digest()
    pub_key = private_to_public_key(priv_key)
    return {'private': priv_key, 'public': pub_key}
Generates a deterministic keypair from a seed, based on an index.
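A hedged usage sketch, assuming a 32-byte wallet seed as in Nano-style key derivation:

import os

seed = os.urandom(32)  # stands in for a stored wallet seed
account0 = keypair_from_seed(seed, index=0)
account1 = keypair_from_seed(seed, index=1)
assert account0['public'] != account1['public']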
1,153
def verify_signature(message, signature, public_key):
    try:
        ed25519_blake2.checkvalid(signature, message, public_key)
    except ed25519_blake2.SignatureMismatch:
        return False
    return True
Verifies that a signature is correct for a message signed with the private key corresponding to public_key.
1,154
def sign_message(message, private_key, public_key=None):
    if public_key is None:
        public_key = private_to_public_key(private_key)
    return ed25519_blake2.signature_unsafe(message, private_key, public_key)
Signs a message using private_key and public_key (derived from private_key if not given).
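A round-trip sketch combining the helpers above; seed is the hypothetical wallet seed from the previous example:

keys = keypair_from_seed(seed, index=0)
signature = sign_message(b'block digest', keys['private'], keys['public'])
assert verify_signature(b'block digest', signature, keys['public'])
assert not verify_signature(b'tampered digest', signature, keys['public'])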
1,155
def check_for_lounge_upgrade ( self , email , password ) : schema = ReleaseSchema ( ) resp = self . service . post ( self . base + 'lounge/check/' , json = { 'email' : email , 'password' : password } ) return self . service . decode ( schema , resp )
Check the CDRouter Support Lounge for eligible upgrades using your Support Lounge email & password .
1,156
def lounge_upgrade ( self , email , password , release_id ) : schema = UpgradeSchema ( ) resp = self . service . post ( self . base + 'lounge/upgrade/' , json = { 'email' : email , 'password' : password , 'release' : { 'id' : int ( release_id ) } } ) return self . service . decode ( schema , resp )
Download & install an upgrade from the CDRouter Support Lounge using your Support Lounge email & password . Please note that any running tests will be stopped .
1,157
def lounge_update_license ( self ) : schema = UpgradeSchema ( ) resp = self . service . post ( self . base + 'license/' ) return self . service . decode ( schema , resp )
Download & install a license for your CDRouter system from the CDRouter Support Lounge .
1,158
def manual_update_license ( self , fd , filename = 'cdrouter.lic' ) : schema = UpgradeSchema ( ) resp = self . service . post ( self . base + 'license/' , files = { 'file' : ( filename , fd ) } ) return self . service . decode ( schema , resp )
Update the license on your CDRouter system manually by uploading a .lic license from the CDRouter Support Lounge.
1,159
def space ( self ) : schema = SpaceSchema ( ) resp = self . service . get ( self . base + 'space/' ) return self . service . decode ( schema , resp )
Get system disk space usage .
1,160
def interfaces ( self , addresses = False ) : schema = InterfaceSchema ( ) resp = self . service . get ( self . base + 'interfaces/' , params = { 'addresses' : addresses } ) return self . service . decode ( schema , resp , many = True )
Get system interfaces .
1,161
def _set_original_fields ( instance ) : original_fields = { } def _set_original_field ( instance , field ) : if instance . pk is None : original_fields [ field ] = None else : if isinstance ( instance . _meta . get_field ( field ) , ForeignKey ) : original_fields [ field ] = getattr ( instance , '{0}_id' . format ( field ) ) else : original_fields [ field ] = getattr ( instance , field ) for field in getattr ( instance , '_tracked_fields' , [ ] ) : _set_original_field ( instance , field ) for field in getattr ( instance , '_tracked_related_fields' , { } ) . keys ( ) : _set_original_field ( instance , field ) instance . _original_fields = original_fields instance . _original_fields [ 'pk' ] = instance . pk
Save field values only for non-m2m fields.
1,162
def _has_changed ( instance ) : for field , value in instance . _original_fields . items ( ) : if field != 'pk' and not isinstance ( instance . _meta . get_field ( field ) , ManyToManyField ) : try : if field in getattr ( instance , '_tracked_fields' , [ ] ) : if isinstance ( instance . _meta . get_field ( field ) , ForeignKey ) : if getattr ( instance , '{0}_id' . format ( field ) ) != value : return True else : if getattr ( instance , field ) != value : return True except TypeError : return True return False
Check if some tracked fields have changed
1,163
def _has_changed_related ( instance ) : tracked_related_fields = getattr ( instance , '_tracked_related_fields' , { } ) . keys ( ) for field , value in instance . _original_fields . items ( ) : if field != 'pk' and not isinstance ( instance . _meta . get_field ( field ) , ManyToManyField ) : if field in tracked_related_fields : if isinstance ( instance . _meta . get_field ( field ) , ForeignKey ) : if getattr ( instance , '{0}_id' . format ( field ) ) != value : return True else : if getattr ( instance , field ) != value : return True return False
Check if some related tracked fields have changed
1,164
def _create_event ( instance , action ) : user = None user_repr = repr ( user ) if CUSER : user = CuserMiddleware . get_user ( ) user_repr = repr ( user ) if user is not None and user . is_anonymous : user = None return TrackingEvent . objects . create ( action = action , object = instance , object_repr = repr ( instance ) , user = user , user_repr = user_repr , )
Create a new event, getting the user if django-cuser is available.
1,165
def _create_tracked_field ( event , instance , field , fieldname = None ) : fieldname = fieldname or field if isinstance ( instance . _meta . get_field ( field ) , ForeignKey ) : model = instance . _meta . get_field ( field ) . remote_field . model pk = instance . _original_fields [ field ] try : old_value = model . objects . get ( pk = pk ) except model . DoesNotExist : old_value = None else : old_value = instance . _original_fields [ field ] return TrackedFieldModification . objects . create ( event = event , field = fieldname , old_value = _serialize_field ( old_value ) , new_value = _serialize_field ( getattr ( instance , field ) ) )
Create a TrackedFieldModification for the instance .
1,166
def _create_create_tracking_event ( instance ) : event = _create_event ( instance , CREATE ) for field in instance . _tracked_fields : if not isinstance ( instance . _meta . get_field ( field ) , ManyToManyField ) : _create_tracked_field ( event , instance , field )
Create a TrackingEvent and TrackedFieldModification for a CREATE event .
1,167
def _create_update_tracking_event ( instance ) : event = _create_event ( instance , UPDATE ) for field in instance . _tracked_fields : if not isinstance ( instance . _meta . get_field ( field ) , ManyToManyField ) : try : if isinstance ( instance . _meta . get_field ( field ) , ForeignKey ) : value = getattr ( instance , '{0}_id' . format ( field ) ) else : value = getattr ( instance , field ) if instance . _original_fields [ field ] != value : _create_tracked_field ( event , instance , field ) except TypeError : _create_tracked_field ( event , instance , field )
Create a TrackingEvent and TrackedFieldModification for an UPDATE event .
1,168
def _create_update_tracking_related_event ( instance ) : events = { } for field , related_fields in instance . _tracked_related_fields . items ( ) : if not isinstance ( instance . _meta . get_field ( field ) , ManyToManyField ) : if isinstance ( instance . _meta . get_field ( field ) , ForeignKey ) : value = getattr ( instance , '{0}_id' . format ( field ) ) else : value = getattr ( instance , field ) if instance . _original_fields [ field ] != value : for related_field in related_fields : events . setdefault ( related_field , [ ] ) . append ( field ) for related_field , fields in events . items ( ) : try : related_instances = getattr ( instance , related_field [ 1 ] ) except ObjectDoesNotExist : continue if hasattr ( related_instances , 'all' ) : related_instances = related_instances . all ( ) else : related_instances = [ related_instances ] for related_instance in related_instances : event = _create_event ( related_instance , UPDATE ) for field in fields : fieldname = '{0}__{1}' . format ( related_field [ 0 ] , field ) _create_tracked_field ( event , instance , field , fieldname = fieldname )
Create a TrackingEvent and TrackedFieldModification for an UPDATE event for each related model .
1,169
def _get_m2m_field ( model , sender ) : for field in getattr ( model , '_tracked_fields' , [ ] ) : if isinstance ( model . _meta . get_field ( field ) , ManyToManyField ) : if getattr ( model , field ) . through == sender : return field for field in getattr ( model , '_tracked_related_fields' , { } ) . keys ( ) : if isinstance ( model . _meta . get_field ( field ) , ManyToManyField ) : if getattr ( model , field ) . through == sender : return field
Get the field name from a model and a sender from m2m_changed signal .
1,170
def tracking_save ( sender , instance , raw , using , update_fields , ** kwargs ) : if _has_changed ( instance ) : if instance . _original_fields [ 'pk' ] is None : _create_create_tracking_event ( instance ) else : _create_update_tracking_event ( instance ) if _has_changed_related ( instance ) : _create_update_tracking_related_event ( instance ) if _has_changed ( instance ) or _has_changed_related ( instance ) : _set_original_fields ( instance )
Post-save: detect creation or changes and log them. We need post_save to have the object for a create.
1,171
def from_entry_dict ( cls , entry_dict ) : try : _type = entry_dict [ 'type' ] _id = entry_dict [ 'id' ] _time = entry_dict [ 'time' ] message = entry_dict [ 'message' ] severity = entry_dict [ 'severity' ] except KeyError : msg = ( 'Missing expected log entry attribute. Log entry' ' object is:\n\n%s' ) raise APIException ( msg % json . dumps ( entry_dict , indent = 4 ) ) return cls ( _type , message , _time , severity , _id )
This is a constructor for the LogEntry class .
1,172
def list ( self , id , seq ) : schema = CaptureSchema ( exclude = ( 'id' , 'seq' ) ) resp = self . service . list ( self . _base ( id , seq ) ) return self . service . decode ( schema , resp , many = True )
Get a list of captures .
1,173
def get ( self , id , seq , intf ) : schema = CaptureSchema ( ) resp = self . service . get_id ( self . _base ( id , seq ) , intf ) return self . service . decode ( schema , resp )
Get a capture .
1,174
def download ( self , id , seq , intf , inline = False ) : resp = self . service . get_id ( self . _base ( id , seq ) , intf , params = { 'format' : 'cap' , 'inline' : inline } , stream = True ) b = io . BytesIO ( ) stream . stream_response_to_file ( resp , path = b ) resp . close ( ) b . seek ( 0 ) return ( b , self . service . filename ( resp ) )
Download a capture as a PCAP file .
1,175
def summary ( self , id , seq , intf , filter = None , inline = False ) : schema = SummarySchema ( ) resp = self . service . get ( self . _base ( id , seq ) + str ( intf ) + '/summary/' , params = { 'filter' : filter , 'inline' : inline } ) return self . service . decode ( schema , resp )
Get a capture's summary.
1,176
def decode ( self , id , seq , intf , filter = None , frame = None , inline = False ) : schema = DecodeSchema ( ) resp = self . service . get ( self . _base ( id , seq ) + str ( intf ) + '/decode/' , params = { 'filter' : filter , 'frame' : frame , 'inline' : inline } ) return self . service . decode ( schema , resp )
Get a capture's decode.
1,177
def send_to_cloudshark ( self , id , seq , intf , inline = False ) : schema = CloudSharkSchema ( ) resp = self . service . post ( self . _base ( id , seq ) + str ( intf ) + '/cloudshark/' , params = { 'inline' : inline } ) return self . service . decode ( schema , resp )
Send a capture to a CloudShark Appliance . Both cloudshark_appliance_url and cloudshark_appliance_token must be properly configured via system preferences .
1,178
def get_dict_from_response ( response ) : if getattr ( response , '_resp' ) and response . _resp . code > 400 : raise OAuthResponseError ( 'Application mis-configuration in Globus' , None , response ) return response . data
Check for errors in the response and return the resulting JSON .
1,179
def get_user_info ( remote ) : response = remote . get ( GLOBUS_USER_INFO_URL ) user_info = get_dict_from_response ( response ) response . data [ 'username' ] = response . data [ 'preferred_username' ] if '@' in response . data [ 'username' ] : user_info [ 'username' ] , _ = response . data [ 'username' ] . split ( '@' ) return user_info
Get user information from Globus .
1,180
def get_user_id ( remote , email ) : try : url = '{}?usernames={}' . format ( GLOBUS_USER_ID_URL , email ) user_id = get_dict_from_response ( remote . get ( url ) ) return user_id [ 'identities' ] [ 0 ] [ 'id' ] except KeyError : raise OAuthResponseError ( 'Failed to fetch user id, likely server ' 'mis-configuration' , None , remote )
Get the Globus identity for a user's given email.
1,181
def get_function_signature ( func ) : if func is None : return 'Function is None' try : func_name = func . __name__ except AttributeError : func_name = 'None' if not inspect . isfunction ( func ) : raise TypeError ( 'The argument must be a function object: %s type is %s' % ( func_name , type ( func ) ) ) return func_name + str ( inspect . signature ( func ) )
Return the signature string of the specified function .
1,182
def acquire_reader(self):
    with self.mutex:
        while (self.rwlock < 0 or
               self.rwlock == self.max_reader_concurrency or
               self.writers_waiting):
            self.readers_ok.wait()
        self.rwlock += 1
Acquire a read lock; several threads can hold this type of lock.
1,183
def acquire_writer(self):
    with self.mutex:
        while self.rwlock != 0:
            self._writer_wait()
        self.rwlock = -1
Acquire a write lock; only one thread can hold this lock, and only when no read locks are also held.
1,184
def list ( self , filter = None , type = None , sort = None , limit = None , page = None ) : schema = PackageSchema ( exclude = ( 'testlist' , 'extra_cli_args' , 'agent_id' , 'options' , 'note' ) ) resp = self . service . list ( self . base , filter , type , sort , limit , page ) ps , l = self . service . decode ( schema , resp , many = True , links = True ) return Page ( ps , l )
Get a list of packages .
1,185
def get ( self , id ) : schema = PackageSchema ( ) resp = self . service . get_id ( self . base , id ) return self . service . decode ( schema , resp )
Get a package .
1,186
def create ( self , resource ) : schema = PackageSchema ( exclude = ( 'id' , 'created' , 'updated' , 'test_count' , 'agent_id' , 'result_id' ) ) json = self . service . encode ( schema , resource ) schema = PackageSchema ( ) resp = self . service . create ( self . base , json ) return self . service . decode ( schema , resp )
Create a new package .
1,187
def analyze ( self , id ) : schema = AnalysisSchema ( ) resp = self . service . post ( self . base + str ( id ) + '/' , params = { 'process' : 'analyze' } ) return self . service . decode ( schema , resp )
Get a list of tests that will be skipped for a package .
1,188
def bulk_copy ( self , ids ) : schema = PackageSchema ( ) return self . service . bulk_copy ( self . base , self . RESOURCE , ids , schema )
Bulk copy a set of packages .
1,189
def bulk_edit ( self , _fields , ids = None , filter = None , type = None , all = False ) : schema = PackageSchema ( exclude = ( 'id' , 'created' , 'updated' , 'test_count' , 'agent_id' , 'result_id' ) ) _fields = self . service . encode ( schema , _fields , skip_none = True ) return self . service . bulk_edit ( self . base , self . RESOURCE , _fields , ids = ids , filter = filter , type = type , all = all )
Bulk edit a set of packages .
1,190
def clean_lines(commands):
    if isinstance(commands, basestring):
        # A filename: read the commands from that file.
        if path.isfile(commands):
            commands = open(commands, 'rb')
        # A comma-separated string: split into individual commands.
        elif len(commands.split(',')) > 1:
            commands = commands.split(',')
        # A single command: yield it (unless it is a comment) and stop.
        else:
            try:
                if commands.strip()[0] != "#":
                    yield commands.strip() + '\n'
                return
            except IndexError:
                pass
    elif isinstance(commands, list):
        pass
    else:
        raise TypeError('clean_lines() accepts a \'str\' or \'list\'')
    for cmd in commands:
        try:
            if cmd.strip()[0] != "#":
                yield cmd.strip() + '\n'
        except IndexError:
            pass
Generate strings that are not comments or lines with only whitespace .
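For reference, a small usage sketch; the commands are made up:

for cmd in clean_lines('set system host-name router1,# a comment'):
    print(repr(cmd))  # -> 'set system host-name router1\n' (the comment line is skipped)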
1,191
def xpath ( source_xml , xpath_expr , req_format = 'string' ) : tree = source_xml if not isinstance ( source_xml , ET . Element ) : tree = objectify . fromstring ( source_xml ) for elem in tree . getiterator ( ) : if isinstance ( elem . tag , basestring ) : i = elem . tag . find ( '}' ) if i >= 0 : elem . tag = elem . tag [ i + 1 : ] objectify . deannotate ( tree , cleanup_namespaces = True ) filtered_list = tree . xpath ( xpath_expr ) if req_format == 'xml' : return filtered_list matches = '' . join ( etree . tostring ( element , pretty_print = True ) for element in filtered_list ) return matches if matches else ""
Filter XML based on an XPath expression.
1,192
def set(self, key, value, lease=None, return_previous=None, timeout=None):
    assembler = commons.PutRequestAssembler(
        self._url, key, value, lease, return_previous
    )
    obj = yield self._post(assembler.url, assembler.data, timeout)
    revision = Revision._parse(obj)
    returnValue(revision)
Set the value for the key in the key - value store .
1,193
def watch ( self , keys , on_watch , filters = None , start_revision = None , return_previous = None ) : d = self . _start_watching ( keys , on_watch , filters , start_revision , return_previous ) def on_err ( * args ) : if args [ 0 ] . type not in [ CancelledError , ResponseFailed ] : self . log . warn ( 'etcd watch terminated with "{error}"' , error = args [ 0 ] . type ) return args [ 0 ] d . addErrback ( on_err ) return d
Watch one or more keys or key sets and invoke a callback .
1,194
def lease(self, time_to_live, lease_id=None, timeout=None):
    assembler = commons.LeaseRequestAssembler(self._url, time_to_live, lease_id)
    obj = yield self._post(assembler.url, assembler.data, timeout)
    lease = Lease._parse(self, obj)
    returnValue(lease)
Creates a lease which expires if the server does not receive a keep alive within a given time to live period .
1,195
def stage_import_from_file ( self , fd , filename = 'upload.gz' ) : schema = ImportSchema ( ) resp = self . service . post ( self . base , files = { 'file' : ( filename , fd ) } ) return self . service . decode ( schema , resp )
Stage an import from a file upload .
1,196
def stage_import_from_filesystem ( self , filepath ) : schema = ImportSchema ( ) resp = self . service . post ( self . base , params = { 'path' : filepath } ) return self . service . decode ( schema , resp )
Stage an import from a filesystem path .
1,197
def stage_import_from_url ( self , url , token = None , username = None , password = None , insecure = False ) : schema = ImportSchema ( ) resp = self . service . post ( self . base , params = { 'url' : url , 'token' : token , 'username' : username , 'password' : password , 'insecure' : insecure } ) return self . service . decode ( schema , resp )
Stage an import from a URL to another CDRouter system .
1,198
def get_commit_request ( self , id ) : schema = RequestSchema ( ) resp = self . service . get ( self . base + str ( id ) + '/request/' ) return self . service . decode ( schema , resp )
Get a commit request for a staged import .
1,199
def commit ( self , id , impreq ) : schema = RequestSchema ( ) json = self . service . encode ( schema , impreq ) schema = RequestSchema ( ) resp = self . service . post ( self . base + str ( id ) + '/' , json = json ) return self . service . decode ( schema , resp )
Commit a staged import .