idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
55,600
def guess_backend(identifier):
    """Guess the device backend from an identifier string.

    Returns 'pyusb', 'linux_kernel' or 'network' based on the
    identifier's prefix; raises ValueError for anything unrecognized.
    """
    prefix_map = (
        (('usb://', '0x'), 'pyusb'),
        (('file://', '/dev/usb/', 'lp'), 'linux_kernel'),
        (('tcp://',), 'network'),
    )
    for prefixes, backend in prefix_map:
        if identifier.startswith(prefixes):
            return backend
    raise ValueError('Cannot guess backend for given identifier: %s' % identifier)
guess the backend from a given identifier string for the device
55,601
def featured_event_query(self, **kwargs):
    """Query the Yelp Featured Event API.

    Requires either a 'location' kwarg or both 'latitude' and
    'longitude'; raises ValueError otherwise.
    """
    has_location = bool(kwargs.get('location'))
    has_coordinates = bool(kwargs.get('latitude')) and bool(kwargs.get('longitude'))
    if not (has_location or has_coordinates):
        raise ValueError(
            'A valid location (parameter "location") or latitude/longitude combination '
            '(parameters "latitude" and "longitude") must be provided.'
        )
    return self._query(FEATURED_EVENT_API_URL, **kwargs)
Query the Yelp Featured Event API .
55,602
def _get_clean_parameters(kwargs):
    """Return a copy of *kwargs* with every None-valued entry removed."""
    return {key: value for key, value in kwargs.items() if value is not None}
Clean the parameters by filtering out any parameters that have a None value .
55,603
def _query(self, url, **kwargs):
    """Shared query logic: GET *url*, parse the JSON response, and raise
    YelpAPIError when the payload contains an 'error' key; otherwise
    return the parsed JSON."""
    parameters = YelpAPI._get_clean_parameters(kwargs)
    response = self._yelp_session.get(
        url,
        headers=self._headers,
        params=parameters,
        timeout=self._timeout_s,
    )
    payload = response.json()
    if 'error' in payload:
        raise YelpAPI.YelpAPIError('{}: {}'.format(
            payload['error']['code'],
            payload['error']['description'],
        ))
    return payload
All query methods have the same logic, so don't repeat it! Query the URL, parse the response as JSON and check for errors. If all goes well, return the parsed JSON.
55,604
def url_join(base, *args):
    """Join an arbitrary number of path segments onto a base URL."""
    scheme, netloc, path, query, fragment = urlsplit(base)
    if not path:
        path = "/"
    segments = ['%s' % segment for segment in args]
    joined_path = posixpath.join(path, *segments)
    return urlunsplit([scheme, netloc, joined_path, query, fragment])
Helper function to join an arbitrary number of url segments together .
55,605
def probe_to_graphsrv(probe):
    """Register a graphsrv data group for the given probe's config.

    Either a single explicit "group" entry ("source.group" notation) or
    any config value that is a dict containing a "hosts" list.
    """
    config = probe.config
    if "group" in config:
        source, group = config["group"].split(".")
        group_field = config.get("group_field", "host")
        group_value = config[group_field]
        graphsrv.group.add(source, group,
                           {group_value: {group_field: group_value}},
                           **config)
        return
    for key, value in list(config.items()):
        if not (isinstance(value, dict) and "hosts" in value):
            continue
        host_map = {}
        for host in value.get("hosts"):
            if isinstance(host, dict):
                host_map[host["host"]] = host
            else:
                host_map[host] = {"host": host}
        graphsrv.group.add(probe.name, key, host_map, **value)
takes a probe instance and generates a graphsrv data group for it using the probe s config
55,606
def new_message(self):
    """Create a new message dict with type, source, ts and empty data."""
    epoch = datetime.datetime(1970, 1, 1)
    return {
        'data': [],
        'type': self.plugin_type,
        'source': self.name,
        # seconds since the unix epoch, as a float
        'ts': (datetime.datetime.utcnow() - epoch).total_seconds(),
    }
creates a new message setting type source ts data - data is initialized to an empty array
55,607
def popen(self, args, **kwargs):
    """Spawn a subprocess with the passed args, logging the command line."""
    command_line = ' '.join(args)
    self.log.debug("popen %s", command_line)
    return vaping.io.subprocess.Popen(args, **kwargs)
creates a subprocess with passed args
55,608
def queue_emission(self, msg):
    """Queue an emission of *msg* for every output plugin that can emit."""
    if not msg:
        return
    for output in self._emit:
        if not hasattr(output, 'emit'):
            continue

        # bind the emitter as a default argument so each closure keeps
        # its own emitter rather than the last loop value
        def emit(emitter=output):
            self.log.debug("emit to {}".format(emitter.name))
            emitter.emit(msg)

        self.log.debug("queue emission to {} ({})".format(
            output.name, self._emit_queue.qsize()))
        self._emit_queue.put(emit)
queue an emission of a message for all output plugins
55,609
def send_emission(self):
    """Pop and invoke the oldest queued emission, if any."""
    if self._emit_queue.empty():
        return
    emission = self._emit_queue.get()
    emission()
emit and remove the first emission in the queue
55,610
def validate_file_handler(self):
    """Validate that our file handler still points at the file at self.path.

    Reopens the handle (seeking to EOF) if it was closed, and closes it
    when the file on disk was rotated/replaced (stat mismatch) or removed.

    Returns:
        bool: True when the handle is valid, False when the caller should
        retry later (handle will be reopened on the next call).
    """
    if self.fh.closed:
        try:
            self.fh = open(self.path, "r")
            # jump to EOF so we only pick up lines written after reopen
            self.fh.seek(0, 2)
        except OSError as err:
            logging.error("Could not reopen file: {}".format(err))
            return False
    open_stat = os.fstat(self.fh.fileno())
    try:
        file_stat = os.stat(self.path)
    except OSError as err:
        logging.error("Could not stat file: {}".format(err))
        return False
    if open_stat != file_stat:
        # BUG FIX: this used to be a bare `self.log` expression statement,
        # a silent no-op where a log call was clearly intended; actually
        # log the rotation before closing the stale handle.
        logging.debug("File {} changed on disk, closing stale handle".format(self.path))
        self.fh.close()
        return False
    return True
Here we validate that our filehandler is pointing to an existing file .
55,611
def probe(self):
    """Read any new lines from the file and wrap each parsed line in a
    message; returns the processed list of messages."""
    if not self.validate_file_handler():
        return []
    messages = []
    for line in self.fh.readlines(self.max_lines):
        data = {"path": self.path}
        msg = self.new_message()
        parsed = self.process_line(line, data)
        if not parsed:
            continue
        data.update(parsed)
        data = self.process_probe(data)
        msg["data"] = [data]
        messages.append(msg)
    return self.process_messages(messages)
Probe the file for new lines
55,612
def filename_formatters(self, data, row):
    """Return the dict of filename formatter values (source/field/type
    plus everything from the row)."""
    formatters = {
        "source": data.get("source"),
        "field": self.field,
        "type": data.get("type"),
    }
    formatters.update(**row)
    return formatters
Returns a dict containing the various filename formatter values
55,613
def format_filename(self, data, row):
    """Format self.filename (a template) with the formatter values for
    this data/row pair."""
    values = self.filename_formatters(data, row)
    return self.filename.format(**values)
Returns a formatted filename using the template stored in self . filename
55,614
def emit(self, message):
    """Store each data row of the message in its database file,
    creating the file first when it does not exist yet."""
    rows = message.get("data")
    if not isinstance(rows, list):
        return
    for row in rows:
        filename = self.format_filename(message, row)
        if not os.path.exists(filename):
            self.create(filename)
        self.log.debug("storing time:%d, %s:%.5f in %s" % (
            message.get("ts"), self.field, row.get(self.field), filename))
        self.update(filename, message.get("ts"), row.get(self.field))
emit to database
55,615
def parse_interval(val):
    """Convert an interval string to a float number of seconds.

    Supports ms, s, m, h and d units, e.g. "1m30s" -> 90.0,
    "500ms" -> 0.5. Raises ValueError for unknown units.
    """
    interval_re = re.compile(r"([\d\.]+)([a-zA-Z]+)")
    total = 0.0
    for count_str, unit in interval_re.findall(val.strip()):
        count = float(count_str)
        if unit == 's':
            total += count
        elif unit == 'm':
            total += count * 60
        elif unit == 'ms':
            total += count / 1000
        elif unit == "h":
            total += count * 3600
        elif unit == 'd':
            total += count * 86400
        else:
            raise ValueError("unknown unit from interval string '%s'" % val)
    return total
converts an interval string to a float number of seconds, e.g. "500ms" = 0.5 and "1m30s" = 90.0
55,616
def hosts_args(self):
    """Return the deduplicated host list; entries in self.hosts may be
    plain strings or dicts carrying a "host" key."""
    deduped = []
    for entry in self.hosts:
        host = entry["host"] if isinstance(entry, dict) else entry
        if host not in deduped:
            deduped.append(host)
    return deduped
hosts list can contain strings specifying a host directly or dicts containing a host key to specify the host
55,617
def parse_verbose(self, line):
    """Parse one line of verbose fping-style output ("host : t1 t2 -").

    Returns a dict with host, cnt, loss, data and (when samples exist)
    min/max/avg/last; returns None and logs on parse failure.
    """
    try:
        logging.debug(line)
        host, pings = line.split(' : ')
        samples = pings.strip().split(' ')
        cnt = len(samples)
        # '-' marks a lost ping; everything else is a latency value
        times = [float(sample) for sample in samples if sample != '-']
        lost = cnt - len(times)
        loss = lost / float(cnt) if lost else 0.0
        rv = {
            'host': host.strip(),
            'cnt': cnt,
            'loss': loss,
            'data': times,
        }
        if times:
            rv['min'] = min(times)
            rv['max'] = max(times)
            rv['avg'] = sum(times) / len(times)
            rv['last'] = times[-1]
        return rv
    except Exception as e:
        logging.error("failed to get data: {}".format(e))
parse output from verbose format
55,618
def start(ctx, **kwargs):
    """Start a vaping process; run in the foreground when debugging or
    when no_fork was requested, otherwise daemonize."""
    update_context(ctx, kwargs)
    daemon = mk_daemon(ctx)
    run_foreground = ctx.debug or kwargs['no_fork']
    if run_foreground:
        daemon.run()
    else:
        daemon.start()
start a vaping process
55,619
def stop(ctx, **kwargs):
    """Stop a running vaping process."""
    update_context(ctx, kwargs)
    mk_daemon(ctx).stop()
stop a vaping process
55,620
def restart(ctx, **kwargs):
    """Restart a vaping process (stop, then start again)."""
    update_context(ctx, kwargs)
    process = mk_daemon(ctx)
    process.stop()
    process.start()
restart a vaping process
55,621
def render_content(self, request):
    """Render and return the HTML for this panel."""
    context = dict(self.data)
    context.update(self.render_vars(request))
    return render(self.template, request.app, context, request=request)
Return a string containing the HTML to be rendered for the panel .
55,622
def inject(self, request, response):
    """Inject the debug toolbar button into an HTML response.

    Splices toolbar CSS before </head> and the toolbar HTML before
    </body> of the response body (as bytes). Non-Response objects are
    ignored.
    """
    if not isinstance(response, Response):
        return
    settings = request.app[APP_KEY]['settings']
    response_html = response.body
    # resolve the per-request toolbar URL and the static CSS path
    route = request.app.router['debugtoolbar.request']
    toolbar_url = route.url_for(request_id=request['id'])
    button_style = settings['button_style']
    css_path = request.app.router[STATIC_ROUTE_NAME].url_for(filename='css/toolbar_button.css')
    toolbar_css = toolbar_css_template % {'css_path': css_path}
    toolbar_html = toolbar_html_template % {'button_style': button_style, 'css_path': css_path, 'toolbar_url': toolbar_url}
    # encode with the response charset so the byte-level replace matches
    toolbar_html = toolbar_html.encode(response.charset or 'utf-8')
    toolbar_css = toolbar_css.encode(response.charset or 'utf-8')
    # case-insensitive replace: CSS goes into <head>, markup into <body>
    response_html = replace_insensitive(response_html, b'</head>', toolbar_css + b'</head>')
    response.body = replace_insensitive(response_html, b'</body>', toolbar_html + b'</body>')
Inject the debug toolbar iframe into an HTML response .
55,623
def common_segment_count(path, value):
    """Return len(path) when *path* is a segment-wise prefix of *value*,
    otherwise 0 (including when path is longer than value)."""
    if len(path) > len(value):
        return 0
    count = 0
    for left, right in zip(path, value):
        if left != right:
            return 0
        count += 1
    return count
Return the number of path segments common to both
55,624
def timing(self, stat, delta, rate=1):
    """Send timing information; timedelta deltas are converted to ms."""
    value = delta
    if isinstance(value, timedelta):
        value = value.total_seconds() * 1000.
    self._send_stat(stat, '%0.6f|ms' % value, rate)
Send new timing information .
55,625
def decr(self, stat, count=1, rate=1):
    """Decrement a stat by *count* (an incr with a negated count)."""
    self.incr(stat, -count, rate)
Decrement a stat by count .
55,626
def gauge(self, stat, value, rate=1, delta=False):
    """Set a gauge value.

    Absolute negative gauges must first be reset to 0 (statsd would
    otherwise interpret the sign as a delta), so those are sent as a
    two-command pipeline.
    """
    if value < 0 and not delta:
        # sample here since the pipeline bypasses per-stat rate handling
        if rate < 1 and random.random() > rate:
            return
        with self.pipeline() as pipe:
            pipe._send_stat(stat, '0|g', 1)
            pipe._send_stat(stat, '%s|g' % value, 1)
    else:
        prefix = '+' if delta and value >= 0 else ''
        self._send_stat(stat, '%s%s|g' % (prefix, value), rate)
Set a gauge value .
55,627
def set(self, stat, value, rate=1):
    """Send a set (unique-occurrences) value."""
    payload = '%s|s' % value
    self._send_stat(stat, payload, rate)
Set a set value .
55,628
def safe_wraps(wrapper, *args, **kwargs):
    """Like functools.wraps, but unwraps functools.partial objects first
    so wrapping a partial does not blow up on missing attributes."""
    target = wrapper
    while isinstance(target, functools.partial):
        target = target.func
    return functools.wraps(target, *args, **kwargs)
Safely wraps partial functions .
55,629
def find_rule_classes(extra_path):
    """Search a directory or a single python file for user rule classes.

    Adds the directory to sys.path, imports each *.py module found and
    collects every class defined in those modules that subclasses
    rules.LineRule or rules.CommitRule. Each found class is validated
    with assert_valid_rule_class before being returned.

    Raises UserRuleError for an invalid path, an import failure, or an
    invalid rule class.
    """
    files = []
    modules = []
    if os.path.isfile(extra_path):
        files = [os.path.basename(extra_path)]
        directory = os.path.dirname(extra_path)
    elif os.path.isdir(extra_path):
        files = os.listdir(extra_path)
        directory = extra_path
    else:
        raise UserRuleError(u"Invalid extra-path: {0}".format(extra_path))
    # module name = python filename without extension
    for filename in files:
        if fnmatch.fnmatch(filename, '*.py'):
            modules.append(os.path.splitext(filename)[0])
    if not modules:
        return []
    # NOTE: mutates sys.path so the modules become importable by name
    sys.path.append(directory)
    rule_classes = []
    for module in modules:
        try:
            importlib.import_module(module)
        except Exception as e:
            raise UserRuleError(u"Error while importing extra-path module '{0}': {1}".format(module, ustr(e)))
        # only classes *defined in* the module (not imported into it)
        rule_classes.extend([clazz for _, clazz in inspect.getmembers(sys.modules[module])
                             if inspect.isclass(clazz) and clazz.__module__ == module and
                             (issubclass(clazz, rules.LineRule) or issubclass(clazz, rules.CommitRule))])
    for rule_class in rule_classes:
        assert_valid_rule_class(rule_class)
    return rule_classes
Searches a given directory or python module for rule classes . This is done by adding the directory path to the python path importing the modules and then finding any Rule class in those modules .
55,630
def ustr(obj):
    """Python 2/3 utility: return *obj* as unicode (py2) or str (py3),
    decoding byte strings with DEFAULT_ENCODING."""
    if sys.version_info[0] == 2:
        if type(obj) in [str, basestring]:  # noqa: F821 (py2 only)
            return unicode(obj, DEFAULT_ENCODING)  # noqa: F821 (py2 only)
        return unicode(obj)  # noqa: F821 (py2 only)
    if type(obj) in [bytes]:
        return obj.decode(DEFAULT_ENCODING)
    return str(obj)
Python 2 and 3 utility method that converts an obj to unicode in python 2 and to a str object in python 3
55,631
def get_rule_option(self, rule_name_or_id, option_name):
    """Return the value of a rule's option; lookup errors propagate from
    self._get_option (LintConfigError for unknown rule/option)."""
    return self._get_option(rule_name_or_id, option_name).value
Returns the value of a given option for a given rule. LintConfigErrors will be raised if the rule or option don't exist.
55,632
def set_rule_option(self, rule_name_or_id, option_name, option_value):
    """Set a rule option's value, re-raising an invalid value as a
    LintConfigError with a descriptive message."""
    option = self._get_option(rule_name_or_id, option_name)
    try:
        option.set(option_value)
    except options.RuleOptionError as e:
        message = u"'{0}' is not a valid value for option '{1}.{2}'. {3}.".format(
            option_value, rule_name_or_id, option_name, ustr(e))
        raise LintConfigError(message)
Attempts to set a given value for a given option for a given rule. LintConfigErrors will be raised if the rule or option don't exist or if the value is invalid.
55,633
def set_from_config_file(self, filename):
    """Load lint config from an ini-style config file, applying every
    option of every section via set_option."""
    if not os.path.exists(filename):
        raise LintConfigError(u"Invalid file path: {0}".format(filename))
    self._config_path = os.path.abspath(filename)
    try:
        parser = ConfigParser()
        parser.read(filename)
        for section_name in parser.sections():
            for option_name, option_value in parser.items(section_name):
                self.set_option(section_name, option_name, ustr(option_value))
    except ConfigParserError as e:
        raise LintConfigError(ustr(e))
Loads lint config from a ini - style config file
55,634
def build(self, config=None):
    """Build a real LintConfig from this builder's blueprint, applying
    general options first and rule options afterwards."""
    if not config:
        config = LintConfig()
    config._config_path = self._config_path
    general_section = self._config_blueprint.get('general')
    if general_section:
        for option_name, option_value in general_section.items():
            config.set_general_option(option_name, option_value)
    for section_name, section_dict in self._config_blueprint.items():
        if section_name == "general":
            continue
        for option_name, option_value in section_dict.items():
            config.set_rule_option(section_name, option_name, option_value)
    return config
Build a real LintConfig object by normalizing and validating the options that were previously set on this factory .
55,635
def clone(self):
    """Return a deep, independent copy of this LintConfigBuilder."""
    duplicate = LintConfigBuilder()
    duplicate._config_blueprint = copy.deepcopy(self._config_blueprint)
    duplicate._config_path = self._config_path
    return duplicate
Creates an exact copy of a LintConfigBuilder .
55,636
def _git(*command_parts, **kwargs):
    """Convenience wrapper for running git commands via the `sh` library.

    Returns the command output as unicode, or the raw sh result object
    when the caller allowed a nonzero exit code (via _ok_code) and got
    one. Raises GitNotInstalledError when git is missing and
    GitContextError (with a friendly message) on command failure.
    """
    git_kwargs = {'_tty_out': False}
    git_kwargs.update(kwargs)
    try:
        result = sh.git(*command_parts, **git_kwargs)
        # with _ok_code, sh returns a result carrying a nonzero exit_code
        # instead of raising; hand it back untouched for inspection
        if hasattr(result, 'exit_code') and result.exit_code > 0:
            return result
        return ustr(result)
    except CommandNotFound:
        raise GitNotInstalledError()
    except ErrorReturnCode as e:
        error_msg = e.stderr.strip()
        # give a clearer message when _cwd simply isn't a git repo
        if '_cwd' in git_kwargs and b"not a git repository" in error_msg.lower():
            error_msg = u"{0} is not a git repository.".format(git_kwargs['_cwd'])
        else:
            error_msg = u"An error occurred while executing '{0}': {1}".format(e.full_cmd, error_msg)
        raise GitContextError(error_msg)
Convenience function for running git commands . Automatically deals with exceptions and unicode .
55,637
def git_commentchar():
    """Return git's core.commentchar config value, defaulting to '#'."""
    commentchar = _git("config", "--get", "core.commentchar", _ok_code=[1])
    # exit code 1 means the option is unset; fall back to git's default
    if hasattr(commentchar, 'exit_code') and commentchar.exit_code == 1:
        commentchar = "#"
    return ustr(commentchar).replace(u"\n", u"")
Shortcut for retrieving comment char from git config
55,638
def from_full_message(commit_msg_str):
    """Parse a full git commit message string into a GitCommitMessage.

    Everything after the cut line is dropped, comment lines are
    filtered out, and the remainder is split into title and body.
    """
    all_lines = commit_msg_str.splitlines()
    try:
        cutline_index = all_lines.index(GitCommitMessage.CUTLINE)
    except ValueError:
        cutline_index = None
    content_lines = [line for line in all_lines[:cutline_index]
                     if not line.startswith(GitCommitMessage.COMMENT_CHAR)]
    full = "\n".join(content_lines)
    title = content_lines[0] if content_lines else ""
    body = content_lines[1:] if len(content_lines) > 1 else []
    return GitCommitMessage(original=commit_msg_str, full=full, title=title, body=body)
Parses a full git commit message by parsing a given string into the different parts of a commit message
55,639
def should_ignore_rule(self, rule):
    """Return True when the rule's id or name appears in the configured
    ignore list."""
    ignored = self.config.ignore
    return rule.id in ignored or rule.name in ignored
Determines whether a rule should be ignored based on the general list of rules to ignore in the configuration
55,640
def _apply_line_rules(lines, commit, rules, line_nr_start):
    """Validate every rule against every line, stamping each returned
    violation with its (1-based, offset by line_nr_start) line number."""
    all_violations = []
    line_nr = line_nr_start
    for line in lines:
        for rule in rules:
            for violation in (rule.validate(line, commit) or []):
                violation.line_nr = line_nr
                all_violations.append(violation)
        line_nr += 1
    return all_violations
Iterates over the lines in a given list of lines and validates a given list of rules against each line
55,641
def _apply_commit_rules(rules, commit):
    """Apply each commit-level rule to the commit and collect all
    resulting violations."""
    collected = []
    for rule in rules:
        found = rule.validate(commit)
        if found:
            collected.extend(found)
    return collected
Applies a set of rules against a given commit and gitcontext
55,642
def lint(self, commit):
    """Lint a single commit from the git context.

    Applies configuration rules (which may mutate self.config), skips
    merge/squash/fixup commits when configured to ignore them, then runs
    title line rules, body line rules and commit rules, returning the
    violations sorted by line number and rule id.
    """
    LOG.debug("Linting commit %s", commit.sha or "[SHA UNKNOWN]")
    LOG.debug("Commit Object\n" + ustr(commit))
    # configuration rules run first: they may alter config per-commit
    for rule in self.configuration_rules:
        rule.apply(self.config, commit)
    ignore_commit_types = ["merge", "squash", "fixup"]
    for commit_type in ignore_commit_types:
        # e.g. commit.is_merge_commit combined with config.ignore_merge_commits
        if getattr(commit, "is_{0}_commit".format(commit_type)) and \
                getattr(self.config, "ignore_{0}_commits".format(commit_type)):
            return []
    violations = []
    # title rules start numbering at line 1, body rules at line 2
    violations.extend(self._apply_line_rules([commit.message.title], commit, self.title_line_rules, 1))
    violations.extend(self._apply_line_rules(commit.message.body, commit, self.body_line_rules, 2))
    violations.extend(self._apply_commit_rules(self.commit_rules, commit))
    # commit-level violations (line_nr None) sort before line violations
    violations.sort(key=lambda v: (-1 if v.line_nr is None else v.line_nr, v.rule_id))
    return violations
Lint the last commit in a given git context by applying all ignore title body and commit rules .
55,643
def print_violations(self, violations):
    """Print violations to stderr at increasing verbosity levels:
    rule id only (e), with message (ee), and with content (eee)."""
    for violation in violations:
        line_nr = violation.line_nr if violation.line_nr else "-"
        self.display.e(u"{0}: {1}".format(line_nr, violation.rule_id), exact=True)
        self.display.ee(u"{0}: {1} {2}".format(line_nr, violation.rule_id, violation.message), exact=True)
        if violation.content:
            self.display.eee(u"{0}: {1} {2}: \"{3}\"".format(
                line_nr, violation.rule_id, violation.message, violation.content), exact=True)
        else:
            self.display.eee(u"{0}: {1} {2}".format(
                line_nr, violation.rule_id, violation.message), exact=True)
Print a given set of violations to the standard error output
55,644
def _output(self, message, verbosity, exact, stream):
    """Write *message* to *stream* when config verbosity is >= the given
    verbosity, or exactly equal to it when exact is True."""
    config_verbosity = self.config.verbosity
    if exact:
        should_write = config_verbosity == verbosity
    else:
        should_write = config_verbosity >= verbosity
    if should_write:
        stream.write(message + "\n")
Output a message if the config's verbosity is >= the given verbosity. If exact == True, the message will only be outputted if the given verbosity exactly matches the config's verbosity.
55,645
def setup_logging():
    """Configure the non-propagating 'gitlint' root logger with a stream
    handler and ERROR level."""
    root_log = logging.getLogger("gitlint")
    root_log.propagate = False
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter(LOG_FORMAT))
    root_log.addHandler(handler)
    root_log.setLevel(logging.ERROR)
Setup gitlint logging
55,646
def build_config(ctx, target, config_path, c, extra_path, ignore, verbose, silent, debug):
    """Create a (LintConfig, LintConfigBuilder) pair from commandline params.

    Precedence: explicit config file > default config file; then -c
    string options; then individual flags (ignore/silent/verbose/
    extra-path/target/debug). Exits the click context with
    CONFIG_ERROR_CODE on any LintConfigError.
    """
    config_builder = LintConfigBuilder()
    try:
        if config_path:
            config_builder.set_from_config_file(config_path)
        elif os.path.exists(DEFAULT_CONFIG_FILE):
            config_builder.set_from_config_file(DEFAULT_CONFIG_FILE)
        # -c options override whatever was read from file
        config_builder.set_config_from_string_list(c)
        if ignore:
            config_builder.set_option('general', 'ignore', ignore)
        # silent wins over verbose
        if silent:
            config_builder.set_option('general', 'verbosity', 0)
        elif verbose > 0:
            config_builder.set_option('general', 'verbosity', verbose)
        if extra_path:
            config_builder.set_option('general', 'extra-path', extra_path)
        if target:
            config_builder.set_option('general', 'target', target)
        if debug:
            config_builder.set_option('general', 'debug', debug)
        config = config_builder.build()
        return config, config_builder
    except LintConfigError as e:
        click.echo(u"Config Error: {0}".format(ustr(e)))
        ctx.exit(CONFIG_ERROR_CODE)
Creates a LintConfig object based on a set of commandline parameters .
55,647
def get_stdin_data():
    """Return data piped/redirected to stdin as unicode, or False when
    stdin is a terminal or empty."""
    mode = os.fstat(sys.stdin.fileno()).st_mode
    stdin_is_pipe_or_file = stat.S_ISFIFO(mode) or stat.S_ISREG(mode)
    if not stdin_is_pipe_or_file:
        return False
    input_data = sys.stdin.read()
    if input_data:
        return ustr(input_data)
    return False
Helper function that returns data sent to stdin, or False if nothing was sent
55,648
def cli(ctx, target, config, c, commits, extra_path, ignore, msg_filename,
        verbose, silent, debug,):
    """Gitlint CLI entry point: check git commit messages for style issues.

    Builds the lint configuration from the commandline options, stashes
    it on the click context for subcommands, and defaults to the `lint`
    subcommand when none was given. Exits with GIT_CONTEXT_ERROR_CODE on
    git-related errors.
    """
    try:
        if debug:
            logging.getLogger("gitlint").setLevel(logging.DEBUG)
            log_system_info()
        config, config_builder = build_config(ctx, target, config, c, extra_path, ignore, verbose, silent, debug)
        LOG.debug(u"Configuration\n%s", ustr(config))
        # subcommands (lint, install-hook, ...) read these from ctx.obj
        ctx.obj = (config, config_builder, commits, msg_filename)
        if ctx.invoked_subcommand is None:
            ctx.invoke(lint)
    except GitContextError as e:
        click.echo(ustr(e))
        ctx.exit(GIT_CONTEXT_ERROR_CODE)
Git lint tool checks your git commit messages for styling issues
55,649
def install_hook(ctx):
    """Install gitlint as a git commit-msg hook, echoing the result."""
    try:
        lint_config = ctx.obj[0]
        hooks.GitHookInstaller.install_commit_msg_hook(lint_config)
        hook_path = hooks.GitHookInstaller.commit_msg_hook_path(lint_config)
        click.echo(u"Successfully installed gitlint commit-msg hook in {0}".format(hook_path))
        ctx.exit(0)
    except hooks.GitHookInstallerError as e:
        click.echo(ustr(e), err=True)
        ctx.exit(GIT_CONTEXT_ERROR_CODE)
Install gitlint as a git commit - msg hook .
55,650
def uninstall_hook(ctx):
    """Uninstall the gitlint commit-msg hook, echoing the result."""
    try:
        lint_config = ctx.obj[0]
        hooks.GitHookInstaller.uninstall_commit_msg_hook(lint_config)
        hook_path = hooks.GitHookInstaller.commit_msg_hook_path(lint_config)
        click.echo(u"Successfully uninstalled gitlint commit-msg hook from {0}".format(hook_path))
        ctx.exit(0)
    except hooks.GitHookInstallerError as e:
        click.echo(ustr(e), err=True)
        ctx.exit(GIT_CONTEXT_ERROR_CODE)
Uninstall gitlint commit - msg hook .
55,651
def generate_config(ctx):
    """Interactively generate a sample gitlint config file, refusing to
    write into a missing directory or over an existing file."""
    path = click.prompt('Please specify a location for the sample gitlint config file',
                        default=DEFAULT_CONFIG_FILE)
    path = os.path.abspath(path)
    dir_name = os.path.dirname(path)
    if not os.path.exists(dir_name):
        click.echo(u"Error: Directory '{0}' does not exist.".format(dir_name), err=True)
        ctx.exit(USAGE_ERROR_CODE)
    elif os.path.exists(path):
        click.echo(u"Error: File \"{0}\" already exists.".format(path), err=True)
        ctx.exit(USAGE_ERROR_CODE)
    LintConfigGenerator.generate_config(path)
    click.echo(u"Successfully generated {0}".format(path))
    ctx.exit(0)
Generates a sample gitlint config file .
55,652
def _assert_git_repo(target):
    """Raise GitHookInstallerError unless *target* looks like a git repo
    (i.e. contains a hooks directory)."""
    hooks_dir = os.path.abspath(os.path.join(target, HOOKS_DIR_PATH))
    if not os.path.isdir(hooks_dir):
        raise GitHookInstallerError(u"{0} is not a git repository.".format(target))
Asserts that a given target directory is a git repository
55,653
def get_job_url(config, hub, group, project):
    """Build the jobs endpoint URL, filling hub/group/project from the
    config dict when not given; falls back to the generic '/Jobs'."""
    if config is not None:
        if hub is None and 'hub' in config:
            hub = config["hub"]
        if group is None and 'group' in config:
            group = config["group"]
        if project is None and 'project' in config:
            project = config["project"]
    if hub is not None and group is not None and project is not None:
        return '/Network/{}/Groups/{}/Projects/{}/jobs'.format(hub, group, project)
    return '/Jobs'
Util method to get job url
55,654
def get_backend_stats_url(config, hub, backend_type):
    """Build the backend stats URL, filling the hub from the config dict
    when not given; falls back to the generic '/Backends/...' path."""
    if config is not None and 'hub' in config and hub is None:
        hub = config["hub"]
    if hub is None:
        return '/Backends/{}'.format(backend_type)
    return '/Network/{}/devices/{}'.format(hub, backend_type)
Util method to get backend stats url
55,655
def get_backend_url(config, hub, group, project):
    """Build the devices endpoint URL, filling hub/group/project from
    the config dict when not given; falls back to '/Backends'."""
    if config is not None:
        if hub is None and 'hub' in config:
            hub = config["hub"]
        if group is None and 'group' in config:
            group = config["group"]
        if project is None and 'project' in config:
            project = config["project"]
    if hub is not None and group is not None and project is not None:
        return '/Network/{}/Groups/{}/Projects/{}/devices'.format(hub, group, project)
    return '/Backends'
Util method to get backend url
55,656
def obtain_token(self, config=None):
    """Obtain the access token for the QX Platform.

    Logs in either with the stored API token (self.token_unique) or with
    an email/password pair from *config*. On success the credentials
    payload is stored in self.data_credentials.

    Raises:
        ApiError: on request/HTTP/JSON failures during login.
        CredentialsError: on missing credentials or a rejected login.
    """
    client_application = CLIENT_APPLICATION
    if self.config and ("client_application" in self.config):
        client_application += ':' + self.config["client_application"]
    headers = {'x-qx-client-application': client_application}
    if self.token_unique:
        try:
            response = requests.post(str(self.config.get('url') + "/users/loginWithToken"),
                                     data={'apiToken': self.token_unique},
                                     verify=self.verify,
                                     headers=headers,
                                     **self.extra_args)
        except requests.RequestException as e:
            raise ApiError('error during login: %s' % str(e))
    elif config and ("email" in config) and ("password" in config):
        email = config.get('email', None)
        password = config.get('password', None)
        credentials = {'email': email, 'password': password}
        try:
            response = requests.post(str(self.config.get('url') + "/users/login"),
                                     data=credentials,
                                     verify=self.verify,
                                     headers=headers,
                                     **self.extra_args)
        except requests.RequestException as e:
            raise ApiError('error during login: %s' % str(e))
    else:
        raise CredentialsError('invalid token')
    if response.status_code == 401:
        error_message = None
        # BUG FIX: this was a bare `except: pass`, which also swallowed
        # KeyboardInterrupt/SystemExit; only tolerate a missing or
        # malformed JSON error payload here.
        try:
            error_message = response.json()['error']['message']
        except (ValueError, KeyError, TypeError):
            pass
        if error_message:
            raise CredentialsError('error during login: %s' % error_message)
        raise CredentialsError('invalid token')
    try:
        response.raise_for_status()
        self.data_credentials = response.json()
    except (requests.HTTPError, ValueError) as e:
        raise ApiError('error during login: %s' % str(e))
    if self.get_token() is None:
        raise CredentialsError('invalid token')
Obtain the token to access to QX Platform .
55,657
def check_token(self, respond):
    """Return True when the response indicates a valid token; on a 401,
    refresh the token via the credential object and return False."""
    if respond.status_code != 401:
        return True
    self.credential.obtain_token(config=self.config)
    return False
Check if the user's token is valid
55,658
def post(self, path, params='', data=None):
    """POST wrapper for the REST API with token refresh and retries.

    Builds the URL from the credential config plus the access token,
    retries up to self.retries times (sleeping timeout_interval between
    failed attempts) and refreshes an expired token once per attempt.
    Raises ApiError when no good response is obtained.
    """
    self.result = None
    data = data or {}
    headers = {'Content-Type': 'application/json',
               'x-qx-client-application': self.client_application}
    url = str(self.credential.config['url'] + path + '?access_token=' +
              self.credential.get_token() + params)
    retries = self.retries
    while retries > 0:
        respond = requests.post(url, data=data, headers=headers, verify=self.verify, **self.extra_args)
        # a 401 means the token expired: check_token refreshes it and
        # returns False, so re-issue the request once with the new token
        if not self.check_token(respond):
            respond = requests.post(url, data=data, headers=headers, verify=self.verify, **self.extra_args)
        if self._response_good(respond):
            # _response_good may have stored a parsed payload in self.result
            if self.result:
                return self.result
            elif retries < 2:
                # last attempt: return the raw JSON body
                return respond.json()
            else:
                retries -= 1
        else:
            retries -= 1
            time.sleep(self.timeout_interval)
    raise ApiError(usr_msg='Failed to get proper ' + 'response from backend.')
POST Method Wrapper of the REST API
55,659
def _parse_response(self, respond):
    """Scan the response text for the max-qubit HTTP error; raise
    RegisterSizeError when found, otherwise return True."""
    match = self._max_qubit_error_re.match(respond.text)
    if match:
        raise RegisterSizeError('device register size must be <= {}'.format(match.group(1)))
    return True
parse text of response for HTTP errors
55,660
def _check_backend(self, backend, endpoint):
    """Resolve a backend name to one the QX Platform accepts.

    For the 'experiment' endpoint, known aliases map to fixed names;
    otherwise the name must appear in the available backends list.
    Returns None when the backend is unknown.
    """
    original_backend = backend
    backend = backend.lower()
    if endpoint == 'experiment':
        if backend in self.__names_backend_ibmqxv2:
            return 'real'
        if backend in self.__names_backend_ibmqxv3:
            return 'ibmqx3'
        if backend in self.__names_backend_simulator:
            return 'sim_trivial_2'
    for candidate in self.available_backends():
        if candidate['name'] == original_backend:
            return original_backend
    return None
Check if the name of a backend is valid to run in QX Platform
55,661
def get_execution(self, id_execution, access_token=None, user_id=None):
    """Fetch an execution by id, attaching its code when a codeId is
    present; optionally refreshes token/user id first."""
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    if not self.check_credentials():
        raise CredentialsError('credentials invalid')
    execution = self.req.get('/Executions/' + id_execution)
    if "codeId" in execution:
        execution['code'] = self.get_code(execution["codeId"])
    return execution
Get an execution by its id
55,662
def get_result_from_execution(self, id_execution, access_token=None, user_id=None):
    """Fetch an execution by id and distill its result payload into a
    flat dict (measure/bloch/extraInfo/calibration/creg_labels/time)."""
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    if not self.check_credentials():
        raise CredentialsError('credentials invalid')
    execution = self.req.get('/Executions/' + id_execution)
    result = {}
    if "result" in execution and "data" in execution["result"]:
        data = execution["result"]["data"]
        if data.get('p', None):
            result["measure"] = data["p"]
        if data.get('valsxyz', None):
            result["bloch"] = data["valsxyz"]
        if "additionalData" in data:
            result["extraInfo"] = data["additionalData"]
        if "calibration" in execution:
            result["calibration"] = execution["calibration"]
        if data.get('cregLabels', None):
            result["creg_labels"] = data["cregLabels"]
        if data.get('time', None):
            result["time_taken"] = data["time"]
    return result
Get the result of an execution by the execution id
55,663
def get_code(self, id_code, access_token=None, user_id=None):
    """Fetch a code by id, attaching up to its 3 latest executions."""
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    if not self.check_credentials():
        raise CredentialsError('credentials invalid')
    code = self.req.get('/Codes/' + id_code)
    executions = self.req.get('/Codes/' + id_code + '/executions', '&filter={"limit":3}')
    if isinstance(executions, list):
        code["executions"] = executions
    return code
Get a code by its id
55,664
def get_image_code(self, id_code, access_token=None, user_id=None):
    """Return the PNG export URL for a code object.

    Raises CredentialsError when the stored credentials are not valid.
    """
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    if not self.check_credentials():
        raise CredentialsError('credentials invalid')
    return self.req.get('/Codes/' + id_code + '/export/png/url')
Get the image of a code by its id
55,665
def get_last_codes(self, access_token=None, user_id=None):
    """Return the user's most recent codes, with executions included.

    Raises CredentialsError when the stored credentials are not valid.
    """
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    if not self.check_credentials():
        raise CredentialsError('credentials invalid')
    # NOTE: 'lastest' (sic) is the actual API path segment -- do not "fix" it.
    endpoint = '/users/' + self.req.credential.get_user_id() + '/codes/lastest'
    return self.req.get(endpoint, '&includeExecutions=true')['codes']
Get the last codes of the user
55,666
def run_job(self, job, backend='simulator', shots=1, max_credits=None,
            seed=None, hub=None, group=None, project=None, hpc=None,
            access_token=None, user_id=None):
    """Submit a job to the given backend.

    *job* is either a list/tuple of qasm dicts (legacy payload) or a dict
    holding a qObject. Returns the API response, or an error dict when the
    credentials, seed, or payload shape are invalid.

    Raises BadBackendError for an unknown backend name.
    """
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    if not self.check_credentials():
        return {"error": "Not credentials valid"}

    backend_type = self._check_backend(backend, 'job')
    if not backend_type:
        raise BadBackendError(backend)

    if isinstance(job, (list, tuple)):
        # Legacy QASM list: strip version headers the endpoint rejects.
        for qasm in job:
            qasm['qasm'] = qasm['qasm'].replace('IBMQASM 2.0;', '')
            qasm['qasm'] = qasm['qasm'].replace('OPENQASM 2.0;', '')
        payload = {'qasms': job, 'shots': shots, 'backend': {}}
        if max_credits:
            payload['maxCredits'] = max_credits
        if seed and len(str(seed)) < 11 and str(seed).isdigit():
            payload['seed'] = seed
        elif seed:
            # Seed must be a non-negative integer of at most 10 digits.
            return {"error": "Not seed allowed. Max 10 digits."}
        payload['backend']['name'] = backend_type
    elif isinstance(job, dict):
        payload = {'qObject': job, 'backend': {}}
        payload['backend']['name'] = backend_type
    else:
        return {"error": "Not a valid data to send"}

    if hpc:
        payload['hpc'] = hpc

    url = get_job_url(self.config, hub, group, project)
    return self.req.post(url, data=json.dumps(payload))
Execute a job
55,667
def get_job(self, id_job, hub=None, group=None, project=None,
            access_token=None, user_id=None):
    """Fetch a job by id.

    Each qasm entry's nested ``result`` is flattened into a top-level
    ``data`` mapping for easier consumption. Returns an error dict when
    credentials are invalid or *id_job* is falsy.
    """
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    if not self.check_credentials():
        return {"status": 'Error', "error": "Not credentials valid"}
    if not id_job:
        return {"status": 'Error', "error": "Job ID not specified"}

    url = get_job_url(self.config, hub, group, project) + '/' + id_job
    job = self.req.get(url)
    if 'qasms' in job:
        for qasm in job['qasms']:
            if ('result' in qasm) and ('data' in qasm['result']):
                # Merge the remaining result keys into the data dict, then
                # drop the now-redundant 'result' wrapper.
                merged = qasm['result'].pop('data')
                merged.update(qasm['result'])
                qasm['data'] = merged
                del qasm['result']
    return job
Get the information about a job by its id
55,668
def get_jobs(self, limit=10, skip=0, backend=None, only_completed=False,
             filter=None, hub=None, group=None, project=None,
             access_token=None, user_id=None):
    """List the user's jobs, newest first.

    An explicit *filter* dict replaces the ``where`` clause entirely;
    otherwise *backend* and *only_completed* are used to build it.
    Returns an error dict when credentials are invalid.
    """
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    if not self.check_credentials():
        return {"error": "Not credentials valid"}

    query = {"order": "creationDate DESC",
             "limit": limit,
             "skip": skip,
             "where": {}}
    if filter is not None:
        query['where'] = filter
    else:
        if backend is not None:
            query['where']['backend.name'] = backend
        if only_completed:
            query['where']['status'] = 'COMPLETED'

    url = get_job_url(self.config, hub, group, project)
    return self.req.get(url, '&filter=' + json.dumps(query))
Get the information about the user jobs
55,669
def get_status_job(self, id_job, hub=None, group=None, project=None,
                   access_token=None, user_id=None):
    """Fetch only the status of a job by id.

    Returns an error dict when credentials are invalid or *id_job* is falsy.
    """
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    if not self.check_credentials():
        return {"status": 'Error', "error": "Not credentials valid"}
    if not id_job:
        return {"status": 'Error', "error": "Job ID not specified"}
    url = get_job_url(self.config, hub, group, project) + '/' + id_job + '/status'
    return self.req.get(url)
Get the status about a job by its id
55,670
def cancel_job(self, id_job, hub=None, group=None, project=None,
               access_token=None, user_id=None):
    """Request cancellation of a job by id.

    Returns an error dict when credentials are invalid or *id_job* is falsy.
    """
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    if not self.check_credentials():
        return {"status": 'Error', "error": "Not credentials valid"}
    if not id_job:
        return {"status": 'Error', "error": "Job ID not specified"}
    url = get_job_url(self.config, hub, group, project) + '/{}/cancel'.format(id_job)
    return self.req.post(url)
Cancel a job by its id
55,671
def backend_status(self, backend='ibmqx4', access_token=None, user_id=None):
    """Return the queue status of a backend.

    Result keys (each only when reported by the API): 'available', 'busy',
    'pending_jobs'; 'backend' is always set to the resolved backend name.

    Raises BadBackendError for an unknown backend name.
    """
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    backend_type = self._check_backend(backend, 'status')
    if not backend_type:
        raise BadBackendError(backend)
    raw = self.req.get('/Backends/' + backend_type + '/queue/status',
                       with_token=False)
    report = {}
    if 'state' in raw:
        report['available'] = bool(raw['state'])
    if 'busy' in raw:
        report['busy'] = bool(raw['busy'])
    if 'lengthQueue' in raw:
        report['pending_jobs'] = raw['lengthQueue']
    report['backend'] = backend_type
    return report
Get the status of a chip
55,672
def backend_calibration(self, backend='ibmqx4', hub=None, access_token=None,
                        user_id=None):
    """Return the calibration data of a real chip.

    Simulator backends have no calibration and yield an empty dict.
    Raises CredentialsError / BadBackendError on invalid input.
    """
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    if not self.check_credentials():
        raise CredentialsError('credentials invalid')
    backend_type = self._check_backend(backend, 'calibration')
    if not backend_type:
        raise BadBackendError(backend)
    if backend_type in self.__names_backend_simulator:
        return {}
    stats_url = get_backend_stats_url(self.config, hub, backend_type)
    calibration = self.req.get(stats_url + '/calibration')
    if not bool(calibration):
        return {}
    calibration["backend"] = backend_type
    return calibration
Get the calibration of a real chip
55,673
def available_backends(self, hub=None, group=None, project=None,
                       access_token=None, user_id=None):
    """List the backends currently switched 'on' in the QX Platform.

    Raises CredentialsError when the stored credentials are not valid.
    """
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    if not self.check_credentials():
        raise CredentialsError('credentials invalid')
    url = get_backend_url(self.config, hub, group, project)
    response = self.req.get(url)
    # A dict response here signals an API error payload, not a backend list.
    if (response is not None) and isinstance(response, dict):
        return []
    return [entry for entry in response if entry.get('status') == 'on']
Get the backends available to use in the QX Platform
55,674
def available_backend_simulators(self, access_token=None, user_id=None):
    """List the simulator backends currently switched 'on'.

    Raises CredentialsError when the stored credentials are not valid.
    """
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    if not self.check_credentials():
        raise CredentialsError('credentials invalid')
    response = self.req.get('/Backends')
    # A dict response here signals an API error payload, not a backend list.
    if (response is not None) and isinstance(response, dict):
        return []
    return [entry for entry in response
            if entry.get('status') == 'on' and entry.get('simulator') is True]
Get the backend simulators available to use in the QX Platform
55,675
def get_my_credits(self, access_token=None, user_id=None):
    """Return the user's credit information, with internal fields removed.

    Returns an empty dict when the profile carries no credit section.
    Raises CredentialsError when the stored credentials are not valid.
    """
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    if not self.check_credentials():
        raise CredentialsError('credentials invalid')
    user_data = self.req.get('/users/' + self.req.credential.get_user_id())
    if "credit" not in user_data:
        return {}
    credit = user_data["credit"]
    # Drop bookkeeping fields callers should not see.
    for hidden in ("promotionalCodesUsed", "lastRefill"):
        if hidden in credit:
            del credit[hidden]
    return credit
Get the credits by user to use in the QX Platform
55,676
def trace(self, predicate):
    """Start tracing with *predicate* as the event handler; returns self.

    Installs this object as both the threading and sys trace hook
    (threading only when threading_support is None or truthy), saving the
    previous hooks so they can be restored later.
    """
    self._handler = predicate
    threads_wanted = self.threading_support
    if threads_wanted is None or threads_wanted:
        # Remember whatever hook threading had so it can be restored on stop.
        self._threading_previous = getattr(threading, '_trace_hook', None)
        threading.settrace(self)
    self._previous = sys.gettrace()
    sys.settrace(self)
    return self
Starts tracing with the given callable .
55,677
def And(*predicates, **kwargs):
    """Build an AND predicate; evaluation stops at the first False result.

    Keyword arguments are folded into a single Query predicate.
    """
    if kwargs:
        predicates = predicates + (Query(**kwargs),)
    return _flatten(_And, *predicates)
And predicate . Returns False at the first sub - predicate that returns False .
55,678
def Or(*predicates, **kwargs):
    """Build an OR predicate; evaluation stops at the first True result.

    Each keyword argument becomes its own Query predicate (unlike And,
    which folds them into one).
    """
    if kwargs:
        extras = tuple(Query(**{key: value}) for key, value in kwargs.items())
        predicates = predicates + extras
    return _flatten(_Or, *predicates)
Or predicate . Returns True at the first sub - predicate that returns True .
55,679
def wrap(function_to_trace=None, **trace_options):
    """Decorator that traces the wrapped function while it runs.

    Usable bare (``@wrap``) or with options (``@wrap(local=True, ...)``).
    ``local=True`` restricts tracing to the decorated function's own frame;
    all remaining options are forwarded to ``trace``.
    """
    def tracing_decorator(func):
        # Pop 'local' ONCE at decoration time. The previous code popped it
        # inside the wrapper, mutating the shared trace_options dict, so
        # every call after the first silently lost the 'local' setting.
        local = trace_options.pop('local', False)

        @functools.wraps(func)
        def tracing_wrapper(*args, **kwargs):
            predicates = []
            if local:
                predicates.append(Q(depth_lt=2))
            # Stop tracing once the traced call returns at depth 0.
            predicates.append(~When(Q(calls_gt=0, depth=0) & ~Q(kind='return'), Stop))
            local_tracer = trace(*predicates, **trace_options)
            try:
                return func(*args, **kwargs)
            finally:
                local_tracer.stop()
        return tracing_wrapper

    if function_to_trace is None:
        return tracing_decorator
    else:
        return tracing_decorator(function_to_trace)
Functions decorated with this will be traced .
55,680
def threadid(self):
    """Ident of the thread this event came from.

    Returns None when that thread is the main thread, so main-thread
    events are easy to distinguish.
    """
    ident = self.thread.ident
    main = get_main_thread()
    if main is not None and ident == main.ident:
        return None
    return ident
Current thread ident . If current thread is main thread then it returns None .
55,681
def filename(self, exists=os.path.exists, cython_suffix_re=CYTHON_SUFFIX_RE):
    """Absolute path of the source file behind this frame.

    Normalizes compiled artifacts back to their source: .pyc/.pyo -> .py,
    Jython $py.class -> .py, and Cython .so/.pyd -> the .pyx/.py file when
    one exists next to it. The defaults are bound at definition time as a
    micro-optimization; callers never pass them.
    """
    name = self.frame.f_globals.get('__file__', '')
    if name is None:
        name = ''
    if name.endswith(('.pyc', '.pyo')):
        return name[:-1]
    if name.endswith('$py.class'):
        return name[:-9] + ".py"
    if name.endswith(('.so', '.pyd')):
        stem = cython_suffix_re.sub('', name)
        for ext in ('.pyx', '.py'):
            candidate = stem + ext
            if exists(candidate):
                return candidate
    return name
A string with absolute path to file .
55,682
def stdlib(self):
    """True when this frame's file belongs to the standard library.

    pkg_resources and anything under site-packages are explicitly treated
    as user code even though they live under sys.prefix.
    """
    module = self.module
    if module == 'pkg_resources' or module.startswith('pkg_resources.'):
        # Ships with setuptools but is not stdlib.
        return False
    path = self.filename
    if path.startswith(SITE_PACKAGES_PATHS):
        # Third-party installs live here even though it's under sys.prefix.
        return False
    return path.startswith(SYS_PREFIX_PATHS)
A boolean flag . True if frame is in stdlib .
55,683
def _iter_symbols(code):
    """Yield every variable name referenced in the given expression string."""
    tree = ast.parse(code)
    for node in ast.walk(tree):
        if isinstance(node, ast.Name):
            yield node.id
Iterate all the variable names in the given expression .
55,684
def __make_request_url(self, teststep_dict, entry_json):
    """Fill the teststep's url, params and name from a HAR entry.

    When the entry carries queryString items, the query is stripped from
    the stored url and kept separately under 'params'. Exits the process
    when the entry has no url at all.
    """
    query_params = utils.convert_list_to_dict(
        entry_json["request"].get("queryString", []))
    url = entry_json["request"].get("url")
    if not url:
        logging.exception("url missed in request.")
        sys.exit(1)
    parsed = urlparse.urlparse(url)
    if query_params:
        # Keep the stored url bare; the query lives in 'params'.
        teststep_dict["request"]["url"] = parsed._replace(query='').geturl()
        teststep_dict["request"]["params"] = query_params
    else:
        teststep_dict["request"]["url"] = url
    teststep_dict["name"] = parsed.path
parse HAR entry request url and queryString and make teststep url and params
55,685
def __make_request_method(self, teststep_dict, entry_json):
    """Copy the HTTP method from the HAR entry into the teststep.

    Exits the process when the entry carries no method.
    """
    http_method = entry_json["request"].get("method")
    if not http_method:
        logging.exception("method missed in request.")
        sys.exit(1)
    teststep_dict["request"]["method"] = http_method
parse HAR entry request method and make teststep method .
55,686
def __make_request_headers(self, teststep_dict, entry_json):
    """Copy request headers from the HAR entry into the teststep.

    Headers whose lowercase name appears in IGNORE_REQUEST_HEADERS are
    skipped; nothing is stored when no header survives the filter.
    """
    kept = {
        header["name"]: header["value"]
        for header in entry_json["request"].get("headers", [])
        if header["name"].lower() not in IGNORE_REQUEST_HEADERS
    }
    if kept:
        teststep_dict["request"]["headers"] = kept
parse HAR entry request headers and make teststep headers . header in IGNORE_REQUEST_HEADERS will be ignored .
55,687
def _make_request_data(self, teststep_dict, entry_json):
    """Extract the request body of POST/PUT/PATCH entries into the teststep.

    JSON bodies are parsed and stored under 'json'; form-urlencoded bodies
    are converted to a dict; everything else is stored as-is under 'data'.
    """
    if entry_json["request"].get("method") not in ("POST", "PUT", "PATCH"):
        return
    raw = entry_json["request"].get("postData", {})
    mime_type = raw.get("mimeType")
    if "text" in raw:
        body = raw.get("text")
    else:
        body = utils.convert_list_to_dict(raw.get("params", []))
    storage_key = "data"
    if mime_type:
        if mime_type.startswith("application/json"):
            try:
                body = json.loads(body)
                storage_key = "json"
            except JSONDecodeError:
                # Keep the raw text under 'data' when it isn't valid JSON.
                pass
        elif mime_type.startswith("application/x-www-form-urlencoded"):
            body = utils.convert_x_www_form_urlencoded_to_dict(body)
    teststep_dict["request"][storage_key] = body
parse HAR entry request data and make teststep request data
55,688
# Build the teststep's "validate" assertions from a HAR entry's response:
# always an equality check on the status code; a Content-Type check when the
# response declares one; and, for JSON bodies (decoding base64 first when the
# HAR flags it), one equality check per top-level scalar field. Empty,
# non-JSON, unparseable, or non-dict bodies only get the status/header checks.
def _make_validate ( self , teststep_dict , entry_json ) : teststep_dict [ "validate" ] . append ( { "eq" : [ "status_code" , entry_json [ "response" ] . get ( "status" ) ] } ) resp_content_dict = entry_json [ "response" ] . get ( "content" ) headers_mapping = utils . convert_list_to_dict ( entry_json [ "response" ] . get ( "headers" , [ ] ) ) if "Content-Type" in headers_mapping : teststep_dict [ "validate" ] . append ( { "eq" : [ "headers.Content-Type" , headers_mapping [ "Content-Type" ] ] } ) text = resp_content_dict . get ( "text" ) if not text : return mime_type = resp_content_dict . get ( "mimeType" ) if mime_type and mime_type . startswith ( "application/json" ) : encoding = resp_content_dict . get ( "encoding" ) if encoding and encoding == "base64" : content = base64 . b64decode ( text ) . decode ( 'utf-8' ) else : content = text try : resp_content_json = json . loads ( content ) except JSONDecodeError : logging . warning ( "response content can not be loaded as json: {}" . format ( content . encode ( "utf-8" ) ) ) return if not isinstance ( resp_content_json , dict ) : return for key , value in resp_content_json . items ( ) : if isinstance ( value , ( dict , list ) ) : continue teststep_dict [ "validate" ] . append ( { "eq" : [ "content.{}" . format ( key ) , value ] } )
parse HAR entry response and make teststep validate .
55,689
def load_har_log_entries(file_path):
    """Load a HAR file and return its ``log.entries`` list.

    Logs an error and exits the process when the file does not have the
    expected HAR layout.
    """
    # Open read-only: the old "r+" mode needlessly required write permission.
    # utf-8-sig transparently strips the BOM some capture tools prepend.
    with io.open(file_path, "r", encoding="utf-8-sig") as f:
        try:
            content_json = json.load(f)
            return content_json["log"]["entries"]
        except (KeyError, TypeError):
            logging.error("HAR file content error: {}".format(file_path))
            sys.exit(1)
load HAR file and return log entries list
55,690
def x_www_form_urlencoded(post_data):
    """Serialize a dict as an x-www-form-urlencoded string.

    Non-dict input (e.g. an already-encoded string) is returned unchanged.
    Values are NOT percent-encoded here; callers pass pre-safe data.
    """
    if not isinstance(post_data, dict):
        return post_data
    pairs = (u"{}={}".format(key, value) for key, value in post_data.items())
    return "&".join(pairs)
convert origin dict to x - www - form - urlencoded
55,691
def convert_x_www_form_urlencoded_to_dict(post_data):
    """Parse an x-www-form-urlencoded string into a dict.

    Values are percent-decoded. Non-string input is returned unchanged.
    Raises Exception when a pair carries no '=' separator at all.
    """
    if not isinstance(post_data, str):
        return post_data
    converted_dict = {}
    for k_v in post_data.split("&"):
        try:
            # maxsplit=1 keeps '=' characters inside the value intact
            # (the old unbounded split raised on values like "a=b=c").
            key, value = k_v.split("=", 1)
        except ValueError:
            raise Exception(
                "Invalid x_www_form_urlencoded data format: {}".format(post_data))
        converted_dict[key] = unquote(value)
    return converted_dict
convert x_www_form_urlencoded data to dict
55,692
def dump_yaml(testcase, yaml_file):
    """Write the testcase structure to *yaml_file* as block-style YAML."""
    logging.info("dump testcase to YAML format.")
    with io.open(yaml_file, 'w', encoding="utf-8") as stream:
        yaml.dump(testcase, stream,
                  allow_unicode=True,
                  default_flow_style=False,
                  indent=4)
    logging.info("Generate YAML testcase successfully: {}".format(yaml_file))
dump HAR entries to yaml testcase
55,693
# Write the testcase structure to json_file as pretty-printed JSON.
# NOTE(review): `ensure_ascii` is read from enclosing/module scope and is not
# defined anywhere in this view -- confirm it exists where this function
# actually lives. The bytes/decode branch looks like a Python 2 leftover;
# json.dumps returns str on Python 3.
def dump_json ( testcase , json_file ) : logging . info ( "dump testcase to JSON format." ) with io . open ( json_file , 'w' , encoding = "utf-8" ) as outfile : my_json_str = json . dumps ( testcase , ensure_ascii = ensure_ascii , indent = 4 ) if isinstance ( my_json_str , bytes ) : my_json_str = my_json_str . decode ( "utf-8" ) outfile . write ( my_json_str ) logging . info ( "Generate JSON testcase successfully: {}" . format ( json_file ) )
dump HAR entries to json testcase
55,694
def prepare_request(self, request):
    """Attach the current request-ID header (when one is known) before
    delegating to the parent Session's prepare_request."""
    # `local` is thread-local storage; request_id may not be set yet.
    request_id = getattr(local, 'request_id', NO_REQUEST_ID)
    if self.request_id_header and request_id != NO_REQUEST_ID:
        request.headers[self.request_id_header] = request_id
    return super(Session, self).prepare_request(request)
Include the request ID if available in the outgoing request
55,695
def _get(self, url, params=None, headers=None):
    """Issue a GET after validating/normalizing the url via clean_url."""
    target = self.clean_url(url)
    return requests.get(target,
                        params=params,
                        verify=self.verify,
                        timeout=self.timeout,
                        headers=headers)
Wraps a GET request with a url check
55,696
def _post(self, url, data=None, json=None, params=None, headers=None):
    """Issue a POST after validating/normalizing the url via clean_url."""
    target = self.clean_url(url)
    return requests.post(target,
                         data=data,
                         json=json,
                         params=params,
                         headers=headers,
                         timeout=self.timeout,
                         verify=self.verify)
Wraps a POST request with a url check
55,697
def expand(self, url):
    """Follow *url* and return the final (expanded) URL.

    Raises ExpandingErrorException when the request does not succeed.
    """
    response = self._get(self.clean_url(url))
    if not response.ok:
        raise ExpandingErrorException
    return response.url
Base expand method . Only visits the link and returns the response url
55,698
def clean_url(url):
    """Normalize and validate a URL.

    Prepends ``http://`` when no scheme is present, then checks the result
    against URL_RE; raises BadURLException when it does not match.
    """
    has_scheme = url.startswith(('http://', 'https://'))
    if not has_scheme:
        url = f'http://{url}'
    if URL_RE.match(url) is None:
        raise BadURLException(f'{url} is not valid')
    return url
URL Validation function
55,699
def create_function_from_request_pdu(pdu):
    """Instantiate the Modbus function matching the request PDU's code.

    Raises IllegalFunctionError for codes with no registered handler.
    """
    code = get_function_code_from_request_pdu(pdu)
    try:
        handler = function_code_to_function_map[code]
    except KeyError:
        raise IllegalFunctionError(code)
    return handler.create_from_request_pdu(pdu)
Return function instance based on request PDU .