idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
52,000
def merge_pages(self, replacements):
    """Deprecated alias for merge_templates() using a page-break separator."""
    warnings.warn(
        "merge_pages has been deprecated in favour of merge_templates",
        category=DeprecationWarning,
        stacklevel=2,
    )
    self.merge_templates(replacements, "page_break")
Deprecated method .
52,001
def importpath(path, error_text=None):
    """Import a value by dotted *path*.

    The path may name a module, class, object attribute or method.
    If resolution fails and *error_text* is not None, raise
    ImproperlyConfigured with a user-friendly message; otherwise
    re-raise the original ImportError.
    """
    result = None
    attrs = []
    parts = path.split('.')
    exception = None
    # Peel names off the right end until the remaining prefix imports.
    while parts:
        try:
            result = __import__('.'.join(parts), {}, {}, [''])
        except ImportError as e:
            if exception is None:
                exception = e  # remember the first (most specific) failure
            attrs = parts[-1:] + attrs
            parts = parts[:-1]
        else:
            break
    for attr in attrs:
        try:
            result = getattr(result, attr)
        except (AttributeError, ValueError):
            if error_text is not None:
                # Fixed message: the path could NOT be imported.
                raise ImproperlyConfigured(
                    'Error: %s cannot import "%s"' % (error_text, path))
            else:
                raise exception
    return result
Import a value by the specified dotted path. The value can be a module, class, object attribute, or method. If error_text is not None and the import fails, ImproperlyConfigured is raised with a user-friendly message.
52,002
def get_booking(request):
    """Return the in-progress booking for the current user/session, or None."""
    booking = None
    if request.user.is_authenticated():
        # Authenticated users: look up by user + in-progress status.
        try:
            booking = Booking.objects.get(
                user=request.user, booking_status__slug='inprogress')
        except Booking.DoesNotExist:
            pass
        return booking
    # Anonymous users: the booking is tied to the current session.
    session = Session.objects.get(session_key=request.session.session_key)
    try:
        booking = Booking.objects.get(session=session)
    except Booking.DoesNotExist:
        pass
    return booking
Returns the booking that is in progress for the current user or None
52,003
def persist_booking(booking, user):
    """Tie a session-held in-progress booking to *user* at login time."""
    if booking is None:
        return
    # Drop any other in-progress bookings this user already owns.
    Booking.objects.filter(
        user=user, booking_status__slug='inprogress'
    ).exclude(pk=booking.pk).delete()
    booking.session = None
    booking.user = user
    booking.save()
Ties an in - progress booking from a session to a user when the user logs in .
52,004
def compare_config(self, target, init=True, indent_level=0):
    """Return the FortiOS commands that turn this config into *target*."""
    if init:
        fwd, bwd = self.full_path_fwd, self.full_path_bwd
    else:
        fwd, bwd = self.rel_path_fwd, self.rel_path_bwd

    indent = ' ' * (4 * indent_level)

    # Wrap the top-level diff in the proper vdom/global context.
    if indent_level == 0 and self.vdom is not None:
        if self.vdom == 'global':
            pre = 'conf global\n'
        else:
            pre = 'conf vdom\n edit %s\n' % self.vdom
        post = 'end'
    else:
        pre, post = '', ''

    pre_block = '%s%s' % (indent, fwd)
    post_block = '%s%s' % (indent, bwd)

    my_params = self.parameters.keys()
    ot_params = target.parameters.keys()
    pieces = []

    # Parameters we have that the target lacks get unset; changed values get set.
    for param in my_params:
        if param not in ot_params:
            pieces.append(' %sunset %s\n' % (indent, param))
        elif (str(self.get_param(param)).replace('"', '')
                != str(target.get_param(param)).replace('"', '')):
            pieces.append(' %sset %s %s\n' % (indent, param, target.get_param(param)))
    # Parameters only the target has get set.
    for param in ot_params:
        if param not in my_params:
            pieces.append(' %sset %s %s\n' % (indent, param, target.get_param(param)))

    my_blocks = self.sub_blocks.keys()
    ot_blocks = target.sub_blocks.keys()

    for block_name in my_blocks:
        if block_name not in ot_blocks:
            pieces.append(" %sdelete %s\n" % (indent, block_name))
        else:
            pieces.append(self[block_name].compare_config(
                target[block_name], False, indent_level + 1))
    for block_name in ot_blocks:
        if block_name not in my_blocks:
            pieces.append(target[block_name].to_text(True, indent_level + 1, True))

    text = ''.join(pieces)
    if not text:
        return ''
    return '%s%s%s%s%s' % (pre, pre_block, text, post_block, post)
This method will return all the necessary commands to get from the config we are in to the target config .
52,005
def to_text(self, relative=False, indent_level=0, clean_empty_block=False):
    """Render this config block as FortiOS CLI text (copy&paste ready)."""
    if relative:
        fwd, bwd = self.rel_path_fwd, self.rel_path_bwd
    else:
        fwd, bwd = self.full_path_fwd, self.full_path_bwd
    indent = ' ' * (4 * indent_level)
    pre = '%s%s' % (indent, fwd)
    post = '%s%s' % (indent, bwd)

    body = []
    for param, value in self.iterparams():
        body.append(' %sset %s %s\n' % (indent, param, value))
    for key, block in self.iterblocks():
        body.append(block.to_text(True, indent_level + 1))
    text = ''.join(body)

    # Optionally omit the pre/post wrapper for blocks with no content.
    if text or not clean_empty_block:
        text = '%s%s%s' % (pre, text, post)
    return text
This method returns the object model in text format . You should be able to copy&paste this text into any device running a supported version of FortiOS .
52,006
def open(self):
    """Open the SSH session with the device, honouring ~/.ssh/config."""
    logger.debug('Connecting to device %s, vdom %s' % (self.hostname, self.vdom))
    self.ssh = paramiko.SSHClient()
    self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    cfg = {
        'hostname': self.hostname,
        'timeout': self.timeout,
        'username': self.username,
        'password': self.password,
        'key_filename': self.keyfile,
    }
    user_config_file = os.path.expanduser("~/.ssh/config")
    if os.path.exists(user_config_file):
        ssh_config = paramiko.SSHConfig()
        with io.open(user_config_file, 'rt', encoding='utf-8') as f:
            ssh_config.parse(f)
        host_conf = ssh_config.lookup(self.hostname)
        if host_conf:
            # Let the user's ssh config override/augment our defaults.
            if 'proxycommand' in host_conf:
                cfg['sock'] = paramiko.ProxyCommand(host_conf['proxycommand'])
            if 'user' in host_conf:
                cfg['username'] = host_conf['user']
            if 'identityfile' in host_conf:
                cfg['key_filename'] = host_conf['identityfile']
            if 'hostname' in host_conf:
                cfg['hostname'] = host_conf['hostname']
    self.ssh.connect(**cfg)
Opens the ssh session with the device .
52,007
def _read_wrapper(data):
    """Coerce a read result to unicode text; raw ints become their character."""
    value = chr(data) if isinstance(data, int) else data
    return py23_compat.text_type(value)
Ensure unicode always returned on read .
52,008
def _parse_batch_lastlog ( last_log ) : regexp = re . compile ( '(-?[0-9]\d*):\W+(.*)' ) wrong_commands = list ( ) for line in last_log : result = regexp . match ( line ) if result is not None : status_code = result . group ( 1 ) command = result . group ( 2 ) if int ( status_code ) < 0 : wrong_commands . append ( ( status_code , command ) ) return wrong_commands
This static method will help reading the result of the commit command by command .
52,009
def _reload_config(self, reload_original_config):
    """Refresh running_config from the live device, optionally snapshotting it."""
    if reload_original_config:
        # Keep the pre-reload state around under the name 'original'.
        self.original_config = self.running_config
        self.original_config.set_name('original')
    paths = self.running_config.get_paths()
    self.running_config = FortiConfig('running', vdom=self.vdom)
    for path in paths:
        self.load_config(path, empty_candidate=True)
This command will update the running config from the live device .
52,010
def generate_states(self, initial_condition=None, with_noise=True, stateseq=None):
    """Jointly sample the discrete and continuous latent state sequences."""
    from pybasicbayes.util.stats import sample_discrete
    T, K, n = self.T, self.num_states, self.D_latent
    A = self.trans_matrix

    # -1 marks discrete states that still need to be sampled.
    if stateseq is None:
        dss = -1 * np.ones(T, dtype=np.int32)
    else:
        dss = stateseq.astype(np.int32)
    assert dss.shape == (T,)

    gss = np.empty((T, n), dtype='double')
    if initial_condition is None:
        if dss[0] == -1:
            dss[0] = sample_discrete(self.pi_0)
        gss[0] = self.init_dynamics_distns[dss[0]].rvs()
    else:
        dss[0] = initial_condition[0]
        gss[0] = initial_condition[1]

    for t in range(1, T):
        # Previous continuous state concatenated with the exogenous input.
        xin = np.hstack((gss[t - 1][None, :], self.inputs[t - 1][None, :]))
        if with_noise:
            if dss[t] == -1:
                dss[t] = sample_discrete(A[dss[t - 1], :])
            gss[t] = self.dynamics_distns[dss[t - 1]].rvs(x=xin, return_xy=False)
        else:
            if dss[t] == -1:
                dss[t] = np.argmax(A[dss[t - 1], :])
            gss[t] = self.dynamics_distns[dss[t - 1]].predict(xin)
        assert np.all(np.isfinite(gss[t])), "SLDS appears to be unstable!"

    self.stateseq = dss
    self.gaussian_states = gss
Jointly sample the discrete and continuous states
52,011
def heldout_log_likelihood(self, test_mask=None):
    """Compute the log likelihood of masked-out data given the latent states.

    Bug fix: the multi-emission branch accumulated ``hll`` but never
    returned it (the method silently returned None); it now returns it.
    """
    if test_mask is None:
        if self.mask is None:
            return 0
        test_mask = ~self.mask
    xs = np.hstack((self.gaussian_states, self.inputs))
    if self.single_emission:
        return self.emission_distns[0].log_likelihood(
            (xs, self.data), mask=test_mask).sum()
    hll = 0
    z = self.stateseq
    for idx, ed in enumerate(self.emission_distns):
        hll += ed.log_likelihood(
            (xs[z == idx], self.data[z == idx]),
            mask=test_mask[z == idx]).sum()
    return hll  # previously missing
Compute the log likelihood of the masked data given the latent discrete and continuous states .
52,012
def empirical_rate(data, sigma=3.0):
    """Smooth count data with a Gaussian kernel to get an empirical rate.

    Fixes: ``np.float`` was removed in NumPy 1.24 (use builtin ``float``),
    and ``scipy.ndimage.filters`` no longer exists in modern SciPy
    (import directly from ``scipy.ndimage``).
    """
    from scipy.ndimage import gaussian_filter1d
    # Small additive floor keeps the rate strictly positive.
    return 0.001 + gaussian_filter1d(data.astype(float), sigma, axis=0)
Smooth count data to get an empirical rate
52,013
def get_empirical_ar_params(train_datas, params):
    """Fit one AR model to all of *train_datas* and return centered prior params."""
    assert isinstance(train_datas, list) and len(train_datas) > 0
    datadimension = train_datas[0].shape[1]
    assert params["nu_0"] > datadimension + 1

    obs_params = dict(
        nu_0=params["nu_0"],
        S_0=params['S_0'],
        M_0=params['M_0'],
        K_0=params['K_0'],
        affine=params['affine'],
    )
    obs_distn = AutoRegression(**obs_params)
    obs_distn.max_likelihood(train_datas)

    # Center the prior on the fitted dynamics.
    obs_params["S_0"] = obs_distn.sigma * (params["nu_0"] - datadimension - 1)
    obs_params["M_0"] = obs_distn.A.copy()
    return obs_params
Estimate the parameters of an AR observation model by fitting a single AR model to the entire dataset .
52,014
def capture(target_url,
            user_agent="savepagenow (https://github.com/pastpages/savepagenow)",
            accept_cache=False):
    """Archive *target_url* with the Wayback Machine and return the archive URL.

    Raises BlockedByRobots, WaybackRuntimeError or CachedPage on failure.
    """
    domain = "https://web.archive.org"
    save_url = urljoin(domain, "/save/")
    request_url = save_url + target_url
    headers = {'User-Agent': user_agent}
    response = requests.get(request_url, headers=headers)

    if 'X-Archive-Wayback-Runtime-Error' in response.headers:
        error_header = response.headers['X-Archive-Wayback-Runtime-Error']
        if error_header == 'RobotAccessControlException: Blocked By Robots':
            raise BlockedByRobots("archive.org returned blocked by robots.txt error")
        raise WaybackRuntimeError(error_header)

    if response.status_code in [403, 502]:
        raise WaybackRuntimeError(response.headers)

    try:
        archive_id = response.headers['Content-Location']
    except KeyError:
        raise WaybackRuntimeError(dict(
            status_code=response.status_code, headers=response.headers))

    archive_url = urljoin(domain, archive_id)

    # A cache hit means archive.org served a recent snapshot instead of saving.
    cached = response.headers.get('X-Page-Cache') == 'HIT'
    if cached and not accept_cache:
        raise CachedPage(
            "archive.org returned a cached version of this page: {}".format(archive_url))
    return archive_url
Archives the provided URL using archive.org's Wayback Machine.
52,015
def capture_or_cache(target_url,
                     user_agent="savepagenow (https://github.com/pastpages/savepagenow)"):
    """Archive *target_url*, accepting a cached copy if one was just made.

    Returns (archive_url, True) for a fresh capture and
    (archive_url, False) when the cached snapshot was accepted.
    """
    try:
        fresh = capture(target_url, user_agent=user_agent, accept_cache=False)
        return fresh, True
    except CachedPage:
        cached = capture(target_url, user_agent=user_agent, accept_cache=True)
        return cached, False
Archives the provided URL using archive.org's Wayback Machine, unless the page has been recently captured (in which case the cached capture is accepted).
52,016
def get_unique_id(element):
    """Return an id for *element* unique among the global ``ids`` list."""
    this_id = make_id(element)
    # Regenerate until we get an id we haven't handed out yet.
    while this_id in ids:
        this_id = make_id(element)
    ids.append(this_id)
    return ids[-1]
Returns a unique id for a given element
52,017
def get_xml_type(val):
    """Return the value for an XML ``type`` attribute describing *val*.

    Fix: ``collections.Iterable`` was removed in Python 3.10; use
    ``collections.abc.Iterable`` instead (imported locally so the change
    is self-contained).
    """
    from collections.abc import Iterable
    type_name = type(val).__name__
    if type_name in ('str', 'unicode'):
        return 'str'
    if type_name in ('int', 'long'):
        return 'int'
    if type_name == 'float':
        return 'float'
    if type_name == 'bool':
        return 'bool'
    if isinstance(val, numbers.Number):
        return 'number'
    if type_name == 'NoneType':
        return 'null'
    if isinstance(val, dict):
        return 'dict'
    if isinstance(val, Iterable):
        return 'list'
    return type_name
Returns the data type for the xml type attribute
52,018
def make_attrstring(attr):
    """Serialize a dict of attributes as ``' key="val" ...'`` ('' when empty)."""
    attrstring = ' '.join('%s="%s"' % (k, v) for k, v in attr.items())
    # Prefix a single space only when there is something to emit.
    return '%s%s' % (' ' if attrstring != '' else '', attrstring)
Returns an attribute string in the form key = val
52,019
def key_is_valid_xml(key):
    """Return True when *key* can serve as an XML element name."""
    LOG.info('Inside key_is_valid_xml(). Testing "%s"' % (unicode_me(key)))
    test_xml = '<?xml version="1.0" encoding="UTF-8" ?><%s>foo</%s>' % (key, key)
    try:
        parseString(test_xml)
    except Exception:  # minidom raises a variety of errors for bad names
        return False
    return True
Checks that a key is a valid XML name
52,020
def make_valid_xml_name(key, attr):
    """Coerce *key* into a valid XML name, stashing the original in *attr* if needed."""
    LOG.info('Inside make_valid_xml_name(). Testing key "%s" with attr "%s"' % (
        unicode_me(key), unicode_me(attr)))
    key = escape_xml(key)
    attr = escape_xml(attr)
    if key_is_valid_xml(key):
        return key, attr
    # Purely numeric keys get an 'n' prefix; embedded spaces become underscores.
    if key.isdigit():
        return 'n%s' % (key), attr
    if key_is_valid_xml(key.replace(' ', '_')):
        return key.replace(' ', '_'), attr
    # Last resort: a generic 'key' element carrying the original in a name attr.
    attr['name'] = key
    return 'key', attr
Tests an XML name and fixes it if invalid
52,021
def convert(obj, ids, attr_type, item_func, cdata, parent='root'):
    """Dispatch *obj* to the converter matching its type; return XML text."""
    LOG.info('Inside convert(). obj type is: "%s", obj="%s"' % (
        type(obj).__name__, unicode_me(obj)))
    item_name = item_func(parent)
    # Order matters: bool is a Number subclass, so scalars are checked first
    # by exact type, then datetimes, then bool, None, dict, generic iterables.
    if isinstance(obj, numbers.Number) or type(obj) in (str, unicode):
        return convert_kv(item_name, obj, attr_type, cdata)
    if hasattr(obj, 'isoformat'):  # datetime-like
        return convert_kv(item_name, obj.isoformat(), attr_type, cdata)
    if type(obj) == bool:
        return convert_bool(item_name, obj, attr_type, cdata)
    if obj is None:
        return convert_none(item_name, '', attr_type, cdata)
    if isinstance(obj, dict):
        return convert_dict(obj, ids, parent, attr_type, item_func, cdata)
    if isinstance(obj, collections.Iterable):
        return convert_list(obj, ids, parent, attr_type, item_func, cdata)
    raise TypeError('Unsupported data type: %s (%s)' % (obj, type(obj).__name__))
Routes the elements of an object to the right function to convert them based on their data type
52,022
def convert_dict(obj, ids, parent, attr_type, item_func, cdata):
    """Convert a dict into an XML string."""
    LOG.info('Inside convert_dict(): obj type is: "%s", obj="%s"' % (
        type(obj).__name__, unicode_me(obj)))
    output = []
    # item_func may have side effects, so keep the call even though the
    # resulting name is unused for dict children.
    item_name = item_func(parent)
    for key, val in obj.items():
        LOG.info('Looping inside convert_dict(): key="%s", val="%s", type(val)="%s"' % (
            unicode_me(key), unicode_me(val), type(val).__name__))
        attr = {} if not ids else {'id': '%s' % (get_unique_id(parent))}
        key, attr = make_valid_xml_name(key, attr)
        if isinstance(val, numbers.Number) or type(val) in (str, unicode):
            output.append(convert_kv(key, val, attr_type, attr, cdata))
        elif hasattr(val, 'isoformat'):
            output.append(convert_kv(key, val.isoformat(), attr_type, attr, cdata))
        elif type(val) == bool:
            output.append(convert_bool(key, val, attr_type, attr, cdata))
        elif isinstance(val, dict):
            if attr_type:
                attr['type'] = get_xml_type(val)
            output.append('<%s%s>%s</%s>' % (
                key, make_attrstring(attr),
                convert_dict(val, ids, key, attr_type, item_func, cdata), key))
        elif isinstance(val, collections.Iterable):
            if attr_type:
                attr['type'] = get_xml_type(val)
            output.append('<%s%s>%s</%s>' % (
                key, make_attrstring(attr),
                convert_list(val, ids, key, attr_type, item_func, cdata), key))
        elif val is None:
            output.append(convert_none(key, val, attr_type, attr, cdata))
        else:
            raise TypeError('Unsupported data type: %s (%s)' % (val, type(val).__name__))
    return ''.join(output)
Converts a dict into an XML string .
52,023
def convert_list(items, ids, parent, attr_type, item_func, cdata):
    """Convert a list into an XML string."""
    LOG.info('Inside convert_list()')
    output = []
    item_name = item_func(parent)
    if ids:
        this_id = get_unique_id(parent)
    for i, item in enumerate(items):
        LOG.info('Looping inside convert_list(): item="%s", item_name="%s", type="%s"' % (
            unicode_me(item), item_name, type(item).__name__))
        attr = {} if not ids else {'id': '%s_%s' % (this_id, i + 1)}
        if isinstance(item, numbers.Number) or type(item) in (str, unicode):
            output.append(convert_kv(item_name, item, attr_type, attr, cdata))
        elif hasattr(item, 'isoformat'):
            output.append(convert_kv(item_name, item.isoformat(), attr_type, attr, cdata))
        elif type(item) == bool:
            output.append(convert_bool(item_name, item, attr_type, attr, cdata))
        elif isinstance(item, dict):
            if not attr_type:
                output.append('<%s>%s</%s>' % (
                    item_name,
                    convert_dict(item, ids, parent, attr_type, item_func, cdata),
                    item_name,
                ))
            else:
                output.append('<%s type="dict">%s</%s>' % (
                    item_name,
                    convert_dict(item, ids, parent, attr_type, item_func, cdata),
                    item_name,
                ))
        elif isinstance(item, collections.Iterable):
            if not attr_type:
                output.append('<%s %s>%s</%s>' % (
                    item_name, make_attrstring(attr),
                    convert_list(item, ids, item_name, attr_type, item_func, cdata),
                    item_name,
                ))
            else:
                output.append('<%s type="list"%s>%s</%s>' % (
                    item_name, make_attrstring(attr),
                    convert_list(item, ids, item_name, attr_type, item_func, cdata),
                    item_name,
                ))
        elif item is None:
            output.append(convert_none(item_name, None, attr_type, attr, cdata))
        else:
            raise TypeError('Unsupported data type: %s (%s)' % (item, type(item).__name__))
    return ''.join(output)
Converts a list into an XML string .
52,024
def convert_kv(key, val, attr_type, attr=None, cdata=False):
    """Convert a number or string into an XML element.

    Bug fix: the mutable default ``attr={}`` was shared between calls, so
    the ``type`` attribute written by one call leaked into later calls.
    ``None`` now stands in for "fresh empty dict".
    """
    LOG.info('Inside convert_kv(): key="%s", val="%s", type(val) is: "%s"' % (
        unicode_me(key), unicode_me(val), type(val).__name__))
    attr = {} if attr is None else attr
    key, attr = make_valid_xml_name(key, attr)
    if attr_type:
        attr['type'] = get_xml_type(val)
    attrstring = make_attrstring(attr)
    return '<%s%s>%s</%s>' % (
        key, attrstring,
        wrap_cdata(val) if cdata else escape_xml(val),
        key)
Converts a number or string into an XML element
52,025
def convert_bool(key, val, attr_type, attr=None, cdata=False):
    """Convert a boolean into an XML element (rendered as 'true'/'false').

    Bug fix: replaces the shared mutable default ``attr={}`` (mutated via
    ``attr['type'] = ...``) with a per-call fresh dict.
    """
    LOG.info('Inside convert_bool(): key="%s", val="%s", type(val) is: "%s"' % (
        unicode_me(key), unicode_me(val), type(val).__name__))
    attr = {} if attr is None else attr
    key, attr = make_valid_xml_name(key, attr)
    if attr_type:
        attr['type'] = get_xml_type(val)
    attrstring = make_attrstring(attr)
    return '<%s%s>%s</%s>' % (key, attrstring, unicode(val).lower(), key)
Converts a boolean into an XML element
52,026
def convert_none(key, val, attr_type, attr=None, cdata=False):
    """Convert a null value into an empty XML element.

    Bug fix: replaces the shared mutable default ``attr={}`` (mutated via
    ``attr['type'] = ...``) with a per-call fresh dict.
    """
    LOG.info('Inside convert_none(): key="%s"' % (unicode_me(key)))
    attr = {} if attr is None else attr
    key, attr = make_valid_xml_name(key, attr)
    if attr_type:
        attr['type'] = get_xml_type(val)
    attrstring = make_attrstring(attr)
    return '<%s%s></%s>' % (key, attrstring, key)
Converts a null value into an XML element
52,027
def getCallSourceLines(callFrame, icNames, icMethod):
    """Locate the source text of the ic() call made in *callFrame*.

    Returns (source, absoluteStartLineNum, startLineOffset); raises
    NoSourceAvailableError when the source cannot be found.
    """
    code = callFrame.f_code
    try:
        if code.co_name == '<module>':
            parentBlockStartLine = 1
            lines = inspect.findsource(code)[0]
            parentBlockSource = ''.join(lines)
        else:
            parentBlockStartLine = code.co_firstlineno
            parentBlockSource = inspect.getsource(code)
    except (IOError, OSError) as err:
        # inspect reports missing source via the error message text.
        if 'source code' in err.args[0]:
            raise NoSourceAvailableError()
        raise

    lineno = inspect.getframeinfo(callFrame)[1]
    linenoRelativeToParent = lineno - parentBlockStartLine + 1
    parentBlockSource = textwrap.dedent(parentBlockSource)

    potentialCalls = [
        node for node in ast.walk(ast.parse(parentBlockSource))
        if isAstNodeIceCreamCall(node, icNames, icMethod)
        and linenoRelativeToParent in getAllLineNumbersOfAstNode(node)]
    if not potentialCalls:
        raise NoSourceAvailableError()

    endLine = lineno - parentBlockStartLine + 1
    startLine = min(call.lineno for call in potentialCalls)
    lines = parentBlockSource.splitlines()[startLine - 1:endLine]
    if isCallStrMissingClosingRightParenthesis('\n'.join(lines).strip()):
        lines.append(')')  # multi-line call cut short; re-balance parens
    source = stripCommentsAndNewlines('\n'.join(lines)).strip()

    absoluteStartLineNum = parentBlockStartLine + startLine - 1
    startLineOffset = calculateLineOffsets(code)[absoluteStartLineNum]
    return source, absoluteStartLineNum, startLineOffset
Find the source lines of the ic() call made in the given frame. Raises NoSourceAvailableError when the source cannot be located.
52,028
def init_websocket(self, event_handler, websocket_cls=Websocket):
    """Create and connect the websocket; return the event loop used."""
    self.websocket = websocket_cls(self.options, self.client.token)
    connection = self.websocket.connect(event_handler)
    loop = asyncio.get_event_loop()
    loop.run_until_complete(connection)
    return loop
Will initialize the websocket connection to the mattermost server .
52,029
def login(self):
    """Log in to Mattermost, caching the token and user info on the client."""
    if self.options['token']:
        # A pre-supplied token skips the password flow entirely.
        self.client.token = self.options['token']
        result = self.users.get_user('me')
    else:
        response = self.users.login_user({
            'login_id': self.options['login_id'],
            'password': self.options['password'],
            'token': self.options['mfa_token'],
        })
        if response.status_code == 200:
            self.client.token = response.headers['Token']
        self.client.cookies = response.cookies
        try:
            result = response.json()
        except ValueError:
            log.debug('Could not convert response to json, returning raw response')
            result = response
    log.debug(result)
    if 'id' in result:
        self.client.userid = result['id']
    if 'username' in result:
        self.client.username = result['username']
    return result
Logs the user in .
52,030
def connect(self, event_handler):
    """Connect and authenticate the websocket, then start the listen loop.

    The loop keeps the connection alive by pinging the server.
    """
    context = ssl.create_default_context(purpose=ssl.Purpose.CLIENT_AUTH)
    if not self.options['verify']:
        context.verify_mode = ssl.CERT_NONE
    if self.options['scheme'] != 'https':
        # Plain ws:// carries no TLS context.
        scheme = 'ws://'
        context = None
    else:
        scheme = 'wss://'
    url = '{scheme:s}{url:s}:{port:s}{basepath:s}/websocket'.format(
        scheme=scheme,
        url=self.options['url'],
        port=str(self.options['port']),
        basepath=self.options['basepath'])
    websocket = yield from websockets.connect(url, ssl=context)
    yield from self._authenticate_websocket(websocket, event_handler)
    yield from self._start_loop(websocket, event_handler)
Connect to the websocket and authenticate it . When the authentication has finished start the loop listening for messages sending a ping to the server to keep the connection alive .
52,031
def _authenticate_websocket(self, websocket, event_handler):
    """Send an authentication challenge over *websocket* and await the reply."""
    log.debug('Authenticating websocket')
    json_data = json.dumps({
        "seq": 1,
        "action": "authentication_challenge",
        "data": {"token": self._token},
    }).encode('utf8')
    yield from websocket.send(json_data)
    while True:
        message = yield from websocket.recv()
        status = json.loads(message)
        log.debug(status)
        # Every frame is forwarded to the caller's handler, even during auth.
        yield from event_handler(message)
        seq_matches = 'seq_reply' in status and status['seq_reply'] == 1
        if ('status' in status and status['status'] == 'OK') and seq_matches:
            log.info('Websocket authentification OK')
            return True
        elif seq_matches:
            log.error('Websocket authentification failed')
Sends an authentication challenge over a websocket. This is not needed when we just send the cookie we got on login when connecting to the websocket.
52,032
def run_cell(self, cell):
    """Run a notebook cell and update the output of that cell in-place."""
    logging.info('Running cell:\n%s\n', cell.input)
    self.kc.execute(cell.input)
    reply = self.kc.get_shell_msg()
    status = reply['content']['status']
    traceback_text = ''
    if status == 'error':
        traceback_text = ('Cell raised uncaught exception: \n'
                          + '\n'.join(reply['content']['traceback']))
        logging.info(traceback_text)
    else:
        logging.info('Cell returned')

    outs = []
    while True:
        try:
            msg = self.kc.get_iopub_msg(timeout=1)
            if msg['msg_type'] == 'status':
                if msg['content']['execution_state'] == 'idle':
                    break  # kernel is done producing output for this cell
        except Empty:
            raise
        content = msg['content']
        msg_type = msg['msg_type']
        # Normalize nbformat-4 message names back to the v3 vocabulary.
        notebook3_format_conversions = {
            'error': 'pyerr',
            'execute_result': 'pyout',
        }
        msg_type = notebook3_format_conversions.get(msg_type, msg_type)
        out = NotebookNode(output_type=msg_type)
        if 'execution_count' in content:
            cell['prompt_number'] = content['execution_count']
            out.prompt_number = content['execution_count']
        if msg_type in ('status', 'pyin', 'execute_input'):
            continue
        elif msg_type == 'stream':
            out.stream = content['name']
            out.text = content['text'] if 'text' in content else content['data']
        elif msg_type in ('display_data', 'pyout'):
            for mime, data in content['data'].items():
                try:
                    attr = self.MIME_MAP[mime]
                except KeyError:
                    raise NotImplementedError('unhandled mime type: %s' % mime)
                json_encode = (IPython.version_info[0] >= 3
                               and mime == "application/json")
                data_out = data if not json_encode else json.dumps(data)
                setattr(out, attr, data_out)
        elif msg_type == 'pyerr':
            out.ename = content['ename']
            out.evalue = content['evalue']
            out.traceback = content['traceback']
        elif msg_type == 'clear_output':
            outs = []  # discard everything collected so far
            continue
        else:
            raise NotImplementedError('unhandled iopub message: %s' % msg_type)
        outs.append(out)

    cell['outputs'] = outs
    if status == 'error':
        raise NotebookError(traceback_text)
Run a notebook cell and update the output of that cell in - place .
52,033
def iter_code_cells(self):
    """Yield every code cell in the notebook, worksheet by worksheet."""
    for worksheet in self.nb.worksheets:
        for cell in worksheet.cells:
            if cell.cell_type == 'code':
                yield cell
Iterate over the notebook cells containing code .
52,034
def run_notebook(self, skip_exceptions=False, progress_callback=None):
    """Run all code cells in order, updating their outputs in-place.

    With skip_exceptions=True, cells raising NotebookError are skipped
    instead of aborting the run.
    """
    for index, cell in enumerate(self.iter_code_cells()):
        try:
            self.run_cell(cell)
        except NotebookError:
            if not skip_exceptions:
                raise
        if progress_callback:
            progress_callback(index)
Run all the notebook cells in order and update the outputs in - place .
52,035
def config(filename):
    """Parse *filename* and return a list of Config namedtuples."""
    Config = collections.namedtuple('Config', [
        'git',
        'lock_file',
        'version',
        'name',
        'src',
        'dst',
        'files',
        'post_commands',
    ])
    return [Config(**entry) for entry in _get_config_generator(filename)]
Construct Config object and return a list .
52,036
def _get_files_config(src_dir, files_list):
    """Build FilesConfig namedtuples for every entry in *files_list*."""
    FilesConfig = collections.namedtuple('FilesConfig',
                                         ['src', 'dst', 'post_commands'])
    return [FilesConfig(**entry)
            for entry in _get_files_generator(src_dir, files_list)]
Construct FileConfig object and return a list .
52,037
def _get_config(filename):
    """Interpolate environment variables into the YAML file and parse it."""
    interpolator = interpolation.Interpolator(
        interpolation.TemplateWithDefaults, os.environ)
    with open(filename, 'r') as stream:
        try:
            interpolated_config = interpolator.interpolate(stream.read())
            return yaml.safe_load(interpolated_config)
        except yaml.parser.ParserError as e:
            msg = 'Error parsing gilt config: {0}'.format(e)
            raise ParseError(msg)
Parse the provided YAML file and return a dict .
52,038
def _get_dst_dir(dst_dir):
    """Create *dst_dir*'s base directory, then return it prefixed with the cwd."""
    working_dir = os.getcwd()
    _makedirs(dst_dir)
    return os.path.join(working_dir, dst_dir)
Prefix the provided string with working directory and return a str .
52,039
def _makedirs ( path ) : dirname , _ = os . path . split ( path ) try : os . makedirs ( dirname ) except OSError as exc : if exc . errno == errno . EEXIST : pass else : raise
Create a base directory of the provided path and return None .
52,040
def main(ctx, config, debug):
    """gilt - A GIT layering tool.

    Stash the CLI arguments on the click context for subcommands.
    """
    ctx.obj = {'args': {'debug': debug, 'config': config}}
gilt - A GIT layering tool .
52,041
def overlay(ctx):
    """Install gilt dependencies described by the config file."""
    args = ctx.obj.get('args')
    filename = args.get('config')
    debug = args.get('debug')
    _setup(filename)
    for c in config.config(filename):
        # Serialize work on each repo across processes.
        with fasteners.InterProcessLock(c.lock_file):
            util.print_info('{}:'.format(c.name))
            if not os.path.exists(c.src):
                git.clone(c.name, c.git, c.src, debug=debug)
            if c.dst:
                git.extract(c.src, c.dst, c.version, debug=debug)
                post_commands = {c.dst: c.post_commands}
            else:
                git.overlay(c.src, c.files, c.version, debug=debug)
                post_commands = {
                    conf.dst: conf.post_commands for conf in c.files}
            # Run post commands inside the directory they were staged into.
            for dst, commands in post_commands.items():
                for command in commands:
                    msg = ' - running `{}` in {}'.format(command, dst)
                    util.print_info(msg)
                    cmd = util.build_sh_cmd(command, cwd=dst)
                    util.run_command(cmd, debug=debug)
Install gilt dependencies
52,042
def clone(name, repository, destination, debug=False):
    """Clone *repository* into *destination* and return None."""
    util.print_info(' - cloning {} to {}'.format(name, destination))
    cmd = sh.git.bake('clone', repository, destination)
    util.run_command(cmd, debug=debug)
Clone the specified repository into a temporary directory and return None .
52,043
def _get_version(version, debug=False):
    """Switch the current repo to *version*, fetching first when it's unknown."""
    known_locally = any((_has_branch(version, debug),
                         _has_tag(version, debug),
                         _has_commit(version, debug)))
    if not known_locally:
        util.run_command(sh.git.bake('fetch'), debug=debug)
    util.run_command(sh.git.bake('checkout', version), debug=debug)
    util.run_command(sh.git.bake('clean', '-d', '-x', '-f'), debug=debug)
    if _has_branch(version, debug):
        # Branches may have moved upstream; fast-forward only.
        util.run_command(sh.git.bake('pull', rebase=True, ff_only=True),
                         debug=debug)
Handle switching to the specified version and return None .
52,044
def _has_commit(version, debug=False):
    """Return True when *version* names a local commit sha (not a tag/branch)."""
    if _has_tag(version, debug) or _has_branch(version, debug):
        return False
    cmd = sh.git.bake('cat-file', '-e', version)
    try:
        util.run_command(cmd, debug=debug)
    except sh.ErrorReturnCode:
        return False
    return True
Determine a version is a local git commit sha or not .
52,045
def _has_tag(version, debug=False):
    """Return True when *version* is a local git tag name."""
    ref = "refs/tags/{}".format(version)
    cmd = sh.git.bake('show-ref', '--verify', '--quiet', ref)
    try:
        util.run_command(cmd, debug=debug)
    except sh.ErrorReturnCode:
        return False
    return True
Determine a version is a local git tag name or not .
52,046
def run_command(cmd, debug=False):
    """Invoke *cmd*, optionally echoing the working dir and command first."""
    if debug:
        print_warn(' PWD: {}'.format(os.getcwd()))
        print_warn(' COMMAND: {}'.format(cmd))
    cmd()
Execute the given command and return None .
52,047
def build_sh_cmd(cmd, cwd=None):
    """Build a baked sh.Command from a whitespace-separated command string."""
    prog, *rest = cmd.split()
    return getattr(sh, prog).bake(_cwd=cwd, *rest)
Build a sh . Command from a string .
52,048
def copy(src, dst):
    """Copy a directory tree, falling back to a file copy for plain files."""
    try:
        shutil.copytree(src, dst)
    except OSError as exc:
        if exc.errno != errno.ENOTDIR:
            raise
        shutil.copy(src, dst)
Handle the copying of a file or directory .
52,049
def to_dict(self):
    """Serialize this table (for local caching) as a plain dict."""
    return {
        'schema': self.schema,
        'name': self.name,
        'columns': [column.to_dict() for column in self._columns],
        'foreign_keys': self.foreign_keys.to_dict(),
        'ref_keys': self.ref_keys.to_dict(),
    }
Serialize representation of the table for local caching .
52,050
def list_profiles():
    """Return all db.py profiles found in the user's home directory."""
    profiles = {}
    home = os.path.expanduser("~")
    for entry in os.listdir(home):
        if not entry.startswith(".db.py_"):
            continue
        profile = load_from_json(os.path.join(home, entry))
        # Cached table metadata is flagged, then dropped from the payload.
        profile['metadata'] = bool(profile.pop('tables', None))
        profiles[entry[7:]] = profile  # strip the ".db.py_" prefix
    return profiles
Lists all of the database profiles available
52,051
def remove_profile(name, s3=False):
    """Remove a stored profile file from the user's home directory.

    Parameters
    ----------
    name : str
        Profile name (without the dotfile prefix).
    s3 : bool
        When True, remove an S3 profile; otherwise a db.py profile.

    Raises
    ------
    Exception
        If the profile file does not exist or cannot be removed.
    """
    user = os.path.expanduser("~")
    prefix = S3_PROFILE_ID if s3 else DBPY_PROFILE_ID
    f = os.path.join(user, prefix + name)
    # Check existence explicitly instead of probing with open() — the
    # original leaked the file handle and swallowed all errors with a
    # bare except.
    if not os.path.isfile(f):
        raise Exception(
            "Profile '{0}' does not exist. Could not find file {1}".format(name, f))
    try:
        os.remove(f)
    except Exception as e:
        # Fixed typo in the original message ("Excpetion").
        raise Exception(
            "Could not remove profile {0}! Exception: {1}".format(name, e))
Removes a profile from your config
52,052
def tables(self):
    """Lazily loaded table metadata for the DB.

    Triggers a full schema refresh on first access; afterwards returns
    the cached TableSet.
    """
    if len(self._tables) == 0:
        self.refresh_schema(self._exclude_system_tables, self._use_cache)
    return self._tables
A lazy loaded reference to the table metadata for the DB .
52,053
def save_credentials(self, profile="default"):
    """Persist this database's credentials under the given profile name,
    so they do not need to be embedded in scripts."""
    path = profile_path(DBPY_PROFILE_ID, profile)
    dump_to_json(path, self.credentials)
Save your database credentials so you don t have to save them in script .
52,054
def save_metadata(self, profile="default"):
    """Persist credentials plus cached table metadata under *profile*.

    No-op when the schema has not been loaded yet (no tables known).
    """
    if len(self.tables) > 0:
        path = profile_path(DBPY_PROFILE_ID, profile)
        dump_to_json(path, self.to_dict())
Save the database credentials plus the database properties to your db . py profile .
52,055
def credentials(self):
    """Dict of all connection credentials/settings for this database.

    The filename (when set) is resolved against the current working
    directory.
    """
    db_filename = os.path.join(os.getcwd(), self.filename) if self.filename else None
    return {
        "username": self.username,
        "password": self.password,
        "hostname": self.hostname,
        "port": self.port,
        "filename": db_filename,
        "dbname": self.dbname,
        "dbtype": self.dbtype,
        "schemas": self.schemas,
        "limit": self.limit,
        "keys_per_column": self.keys_per_column,
    }
Dict representation of all credentials for the database .
52,056
def find_table(self, search):
    """Search the schema for tables whose name matches the glob *search*.

    Returns a TableSet of the matches (possibly empty).
    """
    matches = [table for table in self.tables
               if glob.fnmatch.fnmatch(table.name, search)]
    return TableSet(matches)
Aggressively search through your database's schema for a table.
52,057
def find_column(self, search, data_type=None):
    """Search all tables for Column attributes matching the glob *search*.

    *data_type* may be a single type name or a list of names used to
    filter the matches. Returns a ColumnSet.
    """
    if isinstance(data_type, str):
        data_type = [data_type]
    matches = []
    for table in self.tables:
        for attr in vars(table):
            if not glob.fnmatch.fnmatch(attr, search):
                continue
            candidate = getattr(table, attr)
            if not isinstance(candidate, Column):
                continue
            if data_type and candidate.type not in data_type:
                continue
            matches.append(candidate)
    return ColumnSet(matches)
Aggressively search through your database's schema for a column.
52,058
def query(self, q, data=None, union=True, limit=None):
    """Run raw SQL *q* against the database and return a DataFrame.

    *data* is substituted via the handlebars templater (*union*
    controls how multiple datasets are combined); *limit* applies a row
    limit to the query.
    """
    sql = self._apply_handlebars(q, data, union) if data else q
    if limit:
        sql = self._assign_limit(sql, limit)
    return pd.read_sql(sql, self.con)
Query your database with a raw string .
52,059
def query_from_file(self, filename, data=None, union=True, limit=None):
    """Read SQL from *filename* and execute it via ``query``."""
    with open(filename) as fp:
        sql = fp.read()
    return self.query(sql, data=data, union=union, limit=limit)
Query your database from a file .
52,060
# Re-pull the database schema and rebuild self._tables as a TableSet.
# Three construction paths:
#   1. use_cache: column/key metadata come from the cached table_meta dict.
#   2. live DB with a 'foreign_keys_for_db' system query: run the two
#      system queries and group foreign/ref key rows by table name
#      (rel[1] is presumably the table-name column of each row — TODO confirm
#      against the query templates).
#   3. live DB without system key queries: build tables without key info.
# Writes "done!" to stderr when finished.
def refresh_schema ( self , exclude_system_tables = True , use_cache = False ) : col_meta , table_meta = self . _get_db_metadata ( exclude_system_tables , use_cache ) tables = self . _gen_tables_from_col_tuples ( col_meta ) if use_cache : self . _tables = TableSet ( [ Table ( self . con , self . _query_templates , table_meta [ t ] [ 'schema' ] , t , tables [ t ] , keys_per_column = self . keys_per_column , foreign_keys = table_meta [ t ] [ 'foreign_keys' ] [ 'columns' ] , ref_keys = table_meta [ t ] [ 'ref_keys' ] [ 'columns' ] ) for t in sorted ( tables . keys ( ) ) ] ) elif not use_cache and isinstance ( self . _query_templates . get ( 'system' , { } ) . get ( 'foreign_keys_for_db' , None ) , str ) : self . cur . execute ( self . _query_templates [ 'system' ] [ 'foreign_keys_for_db' ] ) table_db_foreign_keys = defaultdict ( list ) for rel in self . cur : table_db_foreign_keys [ rel [ 1 ] ] . append ( rel ) self . cur . execute ( self . _query_templates [ 'system' ] [ 'ref_keys_for_db' ] ) table_db_ref_keys = defaultdict ( list ) for rel in self . cur : table_db_ref_keys [ rel [ 1 ] ] . append ( rel ) self . _tables = TableSet ( [ Table ( self . con , self . _query_templates , tables [ t ] [ 0 ] . schema , t , tables [ t ] , keys_per_column = self . keys_per_column , foreign_keys = table_db_foreign_keys [ t ] , ref_keys = table_db_ref_keys [ t ] ) for t in sorted ( tables . keys ( ) ) ] ) elif not use_cache : self . _tables = TableSet ( [ Table ( self . con , self . _query_templates , tables [ t ] [ 0 ] . schema , t , tables [ t ] , keys_per_column = self . keys_per_column ) for t in sorted ( tables . keys ( ) ) ] ) sys . stderr . write ( "done!\n" )
Pulls your database s schema again and looks for any new tables and columns .
52,061
def to_dict(self):
    """Credentials dict merged with the tables' dict representation."""
    out = self.credentials
    out.update(self.tables.to_dict())
    return out
Dict representation of the database as credentials plus tables dict representation .
52,062
def profile_path(profile_id, profile):
    """Absolute path of the given profile file in the current user's home."""
    return os.path.join(os.path.expanduser("~"), profile_id + profile)
Create the full path to the given profile for the current user.
52,063
def load_from_json(file_path):
    """Load base64-encoded JSON stored at *file_path* as a dict.

    Returns None when the file does not exist (mirrors the original's
    implicit fall-through).
    """
    if not os.path.exists(file_path):
        return None
    # Use a context manager — the original leaked the file handle.
    with open(file_path, 'rb') as fp:
        raw_data = fp.read()
    # base64.decodestring was deprecated since 3.1 and removed in
    # Python 3.9; decodebytes is the supported equivalent.
    return json.loads(base64.decodebytes(raw_data).decode('utf-8'))
Load the stored data from json and return as a dict .
52,064
def save_credentials(self, profile):
    """Write this S3 profile's access/secret keys to a dotfile for later reuse."""
    creds = {"access_key": self.access_key, "secret_key": self.secret_key}
    dump_to_json(profile_path(S3_PROFILE_ID, profile), creds)
Saves credentials to a dotfile so you can grab them later.
52,065
def to_dict(self):
    """Serialized representation of the column for local caching."""
    return {
        'schema': self.schema,
        'table': self.table,
        'name': self.name,
        'type': self.type,
    }
Serialize representation of the column for local caching .
52,066
def rate_limit(self, rate_limit):
    """Turn rate limiting on or off.

    Resets the last-call timestamp and clears memoized results so the
    new setting takes effect immediately.
    """
    self._rate_limit_last_call = None
    self._rate_limit = True if rate_limit else False
    self.clear_memoized()
Turn on or off rate limiting
52,067
def language(self, lang):
    """Switch the API language, rewriting the API URL accordingly.

    No-op when *lang* (case-insensitive) already matches; otherwise the
    language segment of the URL is swapped and memoized results are
    cleared.
    """
    lang = lang.lower()
    if self._lang == lang:
        return
    old_seg = "/{0}.".format(self._lang)
    new_seg = "/{0}.".format(lang)
    self._api_url = self._api_url.replace(old_seg, new_seg)
    self._lang = lang
    self.clear_memoized()
Set the language to use ; attempts to change the API URL
52,068
def refresh_interval(self, refresh_interval):
    """Set the cache refresh interval.

    Any value that is not a positive int disables refreshing (stored as
    None).
    """
    valid = isinstance(refresh_interval, int) and refresh_interval > 0
    self._refresh_interval = refresh_interval if valid else None
Set the new cache refresh interval
52,069
def login(self, username, password, strict=True):
    """Log in to the wiki as the specified user.

    Fetches a login token, then posts the credentials. Returns True on
    success; on failure raises MediaWikiLoginError when *strict* is
    True, otherwise returns False.
    """
    token_res = self._get_response({
        "action": "query",
        "meta": "tokens",
        "type": "login",
        "format": "json",
    })
    if "query" in token_res and "tokens" in token_res["query"]:
        login_params = {
            "action": "login",
            "lgname": username,
            "lgpassword": password,
            "lgtoken": token_res["query"]["tokens"]["logintoken"],
            "format": "json",
        }
        res = self._post_response(login_params)
        if res["login"]["result"] == "Success":
            self._is_logged_in = True
            return True
    self._is_logged_in = False
    reason = res["login"]["reason"]
    if strict:
        raise MediaWikiLoginError("MediaWiki login failure: {}".format(reason))
    return False
Login as specified user
52,070
def set_api_url(self, api_url="https://{lang}.wikipedia.org/w/api.php", lang="en"):
    """Point the client at a new API URL and language.

    Validates the endpoint by fetching site info; on failure the
    previous URL/language are restored and MediaWikiAPIURLError is
    raised. Memoized results are cleared on success.
    """
    prev_url, prev_lang = self._api_url, self._lang
    self._lang = lang.lower()
    self._api_url = api_url.format(lang=self._lang)
    try:
        self._get_site_info()
        self.__supported_languages = None  # force lazy re-fetch
    except MediaWikiException:
        self._api_url = prev_url
        self._lang = prev_lang
        raise MediaWikiAPIURLError(api_url)
    self.clear_memoized()
Set the API URL and language
52,071
def _reset_session(self):
    """Create a fresh requests session carrying our User-Agent header,
    dropping any logged-in state."""
    session = requests.Session()
    session.headers.update({"User-Agent": self._user_agent})
    self._session = session
    self._is_logged_in = False
Set session information
52,072
def random(self, pages=1):
    """Request a random page title, or a list of titles when pages > 1.

    Raises ValueError when *pages* is None or less than 1.
    """
    if pages is None or pages < 1:
        raise ValueError("Number of pages must be greater than 0")
    request = self.wiki_request({"list": "random", "rnnamespace": 0, "rnlimit": pages})
    titles = [entry["title"] for entry in request["query"]["random"]]
    return titles[0] if len(titles) == 1 else titles
Request a random page title or list of random titles
52,073
def search(self, query, results=10, suggestion=False):
    """Search the wiki for titles similar to *query*.

    Returns a list of titles, or (titles, suggestion-or-None) when
    *suggestion* is True.
    """
    self._check_query(query, "Query must be specified")
    params = {
        "list": "search",
        "srprop": "",
        "srlimit": results,
        "srsearch": query,
    }
    if suggestion:
        params["srinfo"] = "suggestion"
    raw = self.wiki_request(params)
    self._check_error_response(raw, query)
    titles = [item["title"] for item in raw["query"]["search"]]
    if not suggestion:
        return titles
    info = raw["query"].get("searchinfo")
    return titles, (info["suggestion"] if info else None)
Search for similar titles
52,074
def suggest(self, query):
    """Return the suggested title for *query*, else the top search hit,
    else None when nothing was found."""
    results, suggestion = self.search(query, results=1, suggestion=True)
    try:
        return suggestion or results[0]
    except IndexError:
        return None
Gather suggestions based on the provided title or None if no suggestions found
52,075
def geosearch(self, latitude=None, longitude=None, radius=1000, title=None, auto_suggest=True, results=10):
    """Search for pages near the given coordinates, or near *title*'s page.

    Latitude/longitude must be Decimal or coercible to Decimal
    (ValueError otherwise). When *title* is given, coordinates are
    ignored and the search is anchored to that page (optionally
    auto-suggested first).
    """
    def _coerce(value):
        # Accept Decimal directly; otherwise try to construct one.
        if isinstance(value, Decimal):
            return value
        msg = ("Latitude and Longitude must be specified either as "
               "a Decimal or in formats that can be coerced into "
               "a Decimal.")
        try:
            return Decimal(value)
        except (DecimalException, TypeError):
            raise ValueError(msg)

    params = {"list": "geosearch", "gsradius": radius, "gslimit": results}
    if title is not None:
        if auto_suggest:
            title = self.suggest(title)
        params["gspage"] = title
    else:
        params["gscoord"] = "{0}|{1}".format(_coerce(latitude), _coerce(longitude))
    raw = self.wiki_request(params)
    self._check_error_response(raw, title)
    return [item["title"] for item in raw["query"]["geosearch"]]
Search for pages that relate to the provided geocoords or near the page
52,076
def opensearch(self, query, results=10, redirect=True):
    """OpenSearch request (search-box style suggestions).

    Returns a list of (title, summary, url) tuples. *results* is capped
    at 100; *redirect* chooses whether redirects are resolved.
    """
    self._check_query(query, "Query must be specified")
    params = {
        "action": "opensearch",
        "search": query,
        "limit": (100 if results > 100 else results),
        "redirects": ("resolve" if redirect else "return"),
        "warningsaserror": True,
        "namespace": "",
    }
    raw = self.wiki_request(params)
    self._check_error_response(raw, query)
    return [(title, raw[2][i], raw[3][i]) for i, title in enumerate(raw[1])]
Execute a MediaWiki opensearch request similar to search box suggestions and conforming to the OpenSearch specification
52,077
def prefixsearch(self, prefix, results=10):
    """Titles beginning with *prefix* (MediaWiki prefix search).

    *results* above 500 is sent as "max".
    """
    self._check_query(prefix, "Prefix must be specified")
    params = {
        "list": "prefixsearch",
        "pssearch": prefix,
        "pslimit": ("max" if results > 500 else results),
        "psnamespace": 0,
        "psoffset": 0,
    }
    raw = self.wiki_request(params)
    self._check_error_response(raw, prefix)
    return [rec["title"] for rec in raw["query"]["prefixsearch"]]
Perform a prefix search using the provided prefix string
52,078
def summary(self, title, sentences=0, chars=0, auto_suggest=True, redirect=True):
    """Summary for the page *title*, truncated by sentence or character
    count (0 means no truncation for that dimension)."""
    target = self.page(title, auto_suggest=auto_suggest, redirect=redirect)
    return target.summarize(sentences, chars)
Get the summary for the title in question
52,079
# Build a nested category tree for one or more categories.
# Each tree node records: depth, sub-categories (recursed until *depth*
# levels, then left as None placeholders), links (category members), and
# parent categories. Page fetches are retried up to 10 times with a 1s
# sleep before MediaWikiCategoryTreeError is raised; PageError and
# KeyboardInterrupt propagate immediately. *category* may be a string or
# a list; empty entries are skipped. ValueError for an empty category
# argument or non-positive depth (depth=None means unlimited).
def categorytree ( self , category , depth = 5 ) : def __cat_tree_rec ( cat , depth , tree , level , categories , links ) : tree [ cat ] = dict ( ) tree [ cat ] [ "depth" ] = level tree [ cat ] [ "sub-categories" ] = dict ( ) tree [ cat ] [ "links" ] = list ( ) tree [ cat ] [ "parent-categories" ] = list ( ) parent_cats = list ( ) if cat not in categories : tries = 0 while True : if tries > 10 : raise MediaWikiCategoryTreeError ( cat ) try : pag = self . page ( "{0}:{1}" . format ( self . category_prefix , cat ) ) categories [ cat ] = pag parent_cats = categories [ cat ] . categories links [ cat ] = self . categorymembers ( cat , results = None , subcategories = True ) break except PageError : raise PageError ( "{0}:{1}" . format ( self . category_prefix , cat ) ) except KeyboardInterrupt : raise except Exception : tries = tries + 1 time . sleep ( 1 ) else : parent_cats = categories [ cat ] . categories tree [ cat ] [ "parent-categories" ] . extend ( parent_cats ) tree [ cat ] [ "links" ] . extend ( links [ cat ] [ 0 ] ) if depth and level >= depth : for ctg in links [ cat ] [ 1 ] : tree [ cat ] [ "sub-categories" ] [ ctg ] = None else : for ctg in links [ cat ] [ 1 ] : __cat_tree_rec ( ctg , depth , tree [ cat ] [ "sub-categories" ] , level + 1 , categories , links , ) if not isinstance ( category , list ) : cats = [ category ] else : cats = category if len ( cats ) == 1 and ( cats [ 0 ] is None or cats [ 0 ] == "" ) : msg = ( "CategoryTree: Parameter 'category' must either " "be a list of one or more categories or a string; " "provided: '{}'" . format ( category ) ) raise ValueError ( msg ) if depth is not None and depth < 1 : msg = ( "CategoryTree: Parameter 'depth' must be either None " "(for the full tree) or be greater than 0" ) raise ValueError ( msg ) results = dict ( ) categories = dict ( ) links = dict ( ) for cat in cats : if cat is None or cat == "" : continue __cat_tree_rec ( cat , depth , results , 0 , categories , links ) return results
Generate the Category Tree for the given categories
52,080
def page(self, title=None, pageid=None, auto_suggest=True, redirect=True, preload=False):
    """Return a MediaWikiPage for *title* or *pageid*.

    Raises ValueError when neither is given; PageError when
    auto-suggest finds no candidate title.
    """
    if (title is None or title.strip() == "") and pageid is None:
        raise ValueError("Either a title or a pageid must be specified")
    if title:
        if auto_suggest:
            suggested = self.suggest(title)
            if suggested is None:
                raise PageError(title=title)
            title = suggested
        return MediaWikiPage(self, title, redirect=redirect, preload=preload)
    return MediaWikiPage(self, pageid=pageid, preload=preload)
Get MediaWiki page based on the provided title or pageid
52,081
def wiki_request(self, params):
    """Issue a request to the MediaWiki API, honoring the rate limiter.

    Forces format=json and defaults action=query. When rate limiting is
    on and the previous call was within the minimum wait window, sleeps
    for the remainder before issuing the request.
    """
    params["format"] = "json"
    params.setdefault("action", "query")
    if self._rate_limit and self._rate_limit_last_call:
        earliest = self._rate_limit_last_call + self._min_wait
        if earliest > datetime.now():
            remaining = earliest - datetime.now()
            time.sleep(int(remaining.total_seconds()))
    req = self._get_response(params)
    if self._rate_limit:
        self._rate_limit_last_call = datetime.now()
    return req
Make a request to the MediaWiki API using the given search parameters
52,082
def _get_site_info ( self ) : response = self . wiki_request ( { "meta" : "siteinfo" , "siprop" : "extensions|general" } ) query = response . get ( "query" , None ) if query is None or query . get ( "general" , None ) is None : raise MediaWikiException ( "Missing query in response" ) gen = query . get ( "general" , None ) api_version = gen [ "generator" ] . split ( " " ) [ 1 ] . split ( "-" ) [ 0 ] major_minor = api_version . split ( "." ) for i , item in enumerate ( major_minor ) : major_minor [ i ] = int ( item ) self . _api_version = tuple ( major_minor ) self . _api_version_str = "." . join ( [ str ( x ) for x in self . _api_version ] ) tmp = gen . get ( "server" , "" ) if tmp == "" : raise MediaWikiException ( "Unable to parse base url" ) if tmp . startswith ( "http://" ) or tmp . startswith ( "https://" ) : self . _base_url = tmp elif gen [ "base" ] . startswith ( "https:" ) : self . _base_url = "https:{}" . format ( tmp ) else : self . _base_url = "http:{}" . format ( tmp ) self . _extensions = [ ext [ "name" ] for ext in query [ "extensions" ] ] self . _extensions = sorted ( list ( set ( self . _extensions ) ) )
Parse out the Wikimedia site information including API Version and Extensions
52,083
def _check_error_response ( response , query ) : if "error" in response : http_error = [ "HTTP request timed out." , "Pool queue is full" ] geo_error = [ "Page coordinates unknown." , "One of the parameters gscoord, gspage, gsbbox is required" , "Invalid coordinate provided" , ] err = response [ "error" ] [ "info" ] if err in http_error : raise HTTPTimeoutError ( query ) elif err in geo_error : raise MediaWikiGeoCoordError ( err ) else : raise MediaWikiException ( err )
check for default error messages and throw correct exception
52,084
def _get_response(self, params):
    """GET the API endpoint with *params* and decode the JSON body."""
    resp = self._session.get(self._api_url, params=params, timeout=self._timeout)
    return resp.json(encoding="utf8")
wrap the call to the requests package
52,085
def _post_response(self, params):
    """POST *params* to the API endpoint and decode the JSON body."""
    resp = self._session.post(self._api_url, data=params, timeout=self._timeout)
    return resp.json(encoding="utf8")
wrap a post call to the requests package
52,086
def parse_all_arguments(func):
    """Map *func*'s defaulted parameters to their default values.

    On Python 3 the first parameter is skipped (it is presumably
    self/the parser object); the Python 2 branch uses getargspec and
    does not skip it.
    """
    args = dict()
    if sys.version_info < (3, 0):
        spec = inspect.getargspec(func)
        if spec.defaults is not None:
            ndefaults = len(spec.defaults)
            for name, default in zip(spec.args[-ndefaults:], spec.defaults):
                args[name] = default
    else:
        sig = inspect.signature(func)
        for param in list(sig.parameters.values())[1:]:
            if param.default is not param.empty:
                args[param.name] = param.default
    return args
determine all positional and named arguments as a dict
52,087
def str_or_unicode(text):
    """Round-trip *text* through stdout's encoding.

    Python 3 returns a str (encode/decode round trip); Python 2 returns
    the encoded byte string.
    """
    encoding = sys.stdout.encoding
    if sys.version_info > (3, 0):
        return text.encode(encoding).decode(encoding)
    return text.encode(encoding)
handle python 3 unicode and python 2 . 7 byte strings
52,088
def is_relative_url(url):
    """Classify a URL: True if relative, False if absolute, None for
    pure fragment links ("#...")."""
    if url.startswith("#"):
        return None
    if url.startswith("//") or url.find("://") > 0:
        return False
    return True
simple method to determine if a url is relative or absolute
52,089
def read_file(filepath):
    """Return the full text contents of *filepath*."""
    with io.open(filepath, "r") as fp:
        return fp.read()
read the file
52,090
def _pull_content_revision_parent(self):
    """Fetch (and cache) page content, revision id, and parent revision id.

    Performs the combined API call only when the revision id is not yet
    cached; returns the cached triple thereafter.
    """
    if self._revision_id is None:
        params = {
            "prop": "extracts|revisions",
            "explaintext": "",
            "rvprop": "ids",
        }
        params.update(self.__title_query_param())
        request = self.mediawiki.wiki_request(params)
        page_info = request["query"]["pages"][self.pageid]
        self._content = page_info["extract"]
        self._revision_id = page_info["revisions"][0]["revid"]
        self._parent_id = page_info["revisions"][0]["parentid"]
    return self._content, self._revision_id, self._parent_id
combine the pulling of these three properties
52,091
def section(self, section_title):
    """Plain-text content of the section *section_title*, or None.

    Finds the "== title ==" heading and returns the text up to the next
    heading (or end of content).
    """
    header = "== {0} ==".format(section_title)
    try:
        content = self.content
        index = content.index(header) + len(header)
        # step past extra '=' characters from deeper heading levels
        while True:
            if content[index + 1] == "=":
                index += 1
            else:
                break
    except ValueError:
        return None  # heading not found
    except IndexError:
        pass  # heading sits at the very end of the content
    try:
        next_index = self.content.index("==", index)
    except ValueError:
        next_index = len(self.content)
    return self.content[index:next_index].lstrip("=").strip()
Plain text section content
52,092
def parse_section_links(self, section_title):
    """Links within the section whose rendered headline matches
    *section_title* (case-insensitive), or None when no section matches."""
    soup = BeautifulSoup(self.html, "html.parser")
    wanted = BeautifulSoup(section_title, "html.parser").get_text().lower()
    id_tag = None
    for headline in soup.find_all("span", {"class": "mw-headline"}):
        if headline.text.lower() == wanted:
            id_tag = headline.get("id")
            break
    if id_tag is not None:
        return self._parse_section_links(id_tag)
    return None
Parse all links within a section
52,093
def __load(self, redirect=True, preload=False):
    """Load basic page info and dispatch on the response.

    Handles missing pages (page error), redirects, and disambiguation
    pages; otherwise records pageid, title, and canonical URL on self.
    """
    params = {
        "prop": "info|pageprops",
        "inprop": "url",
        "ppprop": "disambiguation",
        "redirects": "",
    }
    params.update(self.__title_query_param())
    request = self.mediawiki.wiki_request(params)
    query = request["query"]
    pageid = list(query["pages"].keys())[0]
    page = query["pages"][pageid]
    if "missing" in page:
        self._raise_page_error()
    elif "redirects" in query:
        self._handle_redirect(redirect, preload, query, page)
    elif "pageprops" in page:
        # presence of the disambiguation pageprop marks a disambig page
        self._raise_disambiguation_error(page, pageid)
    else:
        self.pageid = pageid
        self.title = page["title"]
        self.url = page["fullurl"]
load the basic page information
52,094
def _raise_page_error(self):
    """Raise PageError keyed on title when known, else on pageid."""
    if hasattr(self, "title"):
        raise PageError(title=self.title)
    raise PageError(pageid=self.pageid)
raise the correct type of page error
52,095
def _raise_disambiguation_error(self, page, pageid):
    """Fetch the rendered disambiguation list and raise
    DisambiguationError carrying the possible referents."""
    params = {
        "prop": "revisions",
        "rvprop": "content",
        "rvparse": "",
        "rvlimit": 1,
    }
    params.update(self.__title_query_param())
    request = self.mediawiki.wiki_request(params)
    html = request["query"]["pages"][pageid]["revisions"][0]["*"]
    lis = BeautifulSoup(html, "html.parser").find_all("li")
    # drop table-of-contents entries
    filtered_lis = [li for li in lis
                    if "tocsection" not in "".join(li.get("class", list()))]
    may_refer_to = [li.a.get_text() for li in filtered_lis if li.a]
    disambiguation = list()
    for lis_item in filtered_lis:
        anchors = lis_item.find_all("a")
        entry = dict()
        entry["description"] = lis_item.text
        if anchors and hasattr(anchors[0], "title"):
            entry["title"] = anchors[0]["title"]
        else:
            entry["title"] = lis_item.text
        disambiguation.append(entry)
    raise DisambiguationError(
        getattr(self, "title", page["title"]),
        may_refer_to,
        page["fullurl"],
        disambiguation,
    )
parse and throw a disambiguation error
52,096
def _parse_section_links(self, id_tag):
    """Collect link info from the content following heading *id_tag*,
    stopping at the next section headline.

    Navigation boxes, infoboxes, and non-tag nodes are skipped.
    """
    soup = BeautifulSoup(self.html, "html.parser")
    info = soup.find("span", {"id": id_tag})
    all_links = list()
    if info is None:
        return all_links
    for node in soup.find(id=id_tag).parent.next_siblings:
        if not isinstance(node, Tag):
            continue  # skip NavigableStrings etc.
        if node.get("role", "") == "navigation":
            continue  # skip navigation boxes
        if "infobox" in node.get("class", []):
            continue  # skip infoboxes
        if node.find("span", {"class": "mw-headline"}) is not None:
            break  # reached the next section heading
        if node.name == "a":
            all_links.append(self.__parse_link_info(node))
        else:
            for link in node.findAll("a"):
                all_links.append(self.__parse_link_info(link))
    return all_links
given a section id parse the links in the unordered list
52,097
# Parse section headings out of the plain-text content and build both a
# flat list (self._sections) and a nested OrderedDict table of contents
# (self._table_of_contents). Heading depth is inferred from the number
# of '=' characters ("== x ==" is depth 0); `path` tracks the current
# position in the nested dict as depth rises and falls, and the nested
# helper inserts a fresh OrderedDict at that path.
def _parse_sections ( self ) : def _list_to_dict ( _dict , path , sec ) : tmp = _dict for elm in path [ : - 1 ] : tmp = tmp [ elm ] tmp [ sec ] = OrderedDict ( ) self . _sections = list ( ) section_regexp = r"\n==* .* ==*\n" found_obj = re . findall ( section_regexp , self . content ) res = OrderedDict ( ) path = list ( ) last_depth = 0 for obj in found_obj : depth = obj . count ( "=" ) / 2 depth -= 2 sec = obj . lstrip ( "\n= " ) . rstrip ( " =\n" ) if depth == 0 : last_depth = 0 path = [ sec ] res [ sec ] = OrderedDict ( ) elif depth > last_depth : last_depth = depth path . append ( sec ) _list_to_dict ( res , path , sec ) elif depth < last_depth : while last_depth > depth : path . pop ( ) last_depth -= 1 path . pop ( ) path . append ( sec ) _list_to_dict ( res , path , sec ) last_depth = depth else : path . pop ( ) path . append ( sec ) _list_to_dict ( res , path , sec ) last_depth = depth self . _sections . append ( sec ) self . _table_of_contents = res
parse sections and TOC
52,098
def __pull_combined_properties(self):
    """Pull summary, redirects, links, categories, coordinates, and
    references in a single batched (continued) API query and cache them
    on the instance attributes."""
    query_params = {
        "titles": self.title,
        "prop": "extracts|redirects|links|coordinates|categories|extlinks",
        "continue": dict(),
        # summary
        "explaintext": "",
        "exintro": "",
        # redirects
        "rdprop": "title",
        "rdlimit": "max",
        # links
        "plnamespace": 0,
        "pllimit": "max",
        # coordinates
        "colimit": "max",
        # categories
        "cllimit": "max",
        "clshow": "!hidden",
        # references (external links)
        "ellimit": "max",
    }
    last_cont = dict()
    results = dict()
    idx = 0
    while True:
        params = query_params.copy()
        params.update(last_cont)
        request = self.mediawiki.wiki_request(params)
        idx += 1
        if "query" not in request:
            break
        keys = [
            "extracts",
            "redirects",
            "links",
            "coordinates",
            "categories",
            "extlinks",
        ]
        new_cont = request.get("continue")
        request = request["query"]["pages"][self.pageid]
        if not results:
            results = request
        else:
            for key in keys:
                if key in request and request.get(key) is not None:
                    val = request.get(key)
                    tmp = results.get(key)
                    if isinstance(tmp, (list, tuple)):
                        # BUG FIX: the original used `results.get(key, list)`,
                        # which would add the list *class* (TypeError) on a
                        # missing key; tmp is the already-fetched value.
                        results[key] = tmp + val
        if new_cont is None or new_cont == last_cont:
            break
        last_cont = new_cont

    # redirects
    tmp = [link["title"] for link in results.get("redirects", list())]
    self._redirects = sorted(tmp)
    # summary (intro extract)
    self._summary = results["extract"]
    # links
    tmp = [link["title"] for link in results.get("links", list())]
    self._links = sorted(tmp)

    def _get_cat(val):
        # strip the localized "Category:" prefix (plus the colon)
        title = val["title"]
        prefix = self.mediawiki.category_prefix
        if title.startswith(prefix):
            return title[len(prefix) + 1:]
        return title

    tmp = [_get_cat(link) for link in results.get("categories", list())]
    self._categories = sorted(tmp)
    # coordinates are only present for geotagged pages
    if "coordinates" in results:
        self._coordinates = (
            Decimal(results["coordinates"][0]["lat"]),
            Decimal(results["coordinates"][0]["lon"]),
        )
    # references (external links)
    tmp = [link["*"] for link in results.get("extlinks", list())]
    self._references = sorted(tmp)
Pull summary, redirects, links, categories, coordinates, and references in one combined API query.
52,099
def is_armed(self):
    """Return True when the system is armed either fully or partially."""
    status = self.get_armed_status()
    return status in (YALE_STATE_ARM_FULL, YALE_STATE_ARM_PARTIAL)
Return True or False if the system is armed in any way