idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
7,600 | def sha1_hmac ( secret , document ) : signature = hmac . new ( secret , document , hashlib . sha1 ) . digest ( ) . encode ( "base64" ) [ : - 1 ] return signature | Calculate the Base 64 encoding of the HMAC for the given document . |
7,601 | def filter_query_string ( query ) : return '&' . join ( [ q for q in query . split ( '&' ) if not ( q . startswith ( '_k=' ) or q . startswith ( '_e=' ) or q . startswith ( '_s' ) ) ] ) | Return a version of the query string with the _e _k and _s values removed . |
7,602 | def fost_hmac_url_signature ( key , secret , host , path , query_string , expires ) : if query_string : document = '%s%s?%s\n%s' % ( host , path , query_string , expires ) else : document = '%s%s\n%s' % ( host , path , expires ) signature = sha1_hmac ( secret , document ) return signature | Return a signature that corresponds to the signed URL . |
7,603 | def fost_hmac_request_signature ( secret , method , path , timestamp , headers = { } , body = '' ) : signed_headers , header_values = 'X-FOST-Headers' , [ ] for header , value in headers . items ( ) : signed_headers += ' ' + header header_values . append ( value ) return fost_hmac_request_signature_with_headers ( secret , method , path , timestamp , [ signed_headers ] + header_values , body ) | Calculate the signature for the given secret and arguments . |
7,604 | def fost_hmac_request_signature_with_headers ( secret , method , path , timestamp , headers , body ) : document = "%s %s\n%s\n%s\n%s" % ( method , path , timestamp , '\n' . join ( headers ) , body ) signature = sha1_hmac ( secret , document ) logging . info ( "Calculated signature %s for document\n%s" , signature , document ) return document , signature | Calculate the signature for the given secret and other arguments . |
7,605 | def get_order ( membersuite_id , client = None ) : if not membersuite_id : return None client = client or get_new_client ( request_session = True ) if not client . session_id : client . request_session ( ) object_query = "SELECT Object() FROM ORDER WHERE ID = '{}'" . format ( membersuite_id ) result = client . execute_object_query ( object_query ) msql_result = result [ "body" ] [ "ExecuteMSQLResult" ] if msql_result [ "Success" ] : membersuite_object_data = ( msql_result [ "ResultValue" ] [ "SingleObject" ] ) else : raise ExecuteMSQLError ( result = result ) return Order ( membersuite_object_data = membersuite_object_data ) | Get an Order by ID . |
7,606 | def export_private_key ( self , password = None ) : if self . __private_key is None : raise ValueError ( 'Unable to call this method. Private key must be set' ) if password is not None : if isinstance ( password , str ) is True : password = password . encode ( ) return self . __private_key . private_bytes ( encoding = serialization . Encoding . PEM , format = serialization . PrivateFormat . PKCS8 , encryption_algorithm = serialization . BestAvailableEncryption ( password ) ) return self . __private_key . private_bytes ( encoding = serialization . Encoding . PEM , format = serialization . PrivateFormat . TraditionalOpenSSL , encryption_algorithm = serialization . NoEncryption ( ) ) | Export a private key in PEM - format |
7,607 | def export_public_key ( self ) : if self . __public_key is None : raise ValueError ( 'Unable to call this method. Public key must be set' ) return self . __public_key . public_bytes ( encoding = serialization . Encoding . PEM , format = serialization . PublicFormat . SubjectPublicKeyInfo ) | Export a public key in PEM - format |
7,608 | def import_private_key ( self , pem_text , password = None ) : if isinstance ( pem_text , str ) is True : pem_text = pem_text . encode ( ) if password is not None and isinstance ( password , str ) is True : password = password . encode ( ) self . __set_private_key ( serialization . load_pem_private_key ( pem_text , password = password , backend = default_backend ( ) ) ) | Import a private key from data in PEM - format |
7,609 | def decrypt ( self , data , oaep_hash_fn_name = None , mgf1_hash_fn_name = None ) : if self . __private_key is None : raise ValueError ( 'Unable to call this method. Private key must be set' ) if oaep_hash_fn_name is None : oaep_hash_fn_name = self . __class__ . __default_oaep_hash_function_name__ if mgf1_hash_fn_name is None : mgf1_hash_fn_name = self . __class__ . __default_mgf1_hash_function_name__ oaep_hash_cls = getattr ( hashes , oaep_hash_fn_name ) mgf1_hash_cls = getattr ( hashes , mgf1_hash_fn_name ) return self . __private_key . decrypt ( data , padding . OAEP ( mgf = padding . MGF1 ( algorithm = mgf1_hash_cls ( ) ) , algorithm = oaep_hash_cls ( ) , label = None ) ) | Decrypt a data that used PKCS1 OAEP protocol |
7,610 | def validate ( self , value , model = None , context = None ) : length = len ( str ( value ) ) params = dict ( min = self . min , max = self . max ) if self . min and self . max is None : if length < self . min : return Error ( self . too_short , params ) if self . max and self . min is None : if length > self . max : return Error ( self . too_long , params ) if self . min and self . max : if length < self . min or length > self . max : return Error ( self . not_in_range , params ) return Error ( ) | Validate Perform value validation against validation settings and return simple result object |
7,611 | def qteSaveMacroData ( self , data , widgetObj : QtGui . QWidget = None ) : if not hasattr ( widgetObj , '_qteAdmin' ) and ( widgetObj is not None ) : msg = '<widgetObj> was probably not added with <qteAddWidget>' msg += ' method because it lacks the <_qteAdmin> attribute.' raise QtmacsOtherError ( msg ) if not widgetObj : widgetObj = self . qteWidget widgetObj . _qteAdmin . macroData [ self . qteMacroName ( ) ] = data | Associate arbitrary data with widgetObj . |
7,612 | def qteMacroData ( self , widgetObj : QtGui . QWidget = None ) : if not hasattr ( widgetObj , '_qteAdmin' ) and ( widgetObj is not None ) : msg = '<widgetObj> was probably not added with <qteAddWidget>' msg += ' method because it lacks the <_qteAdmin> attribute.' raise QtmacsOtherError ( msg ) if not widgetObj : widgetObj = self . qteWidget try : _ = widgetObj . _qteAdmin . macroData [ self . qteMacroName ( ) ] except KeyError : widgetObj . _qteAdmin . macroData [ self . qteMacroName ( ) ] = None return widgetObj . _qteAdmin . macroData [ self . qteMacroName ( ) ] | Retrieve widgetObj specific data previously saved with qteSaveMacroData . |
7,613 | def qteSetAppletSignature ( self , appletSignatures : ( str , tuple , list ) ) : if not isinstance ( appletSignatures , ( tuple , list ) ) : appletSignatures = appletSignatures , for idx , val in enumerate ( appletSignatures ) : if not isinstance ( val , str ) : args = ( 'appletSignatures' , 'str' , inspect . stack ( ) [ 0 ] [ 3 ] ) raise QtmacsArgumentError ( * args ) self . _qteAppletSignatures = tuple ( appletSignatures ) | Specify the applet signatures with which this macro is compatible . |
7,614 | def qteSetWidgetSignature ( self , widgetSignatures : ( str , tuple , list ) ) : if not isinstance ( widgetSignatures , ( tuple , list ) ) : widgetSignatures = widgetSignatures , for idx , val in enumerate ( widgetSignatures ) : if not isinstance ( val , str ) : args = ( 'widgetSignatures' , 'str' , inspect . stack ( ) [ 0 ] [ 3 ] ) raise QtmacsArgumentError ( * args ) self . _qteWidgetSignatures = tuple ( widgetSignatures ) | Specify the widget signatures with which this macro is compatible . |
7,615 | def qtePrepareToRun ( self ) : msgObj = QtmacsMessage ( ( self . qteMacroName ( ) , self . qteWidget ) , None ) msgObj . setSignalName ( 'qtesigMacroStart' ) self . qteMain . qtesigMacroStart . emit ( msgObj ) try : self . qteRun ( ) self . qteMain . qtesigMacroFinished . emit ( msgObj ) except Exception as err : if self . qteApplet is None : appID = appSig = None else : appID = self . qteApplet . qteAppletID ( ) appSig = self . qteApplet . qteAppletSignature ( ) msg = ( 'Macro <b>{}</b> (called from the <b>{}</b> applet' ' with ID <b>{}</b>) did not execute properly.' ) msg = msg . format ( self . qteMacroName ( ) , appSig , appID ) if isinstance ( err , QtmacsArgumentError ) : msg += '<br/>' + str ( err ) self . qteMain . qteEnableMacroProcessing ( ) self . qteMain . qtesigMacroError . emit ( msgObj ) self . qteLogger . exception ( msg , exc_info = True , stack_info = True ) | This method is called by Qtmacs to prepare the macro for execution . |
7,616 | def all_valid ( formsets ) : valid = True for formset in formsets : if not formset . is_valid ( ) : valid = False return valid | Returns true if every formset in formsets is valid . |
7,617 | def forms_valid ( self , inlines ) : for formset in inlines : formset . save ( ) return HttpResponseRedirect ( self . get_success_url ( ) ) | If the form and formsets are valid save the associated models . |
7,618 | def post ( self , request , * args , ** kwargs ) : self . object = self . get_object ( ) self . get_context_data ( ) inlines = self . construct_inlines ( ) if all_valid ( inlines ) : return self . forms_valid ( inlines ) return self . forms_invalid ( inlines ) | Handles POST requests instantiating a form and formset instances with the passed POST variables and then checked for validity . |
7,619 | def get_success_url ( self ) : if self . success_url : url = force_text ( self . success_url ) else : raise ImproperlyConfigured ( "No URL to redirect to. Provide a success_url." ) return url | Returns the supplied success URL . |
7,620 | def displayStatusMessage ( self , msgObj ) : msg = msgObj . data if not msg . endswith ( '\n' ) : msg = msg + '\n' self . qteLabel . setText ( msg ) | Display the last status message and partially completed key sequences . |
7,621 | def qteUpdateLogSlot ( self ) : log = self . logHandler . fetch ( start = self . qteLogCnt ) self . qteLogCnt += len ( log ) if not len ( log ) : return log_pruned = [ ] last_entry = log [ 0 ] num_rep = - 1 for cur_entry in log : if last_entry . msg == cur_entry . msg : num_rep += 1 else : log_pruned . append ( [ last_entry , num_rep ] ) num_rep = 0 last_entry = cur_entry log_pruned . append ( [ cur_entry , num_rep ] ) log_formatted = "" for cur_entry in log_pruned : log_formatted += self . qteFormatMessage ( cur_entry [ 0 ] , cur_entry [ 1 ] ) log_formatted + '\n' self . qteText . insertHtml ( log_formatted ) self . qteMoveToEndOfBuffer ( ) if self . qteAutoActivate : self . qteAutoActivate = False self . qteMain . qteMakeAppletActive ( self ) | Fetch and display the next batch of log messages . |
7,622 | def qteMoveToEndOfBuffer ( self ) : tc = self . qteText . textCursor ( ) tc . movePosition ( QtGui . QTextCursor . End ) self . qteText . setTextCursor ( tc ) | Move cursor to the end of the buffer to facilitate auto scrolling . |
7,623 | def sign_token_records ( profile_components , parent_private_key , signing_algorithm = "ES256K" ) : if signing_algorithm != "ES256K" : raise ValueError ( "Signing algorithm not supported" ) token_records = [ ] for profile_component in profile_components : private_key = ECPrivateKey ( parent_private_key ) public_key = private_key . public_key ( ) subject = { "publicKey" : public_key . to_hex ( ) } token = sign_token ( profile_component , private_key . to_hex ( ) , subject , signing_algorithm = signing_algorithm ) token_record = wrap_token ( token ) token_record [ "parentPublicKey" ] = public_key . to_hex ( ) token_records . append ( token_record ) return token_records | Function for iterating through a list of profile components and signing separate individual profile tokens . |
7,624 | def normalize_locale ( locale ) : import re match = re . match ( r'^[a-z]+' , locale . lower ( ) ) if match : return match . group ( ) | Normalize locale Extracts language code from passed in locale string to be used later for dictionaries loading . |
7,625 | def get_translations ( self , locale ) : locale = self . normalize_locale ( locale ) if locale in self . translations : return self . translations [ locale ] translations = { } for path in self . dirs : file = os . path . join ( path , '{}.py' . format ( locale ) ) if not os . path . isfile ( file ) : continue loader = SourceFileLoader ( locale , file ) locale_dict = loader . load_module ( ) if not hasattr ( locale_dict , 'translations' ) : continue language = getattr ( locale_dict , 'translations' ) if translations : translations = language else : merged = dict ( translations . items ( ) | language . items ( ) ) translations = merged if translations : self . translations [ locale ] = translations return translations err = 'No translations found for locale [{}]' raise NoTranslations ( err . format ( locale ) ) | Get translation dictionary Returns a dictionary for locale or raises an exception if such can t be located . If a dictionary for locale was previously loaded returns that otherwise goes through registered locations and merges any found custom dictionaries with defaults . |
7,626 | def translate ( self , message , locale ) : translations = self . get_translations ( locale ) if message in translations : return translations [ message ] return message | Translate Translates a message to the given locale language . Will return original message if no translation exists for the message . |
7,627 | def flatten ( l ) : for el in l : if isinstance ( el , Iterable ) and not isinstance ( el , ( str , bytes ) ) and not isinstance ( el , dict ) : yield from flatten ( el ) else : yield el | Flatten a multi - deminision list and return a iterable |
7,628 | def crossCombine ( l ) : resultList = [ ] firstList = l [ 0 ] rest = l [ 1 : ] if len ( rest ) == 0 : return firstList for e in firstList : for e1 in crossCombine ( rest ) : resultList . append ( combinteDict ( e , e1 ) ) return resultList | Taken a list of lists returns a big list of lists contain all the possibilities of elements of sublist combining together . |
7,629 | def combine ( a1 , a2 ) : if not isinstance ( a1 , list ) : a1 = [ a1 ] if not isinstance ( a2 , list ) : a2 = [ a2 ] return a1 + a2 | Combine to argument into a single flat list |
7,630 | def singleOrPair ( obj ) : if len ( list ( obj . __class__ . __mro__ ) ) <= 2 : return 'Neither' else : if ancestorJr ( obj ) is Pair : return 'Pair' elif ancestor ( obj ) is Single : return 'Single' else : return 'Neither' | Chech an object is single or pair or neither . |
7,631 | def removeEverything ( toBeRemoved , l ) : successful = True while successful : try : l . remove ( toBeRemoved ) except : successful = False | Remove every instance that matches the input from a list |
7,632 | def add ( self , * value ) : flattenedValueList = list ( flatten ( value ) ) return self . _add ( flattenedValueList , self . value ) | convert value and add to self . value |
7,633 | def _remove ( self , removeList , selfValue ) : for removeValue in removeList : print ( removeValue , removeList ) removeEverything ( removeValue , selfValue ) | Remove elements from a list by matching the elements in the other list . |
7,634 | def remove ( self , * l ) : removeList = list ( flatten ( l ) ) self . _remove ( removeList , self . value ) | remove elements from self . value by matching . |
7,635 | def write ( self ) : with open ( self . me , 'w' ) as f : f . write ( self . printMe ( self . tag , self . value ) ) | Write the job to the corresponding plist . |
7,636 | def add ( self , * l ) : for a in flatten ( l ) : self . _add ( [ self . Inner ( a ) ] , self . l ) | add inner to outer |
7,637 | def remove ( self , * l ) : for a in flatten ( l ) : self . _remove ( [ self . Inner ( a ) ] , self . l ) | remove inner from outer |
7,638 | def add ( self , dic ) : for kw in dic : checkKey ( kw , self . keyWord ) self . _add ( [ Pair ( kw , StringSingle ( dic [ kw ] ) ) ] , self . d ) | adds a dict as pair |
7,639 | def remove ( self , dic ) : for kw in dic : removePair = Pair ( kw , dic [ kw ] ) self . _remove ( [ removePair ] ) | remove the pair by passing a identical dict |
7,640 | def _update ( self , baseNumber , magnification ) : interval = int ( baseNumber * magnification ) self . value = [ IntegerSingle ( interval ) ] | update self . value with basenumber and time interval |
7,641 | def second ( self ) : self . magnification = 1 self . _update ( self . baseNumber , self . magnification ) return self | set unit to second |
7,642 | def minute ( self ) : self . magnification = 60 self . _update ( self . baseNumber , self . magnification ) return self | set unit to minute |
7,643 | def hour ( self ) : self . magnification = 3600 self . _update ( self . baseNumber , self . magnification ) return self | set unit to hour |
7,644 | def day ( self ) : self . magnification = 86400 self . _update ( self . baseNumber , self . magnification ) return self | set unit to day |
7,645 | def week ( self ) : self . magnification = 345600 self . _update ( self . baseNumber , self . magnification ) return self | set unit to week |
7,646 | def add ( self , * dic ) : dicList = list ( flatten ( dic ) ) for d in dicList : di = [ ] for k in d : di . append ( Pair ( k , IntegerSingle ( d [ k ] ) ) ) dictSingle = DictSingle ( di ) self . _add ( [ dictSingle ] , self . l ) | add a config to StartCalendarInterval . |
7,647 | def remove ( self , * dic ) : dicList = list ( flatten ( dic ) ) for d in dicList : di = [ ] for k in d : di . append ( Pair ( k , IntegerSingle ( d [ k ] ) ) ) dictSingle = DictSingle ( di ) self . _remove ( [ dictSingle ] , self . l ) | remove a calendar config . |
7,648 | def get_index_url ( self , resource = None , ** kwargs ) : default_kwargs = self . default_kwargs_for_urls ( ) if resource == self . get_resource_name ( ) else { } default_kwargs . update ( kwargs ) return self . get_full_url ( self . app . reverse ( '{}_index' . format ( resource or self . get_resource_name ( ) ) , ** default_kwargs ) ) | Builds the url of the resource s index . |
7,649 | def get_parent ( self ) : if self . is_entity ( ) : return self . get_index_url ( ** self . default_kwargs_for_urls ( ) ) elif self . _parent is not None : resource = self . _parent . rsplit ( '_' , 1 ) [ 0 ] parts = self . default_kwargs_for_urls ( ) if '{}_id' . format ( resource ) in parts : id = parts . pop ( '{}_id' . format ( resource ) ) parts [ 'id' ] = id return self . get_full_url ( self . app . reverse ( self . _parent , ** parts ) ) | Returns the url to the parent endpoint . |
7,650 | def export_context ( cls , context ) : if context is None : return result = [ ( x . context_name ( ) , x . context_value ( ) ) for x in context ] result . reverse ( ) return tuple ( result ) | Export the specified context to be capable context transferring |
7,651 | def match ( self , command_context = None , ** command_env ) : spec = self . specification ( ) if command_context is None and spec is None : return True elif command_context is not None and spec is not None : return command_context == spec return False | Check if context request is compatible with adapters specification . True - if compatible False - otherwise |
7,652 | def any_shared ( enum_one , enum_two ) : if not is_collection ( enum_one ) or not is_collection ( enum_two ) : return False enum_one = enum_one if isinstance ( enum_one , ( set , dict ) ) else set ( enum_one ) enum_two = enum_two if isinstance ( enum_two , ( set , dict ) ) else set ( enum_two ) return any ( e in enum_two for e in enum_one ) | Truthy if any element in enum_one is present in enum_two |
7,653 | def match ( self , request , service ) : uri = self . normalize_uri ( request . path ( ) ) if request . session ( ) . protocol ( ) not in self . protocols : return if request . method ( ) not in self . methods : return if self . virtual_hosts and request . virtual_host ( ) not in self . virtual_hosts : return if self . ports and int ( request . session ( ) . server_address ( ) . port ( ) ) not in self . ports : return match_obj = self . re_pattern . match ( uri ) if not match_obj : return presenter_action = self . action presenter_args = self . presenter_args . copy ( ) for i in range ( len ( self . route_args ) ) : if self . route_args [ i ] == 'action' : presenter_action = match_obj . group ( i + 1 ) else : presenter_args [ self . route_args [ i ] ] = match_obj . group ( i + 1 ) return WWebTargetRoute ( self . presenter , presenter_action , self , service . route_map ( ) , ** presenter_args ) | Check this route for matching the given request . If this route is matched then target route is returned . |
7,654 | def connect ( self , pattern , presenter , ** kwargs ) : self . __routes . append ( WWebRoute ( pattern , presenter , ** kwargs ) ) | Connect the given pattern with the given presenter |
7,655 | def import_route ( self , route_as_txt ) : route_match = WWebRouteMap . import_route_re . match ( route_as_txt ) if route_match is None : raise ValueError ( 'Invalid route code' ) pattern = route_match . group ( 1 ) presenter_name = route_match . group ( 2 ) route_args = route_match . group ( 4 ) if route_args is not None : result_args = { } for arg_declaration in route_args . split ( "," ) : arg_match = WWebRouteMap . import_route_arg_re . match ( arg_declaration ) if arg_match is None : raise RuntimeError ( 'Invalid argument declaration in route' ) result_args [ arg_match . group ( 1 ) ] = arg_match . group ( 3 ) self . connect ( pattern , presenter_name , ** result_args ) else : self . connect ( pattern , presenter_name ) | Import route written as a string |
7,656 | def process_request ( self , session ) : debugger = self . debugger ( ) debugger_session_id = debugger . session_id ( ) if debugger is not None else None try : request = session . read_request ( ) if debugger_session_id is not None : debugger . request ( debugger_session_id , request , session . protocol_version ( ) , session . protocol ( ) ) try : target_route = self . route_map ( ) . route ( request , self ) if debugger_session_id is not None : debugger . target_route ( debugger_session_id , target_route ) if target_route is not None : response = self . execute ( request , target_route ) else : presenter_cls = self . route_map ( ) . error_presenter ( ) presenter = presenter_cls ( request ) response = presenter . error_code ( code = 404 ) if debugger_session_id is not None : debugger . response ( debugger_session_id , response ) except Exception as e : if debugger_session_id is not None : debugger . exception ( debugger_session_id , e ) presenter_cls = self . route_map ( ) . error_presenter ( ) presenter = presenter_cls ( request ) response = presenter . exception_error ( e ) session . write_response ( request , response , * response . __pushed_responses__ ( ) ) except Exception as e : if debugger_session_id is not None : debugger . exception ( debugger_session_id , e ) session . session_close ( ) if debugger_session_id is not None : debugger . finalize ( debugger_session_id ) | Process single request from the given session |
7,657 | def create_presenter ( self , request , target_route ) : presenter_name = target_route . presenter_name ( ) if self . presenter_collection ( ) . has ( presenter_name ) is False : raise RuntimeError ( 'No such presenter: %s' % presenter_name ) presenter_class = self . presenter_collection ( ) . presenter ( presenter_name ) return self . presenter_factory ( ) . instantiate ( presenter_class , request , target_route , self ) | Create presenter from the given requests and target routes |
7,658 | def proxy ( self , request , original_target_route , presenter_name , ** kwargs ) : action_kwargs = kwargs . copy ( ) action_name = 'index' if 'action' in action_kwargs : action_name = action_kwargs [ 'action' ] action_kwargs . pop ( 'action' ) original_route = original_target_route . route ( ) original_route_map = original_target_route . route_map ( ) target_route = WWebTargetRoute ( presenter_name , action_name , original_route , original_route_map , ** action_kwargs ) return self . execute ( request , target_route ) | Execute the given presenter as a target for the given client request |
7,659 | def find_point_in_section_list ( point , section_list ) : if point < section_list [ 0 ] or point > section_list [ - 1 ] : return None if point in section_list : if point == section_list [ - 1 ] : return section_list [ - 2 ] ind = section_list . bisect ( point ) - 1 if ind == 0 : return section_list [ 0 ] return section_list [ ind ] try : ind = section_list . bisect ( point ) return section_list [ ind - 1 ] except IndexError : return None | Returns the start of the section the given point belongs to . |
7,660 | def find_range_ix_in_section_list ( start , end , section_list ) : if start > section_list [ - 1 ] or end < section_list [ 0 ] : return [ 0 , 0 ] if start < section_list [ 0 ] : start_section = section_list [ 0 ] else : start_section = find_point_in_section_list ( start , section_list ) if end > section_list [ - 1 ] : end_section = section_list [ - 2 ] else : end_section = find_point_in_section_list ( end , section_list ) return [ section_list . index ( start_section ) , section_list . index ( end_section ) + 1 ] | Returns the index range all sections belonging to the given range . |
7,661 | def find_range_in_section_list ( start , end , section_list ) : ind = find_range_ix_in_section_list ( start , end , section_list ) return section_list [ ind [ 0 ] : ind [ 1 ] ] | Returns all sections belonging to the given range . |
7,662 | def find_range_ix_in_point_list ( start , end , point_list ) : return [ point_list . bisect_left ( start ) , point_list . bisect_right ( end ) ] | Returns the index range all points inside the given range . |
7,663 | def split_option ( self , section , option ) : value = self [ section ] [ option ] . strip ( ) if value == "" : return [ ] return [ x . strip ( ) for x in ( value . split ( "," ) ) ] | Return list of strings that are made by splitting coma - separated option value . Method returns empty list if option value is empty string |
7,664 | def merge ( self , config ) : if isinstance ( config , ConfigParser ) is True : self . update ( config ) elif isinstance ( config , str ) : self . read ( config ) | Load configuration from given configuration . |
7,665 | def merge_section ( self , config , section_to , section_from = None ) : section_from = section_from if section_from is not None else section_to if section_from not in config . sections ( ) : raise ValueError ( 'There is no such section "%s" in config' % section_from ) if section_to not in self . sections ( ) : self . add_section ( section_to ) for option in config [ section_from ] . keys ( ) : self . set ( section_to , option , config [ section_from ] [ option ] ) | Load configuration section from other configuration . If specified section doesn t exist in current configuration then it will be added automatically . |
7,666 | def __option ( self ) : section = self . section ( ) option = self . option_prefix ( ) if self . config ( ) . has_option ( section , option ) is False : raise NoOptionError ( option , section ) return section , option | Check and return option from section from configuration . Option name is equal to option prefix |
7,667 | def select_options ( self , options_prefix ) : return WConfigSelection ( self . config ( ) , self . section ( ) , self . option_prefix ( ) + options_prefix ) | Select options from this selection that are started with the specified prefix |
7,668 | def has_option ( self , option_name = None ) : if option_name is None : option_name = '' return self . config ( ) . has_option ( self . section ( ) , self . option_prefix ( ) + option_name ) | Check whether configuration selection has the specified option . |
7,669 | def _args_checks_gen ( self , decorated_function , function_spec , arg_specs ) : inspected_args = function_spec . args args_check = { } for i in range ( len ( inspected_args ) ) : arg_name = inspected_args [ i ] if arg_name in arg_specs . keys ( ) : args_check [ arg_name ] = self . check ( arg_specs [ arg_name ] , arg_name , decorated_function ) return args_check | Generate checks for positional argument testing |
7,670 | def _kwargs_checks_gen ( self , decorated_function , function_spec , arg_specs ) : args_names = [ ] args_names . extend ( function_spec . args ) if function_spec . varargs is not None : args_names . append ( function_spec . args ) args_check = { } for arg_name in arg_specs . keys ( ) : if arg_name not in args_names : args_check [ arg_name ] = self . check ( arg_specs [ arg_name ] , arg_name , decorated_function ) return args_check | Generate checks for keyword argument testing |
7,671 | def decorator ( self , ** arg_specs ) : if self . decorate_disabled ( ) is True : def empty_decorator ( decorated_function ) : return decorated_function return empty_decorator def first_level_decorator ( decorated_function ) : function_spec = getfullargspec ( decorated_function ) args_checks = self . _args_checks_gen ( decorated_function , function_spec , arg_specs ) varargs_check = self . _varargs_checks_gen ( decorated_function , function_spec , arg_specs ) kwargs_checks = self . _kwargs_checks_gen ( decorated_function , function_spec , arg_specs ) def second_level_decorator ( original_function , * args , ** kwargs ) : self . _args_checks_test ( original_function , function_spec , args_checks , args , arg_specs ) self . _varargs_checks_test ( original_function , function_spec , varargs_check , args , arg_specs ) self . _kwargs_checks_test ( original_function , kwargs_checks , kwargs , arg_specs ) return original_function ( * args , ** kwargs ) return decorator ( second_level_decorator ) ( decorated_function ) return first_level_decorator | Return decorator that can decorate target function |
7,672 | def function_name ( fn ) : fn_name = fn . __name__ if hasattr ( fn , '__qualname__' ) : return fn . __qualname__ elif hasattr ( fn , '__self__' ) : owner = fn . __self__ if isclass ( owner ) is False : owner = owner . __class__ return '%s.%s' % ( owner . __name__ , fn_name ) return fn_name | Return function name in pretty style |
7,673 | def check ( self , type_spec , arg_name , decorated_function ) : def raise_exception ( x_spec ) : exc_text = 'Argument "%s" for function "%s" has invalid type' % ( arg_name , Verifier . function_name ( decorated_function ) ) exc_text += ' (%s should be %s)' % ( x_spec , type_spec ) raise TypeError ( exc_text ) if isinstance ( type_spec , ( tuple , list , set ) ) : for single_type in type_spec : if ( single_type is not None ) and isclass ( single_type ) is False : raise RuntimeError ( 'Invalid specification. Must be type or tuple/list/set of types' ) if None in type_spec : type_spec = tuple ( filter ( lambda x : x is not None , type_spec ) ) return lambda x : None if x is None or isinstance ( x , tuple ( type_spec ) ) is True else raise_exception ( str ( ( type ( x ) ) ) ) else : return lambda x : None if isinstance ( x , tuple ( type_spec ) ) is True else raise_exception ( str ( ( type ( x ) ) ) ) elif isclass ( type_spec ) : return lambda x : None if isinstance ( x , type_spec ) is True else raise_exception ( str ( ( type ( x ) ) ) ) else : raise RuntimeError ( 'Invalid specification. Must be type or tuple/list/set of types' ) | Return callable that checks function parameter for type validity . Checks parameter if it is instance of specified class or classes |
7,674 | def check ( self , type_spec , arg_name , decorated_function ) : def raise_exception ( text_spec ) : exc_text = 'Argument "%s" for function "%s" has invalid type' % ( arg_name , Verifier . function_name ( decorated_function ) ) exc_text += ' (%s)' % text_spec raise TypeError ( exc_text ) if isinstance ( type_spec , ( tuple , list , set ) ) : for single_type in type_spec : if ( single_type is not None ) and isclass ( single_type ) is False : raise RuntimeError ( 'Invalid specification. Must be type or tuple/list/set of types' ) if None in type_spec : type_spec = tuple ( filter ( lambda x : x is not None , type_spec ) ) return lambda x : None if x is None or ( isclass ( x ) is True and issubclass ( x , type_spec ) is True ) else raise_exception ( str ( x ) ) else : return lambda x : None if ( isclass ( x ) is True and issubclass ( x , type_spec ) is True ) else raise_exception ( str ( x ) ) elif isclass ( type_spec ) : return lambda x : None if ( isclass ( x ) is True and issubclass ( x , type_spec ) is True ) else raise_exception ( str ( x ) ) else : raise RuntimeError ( 'Invalid specification. Must be type or tuple/list/set of types' ) | Return callable that checks function parameter for class validity . Checks parameter if it is class or subclass of specified class or classes |
def check(self, value_spec, arg_name, decorated_function):
    """Return a callable that validates a function argument's value.

    ``value_spec`` is a predicate function (or a tuple/list/set of
    predicates); the returned callable raises ValueError when any
    predicate does not return exactly ``True``.
    """
    def raise_exception(text_spec):
        exc_text = 'Argument "%s" for function "%s" has invalid value' % (
            arg_name, Verifier.function_name(decorated_function))
        exc_text += ' (%s)' % text_spec
        raise ValueError(exc_text)

    if isinstance(value_spec, (tuple, list, set)):
        for predicate in value_spec:
            if isfunction(predicate) is False:
                raise RuntimeError('Invalid specification. Must be function or tuple/list/set of functions')

        def check_all(x):
            # Every predicate must return exactly True for x to pass.
            for predicate in value_spec:
                if predicate(x) is not True:
                    raise_exception(str(x))
        return check_all
    if isfunction(value_spec):
        return lambda x: None if value_spec(x) is True else raise_exception(str(x))
    raise RuntimeError('Invalid specification. Must be function or tuple/list/set of functions')
def cache_control(validator=None, storage=None):
    """Decorator factory that caches a function's result.

    ``validator`` decides whether a cached value may be reused for a call;
    ``storage`` holds cached entries (defaults to a global singleton cache).
    """
    def accept_everything(*args, **kwargs):
        return True

    if validator is None:
        validator = accept_everything
    if storage is None:
        storage = WGlobalSingletonCacheStorage()

    def first_level_decorator(decorated_function):
        def second_level_decorator(original_function, *args, **kwargs):
            cache_ok = validator(original_function, *args, **kwargs)
            entry = storage.get_cache(original_function, *args, **kwargs)
            if cache_ok is True and entry.has_value is True:
                return entry.cached_value
            # Miss (or validator veto): compute, store, return.
            result = original_function(*args, **kwargs)
            storage.put(result, original_function, *args, **kwargs)
            return result
        return decorator(second_level_decorator)(decorated_function)
    return first_level_decorator
def has(self, decorated_function, *args, **kwargs):
    """Check whether a cached result exists for the given function call."""
    cache_entry = self.get_cache(decorated_function, *args, **kwargs)
    return cache_entry.has_value
def __check(self, decorated_function, *args, **kwargs):
    """Verify that *decorated_function* is being called as a bound method.

    The first positional argument must own a bound method with the same
    name as the decorated function; otherwise RuntimeError is raised.
    """
    if args:
        owner = args[0]
        method_name = decorated_function.__name__
        if hasattr(owner, method_name) is True:
            candidate = getattr(owner, method_name)
            # candidate.__self__ only exists on bound methods; comparing it
            # to the first argument confirms the binding.
            if callable(candidate) and candidate.__self__ == owner:
                return
    raise RuntimeError('Only bounded methods are allowed')
def ensure_dir(directory: str) -> None:
    """Create *directory* (including parents) if it doesn't exist."""
    if not os.path.isdir(directory):
        LOG.debug(f"Directory {directory} does not exist, creating it.")
        # exist_ok avoids a crash if another process creates the directory
        # between the isdir() check above and this call (TOCTOU race).
        os.makedirs(directory, exist_ok=True)
def expand(directory: str) -> str:
    """Expand ``~`` and environment variables in *directory*."""
    return os.path.expandvars(os.path.expanduser(directory))
def generate_downloader(headers: Dict[str, str], args: Any, max_per_hour: int = 30) -> Callable[..., None]:
    """Create a rate-limited download function with a text progress bar.

    Returns a ``_downloader(url, dest)`` callable that streams *url* to
    *dest*, creating parent directories as needed, limited to
    *max_per_hour* downloads.
    """
    def _downloader(url: str, dest: str) -> None:
        @rate_limited(max_per_hour, args)
        def _rate_limited_download() -> None:
            parent = os.path.dirname(dest)
            if not os.path.exists(parent):
                os.makedirs(parent)
            response = requests.get(url, headers=headers, stream=True)
            LOG.info(f"Downloading from '{url}'.")
            LOG.info(f"Trying to save to '{dest}'.")
            length = response.headers.get("content-length")
            total_length = 0 if length is None else int(length)
            expected_size = (total_length / CHUNK_SIZE) + 1
            chunks = response.iter_content(chunk_size=CHUNK_SIZE)
            # Touch the file so it exists even for zero-length bodies.
            open(dest, "a", encoding=FORCED_ENCODING).close()
            with open(dest, "wb") as stream:
                for chunk in tui.progress.bar(chunks, expected_size=expected_size):
                    # BUG FIX: empty keep-alive chunks used to trigger an
                    # early return, truncating the download; skip them
                    # instead (per requests' streaming docs).
                    if not chunk:
                        continue
                    stream.write(chunk)
                    stream.flush()
        _rate_limited_download()
    return _downloader
def parse_int_string(int_string: str) -> List[int]:
    """Parse a string like ``"1 23 4 - 8 32 1"`` into a unique list of ints.

    Ranges (``a-b``) are expanded inclusively; duplicates are collapsed;
    non-numeric tokens are logged and ignored. The result is not
    necessarily sorted.
    """
    cleaned = " ".join(int_string.strip().split())
    cleaned = cleaned.replace(" - ", "-")
    cleaned = cleaned.replace(",", " ")
    tokens = cleaned.split(" ")
    indices: Set[int] = set()
    for token in tokens:
        if "-" in token:
            endpoints = token.split("-")
            if len(endpoints) != 2:
                LOG.info(f"Dropping '{token}' as invalid - weird range.")
                continue
            # BUG FIX: tokens like "4-" or "-5" used to crash on int('');
            # drop them like any other malformed token instead.
            try:
                start = int(endpoints[0])
                end = int(endpoints[1]) + 1
            except ValueError:
                LOG.info(f"Dropping '{token}' as invalid - weird range.")
                continue
            indices.update(range(start, end))
        else:
            try:
                indices.add(int(token))
            except ValueError:
                LOG.info(f"Dropping '{token}' as invalid - not an int.")
    return list(indices)
def set_up_logging(log_filename: str = "log", verbosity: int = 0) -> logging.Logger:
    """Configure the module logger with file and console handlers.

    The file handler rotates (~1 GB per file, 10 backups) and records
    everything; the console handler is terse, switching to DEBUG when
    *verbosity* is positive.
    """
    LOG.setLevel(logging.DEBUG)

    file_handler = RotatingFileHandler(
        filename=log_filename, maxBytes=1024000000, backupCount=10)
    file_handler.setFormatter(logging.Formatter(
        fmt="%(asctime)s - %(levelname)s - %(module)s - %(message)s"))
    file_handler.setLevel(logging.DEBUG)
    LOG.addHandler(file_handler)

    stream_handler = logging.StreamHandler(sys.stdout)
    stream_handler.setFormatter(logging.Formatter(fmt="%(message)s"))
    stream_handler.setLevel(logging.DEBUG if verbosity > 0 else logging.INFO)
    LOG.addHandler(stream_handler)
    return LOG
def random_line(file_path: str, encoding: str = FORCED_ENCODING) -> str:
    """Return a uniformly random line from a file (stripped).

    Uses reservoir sampling so the whole file never needs to be held in
    memory.
    """
    chosen = ""
    seen = 0
    with open(file_path, encoding=encoding) as stream:
        for line in stream:
            seen += 1
            # Keep the current line with probability 1/seen; over the whole
            # file every line ends up equally likely.
            if random.uniform(0, seen) < 1:
                chosen = line
    return chosen.strip()
def get_percentage_from_prob(prob):
    """Convert a probability of being treated to the total percentage of
    clinical cases treated.

    Walks the sorted percentage thresholds in ``probability_list`` and
    returns the percentage just below the first threshold exceeding
    *prob*; 100 when none does.
    """
    assert isinstance(prob, (float, int))
    prob = float(prob)
    assert prob >= 0
    assert prob <= 1
    for percentage in sorted(probability_list.keys()):
        if prob < probability_list[percentage]:
            return percentage - 1
    return 100
def listify(generator_func):
    """Decorator converting a generator function into a list-returning one.

    The wrapped function's results are materialized via ``degenerate``.
    """
    from functools import wraps

    # wraps() preserves the decorated function's name/docstring so
    # introspection and debugging keep working.
    @wraps(generator_func)
    def list_func(*args, **kwargs):
        return degenerate(generator_func(*args, **kwargs))

    return list_func
def locations_within(a, b, tolerance):
    """Compare two position mappings for near-equality.

    Returns '' when every shared coordinate differs by at most
    *tolerance*; otherwise returns a description of the differences.
    Raises ValueError when the key sets of *a* and *b* differ.
    """
    differences = ''
    remaining = dict(b)
    for key, value in a.items():
        if key not in remaining:
            raise ValueError("b does not have the key: " + key)
        expected = int(value)
        actual = int(remaining.pop(key))
        if abs(expected - actual) > tolerance:
            differences += 'key {0} differs: {1} {2}'.format(key, expected, actual)
    if remaining:
        raise ValueError("keys in b not seen in a: " + ", ".join(remaining.keys()))
    return differences
def ctrl_x(self, x, to=None):
    """Send *x* with Ctrl held down, pressing and releasing Ctrl around it.

    When *to* is None the keystroke goes to the active element; otherwise
    it is sent to *to*.
    """
    keys = [Keys.CONTROL, x, Keys.CONTROL]
    # Some browser/OS combinations need a trailing PAUSE for the modifier
    # release to register.
    if (self.firefox and self.windows) or (self.linux and self.chrome):
        keys.append(Keys.PAUSE)
    if to is None:
        ActionChains(self.driver).send_keys(keys).perform()
    else:
        self.send_keys(to, keys)
def command_x(self, x, to=None):
    """Send *x* with Command held down, pressing and releasing Command.

    When *to* is None the keystroke goes to the active element; otherwise
    it is sent to *to*.
    """
    keys = [Keys.COMMAND, x, Keys.COMMAND]
    if to is None:
        ActionChains(self.driver).send_keys(keys).perform()
    else:
        self.send_keys(to, keys)
def wait(self, condition):
    """Block until *condition* becomes true, honoring ``self.timeout``."""
    waiter = WebDriverWait(self.driver, self.timeout)
    return waiter.until(condition)
def wait_until_not(self, condition):
    """Block until *condition* becomes false, honoring ``self.timeout``."""
    waiter = WebDriverWait(self.driver, self.timeout)
    return waiter.until_not(condition)
def persistence2stats(rev_docs, min_persisted=5, min_visible=1209600,
                      include=None, exclude=None, verbose=False):
    """Fold token-persistence statistics into each revision document.

    Consumes a sorted, page-partitioned sequence of revision documents and
    yields each one with summary statistics added to its 'persistence'
    field: counts of tokens added, tokens that persisted (by revision
    count or visible seconds), log-sums of persistence measures, and
    censoring flags for revisions that could not be fully observed.
    """
    rev_docs = mwxml.utilities.normalize(rev_docs)
    min_persisted = int(min_persisted)
    min_visible = int(min_visible)
    if include is None:
        include = lambda t: True
    if exclude is None:
        exclude = lambda t: False

    for rev_doc in rev_docs:
        persistence = rev_doc['persistence']
        stats = {
            'tokens_added': 0,
            'persistent_tokens': 0,
            'non_self_persistent_tokens': 0,
            'sum_log_persisted': 0,
            'sum_log_non_self_persisted': 0,
            'sum_log_seconds_visible': 0,
            'censored': False,
            'non_self_censored': False,
        }

        for token in persistence['tokens']:
            if not include(token['text']) or exclude(token['text']):
                continue
            if verbose:
                sys.stderr.write(".")
                sys.stderr.flush()
            stats['tokens_added'] += 1
            stats['sum_log_persisted'] += log(token['persisted'] + 1)
            stats['sum_log_non_self_persisted'] += log(token['non_self_persisted'] + 1)
            stats['sum_log_seconds_visible'] += log(token['seconds_visible'] + 1)
            if token['seconds_visible'] >= min_visible:
                # Visible long enough: counts as persistent regardless of
                # how many revisions processed it.
                stats['persistent_tokens'] += 1
                stats['non_self_persistent_tokens'] += 1
            else:
                stats['persistent_tokens'] += token['persisted'] >= min_persisted
                stats['non_self_persistent_tokens'] += token['non_self_persisted'] >= min_persisted

        # Censor revisions that could not possibly have been observed long
        # enough (or through enough revisions) to judge persistence.
        if persistence['seconds_possible'] < min_visible:
            stats['censored'] = True
            stats['non_self_censored'] = True
        else:
            if persistence['revisions_processed'] < min_persisted:
                stats['censored'] = True
            if persistence['non_self_processed'] < min_persisted:
                stats['non_self_censored'] = True

        if verbose:
            sys.stderr.write("\n")
            sys.stderr.flush()

        persistence.update(stats)
        yield rev_doc
def healthy(self):
    """Return task health.

    None means every sensor is healthy; otherwise the maximum severity
    among unhealthy sensors is returned (short-circuiting on critical).
    """
    worst = None
    for sensor in self._sensors.values():
        if sensor.healthy() is False:
            severity = sensor.severity()
            if worst is None or severity.value > worst.value:
                worst = severity
            if worst == WTaskHealthSensor.WTaskSensorSeverity.critical:
                # Nothing can outrank critical; stop scanning.
                break
    return worst
def preproc(self, which='sin', **kwargs):
    """Generate preprocessing input data and save it to the experiment dir.

    *which* names the numpy function (e.g. 'sin') applied over one period
    of linearly spaced values; the result is written to
    ``<expdir>/input.dat`` and recorded in the experiment config.
    """
    self.app_main(**kwargs)
    config = self.exp_config
    infile = osp.join(config['expdir'], 'input.dat')
    config['infile'] = infile
    sample = getattr(np, which)(np.linspace(-np.pi, np.pi))
    self.logger.info('Saving input data to %s', infile)
    np.savetxt(infile, sample)
def mounts(cls):
    """Return a tuple of WMountPoint objects for the current mount records."""
    with open(cls.__mounts_file__) as mounts_file:
        return tuple(WMountPoint(record) for record in mounts_file)
def mount_point(cls, file_path):
    """Return the mount point on which *file_path* resides.

    Picks the most specific (longest-path) mount point whose path
    contains *file_path*; returns None when no mount matches.
    """
    best = None
    for mp in cls.mounts():
        mp_path = mp.path()
        # BUG FIX: require a path-boundary match so '/mnt/foo' does not
        # claim files under '/mnt/foobar' (plain startswith did).
        prefix = mp_path if mp_path.endswith('/') else mp_path + '/'
        if file_path == mp_path or file_path.startswith(prefix):
            if best is None or len(best.path()) <= len(mp_path):
                best = mp
    return best
def mount(cls, device, mount_directory, fs=None, options=None, cmd_timeout=None, sudo=False):
    """Mount *device* at *mount_directory* via the system mount command.

    Optional filesystem type (*fs*), mount *options*, a subprocess
    timeout, and sudo elevation are supported. Raises CalledProcessError
    on failure.
    """
    cmd = [] if sudo is False else ['sudo']
    cmd.append('mount')
    cmd.append(device)
    cmd.append(os.path.abspath(mount_directory))
    if fs is not None:
        cmd.extend(['-t', fs])
    if options is not None and len(options) > 0:
        cmd.append('-o')
        cmd.extend(options)
    subprocess.check_output(cmd, timeout=cmd_timeout)
def get_org_types(self):
    """Retrieve all current OrganizationType objects from MemberSuite."""
    if not self.client.session_id:
        self.client.request_session()
    result = self.client.execute_object_query(
        object_query="SELECT Objects() FROM OrganizationType")
    msql_result = result['body']["ExecuteMSQLResult"]
    ms_objects = (msql_result["ResultValue"]["ObjectSearchResult"]
                  ["Objects"]["MemberSuiteObject"])
    return self.package_org_types(ms_objects)
def package_org_types(self, obj_list):
    """Convert raw MemberSuite query objects into OrganizationType objects.

    Each object's key/value fields are normalized via convert_ms_object
    before construction; the results are returned as a list.
    """
    return [
        OrganizationType(convert_ms_object(obj['Fields']['KeyValueOfstringanyType']))
        for obj in obj_list
    ]
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.