idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
11,200
def acceptColumn(self):
    """Accept the navigator's current item as this widget's column."""
    nav = self.navigator()
    nav.hide()
    self.lineEdit().setText(nav.currentSchemaPath())
    self.emitSchemaColumnChanged(nav.currentColumn())
Accepts the current item as the current column .
11,201
def showPopup(self):
    """Display the navigator popup just below this widget."""
    popup = self.navigator()
    below = self.mapToGlobal(QPoint(0, self.height()))
    popup.move(below)
    popup.resize(400, 250)
    popup.show()
Displays the popup associated with this navigator .
11,202
def sha256(content):
    """Return the hex-encoded SHA-256 digest of *content* (str or bytes)."""
    data = content.encode('utf-8') if isinstance(content, str) else content
    return hashlib.sha256(data).hexdigest()
Finds the sha256 hash of the content .
11,203
def cursor(self, as_dict=False):
    """Return a connection cursor; a dict cursor when *as_dict* is true."""
    self.ensure_connect()
    cursor_type = self.real_ctype(as_dict)
    return self._connect.cursor(cursor_type)
Gets the cursor by type; if as_dict is true, returns a dict SQL connection cursor
11,204
def members(self):
    """Return the member list of the current team via the RTM API."""
    response = self._rtm_client.get('v1/current_team.members?all=true')
    if response.is_fail():
        raise RTMServiceError('Failed to get members of current team', response)
    return response.data['result']
Gets members of current team
11,205
def channels(self):
    """Return the channel list of the current team via the RTM API."""
    response = self._rtm_client.get('v1/current_team.channels')
    if response.is_fail():
        raise RTMServiceError('Failed to get channels of current team', response)
    return response.data['result']
Gets channels of current team
11,206
def info(self, user_id):
    """Return information about the user identified by *user_id*."""
    response = self._rtm_client.get('v1/user.info?user_id={}'.format(user_id))
    if response.is_fail():
        raise RTMServiceError('Failed to get user information', response)
    return response.data['result']
Gets user information by user id
11,207
def info(self, channel_id):
    """Return information about the channel identified by *channel_id*."""
    resource = 'v1/channel.info?channel_id={}'.format(channel_id)
    response = self._rtm_client.get(resource)
    if response.is_fail():
        raise RTMServiceError("Failed to get channel information", response)
    return response.data['result']
Gets channel information by channel id
11,208
def _transform_to_dict(result):
    """Convert an Ideone SOAP key/value array into a plain dict."""
    return {entry.key[0]: entry.value[0] for entry in result.item}
Transform the array from Ideone into a Python dictionary .
11,209
def _collapse_language_array(language_array):
    """Flatten the Ideone language key/value array into a dict."""
    return {entry.key[0]: entry.value[0] for entry in language_array.item}
Convert the Ideone language list into a Python dictionary .
11,210
def _translate_language_name(self, language_name):
    """Translate a human readable language name into its Ideone integer id.

    Matches first on the full Ideone language name, then on the name with
    any parenthesised version suffix stripped.  Raises IdeoneError with
    up to three close suggestions when nothing matches.
    """
    languages = self.languages()
    target = language_name.lower()
    for ideone_index, ideone_language in languages.items():
        if ideone_language.lower() == target:
            return ideone_index
    # Fall back to comparing without the trailing "(version)" suffix.
    simple_languages = dict((k, v.split('(')[0].strip())
                            for (k, v) in languages.items())
    for ideone_index, simple_name in simple_languages.items():
        if simple_name.lower() == target:
            return ideone_index
    # BUGFIX: dict.values() views cannot be concatenated with `+` on
    # Python 3 — build real lists before joining them.
    language_choices = list(languages.values()) + list(simple_languages.values())
    similar_choices = difflib.get_close_matches(language_name,
                                                language_choices,
                                                n=3, cutoff=0.3)
    similar_choices_string = ", ".join(["'" + s + "'" for s in similar_choices])
    error_string = ("Couldn't match '%s' to an Ideone accepted language.\n"
                    "Did you mean one of the following: %s")
    raise IdeoneError(error_string % (language_name, similar_choices_string))
Translate a human readable language name into its Ideone integer representation .
11,211
def create_submission(self, source_code, language_name=None, language_id=None,
                      std_input="", run=True, private=False):
    """Create a submission on Ideone and return the parsed response dict."""
    resolved_id = language_id or self._translate_language_name(language_name)
    raw = self.client.service.createSubmission(self.user, self.password,
                                               source_code, resolved_id,
                                               std_input, run, private)
    parsed = Ideone._transform_to_dict(raw)
    Ideone._handle_error(parsed)
    return parsed
Create a submission and upload it to Ideone .
11,212
def submission_status(self, link):
    """Return the current status dict for the submission at *link*."""
    raw = self.client.service.getSubmissionStatus(self.user, self.password, link)
    status = Ideone._transform_to_dict(raw)
    Ideone._handle_error(status)
    return status
Given the unique link of a submission returns its current status .
11,213
def submission_details(self, link, with_source=True, with_input=True,
                       with_output=True, with_stderr=True,
                       with_compilation_info=True):
    """Return the requested details for the submission at *link*."""
    raw = self.client.service.getSubmissionDetails(
        self.user, self.password, link, with_source, with_input,
        with_output, with_stderr, with_compilation_info)
    details = Ideone._transform_to_dict(raw)
    Ideone._handle_error(details)
    return details
Return a dictionary of requested details about a submission with the id of link .
11,214
def languages(self):
    """Fetch, cache and return the mapping of Ideone language ids to names."""
    if self._language_dict is None:
        raw = self.client.service.getLanguages(self.user, self.password)
        result_dict = Ideone._transform_to_dict(raw)
        Ideone._handle_error(result_dict)
        # replace the SOAP array with a plain dict before caching
        result_dict['languages'] = Ideone._collapse_language_array(
            result_dict['languages'])
        self._language_dict = result_dict['languages']
    return self._language_dict
Get a list of supported languages and cache it .
11,215
def is_binary(filename):
    """Heuristically report whether *filename* is binary.

    A file is considered binary when its first 1 KiB contains a NUL byte;
    an empty file is not binary.
    """
    with open(filename, 'rb') as handle:
        head = handle.read(1024)
    return b'\0' in head
Returns True if the file is binary
11,216
def walk_files(args, root, directory, action):
    """Recursively apply *action* to every eligible text file under *directory*."""
    for name in os.listdir(directory):
        # filter by the various inclusion/exclusion rules first
        if is_hidden(args, name):
            continue
        if is_excluded_directory(args, name):
            continue
        if is_in_default_excludes(name):
            continue
        if not is_included(args, name):
            continue
        if is_excluded(args, name, directory):
            continue
        path = os.path.join(directory, name)
        if os.path.isdir(path):
            walk_files(args, root, path, action)
        # binary files are never passed to the action
        if os.path.isfile(path) and not is_binary(path):
            action(path)
Recursively go through the subdirectories of the directory, calling the action on each file
11,217
def main(args=None):
    """Command-line entry point: parse options, then run the replacement.

    :param args: argument list for testing; defaults to sys.argv via argparse.
    """
    parser = ArgumentParser(usage=__usage__)
    # NOTE(review): several help strings contain typos ("theses", "then");
    # they are user-visible runtime text and are left unchanged here.
    parser.add_argument("--no-skip-hidden", action="store_false",
                        dest="skip_hidden",
                        help="Do not skip hidden files. "
                        "Use this if you know what you are doing...")
    parser.add_argument("--include", dest="includes", action="append",
                        help="Only replace in files matching theses patterns")
    parser.add_argument("--exclude", dest="excludes", action="append",
                        help="Ignore files matching theses patterns")
    parser.add_argument("--backup", action="store_true", dest="backup",
                        help="Create a backup for each file. "
                        "By default, files are modified in place")
    parser.add_argument("--go", action="store_true", dest="go",
                        help="Perform changes rather than just printing then")
    # --dry-run shares dest="go" with --go so the last flag wins
    parser.add_argument("--dry-run", "-n", action="store_false", dest="go",
                        help="Do not change anything. This is the default")
    parser.add_argument("--color", choices=["always", "never", "auto"],
                        help="When to colorize the output. "
                        "Default: when output is a tty")
    parser.add_argument("--no-color", action="store_false", dest="color",
                        help="Do not colorize output")
    parser.add_argument("--quiet", "-q", action="store_true", dest="quiet",
                        help="Do not produce any output")
    # positional arguments: the regex, its replacement, and optional paths
    parser.add_argument("pattern")
    parser.add_argument("replacement")
    parser.add_argument("paths", nargs="*")
    parser.set_defaults(includes=list(), excludes=list(), skip_hidden=True,
                        backup=False, go=False, color="auto", quiet=False,)
    args = parser.parse_args(args=args)
    setup_colors(args)
    repl_main(args)
Manages options when called from the command line
11,218
def load(self):
    """Populate this item's children from disk on first access."""
    if self._loaded:
        return
    self._loaded = True
    self.setChildIndicatorPolicy(self.DontShowIndicatorWhenChildless)
    if not self.isFolder():
        return
    path = self.filepath()
    if not os.path.isdir(path):
        path = os.path.dirname(path)
    for name in os.listdir(path):
        # skip private entries, but keep dunder ones
        if name.startswith('_') and not name.startswith('__'):
            continue
        if '-' in name:
            continue
        if name in ('index.html', '__init__.html'):
            # index pages become this item's own URL, not children
            self._url = 'file:///%s/%s' % (path, name)
            continue
        child_path = os.path.join(path, name)
        XdkEntryItem(self, child_path, folder=os.path.isdir(child_path))
Loads this item .
11,219
def get_version(module):
    """Best-effort read of a version from *module*.

    Tries several conventional attribute names (lower- and upper-case),
    calls the attribute if it is callable, and joins iterable versions
    such as ``(1, 2, 3)`` into ``"1.2.3"``.  Returns None when no version
    attribute exists (the original raised NameError in that case).
    """
    version_names = ["__version__", "get_version", "version"]
    version_names.extend([name.upper() for name in version_names])
    for name in version_names:
        try:
            version = getattr(module, name)
        except AttributeError:
            continue
        if callable(version):
            version = version()
        # BUGFIX: on Python 3, str is iterable, so the old join mangled
        # "1.2.3" into "1...2...3" — only join non-string iterables.
        if not isinstance(version, str):
            try:
                version = ".".join([str(i) for i in version])
            except TypeError:
                # non-iterable version (e.g. an int) is returned as-is
                pass
        return version
    return None
Attempts to read a version attribute from the given module that could be specified via several different names and formats .
11,220
def get_setup_attribute(attribute, setup_path):
    """Run ``setup.py --<attribute>`` in a subprocess and return its output."""
    command = ["python", setup_path, "--%s" % attribute]
    stdout = Popen(command, stdout=PIPE).communicate()[0]
    return stdout.decode('utf-8').strip()
Runs the project s setup . py script in a process with an arg that will print out the value for a particular attribute such as author or version and returns the value .
11,221
def fetch_items(self, category, **kwargs):
    """Yield modules updated since ``kwargs['from_date']``.

    Stops paginating as soon as a module older than the threshold is seen
    (pages are assumed newest-first).
    """
    from_date = kwargs['from_date']
    logger.info("Fetching modules from %s", str(from_date))
    from_date_ts = datetime_to_utc(from_date).timestamp()
    nmodules = 0
    stop_fetching = False
    for raw_modules in self.client.modules():
        for module in list(self.parse_json(raw_modules)):
            if from_date_ts > self.metadata_updated_on(module):
                stop_fetching = True
                break
            owner = module['owner']['username']
            name = module['name']
            # enrich each module with its releases and owner details
            module['releases'] = self.__fetch_and_parse_releases(owner, name)
            module['owner_data'] = self.__get_or_fetch_owner(owner)
            yield module
            nmodules += 1
        if stop_fetching:
            break
    logger.info("Fetch process completed: %s modules fetched", nmodules)
Fetch the modules
11,222
def parse_json(raw_json):
    """Decode a Puppet Forge JSON payload, unwrapping the 'results' field."""
    decoded = json.loads(raw_json)
    return decoded['results'] if 'results' in decoded else decoded
Parse a Puppet forge JSON stream .
11,223
def modules(self):
    """Yield pages of modules, sorted by latest release."""
    params = {
        self.PLIMIT: self.max_items,
        self.PSORT_BY: self.VLATEST_RELEASE,
    }
    yield from self._fetch(self.RMODULES, params)
Fetch modules pages .
11,224
def releases(self, owner, module):
    """Yield pages of releases for *owner*'s *module*, including deleted ones."""
    params = {
        self.PMODULE: owner + '-' + module,
        self.PLIMIT: self.max_items,
        self.PSHOW_DELETED: 'true',
        self.PSORT_BY: self.VRELEASE_DATE,
    }
    yield from self._fetch(self.RRELEASES, params)
Fetch the releases of a module .
11,225
def reset_tree(self):
    """Reinitialise the tree to an empty, not-ready state."""
    self.tree = {'leaves': [], 'levels': [], 'is_ready': False}
Resets the current tree to empty .
11,226
def add_leaves(self, values_array, do_hash=False):
    """Add every value in *values_array* as a leaf, optionally hashing it.

    Marks the tree as not ready; call make_tree() afterwards to rebuild.
    """
    self.tree['is_ready'] = False
    # Plain loop: the previous list comprehension was used only for its
    # side effects and allocated a throwaway list of Nones.
    for value in values_array:
        self._add_leaf(value, do_hash)
Add leaves to the tree .
11,227
def make_tree(self):
    """Generate the Merkle tree from the current leaves.

    Builds levels bottom-up: each computed level is prepended (via
    _unshift) to self.tree['levels'] until the top level holds a single
    root, then the tree is marked ready.  With no leaves, the tree is
    left empty but still marked ready.
    """
    self.tree['is_ready'] = False
    leaf_count = len(self.tree['leaves'])
    if leaf_count > 0:
        # level 0 starts as the raw leaves; _unshift prepends in place
        self._unshift(self.tree['levels'], self.tree['leaves'])
        # keep reducing until the topmost level is just the root
        while len(self.tree['levels'][0]) > 1:
            self._unshift(self.tree['levels'], self._calculate_next_level())
    self.tree['is_ready'] = True
Generates the merkle tree .
11,228
def duration_to_timedelta(obj):
    """Parse a duration string (matched by DURATION_PATTERN) into a timedelta."""
    groups = DURATION_PATTERN.search(obj).groupdict(default="0")
    as_ints = {unit: int(count) for unit, count in groups.items()}
    return timedelta(**as_ints)
Converts duration to timedelta
11,229
def timedelta_to_duration(obj):
    """Render a timedelta as a compact duration string such as '1d2h3m4s'."""
    minutes = hours = days = 0
    seconds = int(obj.total_seconds())
    if seconds > 59:
        minutes, seconds = divmod(seconds, 60)
    if minutes > 59:
        hours, minutes = divmod(minutes, 60)
    if hours > 23:
        days, hours = divmod(hours, 24)
    parts = []
    if days:
        parts.append('%sd' % days)
    if hours:
        parts.append('%sh' % hours)
    if minutes:
        parts.append('%sm' % minutes)
    # always emit seconds when nothing else was emitted
    if seconds or not parts:
        parts.append('%ss' % seconds)
    return "".join(parts)
Converts timedelta to duration
11,230
def create_dm_pkg(secret, username):
    """Pack an iKuai DM auth message containing *secret* and *username*."""
    secret = tools.EncodeString(secret)
    username = tools.EncodeString(username)
    # header words, then each field as (tag, length, NUL-padded 32 bytes)
    fields = [
        IK_RAD_PKG_VER,
        IK_RAD_PKG_AUTH,
        IK_RAD_PKG_USR_PWD_TAG,
        len(secret),
        secret.ljust(32, '\x00'),
        IK_RAD_PKG_CMD_ARGS_TAG,
        len(username),
        username.ljust(32, '\x00'),
    ]
    return struct.pack('>HHHH32sHH32s', *fields)
create ikuai dm message
11,231
def clearBreakpoints(self):
    """Remove every breakpoint marker and notify listeners."""
    self.markerDeleteAll(self._breakpointMarker)
    if not self.signalsBlocked():
        self.breakpointsChanged.emit()
Clears the file of all the breakpoints .
11,232
def unindentSelection(self):
    """Unindent every line spanned by the current selection."""
    selection = self.getSelection()
    # selection[0] is the first line, selection[2] the last line
    for line in range(selection[0], selection[2] + 1):
        self.unindent(line)
Unindents the current selected text .
11,233
def _removePunctuation(text_string):
    """Strip punctuation symbols; handles both Python 2 and 3 str.translate."""
    try:
        # Python 2 signature: translate(table, deletechars)
        return text_string.translate(None, _punctuation)
    except TypeError:
        # Python 3: build a deletion table instead
        return text_string.translate(str.maketrans('', '', _punctuation))
Removes punctuation symbols from a string .
11,234
def _removeStopwords(text_list):
    """Return *text_list* without words present in the module stopword set.

    Matching is case-insensitive; the original casing and order of kept
    words is preserved.
    """
    # Comprehension replaces the manual append loop (same items, same order).
    return [word for word in text_list if word.lower() not in _stopwords]
Removes stopwords contained in a list of words .
11,235
def getBlocks(sentences, n):
    """Split *sentences* into consecutive chunks of at most *n* items."""
    return [sentences[start:start + n]
            for start in range(0, len(sentences), n)]
Get blocks of n sentences together .
11,236
def __destroyLockedView(self):
    """Close and schedule deletion of the locked view, if one exists."""
    view = self._lockedView
    if view:
        view.close()
        view.deleteLater()
        self._lockedView = None
Destroys the locked view from this widget .
11,237
def clear(self):
    """Destroy all XTreeWidgetItems, then let the base class clear the model."""
    for node in self.traverseItems():
        # only our own item type knows how to destroy itself
        if isinstance(node, XTreeWidgetItem):
            node.destroy()
    super(XTreeWidget, self).clear()
Removes all the items from this tree widget . This will go through and also destroy any XTreeWidgetItems prior to the model clearing its references .
11,238
def exportAs(self, action):
    """Prompt for a filename and export this tree via the chosen plugin."""
    plugin = self.exporter(unwrapVariant(action.data()))
    if not plugin:
        return False
    ftypes = '{0} (*{1});;All Files (*.*)'.format(plugin.name(),
                                                  plugin.filetype())
    filename = QtGui.QFileDialog.getSaveFileName(self.window(),
                                                 'Export Data', '', ftypes)
    # newer Qt bindings return (filename, selected_filter)
    if type(filename) == tuple:
        filename = filename[0]
    if not filename:
        return False
    return self.export(nativestring(filename), exporter=plugin)
Prompts the user to export the information for this tree based on the available exporters .
11,239
def headerHideColumn(self):
    """Hide the header-menu column, keeping at least one column visible."""
    self.setColumnHidden(self._headerIndex, True)
    # if that hid the last visible column, re-show the first one
    any_visible = any(not self.isColumnHidden(col)
                      for col in range(self.columnCount()))
    if not any_visible:
        self.setColumnHidden(0, False)
Hides the current column set by the header index .
11,240
def headerSortAscending(self):
    """Enable sorting and sort the header-menu column in ascending order."""
    column = self._headerIndex
    self.setSortingEnabled(True)
    self.sortByColumn(column, QtCore.Qt.AscendingOrder)
Sorts the column at the current header index by ascending order .
11,241
def headerSortDescending(self):
    """Enable sorting and sort the header-menu column in descending order."""
    column = self._headerIndex
    self.setSortingEnabled(True)
    self.sortByColumn(column, QtCore.Qt.DescendingOrder)
Sorts the column at the current header index by descending order .
11,242
def highlightByAlternate(self):
    """Use a slightly darkened alternate-base color as the selection highlight."""
    palette = QtGui.QApplication.palette()
    # keep highlighted text the same color as normal text
    palette.setColor(palette.HighlightedText, palette.color(palette.Text))
    alternate = palette.color(palette.AlternateBase)
    palette.setColor(palette.Highlight, alternate.darker(110))
    self.setPalette(palette)
Sets the palette highlighting for this tree widget to use a darker version of the alternate color vs . the standard highlighting .
11,243
def smartResizeColumnsToContents(self):
    """Resize columns to their contents without letting any column shrink."""
    # suppress signals and repaints while widths are shuffled
    self.blockSignals(True)
    self.setUpdatesEnabled(False)
    header = self.header()
    header.blockSignals(True)
    column_range = range(self.columnCount())
    previous = [self.columnWidth(col) for col in column_range]
    header.resizeSections(header.ResizeToContents)
    # restore any width that the auto-resize made smaller
    for col in column_range:
        if self.columnWidth(col) < previous[col]:
            self.setColumnWidth(col, previous[col])
    header.blockSignals(False)
    self.setUpdatesEnabled(True)
    self.blockSignals(False)
Resizes the columns to the contents based on the user preferences .
11,244
def gevent_run(app, monkey_patch=True, start=True, debug=False, **kwargs):
    """Run *app* under gevent, optionally monkey-patching and blocking.

    When *start* is true, polls until the app reports it has stopped.
    """
    if monkey_patch:
        from gevent import monkey
        monkey.patch_all()
    import gevent
    gevent.spawn(app.run, debug=debug, **kwargs)
    if start:
        while not app.stopped:
            gevent.sleep(0.1)
Run your app via gevent.spawn; runs a simple wait loop if start == True
11,245
def dict_update(d, u):
    """Recursively merge mapping *u* into dict *d* and return *d*.

    Nested mappings are merged in place; all other values overwrite.
    """
    # collections.Mapping was removed in Python 3.10; use collections.abc.
    # Plain .items() replaces six.iteritems (no six dependency needed).
    for key, value in u.items():
        if isinstance(value, collections.abc.Mapping):
            d[key] = dict_update(d.get(key, {}), value)
        else:
            d[key] = value
    return d
Recursive dict update
11,246
def get_schema(self, schema_id):
    """Fetch schema *schema_id* from the registry and return it as a dict."""
    response = requests.get(self._url('/schemas/ids/{}', schema_id))
    raise_if_failed(response)
    return json.loads(response.json()['schema'])
Retrieves the schema with the given schema_id from the registry and returns it as a dict .
11,247
def get_subjects(self):
    """Return the list of subject names present in the schema registry."""
    response = requests.get(self._url('/subjects'))
    raise_if_failed(response)
    return response.json()
Returns the list of subject names present in the schema registry .
11,248
def get_subject_version_ids(self, subject):
    """Return the schema version ids registered under *subject*."""
    response = requests.get(self._url('/subjects/{}/versions', subject))
    raise_if_failed(response)
    return response.json()
Return the list of schema version ids which have been registered under the given subject .
11,249
def get_subject_version(self, subject, version_id):
    """Return the schema (as a dict) registered under *subject* at *version_id*."""
    url = self._url('/subjects/{}/versions/{}', subject, version_id)
    response = requests.get(url)
    raise_if_failed(response)
    return json.loads(response.json()['schema'])
Retrieves the schema registered under the given subject with the given version id . Returns the schema as a dict .
11,250
def schema_is_registered_for_subject(self, subject, schema):
    """Return True if *schema* is already registered under *subject*."""
    payload = json.dumps({'schema': json.dumps(schema)})
    response = requests.post(self._url('/subjects/{}', subject),
                             data=payload, headers=HEADERS)
    # 404 is the registry's "not registered" answer, not an error
    if response.status_code == 404:
        return False
    raise_if_failed(response)
    return True
Returns True if the given schema is already registered under the given subject .
11,251
def get_global_compatibility_level(self):
    """Return the registry's global compatibility level."""
    response = requests.get(self._url('/config'), headers=HEADERS)
    raise_if_failed(response)
    return response.json()['compatibility']
Gets the global compatibility level .
11,252
def set_subject_compatibility_level(self, subject, level):
    """Set the compatibility level for *subject*."""
    body = json.dumps({'compatibility': level})
    response = requests.put(self._url('/config/{}', subject),
                            data=body, headers=HEADERS)
    raise_if_failed(response)
Sets the compatibility level for the given subject .
11,253
def get_subject_compatibility_level(self, subject):
    """Return the compatibility level configured for *subject*."""
    response = requests.get(self._url('/config/{}', subject), headers=HEADERS)
    raise_if_failed(response)
    return response.json()['compatibility']
Gets the compatibility level for the given subject .
11,254
def from_api(cls, api, **kwargs):
    """Build an instance from an API payload, guided by ``cls._api_attrs``.

    ``_api_attrs`` is a sequence of (name, type, default) triples.  Types
    may be nested lists (arrays of that type), the string 'self'
    (recursive), 'date' (unix timestamp), or classes exposing their own
    ``from_api``.  A default of ``TypeError`` marks a required argument.
    """
    if not cls._api_attrs:
        raise NotImplementedError()
    def resolve_attribute_type(attr_type):
        # unwrap nested list markers: [[T]] -> T
        while isinstance(attr_type, list):
            attr_type = attr_type[0]
        if attr_type == 'self':
            attr_type = cls
        if attr_type == 'date':
            attr_type = datetime.datetime.fromtimestamp
        if attr_type is str:
            # NOTE(review): `unicode` exists only on Python 2 — this path
            # raises NameError on Python 3; confirm the intended runtime.
            attr_type = unicode
        if hasattr(attr_type, 'from_api'):
            # bind the api handle into nested from_api constructors
            return lambda **kw: attr_type.from_api(api, **kw)
        return attr_type
    def instantiate_attr(attr_value, attr_type):
        # dict payloads are expanded as keyword arguments
        if isinstance(attr_value, dict):
            return attr_type(**attr_value)
        return attr_type(attr_value)
    def instantiate_array(attr_values, attr_type):
        # recurse for arrays-of-arrays, based on the first element
        func = instantiate_attr
        if isinstance(attr_values[0], list):
            func = instantiate_array
        return [func(val, attr_type) for val in attr_values]
    def instantiate(attr_value, attr_type):
        if isinstance(attr_value, list):
            return instantiate_array(attr_value, attr_type)
        return instantiate_attr(attr_value, attr_type)
    instance = cls(api)
    for attr_name, attr_type, attr_default in cls._api_attrs:
        attr_value = kwargs.get(attr_name, attr_default)
        # the TypeError sentinel means the argument was required
        if attr_value is TypeError:
            raise TypeError('{} requires argument {}'.format(cls.__name__, attr_name))
        attr_type = resolve_attribute_type(attr_type)
        # only convert values the caller actually supplied
        if attr_value != attr_default:
            attr_value = instantiate(attr_value, attr_type)
        if attr_name == 'from':
            # 'from' is a Python keyword; stored as 'froom' instead
            attr_name = 'froom'
        setattr(instance, attr_name, attr_value)
    return instance
Parses a payload from the API guided by _api_attrs
11,255
def api_method(self):
    """Return the bound API callable named by ``_api_method``."""
    if not self._api_method:
        raise NotImplementedError()
    return getattr(self.api, self._api_method)
Returns the api method to send the current API Object type
11,256
def api_payload(self):
    """Build a submission payload from the non-None ``_api_payload`` attributes."""
    if not self._api_payload:
        raise NotImplementedError()
    pairs = ((name, getattr(self, name, None)) for name in self._api_payload)
    return {name: value for name, value in pairs if value is not None}
Generates a payload ready for submission to the API guided by _api_payload
11,257
def send(self, **kwargs):
    """Submit this object to the API, merging keyword overrides into the payload."""
    payload = dict(self.api_payload(), **kwargs)
    return self.api_method()(**payload)
Combines api_payload and api_method to submit the current object to the API
11,258
def parse_addr(addr, *, proto=None, host=None):
    """Parse *addr* into an Address.

    Accepts an Address (returned as-is), a 'proto://host:port' string,
    a (host, port) pair, or a bare port int.
    """
    port = None
    if isinstance(addr, Address):
        return addr
    elif isinstance(addr, str):
        # peel off an optional scheme prefix
        if addr.startswith('http://'):
            proto, addr = 'http', addr[7:]
        if addr.startswith('udp://'):
            proto, addr = 'udp', addr[6:]
        elif addr.startswith('tcp://'):
            proto, addr = 'tcp', addr[6:]
        elif addr.startswith('unix://'):
            proto, addr = 'unix', addr[7:]
        left, _, right = addr.partition(':')
        host = left or host
        port = right or port
    elif isinstance(addr, (tuple, list)):
        left, right = addr
        host = left or host
        port = right or port
    elif isinstance(addr, int):
        port = addr
    else:
        raise ValueError('bad value')
    if port is not None:
        port = int(port)
    return Address(proto, host, port)
Parses an address
11,259
def applyRule(self):
    """Apply the rule matching the current term to this line edit."""
    builder = self.queryBuilderWidget()
    if not builder:
        return
    self.setCurrentRule(builder.findRule(self.uiTermDDL.currentText()))
Applies the rule from the builder system to this line edit .
11,260
def updateEditor(self):
    """Swap the operator-value editor widget to match the current rule.

    Keeps the existing widget when its type already matches the rule's
    editor type; otherwise tears it down (preserving its text) and
    installs a fresh editor, wiring term completion for line edits.
    """
    rule = self.currentRule()
    operator = self.currentOperator()
    widget = self.uiWidgetAREA.widget()
    editorType = None
    text = ''
    if (rule):
        editorType = rule.editorType(operator)
    # nothing to do when the current widget already has the right type
    if (widget and editorType and type(widget) == editorType):
        return
    elif (widget):
        # plain QWidget placeholders have no text worth preserving
        if (type(widget) != QWidget):
            text = widget.text()
        widget.setParent(None)
        widget.deleteLater()
        self.uiWidgetAREA.setWidget(None)
    if (editorType):
        widget = editorType(self)
        if (isinstance(widget, QLineEdit)):
            # prefer the rule's completion terms, then fall back to the
            # query builder widget's terms
            terms = rule.completionTerms()
            if (not terms):
                qwidget = self.queryBuilderWidget()
                if (qwidget):
                    terms = qwidget.completionTerms()
            if (terms):
                widget.setCompleter(XQueryCompleter(terms, widget))
        self.uiWidgetAREA.setWidget(widget)
        # carry the previous editor's text into the new editor
        if (type(widget) != QWidget):
            widget.setText(text)
Updates the editor based on the current selection .
11,261
def emitCurrentChanged(self):
    """Emit schema/table changed signals unless signals are blocked."""
    if self.signalsBlocked():
        return
    schema = self.currentSchema()
    self.currentSchemaChanged.emit(schema)
    self.currentTableChanged.emit(schema.model() if schema else None)
Emits the current schema changed signal for this combobox, provided the signals aren't blocked .
11,262
def save(self):
    """Write the edited shortcuts back onto their actions; False if invalid."""
    if not self.updateShortcut():
        return False
    tree = self.uiActionTREE
    for row in range(tree.topLevelItemCount()):
        item = tree.topLevelItem(row)
        # column 1 holds the edited key sequence text
        item.action().setShortcut(QKeySequence(item.text(1)))
    return True
Saves the current settings for the actions in the list and exits the widget .
11,263
def showPopup(self):
    """Show the popup for this button, positioned by its anchor.

    Holding any keyboard modifier while triggering opens the popup in
    dialog mode.
    """
    # truthy when a modifier key is held; captured before the popup
    # grabs input so the state reflects the moment of the click
    as_dialog = QApplication.keyboardModifiers()
    anchor = self.defaultAnchor()
    if anchor:
        self.popupWidget().setAnchor(anchor)
    else:
        anchor = self.popupWidget().anchor()
    # bottom anchors place the popup above the button, others below
    if (anchor & (XPopupWidget.Anchor.BottomLeft |
                  XPopupWidget.Anchor.BottomCenter |
                  XPopupWidget.Anchor.BottomRight)):
        pos = QPoint(self.width() / 2, 0)
    else:
        pos = QPoint(self.width() / 2, self.height())
    pos = self.mapToGlobal(pos)
    if not self.signalsBlocked():
        self.popupAboutToShow.emit()
    self._popupWidget.popup(pos)
    if as_dialog:
        self._popupWidget.setCurrentMode(XPopupWidget.Mode.Dialog)
Shows the popup for this button .
11,264
def togglePopup(self):
    """Show the popup if hidden; close it unless it is in dialog mode."""
    popup = self._popupWidget
    if not popup.isVisible():
        self.showPopup()
    elif popup.currentMode() != popup.Mode.Dialog:
        popup.close()
Toggles whether or not the popup is visible .
11,265
def form_field(self):
    """Return the appropriate form field for this facet."""
    defaults = dict(required=False, label=unicode(self), widget=self.widget)
    defaults.update(self.extra)
    return self.field_class(**defaults)
Returns appropriate form field .
11,266
def attr_name(self):
    """Return the attribute name for this facet (schema name takes precedence)."""
    if self.schema:
        return self.schema.name
    return self.field.name
Returns attribute name for this facet
11,267
def get_field_and_lookup(self, name):
    """Resolve *name* to a model field; the lookup prefix is empty here.

    Subclasses override this to filter across related models.
    """
    field = self.get_queryset().model._meta.get_field(name)
    return field, ''
Returns field instance and lookup prefix for given attribute name . Can be overloaded in subclasses to provide filtering across multiple models .
11,268
def StreamMetrics(self, request_iterator, context):
    """Stream collected metrics back to the client.

    Reads optional 'max-metrics-buffer' and 'max-collect-duration'
    settings from the first request, starts the collector in a daemon
    thread, then batches metrics from the queue and yields a
    CollectReply whenever the buffer fills or the collect window
    elapses.

    NOTE(review): the source was flattened to a single line; the nesting
    of the trailing ``max_metrics_buffer == 0`` branch is a
    reconstruction and should be confirmed against upstream.
    """
    LOG.debug("StreamMetrics called")
    # the first streamed message carries the collection configuration
    collect_args = (next(request_iterator))
    max_metrics_buffer = 0
    max_collect_duration = 0
    cfg = Metric(pb=collect_args.Metrics_Arg.metrics[0])
    try:
        max_metrics_buffer = int(cfg.config["max-metrics-buffer"])
    except Exception as ex:
        LOG.debug("Unable to get schedule parameters: {}".format(ex))
    try:
        max_collect_duration = int(cfg.config["max-collect-duration"])
    except Exception as ex:
        LOG.debug("Unable to get schedule parameters: {}".format(ex))
    # only positive overrides replace the configured defaults
    if max_metrics_buffer > 0:
        self.max_metrics_buffer = max_metrics_buffer
    if max_collect_duration > 0:
        self.max_collect_duration = max_collect_duration
    # run the plugin's stream collection in the background
    thread = threading.Thread(
        target=self._stream_wrapper,
        args=(collect_args,),
    )
    thread.daemon = True
    thread.start()
    metrics = []
    metrics_to_stream = []
    stream_timeout = self.max_collect_duration
    while context.is_active():
        try:
            t_start = time.time()
            metrics = self.metrics_queue.get(block=True, timeout=stream_timeout)
            # shrink the remaining window by the time spent waiting
            elapsed = round(time.time() - t_start)
            stream_timeout -= elapsed
        except queue.Empty:
            # collect window expired: flush whatever has accumulated
            LOG.debug("Max collect duration exceeded. Streaming {} metrics".format(len(metrics_to_stream)))
            metrics_col = CollectReply(Metrics_Reply=MetricsReply(metrics=[m.pb for m in metrics_to_stream]))
            metrics_to_stream = []
            stream_timeout = self.max_collect_duration
            yield metrics_col
        else:
            for metric in metrics:
                metrics_to_stream.append(metric)
                # flush as soon as the buffer limit is reached
                if len(metrics_to_stream) == self.max_metrics_buffer:
                    LOG.debug("Max metrics buffer reached. Streaming {} metrics".format(len(metrics_to_stream)))
                    metrics_col = CollectReply(Metrics_Reply=MetricsReply(metrics=[m.pb for m in metrics_to_stream]))
                    metrics_to_stream = []
                    stream_timeout = self.max_collect_duration
                    yield metrics_col
            # buffer size 0 means stream every batch immediately
            if self.max_metrics_buffer == 0:
                LOG.debug("Max metrics buffer set to 0. Streaming {} metrics".format(len(metrics_to_stream)))
                metrics_col = CollectReply(Metrics_Reply=MetricsReply(metrics=[m.pb for m in metrics_to_stream]))
                metrics_to_stream = []
                stream_timeout = self.max_collect_duration
                yield metrics_col
    self.done_queue.put(True)
Dispatches metrics streamed by collector
11,269
def GetMetricTypes(self, request, context):
    """Dispatch to the plugin's update_catalog, mapping failures into the reply."""
    LOG.debug("GetMetricTypes called")
    try:
        catalog = self.plugin.update_catalog(ConfigMap(pb=request.config))
        return MetricsReply(metrics=[m.pb for m in catalog])
    except Exception as err:
        msg = "message: {}\n\nstack trace: {}".format(err, traceback.format_exc())
        return MetricsReply(metrics=[], error=msg)
Dispatches the request to the plugins update_catalog method
11,270
def command_publish(self, command, **kwargs):
    """Publish one MQTT message described by *command* (host/port/endpoint/payload)."""
    client = mqtt.Client()
    client.connect(command['host'], port=int(command['port']))
    client.loop_start()
    try:
        client.publish(command['endpoint'], command['payload'])
    finally:
        # always stop the network loop, even if publish raised
        client.loop_stop(force=False)
Publish a MQTT message
11,271
def command_subscribe(self, command, **kwargs):
    """Subscribe to an MQTT topic, accumulating decoded messages in a variable."""
    topic = command['topic']
    encoding = command.get('encoding', 'utf-8')
    name = command['name']
    engine = self.engine
    if not hasattr(engine, '_mqtt'):
        engine._mqtt = {}
    engine.variables[name] = []
    def on_message(client, userdata, msg):
        # userdata is the shared variables list registered below
        userdata.append(msg.payload.decode(encoding))
    client = mqtt.Client(userdata=engine.variables[name])
    engine._mqtt[name] = client
    client.on_message = on_message
    client.connect(command['host'], port=int(command['port']))
    client.subscribe(topic)
    client.loop_start()
    # make sure the background loop is stopped on engine teardown
    engine.register_teardown_callback(client.loop_stop)
Subscribe to a topic or list of topics
11,272
def get_data(param, data):
    """Retrieve a weather parameter from met.no time entries.

    :param param: name of the parameter (e.g. ``'temperature'``)
    :param data: iterable of ``(distance, time_entry)`` pairs
    :returns: the converted value from the first entry that carries the
        parameter, or ``None`` when it is absent or malformed.

    Bug fix: the original assigned ``new_state`` only inside the
    matching branch, so an empty ``data`` (or an unrecognized ``param``
    that was nevertheless present in ``loc_data``) raised
    ``UnboundLocalError``, which the except clause did not catch.
    Returning per branch with a ``None`` fallback removes that path.
    """
    try:
        for (_, selected_time_entry) in data:
            loc_data = selected_time_entry['location']
            if param not in loc_data:
                continue
            if param == 'precipitation':
                return loc_data[param]['@value']
            if param == 'symbol':
                return int(float(loc_data[param]['@number']))
            if param in ('temperature', 'pressure', 'humidity',
                         'dewpointTemperature'):
                return round(float(loc_data[param]['@value']), 1)
            if param in ('windSpeed', 'windGust'):
                # met.no reports m/s; convert to km/h.
                return round(float(loc_data[param]['@mps']) * 3.6, 1)
            if param == 'windDirection':
                return round(float(loc_data[param]['@deg']), 1)
            if param in ('fog', 'cloudiness', 'lowClouds',
                         'mediumClouds', 'highClouds'):
                return round(float(loc_data[param]['@percent']), 1)
        # Parameter never found (or not a recognized key).
        return None
    except (ValueError, IndexError, KeyError):
        return None
Retrieve weather parameter .
11,273
def parse_datetime(dt_str):
    """Parse a met.no timestamp into a timezone-aware datetime.

    A trailing ``Z`` is rewritten as `` +0000`` so that strptime's
    ``%z`` directive can consume the UTC offset.
    """
    normalized = dt_str.replace("Z", " +0000")
    return datetime.datetime.strptime(normalized, "%Y-%m-%dT%H:%M:%S %z")
Parse datetime .
11,274
async def fetching_data(self, *_):
    """Fetch and parse the latest forecast XML from met.no.

    Stores the parsed document on ``self.data`` and returns True on
    success, False on any network, HTTP-status or parse failure.
    """
    try:
        with async_timeout.timeout(10):
            response = await self._websession.get(self._api_url,
                                                  params=self._urlparams)
            if response.status != 200:
                _LOGGER.error('%s returned %s', self._api_url, response.status)
                return False
            body = await response.text()
    except (asyncio.TimeoutError, aiohttp.ClientError) as error:
        _LOGGER.error('%s returned %s', self._api_url, error)
        return False
    try:
        self.data = xmltodict.parse(body)['weatherdata']
    except (ExpatError, IndexError) as error:
        _LOGGER.error('%s returned %s', response.url, error)
        return False
    return True
Get the latest data from met . no .
11,275
def get_forecast(self, time_zone):
    """Return a five-day forecast (noon each day) from met.no data.

    Returns an empty list when no data has been fetched yet.
    """
    if self.data is None:
        return []
    noon_today = datetime.datetime.now(time_zone).replace(
        hour=12, minute=0, second=0, microsecond=0)
    return [self.get_weather(noon_today + datetime.timedelta(days=offset))
            for offset in range(1, 6)]
Get the forecast weather data from met . no .
11,276
def get_weather(self, time, max_hour=6):
    """Return the weather closest to *time* from met.no data.

    Entries whose combined distance from *time* exceeds ``max_hour``
    hours are skipped; returns an empty dict when nothing matches.
    """
    if self.data is None:
        return {}
    candidates = []
    for time_entry in self.data['product']['time']:
        valid_from = parse_datetime(time_entry['@from'])
        valid_to = parse_datetime(time_entry['@to'])
        if time > valid_to:
            # Entry lies entirely in the past relative to *time*.
            continue
        distance = (abs((valid_to - time).total_seconds()) +
                    abs((valid_from - time).total_seconds()))
        if distance > max_hour * 3600:
            continue
        candidates.append((distance, time_entry))
    if not candidates:
        return {}
    # Closest entries first; get_data() scans in this order.
    candidates.sort(key=lambda pair: pair[0])
    return {
        'datetime': time,
        'temperature': get_data('temperature', candidates),
        'condition': CONDITIONS.get(get_data('symbol', candidates)),
        'pressure': get_data('pressure', candidates),
        'humidity': get_data('humidity', candidates),
        'wind_speed': get_data('windSpeed', candidates),
        'wind_bearing': get_data('windDirection', candidates),
    }
Get the current weather data from met . no .
11,277
def prepare_node(data):
    """Normalize *data* into a ``(node_name, payload)`` pair for the
    catalog endpoint.

    Accepts a bare node name, a full health-entry dict (with
    ``Node``/``Service``/``Checks``), or a plain node dict.
    """
    if not data:
        return None, {}
    if isinstance(data, str):
        # Bare node name.
        return data, {}
    if all(field in data for field in ("Node", "Service", "Checks")):
        # Full health-entry shape: the node payload is nested under "Node".
        return data["Node"]["Node"], data["Node"]
    result = {"Node": data["ID"]} if "ID" in data else {}
    for key in ("Datacenter", "Node", "Address", "TaggedAddresses",
                "Service", "Check", "Checks"):
        if key in data:
            result[key] = data[key]
    if list(result) == ["Node"]:
        # Only a name was supplied; no payload to send.
        return result["Node"], {}
    return result.get("Node"), result
Prepare node for catalog endpoint
11,278
def prepare_service(data):
    """Normalize *data* into a ``(service_id, payload)`` pair for the
    catalog endpoint.

    Accepts a bare service name/id, a full health-entry dict, a
    node-embedded ``Service*``-prefixed dict, or a plain service dict.
    """
    if not data:
        return None, {}
    if isinstance(data, str):
        # Bare service identifier.
        return data, {}
    if all(field in data for field in ("Node", "Service", "Checks")):
        # Full health-entry shape.
        return data["Service"]["ID"], data["Service"]
    if all(field in data for field in ("ServiceName", "ServiceID")):
        # Node-embedded shape: strip the "Service" prefix from keys.
        payload = {
            "ID": data["ServiceID"],
            "Service": data["ServiceName"],
            "Tags": data.get("ServiceTags"),
            "Address": data.get("ServiceAddress"),
            "Port": data.get("ServicePort"),
        }
        return data["ServiceID"], payload
    if list(data) == ["ID"]:
        # Only an id was supplied; no payload to send.
        return data["ID"], {}
    result = {"Service": data["Name"]} if "Name" in data else {}
    for key in ("Service", "ID", "Tags", "Address", "Port"):
        if key in data:
            result[key] = data[key]
    return result.get("ID"), result
Prepare service for catalog endpoint
11,279
def prepare_check(data):
    """Normalize *data* into a ``(check_id, payload)`` pair for the
    catalog endpoint.

    Accepts a bare check id or a check dict (with ``ID`` mapped onto
    ``CheckID``).
    """
    if not data:
        return None, {}
    if isinstance(data, str):
        # Bare check identifier.
        return data, {}
    result = {"CheckID": data["ID"]} if "ID" in data else {}
    for key in ("Node", "CheckID", "Name", "Notes", "Status", "ServiceID"):
        if key in data:
            result[key] = data[key]
    if list(result) == ["CheckID"]:
        # Only an id was supplied; no payload to send.
        return result["CheckID"], {}
    return result.get("CheckID"), result
Prepare check for catalog endpoint
11,280
def optimize_no(self):
    """Reset every optimization-related option to its default (off) state."""
    self.optimization = 0
    for flag in ('relax', 'gc_sections', 'ffunction_sections',
                 'fdata_sections', 'fno_inline_small_functions'):
        setattr(self, flag, False)
all options set to default
11,281
def init(self):
    """Import the plugin modules for this resource manager, exactly once.

    Modules whose last path component does not start with the active Qt
    wrapper name are skipped.
    """
    if self._initialized:
        return
    self._initialized = True
    wrapper = projexui.qt.QT_WRAPPER.lower()
    skip = lambda modname: not modname.split('.')[-1].startswith(wrapper)
    projex.importmodules(self.plugins(), ignore=skip)
Initializes the plugins for this resource manager .
11,282
def policies(self):
    """Return an iterator of ``(type_name, policy)`` pairs from the
    underlying protobuf message."""
    pb = self._pb
    type_names = ("integer", "float", "string", "bool")
    policy_fields = (pb.integer_policy, pb.float_policy,
                     pb.string_policy, pb.bool_policy)
    return zip(type_names, policy_fields)
Return list of policies zipped with their respective data type
11,283
def add_measurement(measurement):
    """Add measurement data to the submission buffer for eventual
    writing to InfluxDB.

    Silently drops the measurement when collection is disabled, the
    module is stopping, or the buffer is over its size limit; raises
    ``ValueError`` for a measurement without fields.  Triggers an
    immediate batch write once the trigger size is reached.
    """
    global _buffer_size
    if not _enabled:
        LOGGER.debug('Discarding measurement for %s while not enabled',
                     measurement.database)
        return
    if _stopping:
        LOGGER.warning('Discarding measurement for %s while stopping',
                       measurement.database)
        return
    if _buffer_size > _max_buffer_size:
        LOGGER.warning('Discarding measurement due to buffer size limit')
        return
    if not measurement.fields:
        raise ValueError('Measurement does not contain a field')
    _measurements.setdefault(measurement.database, []).append(
        measurement.marshall())
    # Arm the flush timer unless one is already pending or a batch
    # write is currently in flight.
    if not _timeout and (not _batch_future or _batch_future.done()):
        _start_timeout()
    _buffer_size = _pending_measurements()
    if _buffer_size >= _trigger_size:
        _trigger_batch_write()
Add measurement data to the submission buffer for eventual writing to InfluxDB .
11,284
def flush():
    """Flush all pending measurements to InfluxDB.

    Returns a future that resolves once every buffered measurement for
    every database has been written; failed submissions are retried
    until they succeed.
    """
    flush_future = concurrent.Future()
    if _batch_future and not _batch_future.done():
        # A batch is already in flight; wait for it before draining.
        LOGGER.debug('Flush waiting on incomplete _batch_future')
        _flush_wait(flush_future, _batch_future)
        return flush_future
    LOGGER.info('Flushing buffer with %i measurements to InfluxDB',
                _pending_measurements())
    _flush_wait(flush_future, _write_measurements())
    return flush_future
Flush all pending measurements to InfluxDB . This will ensure that all measurements that are in the buffer for any database are written . If the requests fail it will continue to try and submit the metrics until they are successfully written .
11,285
def set_auth_credentials(username, password):
    """Override the authentication credentials obtained from the
    environment-variable configuration.

    Marks the client configuration dirty so it is rebuilt.
    """
    global _credentials, _dirty
    LOGGER.debug('Setting authentication credentials')
    _credentials = (username, password)
    _dirty = True
Override the default authentication credentials obtained from the environment variable configuration .
11,286
def set_base_url(url):
    """Override the default base URL created from the
    environment-variable configuration.

    Marks the client configuration dirty so it is rebuilt.
    """
    global _base_url, _dirty
    LOGGER.debug('Setting base URL to %s', url)
    _base_url = url
    _dirty = True
Override the default base URL value created from the environment variable configuration .
11,287
def set_max_clients(limit):
    """Set the maximum number of simultaneous batch submissions that may
    execute in parallel.

    Marks the client configuration dirty so it is rebuilt.
    """
    global _dirty, _max_clients
    LOGGER.debug('Setting maximum client limit to %i', limit)
    _dirty = True
    _max_clients = limit
Set the maximum number of simultaneous batch submission that can execute in parallel .
11,288
def set_sample_probability(probability):
    """Set the probability (0.0-1.0 inclusive) that a batch will be
    submitted to the InfluxDB server.

    :raises ValueError: if *probability* falls outside ``[0.0, 1.0]``
    """
    global _sample_probability
    if probability < 0.0 or probability > 1.0:
        raise ValueError('Invalid probability value')
    LOGGER.debug('Setting sample probability to %.2f', probability)
    _sample_probability = float(probability)
Set the probability that a batch will be submitted to the InfluxDB server . This should be a value that is greater than or equal to 0 and less than or equal to 1 . 0 . A value of 0 . 25 would represent a probability of 25% that a batch would be written to InfluxDB .
11,289
def set_timeout(milliseconds):
    """Override the maximum duration to wait before submitting buffered
    measurements to InfluxDB.

    :param int milliseconds: the new batch-wait timeout

    Bug fix: Tornado's ``IOLoop.add_timeout`` expects an *absolute*
    deadline, but the original passed ``milliseconds`` directly -- a
    deadline far in the past -- so the timeout fired immediately.  The
    deadline is now computed the same way ``_start_timeout`` does.
    """
    global _timeout, _timeout_interval
    LOGGER.debug('Setting batch wait timeout to %i ms', milliseconds)
    _timeout_interval = milliseconds
    _maybe_stop_timeout()
    _timeout = ioloop.IOLoop.current().add_timeout(
        ioloop.IOLoop.current().time() + milliseconds / 1000.0,
        _on_timeout)
Override the maximum duration to wait for submitting measurements to InfluxDB .
11,290
def _create_http_client():
    """Create the module-level HTTP client, attaching authentication
    credentials as request defaults when both are configured."""
    global _http_client
    defaults = {'user_agent': USER_AGENT}
    username, password = _credentials
    if username and password:
        defaults['auth_username'] = username
        defaults['auth_password'] = password
    _http_client = httpclient.AsyncHTTPClient(
        force_instance=True, defaults=defaults, max_clients=_max_clients)
Create the HTTP client with authentication credentials if required .
11,291
def _flush_wait(flush_future, write_future):
    """Poll until the buffer is fully drained, then resolve *flush_future*.

    When the current write has finished but measurements remain, a new
    write is started; otherwise re-check in 250 ms.
    """
    if write_future.done():
        if not _pending_measurements():
            # Everything written: signal completion.
            flush_future.set_result(True)
            return
        # More measurements arrived while writing; start another batch.
        write_future = _write_measurements()
    ioloop.IOLoop.current().add_timeout(
        ioloop.IOLoop.current().time() + 0.25,
        _flush_wait, flush_future, write_future)
Pause briefly allowing any pending metric writes to complete before shutting down .
11,292
def _futures_wait(wait_future, futures):
    """Wait for all batch-write futures to complete.

    *futures* is a list of ``(future, batch, database, measurements)``
    tuples.  Unfinished futures are re-polled every 100 ms; once all are
    done, bookkeeping is updated and *wait_future* is resolved.
    """
    global _buffer_size, _writing
    remaining = []
    for (future, batch, database, measurements) in futures:
        # Keep unfinished futures for the next poll cycle.
        if not future.done():
            remaining.append((future, batch, database, measurements))
            continue
        error = future.exception()
        if isinstance(error, httpclient.HTTPError):
            if error.code == 400:
                # 400: hand the batch to the error handler -- presumably
                # it isolates the offending measurements; TODO confirm.
                _write_error_batch(batch, database, measurements)
            elif error.code >= 500:
                # Server-side failure: measurements are re-queued for retry.
                _on_5xx_error(batch, error, database, measurements)
            else:
                LOGGER.error('Error submitting %s batch %s to InfluxDB (%s): '
                             '%s', database, batch, error.code,
                             error.response.body)
        elif isinstance(error, (TimeoutError, OSError, socket.error,
                                select.error, ssl.socket_error)):
            # Network-level errors are treated like 5xx: retry later.
            # NOTE(review): ssl.socket_error / select.error are legacy
            # aliases -- confirm they exist on the targeted Python version.
            _on_5xx_error(batch, error, database, measurements)
    if remaining:
        # Not all futures finished -- poll again in 100 ms.
        return ioloop.IOLoop.current().add_timeout(
            ioloop.IOLoop.current().time() + 0.1,
            _futures_wait, wait_future, remaining)
    else:
        _buffer_size = _pending_measurements()
        LOGGER.debug('Batch submitted, %i measurements remain', _buffer_size)
        if _buffer_size >= _trigger_size:
            # Enough retries/new data accumulated to warrant another write.
            ioloop.IOLoop.current().add_callback(_trigger_batch_write)
        elif _buffer_size:
            _start_timeout()
        _writing = False
        wait_future.set_result(True)
Waits for all futures to be completed . If the futures are not done wait 100ms and then invoke itself via the ioloop and check again . If they are done set a result on wait_future indicating the list of futures are done .
11,293
def _maybe_stop_timeout():
    """Cancel any pending timeout and clear the module-level handle."""
    global _timeout
    if _timeout is None:
        return
    LOGGER.debug('Removing the pending timeout (%r)', _timeout)
    ioloop.IOLoop.current().remove_timeout(_timeout)
    _timeout = None
If there is a pending timeout remove it from the IOLoop and set the _timeout global to None .
11,294
def _maybe_warn_about_buffer_size():
    """Warn when the measurement buffer exceeds the threshold, at most
    once every 120 seconds.

    Bug fix: ``_last_warning`` was never refreshed after a warning was
    issued, so once 120 seconds had elapsed the warning fired on every
    subsequent call.  The timestamp is now recorded whenever a warning
    is emitted.
    """
    global _last_warning
    now = time.time()
    if not _last_warning:
        _last_warning = now
    if _buffer_size > _warn_threshold and (now - _last_warning) > 120:
        LOGGER.warning('InfluxDB measurement buffer has %i entries',
                       _buffer_size)
        # Throttle: remember when we last warned.
        _last_warning = now
Check the buffer size and issue a warning if it's too large and a warning has not been issued for more than 120 seconds.
11,295
def _on_5xx_error(batch, error, database, measurements):
    """Log a failed batch submission and push its measurements back onto
    the per-database stack for retry."""
    LOGGER.info('Appending %s measurements to stack due to batch %s %r',
                database, batch, error)
    # Rebind (rather than extend in place) to match the original's
    # semantics for any code holding a reference to the old list.
    requeued = _measurements[database] + measurements
    _measurements[database] = requeued
Handle a batch submission error logging the problem and adding the measurements back to the stack .
11,296
def _on_timeout():
    """Periodic callback ensuring collected metrics get submitted.

    Writes the buffer when it is non-empty, otherwise re-arms the timer.
    """
    global _buffer_size
    LOGGER.debug('No metrics submitted in the last %.2f seconds',
                 _timeout_interval / 1000.0)
    _buffer_size = _pending_measurements()
    if not _buffer_size:
        _start_timeout()
        return None
    return _trigger_batch_write()
Invoked periodically to ensure that metrics that have been collected are submitted to InfluxDB .
11,297
def _sample_batch():
    """Decide whether the current batch should be written.

    When the batch is sampled out, up to one batch worth of pending
    measurements is dropped from every database.
    """
    keep = _sample_probability == 1.0 or random.random() < _sample_probability
    if keep:
        return True
    for database in _measurements:
        _measurements[database] = _measurements[database][_max_batch_size:]
    return False
Determine if a batch should be processed and if not pop off all of the pending metrics for that batch .
11,298
def _start_timeout():
    """Replace any running timeout with a fresh one firing in
    ``_timeout_interval`` milliseconds."""
    global _timeout
    LOGGER.debug('Adding a new timeout in %i ms', _timeout_interval)
    _maybe_stop_timeout()
    deadline = ioloop.IOLoop.current().time() + _timeout_interval / 1000.0
    _timeout = ioloop.IOLoop.current().add_timeout(deadline, _on_timeout)
Stop a running timeout if it s there then create a new one .
11,299
def _trigger_batch_write():
    """Cancel any pending timeout and start a measurement write.

    Returns the future for the write so callers can wait on it.
    """
    global _batch_future
    LOGGER.debug('Batch write triggered (%r/%r)', _buffer_size, _trigger_size)
    _maybe_stop_timeout()
    _maybe_warn_about_buffer_size()
    _batch_future = _write_measurements()
    return _batch_future
Stop a timeout if it s running and then write the measurements .