idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
54,500
def should_run(self):
    """Return True if this feature should run on the current system.

    When the config declares a 'systems' list, run only if one of the
    recognized platform checks matches an entry in that list.
    """
    config = self.target or self.source
    if not config.has('systems'):
        return True
    requested = [entry.lower() for entry in config.get('systems').split(",")]
    platform_checks = [('is_osx', 'osx'), ('is_debian', 'debian')]
    return any(
        label in requested and getattr(system, checker)()
        for checker, label in platform_checks
    )
Returns true if the feature should run
54,501
def resolve(self):
    """Resolve differences between the target and the source configuration.

    Copies every source option the target lacks, skipping keys listed in
    ``dont_carry_over_options``. No-op unless both configs are present.
    """
    if not (self.source and self.target):
        return
    for option in self.source.keys():
        if option in self.dont_carry_over_options:
            continue
        if self.target.has(option):
            continue
        self.target.set(option, self.source.get(option))
Resolve differences between the target and the source configuration
54,502
def _log_error ( self , message ) : key = ( self . feature_name , self . target . get ( 'formula' ) ) self . environment . log_feature_error ( key , "ERROR: " + message )
Log an error for the feature
54,503
def jinja_fragment_extension(tag, endtag=None, name=None, tag_only=False,
                             allow_args=True, callblock_args=None):
    """Decorator to easily create a jinja extension which acts as a fragment.

    The decorated function becomes the extension's ``support_method``; a new
    Extension subclass handling ``{% tag %}...{% endtag %}`` is returned.
    """
    if endtag is None:
        endtag = "end" + tag

    def decorator(f):
        def parse(self, parser):
            # NOTE(review): py2-style .next() on the token stream; modern
            # jinja2 streams use next(parser.stream) — confirm target version.
            lineno = parser.stream.next().lineno
            args = []
            kwargs = []
            if allow_args:
                args, kwargs = parse_block_signature(parser)
            # Call the support method with whatever was parsed from the tag.
            call = self.call_method("support_method", args, kwargs, lineno=lineno)
            if tag_only:
                # No body to parse — emit the call's output directly.
                return nodes.Output([call], lineno=lineno)
            call_args = []
            if callblock_args is not None:
                for arg in callblock_args:
                    call_args.append(nodes.Name(arg, 'param', lineno=lineno))
            # Consume everything up to the matching end tag as the block body.
            body = parser.parse_statements(['name:' + endtag], drop_needle=True)
            return nodes.CallBlock(call, call_args, [], body, lineno=lineno)

        def support_method(self, *args, **kwargs):
            return f(*args, **kwargs)

        attrs = {"tags": set([tag]), "parse": parse, "support_method": support_method}
        # Build the Extension subclass dynamically, named after the function.
        return type(name or f.__name__, (Extension,), attrs)

    return decorator
Decorator to easily create a jinja extension which acts as a fragment .
54,504
def jinja_block_as_fragment_extension(name, tagname=None, classname=None):
    """Create a fragment extension acting as a drop-in replacement for a block
    statement, e.g. ``{% name %}...{% endname %}``.
    """
    if tagname is None:
        tagname = name
    if classname is None:
        classname = "%sBlockFragmentExtension" % name.capitalize()
    attrs = {
        "tags": set([tagname]),
        "end_tag": "end" + tagname,
        "block_name": name,
    }
    return type(classname, (BaseJinjaBlockAsFragmentExtension,), attrs)
Creates a fragment extension which will just act as a replacement of the block statement .
54,505
def find_copies(input_dir, exclude_list):
    """Find files that are neither templates (.mustache) nor excluded, for
    copying from the template tree to the image.
    """
    copies = []

    def copy_finder(copies, dirname):
        for entry in os.listdir(dirname):
            full_path = os.path.join(dirname, entry)
            if os.path.isdir(full_path):
                continue
            if entry in exclude_list:
                continue
            if entry.endswith('.mustache'):
                continue
            copies.append(os.path.join(dirname, entry))

    dir_visitor(input_dir, functools.partial(copy_finder, copies))
    return copies
find files that are not templates and not in the exclude_list for copying from template to image
54,506
def get_devices(self, refresh=False, generic_type=None):
    """Get all devices from Lupusec.

    On first call (or when ``refresh`` is True) re-fetches the sensors,
    panel/alarm and power switches, caching everything in self._devices.
    When ``generic_type`` is given, only devices of those types are returned.
    """
    _LOGGER.info("Updating all devices...")
    if refresh or self._devices is None:
        if self._devices is None:
            self._devices = {}
        responseObject = self.get_sensors()
        if (responseObject and
                not isinstance(responseObject, (tuple, list))):
            # NOTE(review): this branch is a no-op; presumably it was meant
            # to wrap a single item in a list — confirm against the API.
            responseObject = responseObject
        for deviceJson in responseObject:
            # NOTE(review): lookups use deviceJson['name'] but devices are
            # stored under device.device_id — verify the keys match.
            device = self._devices.get(deviceJson['name'])
            if device:
                device.update(deviceJson)
            else:
                device = newDevice(deviceJson, self)
                if not device:
                    _LOGGER.info('Device is unknown')
                    continue
                self._devices[device.device_id] = device
        panelJson = self.get_panel()
        _LOGGER.debug("Get the panel in get_devices: %s", panelJson)
        self._panel.update(panelJson)
        # The alarm panel is kept under the reserved device id '0'.
        alarmDevice = self._devices.get('0')
        if alarmDevice:
            alarmDevice.update(panelJson)
        else:
            alarmDevice = ALARM.create_alarm(panelJson, self)
            self._devices['0'] = alarmDevice
        switches = self.get_power_switches()
        _LOGGER.debug('Get active the power switches in get_devices: %s', switches)
        for deviceJson in switches:
            device = self._devices.get(deviceJson['name'])
            if device:
                device.update(deviceJson)
            else:
                device = newDevice(deviceJson, self)
                if not device:
                    _LOGGER.info('Device is unknown')
                    continue
                self._devices[device.device_id] = device
    if generic_type:
        # Filter cached devices by the requested generic type group.
        devices = []
        for device in self._devices.values():
            if (device.type is not None and
                    device.type in generic_type[0]):
                devices.append(device)
        return devices
    return list(self._devices.values())
Get all devices from Lupusec .
54,507
def parse_from_dict(json_dict):
    """Given a Unified Uploader message dict, parse the contents and return a
    MarketHistoryList instance.
    """
    columns = json_dict['columns']
    result = MarketHistoryList(
        upload_keys=json_dict['uploadKeys'],
        history_generator=json_dict['generator'],
    )
    for rowset in json_dict['rowsets']:
        generated_at = parse_datetime(rowset['generatedAt'])
        region_id = rowset['regionID']
        type_id = rowset['typeID']
        result.set_empty_region(region_id, type_id, generated_at)
        for row in rowset['rows']:
            entry_kwargs = _columns_to_kwargs(
                SPEC_TO_KWARG_CONVERSION, columns, row)
            entry_kwargs.update({
                'type_id': type_id,
                'region_id': region_id,
                'historical_date': parse_datetime(entry_kwargs['historical_date']),
                'generated_at': generated_at,
            })
            result.add_entry(MarketHistoryEntry(**entry_kwargs))
    return result
Given a Unified Uploader message parse the contents and return a MarketHistoryList instance .
54,508
def encode_to_json(history_list):
    """Encode this MarketHistoryList instance to a JSON string in the
    Unified Uploader format.
    """
    rowsets = []
    for region_items in history_list._history.values():
        rows = [
            [
                gen_iso_datetime_str(entry.historical_date),
                entry.num_orders,
                entry.total_quantity,
                entry.low_price,
                entry.high_price,
                entry.average_price,
            ]
            for entry in region_items.entries
        ]
        rowsets.append(dict(
            generatedAt=gen_iso_datetime_str(region_items.generated_at),
            regionID=region_items.region_id,
            typeID=region_items.type_id,
            rows=rows,
        ))
    payload = {
        'resultType': 'history',
        'version': '0.1',
        'uploadKeys': history_list.upload_keys,
        'generator': history_list.history_generator,
        'currentTime': gen_iso_datetime_str(now_dtime_in_utc()),
        'columns': STANDARD_ENCODED_COLUMNS,
        'rowsets': rowsets,
    }
    return json.dumps(payload)
Encodes this MarketHistoryList instance to a JSON string .
54,509
def load(self, configuration):
    """Load a YAML configuration from a file path or a raw YAML string.

    Tries ``configuration`` as a path first; on IOError the value itself is
    parsed as YAML text. Populates customer/instances/web2py/api_type and
    sets ``self.valid`` accordingly.

    Fixes: the py2-only ``except ParserError, e`` syntax (a SyntaxError on
    py3) and the leaked file handle.
    """
    try:
        # NOTE(review): yaml.load on untrusted input can construct arbitrary
        # objects; consider yaml.safe_load if the config source is untrusted.
        with open(configuration, "rb") as config_file:
            self.config = yaml.load(config_file)
    except IOError:
        try:
            self.config = yaml.load(configuration)
        except ParserError as e:
            raise ParserError('Error parsing config: %s' % e)
    if isinstance(self.config, dict):
        self.customer = self.config.get('customer', {})
        self.instances_dict = self.config.get('instances', {})
        self.web2py_dir = self.config.get('web2py', None)
        self.api_type = self.config.get('api_type', 'jsonrpc')
        self.valid = True
    else:
        self.customer = {}
        self.instances_dict = {}
        self.web2py_dir = None
        self.valid = False
Load a YAML configuration file .
54,510
def instances(self, test_type=".*"):
    """Return a dict of all configured instances whose test_type matches the
    given regex (case-insensitive).

    Fix: ``dict.iteritems()`` is py2-only; use ``items()``.
    """
    import re
    data = {}
    for key, spec in self.instances_dict.items():
        if not re.match(test_type, spec.get('test_type'), re.IGNORECASE):
            continue
        if 'filter_type' in spec:
            hostfilter = {
                'filtertype': spec['filter_type'],
                'content': spec['filter_value'],
            }
        else:
            hostfilter = {}
        data[key] = {
            'name': spec.get('name'),
            'start': spec.get('start'),
            'end': spec.get('end'),
            'url': spec.get('url'),
            'hostfilter': hostfilter,
            'test_type': spec.get('test_type'),
        }
    return data
Returns a dict of all instances defined using a regex
54,511
def none_to_blank(s, exchange=''):
    """Replace None with ``exchange`` (default: empty string), recursively for
    lists; any other value is converted to its string form.

    Fixes: py2-only ``unicode()`` (→ ``str``); the recursive call previously
    dropped ``exchange``, so nested replacements always fell back to ''.
    """
    if isinstance(s, list):
        return [none_to_blank(item, exchange) for item in s]
    return exchange if s is None else str(s)
Replaces None with a replacement value (empty string by default), recursively for lists; other values are converted to strings.
54,512
def make_good_url(url=None, addition="/"):
    """Append ``addition`` to ``url`` with exactly one joining slash.

    Returns None when url is missing or either argument is not a string.
    """
    if url is None:
        return None
    if not (isinstance(url, str) and isinstance(addition, str)):
        return None
    return "{}/{}".format(url.rstrip('/'), addition.lstrip('/'))
Appends addition to url, ensuring the right number of slashes exist and that the path doesn't get clobbered.
54,513
def build_kvasir_url(proto="https", server="localhost", port="8443",
                     base="Kvasir", user="test", password="test",
                     path=KVASIR_JSONRPC_PATH):
    """Create a full URL to reach Kvasir given connection details.

    Bug fix: the URI was assembled as ``user@password/server:port``; the
    conventional authority form is ``user:password@server:port``.
    """
    uri = '%s://%s:%s@%s:%s/%s' % (proto, user, password, server, port, base)
    return make_good_url(uri, path)
Creates a full URL to reach Kvasir given specific data
54,514
def get_default(parser, section, option, default):
    """Helper to get a config setting, with a default if not present.

    Fix: the exceptions were referenced via the py2-only ``ConfigParser``
    module name; the rest of this code uses the py3 ``configparser``.
    """
    import configparser
    try:
        return parser.get(section, option)
    except (configparser.NoSectionError, configparser.NoOptionError):
        return default
helper to get config settings with a default if not present
54,515
def set_db_application_prefix(prefix, sep=None):
    """Set the global application prefix, and the separator when provided."""
    global _APPLICATION_PREFIX, _APPLICATION_SEP
    _APPLICATION_PREFIX = prefix
    if sep is not None:
        _APPLICATION_SEP = sep
Set the global app prefix and separator .
54,516
def find_by_index(self, cls, index_name, value):
    """Find records matching an index query — deferred to the backend."""
    backend = self.backend
    return backend.find_by_index(cls, index_name, value)
Find records matching index query - defer to backend .
54,517
def humanTime(seconds):
    """Convert seconds to something more human-friendly, e.g. '1 days 2 minutes'."""
    delta = deltaTime(seconds=seconds)
    parts = []
    for unit in ('days', 'hours', 'minutes', 'seconds'):
        amount = getattr(delta, unit)
        if amount:
            parts.append('{} {}'.format(amount, unit))
    return ' '.join(parts)
Convert seconds to something more human - friendly
54,518
def humanTimeConverter():
    """CLI entry point: cope with whether we're passed a time in seconds on
    the command line or via stdin, printing the human-friendly form.

    Fix: py2 ``print`` statements were a SyntaxError under py3.
    """
    if len(sys.argv) == 2:
        print(humanFriendlyTime(seconds=int(sys.argv[1])))
    else:
        for line in sys.stdin:
            # NOTE(review): humanFriendlyTime is defined elsewhere — confirm
            # it is not meant to be humanTime from this module.
            print(humanFriendlyTime(int(line)))
    sys.exit(0)
Cope with whether we're passed a time in seconds on the command line or via stdin.
54,519
def train(self, data, **kwargs):
    """Record per-column means and standard deviations of the training data,
    then store the normalized data via self.predict.

    Fixes: py2-only ``xrange`` (→ ``range``) and ``DataFrame.icol()``, which
    was removed from pandas (→ ``iloc[:, i]``).
    """
    self.data = data
    for col in range(data.shape[1]):
        column = data.iloc[:, col]
        self.column_means += [np.mean(column)]
        self.column_stdevs += [np.std(column)]
    self.data = self.predict(data)
Calculate the standard deviations and means in the training data
54,520
def predict(self, test_data, **kwargs):
    """Adjust new input in place by the means/stdevs from the training data.

    Raises Exception when the column count differs from the training data.
    Fixes: py2-only ``xrange``; the removed ``DataFrame.icol()``; and the
    mixed positional (``icol``)/label (``loc``) indexing — now consistently
    positional via ``iloc``.
    """
    if test_data.shape[1] != self.data.shape[1]:
        raise Exception("Test data has different number of columns than training data.")
    for col in range(test_data.shape[1]):
        test_data.iloc[:, col] = test_data.iloc[:, col] - self.column_means[col]
        # Skip the division only when the stdev truncates to zero, as before.
        if int(self.column_stdevs[col]) != 0:
            test_data.iloc[:, col] = test_data.iloc[:, col] / self.column_stdevs[col]
    return test_data
Adjust new input by the values in the training data
54,521
def action_decorator(name):
    """Decorator factory registering the decorated class as an action decorator
    under ``name`` in the module-level ``action_decorators`` list.
    """
    def register_action(cls):
        action_decorators.append((name, cls))
        return cls
    return register_action
Decorator to register an action decorator
54,522
def load_global_config(config_path):
    """Load the global configuration object, querying for any required
    variables along the way.

    Reads the file at ``config_path`` when it exists; otherwise runs the
    first-time setup banner and prompts. Always (re)runs configure_config
    so missing required values get asked for, then persists the result.
    """
    config = configparser.RawConfigParser()
    if os.path.exists(config_path):
        logger.debug("Checking and setting global parameters...")
        config.read(config_path)
    else:
        # First run: no config file yet, so explain what is about to happen.
        _initial_run()
        logger.info("Unable to find a global sprinter configuration!")
        logger.info("Creating one now. Please answer some questions" +
                    " about what you would like sprinter to do.")
        logger.info("")
    # Ensure the mandatory section exists before any gets/sets below.
    if not config.has_section('global'):
        config.add_section('global')
    configure_config(config)
    write_config(config, config_path)
    return config
Load a global configuration object and query for any required variables along the way
54,523
def print_global_config(global_config):
    """Print the global configuration: shell settings and env-source-rc flag."""
    if global_config.has_section('shell'):
        print("\nShell configurations:")
        for name, value in global_config.items('shell'):
            print("{0}: {1}".format(name, value))
    if global_config.has_option('global', 'env_source_rc'):
        env_source_rc = global_config.get('global', 'env_source_rc')
        print("\nHave sprinter env source rc: {0}".format(env_source_rc))
print the global configuration
54,524
def create_default_config():
    """Create a default configuration object with all parameters filled.

    Fix: py3 ``configparser`` requires option values to be strings, so the
    previous ``set('global', 'env_source_rc', False)`` raised TypeError.
    Store 'false' instead — ``getboolean()`` reads it identically.
    """
    config = configparser.RawConfigParser()
    config.add_section('global')
    config.set('global', 'env_source_rc', 'false')
    config.add_section('shell')
    config.set('shell', 'bash', "true")
    config.set('shell', 'zsh', "true")
    config.set('shell', 'gui', "true")
    return config
Create a default configuration object with all parameters filled
54,525
def _initial_run():
    """Emit the first-run banner, warning when the platform is unsupported."""
    if not system.is_officially_supported():
        logger.warn(
            warning_template
            + "===========================================================\n"
            + "Sprinter is not officially supported on {0}! Please use at your own risk.\n\n".format(system.operating_system())
            + "You can find the supported platforms here:\n"
            + "(http://sprinter.readthedocs.org/en/latest/index.html#compatible-systems)\n\n"
            + "Conversely, please help us support your system by reporting on issues\n"
            + "(http://sprinter.readthedocs.org/en/latest/faq.html#i-need-help-who-do-i-talk-to)\n"
            + "==========================================================="
        )
    else:
        logger.info(
            "\nThanks for using \n"
            + "=" * 60
            + sprinter_template
            + "=" * 60
        )
Check things during the initial setting of sprinter's global config.
54,526
def _configure_shell(config):
    """Ask which shells/environments sprinter should inject into and record
    a true/false flag per environment in the 'shell' section.
    """
    if not config.has_section('shell'):
        config.add_section('shell')
    logger.info(
        "What shells or environments would you like sprinter to work with?\n"
        "(Sprinter will not try to inject into environments not specified here.)\n"
        "If you specify 'gui', sprinter will attempt to inject it's state into graphical programs as well.\n"
        "i.e. environment variables sprinter set will affect programs as well, not just shells\n"
        "WARNING: injecting into the GUI can be very dangerous. it usually requires a restart\n"
        " to modify any environmental configuration."
    )
    environments = list(enumerate(sorted(SHELL_CONFIG), start=1))
    logger.info("[0]: All, " + ", ".join(
        ["[%d]: %s" % (index, val) for index, val in environments]))
    desired_environments = lib.prompt(
        "type the environment, comma-separated", default="0")
    for index, val in environments:
        chosen = str(index) in desired_environments or "0" in desired_environments
        config.set('shell', val, 'true' if chosen else 'false')
Checks and queries values for the shell
54,527
def _configure_env_source_rc(config):
    """Configure whether the .env should source the .rc file (OSX only asks).

    Fix: py3 ``configparser`` only accepts string option values; the previous
    boolean ``False`` / prompt result raised TypeError — store 'true'/'false'.
    """
    config.set('global', 'env_source_rc', 'false')
    if system.is_osx():
        logger.info("On OSX, login shells are default, which only source sprinter's 'env' configuration.")
        logger.info("I.E. environment variables would be sourced, but not shell functions " +
                    "or terminal status lines.")
        logger.info("The typical solution to get around this is to source your rc file (.bashrc, .zshrc) " +
                    "from your login shell.")
        env_source_rc = lib.prompt("would you like sprinter to source the rc file too?",
                                   default="yes", boolean=True)
        config.set('global', 'env_source_rc', 'true' if env_source_rc else 'false')
Configures whether the env configuration should also source the rc file.
54,528
def get_members(self):
    """Return all members in the group as CSHMember objects."""
    results = self.__con__.search_s(
        self.__ldap_base_dn__,
        ldap.SCOPE_SUBTREE,
        "(memberof=%s)" % self.__dn__,
        ['uid'])
    uids = []
    for entry in results:
        raw_uid = entry[1]['uid'][0]
        try:
            uids.append(raw_uid.decode('utf-8'))
        except UnicodeDecodeError:
            uids.append(raw_uid)
        except KeyError:
            continue
    return [CSHMember(self.__lib__, result, uid=True) for result in uids]
Return all members in the group as CSHMember objects
54,529
def check_member(self, member, dn=False):
    """Check whether a member — given as an object or as a DN string via
    ``dn`` — belongs to the bound group.
    """
    member_dn = dn if dn else member.get_dn()
    results = self.__con__.search_s(
        self.__dn__,
        ldap.SCOPE_BASE,
        "(member=%s)" % member_dn,
        ['ipaUniqueID'])
    return len(results) > 0
Check if a Member is in the bound group .
54,530
def add_member(self, member, dn=False):
    """Add a member (object, or DN string when ``dn`` is truthy) to the bound
    group, honoring batch/read-only modes of the library.
    """
    if dn:
        if self.check_member(member, dn=True):
            return
        new_value = member.encode('ascii')
    else:
        if self.check_member(member):
            return
        new_value = member.get_dn().encode('ascii')
    mod = (ldap.MOD_ADD, 'member', new_value)
    if self.__lib__.__batch_mods__:
        self.__lib__.enqueue_mod(self.__dn__, mod)
    elif not self.__lib__.__ro__:
        self.__con__.modify_s(self.__dn__, [mod])
    else:
        print("ADD VALUE member = {} FOR {}".format(mod[2], self.__dn__))
Add a member to the bound group
54,531
def read_object_from_yaml(desired_type: Type[Any], file_object: TextIOBase,
                          logger: Logger, fix_imports: bool = True,
                          errors: str = 'strict', *args, **kwargs) -> Any:
    """Parse a yaml file-like object into a python object."""
    # NOTE(review): yaml.load without an explicit Loader can construct
    # arbitrary objects on untrusted input — confirm the source is trusted.
    return yaml.load(file_object)
Parses a yaml file .
54,532
def read_collection_from_yaml(desired_type: Type[Any], file_object: TextIOBase,
                              logger: Logger, conversion_finder: ConversionFinder,
                              fix_imports: bool = True, errors: str = 'strict',
                              **kwargs) -> Any:
    """Parse a collection from a yaml file-like object, then coerce its values
    to the desired type via the conversion finder.
    """
    loaded = yaml.load(file_object)
    return ConversionFinder.convert_collection_values_according_to_pep(
        loaded, desired_type, conversion_finder, logger, **kwargs)
Parses a collection from a yaml file .
54,533
def pass_feature(*feature_names):
    """Decorator factory: inject a feature proxy into kwargs for each named
    feature before calling the wrapped function.
    """
    def decorator(f):
        @functools.wraps(f)
        def wrapper(*args, **kwargs):
            kwargs.update((name, feature_proxy(name)) for name in feature_names)
            return f(*args, **kwargs)
        return wrapper
    return decorator
Injects a feature instance into the kwargs
54,534
def extract_tar(url, target_dir, additional_compression="", remove_common_prefix=False, overwrite=False):
    """Extract a tar archive downloaded from ``url`` into ``target_dir``.

    Optionally strips the archive's common path prefix and/or overwrites
    existing files. OSError/IOError are re-raised as ExtractException.
    NOTE(review): ``additional_compression`` is accepted but never used here.
    """
    try:
        if not os.path.exists(target_dir):
            os.makedirs(target_dir)
        # Download fully into memory and open the archive from the buffer.
        tf = tarfile.TarFile.open(fileobj=download_to_bytesio(url))
        # NOTE(review): duplicate of the makedirs above — appears redundant.
        if not os.path.exists(target_dir):
            os.makedirs(target_dir)
        common_prefix = os.path.commonprefix(tf.getnames())
        if not common_prefix.endswith('/'):
            common_prefix += "/"
        for tfile in tf.getmembers():
            if remove_common_prefix:
                # Strip only the first occurrence of the shared prefix.
                tfile.name = tfile.name.replace(common_prefix, "", 1)
            if tfile.name != "":
                target_path = os.path.join(target_dir, tfile.name)
                if target_path != target_dir and os.path.exists(target_path):
                    if overwrite:
                        remove_path(target_path)
                    else:
                        # Keep the existing file when not overwriting.
                        continue
                # NOTE(review): member names are not sanitized against
                # path traversal ('../') — confirm archives are trusted.
                tf.extract(tfile, target_dir)
    except OSError:
        e = sys.exc_info()[1]
        raise ExtractException(str(e))
    except IOError:
        e = sys.exc_info()[1]
        raise ExtractException(str(e))
extract a targz and install to the target directory
54,535
def remove_path(target_path):
    """Delete the target path, whether a directory tree or a single file."""
    remover = shutil.rmtree if os.path.isdir(target_path) else os.unlink
    remover(target_path)
Delete the target path
54,536
def save(self, obj, id_code):
    """Pickle ``obj`` to '<data_path>/<id_code>'.

    obj - any picklable object; id_code - unique identifier used as filename.
    Fixes: the file was opened in text mode ('w+'), which makes pickle.dump
    raise TypeError on py3 — pickle requires binary mode; the handle is now
    also closed even when dump fails.
    """
    target = '{0}/{1}'.format(self.data_path, id_code)
    with open(target, 'wb') as filestream:
        pickle.dump(obj, filestream)
Save an object, using id_code in the filename. obj - any picklable object; id_code - unique identifier.
54,537
def load(self, id_code):
    """Load and return the workflow previously saved under ``id_code``
    (a prior ``save`` with the same id_code must have occurred).

    Fix: the file handle was never closed — use a context manager.
    """
    source = '{0}/{1}'.format(self.data_path, id_code)
    with open(source, 'rb') as filestream:
        return pickle.load(filestream)
Loads a workflow identified by id_code id_code - unique identifier previously must have called save with same id_code
54,538
def read_object_from_pickle(desired_type: Type[T], file_path: str, encoding: str,
                            fix_imports: bool = True, errors: str = 'strict',
                            *args, **kwargs) -> Any:
    """Parse a pickle file at ``file_path`` and return the stored object.

    NOTE(review): unpickling executes arbitrary code — confirm the file
    source is trusted.
    """
    import pickle
    with open(file_path, mode='rb') as file_object:
        return pickle.load(file_object, fix_imports=fix_imports,
                           encoding=encoding, errors=errors)
Parses a pickle file .
54,539
def should_display_warnings_for(to_type):
    """Central switch deciding whether conversion warnings are shown for a
    given destination type (suppressed for builtins, parsyfiles types,
    DataFrame, and primitive subclasses).
    """
    if not hasattr(to_type, '__module__'):
        return True
    module = to_type.__module__
    if (module in {'builtins'}
            or module.startswith('parsyfiles')
            or to_type.__name__ in {'DataFrame'}):
        return False
    if issubclass(to_type, (int, str, float, bool)):
        return False
    return True
Central method where we control whether warnings should be displayed
54,540
def print_dict(dict_name, dict_value, logger: Logger = None):
    """Utility to print a named dictionary, via the logger when provided,
    pretty-printed when pprint is available.
    """
    emit = print if logger is None else logger.info
    emit(dict_name + ' = ')
    try:
        from pprint import pformat
        emit(pformat(dict_value))
    except:
        emit(dict_value)
Utility method to print a named dictionary
54,541
def is_able_to_parse_detailed(self, desired_type: Type[Any], desired_ext: str, strict: bool):
    """Explicitly declare inability to parse when dict-to-object conversion is
    not valid for the desired type; otherwise defer to the parent class.
    """
    target = None if desired_type is JOKER else desired_type
    if not _is_valid_for_dict_to_object_conversion(strict, None, target):
        return False, None
    return super(MultifileObjectParser, self).is_able_to_parse_detailed(
        desired_type, desired_ext, strict)
Explicitly declare that we are not able to parse collections
54,542
def parsyfiles_global_config(multiple_errors_tb_limit: int = None,
                             full_paths_in_logs: bool = None,
                             dict_to_object_subclass_limit: int = None):
    """Configure the parsyfiles library: any option left as None is untouched."""
    updates = {
        'multiple_errors_tb_limit': multiple_errors_tb_limit,
        'full_paths_in_logs': full_paths_in_logs,
        'dict_to_object_subclass_limit': dict_to_object_subclass_limit,
    }
    for option, value in updates.items():
        if value is not None:
            setattr(GLOBAL_CONFIG, option, value)
This is the method you should use to configure the parsyfiles library
54,543
def is_valid(self, context):
    """Check through the context's executed actions that every required
    action has run; raise RequirementMissingError otherwise.
    """
    for requirement in (self.requires or []):
        if requirement not in context.executed_actions:
            raise RequirementMissingError(
                "Action '%s' requires '%s'" % (self.name, requirement))
    return True
Checks through the previous_actions iterable if required actions have been executed
54,544
def get_file_contents(file_path):
    """Return the text contents of ``file_path``, resolved relative to the
    package directory.

    Fix: ``open(...).read()`` leaked the file handle — use a context manager.
    """
    full_path = os.path.join(package_dir, file_path)
    with open(full_path, 'r') as handle:
        return handle.read()
Get the context of the file using full path name
54,545
def refresh(self):
    """Re-fetch this device's state from Lupusec, apply it via update(),
    and return the raw payload (None when the device id is not found).
    """
    if self.type in CONST.BINARY_SENSOR_TYPES:
        for sensor in self._lupusec.get_sensors():
            if sensor['device_id'] == self._device_id:
                self.update(sensor)
                return sensor
    elif self.type == CONST.ALARM_TYPE:
        panel = self._lupusec.get_panel()
        self.update(panel)
        return panel
    elif self.type == CONST.TYPE_POWER_SWITCH:
        for switch in self._lupusec.get_power_switches():
            if switch['device_id'] == self._device_id:
                self.update(switch)
                return switch
Refresh a device
54,546
def desc(self):
    """Return a short human-readable description of the device."""
    template = '{0} (ID: {1}) - {2} - {3}'
    return template.format(self.name, self.device_id, self.type, self.status)
Get a short description of the device .
54,547
def list(declared, undeclared):
    """List configured queues, optionally restricted to (un)declared ones,
    printing sorted routing keys.
    """
    queues = current_queues.queues.values()
    if declared:
        queues = [q for q in queues if q.exists]
    elif undeclared:
        queues = [q for q in queues if not q.exists]
    for routing_key in sorted(q.routing_key for q in queues):
        click.secho(routing_key)
List configured queues .
54,548
def declare(queues):
    """Initialize the given queues (all configured ones when empty)."""
    current_queues.declare(queues=queues)
    declared = queues or current_queues.queues.keys()
    click.secho('Queues {} have been declared.'.format(declared), fg='green')
Initialize the given queues .
54,549
def purge_queues(queues=None):
    """Purge the given queues (all configured ones when None/empty)."""
    current_queues.purge(queues=queues)
    targets = queues or current_queues.queues.keys()
    click.secho('Queues {} have been purged.'.format(targets), fg='green')
Purge the given queues .
54,550
def delete_queue(queues):
    """Delete the given queues (all configured ones when empty)."""
    current_queues.delete(queues=queues)
    targets = queues or current_queues.queues.keys()
    click.secho('Queues {} have been deleted.'.format(targets), fg='green')
Delete the given queues .
54,551
def find_needed_formatter(input_format, output_format):
    """Find a registered formatter class converting input_format to
    output_format; returns the first match or None.
    """
    formatter_classes = [entry.cls for entry in registry
                         if entry.category == RegistryCategories.formatters]
    matches = []
    for formatter_cls in formatter_classes:
        instance = formatter_cls()
        if (input_format in instance.input_formats
                and output_format in instance.output_formats):
            matches.append(formatter_cls)
    return matches[0] if matches else None
Find a data formatter given an input and output format input_format - needed input format . see utils . input . dataformats output_format - needed output format . see utils . input . dataformats
54,552
def find_needed_input(input_format):
    """Find a registered input class handling the given input format, or None."""
    candidates = [entry.cls for entry in registry
                  if entry.category == RegistryCategories.inputs
                  and entry.cls.input_format == input_format]
    return candidates[0] if candidates else None
Find a needed input class input_format - needed input format see utils . input . dataformats
54,553
def exists_in_registry(category, namespace, name):
    """Check whether a category/namespace/name combination is registered."""
    return any(
        entry.category == category
        and entry.namespace == namespace
        and entry.name == name
        for entry in registry
    )
See if a given category namespace name combination exists in the registry category - See registrycategories . Type of module namespace - Namespace of the module defined in settings name - the lowercase name of the module
54,554
def register(cls):
    """Register the given model class in the global registry, warning when an
    equivalent entry already exists.
    """
    entry = RegistryEntry(category=cls.category, namespace=cls.namespace,
                          name=cls.name, cls=cls)
    already_present = entry in registry or exists_in_registry(
        cls.category, cls.namespace, cls.name)
    if already_present:
        log.warn("Class {0} already in registry".format(cls))
    else:
        registry.append(entry)
Register a given model in the registry
54,555
def _set_fields ( self ) : self . fields = [ ] self . required_input = [ ] for member_name , member_object in inspect . getmembers ( self . __class__ ) : if inspect . isdatadescriptor ( member_object ) and not member_name . startswith ( "__" ) : self . fields . append ( member_name ) if member_object . required_input : self . required_input . append ( member_name )
Initialize the fields for data caching .
54,556
def subscriber(address, topics, callback, message_type):
    """Create a Subscriber bound to the given address for the given topics.

    The callback is invoked for every message received.
    """
    return Subscriber(address, topics, callback, message_type)
Creates a subscriber binding to the given address and subscribe the given topics . The callback is invoked for every message received .
54,557
def start(self):
    """Start a thread that consumes the messages and invokes the callback."""
    worker = threading.Thread(target=self._consume)
    worker.start()
Start a thread that consumes the messages and invokes the callback
54,558
def get_forecast_api(self, longitude: str, latitude: str) -> {}:
    """Fetch and decode forecast JSON from the API for the given coordinates."""
    api_url = APIURL_TEMPLATE.format(longitude, latitude)
    response = urlopen(api_url)
    payload = response.read().decode('utf-8')
    return json.loads(payload)
gets data from API
54,559
async def async_get_forecast_api(self, longitude: str, latitude: str) -> {}:
    """Fetch forecast JSON from the API asynchronously, lazily creating the
    aiohttp session; raises SmhiForecastException on non-200 responses.
    """
    api_url = APIURL_TEMPLATE.format(longitude, latitude)
    if self.session is None:
        self.session = aiohttp.ClientSession()
    async with self.session.get(api_url) as response:
        if response.status != 200:
            raise SmhiForecastException(
                "Failed to access weather API with status code {}".format(
                    response.status))
        body = await response.text()
    return json.loads(body)
Gets data from the API asynchronously.
54,560
def all(iterable=None, *, name=None, metric=call_default):
    """Measure total time and item count for consuming an iterable.

    With no iterable, acts as a decorator factory instead.
    """
    if iterable is None:
        return _iter_decorator(name, metric)
    return _do_all(iterable, name, metric)
Measure total time and item count for consuming an iterable
54,561
def each(iterable=None, *, name=None, metric=call_default):
    """Measure the time elapsed to produce each item of an iterable.

    With no iterable, acts as a decorator factory instead.
    """
    if iterable is None:
        return _each_decorator(name, metric)
    return _do_each(iterable, name, metric)
Measure time elapsed to produce each item of an iterable
54,562
def first(iterable=None, *, name=None, metric=call_default):
    """Measure the time elapsed to produce the first item of an iterable.

    With no iterable, acts as a decorator factory instead.
    """
    if iterable is None:
        return _first_decorator(name, metric)
    return _do_first(iterable, name, metric)
Measure time elapsed to produce first item of an iterable
54,563
def reducer(*, name=None, metric=call_default):
    """Decorator to measure a function that consumes many items.

    Reports (metric_name, consumed item count, elapsed seconds) after every
    call; works on plain functions and on methods (via the descriptor
    protocol) and adapts *args-style consumers to iterable-style ones.
    """
    class instrument_reducer_decorator(object):
        def __init__(self, func):
            self.orig_func = func
            self.wrapping = wraps(func)
            self.metric_name = name if name is not None else func.__module__ + '.' + func.__name__
            # NOTE(review): inspect.getargspec was removed in py3.11 —
            # confirm target version or migrate to getfullargspec.
            self.varargs = inspect.getargspec(func).varargs is not None
            if self.varargs:
                # Adapt a *args consumer so it can be fed a counted iterable.
                self.method = _varargs_to_iterable_method(func)
                self.func = _varargs_to_iterable_func(func)
                self.callme = _iterable_to_varargs_func(self._call)
            else:
                self.method = func
                self.func = func
                self.callme = self._call

        def __call__(self, *args, **kwargs):
            return self.callme(*args, **kwargs)

        def _call(self, iterable, **kwargs):
            # Wrap the iterable so consumed items are counted as they flow.
            it = counted_iterable(iterable)
            t = time.time()
            try:
                return self.func(it, **kwargs)
            finally:
                metric(self.metric_name, it.count, time.time() - t)

        def __get__(self, instance, class_):
            # Descriptor hook: bind to the instance and derive the metric
            # name from the owning class when no explicit name was given.
            metric_name = name if name is not None else ".".join((class_.__module__, class_.__name__, self.orig_func.__name__))
            def wrapped_method(iterable, **kwargs):
                it = counted_iterable(iterable)
                t = time.time()
                try:
                    return self.method(instance, it, **kwargs)
                finally:
                    metric(metric_name, it.count, time.time() - t)
            if self.varargs:
                wrapped_method = _iterable_to_varargs_func(wrapped_method)
            wrapped_method = self.wrapping(wrapped_method)
            return wrapped_method
    return instrument_reducer_decorator
Decorator to measure a function that consumes many items .
54,564
def producer(*, name=None, metric=call_default):
    """Decorator to measure a function that produces many items.

    Reports (metric_name, len(result), elapsed seconds) per call; a count of
    0 is reported when the call raises. Works on plain functions and, via
    the descriptor protocol, on methods.
    """
    def wrapper(func):
        def instrumenter(name_, *args, **kwargs):
            t = time.time()
            try:
                ret = func(*args, **kwargs)
            except Exception:
                # Still record the elapsed time on failure, with a 0 count.
                metric(name_, 0, time.time() - t)
                raise
            else:
                metric(name_, len(ret), time.time() - t)
                return ret

        name_ = name if name is not None else func.__module__ + '.' + func.__name__

        class instrument_decorator(object):
            @wraps(func)
            def __call__(self, *args, **kwargs):
                return instrumenter(name_, *args, **kwargs)

            def __get__(self, instance, class_):
                # Shadows the outer name_ on purpose: method access derives
                # the metric name from the owning class.
                name_ = name if name is not None else ".".join((class_.__module__, class_.__name__, func.__name__))
                @wraps(func)
                def wrapped_method(*args, **kwargs):
                    return instrumenter(name_, instance, *args, **kwargs)
                return wrapped_method
        return instrument_decorator()
    return wrapper
Decorator to measure a function that produces many items .
54,565
def block(*, name=None, metric=call_default, count=1):
    """Context manager measuring the execution time of a with-block, reporting
    (name, count, elapsed seconds) even when the body raises.
    """
    start = time.time()
    try:
        yield
    finally:
        metric(name, count, time.time() - start)
Context manager to measure execution time of a block
54,566
def __get_package_manager(self):
    """Detect the platform package manager and record it on ``self``.

    Sets ``self.package_manager``, ``self.sudo_required`` and
    ``self.args``.  When the detected manager binary is not installed,
    ``self.package_manager`` is set to None so callers can skip
    installation.
    """
    package_manager = ""
    args = ""
    sudo_required = True
    if system.is_osx():
        package_manager = "brew"
        sudo_required = False
        args = " install"
    elif system.is_debian():
        package_manager = "apt-get"
        args = " -y install"
    elif system.is_fedora():
        package_manager = "yum"
        args = " install"
    elif system.is_arch():
        package_manager = "pacman"
        args = " --noconfirm -S"
    if lib.which(package_manager) is None:
        self.logger.warn(
            "Package manager %s not installed! Packages will not be installed."
            % package_manager)
        self.package_manager = None
        # BUGFIX: without this return, the None assignment above was
        # immediately overwritten below, so a missing package manager was
        # still handed to callers.
        return
    self.package_manager = package_manager
    self.sudo_required = sudo_required
    self.args = args
Installs and verifies package manager
54,567
def parse(self, data, doctype):
    """Parse an input string and return an AST.

    ``doctype`` selects the semantic checks (``self.sema[doctype]``) run
    over the resulting tree; per the original contract it must have
    WCADocument as a baseclass.

    :returns: tuple ``(ast, errors, warnings)`` where ``ast`` is None when
        lexing or parsing failed, and the other two are copies of the
        accumulated message lists.
    """
    self.doctype = doctype
    # Reset per-parse state so repeated calls don't accumulate messages.
    self.lexer.lineno = 0
    del self.errors[:]
    del self.warnings[:]
    self.lexer.lexerror = False
    ast = self.parser.parse(data, lexer=self.lexer)
    if self.lexer.lexerror:
        # A lexer error invalidates whatever tree was produced.
        ast = None
    if ast is None:
        self.errors.append("Couldn't build AST.")
    else:
        # Run every registered semantic visitor for this document type.
        for check in self.sema[self.doctype]:
            visitor = check()
            if not visitor.visit(ast):
                self.errors.append("Couldn't visit AST.")
            self.errors.extend(visitor.errors)
            self.warnings.extend(visitor.warnings)
    return (ast, list(self.errors), list(self.warnings))
Parse an input string and return an AST doctype must have WCADocument as a baseclass
54,568
def p_error(self, elem):
    """Record a syntax error for the unexpected token ``elem``."""
    message = "".join([
        "Syntax error on line ",
        str(self.lexer.lineno),
        ". Got unexpected token ",
        elem.type,
    ])
    self.errors.append(message)
Handle syntax error
54,569
def set_progress_brackets(self, start, end):
    """Set the bracket strings drawn around a progress bar."""
    self.sep_start, self.sep_end = start, end
Set brackets to set around a progress bar .
54,570
def format_progress(self, width):
    """Create the formatted string that displays the progress.

    Each chunk renders itself at the width assigned by
    ``_get_chunk_sizes``; the result is wrapped in the configured
    separators.
    """
    rendered = []
    for chunk, chunk_w in zip(self._progress_chunks, self._get_chunk_sizes(width)):
        rendered.append(chunk.format_chunk(chunk_w))
    return self.sep_start + "".join(rendered) + self.sep_end
Create the formatted string that displays the progress .
54,571
def summary_width(self):
    """Calculate how long a string is needed to show a summary string.

    Each chunk count needs ``ceil(log10(count + 1))`` digits (at least
    one), plus one separator between consecutive counts.
    """
    digits = 0
    n_chunks = 0
    for chunk in self._progress_chunks:
        digits += max(1, ceil(log10(chunk.count + 1)))
        n_chunks += 1
    return digits + (n_chunks - 1)
Calculate how long a string is needed to show a summary string .
54,572
def format_summary(self):
    """Generate a summary string for the progress bar."""
    return "/".join(chunk.format_chunk_summary()
                    for chunk in self._progress_chunks)
Generate a summary string for the progress bar .
54,573
def format_status(self, width=None, label_width=None,
                  progress_width=None, summary_width=None):
    """Generate the formatted status bar string.

    Any width left as None is derived: the total from the terminal size,
    the label/summary widths from their natural sizes, and the progress
    field from the remaining space (minus the two separator spaces).
    """
    if width is None:
        width = shutil.get_terminal_size()[0]
    if label_width is None:
        label_width = len(self.label)
    if summary_width is None:
        summary_width = self.summary_width()
    if progress_width is None:
        progress_width = width - label_width - summary_width - 2

    # Truncate an over-long label with an ellipsis; otherwise pad it to
    # the field width with the configured fill character.
    if len(self.label) > label_width:
        label = self.label[:label_width - 3] + "..."
    else:
        pad_format = "{{label:{fill_char}<{width}}}".format(
            width=label_width, fill_char=self.fill_char)
        label = pad_format.format(label=self.label)

    summary = "{{:>{width}}}".format(width=summary_width).format(
        self._progress.format_summary())
    progress = self._progress.format_progress(width=progress_width)
    return "{label} {progress} {summary}".format(
        label=label, progress=progress, summary=summary)
Generate the formatted status bar string .
54,574
def add_status_line(self, label):
    """Add a status bar line to the table and return it.

    The new StatusBar inherits this table's separators and fill char.
    """
    line = StatusBar(label, self._sep_start, self._sep_end, self._fill_char)
    self._lines.append(line)
    return line
Add a status bar line to the table .
54,575
def calculate_field_widths(self, width=None, min_label_width=10,
                           min_progress_width=10):
    """Calculate how wide each field should be so we can align them.

    Returns ``(label_width, progress_width, summary_width)``.  The
    summary width is fixed by the data; the progress bar takes whatever
    is left after the natural label width, each clamped to its minimum
    (so the total may exceed *width* in very narrow terminals).
    """
    if width is None:
        width = shutil.get_terminal_size()[0]
    summary_width = self.summary_width()
    natural_label = self.label_width()
    # Two separator spaces sit between the three fields.
    progress_width = max(width - summary_width - natural_label - 2,
                         min_progress_width)
    label_width = max(width - summary_width - progress_width - 2,
                      min_label_width)
    return (label_width, progress_width, summary_width)
Calculate how wide each field should be so we can align them .
54,576
def format_table(self, width=None, min_label_width=10, min_progress_width=10):
    """Format the entire table of progress bars.

    Returns a list of formatted lines; empty when there are no bars.
    """
    if not self._lines:
        return []
    if width is None:
        width = shutil.get_terminal_size()[0]
    label_w, progress_w, summary_w = self.calculate_field_widths(
        width=width,
        min_label_width=min_label_width,
        min_progress_width=min_progress_width,
    )
    return [
        bar.format_status(
            label_width=label_w,
            progress_width=progress_w,
            summary_width=summary_w,
        )
        for bar in self._lines
    ]
Format the entire table of progress bars .
54,577
def create_log_dict(request, response):
    """Create a dictionary with logging data for a request/response pair.

    ``request_time`` is initialised to -1 and is expected to be filled in
    later by the middleware.
    """
    remote_addr = request.META.get('REMOTE_ADDR')
    if remote_addr in getattr(settings, 'INTERNAL_IPS', []):
        # Behind a trusted proxy: prefer the forwarded-for address.
        remote_addr = request.META.get('HTTP_X_FORWARDED_FOR') or remote_addr

    if hasattr(request, 'user'):
        user_email = getattr(request.user, 'email', '-')
    else:
        user_email = "-"

    # Streaming responses have no materialised body to measure.
    content_length = 'streaming' if response.streaming else len(response.content)

    return {
        'event': settings.LOGUTILS_LOGGING_MIDDLEWARE_EVENT,
        'remote_address': remote_addr,
        'user_email': user_email,
        'method': request.method,
        'url': request.get_full_path(),
        'status': response.status_code,
        'content_length': content_length,
        'request_time': -1,
    }
Create a dictionary with logging data .
54,578
def create_log_message(log_dict, use_sql_info=False, fmt=True):
    """Create the logging message string.

    When ``fmt`` is False the raw %-template is returned instead of the
    interpolated message.  ``use_sql_info`` appends (Django) SQL query
    statistics and mutates *log_dict* with them.
    """
    template = (
        "%(remote_address)s %(user_email)s %(method)s %(url)s %(status)d "
        "%(content_length)d (%(request_time).2f seconds)"
    )
    if use_sql_info:
        # Query times are reported in seconds; convert to milliseconds.
        sql_time = sum(float(q['time']) for q in connection.queries) * 1000
        log_dict.update({
            'nr_queries': len(connection.queries),
            'sql_time': sql_time,
        })
        template += " (%(nr_queries)d SQL queries, %(sql_time)f ms)"
    return template % log_dict if fmt else template
Create the logging message string .
54,579
def process_response(self, request, response):
    """Log the request/response, flagging slow requests.

    Requests slower than ``settings.LOGUTILS_REQUEST_TIME_THRESHOLD``
    (seconds) are logged at WARNING level with SQL query info; everything
    else at INFO.  Any logging failure is swallowed (and logged via
    ``logger.exception``) so it can never break the response cycle.
    """
    try:
        log_dict = create_log_dict(request, response)
        # self.start_time is set by process_request; fall back to -1 when
        # it is missing (e.g. the middleware was short-circuited).
        request_time = (
            time.time() - self.start_time
            if hasattr(self, 'start_time') and self.start_time
            else -1)
        log_dict.update({'request_time': request_time})
        is_request_time_too_high = (
            request_time > float(settings.LOGUTILS_REQUEST_TIME_THRESHOLD))
        # SQL details are included in DEBUG mode or for slow requests.
        use_sql_info = settings.DEBUG or is_request_time_too_high
        log_msg = create_log_message(log_dict, use_sql_info, fmt=False)
        if is_request_time_too_high:
            logger.warning(log_msg, log_dict, extra=log_dict)
        else:
            logger.info(log_msg, log_dict, extra=log_dict)
    except Exception as e:
        logger.exception(e)
    return response
Create the logging message.
54,580
def as_completed(jobs):
    """Generator that yields the jobs in order of their completion.

    Attaches a one-off listener to each job's SUCCESS and ERROR events,
    waits on a shared :class:`threading.Event`, and yields jobs as they
    finish.
    """
    jobs = tuple(jobs)
    event = threading.Event()

    def on_done(job, ev):
        event.set()

    # Plain loops for side effects: the original abused list
    # comprehensions here, building throwaway lists.
    for job in jobs:
        job.add_listener(Job.SUCCESS, on_done, once=True)
        job.add_listener(Job.ERROR, on_done, once=True)

    while jobs:
        event.wait()
        event.clear()
        jobs, finished = split_list_by(jobs, lambda x: x.finished)
        for job in finished:
            yield job
Generator function that yields the jobs in order of their completion . Attaches a new listener to each job .
54,581
def reraise(tpe, value, tb=None):
    """Reraise an exception from an exception info tuple.

    Works on both Python 2 and 3: on Py3 the traceback is attached with
    ``with_traceback``; on Py2 the three-argument ``raise`` statement is
    hidden inside ``exec`` so this module still parses under Py3.
    """
    Py3 = (sys.version_info[0] == 3)
    if value is None:
        # Allow calling with just an exception type.
        value = tpe()
    if Py3:
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value
    else:
        # Py2-only syntax; must stay inside exec() to keep the file
        # importable on Py3.
        exec('raise tpe, value, tb')
Reraise an exception from an exception info tuple .
54,582
def finished(self):
    """True once the job ran to an end state or was cancelled.

    There is no distinction here between success, error and cancellation.
    """
    done_states = (Job.ERROR, Job.SUCCESS, Job.CANCELLED)
    return self.__state in done_states
True if the job run and finished . There is no difference if the job finished successfully or errored .
54,583
def _trigger_event(self, event):
    """Private. Trigger *event* and remove all one-off listeners for it.

    Raises ValueError for an unknown event type and RuntimeError if the
    event has already been triggered once.
    """
    if event is None or event not in self.__listeners:
        raise ValueError('invalid event type: {0!r}'.format(event))
    if event in self.__event_set:
        raise RuntimeError('event already triggered: {0!r}'.format(event))
    self.__event_set.add(event)
    # Snapshot the listeners (event-specific plus catch-all under the
    # None key) BEFORE pruning, so every currently-registered callback
    # still fires exactly once even if it was registered once=True.
    listeners = self.__listeners[event] + self.__listeners[None]
    self.__listeners[event][:] = (l for l in self.__listeners[event] if not l.once)
    self.__listeners[None][:] = (l for l in self.__listeners[None] if not l.once)
    for listener in listeners:
        listener.callback(self, event)
Private . Triggers and event and removes all one - off listeners for that event .
54,584
def wait(self, timeout=None):
    """Wait for the job to finish and return its result.

    Raises ``Job.Timeout`` when the job did not finish within *timeout*.
    """
    def job_done(job):
        return (job.__state not in (Job.PENDING, Job.RUNNING)
                or job.__cancelled)

    if not wait_for_condition(self, job_done, timeout):
        raise Job.Timeout
    return self.result
Waits for the job to finish and returns the result .
54,585
def factory(start_immediately=True):
    """Decorator that creates new Jobs with the wrapped function as target.

    The wrapped function receives the Job as its first argument; the Job
    is started immediately unless *start_immediately* is False.
    """
    def decorator(func):
        def wrapper(*args, **kwargs):
            task = lambda j: func(j, *args, **kwargs)
            job = Job(task=task)
            if start_immediately:
                job.start()
            return job
        return wrapper
    return decorator
This is a decorator function that creates new Job s with the wrapped function as the target .
54,586
def wait(self, timeout=None):
    """Block until all jobs in the ThreadPool are finished.

    Beware: this can deadlock if another thread keeps adding new jobs to
    the pool.  Raises RuntimeError when the pool is not running.
    """
    running = self.__running
    if not running:
        raise RuntimeError("ThreadPool ain't running")
    self.__queue.wait(timeout)
Block until all jobs in the ThreadPool are finished . Beware that this can make the program run into a deadlock if another thread adds new jobs to the pool!
54,587
def shutdown(self, wait=True):
    """Shut down the ThreadPool.

    Pushes one ``None`` sentinel per worker onto the queue so each
    thread exits its loop; when *wait* is True, blocks until the queue
    is drained and all workers have joined.
    """
    if self.__running:
        for thread in self.__threads:
            # BUGFIX: Thread.isAlive() was removed in Python 3.9;
            # is_alive() is the supported spelling.
            assert thread.is_alive()
            self.__queue.append(None)
        self.__running = False
    if wait:
        self.__queue.wait()
        for thread in self.__threads:
            thread.join()
Shut down the ThreadPool .
54,588
def new_event_type(self, name, mergeable=False):
    """Declare a new event type; overwrites any existing entry."""
    entry = self.EventType(name, mergeable)
    self.event_types[name] = entry
Declare a new event . May overwrite an existing entry .
54,589
def pop_event(self):
    """Pop and return the next queued event.

    Raises ValueError when the queue is empty; guarded by ``self.lock``.
    """
    with self.lock:
        if self.events:
            return self.events.popleft()
        raise ValueError('no events queued')
Pop the next queued event from the queue .
54,590
def logger(message, level=10):
    """Handle logging: emit *message* at *level* (default 10 = DEBUG)."""
    log = logging.getLogger(__name__)
    log.log(level, str(message))
Handle logging .
54,591
async def get_data(self):
    """Get Tautulli data: refresh sessions, home stats, users, userdata."""
    try:
        await self.get_session_data()
        await self.get_home_data()
        await self.get_users()
        await self.get_user_data()
    except (asyncio.TimeoutError, aiohttp.ClientError, socket.gaierror):
        # Best-effort refresh: network problems are logged, not raised.
        msg = "Can not load data from Tautulli."
        logger(msg, 40)
Get Tautulli data .
54,592
async def get_session_data(self):
    """Get Tautulli sessions via the ``get_activity`` API command.

    Stores the decoded JSON payload on ``self.tautulli_session_data``;
    errors are logged at ERROR level (40) instead of raised.
    """
    cmd = 'get_activity'
    url = self.base_url + cmd
    try:
        async with async_timeout.timeout(8, loop=self._loop):
            response = await self._session.get(url)
            logger("Status from Tautulli: " + str(response.status))
            self.tautulli_session_data = await response.json()
            logger(self.tautulli_session_data)
    except (asyncio.TimeoutError, aiohttp.ClientError, socket.gaierror,
            AttributeError) as error:
        msg = "Can not load data from Tautulli: {} - {}".format(url, error)
        logger(msg, 40)
Get Tautulli sessions .
54,593
async def get_home_data(self):
    """Get Tautulli home stats (top movie, top TV show, top user).

    Parses the ``get_home_stats`` response into
    ``self.tautulli_home_data`` as a dict with keys 'movie', 'tv' and
    'user' (None when a stat has no rows).  Errors are logged at ERROR
    level (40) instead of raised.
    """
    cmd = 'get_home_stats'
    url = self.base_url + cmd
    data = {}
    try:
        async with async_timeout.timeout(8, loop=self._loop):
            request = await self._session.get(url)
            response = await request.json()
            for stat in response.get('response', {}).get('data', {}):
                if stat.get('stat_id') == 'top_movies':
                    try:
                        row = stat.get('rows', {})[0]
                        data['movie'] = row.get('title')
                    except (IndexError, KeyError):
                        # No rows reported for this stat yet.
                        data['movie'] = None
                if stat.get('stat_id') == 'top_tv':
                    try:
                        row = stat.get('rows', {})[0]
                        data['tv'] = row.get('title')
                    except (IndexError, KeyError):
                        data['tv'] = None
                if stat.get('stat_id') == 'top_users':
                    try:
                        row = stat.get('rows', {})[0]
                        data['user'] = row.get('user')
                    except (IndexError, KeyError):
                        data['user'] = None
            logger("Status from Tautulli: " + str(request.status))
            self.tautulli_home_data = data
            logger(self.tautulli_home_data)
    except (asyncio.TimeoutError, aiohttp.ClientError, socket.gaierror,
            AttributeError) as error:
        msg = "Can not load data from Tautulli: {} - {}".format(url, error)
        logger(msg, 40)
Get Tautulli home stats .
54,594
async def get_users(self):
    """Get Tautulli users via the ``get_users`` API command.

    Stores the usernames (excluding the built-in 'Local' pseudo-user) on
    ``self.tautulli_users``; errors are logged at ERROR level (40).
    """
    cmd = 'get_users'
    url = self.base_url + cmd
    users = []
    try:
        async with async_timeout.timeout(8, loop=self._loop):
            response = await self._session.get(url)
            logger("Status from Tautulli: " + str(response.status))
            all_user_data = await response.json()
            for user in all_user_data['response']['data']:
                # Skip the built-in 'Local' account.
                if user['username'] != 'Local':
                    users.append(user['username'])
            self.tautulli_users = users
            logger(self.tautulli_users)
    except (asyncio.TimeoutError, aiohttp.ClientError, socket.gaierror,
            AttributeError) as error:
        msg = "Can not load data from Tautulli: {} - {}".format(url, error)
        logger(msg, 40)
Get Tautulli users .
54,595
async def get_user_data(self):
    """Get Tautulli userdata for each known user.

    Builds ``self.tautulli_user_data`` mapping each username to a dict
    of its active session attributes, plus an 'Activity' key holding the
    session state (None when the user has no active session).
    """
    userdata = {}
    sessions = self.session_data.get('sessions', {})
    try:
        async with async_timeout.timeout(8, loop=self._loop):
            for username in self.tautulli_users:
                userdata[username] = {}
                userdata[username]['Activity'] = None
                for session in sessions:
                    if session['username'].lower() == username.lower():
                        userdata[username]['Activity'] = session['state']
                        for key in session:
                            # BUGFIX: the session keys are lowercase (see
                            # session['username'] above); the original
                            # compared against 'Username', which never
                            # matched, so the username key was copied
                            # redundantly into every user's data.
                            if key != 'username':
                                userdata[username][key] = session[key]
                        break
            self.tautulli_user_data = userdata
    except (asyncio.TimeoutError, aiohttp.ClientError, KeyError):
        msg = "Can not load data from Tautulli."
        logger(msg, 40)
Get Tautulli userdata .
54,596
def find_classes_in_module(module, clstypes):
    """Find strict subclasses of any of *clstypes* defined in *module*.

    Returns the matching attributes of *module* in ``dir()`` order; the
    base classes themselves are excluded.  A class subclassing several
    entries of *clstypes* appears once per matching base.
    """
    classes = []
    for attr_name in dir(module):
        attr = getattr(module, attr_name)
        for cls in clstypes:
            try:
                if issubclass(attr, cls) and attr != cls:
                    classes.append(attr)
            except TypeError:
                # attr is not a class: ignore it explicitly instead of
                # silently swallowing every exception as before.
                pass
    return classes
Find classes of clstypes in module
54,597
def remove_yaml_frontmatter(source, return_frontmatter=False):
    """If there's one, remove the YAML front-matter from the source.

    Front-matter is a leading ``---`` line terminated by another ``---``
    line.  When *return_frontmatter* is True a ``(source, frontmatter)``
    tuple is returned, with ``frontmatter`` None if there was none.
    """
    if not source.startswith("---\n"):
        return (source, None) if return_frontmatter else source

    end = source.find("\n---\n", 4)
    if end == -1:
        # Unterminated front-matter: the whole document is front-matter.
        frontmatter, body = source, ""
    else:
        frontmatter = source[:end]
        body = source[end + 5:]
    return (body, frontmatter) if return_frontmatter else body
If there's one, remove the YAML front-matter from the source.
54,598
def populate_obj(obj, attrs):
    """Populate *obj*'s attributes from the *attrs* mapping.

    Each key/value pair becomes ``setattr(obj, key, value)``.
    """
    # BUGFIX: dict.iteritems() is Python 2 only and raises AttributeError
    # on Python 3; items() works on both.
    for key, value in attrs.items():
        setattr(obj, key, value)
Populates an object's attributes using the provided dict.
54,599
def build_parser_for_fileobject_and_desiredtype(self, obj_on_filesystem: PersistedObject,
                                                object_type: Type[T],
                                                logger: Logger = None) -> Parser:
    """Return the most appropriate parser to parse *obj_on_filesystem*
    as an object of type *object_type*.

    Abstract hook for subclasses: this base version is a stub and
    (implicitly) returns None.
    """
    pass
Returns the most appropriate parser to use to parse object obj_on_filesystem as an object of type object_type