idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
57,500
def post(self, ddata, url=SETUP_ENDPOINT, referer=SETUP_ENDPOINT):
    """Submit a POST request and refresh attributes on success.

    Injects the parent session's CSRF token into the payload when the
    caller did not provide one. Passing ``referer=None`` removes the
    Referer header entirely instead of overriding it.
    """
    headers = HEADERS.copy()
    if referer is None:
        headers.pop('Referer')
    else:
        headers['Referer'] = referer
    # Note: ddata is deliberately mutated in place, matching callers'
    # expectations about the token being added.
    if 'csrfmiddlewaretoken' not in ddata:
        ddata['csrfmiddlewaretoken'] = self._parent.csrftoken
    response = self._parent.client.post(url, headers=headers, data=ddata)
    if response.status_code == 200:
        self.update()
Method to update some attributes on namespace .
57,501
def _get_cu_and_fu_status(self):
    """Submit a GET request to refresh controller/faucet status.

    On HTTP 403 the session is re-authenticated and the update retried;
    any other non-200 status raises via ``raise_for_status``.
    """
    headers = HEADERS.copy()
    headers.update({
        'Accept': '*/*',
        'X-Requested-With': 'XMLHttpRequest',
        'X-CSRFToken': self._parent.csrftoken,
    })
    query = ('?controller_serial=' + self.serial
             + '&faucet_serial=' + self.faucet.serial)
    response = self._parent.client.get(STATUS_ENDPOINT + query, headers=headers)
    if response.status_code == 403:
        # Session expired: log in again and retry the full update cycle.
        self._parent.login()
        self.update()
    elif response.status_code == 200:
        self.attributes = response.json()
    else:
        response.raise_for_status()
Submit GET request to update information .
57,502
def name(self, value):
    """Assign a new display name to the controller via the setup form."""
    form_data = {
        '_set_controller_name': 'Set Name',
        'controller_name': value,
    }
    self.post(form_data, url=SETUP_ENDPOINT, referer=SETUP_ENDPOINT)
Set a new name to controller .
57,503
def faucet(self):
    """Return the single faucet linked to this controller.

    Raises:
        AttributeError: when no faucet is assigned at all.
        TypeError: when more than one faucet is assigned.
    """
    if not hasattr(self, 'faucets'):
        raise AttributeError("There is no faucet assigned.")
    if len(self.faucets) > 1:
        raise TypeError("Only one faucet per account.")
    return self.faucets[0]
Show current linked faucet .
57,504
def serial_finder(data):
    """Extract controller and faucet serial numbers from the setup page.

    Args:
        data: parsed setup page as a BeautifulSoup object.

    Returns:
        dict with 'controller_serial' (str) and 'faucet_serial' (list of str).

    Raises:
        TypeError: if data is not a BeautifulSoup object.
        RainCloudyException: if the expected elements cannot be parsed.
    """
    if not isinstance(data, BeautifulSoup):
        raise TypeError("Function requires BeautifulSoup HTML element.")
    try:
        controller_sel = data.find_all('select', {'id': 'id_select_controller2'})
        faucet_sel = data.find_all('select', {'id': 'id_select_faucet2'})
        # Option text looks like "<name> - <serial>"; keep the serial part.
        return {
            'controller_serial': controller_sel[0].text.split('-')[1].strip(),
            'faucet_serial': [faucet_sel[0].text.split('-')[1].strip()],
        }
    except (AttributeError, IndexError, ValueError):
        raise RainCloudyException(
            'Could not find any valid controller or faucet')
Find controller serial and faucet_serial from the setup page .
57,505
def find_controller_or_faucet_name(data, p_type):
    """Find the controller or faucet name in the setup HTML document.

    Args:
        data: parsed page as a BeautifulSoup object.
        p_type: either 'controller' or 'faucet'.

    Returns:
        Stripped text of the matching <select> element, or None when missing.

    Raises:
        TypeError: on a non-BeautifulSoup argument or an invalid p_type.
    """
    if not isinstance(data, BeautifulSoup):
        # Bug fix: error message previously said "BeautilSoup".
        raise TypeError("Function requires BeautifulSoup HTML element.")
    if p_type not in ('controller', 'faucet'):
        raise TypeError("Function p_type must be controller or faucet")
    try:
        search_field = 'id_select_{0}'.format(p_type)
        child = data.find('select', {'id': search_field})
        return child.get_text().strip()
    except AttributeError:
        # data.find returned None -> no such element on the page.
        return None
Find the controller or faucet name (per ``p_type``) in the HTML document.
57,506
def find_zone_name(data, zone_id):
    """Find the name of zone ``zone_id`` in the HTML document.

    Scans the zone table's 'more_info' spans for one whose text starts
    with the zone id; returns the remainder stripped, or None.

    Raises:
        TypeError: if data is not a BeautifulSoup object.
    """
    if not isinstance(data, BeautifulSoup):
        # Bug fix: error message previously said "BeautilSoup".
        raise TypeError("Function requires BeautifulSoup HTML element.")
    table = data.find('table', {'class': 'zone_table'})
    table_body = table.find('tbody')
    rows = table_body.find_all('span', {'class': 'more_info'})
    for row in rows:
        if row.get_text().startswith(str(zone_id)):
            # Name starts at offset 4 of the span text — assumes a fixed
            # "<id> - " prefix; TODO confirm against the actual page markup.
            return row.get_text()[4:].strip()
    return None
Find on the HTML document the zone name .
57,507
def new_payment_query_listener(sender, order=None, payment=None, **kwargs):
    """Copy the two obligatory fields (amount, currency) from order to payment.

    Everything else is left for other signal handlers to populate.
    """
    payment.amount = order.total
    payment.currency = order.currency
    logger.debug(
        "new_payment_query_listener, amount=%s, currency=%s",
        payment.amount,
        payment.currency,
    )
Here we fill only the two obligatory fields of the payment (amount and currency) and leave the rest to other signal handlers.
57,508
def payment_status_changed_listener(sender, instance, old_status, new_status, **kwargs):
    """React to a payment transitioning into 'paid' by marking its order 'P'."""
    logger.debug(
        "payment_status_changed_listener, old=%s, new=%s",
        old_status,
        new_status,
    )
    became_paid = old_status != 'paid' and new_status == 'paid'
    if became_paid:
        instance.order.status = 'P'
        instance.order.save()
Here we actually do something when a payment is accepted, e.g. change the order status.
57,509
def register_to_payment(order_class, **kwargs):
    """Register an unaware order class with getpaid.

    Dynamically builds a ``Payment`` model with a ForeignKey to
    ``order_class`` and registers every backend's extra models.
    Returns the generated Payment class.
    """
    # Exposed at module level so backends/migrations can import them.
    global Payment
    global Order

    class Payment(PaymentFactory.construct(order=order_class, **kwargs)):
        objects = PaymentManager()

        class Meta:
            ordering = ('-created_on',)
            verbose_name = _("Payment")
            verbose_name_plural = _("Payments")

    Order = order_class
    # Let each configured backend contribute its own models bound to Payment.
    backend_models_modules = import_backend_modules('models')
    for backend_name, models_module in backend_models_modules.items():
        for model in models_module.build_models(Payment):
            apps.register_model(backend_name, model)
    return Payment
A function for registering unaware order class to getpaid . This will generate a Payment model class that will store payments with ForeignKey to original order class
57,510
def get_backend_choices(currency=None):
    """Return (module_name, display_name) pairs for active backend modules.

    When ``currency`` is given, only backends accepting that currency
    are included.
    """
    choices = []
    for backend_name in getattr(settings, 'GETPAID_BACKENDS', []):
        backend = import_module(backend_name)
        processor = backend.PaymentProcessor
        if currency and currency not in processor.BACKEND_ACCEPTED_CURRENCY:
            continue
        choices.append((backend_name, processor.BACKEND_NAME))
    return choices
Get active backends modules . Backend list can be filtered by supporting given currency .
57,511
def online(cls, payload, ip, req_sig):
    """Handle a payment-status notification from the payment service.

    Verifies the POS id and the request signature, looks the payment up
    by its external order id and updates status/amount accordingly.

    Returns:
        'OK' when the notification was processed, 'ERROR' otherwise.
    """
    from getpaid.models import Payment
    params = json.loads(payload)
    order_data = params.get('order', {})
    pos_id = order_data.get('merchantPosId')
    payment_id = order_data.get('extOrderId')
    key2 = cls.get_backend_setting('key2')
    if pos_id != cls.get_backend_setting('pos_id'):
        logger.warning('Received message for different pos: {}'.format(pos_id))
        return 'ERROR'
    # Signature check: recompute over the raw payload with key2.
    req_sig_dict = cls.parse_payu_sig(req_sig)
    sig = cls.compute_sig(payload, key2, algorithm=req_sig_dict.get('algorithm', 'md5'))
    if sig != req_sig_dict['signature']:
        logger.warning('Received message with malformed signature. Payload: {}'.format(payload))
        return 'ERROR'
    try:
        payment = Payment.objects.get(id=payment_id)
    except Payment.DoesNotExist:
        logger.warning('Received message for nonexistent payment: {}.\nPayload: {}'.format(payment_id, payload))
        return 'ERROR'
    status = order_data['status']
    if payment.status != 'paid':
        if status == 'COMPLETED':
            payment.external_id = order_data['orderId']
            # totalAmount is expressed in 1/100 of the currency unit.
            payment.amount = Decimal(order_data['totalAmount']) / Decimal(100)
            payment.amount_paid = payment.amount
            # Bug fix: was "payment.currenct", which set a bogus attribute
            # and left the real currency field untouched.
            payment.currency = order_data['currencyCode']
            payment.paid_on = pendulum.parse(params['localReceiptDateTime']).in_tz('utc')
            payment.description = order_data['description']
            payment.change_status('paid')
        elif status == 'PENDING':
            payment.change_status('in_progress')
        elif status in ['CANCELED', 'REJECTED']:
            payment.change_status('cancelled')
    return 'OK'
Receive and analyze request from payment service with information on payment status change .
57,512
def get_order_description(self, payment, order):
    """Render the order description.

    Uses the django template from settings.GETPAID_ORDER_DESCRIPTION
    when provided, otherwise the order's text representation.
    """
    template = getattr(settings, 'GETPAID_ORDER_DESCRIPTION', None)
    if not template:
        return six.text_type(order)
    context = Context({"payment": payment, "order": order})
    return Template(template).render(context)
Renders order description using django template provided in settings . GETPAID_ORDER_DESCRIPTION or if not provided return unicode representation of Order object .
57,513
def get_backend_setting(cls, name, default=None):
    """Read ``name`` from this backend's settings dictionary.

    With a non-None default the lookup never fails; without one a
    missing key raises ImproperlyConfigured.
    """
    backend_settings = get_backend_settings(cls.BACKEND)
    if default is not None:
        return backend_settings.get(name, default)
    try:
        return backend_settings[name]
    except KeyError:
        raise ImproperlyConfigured(
            "getpaid '%s' requires backend '%s' setting" % (cls.BACKEND, name))
Reads name setting from backend settings dictionary .
57,514
def get_gateway_url(self, request):
    """Route a payment to the Dotpay gateway.

    Returns a (url, method, params) triple: for POST the parameters stay
    a dict, for GET they are utf-8 encoded into the query string.
    """
    params = {
        'id': self.get_backend_setting('id'),
        'description': self.get_order_description(self.payment, self.payment.order),
        'amount': self.payment.amount,
        'currency': self.payment.currency,
        'type': 0,
        'control': self.payment.pk,
        'URL': self.get_URL(self.payment.pk),
        'URLC': self.get_URLC(),
        'api_version': 'dev',
    }
    # Give signal receivers a chance to supply the buyer's email/language.
    user_data = {
        'email': None,
        'lang': None,
    }
    signals.user_data_query.send(sender=None, order=self.payment.order, user_data=user_data)
    if user_data['email']:
        params['email'] = user_data['email']
    # Language priority: signal-provided value first, then backend setting.
    if user_data['lang'] and user_data['lang'].lower() in self._ACCEPTED_LANGS:
        params['lang'] = user_data['lang'].lower()
    elif self.get_backend_setting('lang', False) and self.get_backend_setting('lang').lower() in self._ACCEPTED_LANGS:
        params['lang'] = self.get_backend_setting('lang').lower()
    # Optional flags passed through only when configured.
    if self.get_backend_setting('onlinetransfer', False):
        params['onlinetransfer'] = 1
    if self.get_backend_setting('p_email', False):
        params['p_email'] = self.get_backend_setting('p_email')
    if self.get_backend_setting('p_info', False):
        params['p_info'] = self.get_backend_setting('p_info')
    if self.get_backend_setting('tax', False):
        params['tax'] = 1
    gateway_url = self.get_backend_setting('gateway_url', self._GATEWAY_URL)
    if self.get_backend_setting('method', 'get').lower() == 'post':
        return gateway_url, 'POST', params
    elif self.get_backend_setting('method', 'get').lower() == 'get':
        # GET: values must be utf-8 encoded before urlencoding.
        for key in params.keys():
            params[key] = six.text_type(params[key]).encode('utf-8')
        return gateway_url + '?' + urlencode(params), "GET", {}
    else:
        raise ImproperlyConfigured('Dotpay payment backend accepts only GET or POST')
Routes a payment to Gateway should return URL for redirection .
57,515
def channel_ready_future(channel):
    """Create a Future that resolves once ``channel`` reports READY."""
    fut = channel._loop.create_future()

    def _on_state_change(state):
        # Resolve exactly once, when connectivity reaches READY.
        if not fut.done() and state is _grpc.ChannelConnectivity.READY:
            fut.set_result(None)

    # Unsubscribe as soon as the future completes (or is cancelled).
    fut.add_done_callback(lambda _: channel.unsubscribe(_on_state_change))
    channel.subscribe(_on_state_change, try_to_connect=True)
    return fut
Creates a Future that tracks when a Channel is ready .
57,516
def insecure_channel(target, options=None, *, loop=None, executor=None,
                     standalone_pool_for_streaming=False):
    """Create an insecure asyncio-wrapped Channel to ``target``."""
    inner = _grpc.insecure_channel(target, options)
    return Channel(inner, loop, executor, standalone_pool_for_streaming)
Creates an insecure Channel to a server .
57,517
def secure_channel(target, credentials, options=None, *, loop=None, executor=None,
                   standalone_pool_for_streaming=False):
    """Create a secure asyncio-wrapped Channel to ``target``."""
    inner = _grpc.secure_channel(target, credentials, options)
    return Channel(inner, loop, executor, standalone_pool_for_streaming)
Creates a secure Channel to a server .
57,518
def future(self, request, timeout=None, metadata=None, credentials=None):
    """Asynchronously invoke the underlying RPC; return a wrapped future."""
    inner_future = self._inner.future(request, timeout, metadata, credentials)
    return _utils.wrap_future_call(inner_future, self._loop, self._executor)
Asynchronously invokes the underlying RPC .
57,519
async def with_call(self, request_iterator, timeout=None, metadata=None, credentials=None):
    """Invoke the underlying RPC and return (result, call).

    Cancels the in-flight call when awaiting it is interrupted.
    """
    call = self.future(request_iterator, timeout, metadata, credentials)
    try:
        response = await call
        return response, call
    finally:
        # Only still pending when the await raised or was cancelled.
        if not call.done():
            call.cancel()
Asynchronously invokes the underlying RPC on the client, returning both the result and the call object.
57,520
def future(self, request_iterator, timeout=None, metadata=None, credentials=None):
    """Asynchronously invoke the underlying streaming RPC on the client."""
    wrapped_requests = _utils.WrappedAsyncIterator(request_iterator, self._loop)
    inner_future = self._inner.future(wrapped_requests, timeout, metadata, credentials)
    return _utils.wrap_future_call(inner_future, self._loop, self._executor)
Asynchronously invokes the underlying RPC on the client .
57,521
def config_field_type(field, cls):
    """Build a ConfigField validating that ``field`` is an instance of ``cls``."""
    def _validator(candidate):
        return isinstance(candidate, cls)

    def _error_message():
        return CONFIG_FIELD_TYPE_ERROR.format(field, cls.__name__)

    return defs.ConfigField(_validator, _error_message)
Validate a config field against a type .
57,522
def get_config_parameters(plugin_path):
    """Return the parameters section from the plugin's config.json."""
    json_config_path = os.path.join(plugin_path, defs.CONFIG_FILE_NAME)
    with open(json_config_path, "r") as config_file:
        config = json.load(config_file)
    return config.get(defs.PARAMETERS, [])
Return the parameters section from config . json .
57,523
def validate_config_parameters(config_json, allowed_keys, allowed_types):
    """Validate every custom parameter declared in a plugin config.

    Each field is checked for allowed keys/types; a declared default is
    additionally checked against the field's declared type.
    """
    custom_fields = config_json.get(defs.PARAMETERS, [])
    for field in custom_fields:
        validate_field(field, allowed_keys, allowed_types)
        default = field.get(defs.DEFAULT)
        field_type = field.get(defs.TYPE)
        # Bug fix: explicit None check so falsy defaults (False, 0, "")
        # are still validated against their declared type.
        if default is not None:
            validate_field_matches_type(field[defs.VALUE], default, field_type)
Validate parameters in config file .
57,524
def validate_field_matches_type(field, value, field_type, select_items=None, _min=None, _max=None):
    """Validate ``value`` against ``field_type``, range and select choices.

    Raises:
        exceptions.ConfigFieldTypeMismatch: on any type, range or
            select-membership violation.
    """
    string_mismatch = (field_type in (defs.TEXT_TYPE, defs.STRING_TYPE)
                       and not isinstance(value, six.string_types))
    boolean_mismatch = (field_type == defs.BOOLEAN_TYPE
                        and not isinstance(value, bool))
    integer_mismatch = (field_type == defs.INTEGER_TYPE
                        and not isinstance(value, int))
    if string_mismatch or boolean_mismatch or integer_mismatch:
        raise exceptions.ConfigFieldTypeMismatch(field, value, field_type)
    if field_type == defs.INTEGER_TYPE:
        # Bug fix: compare against None so a bound of 0 is honoured
        # (previously `if _min and ...` skipped the check for _min == 0).
        if _min is not None and value < _min:
            raise exceptions.ConfigFieldTypeMismatch(field, value, "must be higher than {}".format(_min))
        if _max is not None and value > _max:
            raise exceptions.ConfigFieldTypeMismatch(field, value, "must be lower than {}".format(_max))
    if field_type == defs.SELECT_TYPE:
        # Imported lazily to avoid a circular import with plugin_utils.
        from honeycomb.utils.plugin_utils import get_select_items
        items = get_select_items(select_items)
        if value not in items:
            raise exceptions.ConfigFieldTypeMismatch(field, value, "one of: {}".format(", ".join(items)))
Validate a config field against a specific type .
57,525
def get_truetype(value):
    """Convert a string parameter to bool/int, falling back to str."""
    truthy = ("true", "True", "y", "Y", "yes")
    falsy = ("false", "False", "n", "N", "no")
    if value in truthy:
        return True
    if value in falsy:
        return False
    if value.isdigit():
        return int(value)
    return str(value)
Convert a string to a pythonized parameter .
57,526
def validate_field(field, allowed_keys, allowed_types):
    """Validate that a parameter field uses only allowed keys and values."""
    for key, value in field.items():
        if key not in allowed_keys:
            raise exceptions.ParametersFieldError(key, "property")
        if key == defs.TYPE and value not in allowed_types:
            raise exceptions.ParametersFieldError(value, key)
        if key == defs.VALUE and not is_valid_field_name(value):
            raise exceptions.ParametersFieldError(value, "field name")
Validate field is allowed and valid .
57,527
def is_valid_field_name(value):
    """Check that a field name is word chars/dashes only and starts safely."""
    # Strip word characters and dashes; anything remaining is illegal.
    leftovers = re.sub(r"\w", "", value)
    leftovers = leftovers.replace("-", "")
    bad_start = value[0].isdigit() or value[0] in ("-", "_")
    return not (leftovers or bad_start or " " in value)
Ensure field name is valid .
57,528
def process_config(ctx, configfile):
    """Process a yaml config with instructions.

    Validates schema version and structure, installs the listed
    services/integrations, configures the integrations and finally
    runs the services.
    """
    from honeycomb.commands.service.run import run as service_run
    from honeycomb.commands.service.install import install as service_install
    from honeycomb.commands.integration.install import install as integration_install
    from honeycomb.commands.integration.configure import configure as integration_configure

    VERSION = "version"
    SERVICES = defs.SERVICES
    INTEGRATIONS = defs.INTEGRATIONS
    required_top_keys = [VERSION, SERVICES]
    supported_versions = [1]

    def validate_yml(config):
        # Ensure mandatory top-level keys and a supported schema version.
        for key in required_top_keys:
            if key not in config:
                raise exceptions.ConfigFieldMissing(key)
        version = config.get(VERSION)
        if version not in supported_versions:
            raise exceptions.ConfigFieldTypeMismatch(
                VERSION, version, "one of: {}".format(repr(supported_versions)))

    def install_plugins(services, integrations):
        for cmd, kwargs in [(service_install, {SERVICES: services}),
                            (integration_install, {INTEGRATIONS: integrations})]:
            try:
                ctx.invoke(cmd, **kwargs)
            except SystemExit:
                # Install commands may sys.exit; continue with the next step.
                pass

    def parameters_to_string(parameters_dict):
        return ["{}={}".format(k, v) for k, v in parameters_dict.items()]

    def configure_integrations(integrations):
        for integration in integrations:
            args_list = parameters_to_string(
                config[INTEGRATIONS][integration].get(defs.PARAMETERS, dict()))
            ctx.invoke(integration_configure, integration=integration, args=args_list)

    def run_services(services, integrations):
        for service in services:
            args_list = parameters_to_string(
                config[SERVICES][service].get(defs.PARAMETERS, dict()))
            ctx.invoke(service_run, service=service, integration=integrations, args=args_list)

    with open(configfile, "rb") as fh:
        # Security fix: yaml.load without an explicit Loader can
        # instantiate arbitrary python objects; the config only needs
        # plain YAML, so use safe_load.
        config = yaml.safe_load(fh.read())

    validate_yml(config)
    services = config.get(SERVICES).keys()
    integrations = config.get(INTEGRATIONS).keys() if config.get(INTEGRATIONS) else []
    install_plugins(services, integrations)
    configure_integrations(integrations)
    run_services(services, integrations)
Process a yaml config with instructions .
57,529
def get_plugin_path(home, plugin_type, plugin_name, editable=False):
    """Return the real path to a plugin.

    Editable plugins are addressed directly by ``plugin_name``; regular
    plugins live under ``home/plugin_type/plugin_name``.
    """
    candidate = plugin_name if editable else os.path.join(home, plugin_type, plugin_name)
    return os.path.realpath(candidate)
Return path to plugin .
57,530
def install_plugin(pkgpath, plugin_type, install_path, register_func):
    """Install a plugin from a folder, a zip file, or the online repo."""
    service_name = os.path.basename(pkgpath)
    if os.path.exists(os.path.join(install_path, service_name)):
        raise exceptions.PluginAlreadyInstalled(pkgpath)

    if os.path.exists(pkgpath):
        logger.debug("%s exists in filesystem", pkgpath)
        if os.path.isdir(pkgpath):
            pip_status = install_dir(pkgpath, install_path, register_func)
        else:
            # Not a directory: assume a zipped plugin archive.
            pip_status = install_from_zip(pkgpath, install_path, register_func)
    else:
        logger.debug("cannot find %s locally, checking github repo", pkgpath)
        click.secho("Collecting {}..".format(pkgpath))
        pip_status = install_from_repo(pkgpath, plugin_type, install_path, register_func)

    if pip_status == 0:
        click.secho("[+] Great success!")
    else:
        click.secho("[-] Service installed but something was odd with dependency install, please review debug logs")
Install specified plugin .
57,531
def install_deps(pkgpath):
    """Install plugin dependencies with pip; return the pip exit status (0 = ok)."""
    requirements = os.path.join(pkgpath, "requirements.txt")
    if not os.path.exists(requirements):
        # Nothing to install counts as success.
        return 0
    logger.debug("installing dependencies")
    click.secho("[*] Installing dependencies")
    pipargs = ["install", "--target", os.path.join(pkgpath, defs.DEPS_DIR),
               "--ignore-installed", "-r", requirements]
    logger.debug("running pip %s", pipargs)
    return subprocess.check_call([sys.executable, "-m", "pip"] + pipargs)
Install plugin dependencies using pip .
57,532
def copy_file(src, dst):
    """Copy a single file using low-level os.open/os.read/os.write.

    The destination is created with the source's mode bits. File
    descriptors are always closed, even when the copy fails mid-way.
    """
    # Bug fix: fin/fout were previously unbound when os.open raised,
    # which turned the cleanup in `finally` into a NameError.
    fin = None
    fout = None
    try:
        fin = os.open(src, READ_FLAGS)
        stat = os.fstat(fin)
        fout = os.open(dst, WRITE_FLAGS, stat.st_mode)
        for x in iter(lambda: os.read(fin, BUFFER_SIZE), b""):
            os.write(fout, x)
    finally:
        for fd in (fin, fout):
            if fd is None:
                continue
            try:
                os.close(fd)
            except Exception as exc:
                logger.debug("Failed to close file handle when copying: {}".format(exc))
Copy a single file .
57,533
def copy_tree(src, dst, symlinks=False, ignore=None):
    """Copy a full directory structure from src to dst.

    Args:
        symlinks: recreate symlinks instead of following them.
        ignore: iterable of entry names to skip.

    Raises:
        CTError: aggregating every (src, dst, error) triple encountered.
    """
    # Bug fix: replaced the mutable default argument `ignore=[]`.
    if ignore is None:
        ignore = []
    names = os.listdir(src)
    if not os.path.exists(dst):
        os.makedirs(dst)
    errors = []
    for name in names:
        if name in ignore:
            continue
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        try:
            if symlinks and os.path.islink(srcname):
                linkto = os.readlink(srcname)
                os.symlink(linkto, dstname)
            elif os.path.isdir(srcname):
                copy_tree(srcname, dstname, symlinks, ignore)
            else:
                copy_file(srcname, dstname)
        except (IOError, os.error) as exc:
            errors.append((srcname, dstname, str(exc)))
        except CTError as exc:
            # Bubble nested failures up as part of one aggregate error.
            errors.extend(exc.errors)
    if errors:
        raise CTError(errors)
Copy a full directory structure .
57,534
def install_dir(pkgpath, install_path, register_func, delete_after_install=False):
    """Validate a plugin directory and copy it into the install path."""
    logger.debug("%s is a directory, attempting to validate", pkgpath)
    plugin = register_func(pkgpath)
    logger.debug("%s looks good, copying to %s", pkgpath, install_path)
    target = os.path.join(install_path, plugin.name)
    try:
        copy_tree(pkgpath, target)
        if delete_after_install:
            logger.debug("deleting %s", pkgpath)
            shutil.rmtree(pkgpath)
        pkgpath = target
    except (OSError, CTError) as exc:
        # Copy failures are surfaced as "already installed".
        logger.debug(str(exc), exc_info=True)
        raise exceptions.PluginAlreadyInstalled(plugin.name)
    return install_deps(pkgpath)
Install plugin from specified directory .
57,535
def install_from_zip(pkgpath, install_path, register_func, delete_after_install=False):
    """Install a plugin from a zipfile.

    The archive is extracted to a fresh temp dir and installation is
    delegated to install_dir (which also removes the temp dir).

    :param pkgpath: Path to the plugin zip archive.
    :param install_path: Destination plugins directory.
    :param register_func: Callback that validates/registers the plugin.
    :param delete_after_install: Remove the original archive when True.
    """
    logger.debug("%s is a file, attempting to load zip", pkgpath)
    pkgtempdir = tempfile.mkdtemp(prefix="honeycomb_")
    try:
        with zipfile.ZipFile(pkgpath) as pkgzip:
            pkgzip.extractall(pkgtempdir)
    except zipfile.BadZipfile as exc:
        logger.debug(str(exc))
        raise click.ClickException(str(exc))
    if delete_after_install:
        logger.debug("deleting %s", pkgpath)
        os.remove(pkgpath)
    logger.debug("installing from unzipped folder %s", pkgtempdir)
    # Temp dir is cleaned up by install_dir via delete_after_install=True.
    return install_dir(pkgtempdir, install_path, register_func, delete_after_install=True)
Install plugin from zipfile .
57,536
def install_from_repo(pkgname, plugin_type, install_path, register_func):
    """Download a plugin zip from the online github repo and install it."""
    rsession = requests.Session()
    rsession.mount("https://", HTTPAdapter(max_retries=3))

    logger.debug("trying to install %s from online repo", pkgname)
    pkgurl = "{}/{}s/{}.zip".format(defs.GITHUB_RAW, plugin_type, pkgname)
    try:
        # HEAD first to learn the archive size for the progress bar.
        logger.debug("Requesting HTTP HEAD: %s", pkgurl)
        r = rsession.head(pkgurl)
        r.raise_for_status()
        total_size = int(r.headers.get("content-length", 0))
        pkgsize = _sizeof_fmt(total_size)
        with click.progressbar(length=total_size,
                               label="Downloading {} {} ({})..".format(plugin_type, pkgname, pkgsize)) as bar:
            r = rsession.get(pkgurl, stream=True)
            with tempfile.NamedTemporaryFile(delete=False) as f:
                # Bug fix: click's ProgressBar.update() advances by a
                # *delta*, but the cumulative byte count was being passed,
                # overshooting the bar quadratically. Also download in
                # 64KB chunks instead of one byte at a time.
                for chunk in r.iter_content(chunk_size=65536):
                    if chunk:
                        f.write(chunk)
                        bar.update(len(chunk))
            return install_from_zip(f.name, install_path, register_func, delete_after_install=True)
    except requests.exceptions.HTTPError as exc:
        logger.debug(str(exc))
        raise exceptions.PluginNotFoundInOnlineRepo(pkgname)
    except requests.exceptions.ConnectionError as exc:
        logger.debug(str(exc))
        raise exceptions.PluginRepoConnectionError()
Install plugin from online repo .
57,537
def uninstall_plugin(pkgpath, force):
    """Remove an installed plugin, asking for confirmation unless forced."""
    pkgname = os.path.basename(pkgpath)
    if not os.path.exists(pkgpath):
        click.secho("[-] doh! I cannot seem to find `{}`, are you sure it's installed?".format(pkgname))
        return
    if not force:
        click.confirm("[?] Are you sure you want to delete `{}` from honeycomb?".format(pkgname), abort=True)
    try:
        shutil.rmtree(pkgpath)
        logger.debug("successfully uninstalled {}".format(pkgname))
        click.secho("[*] Uninstalled {}".format(pkgname))
    except OSError as exc:
        logger.exception(str(exc))
Uninstall a plugin .
57,538
def list_remote_plugins(installed_plugins, plugin_type):
    """Print names of repo plugins that are not installed locally."""
    click.secho("\n[*] Additional plugins from online repository:")
    try:
        rsession = requests.Session()
        rsession.mount("https://", HTTPAdapter(max_retries=3))
        r = rsession.get("{0}/{1}s/{1}s.txt".format(defs.GITHUB_RAW, plugin_type))
        logger.debug("fetching %ss from remote repo", plugin_type)
        available = [name for name in r.text.splitlines() if name not in installed_plugins]
        click.secho(" ".join(available))
    except requests.exceptions.ConnectionError as exc:
        logger.debug(str(exc), exc_info=True)
        raise click.ClickException("Unable to fetch {} information from online repository".format(plugin_type))
List remote plugins from online repo .
57,539
def list_local_plugins(plugin_type, plugins_path, plugin_details):
    """Print details for each locally installed plugin; return their names."""
    installed_plugins = list()
    # Only first-level directories under plugins_path count as plugins.
    for plugin in next(os.walk(plugins_path))[1]:
        details = plugin_details(plugin)
        installed_plugins.append(plugin)
        click.secho(details)
    if not installed_plugins:
        click.secho("[*] You do not have any {0}s installed, "
                    "try installing one with `honeycomb {0} install`".format(plugin_type))
    return installed_plugins
List local plugins with details .
57,540
def parse_plugin_args(command_args, config_args):
    """Parse command line arguments based on the plugin's parameters config.

    :param command_args: Iterable of "key=value" strings from the CLI.
    :param config_args: Parameter definitions from the plugin's config.
    :returns: dict mapping parameter names to pythonized values.
    :raises click.UsageError: on a malformed key=value argument.
    :raises exceptions.RequiredFieldMissing: when a required parameter
        has neither a CLI value nor a default.
    """
    parsed_args = dict()
    for arg in command_args:
        kv = arg.split("=")
        if len(kv) != 2:
            raise click.UsageError("Invalid parameter '{}', must be in key=value format".format(arg))
        parsed_args[kv[0]] = config_utils.get_truetype(kv[1])
    for arg in config_args:
        # NOTE: defs.VALUE holds the parameter *name* in the config schema.
        value = arg[defs.VALUE]
        value_type = arg[defs.TYPE]
        if value in parsed_args:
            # CLI supplied a value -> validate it against type/range/choices.
            config_utils.validate_field_matches_type(value, parsed_args[value], value_type,
                                                     arg.get(defs.ITEMS), arg.get(defs.MIN), arg.get(defs.MAX))
        elif defs.DEFAULT in arg:
            parsed_args[value] = arg[defs.DEFAULT]
        elif arg[defs.REQUIRED]:
            raise exceptions.RequiredFieldMissing(value)
    return parsed_args
Parse command line arguments based on the plugin s parameters config .
57,541
def get_select_items(items):
    """Return the list of selectable values from a select-items config."""
    option_items = list()
    for item in items:
        is_valid = isinstance(item, dict) and defs.VALUE in item and defs.LABEL in item
        if not is_valid:
            raise exceptions.ParametersFieldError(
                item, "a dictionary with {} and {}".format(defs.LABEL, defs.VALUE))
        option_items.append(item[defs.VALUE])
    return option_items
Return list of possible select items .
57,542
def print_plugin_args(plugin_path):
    """Print a table describing the plugin's configurable parameters."""
    args = config_utils.get_config_parameters(plugin_path)
    args_format = "{:20} {:10} {:^15} {:^10} {:25}"
    title = args_format.format(defs.NAME.upper(), defs.TYPE.upper(), defs.DEFAULT.upper(),
                               defs.REQUIRED.upper(), defs.DESCRIPTION.upper())
    click.secho(title)
    click.secho("-" * len(title))
    for arg in args:
        help_text = " ({})".format(arg[defs.HELP_TEXT]) if defs.HELP_TEXT in arg else ""
        options = _parse_select_options(arg)
        description = arg[defs.LABEL] + options + help_text
        row = args_format.format(arg[defs.VALUE], arg[defs.TYPE],
                                 str(arg.get(defs.DEFAULT, None)),
                                 str(arg.get(defs.REQUIRED, False)), description)
        click.secho(row)
Print plugin parameters table .
57,543
def configure_integration(path):
    """Configure and enable an integration from its on-disk folder.

    Loads the previously-saved args file, instantiates the integration's
    action class with those args, and appends the result to the global
    ``configured_integrations`` list.
    """
    integration = register_integration(path)
    integration_args = {}
    try:
        with open(os.path.join(path, ARGS_JSON)) as f:
            integration_args = json.loads(f.read())
    except Exception as exc:
        # Any failure (missing file, bad json) means it was never configured.
        logger.debug(str(exc), exc_info=True)
        raise click.ClickException("Cannot load {} integration args, please configure it first."
                                   .format(os.path.basename(path)))
    click.secho("[*] Adding integration {}".format(integration.name))
    logger.debug("Adding integration %s", integration.name,
                 extra={"integration": integration.name, "args": integration_args})
    configured_integration = ConfiguredIntegration(name=integration.name, integration=integration, path=path)
    configured_integration.data = integration_args
    # Instantiate the integration's actions class with the stored args.
    configured_integration.integration.module = get_integration_module(path).IntegrationActionsClass(integration_args)
    configured_integrations.append(configured_integration)
Configure and enable an integration .
57,544
def send_alert_to_subscribed_integrations(alert):
    """Dispatch an alert to every matching integration, each in its own thread."""
    for configured_integration in get_valid_configured_integrations(alert):
        worker = threading.Thread(target=create_integration_alert_and_call_send,
                                  args=(alert, configured_integration))
        worker.start()
Send Alert to relevant integrations .
57,545
def get_valid_configured_integrations(alert):
    """Return configured EVENT_OUTPUT integrations accepting this alert type."""
    if not configured_integrations:
        return []

    def _accepts(ci):
        integration = ci.integration
        if integration.integration_type != IntegrationTypes.EVENT_OUTPUT.name:
            return False
        # An empty supported_event_types list means "accept everything".
        return (not integration.supported_event_types
                or alert.alert_type in integration.supported_event_types)

    return [ci for ci in configured_integrations if _accepts(ci)]
Return a list of integrations for alert filtered by alert_type .
57,546
def create_integration_alert_and_call_send(alert, configured_integration):
    """Wrap an alert in an IntegrationAlert and hand it off for sending."""
    integration_alert = IntegrationAlert(
        alert=alert,
        configured_integration=configured_integration,
        status=IntegrationAlertStatuses.PENDING.name,
        retries=configured_integration.integration.max_send_retries,
    )
    send_alert_to_configured_integration(integration_alert)
Create an IntegrationAlert object and send it to Integration .
57,547
def send_alert_to_configured_integration(integration_alert):
    """Send an IntegrationAlert to its configured integration.

    Skips silently when the alert is missing required fields; on transient
    send failures it decrements the retry budget and recurses after
    SEND_ALERT_DATA_INTERVAL seconds.
    """
    try:
        alert = integration_alert.alert
        configured_integration = integration_alert.configured_integration
        integration = configured_integration.integration
        integration_actions_instance = configured_integration.integration.module

        if integration.required_fields:
            if not all([hasattr(alert, _) for _ in integration.required_fields]):
                logger.debug(
                    "Alert does not have all required_fields (%s) for integration %s, skipping",
                    integration.required_fields, integration.name)
                return

        exclude_fields = ["alert_type", "service_type"]
        # Collect only the populated, non-excluded alert fields to send.
        # (Removed a dead `alert_fields = dict()` that was immediately overwritten.)
        alert_fields = {}
        for field in alert.__slots__:
            if hasattr(alert, field) and field not in exclude_fields:
                alert_fields[field] = getattr(alert, field)

        logger.debug("Sending alert %s to %s", alert_fields, integration.name)
        output_data, output_file_content = integration_actions_instance.send_event(alert_fields)

        if integration.polling_enabled:
            integration_alert.status = IntegrationAlertStatuses.POLLING.name
            polling_integration_alerts.append(integration_alert)
        else:
            integration_alert.status = IntegrationAlertStatuses.DONE.name

        # output_data is stored in both cases: the poller reloads it later.
        integration_alert.send_time = get_current_datetime_utc()
        integration_alert.output_data = json.dumps(output_data)
    except exceptions.IntegrationMissingRequiredFieldError:
        # Fixed: the original passed an extra arg (exc.message) with no matching
        # %s placeholder, which makes the logging call itself error out.
        logger.exception(
            "Send response formatting for integration alert %s failed. Missing required fields",
            integration_alert)
        integration_alert.status = IntegrationAlertStatuses.ERROR_MISSING_SEND_FIELDS.name
    except exceptions.IntegrationOutputFormatError:
        logger.exception("Send response formatting for integration alert %s failed",
                         integration_alert)
        integration_alert.status = IntegrationAlertStatuses.ERROR_SENDING_FORMATTING.name
    except exceptions.IntegrationSendEventError as exc:
        # Clamp the stored retry count to the global maximum before decrementing.
        integration_send_retries = (integration_alert.retries
                                    if integration_alert.retries <= MAX_SEND_RETRIES
                                    else MAX_SEND_RETRIES)
        send_retries_left = integration_send_retries - 1
        integration_alert.retries = send_retries_left
        logger.error("Sending integration alert %s failed. Message: %s. Retries left: %s",
                     integration_alert, exc.message, send_retries_left)
        if send_retries_left == 0:
            integration_alert.status = IntegrationAlertStatuses.ERROR_SENDING.name
        if send_retries_left > 0:
            sleep(SEND_ALERT_DATA_INTERVAL)
            send_alert_to_configured_integration(integration_alert)
Send IntegrationAlert to configured integration .
57,548
def poll_integration_alert_data(integration_alert):
    """Poll the integration for updates on a waiting IntegrationAlert."""
    logger.info("Polling information for integration alert %s", integration_alert)
    try:
        actions = integration_alert.configured_integration.integration.module
        output_data, output_file_content = actions.poll_for_updates(
            json.loads(integration_alert.output_data))
        integration_alert.status = IntegrationAlertStatuses.DONE.name
        integration_alert.output_data = json.dumps(output_data)
        polling_integration_alerts.remove(integration_alert)
    except exceptions.IntegrationNoMethodImplementationError:
        logger.error("No poll_for_updates function found for integration alert %s",
                     integration_alert)
        integration_alert.status = IntegrationAlertStatuses.ERROR_POLLING.name
    except exceptions.IntegrationPollEventError:
        # Transient failure: keep status unchanged so the next poll cycle retries.
        logger.debug("Polling for integration alert %s failed", integration_alert)
    except exceptions.IntegrationOutputFormatError:
        logger.error("Integration alert %s formatting error", integration_alert)
        integration_alert.status = IntegrationAlertStatuses.ERROR_POLLING_FORMATTING.name
    except Exception:
        logger.exception("Error polling integration alert %s", integration_alert)
        integration_alert.status = IntegrationAlertStatuses.ERROR_POLLING.name
Poll for updates on waiting IntegrationAlerts .
57,549
def wait_until(func, check_return_value=True, total_timeout=60, interval=0.5,
               exc_list=None, error_message="", *args, **kwargs):
    """Repeatedly call *func* until it succeeds or *total_timeout* elapses.

    :param func: Callable to run.
    :param check_return_value: When True, keep retrying until the return value is truthy.
    :param total_timeout: Overall deadline in seconds.
    :param interval: Sleep between attempts in seconds.
    :param exc_list: Exception classes that are swallowed and retried; anything
                     else propagates immediately.
    :param error_message: Message for the TimeoutException raised on deadline.
    :raises TimeoutException: If no acceptable result arrived in time.
    """
    deadline = time.time() + total_timeout
    while time.time() < deadline:
        try:
            # Lazy %-style args: the message is only formatted if DEBUG is enabled.
            logger.debug("executing %s with args %s %s", func, args, kwargs)
            return_value = func(*args, **kwargs)
            # Simplified from `not c or (c and rv)` — logically identical.
            if not check_return_value or return_value:
                return return_value
        except Exception as exc:
            # isinstance accepts a tuple of classes directly; no any([...]) needed.
            if not (exc_list and isinstance(exc, tuple(exc_list))):
                raise
        time.sleep(interval)
    raise TimeoutException(error_message)
Run a command in a loop until desired result or timeout occurs .
57,550
def search_json_log(filepath, key, value):
    """Search a JSON-lines log file for an entry where ``entry[key] == value``.

    :param filepath: Path to the log file (one JSON object per line).
    :param key: Key to look up in each parsed entry.
    :param value: Value the key must equal.
    :returns: The first matching entry as a dict, or False when the file
              cannot be read or no entry matches.
    """
    try:
        with open(filepath, "r") as log_file:
            # Stream the file instead of loading every line into memory at once.
            for line in log_file:
                try:
                    entry = json.loads(line)
                except ValueError:
                    # Skip partially written or malformed lines instead of crashing.
                    continue
                if key in entry and entry[key] == value:
                    return entry
    except IOError:
        pass
    return False
Search json log file for a key = value pair .
57,551
def list_commands(self, ctx):
    """Return the sorted command names found as .py files in ``self.folder``.

    Files starting with an underscore (helpers, __init__) are excluded.
    """
    filenames = next(os.walk(self.folder))[2]
    return sorted(
        fname[:-3]
        for fname in filenames
        if fname.endswith(".py") and not fname.startswith("_")
    )
List commands from folder .
57,552
def get_command(self, ctx, name):
    """Import command *name* from this plugin's command package and return it."""
    plugin = os.path.basename(self.folder)
    module_path = "honeycomb.commands.{}.{}".format(plugin, name)
    try:
        command_module = importlib.import_module(module_path)
    except ImportError:
        # Unknown command: show the group's help text in the error.
        raise click.UsageError("No such command {} {}\n\n{}".format(plugin, name, self.get_help(ctx)))
    return getattr(command_module, name)
Fetch command from folder .
57,553
def cli(ctx, home, iamroot, config, verbose):
    """Honeycomb is a honeypot framework.

    Sets up the home directory and logging, refuses to run privileged
    unless --iamroot was given, and optionally processes a config file.
    """
    _mkhome(home)
    setup_logging(home, verbose)

    logger.debug("Honeycomb v%s", __version__, extra={"version": __version__})
    logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
                 extra={"command": ctx.command.name, "params": ctx.params})

    try:
        is_admin = os.getuid() == 0
    except AttributeError:
        # Windows has no getuid(); fall back to the shell32 admin check.
        is_admin = ctypes.windll.shell32.IsUserAnAdmin()

    if is_admin:
        if not iamroot:
            raise click.ClickException("Honeycomb should not run as a privileged user, if you are just "
                                       "trying to bind to a low port try running `setcap 'cap_net_bind_service=+ep' "
                                       "$(which honeycomb)` instead. If you insist, use --iamroot")
        # Fixed: Logger.warn is a deprecated alias; use warning().
        logger.warning("running as root!")

    ctx.obj["HOME"] = home
    logger.debug("ctx: {}".format(ctx.obj))

    if config:
        return process_config(ctx, config)
Honeycomb is a honeypot framework .
57,554
def setup_logging(home, verbose):
    """Configure console and JSON-file logging for honeycomb.

    Console output is INFO (DEBUG with *verbose*); the debug log file always
    records DEBUG as JSON lines.
    """
    logging.setLoggerClass(MyLogger)
    console_formatter = {
        "format": "%(levelname)-8s [%(asctime)s %(name)s] %(filename)s:%(lineno)s %(funcName)s: %(message)s",
    }
    json_formatter = {
        "()": jsonlogger.JsonFormatter,
        "format": "%(levelname)s %(asctime)s %(name)s %(filename)s %(lineno)s %(funcName)s %(message)s",
    }
    handlers = {
        "default": {
            "level": "DEBUG" if verbose else "INFO",
            "class": "logging.StreamHandler",
            "formatter": "console",
        },
        "file": {
            "level": "DEBUG",
            # WatchedFileHandler reopens the file if it is rotated externally.
            "class": "logging.handlers.WatchedFileHandler",
            "filename": os.path.join(home, DEBUG_LOG_FILE),
            "formatter": "json",
        },
    }
    logging.config.dictConfig({
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {"console": console_formatter, "json": json_formatter},
        "handlers": handlers,
        "loggers": {
            "": {"handlers": ["default", "file"], "level": "DEBUG", "propagate": True},
        },
    })
Configure logging for honeycomb .
57,555
def makeRecord(self, name, level, fn, lno, msg, args, exc_info, func=None, extra=None, sinfo=None):
    """Build a LogRecord, merging process identity fields into *extra*.

    Unlike the stock implementation, keys in *extra* may override internal
    record attributes; pid/uid/gid/ppid are always attached.
    """
    if six.PY2:
        record = logging.LogRecord(name, level, fn, lno, msg, args, exc_info, func)
    else:
        record = logging.LogRecord(name, level, fn, lno, msg, args, exc_info, func, sinfo)
    merged = dict() if extra is None else extra
    merged.update({"pid": os.getpid(), "uid": os.getuid(), "gid": os.getgid(), "ppid": os.getppid()})
    for key in merged:
        record.__dict__[key] = merged[key]
    return record
Override default logger to allow overriding of internal attributes .
57,556
def stop(ctx, service, editable):
    """Stop a running service daemon."""
    logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
                 extra={"command": ctx.command.name, "params": ctx.params})
    home = ctx.obj["HOME"]
    service_path = plugin_utils.get_plugin_path(home, SERVICES, service, editable)

    logger.debug("loading {}".format(service))
    service = register_service(service_path)

    try:
        with open(os.path.join(service_path, ARGS_JSON)) as args_file:
            service_args = json.loads(args_file.read())
    except IOError as exc:
        logger.debug(str(exc), exc_info=True)
        raise click.ClickException("Cannot load service args, are you sure server is running?")

    service_obj = get_service_module(service_path).service_class(
        alert_types=service.alert_types, service_args=service_args)
    runner = myRunner(service_obj,
                      pidfile=service_path + ".pid",
                      stdout=open(os.path.join(service_path, "stdout.log"), "ab"),
                      stderr=open(os.path.join(service_path, "stderr.log"), "ab"))

    click.secho("[*] Stopping {}".format(service.name))
    try:
        runner._stop()
    except daemon.runner.DaemonRunnerStopFailureError as exc:
        logger.debug(str(exc), exc_info=True)
        raise click.ClickException("Unable to stop service, are you sure it is running?")
Stop a running service daemon .
57,557
def logs(ctx, services, num, follow):
    """Show stdout logs of daemonized services, optionally following them.

    One tailer thread is started per service with an existing log file; the
    command then blocks on the first thread (joining in short slices so
    Ctrl-C stays responsive).
    """
    logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
                 extra={"command": ctx.command.name, "params": ctx.params})
    home = ctx.obj["HOME"]
    services_path = os.path.join(home, SERVICES)

    tail_threads = []
    for service in services:
        logpath = os.path.join(services_path, service, LOGS_DIR, STDOUTLOG)
        if os.path.exists(logpath):
            logger.debug("tailing %s", logpath)
            tail_thread = threading.Thread(target=Tailer, kwargs={
                "name": service,
                "nlines": num,
                "filepath": logpath,
                "follow": follow,
            })
            tail_thread.daemon = True
            tail_thread.start()
            tail_threads.append(tail_thread)

    if tail_threads:
        # Fixed: Thread.isAlive() was removed in Python 3.9; use is_alive().
        while tail_threads[0].is_alive():
            tail_threads[0].join(0.1)
Show logs of daemonized service .
57,558
def get_integration_module(integration_path):
    """Prepend integration-related paths to sys.path and import its module."""
    candidate_paths = [
        os.path.join(__file__, "..", ".."),
        os.path.join(integration_path, ".."),
        os.path.join(integration_path, DEPS_DIR),
    ]
    for candidate in candidate_paths:
        candidate = os.path.realpath(candidate)
        logger.debug("adding %s to path", candidate)
        sys.path.insert(0, candidate)

    integration_name = os.path.basename(integration_path)
    logger.debug("importing %s", ".".join([integration_name, INTEGRATION]))
    return importlib.import_module(".".join([integration_name, INTEGRATION]))
Add custom paths to sys and import integration module .
57,559
def register_integration(package_folder):
    """Validate an integration package folder and return its integration object.

    :raises IntegrationNotFound: If the folder does not exist.
    :raises ConfigFileNotFound: If config.json is missing.
    """
    logger.debug("registering integration %s", package_folder)
    package_folder = os.path.realpath(package_folder)
    if not os.path.exists(package_folder):
        raise IntegrationNotFound(os.path.basename(package_folder))

    json_config_path = os.path.join(package_folder, CONFIG_FILE_NAME)
    if not os.path.exists(json_config_path):
        raise ConfigFileNotFound(json_config_path)

    with open(json_config_path, "r") as config_file:
        config_json = json.load(config_file)

    # Validate both the top-level structure and the parameter definitions.
    validate_config(config_json, defs.INTEGRATION_VALIDATE_CONFIG_FIELDS)
    validate_config_parameters(config_json,
                               defs.INTEGRATION_PARAMETERS_ALLOWED_KEYS,
                               defs.INTEGRATION_PARAMETERS_ALLOWED_TYPES)

    return _create_integration_object(config_json)
Register a honeycomb integration .
57,560
def list(ctx, remote):
    """List installed integrations, optionally also the remote repository's."""
    logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
                 extra={"command": ctx.command.name, "params": ctx.params})
    click.secho("[*] Installed integrations:")
    home = ctx.obj["HOME"]
    integrations_path = os.path.join(home, INTEGRATIONS)
    plugin_type = "integration"

    def get_integration_details(integration_name):
        """Format one installed integration for display."""
        logger.debug("loading {}".format(integration_name))
        integration = register_integration(os.path.join(integrations_path, integration_name))
        supported_event_types = integration.supported_event_types or "All"
        return "{:s} ({:s}) [Supported event types: {}]".format(
            integration.name, integration.description, supported_event_types)

    installed_integrations = list_local_plugins(plugin_type, integrations_path, get_integration_details)
    if remote:
        list_remote_plugins(installed_integrations, plugin_type)
    else:
        click.secho("\n[*] Try running `honeycomb integrations list -r` "
                    "to see integrations available from our repository")
List integrations .
57,561
def run(ctx, service, args, show_args, daemon, editable, integration):
    """Load and run a specific service, optionally daemonized, with integrations."""
    home = ctx.obj["HOME"]
    service_path = plugin_utils.get_plugin_path(home, SERVICES, service, editable)
    service_log_path = os.path.join(service_path, LOGS_DIR)
    logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
                 extra={"command": ctx.command.name, "params": ctx.params})
    logger.debug("loading {} ({})".format(service, service_path))
    service = register_service(service_path)

    if show_args:
        return plugin_utils.print_plugin_args(service_path)

    service_module = get_service_module(service_path)
    service_args = plugin_utils.parse_plugin_args(args, config_utils.get_config_parameters(service_path))
    service_obj = service_module.service_class(alert_types=service.alert_types, service_args=service_args)

    if not os.path.exists(service_log_path):
        os.mkdir(service_log_path)

    if daemon:
        runner = myRunner(service_obj,
                          pidfile=service_path + ".pid",
                          stdout=open(os.path.join(service_log_path, STDOUTLOG), "ab"),
                          stderr=open(os.path.join(service_log_path, STDERRLOG), "ab"))
        # Keep log handler file descriptors open across the daemon fork.
        files_preserve = []
        for handler in logging.getLogger().handlers:
            if hasattr(handler, "stream") and hasattr(handler.stream, "fileno"):
                files_preserve.append(handler.stream.fileno())
            if hasattr(handler, "socket"):
                files_preserve.append(handler.socket.fileno())
        runner.daemon_context.files_preserve = files_preserve
        runner.daemon_context.signal_map.update({
            signal.SIGTERM: service_obj._on_server_shutdown,
            signal.SIGINT: service_obj._on_server_shutdown,
        })
        logger.debug("daemon_context", extra={"daemon_context": vars(runner.daemon_context)})

    for integration_name in integration:
        integration_path = plugin_utils.get_plugin_path(home, INTEGRATIONS, integration_name, editable)
        configure_integration(integration_path)

    click.secho("[+] Launching {} {}".format(service.name, "in daemon mode" if daemon else ""))
    try:
        # Persist the effective args so `stop` can reconstruct the service.
        with open(os.path.join(service_path, ARGS_JSON), "w") as f:
            f.write(json.dumps(service_args))
        runner._start() if daemon else service_obj.run()
    except KeyboardInterrupt:
        service_obj._on_server_shutdown()
    click.secho("[*] {} has stopped".format(service.name))
Load and run a specific service .
57,562
def read_lines(self, file_path, empty_lines=False, signal_ready=True):
    """Yield decoded lines appended to *file_path*, tail -f style.

    Starts at the end of the file, yields new lines as they arrive, and
    reopens the file if it is rotated or replaced underneath us.
    """
    handle, current_id = self._get_file(file_path)
    handle.seek(0, os.SEEK_END)

    if signal_ready:
        self.signal_ready()

    while self.thread_server.is_alive():
        line = six.text_type(handle.readline(), "utf-8")
        if line:
            yield line
            continue
        elif empty_lines:
            yield line
        time.sleep(0.1)
        # Detect rotation: the inode/identity changed but the path still exists.
        if current_id != self._get_file_id(os.stat(file_path)) and os.path.isfile(file_path):
            handle, current_id = self._get_file(file_path)
Fetch lines from file .
57,563
def on_server_start(self):
    """Start the docker container and forward parsed alerts from its logs."""
    self._container = self._docker_client.containers.run(
        self.docker_image_name, detach=True, **self.docker_params)
    self.signal_ready()

    for raw_line in self.get_lines():
        try:
            parsed = self.parse_line(raw_line)
            if parsed:
                self.add_alert_to_queue(parsed)
        except Exception:
            # A single bad log line must not kill the service loop.
            self.logger.exception(None)
Service run loop function .
57,564
def on_server_shutdown(self):
    """Stop and remove the docker container, if one was started."""
    container = self._container
    if not container:
        return
    container.stop()
    # Remove the container and its anonymous volumes.
    container.remove(v=True, force=True)
Stop the container before shutting down .
57,565
def uninstall(ctx, yes, integrations):
    """Uninstall one or more integrations."""
    logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
                 extra={"command": ctx.command.name, "params": ctx.params})
    home = ctx.obj["HOME"]
    for integration in integrations:
        integration_path = plugin_utils.get_plugin_path(home, INTEGRATIONS, integration)
        plugin_utils.uninstall_plugin(integration_path, yes)
Uninstall an integration.
57,566
def install(ctx, services, delete_after_install=False):
    """Install honeypot services from the library, a local path, or a zipfile.

    Exits with EEXIST if any requested service was already installed.
    """
    logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
                 extra={"command": ctx.command.name, "params": ctx.params})
    home = ctx.obj["HOME"]
    services_path = os.path.join(home, SERVICES)

    all_installed = True
    for service in services:
        try:
            plugin_utils.install_plugin(service, SERVICE, services_path, register_service)
        except exceptions.PluginAlreadyInstalled as exc:
            click.echo(exc)
            all_installed = False

    if not all_installed:
        raise ctx.exit(errno.EEXIST)
Install a honeypot service from the online library, a local path, or a zipfile.
57,567
def uninstall(ctx, yes, services):
    """Uninstall one or more services."""
    logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
                 extra={"command": ctx.command.name, "params": ctx.params})
    home = ctx.obj["HOME"]
    for service in services:
        service_path = plugin_utils.get_plugin_path(home, SERVICES, service)
        plugin_utils.uninstall_plugin(service_path, yes)
Uninstall a service .
57,568
def get_service_module(service_path):
    """Prepend service-related paths to sys.path and import the service module."""
    for candidate in (os.path.dirname(__file__),
                      os.path.realpath(os.path.join(service_path, "..")),
                      os.path.realpath(os.path.join(service_path)),
                      os.path.realpath(os.path.join(service_path, DEPS_DIR))):
        candidate = os.path.realpath(candidate)
        logger.debug("adding %s to path", candidate)
        sys.path.insert(0, candidate)

    service_name = os.path.basename(service_path)
    # A service package "foo" exposes its entry point as foo.foo_service.
    module = ".".join([service_name, service_name + "_service"])
    logger.debug("importing %s", module)
    return importlib.import_module(module)
Add custom paths to sys and import service module .
57,569
def register_service(package_folder):
    """Validate a service package folder and return its service object.

    :raises ServiceNotFound: If the folder does not exist.
    :raises ConfigFileNotFound: If config.json is missing.
    """
    logger.debug("registering service %s", package_folder)
    package_folder = os.path.realpath(package_folder)
    if not os.path.exists(package_folder):
        raise ServiceNotFound(os.path.basename(package_folder))

    json_config_path = os.path.join(package_folder, CONFIG_FILE_NAME)
    if not os.path.exists(json_config_path):
        raise ConfigFileNotFound(json_config_path)

    with open(json_config_path, "r") as config_file:
        config_json = json.load(config_file)

    # Validate the alert fields, the service config section, platform support,
    # per-alert configs and finally the declared parameters.
    config_utils.validate_config(config_json, defs.SERVICE_ALERT_VALIDATE_FIELDS)
    config_utils.validate_config(config_json.get(defs.SERVICE_CONFIG_SECTION_KEY, {}),
                                 defs.SERVICE_CONFIG_VALIDATE_FIELDS)
    _validate_supported_platform(config_json)
    _validate_alert_configs(config_json)
    config_utils.validate_config_parameters(config_json,
                                            defs.SERVICE_ALLOWED_PARAMTER_KEYS,
                                            defs.SERVICE_ALLOWED_PARAMTER_TYPES)

    service_type = _create_service_object(config_json)
    service_type.alert_types = _create_alert_types(config_json, service_type)
    return service_type
Register a honeycomb service .
57,570
def install(ctx, integrations, delete_after_install=False):
    """Install honeycomb integrations from the library, a local path, or a zipfile.

    Exits with EEXIST if any requested integration was already installed.
    """
    logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
                 extra={"command": ctx.command.name, "params": ctx.params})
    home = ctx.obj["HOME"]
    integrations_path = os.path.join(home, INTEGRATIONS)

    all_installed = True
    for integration in integrations:
        try:
            plugin_utils.install_plugin(integration, INTEGRATION, integrations_path, register_integration)
        except exceptions.PluginAlreadyInstalled as exc:
            click.echo(exc)
            all_installed = False

    if not all_installed:
        raise ctx.exit(errno.EEXIST)
Install a honeycomb integration from the online library, a local path, or a zipfile.
57,571
def configure(ctx, integration, args, show_args, editable):
    """Configure an integration and persist its parsed arguments to args.json."""
    home = ctx.obj["HOME"]
    integration_path = plugin_utils.get_plugin_path(home, defs.INTEGRATIONS, integration, editable)
    logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
                 extra={"command": ctx.command.name, "params": ctx.params})
    logger.debug("loading {} ({})".format(integration, integration_path))
    integration = register_integration(integration_path)

    if show_args:
        return plugin_utils.print_plugin_args(integration_path)

    integration_args = plugin_utils.parse_plugin_args(
        args, config_utils.get_config_parameters(integration_path))
    args_file = os.path.join(integration_path, defs.ARGS_JSON)
    serialized = json.dumps(integration_args)
    logger.debug("writing %s to %s", serialized, args_file)
    with open(args_file, "w") as f:
        f.write(json.dumps(integration_args))
    click.secho("[*] {0} has been configured, make sure to test it with `honeycomb integration test {0}`"
                .format(integration.name))
Configure an integration with default parameters .
57,572
def get_match_history(self, account_id=None, **kwargs):
    """Return a dictionary containing a list of the most recent Dota matches."""
    kwargs.setdefault('account_id', account_id)
    url = self.__build_url(urls.GET_MATCH_HISTORY, **kwargs)
    result = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if not self.__check_http_err(result.status_code):
        return response.build(result, url, self.raw_mode)
Returns a dictionary containing a list of the most recent Dota matches
57,573
def get_match_history_by_seq_num(self, start_at_match_seq_num=None, **kwargs):
    """Return a dictionary of Dota matches in the order they were recorded."""
    kwargs.setdefault('start_at_match_seq_num', start_at_match_seq_num)
    url = self.__build_url(urls.GET_MATCH_HISTORY_BY_SEQ_NUM, **kwargs)
    result = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if not self.__check_http_err(result.status_code):
        return response.build(result, url, self.raw_mode)
Returns a dictionary containing a list of Dota matches in the order they were recorded
57,574
def get_match_details(self, match_id=None, **kwargs):
    """Return a dictionary containing the details of a Dota 2 match."""
    kwargs.setdefault('match_id', match_id)
    url = self.__build_url(urls.GET_MATCH_DETAILS, **kwargs)
    result = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if not self.__check_http_err(result.status_code):
        return response.build(result, url, self.raw_mode)
Returns a dictionary containing the details for a Dota 2 match
57,575
def get_league_listing(self):
    """Return a dictionary containing a list of all ticketed leagues."""
    url = self.__build_url(urls.GET_LEAGUE_LISTING)
    result = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if not self.__check_http_err(result.status_code):
        return response.build(result, url, self.raw_mode)
Returns a dictionary containing a list of all ticketed leagues
57,576
def get_live_league_games(self):
    """Return a dictionary containing a list of ticketed games in progress."""
    url = self.__build_url(urls.GET_LIVE_LEAGUE_GAMES)
    result = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if not self.__check_http_err(result.status_code):
        return response.build(result, url, self.raw_mode)
Returns a dictionary containing a list of ticked games in progress
57,577
def get_team_info_by_team_id(self, start_at_team_id=None, **kwargs):
    """Return a dictionary containing in-game team information."""
    kwargs.setdefault('start_at_team_id', start_at_team_id)
    url = self.__build_url(urls.GET_TEAM_INFO_BY_TEAM_ID, **kwargs)
    result = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if not self.__check_http_err(result.status_code):
        return response.build(result, url, self.raw_mode)
Returns a dictionary containing a list of in-game teams
57,578
def get_player_summaries(self, steamids=None, **kwargs):
    """Return a dictionary containing player summaries.

    :param steamids: A single Steam id or an iterable of ids; 32-bit ids
                     are converted to 64-bit before the request.
    """
    # Fixed: collections.Iterable was removed in Python 3.10;
    # collections.abc.Iterable is the supported home since 3.3.
    from collections.abc import Iterable

    if not isinstance(steamids, Iterable):
        steamids = [steamids]
    converted_ids = list(map(convert_to_64_bit, filter(lambda x: x is not None, steamids)))
    if 'steamids' not in kwargs:
        kwargs['steamids'] = converted_ids
    url = self.__build_url(urls.GET_PLAYER_SUMMARIES, **kwargs)
    req = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if not self.__check_http_err(req.status_code):
        return response.build(req, url, self.raw_mode)
Returns a dictionary containing player summaries
57,579
def get_heroes(self, **kwargs):
    """Return a dictionary of in-game heroes, used to parse ids into localised names."""
    url = self.__build_url(urls.GET_HEROES, language=self.language, **kwargs)
    result = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if not self.__check_http_err(result.status_code):
        return response.build(result, url, self.raw_mode)
Returns a dictionary of in - game heroes used to parse ids into localised names
57,580
def get_tournament_prize_pool(self, leagueid=None, **kwargs):
    """Return a dictionary that includes community-funded tournament prize pools."""
    kwargs.setdefault('leagueid', leagueid)
    url = self.__build_url(urls.GET_TOURNAMENT_PRIZE_POOL, **kwargs)
    result = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if not self.__check_http_err(result.status_code):
        return response.build(result, url, self.raw_mode)
Returns a dictionary that includes community funded tournament prize pools
57,581
def get_top_live_games(self, partner='', **kwargs):
    """Return a dictionary that includes top MMR live games."""
    kwargs.setdefault('partner', partner)
    url = self.__build_url(urls.GET_TOP_LIVE_GAME, **kwargs)
    result = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if not self.__check_http_err(result.status_code):
        return response.build(result, url, self.raw_mode)
Returns a dictionary that includes top MMR live games
57,582
def __build_url(self, api_call, **kwargs):
    """Assemble the full API query URL for *api_call*.

    The API key is always injected; language and format fall back to the
    client's defaults unless the caller overrode them.
    """
    kwargs['key'] = self.api_key
    kwargs.setdefault('language', self.language)
    kwargs.setdefault('format', self.__format)
    return "{0}{1}?{2}".format(urls.BASE_URL, api_call, urlencode(kwargs))
Builds the api query
57,583
def __check_http_err(self, status_code):
    """Raise on authentication (403) or timeout (503) errors; otherwise return False."""
    if status_code == 403:
        raise exceptions.APIAuthenticationError(self.api_key)
    if status_code == 503:
        raise exceptions.APITimeoutError()
    return False
Raises an exception if we get a http error
57,584
def item_id(response):
    """Annotate each player's items with localized names.

    Names become available as item_0_name, item_1_name, ..., item_5_name.
    Slots whose id is unknown are left without a *_name key, matching the
    original behavior.
    """
    slot_keys = ['item_0', 'item_1', 'item_2', 'item_3', 'item_4', 'item_5']
    name_keys = ['item_0_name', 'item_1_name', 'item_2_name', 'item_3_name',
                 'item_4_name', 'item_5_name']
    # Build the id -> localized-name map once instead of rescanning the item
    # list for every slot of every player (was O(players * slots * items)).
    id_to_name = {item['id']: item['localized_name'] for item in items['items']}
    for player in response['players']:
        for slot_key, name_key in zip(slot_keys, name_keys):
            item_identifier = player[slot_key]
            if item_identifier in id_to_name:
                player[name_key] = id_to_name[item_identifier]
    return response
Parse the item ids; the localized names will be available as item_0_name, item_1_name, item_2_name, and so on
57,585
def get_reviews(obj):
    """Return the Review queryset attached to *obj* via its content type."""
    content_type = ContentType.objects.get_for_model(obj)
    return models.Review.objects.filter(content_type=content_type, object_id=obj.id)
Simply returns the reviews for an object .
57,586
def get_review_average(obj):
    """Return the mean of all review averages for *obj*, or False when there
    are no reviews or the accumulated total is not positive."""
    reviews = get_reviews(obj)
    if not reviews:
        return False
    total = 0
    for review in reviews:
        # Only count reviews that actually have a (truthy) average.
        if review.get_average_rating():
            total += review.get_average_rating()
    if total > 0:
        return total / reviews.count()
    return False
Returns the review average for an object .
57,587
def render_category_averages(obj, normalize_to=100):
    """Build a template context with per-category rating averages for *obj*.

    :param normalize_to: Scale each review's category averages to this value.
    :returns: dict with 'reviewed_item' and 'category_averages' keys.
    """
    context = {'reviewed_item': obj}
    ctype = ContentType.objects.get_for_model(obj)
    reviews = models.Review.objects.filter(content_type=ctype, object_id=obj.id)

    # First pass: sum each category's average across all reviews.
    category_averages = {}
    for review in reviews:
        review_category_averages = review.get_category_averages(normalize_to)
        if review_category_averages:
            for category, average in review_category_averages.items():
                if category not in category_averages:
                    category_averages[category] = review_category_averages[category]
                else:
                    category_averages[category] += review_category_averages[category]

    if reviews and category_averages:
        # Second pass: divide each sum by the number of non-empty ratings
        # recorded for that category on this object.
        for category, average in category_averages.items():
            category_averages[category] = category_averages[category] / models.Rating.objects.filter(
                category=category,
                value__isnull=False,
                review__content_type=ctype,
                review__object_id=obj.id).exclude(value='').count()
    else:
        # No usable data: expose a 0.0 average for every counting category.
        category_averages = {}
        for category in models.RatingCategory.objects.filter(counts_for_average=True):
            category_averages[category] = 0.0

    context.update({'category_averages': category_averages})
    return context
Renders all the sub - averages for each category .
57,588
def total_review_average(obj, normalize_to=100):
    """Return the average rating across all reviews of *obj* (0 when none)."""
    ctype = ContentType.objects.get_for_model(obj)
    reviews = models.Review.objects.filter(content_type=ctype, object_id=obj.id)
    total_average = sum(review.get_average_rating(normalize_to) for review in reviews)
    if reviews:
        total_average /= reviews.count()
    return total_average
Returns the average for all reviews of the given object .
57,589
def user_has_reviewed(obj, user):
    """Return True if *user* has already reviewed *obj*."""
    ctype = ContentType.objects.get_for_model(obj)
    try:
        # .get() is kept (rather than .exists()) to preserve the original's
        # behavior of raising on duplicate reviews.
        models.Review.objects.get(user=user, content_type=ctype, object_id=obj.id)
    except models.Review.DoesNotExist:
        return False
    return True
Returns True if the user has already reviewed the object .
57,590
def str_to_bytes(value: str, expected_length: int) -> bytes:
    """Encode *value* as ASCII bytes after validating length and Base32 charset.

    :raises ValueError: On wrong length, non-ASCII input, or characters
                        outside the Base32 alphabet.
    """
    length = len(value)
    if length != expected_length:
        raise ValueError('Expects {} characters for decoding; got {}'.format(expected_length, length))
    try:
        encoded = value.encode('ascii')
    except UnicodeEncodeError as ex:
        raise ValueError('Expects value that can be encoded in ASCII charset: {}'.format(ex))
    # Bind the lookup table locally; any entry above 31 marks an invalid symbol.
    lookup = DECODING
    for byte in encoded:
        if lookup[byte] > 31:
            raise ValueError('Non-base32 character found: "{}"'.format(chr(byte)))
    return encoded
Convert the given string to bytes and validate it is within the Base32 character set .
57,591
def package_version():
    """Get the package version via Git tag and write it back to version.py."""
    path = os.path.join(os.path.dirname(__file__), 'version.py')
    current = read_version(path)
    write_version(path, current)
    return current
Get the package version via Git Tag .
57,592
def synchronized(*args):
    """Decorator serializing concurrent calls to a function.

    Supports both bare usage (``@synchronized``, using the module-wide
    default lock) and parametrized usage (``@synchronized(lock)``).
    """
    if callable(args[0]):
        # Bare form: args[0] is the decorated function itself.
        return decorate_synchronized(args[0], _synchronized_lock)

    # Parametrized form: args[0] is the lock to use.
    def wrap(function):
        return decorate_synchronized(function, args[0])
    return wrap
A synchronized function prevents two or more callers to interleave its execution preventing race conditions .
57,593
def worker_thread(context):
    """The worker thread routines.

    Optionally runs the pool initializer, then consumes tasks until the
    task source is exhausted, acknowledging each one on the queue.
    """
    params = context.worker_parameters

    if params.initializer is not None:
        if not run_initializer(params.initializer, params.initargs):
            # Initialization failed: mark the pool broken and bail out.
            context.state = ERROR
            return

    for task in get_next_task(context, params.max_tasks):
        execute_next_task(task)
        context.task_queue.task_done()
The worker thread routines .
57,594
def stop_process(process):
    """Do its best to stop the process.

    First asks politely with ``terminate()`` and waits up to 3 seconds;
    if the process survives, escalates to SIGKILL where available
    (everywhere but Windows).

    Raises:
        RuntimeError: if the process could not be terminated.
    """
    process.terminate()
    process.join(3)

    if process.is_alive() and os.name != 'nt':
        try:
            os.kill(process.pid, signal.SIGKILL)
            process.join()
        except OSError:
            # Process vanished between is_alive() and kill(): best effort done.
            return

    if process.is_alive():
        # BUG FIX: report the PID of the process that refused to die
        # (process.pid), not the PID of the caller (os.getpid()).
        raise RuntimeError("Unable to terminate PID %d" % process.pid)
Does its best to stop the process .
57,595
def send_result(pipe, data):
    """Send result handling pickling and communication errors.

    When *data* cannot be pickled, a RemoteException carrying the error
    and its traceback is sent instead.
    """
    try:
        pipe.send(data)
    except (pickle.PicklingError, TypeError) as error:
        trace = format_exc()
        error.traceback = trace
        pipe.send(RemoteException(error, trace))
Send result handling pickling and communication errors .
57,596
def process(*args, **kwargs):
    """Runs the decorated function in a concurrent process,
    taking care of the result and error management.

    Supports both bare usage (``@process``) and parametrized usage
    (``@process(timeout=...)``).

    Raises:
        TypeError: if *timeout* is neither None nor a number.
    """
    timeout = kwargs.get('timeout')

    # Bare decorator: the function itself is the sole positional argument.
    if len(args) == 1 and not kwargs and callable(args[0]):
        return _process_wrapper(args[0], timeout)

    # Parametrized decorator: validate options before wrapping.
    if timeout is not None and not isinstance(timeout, (int, float)):
        raise TypeError('Timeout expected to be None or integer or float')

    def decorating_function(function):
        return _process_wrapper(function, timeout)

    return decorating_function
Runs the decorated function in a concurrent process taking care of the result and error management .
57,597
def _worker_handler(future, worker, pipe, timeout):
    """Worker lifecycle manager.

    Waits for the worker's outcome, resolves the future accordingly and
    makes sure the worker process is gone afterwards.
    """
    outcome = _get_result(future, pipe, timeout)

    if not isinstance(outcome, BaseException):
        future.set_result(outcome)
    else:
        if isinstance(outcome, ProcessExpired):
            # Attach the exit code so callers can inspect how it died.
            outcome.exitcode = worker.exitcode
        future.set_exception(outcome)

    if worker.is_alive():
        stop_process(worker)
Worker lifecycle manager .
57,598
def _function_handler(function, args, kwargs, pipe):
    """Run *function* in the child process and ship its outcome back
    through *pipe*.
    """
    # Interrupt handling belongs to the parent; ignore SIGINT here.
    signal.signal(signal.SIGINT, signal.SIG_IGN)

    send_result(pipe, process_execute(function, *args, **kwargs))
Runs the actual function in a separate process and sends its result back through the pipe.
57,599
def _get_result(future, pipe, timeout):
    """Wait for the worker's result on *pipe*.

    Polls the pipe in SLEEP_UNIT increments so a timeout or a cancelled
    future is noticed while waiting. Returns whatever the worker sent,
    or an exception object describing why no result is available;
    exceptions are returned, never raised.
    """
    # Accumulates elapsed wait time: each next() adds one SLEEP_UNIT,
    # matching the poll interval below.
    counter = count(step=SLEEP_UNIT)

    try:
        while not pipe.poll(SLEEP_UNIT):
            if timeout is not None and next(counter) >= timeout:
                return TimeoutError('Task Timeout', timeout)
            elif future.cancelled():
                return CancelledError()

        return pipe.recv()
    except (EOFError, OSError):
        # The other end of the pipe is gone: the worker died abnormally.
        return ProcessExpired('Abnormal termination')
    except Exception as error:
        # Deserialization errors etc. are handed to the caller as values.
        return error
Waits for result and handles communication errors .