| idx (int64, 0 to 63k) | question (string, lengths 61 to 4.03k) | target (string, lengths 6 to 1.23k) |
|---|---|---|
| 6,300 | `def _get_showcase_dataset_dict(self, dataset): if isinstance(dataset, hdx.data.dataset.Dataset) or isinstance(dataset, dict): if 'id' not in dataset: dataset = hdx.data.dataset.Dataset.read_from_hdx(dataset['name']) dataset = dataset['id'] elif not isinstance(dataset, str): raise hdx.data.hdxobject.HDXError('Type %s cannot be added as a dataset!' % type(dataset).__name__) if is_valid_uuid(dataset) is False: raise hdx.data.hdxobject.HDXError('%s is not a valid dataset id!' % dataset) return {'showcase_id': self.data['id'], 'package_id': dataset}` | Get showcase dataset dict. |
| 6,301 | `def add_dataset(self, dataset, datasets_to_check=None): showcase_dataset = self._get_showcase_dataset_dict(dataset) if datasets_to_check is None: datasets_to_check = self.get_datasets() for dataset in datasets_to_check: if showcase_dataset['package_id'] == dataset['id']: return False self._write_to_hdx('associate', showcase_dataset, 'package_id') return True` | Add a dataset. |
| 6,302 | `def add_datasets(self, datasets, datasets_to_check=None): if datasets_to_check is None: datasets_to_check = self.get_datasets() alldatasetsadded = True for dataset in datasets: if not self.add_dataset(dataset, datasets_to_check=datasets_to_check): alldatasetsadded = False return alldatasetsadded` | Add multiple datasets. |
| 6,303 | `def join(self, joiner, formatter=lambda s, t: t.format(s), template="{}"): return ww.s(joiner).join(self, formatter, template)` | Join values and convert to string. |
| 6,304 | `def append(self, *values): for value in values: list.append(self, value) return self` | Append values at the end of the list. |
| 6,305 | `def extend(self, *iterables): for value in iterables: list.extend(self, value) return self` | Add all values of all iterables at the end of the list. (A chaining sketch for rows 6,304 and 6,305 follows the table.) |
| 6,306 | `def normalize_cell_value(value): if isinstance(value, dict) or isinstance(value, list): return json.dumps(value) return value` | Process value for writing into a cell. |
| 6,307 | `def get_addresses_from_input_file(input_file_name): mode = 'r' if sys.version_info[0] < 3: mode = 'rb' with io.open(input_file_name, mode) as input_file: reader = csv.reader(input_file, delimiter=',', quotechar='"') addresses = list(map(tuple, reader)) if len(addresses) == 0: raise Exception('No addresses found in input file') header_columns = list(column.lower() for column in addresses.pop(0)) try: address_index = header_columns.index('address') zipcode_index = header_columns.index('zipcode') except ValueError: raise Exception() return list((row[address_index], row[zipcode_index]) for row in addresses)` | Read addresses from the input file into a list of tuples. This only supports 'address' and 'zipcode' headers. |
| 6,308 | `def get_identifiers_from_input_file(input_file_name): valid_identifiers = ['address', 'zipcode', 'unit', 'city', 'state', 'slug', 'block_id', 'msa', 'num_bins', 'property_type', 'client_value', 'client_value_sqft', 'meta'] mode = 'r' if sys.version_info[0] < 3: mode = 'rb' with io.open(input_file_name, mode) as input_file: result = [{identifier: val for identifier, val in list(row.items()) if identifier in valid_identifiers} for row in csv.DictReader(input_file, skipinitialspace=True)] return result` | Read identifiers from the input file into a list of dicts, with the header row values as keys and the rest of the rows as values. |
| 6,309 | `def delete_host_from_segment(ipaddress, networkaddress, auth, url): host_id = get_host_id(ipaddress, networkaddress, auth, url) remove_scope_ip(host_id, auth.creds, auth.url)` | Function to abstract deleting a host from a network segment. |
| 6,310 | `def generate_signature(method, version, endpoint, date, rel_url, content_type, content, access_key, secret_key, hash_type): hash_type = hash_type hostname = endpoint._val.netloc if version >= 'v4.20181215': content = b'' else: if content_type.startswith('multipart/'): content = b'' body_hash = hashlib.new(hash_type, content).hexdigest() sign_str = '{}\n{}\n{}\nhost:{}\ncontent-type:{}\nx-backendai-version:{}\n{}'.format(method.upper(), rel_url, date.isoformat(), hostname, content_type.lower(), version, body_hash) sign_bytes = sign_str.encode() sign_key = hmac.new(secret_key.encode(), date.strftime('%Y%m%d').encode(), hash_type).digest() sign_key = hmac.new(sign_key, hostname.encode(), hash_type).digest() signature = hmac.new(sign_key, sign_bytes, hash_type).hexdigest() headers = {'Authorization': 'BackendAI signMethod=HMAC-{}, credential={}:{}'.format(hash_type.upper(), access_key, signature)} return headers, signature` | Generates the API request signature from the given parameters. (A key-derivation sketch follows the table.) |
| 6,311 | `async def list_with_limit(cls, limit, offset, status: str = 'ALIVE', fields: Iterable[str] = None) -> Sequence[dict]: if fields is None: fields = ('id', 'addr', 'status', 'first_contact', 'mem_slots', 'cpu_slots', 'gpu_slots') q = 'query($limit: Int!, $offset: Int!, $status: String) {' ' agent_list(limit: $limit, offset: $offset, status: $status) {' ' items { $fields }' ' total_count' ' }' '}' q = q.replace('$fields', ' '.join(fields)) variables = {'limit': limit, 'offset': offset, 'status': status} rqst = Request(cls.session, 'POST', '/admin/graphql') rqst.set_json({'query': q, 'variables': variables}) async with rqst.fetch() as resp: data = await resp.json() return data['agent_list']` | Fetches the list of agents with the given status, with limit and offset for pagination. |
| 6,312 | `def set_content(self, value: RequestContent, *, content_type: str = None): assert self._attached_files is None, 'cannot set content because you already attached files.' guessed_content_type = 'application/octet-stream' if value is None: guessed_content_type = 'text/plain' self._content = b'' elif isinstance(value, str): guessed_content_type = 'text/plain' self._content = value.encode('utf-8') else: guessed_content_type = 'application/octet-stream' self._content = value self.content_type = (content_type if content_type is not None else guessed_content_type)` | Sets the content of the request. |
| 6,313 | `def attach_files(self, files: Sequence[AttachedFile]): assert not self._content, 'content must be empty to attach files.' self.content_type = 'multipart/form-data' self._attached_files = files` | Attach a list of files represented as AttachedFile. |
| 6,314 | `def _sign(self, rel_url, access_key=None, secret_key=None, hash_type=None): if access_key is None: access_key = self.config.access_key if secret_key is None: secret_key = self.config.secret_key if hash_type is None: hash_type = self.config.hash_type hdrs, _ = generate_signature(self.method, self.config.version, self.config.endpoint, self.date, str(rel_url), self.content_type, self._content, access_key, secret_key, hash_type) self.headers.update(hdrs)` | Calculates the signature of the given request and adds the Authorization HTTP header. It should be called at the very end of request preparation, before sending the request to the server. |
| 6,315 | `def fetch(self, **kwargs) -> 'FetchContextManager': assert self.method in self._allowed_methods, 'Disallowed HTTP method: {}'.format(self.method) self.date = datetime.now(tzutc()) self.headers['Date'] = self.date.isoformat() if self.content_type is not None: self.headers['Content-Type'] = self.content_type full_url = self._build_url() self._sign(full_url.relative()) rqst_ctx = self.session.aiohttp_session.request(self.method, str(full_url), data=self._pack_content(), timeout=_default_request_timeout, headers=self.headers) return FetchContextManager(self.session, rqst_ctx, **kwargs)` | Sends the request to the server and reads the response. |
| 6,316 | `def connect_websocket(self, **kwargs) -> 'WebSocketContextManager': assert isinstance(self.session, AsyncSession), 'Cannot use websockets with sessions in the synchronous mode' assert self.method == 'GET', 'Invalid websocket method' self.date = datetime.now(tzutc()) self.headers['Date'] = self.date.isoformat() self.content_type = 'application/octet-stream' full_url = self._build_url() self._sign(full_url.relative()) ws_ctx = self.session.aiohttp_session.ws_connect(str(full_url), autoping=True, heartbeat=30.0, headers=self.headers) return WebSocketContextManager(self.session, ws_ctx, **kwargs)` | Creates a WebSocket connection. |
| 6,317 | `def process_json_response(self, response): response_json = response.json() code_key = "code" if code_key in response_json and response_json[code_key] != constants.HTTP_CODE_OK: code = response_json[code_key] message = response_json if "message" in response_json: message = response_json["message"] elif "code_description" in response_json: message = response_json["code_description"] if code == constants.HTTP_FORBIDDEN: raise housecanary.exceptions.UnauthorizedException(code, message) if code == constants.HTTP_TOO_MANY_REQUESTS: raise housecanary.exceptions.RateLimitException(code, message, response) else: raise housecanary.exceptions.RequestException(code, message) request_url = response.request.url endpoint_name = self._parse_endpoint_name_from_url(request_url) return Response.create(endpoint_name, response_json, response)` | For a JSON response, check if there was any error and throw an exception. Otherwise create a housecanary.response.Response. |
| 6,318 | `def start_watcher_thread(self): watcher_thread = threading.Thread(target=self.run_watcher) if self._reload_mode == self.RELOAD_MODE_V_SPAWN_WAIT: daemon = False else: daemon = True watcher_thread.setDaemon(daemon) watcher_thread.start() return watcher_thread` | Start watcher thread. |
| 6,319 | `def run_watcher(self): observer = Observer() observer.start() watche_obj_map = {} while not self._watcher_to_stop: old_watch_path_s = set(watche_obj_map) new_watch_path_s = self._find_watch_paths() for new_watch_path in new_watch_path_s: old_watch_path_s.discard(new_watch_path) if new_watch_path not in watche_obj_map: try: watch_obj = observer.schedule(self, new_watch_path, recursive=True) watche_obj_map[new_watch_path] = watch_obj except OSError: watche_obj_map[new_watch_path] = None for old_watch_path in old_watch_path_s: watch_obj = watche_obj_map.pop(old_watch_path, None) if watch_obj is not None: observer.unschedule(watch_obj) self._watch_paths = new_watch_path_s time.sleep(self._interval)` | Watcher thread's function. |
| 6,320 | `def _find_watch_paths(self): watch_path_s = set(os.path.abspath(x) for x in sys.path) for extra_path in self._extra_paths or (): extra_dir_path = os.path.dirname(os.path.abspath(extra_path)) watch_path_s.add(extra_dir_path) for module in list(sys.modules.values()): module_path = getattr(module, '__file__', None) if module_path is not None: module_dir_path = os.path.dirname(os.path.abspath(module_path)) watch_path_s.add(module_dir_path) watch_path_s = self._find_short_paths(watch_path_s) return watch_path_s` | Find paths to watch. |
| 6,321 | `def _find_short_paths(self, paths): path_parts_s = [path.split(os.path.sep) for path in paths] root_node = {} for parts in sorted(path_parts_s, key=len, reverse=True): node = root_node for part in parts: node = node.setdefault(part, {}) node.clear() short_path_s = set() self._collect_leaf_paths(node=root_node, path_parts=(), leaf_paths=short_path_s) return short_path_s` | Find short paths of given paths. |
| 6,322 | `def _collect_leaf_paths(self, node, path_parts, leaf_paths): if not node: node_path = '/'.join(path_parts) leaf_paths.add(node_path) else: for child_path_part, child_node in node.items(): child_path_part_s = path_parts + (child_path_part,) self._collect_leaf_paths(node=child_node, path_parts=child_path_part_s, leaf_paths=leaf_paths)` | Collect paths of leaf nodes. |
| 6,323 | `def dispatch(self, event): file_path = event.src_path if file_path in self._extra_paths: self.reload() if file_path.endswith(('.pyc', '.pyo')): file_path = file_path[:-1] if file_path.endswith('.py'): file_dir = os.path.dirname(file_path) if file_dir.startswith(tuple(self._watch_paths)): self.reload()` | Dispatch file system event. |
| 6,324 | `def reload(self): reload_mode = self._reload_mode if self._reload_mode == self.RELOAD_MODE_V_EXEC: self.reload_using_exec() elif self._reload_mode == self.RELOAD_MODE_V_SPAWN_EXIT: self.reload_using_spawn_exit() elif self._reload_mode == self.RELOAD_MODE_V_SPAWN_WAIT: self.reload_using_spawn_wait() else: error_msg = 'Invalid reload mode: {}.'.format(repr(reload_mode)) raise ValueError(error_msg)` | Reload the program. |
| 6,325 | `def reload_using_exec(self): cmd_parts = [sys.executable] + sys.argv env_copy = os.environ.copy() os.execvpe(sys.executable, cmd_parts, env_copy)` | Reload the program process. |
| 6,326 | `def reload_using_spawn_exit(self): cmd_parts = [sys.executable] + sys.argv env_copy = os.environ.copy() subprocess.Popen(cmd_parts, env=env_copy, close_fds=True) if self._force_exit: os._exit(0) else: interrupt_main() self._watcher_to_stop = True sys.exit(0)` | Spawn a subprocess and exit the current process. |
| 6,327 | `def reload_using_spawn_wait(self): cmd_parts = [sys.executable] + sys.argv env_copy = os.environ.copy() interrupt_main() subprocess.call(cmd_parts, env=env_copy, close_fds=True) sys.exit(0)` | Spawn a subprocess and wait until it finishes. |
| 6,328 | `def agent(agent_id): fields = [('ID', 'id'), ('Status', 'status'), ('Region', 'region'), ('First Contact', 'first_contact'), ('CPU Usage (%)', 'cpu_cur_pct'), ('Used Memory (MiB)', 'mem_cur_bytes'), ('Total slots', 'available_slots'), ('Occupied slots', 'occupied_slots')] if is_legacy_server(): del fields[9] del fields[6] q = 'query($agent_id:String!) {' ' agent(agent_id:$agent_id) { $fields }' '}' q = q.replace('$fields', ' '.join(item[1] for item in fields)) v = {'agent_id': agent_id} with Session() as session: try: resp = session.Admin.query(q, v) except Exception as e: print_error(e) sys.exit(1) info = resp['agent'] rows = [] for name, key in fields: if key == 'mem_cur_bytes' and info[key] is not None: info[key] = round(info[key] / 2 ** 20, 1) if key in info: rows.append((name, info[key])) print(tabulate(rows, headers=('Field', 'Value')))` | Show the information about the given agent. |
| 6,329 | `def resource_policy(name): fields = [('Name', 'name'), ('Created At', 'created_at'), ('Default for Unspecified', 'default_for_unspecified'), ('Total Resource Slot', 'total_resource_slots'), ('Max Concurrent Sessions', 'max_concurrent_sessions'), ('Max Containers per Session', 'max_containers_per_session'), ('Max vFolder Count', 'max_vfolder_count'), ('Max vFolder Size', 'max_vfolder_size'), ('Idle Timeout', 'idle_timeout'), ('Allowed vFolder Hosts', 'allowed_vfolder_hosts')] with Session() as session: try: rp = session.ResourcePolicy(session.config.access_key) info = rp.info(name, fields=(item[1] for item in fields)) except Exception as e: print_error(e) sys.exit(1) rows = [] if info is None: print('No such resource policy.') sys.exit(1) for name, key in fields: rows.append((name, info[key])) print(tabulate(rows, headers=('Field', 'Value')))` | Show details about a keypair resource policy. When the name option is omitted, the resource policy for the current access_key will be returned. |
| 6,330 | `def add(name, default_for_unspecified, total_resource_slots, max_concurrent_sessions, max_containers_per_session, max_vfolder_count, max_vfolder_size, idle_timeout, allowed_vfolder_hosts): with Session() as session: try: data = session.ResourcePolicy.create(name, default_for_unspecified=default_for_unspecified, total_resource_slots=total_resource_slots, max_concurrent_sessions=max_concurrent_sessions, max_containers_per_session=max_containers_per_session, max_vfolder_count=max_vfolder_count, max_vfolder_size=max_vfolder_size, idle_timeout=idle_timeout, allowed_vfolder_hosts=allowed_vfolder_hosts) except Exception as e: print_error(e) sys.exit(1) if not data['ok']: print_fail('KeyPair Resource Policy creation has failed: {0}'.format(data['msg'])) sys.exit(1) item = data['resource_policy'] print('Keypair resource policy ' + item['name'] + ' is created.')` | Add a new keypair resource policy. |
| 6,331 | `def delete(name): with Session() as session: if input('Are you sure? (y/n): ').lower().strip()[:1] != 'y': print('Canceled.') sys.exit(1) try: data = session.ResourcePolicy.delete(name) except Exception as e: print_error(e) sys.exit(1) if not data['ok']: print_fail('KeyPair Resource Policy deletion has failed: {0}'.format(data['msg'])) sys.exit(1) print('Resource policy ' + name + ' is deleted.')` | Delete a keypair resource policy. |
| 6,332 | `def app(session_id, app, bind, port): api_session = None runner = None async def app_setup(): nonlocal api_session, runner loop = current_loop() api_session = AsyncSession() protocol = 'http' runner = ProxyRunner(api_session, session_id, app, protocol, bind, port, loop=loop) await runner.ready() print_info("A local proxy to the application \"{0}\" ".format(app) + "provided by the session \"{0}\" ".format(session_id) + "is available at: {0}://{1}:{2}".format(protocol, bind, port)) async def app_shutdown(): nonlocal api_session, runner print_info("Shutting down....") await runner.close() await api_session.close() print_info("The local proxy to \"{}\" has terminated.".format(app)) asyncio_run_forever(app_setup(), app_shutdown(), stop_signals={signal.SIGINT, signal.SIGTERM})` | Run a local proxy to a service provided by Backend.AI compute sessions. |
| 6,333 | `def logs(sess_id_or_alias): with Session() as session: try: print_wait('Retrieving container logs...') kernel = session.Kernel(sess_id_or_alias) result = kernel.get_logs().get('result') logs = result.get('logs') if 'logs' in result else '' print(logs) print_done('End of logs.') except Exception as e: print_error(e) sys.exit(1)` | Shows the output logs of a running container. |
| 6,334 | `def _wrap_key(function, args, kws): return hashlib.md5(pickle.dumps((_from_file(function) + function.__name__, args, kws))).hexdigest()` | Get the key from the function input. |
| 6,335 | `def get(key, adapter=MemoryAdapter): try: return pickle.loads(adapter().get(key)) except CacheExpiredException: return None` | Get the cache value. |
| 6,336 | `def set(key, value, timeout=-1, adapter=MemoryAdapter): if adapter(timeout=timeout).set(key, pickle.dumps(value)): return value else: return None` | Set a cache value; the timeout length must be set. |
| 6,337 | `def wrapcache(timeout=-1, adapter=MemoryAdapter): def _wrapcache(function): @wraps(function) def __wrapcache(*args, **kws): hash_key = _wrap_key(function, args, kws) try: adapter_instance = adapter() return pickle.loads(adapter_instance.get(hash_key)) except CacheExpiredException: value = function(*args, **kws) set(hash_key, value, timeout, adapter) return value return __wrapcache return _wrapcache` | The decorator to cache a function. (A simplified usage sketch follows the table.) |
| 6,338 | `def export_analytics_data_to_excel(data, output_file_name, result_info_key, identifier_keys): workbook = create_excel_workbook(data, result_info_key, identifier_keys) workbook.save(output_file_name) print('Saved Excel file to {}'.format(output_file_name))` | Creates an Excel file containing data returned by the Analytics API. |
| 6,339 | `def export_analytics_data_to_csv(data, output_folder, result_info_key, identifier_keys): workbook = create_excel_workbook(data, result_info_key, identifier_keys) suffix = '.csv' if not os.path.exists(output_folder): os.makedirs(output_folder) for worksheet in workbook.worksheets: file_name = utilities.convert_title_to_snake_case(worksheet.title) file_path = os.path.join(output_folder, file_name + suffix) mode = 'w' if sys.version_info[0] < 3: mode = 'wb' with io.open(file_path, mode) as output_file: csv_writer = csv.writer(output_file) for row in worksheet.rows: csv_writer.writerow([cell.value for cell in row]) print('Saved CSV files to {}'.format(output_folder))` | Creates CSV files containing data returned by the Analytics API. Creates one file per requested endpoint and saves it into the specified output_folder. |
| 6,340 | `def concat_excel_reports(addresses, output_file_name, endpoint, report_type, retry, api_key, api_secret, files_path): master_workbook = openpyxl.Workbook() if api_key is not None and api_secret is not None: client = ApiClient(api_key, api_secret) else: client = ApiClient() errors = [] for index, addr in enumerate(addresses): print('Processing {}'.format(addr[0])) result = _get_excel_report(client, endpoint, addr[0], addr[1], report_type, retry) if not result['success']: print('Error retrieving report for {}'.format(addr[0])) print(result['content']) errors.append({'address': addr[0], 'message': result['content']}) continue orig_wb = openpyxl.load_workbook(filename=io.BytesIO(result['content'])) _save_individual_file(orig_wb, files_path, addr[0]) for sheet_name in orig_wb.get_sheet_names(): if sheet_name in master_workbook.get_sheet_names(): master_ws = master_workbook.get_sheet_by_name(sheet_name) else: master_ws = master_workbook.create_sheet(sheet_name) orig_rows = orig_wb.get_sheet_by_name(sheet_name).rows if sheet_name == 'Summary' or sheet_name == 'Chart Data': _process_non_standard_sheet(master_ws, orig_rows, addr, index) continue _process_standard_sheet(master_ws, orig_rows, addr, index) master_workbook.remove(master_workbook.worksheets[0]) if len(errors) > 0: errors_sheet = master_workbook.create_sheet('Errors') for error_idx, error in enumerate(errors): errors_sheet.cell(row=error_idx + 1, column=1, value=error['address']) errors_sheet.cell(row=error_idx + 1, column=2, value=error['message']) adjust_column_width_workbook(master_workbook) output_file_path = os.path.join(files_path, output_file_name) master_workbook.save(output_file_path) print('Saved output to {}'.format(output_file_path))` | Creates an Excel file made by combining the Value Report or Rental Report Excel output for the provided addresses. |
| 6,341 | `def create_excel_workbook(data, result_info_key, identifier_keys): workbook = analytics_data_excel.get_excel_workbook(data, result_info_key, identifier_keys) adjust_column_width_workbook(workbook) return workbook` | Calls the analytics_data_excel module to create the Workbook. |
| 6,342 | `def adjust_column_width(worksheet): dims = {} padding = 1 for row in worksheet.rows: for cell in row: if not cell.value: continue dims[cell.column] = max(dims.get(cell.column, 0), len(str(cell.value))) for col, value in list(dims.items()): worksheet.column_dimensions[col].width = value + padding` | Adjust column width in worksheet. |
| 6,343 | `def get_ap_info(ipaddress, auth, url): get_ap_info_url = "/imcrs/wlan/apInfo/queryApBasicInfoByCondition?ipAddress=" + str(ipaddress) f_url = url + get_ap_info_url payload = None r = requests.get(f_url, auth=auth, headers=HEADERS) try: if r.status_code == 200: if len(r.text) > 0: return json.loads(r.text)['apBasicInfo'] except requests.exceptions.RequestException as e: return "Error:\n" + str(e) + " get_ap_info_all: An Error has occurred"` | Takes an IP address as input and issues a RESTFUL call to HP IMC. |
| 6,344 | `def session(sess_id_or_alias): fields = [('Session ID', 'sess_id'), ('Role', 'role'), ('Image', 'image'), ('Tag', 'tag'), ('Created At', 'created_at'), ('Terminated At', 'terminated_at'), ('Agent', 'agent'), ('Status', 'status'), ('Status Info', 'status_info'), ('Occupied Resources', 'occupied_slots'), ('CPU Used (ms)', 'cpu_used'), ('Used Memory (MiB)', 'mem_cur_bytes'), ('Max Used Memory (MiB)', 'mem_max_bytes'), ('Number of Queries', 'num_queries'), ('Network RX Bytes', 'net_rx_bytes'), ('Network TX Bytes', 'net_tx_bytes'), ('IO Read Bytes', 'io_read_bytes'), ('IO Write Bytes', 'io_write_bytes'), ('IO Max Scratch Size', 'io_max_scratch_size'), ('IO Current Scratch Size', 'io_cur_scratch_size'), ('CPU Using (%)', 'cpu_using')] if is_legacy_server(): del fields[3] q = 'query($sess_id:String) {' ' compute_session(sess_id:$sess_id) { $fields }' '}' q = q.replace('$fields', ' '.join(item[1] for item in fields)) v = {'sess_id': sess_id_or_alias} with Session() as session: try: resp = session.Admin.query(q, v) except Exception as e: print_error(e) sys.exit(1) if resp['compute_session']['sess_id'] is None: print('There is no such running compute session.') return print('Session detail:\n---------------') for i, value in enumerate(resp['compute_session'].values()): if fields[i][1] in ['mem_cur_bytes', 'mem_max_bytes']: value = round(value / 2 ** 20, 1) print(fields[i][0] + ': ' + str(value))` | Show detailed information for a running compute session. |
| 6,345 | `def get_object_errors(self): if self._object_errors is None: self._object_errors = [{str(o): o.get_errors()} for o in self.objects() if o.has_error()] return self._object_errors` | Gets a list of business error message strings for each of the requested objects that had a business error. If there was no error, returns an empty list. |
| 6,346 | `def has_object_error(self): if self._has_object_error is None: self._has_object_error = next((True for o in self.objects() if o.has_error()), False) return self._has_object_error` | Returns true if any requested object had a business logic error, otherwise returns false. |
| 6,347 | `def rate_limits(self): if not self._rate_limits: self._rate_limits = utilities.get_rate_limits(self.response) return self._rate_limits` | Returns a list of rate limit details. |
| 6,348 | `def check_imc_creds(auth, url): test_url = '/imcrs' f_url = url + test_url try: response = requests.get(f_url, auth=auth, headers=HEADERS, verify=False) return bool(response.status_code == 200) except requests.exceptions.RequestException as error: return "Error:\n" + str(error) + " test_imc_creds: An Error has occurred"` | Takes an auth class object and a URL as input, and returns True if the authentication was successful. |
| 6,349 | `def get_full_python_version(): version_part = '.'.join(str(x) for x in sys.version_info) int_width = struct.calcsize('P') * 8 int_width_part = str(int_width) + 'bit' return version_part + '.' + int_width_part` | Get full Python version. |
| 6,350 | `def get_python_path(venv_path): bin_path = get_bin_path(venv_path) program_path = os.path.join(bin_path, 'python') if sys.platform.startswith('win'): program_path = program_path + '.exe' return program_path` | Get the given virtual environment's python program path. |
| 6,351 | ``def add_options(ctx): ctx.add_option('--always', action='store_true', default=False, dest='always', help='whether always run tasks.') ctx.add_option('--check-import', action='store_true', default=False, dest='check_import', help='whether import module for dirty checking.') ctx.add_option('--venv', dest='venv', help=('virtual environment directory relative path relative to top' ' directory.')) ctx.add_option('--venv-add-version', default='1', dest='venv_add_version', type=int, metavar='0\|1', help=('whether add full Python version to virtual environment directory' ' name. E.g. `.py3.5.1.final.0.64bit`. Default is add.')) ctx.add_option('--req', default=None, dest='req_path', help='requirements file relative path relative to top directory.')`` | Add command line options. |
| 6,352 | `def add_pythonpath(path): pythonpath = os.environ.setdefault('PYTHONPATH', '') if path not in pythonpath.split(os.pathsep): pythonpath = os.environ['PYTHONPATH'] = (path + os.pathsep + pythonpath) if pythonpath else path return pythonpath` | Prepend given path to the environment variable PYTHONPATH. |
| 6,353 | `def mark_path(path): if not isinstance(path, str) or os.path.isabs(path): msg = 'Error (2D9ZA): Given path is not relative path: {0}.'.format(path) raise ValueError(msg) return _ItemWrapper(type='path', item=path)` | Wrap given path as a relative path relative to the top directory. |
| 6,354 | `def _mark_target(type, item): if type not in ('input', 'output'): msg = 'Error (7D74X): Type is not valid: {0}'.format(type) raise ValueError(msg) orig_item = item if isinstance(item, list): item_s = item else: item_s = [item] for item in item_s: if isinstance(item, str) and os.path.isabs(item): msg = ('Error (5VWOZ): Given path is not relative path: {0}.').format(item) raise ValueError(msg) return _ItemWrapper(type=type, item=orig_item)` | Wrap given item as an input or output target that should be added to a task. |
| 6,355 | `def create_node(ctx, path): _ensure_build_context(ctx) top_dir_relpath = os.path.relpath(ctx.top_dir, ctx.run_dir) node_path = os.path.join(top_dir_relpath, path) node = ctx.path.make_node(node_path) return node` | Create node for given relative path. |
| 6,356 | `def _normalize_items(ctx, items, str_to_node=False, node_to_str=False, allow_task=False): _ensure_build_context(ctx) norm_tuple_s = [] if not items: return norm_tuple_s for item in items: if isinstance(item, _ItemWrapper): wrapper_type = item.type() item = item.item() else: wrapper_type = None item = item if isinstance(item, list): real_item_s = item else: real_item_s = [item] for real_item in real_item_s: if real_item is None: continue elif isinstance(real_item, str): if (wrapper_type is not None) or str_to_node: if os.path.isabs(real_item): msg = ('Error (7MWU9): Given path is not relative path: {0}.').format(real_item) raise ValueError(msg) norm_item = create_node(ctx, real_item) if node_to_str: norm_item = norm_item.abspath() else: norm_item = real_item norm_tuple = (norm_item, wrapper_type) elif isinstance(real_item, Node): if node_to_str: real_item = real_item.abspath() norm_tuple = (real_item, wrapper_type) elif isinstance(real_item, Task): if allow_task: norm_tuple = (real_item, wrapper_type) else: msg = 'Error (6PVMG): Item type is not valid: {0}.'.format(real_item) raise ValueError(msg) else: msg = 'Error (63KUG): Item type is not valid: {0}.'.format(real_item) raise ValueError(msg) norm_tuple_s.append(norm_tuple) return norm_tuple_s` | Normalize given items. |
| 6,357 | `def update_touch_file(ctx, path, check_import=False, check_import_module=None, check_import_python=None, always=False): _ensure_build_context(ctx) print_title('Update touch file: {}'.format(path)) touch_node = create_node(ctx, path) need_run = False if not touch_node.exists() or always: need_run = True else: if check_import and check_import_module: import_stmt = 'from {} import __name__'.format(check_import_module) print_text('Check import: {}'.format(import_stmt)) if check_import_python is None: msg = ('Error (3BKFW): Python program to check import is not' ' given.') raise ValueError(msg) check_import_python, _ = _normalize_items(ctx=ctx, items=[check_import_python], node_to_str=True)[0] if not isinstance(check_import_python, str): msg = ('Error (39FQE): Given Python program to check import is' ' not string or node: {0}.').format(check_import_python) raise ValueError(msg) if not os.path.isabs(check_import_python): check_import_python = create_node(ctx, check_import_python).abspath() cmd_part_s = [check_import_python, '-c', import_stmt] print_text(_format_multi_line_command(cmd_part_s)) try: subprocess.check_output(cmd_part_s) need_run = False except Exception: need_run = True if need_run: if not touch_node.parent.exists(): touch_node.parent.mkdir() touch_node.write('{0}\n'.format(datetime.utcnow())) print_text('Updated.') else: print_text('Skipped.') print_title('Update touch file: {}'.format(path), is_end=True) return touch_node, need_run` | Update touch file at given path. |
| 6,358 | `def chain_tasks(tasks): if tasks: previous_task = None for task in tasks: if task is not None: if previous_task is not None: task.set_run_after(previous_task) previous_task = task return tasks` | Chain given tasks. Set each task to run after its previous task. |
| 6,359 | `def build_ctx(pythonpath=None): if isinstance(pythonpath, str): path_s = [pythonpath] elif isinstance(pythonpath, list): path_s = pythonpath else: path_s = None def _noarg_decorator(func): class _BuildContext(BuildContext): cmd = func.__name__ fun = func.__name__ @wraps(func) def _new_func(ctx, *args, **kwargs): if path_s: for path in path_s: if os.path.isabs(path): abs_path = path else: path_node = create_node(ctx, path) abs_path = path_node.abspath() add_pythonpath(abs_path) result = func(ctx, *args, **kwargs) return result _new_func._context_class = _BuildContext return _new_func if path_s is not None: return _noarg_decorator else: _func = pythonpath wrapper_func = _noarg_decorator(_func) return wrapper_func` | Decorator that makes the decorated function use a BuildContext instead of a Context instance. A BuildContext instance has more methods. |
| 6,360 | `def config_ctx(func): class _ConfigurationContext(ConfigurationContext): cmd = func.__name__ fun = func.__name__ func._context_class = _ConfigurationContext return func` | Decorator that makes the decorated function use a ConfigurationContext instead of a Context instance. |
| 6,361 | `def print_ctx(ctx): print_title('ctx attributes') print_text(dir(ctx)) print_title('ctx attributes', is_end=True) print_title('ctx.options') print_text(pformat(vars(ctx.options), indent=4, width=1)) print_title('ctx.options', is_end=True) if hasattr(ctx, 'env'): print_title('ctx.env') print_text(pformat(dict(ctx.env), indent=4, width=1)) print_title('ctx.env', is_end=True)` | Print given context's info. |
| 6,362 | `def virtualenv_setup(ctx, python, inputs=None, outputs=None, touch=None, check_import=False, pip_setup_file=None, pip_setup_touch=None, cache_key=None, always=False): _ensure_build_context(ctx) if pip_setup_file is None: pip_setup_task = None else: pip_setup_task = pip_setup(ctx=ctx, python=python, setup_file=pip_setup_file, touch=pip_setup_touch, check_import=check_import, always=always) if touch is None: touch_node = None else: touch_node, always = update_touch_file(ctx=ctx, path=touch, check_import=check_import, check_import_module='virtualenv', check_import_python=python, always=always) task = create_cmd_task(ctx=ctx, parts=[python, '-m', 'pip', 'install', 'virtualenv'], inputs=[pip_setup_task, inputs], outputs=[touch_node, outputs], always=always, cache_key=cache_key or (python, 'virtualenv')) return task` | Create task that sets up the virtualenv package. |
| 6,363 | `def create_venv(ctx, python, venv_path, inputs=None, outputs=None, pip_setup_file=None, pip_setup_touch=None, virtualenv_setup_touch=None, task_name=None, cache_key=None, always=False): _ensure_build_context(ctx) virtualenv_setup_task = virtualenv_setup(ctx=ctx, python=python, touch=virtualenv_setup_touch, pip_setup_file=pip_setup_file, pip_setup_touch=pip_setup_touch) venv_path_node, _ = _normalize_items(ctx=ctx, items=[venv_path], str_to_node=True)[0] task = create_cmd_task(ctx=ctx, parts=[python, '-m', 'virtualenv', venv_path_node.abspath()], inputs=[virtualenv_setup_task, inputs], outputs=[get_python_path(venv_path), get_pip_path(venv_path), outputs], always=always, task_name=task_name, cache_key=cache_key or (python, venv_path)) return task` | Create task that sets up a virtual environment. |
| 6,364 | ``def pip_ins_req(ctx, python, req_path, venv_path=None, inputs=None, outputs=None, touch=None, check_import=False, check_import_module=None, pip_setup_file=None, pip_setup_touch=None, virtualenv_setup_touch=None, always=False): _ensure_build_context(ctx) if venv_path is None: venv_python = python else: venv_python = get_python_path(venv_path) venv_python = mark_input(venv_python) if venv_path is None: pip_setup_task = pip_setup(ctx=ctx, python=python, setup_file=pip_setup_file, touch=pip_setup_touch, always=always) venv_task = None else: pip_setup_task = None venv_task = create_venv(ctx=ctx, python=python, venv_path=venv_path, outputs=[get_python_path(venv_path), get_pip_path(venv_path)], always=always, task_name='Create venv `{}`'.format(venv_path), pip_setup_file=pip_setup_file, pip_setup_touch=pip_setup_touch, virtualenv_setup_touch=virtualenv_setup_touch) if not touch: touch_node = None else: touch_node, always = update_touch_file(ctx=ctx, path=touch, check_import=check_import, check_import_module=check_import_module, check_import_python=venv_python, always=always) task = create_cmd_task(ctx=ctx, parts=[venv_python, '-m', 'pip', 'install', '-r', mark_input(req_path)], inputs=inputs, outputs=[touch_node, outputs], always=always) chain_tasks([pip_setup_task, venv_task, task]) return task`` | Create task that uses the given virtual environment's pip to set up packages listed in the given requirements file. |
| 6,365 | `def git_clean(ctx): cmd_part_s = ['git', 'clean', '-x', '-d', '-f', '-f'] print_title('git_clean') print_text(_format_multi_line_command(cmd_part_s)) proc = subprocess.Popen(cmd_part_s, cwd=ctx.top_dir) proc.wait() print_title('git_clean', is_end=True)` | Delete all files untracked by git. |
| 6,366 | `def delete_telnet_template(auth, url, template_name=None, template_id=None): try: if template_id is None: telnet_templates = get_telnet_template(auth, url) if template_name is None: template_name = telnet_template['name'] template_id = None for template in telnet_templates: if template['name'] == template_name: template_id = template['id'] f_url = url + "/imcrs/plat/res/telnet/%s/delete" % template_id response = requests.delete(f_url, auth=auth, headers=HEADERS) return response.status_code except requests.exceptions.RequestException as error: return "Error:\n" + str(error) + " delete_telnet_template: An Error has occurred"` | Takes template_name as input to issue a RESTFUL call to HP IMC, which will delete the specific telnet template from the IMC system. |
| 6,367 | `def delete_ssh_template(auth, url, template_name=None, template_id=None): try: if template_id is None: ssh_templates = get_ssh_template(auth, url) if template_name is None: template_name = ssh_template['name'] template_id = None for template in ssh_templates: if template['name'] == template_name: template_id = template['id'] f_url = url + "/imcrs/plat/res/ssh/%s/delete" % template_id response = requests.delete(f_url, auth=auth, headers=HEADERS) return response.status_code except requests.exceptions.RequestException as error: return "Error:\n" + str(error) + " delete_ssh_template: An Error has occurred"` | Takes template_name as input to issue a RESTFUL call to HP IMC, which will delete the specific ssh template from the IMC system. |
| 6,368 | `def delete_snmp_template(auth, url, template_name=None, template_id=None): try: if template_id is None: snmp_templates = get_snmp_templates(auth, url) if template_name is None: template_name = snmp_template['name'] template_id = None for template in snmp_templates: if template['name'] == template_name: template_id = template['id'] f_url = url + "/imcrs/plat/res/snmp/%s/delete" % template_id response = requests.delete(f_url, auth=auth, headers=HEADERS) return response.status_code except requests.exceptions.RequestException as error: return "Error:\n" + str(error) + " delete_snmp_template: An Error has occurred"` | Takes template_name as input to issue a RESTFUL call to HP IMC, which will delete the specific snmp template from the IMC system. |
| 6,369 | `def proxy(ctx, bind, port): app = web.Application() app.on_startup.append(startup_proxy) app.on_cleanup.append(cleanup_proxy) app.router.add_route("GET", r'/stream/{path:.*$}', websocket_handler) app.router.add_route("GET", r'/wsproxy/{path:.*$}', websocket_handler) app.router.add_route('*', r'/{path:.*$}', web_handler) if getattr(ctx.args, 'testing', False): return app web.run_app(app, host=bind, port=port)` | Run a non-encrypted, non-authorized API proxy server. Use this only for development and testing! |
| 6,370 | `def get_dev_details(ip_address, auth, url): get_dev_details_url = "/imcrs/plat/res/device?resPrivilegeFilter=false&ip=" + str(ip_address) + "&start=0&size=1000&orderBy=id&desc=false&total=false" f_url = url + get_dev_details_url response = requests.get(f_url, auth=auth, headers=HEADERS) try: if response.status_code == 200: dev_details = (json.loads(response.text)) if len(dev_details) == 0: print("Device not found") return "Device not found" elif isinstance(dev_details['device'], list): for i in dev_details['device']: if i['ip'] == ip_address: dev_details = i return dev_details elif isinstance(dev_details['device'], dict): return dev_details['device'] except requests.exceptions.RequestException as error: return "Error:\n" + str(error) + " get_dev_details: An Error has occurred"` | Takes a string input of an IP address to issue a RESTFUL call to HP IMC. |
| 6,371 | `def set_inteface_up(ifindex, auth, url, devid=None, devip=None): if devip is not None: devid = get_dev_details(devip, auth, url)['id'] set_int_up_url = "/imcrs/plat/res/device/" + str(devid) + "/interface/" + str(ifindex) + "/up" f_url = url + set_int_up_url try: response = requests.put(f_url, auth=auth, headers=HEADERS) if response.status_code == 204: return response.status_code except requests.exceptions.RequestException as error: return "Error:\n" + str(error) + " set_inteface_up: An Error has occurred"` | Takes the devid and ifindex of a specific device and interface, and issues a RESTFUL call to undo shutdown of the specified interface on the target device. |
| 6,372 | `async def delete(cls, access_key: str): q = 'mutation($access_key: String!) {' ' delete_keypair(access_key: $access_key) {' ' ok msg' ' }' '}' variables = {'access_key': access_key} rqst = Request(cls.session, 'POST', '/admin/graphql') rqst.set_json({'query': q, 'variables': variables}) async with rqst.fetch() as resp: data = await resp.json() return data['delete_keypair']` | Deletes an existing keypair with the given ACCESSKEY. |
| 6,373 | `async def list(cls, user_id: Union[int, str] = None, is_active: bool = None, fields: Iterable[str] = None) -> Sequence[dict]: if fields is None: fields = ('access_key', 'secret_key', 'is_active', 'is_admin') if user_id is None: q = 'query($is_active: Boolean) {' ' keypairs(is_active: $is_active) {' ' $fields' ' }' '}' else: uid_type = 'Int!' if isinstance(user_id, int) else 'String!' q = 'query($user_id: {0}, $is_active: Boolean) {{'.format(uid_type) + ' keypairs(user_id: $user_id, is_active: $is_active) {' ' $fields' ' }' '}' q = q.replace('$fields', ' '.join(fields)) variables = {'is_active': is_active} if user_id is not None: variables['user_id'] = user_id rqst = Request(cls.session, 'POST', '/admin/graphql') rqst.set_json({'query': q, 'variables': variables}) async with rqst.fetch() as resp: data = await resp.json() return data['keypairs']` | Lists the keypairs. You need an admin privilege for this operation. |
| 6,374 | `async def info(self, fields: Iterable[str] = None) -> dict: if fields is None: fields = ('access_key', 'secret_key', 'is_active', 'is_admin') q = 'query {' ' keypair {' ' $fields' ' }' '}' q = q.replace('$fields', ' '.join(fields)) rqst = Request(self.session, 'POST', '/admin/graphql') rqst.set_json({'query': q}) async with rqst.fetch() as resp: data = await resp.json() return data['keypair']` | Returns the keypair's information such as resource limits. |
| 6,375 | `async def activate(cls, access_key: str) -> dict: q = 'mutation($access_key: String!, $input: ModifyKeyPairInput!) {' + ' modify_keypair(access_key: $access_key, props: $input) {' ' ok msg' ' }' '}' variables = {'access_key': access_key, 'input': {'is_active': True, 'is_admin': None, 'resource_policy': None, 'rate_limit': None}} rqst = Request(cls.session, 'POST', '/admin/graphql') rqst.set_json({'query': q, 'variables': variables}) async with rqst.fetch() as resp: data = await resp.json() return data['modify_keypair']` | Activates this keypair. You need an admin privilege for this operation. |
| 6,376 | `async def deactivate(cls, access_key: str) -> dict: q = 'mutation($access_key: String!, $input: ModifyKeyPairInput!) {' + ' modify_keypair(access_key: $access_key, props: $input) {' ' ok msg' ' }' '}' variables = {'access_key': access_key, 'input': {'is_active': False, 'is_admin': None, 'resource_policy': None, 'rate_limit': None}} rqst = Request(cls.session, 'POST', '/admin/graphql') rqst.set_json({'query': q, 'variables': variables}) async with rqst.fetch() as resp: data = await resp.json() return data['modify_keypair']` | Deactivates this keypair. Deactivated keypairs cannot make any API requests unless activated again by an administrator. You need an admin privilege for this operation. |
| 6,377 | `async def check_presets(cls): rqst = Request(cls.session, 'POST', '/resource/check-presets') async with rqst.fetch() as resp: return await resp.json()` | Lists all resource presets in the current scaling group with additional information. |
| 6,378 | `def set_imc_creds(h_url=None, imc_server=None, imc_port=None, imc_user=None, imc_pw=None): global auth, url if h_url is None: imc_protocol = input("What protocol would you like to use to connect to the IMC server: \n Press 1 for HTTP: \n Press 2 for HTTPS:") if imc_protocol == "1": h_url = 'http://' else: h_url = 'https://' imc_server = input("What is the ip address of the IMC server?") imc_port = input("What is the port number of the IMC server?") imc_user = input("What is the username of the IMC eAPI user?") imc_pw = input() url = h_url + imc_server + ":" + imc_port auth = requests.auth.HTTPDigestAuth(imc_user, imc_pw) test_url = '/imcrs' f_url = url + test_url try: r = requests.get(f_url, auth=auth, headers=headers, verify=False) print(r.status_code) return auth except requests.exceptions.RequestException as e: print("Error:\n" + str(e)) print("\n\nThe IMC server address is invalid. Please try again\n\n") set_imc_creds() if r.status_code != 200: print("Error: \n Your credentials are invalid. Please try again\n\n") set_imc_creds() else: print("You've successfully accessed the IMC eAPI")` | This function prompts the user for IMC server information and credentials, and stores the values in the url and auth global variables. |
| 6,379 | `def get_version(root): version_json = os.path.join(root, 'version.json') if os.path.exists(version_json): with open(version_json, 'r') as version_json_file: return json.load(version_json_file) return None` | Load and return the contents of version.json. |
| 6,380 | `def fetch(self, endpoint_name, identifier_input, query_params=None): endpoint_url = constants.URL_PREFIX + "/" + self._version + "/" + endpoint_name if query_params is None: query_params = {} if len(identifier_input) == 1: query_params.update(identifier_input[0]) return self._request_client.get(endpoint_url, query_params) return self._request_client.post(endpoint_url, identifier_input, query_params)` | Calls this instance's request_client's post method with the specified component endpoint. |
| 6,381 | `def fetch_synchronous(self, endpoint_name, query_params=None): endpoint_url = constants.URL_PREFIX + "/" + self._version + "/" + endpoint_name if query_params is None: query_params = {} return self._request_client.get(endpoint_url, query_params)` | Calls this instance's request_client's get method with the specified component endpoint. |
| 6,382 | `def get_identifier_input(self, identifier_data): identifier_input = [] if isinstance(identifier_data, list) and len(identifier_data) > 0: for address in identifier_data: identifier_input.append(self._convert_to_identifier_json(address)) else: identifier_input.append(self._convert_to_identifier_json(identifier_data)) return identifier_input` | Convert the various formats of input identifier_data into the proper JSON format expected by the ApiClient fetch method, which is a list of dicts. |
| 6,383 | `def fetch_identifier_component(self, endpoint_name, identifier_data, query_params=None): if query_params is None: query_params = {} identifier_input = self.get_identifier_input(identifier_data) return self._api_client.fetch(endpoint_name, identifier_input, query_params)` | Common method for handling parameters before passing to api_client. |
| 6,384 | `def _convert_to_identifier_json(self, address_data): if isinstance(address_data, str): return {"slug": address_data} if isinstance(address_data, tuple) and len(address_data) > 0: address_json = {"address": address_data[0]} if len(address_data) > 1: address_json["zipcode"] = address_data[1] if len(address_data) > 2: address_json["meta"] = address_data[2] return address_json if isinstance(address_data, dict): allowed_keys = ["address", "zipcode", "unit", "city", "state", "slug", "meta", "client_value", "client_value_sqft"] for key in address_data: if key not in allowed_keys: msg = "Key in address input not allowed: " + key raise housecanary.exceptions.InvalidInputException(msg) if "address" in address_data or "slug" in address_data: return address_data msg = ("Input is invalid. Must be a list of (address, zipcode) tuples, or a dict or list" " of dicts with each item containing at least an 'address' or 'slug' key.") raise housecanary.exceptions.InvalidInputException(msg)` | Convert input address data into JSON format. |
| 6,385 | `def value_report(self, address, zipcode, report_type="full", format_type="json"): query_params = {"report_type": report_type, "format": format_type, "address": address, "zipcode": zipcode} return self._api_client.fetch_synchronous("property/value_report", query_params)` | Call the value_report component. |
| 6,386 | `def rental_report(self, address, zipcode, format_type="json"): query_params = {"format": format_type, "address": address, "zipcode": zipcode} return self._api_client.fetch_synchronous("property/rental_report", query_params)` | Call the rental_report component. |
| 6,387 | `def component_mget(self, zip_data, components): if not isinstance(components, list): print("Components param must be a list") return query_params = {"components": ",".join(components)} return self.fetch_identifier_component("zip/component_mget", zip_data, query_params)` | Call the zip component_mget endpoint. |
| 6,388 | `def version(request): version_json = import_string(version_callback)(settings.BASE_DIR) if version_json is None: return HttpResponseNotFound('version.json not found') else: return JsonResponse(version_json)` | Returns the contents of version.json or a 404. |
| 6,389 | `def heartbeat(request): all_checks = checks.registry.registry.get_checks(include_deployment_checks=not settings.DEBUG) details = {} statuses = {} level = 0 for check in all_checks: detail = heartbeat_check_detail(check) statuses[check.__name__] = detail['status'] level = max(level, detail['level']) if detail['level'] > 0: details[check.__name__] = detail if level < checks.messages.WARNING: status_code = 200 heartbeat_passed.send(sender=heartbeat, level=level) else: status_code = 500 heartbeat_failed.send(sender=heartbeat, level=level) payload = {'status': level_to_text(level), 'checks': statuses, 'details': details} return JsonResponse(payload, status=status_code)` | Runs all the Django checks and returns a JsonResponse with either a status code of 200 or 500, depending on the results of the checks. |
| 6,390 | `def _create_component_results(json_data, result_key): component_results = [] for key, value in list(json_data.items()): if key not in [result_key, "meta"]: component_result = ComponentResult(key, value["result"], value["api_code"], value["api_code_description"]) component_results.append(component_result) return component_results` | Returns a list of ComponentResult from the json_data. |
| 6,391 | `def has_error(self): return next((True for cr in self.component_results if cr.has_error()), False)` | Returns whether there was a business logic error when fetching data for any components for this property. |
| 6,392 | `def get_errors(self): return [{cr.component_name: cr.get_error()} for cr in self.component_results if cr.has_error()]` | If there were any business errors fetching data for this property, returns the error messages. |
| 6,393 | `def create_from_json(cls, json_data): prop = Property() address_info = json_data["address_info"] prop.address = address_info["address"] prop.block_id = address_info["block_id"] prop.zipcode = address_info["zipcode"] prop.zipcode_plus4 = address_info["zipcode_plus4"] prop.address_full = address_info["address_full"] prop.city = address_info["city"] prop.county_fips = address_info["county_fips"] prop.geo_precision = address_info["geo_precision"] prop.lat = address_info["lat"] prop.lng = address_info["lng"] prop.slug = address_info["slug"] prop.state = address_info["state"] prop.unit = address_info["unit"] prop.meta = None if "meta" in json_data: prop.meta = json_data["meta"] prop.component_results = _create_component_results(json_data, "address_info") return prop` | Deserialize property JSON data into a Property object. |
| 6,394 | `def create_from_json(cls, json_data): block = Block() block_info = json_data["block_info"] block.block_id = block_info["block_id"] block.num_bins = block_info["num_bins"] if "num_bins" in block_info else None block.property_type = block_info["property_type"] if "property_type" in block_info else None block.meta = json_data["meta"] if "meta" in json_data else None block.component_results = _create_component_results(json_data, "block_info") return block` | Deserialize block JSON data into a Block object. |
| 6,395 | `def create_from_json(cls, json_data): zipcode = ZipCode() zipcode.zipcode = json_data["zipcode_info"]["zipcode"] zipcode.meta = json_data["meta"] if "meta" in json_data else None zipcode.component_results = _create_component_results(json_data, "zipcode_info") return zipcode` | Deserialize zipcode JSON data into a ZipCode object. |
| 6,396 | `def create_from_json(cls, json_data): msa = Msa() msa.msa = json_data["msa_info"]["msa"] msa.meta = json_data["meta"] if "meta" in json_data else None msa.component_results = _create_component_results(json_data, "msa_info") return msa` | Deserialize msa JSON data into a Msa object. |
| 6,397 | `def starts_when(iterable, condition): if not callable(condition): cond_value = condition def condition(x): return x == cond_value return itertools.dropwhile(lambda x: not condition(x), iterable)` | Start yielding items when a condition arises. |
| 6,398 | `def stops_when(iterable, condition): if not callable(condition): cond_value = condition def condition(x): return x == cond_value return itertools.takewhile(lambda x: not condition(x), iterable)` | Stop yielding items when a condition arises. (A demo of rows 6,397 and 6,398 follows the table.) |
| 6,399 | `def skip_duplicates(iterable, key=None, fingerprints=()): fingerprints = fingerprints or set() fingerprint = None try: if key is None: for x in iterable: if x not in fingerprints: yield x fingerprints.add(x) else: for x in iterable: fingerprint = key(x) if fingerprint not in fingerprints: yield x fingerprints.add(fingerprint) except TypeError: try: hash(fingerprint) except TypeError: raise TypeError("The 'key' function returned a non hashable object of type '%s' when receiving '%s'. Make sure this function always returns a hashable object. Hint: immutable primitives like int, str or tuple, are hashable while dict, set and list are not." % (type(fingerprint), x)) else: raise` | Returns a generator that will yield all objects from iterable, skipping duplicates. (A usage sketch follows the table.) |
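
A few hedged usage sketches for selected rows follow. First, rows 6,304 and 6,305 return `self` so calls can be chained. The snippet below is a minimal sketch under the assumption that the methods live on a `list` subclass; the class name `ChainableList` is hypothetical, not from the source.

```python
# Minimal sketch of the chainable-list pattern behind rows 6,304-6,305.
# The class name ChainableList is hypothetical; the originals belong to
# a larger library class.
class ChainableList(list):
    def append(self, *values):
        # list.append is called directly so the built-in behavior is kept;
        # returning self is what allows call chaining.
        for value in values:
            list.append(self, value)
        return self

    def extend(self, *iterables):
        for iterable in iterables:
            list.extend(self, iterable)
        return self

if __name__ == '__main__':
    print(ChainableList([1]).append(2, 3).extend([4, 5], [6]))
    # -> [1, 2, 3, 4, 5, 6]
```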
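Row 6,310 derives its HMAC signing key in two steps (secret keyed by the date, then by the hostname) before signing the canonical request string. Below is a self-contained sketch of just that key chain; the hostname, path, and secret are made-up placeholders, not real Backend.AI values.

```python
# Standalone sketch of the two-step HMAC key derivation used in row 6,310.
# The secret, hostname, and path here are placeholders for illustration.
import hmac
from datetime import datetime, timezone

secret_key = 'dummy-secret'
hostname = 'api.example.com'
now = datetime.now(timezone.utc)
sign_str = 'GET\n/v4/status\n{}\nhost:{}'.format(now.isoformat(), hostname)

# Key chain: secret -> keyed by date -> keyed by hostname.
sign_key = hmac.new(secret_key.encode(), now.strftime('%Y%m%d').encode(), 'sha256').digest()
sign_key = hmac.new(sign_key, hostname.encode(), 'sha256').digest()

signature = hmac.new(sign_key, sign_str.encode(), 'sha256').hexdigest()
print(signature)
```

Scoping the key to the date and host this way means a leaked per-request key cannot be reused on another day or against another endpoint.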
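Rows 6,334 through 6,337 together form a pickle-and-hash memoization decorator. The sketch below compresses the same pattern into a self-contained form; the module-level `_cache` dict stands in for the library's `MemoryAdapter`, which is not reproduced here.

```python
# Simplified, self-contained version of the caching decorator pattern in
# rows 6,334-6,337: hash the call, try the cache, fall back to the function.
import hashlib
import pickle
from functools import wraps

_cache = {}  # stands in for the library's MemoryAdapter

def wrapcache(function):
    @wraps(function)
    def wrapper(*args, **kws):
        # Key the cache on the function name plus pickled arguments,
        # in the spirit of _wrap_key in row 6,334.
        key = hashlib.md5(pickle.dumps((function.__name__, args, kws))).hexdigest()
        if key not in _cache:
            _cache[key] = function(*args, **kws)
        return _cache[key]
    return wrapper

@wrapcache
def slow_square(n):
    return n * n

print(slow_square(4), slow_square(4))  # second call is served from _cache
```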
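Rows 6,397 and 6,398 both lift a plain value into an equality predicate and then defer to `itertools`. A runnable demonstration of the pair:

```python
# Demo of rows 6,397-6,398: a non-callable condition is wrapped into an
# equality check, then dropwhile/takewhile do the actual work.
import itertools

def starts_when(iterable, condition):
    if not callable(condition):
        cond_value = condition
        def condition(x):
            return x == cond_value
    return itertools.dropwhile(lambda x: not condition(x), iterable)

def stops_when(iterable, condition):
    if not callable(condition):
        cond_value = condition
        def condition(x):
            return x == cond_value
    return itertools.takewhile(lambda x: not condition(x), iterable)

print(list(starts_when(range(10), 7)))               # [7, 8, 9]
print(list(stops_when(range(10), lambda x: x > 5)))  # [0, 1, 2, 3, 4, 5]
```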
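Finally, row 6,399 yields items in input order while skipping anything whose fingerprint has been seen before. A short usage sketch, assuming the row's `skip_duplicates` generator is in scope:

```python
# Usage sketch for row 6,399: order-preserving deduplication, optionally
# keyed by a function. Assumes skip_duplicates from the row above is defined.
print(list(skip_duplicates([1, 2, 1, 3, 2])))                 # [1, 2, 3]
print(list(skip_duplicates(['a', 'A', 'b'], key=str.lower)))  # ['a', 'b']
```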