idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
14,100 | async def get_agents ( self , addr = True , agent_cls = None ) : return await self . menv . get_agents ( addr = True , agent_cls = None , as_coro = True ) | Get addresses of all agents in all the slave environments . |
14,101 | async def get_connections ( self , data = True ) : return await self . menv . get_connections ( data = data , as_coro = True ) | Return connections for all the agents in the slave environments . |
14,102 | def get_agents ( self , addr = True , agent_cls = None , as_coro = False ) : async def slave_task ( mgr_addr , addr = True , agent_cls = None ) : r_manager = await self . env . connect ( mgr_addr , timeout = TIMEOUT ) return await r_manager . get_agents ( addr = addr , agent_cls = agent_cls ) tasks = create_tasks ( slave_task , self . addrs , addr , agent_cls ) return run_or_coro ( tasks , as_coro ) | Get agents from the slave environments . |
14,103 | async def is_ready ( self ) : async def slave_task ( addr , timeout ) : try : r_manager = await self . env . connect ( addr , timeout = timeout ) ready = await r_manager . is_ready ( ) if not ready : return False except : return False return True if not self . env . is_ready ( ) : return False if not self . check_ready ( ) : return False rets = await create_tasks ( slave_task , self . addrs , 0.5 ) if not all ( rets ) : return False return True | Check if the multi - environment has been fully initialized . |
14,104 | async def spawn_slaves ( self , slave_addrs , slave_env_cls , slave_mgr_cls , slave_kwargs = None ) : pool , r = spawn_containers ( slave_addrs , env_cls = slave_env_cls , env_params = slave_kwargs , mgr_cls = slave_mgr_cls ) self . _pool = pool self . _r = r self . _manager_addrs = [ "{}{}" . format ( _get_base_url ( a ) , 0 ) for a in slave_addrs ] | Spawn slave environments . |
14,105 | async def _get_smallest_env ( self ) : async def slave_task ( mgr_addr ) : r_manager = await self . env . connect ( mgr_addr , timeout = TIMEOUT ) ret = await r_manager . get_agents ( addr = True ) return mgr_addr , len ( ret ) sizes = await create_tasks ( slave_task , self . addrs , flatten = False ) return sorted ( sizes , key = lambda x : x [ 1 ] ) [ 0 ] [ 0 ] | Get address of the slave environment manager with the smallest number of agents . |
14,106 | async def spawn ( self , agent_cls , * args , addr = None , ** kwargs ) : if addr is None : addr = await self . _get_smallest_env ( ) r_manager = await self . env . connect ( addr ) return await r_manager . spawn ( agent_cls , * args , ** kwargs ) | Spawn a new agent in a slave environment . |
14,107 | def get_connections ( self , data = True , as_coro = False ) : async def slave_task ( addr , data ) : r_manager = await self . env . connect ( addr ) return await r_manager . get_connections ( data ) tasks = create_tasks ( slave_task , self . addrs , data ) return run_or_coro ( tasks , as_coro ) | Return connections from all the agents in the slave environments . |
14,108 | def get_artifacts ( self , agent_name = None ) : if agent_name is not None : return [ a for a in self . artifacts if agent_name == a . creator ] return self . artifacts | Get all artifacts or all artifacts published by a specific agent . |
14,109 | async def stop_slaves ( self , timeout = 1 ) : for addr in self . addrs : try : r_manager = await self . env . connect ( addr , timeout = timeout ) await r_manager . stop ( ) except : self . _log ( logging . WARNING , "Could not stop {}" . format ( addr ) ) | Stop all the slaves by sending a stop - message to their managers . |
14,110 | def destroy ( self , folder = None , as_coro = False ) : async def _destroy ( folder ) : ret = self . save_info ( folder ) await self . stop_slaves ( ) if self . _pool is not None : self . _pool . terminate ( ) self . _pool . join ( ) await self . _env . shutdown ( as_coro = True ) return ret return run_or_coro ( _destroy ( folder ) , as_coro ) | Destroy the multiprocessing environment and its slave environments . |
14,111 | def dump_index ( self , obj ) : if isinstance ( obj , PIDNodeOrdered ) and self . _is_child ( obj ) : return obj . index ( self . context [ 'pid' ] ) else : return None | Dump the index of the child in the relation . |
14,112 | def dump_is_last ( self , obj ) : if self . _is_child ( obj ) and isinstance ( obj , PIDNodeOrdered ) : if obj . children . count ( ) > 0 : return obj . children . ordered ( 'asc' ) . all ( ) [ - 1 ] == self . context [ 'pid' ] elif obj . draft_child : return obj . draft_child == self . context [ 'pid' ] else : return True else : return None | Dump the boolean stating if the child in the relation is last . |
14,113 | def dump_type ( self , obj ) : if not isinstance ( obj . relation_type , RelationType ) : return resolve_relation_type_config ( obj . relation_type ) . name else : return obj . relation_type . name | Dump the text name of the relation . |
14,114 | def dump_children ( self , obj ) : data , errors = PIDSchema ( many = True ) . dump ( obj . children . ordered ( 'asc' ) . all ( ) ) return data | Dump the siblings of a PID . |
14,115 | def identify_window ( pid , text ) : proc = None path = None uas = Session . query ( UserApp ) . filter ( UserApp . window_text == text ) nontext = Session . query ( UserApp ) . filter ( UserApp . window_text == None ) if uas . count ( ) : proc = psutil . Process ( pid ) try : path = proc . exe ( ) except psutil . AccessDenied : path = proc . name ( ) logger . debug ( "Trying to identify app, path=%s" , path ) app = uas . filter ( UserApp . path == path ) . first ( ) if app : return app , proc if nontext . count ( ) : if proc == None : proc = psutil . Process ( pid ) path = proc . exe ( ) app = nontext . filter ( UserApp . path == path ) . first ( ) if app : return app , proc return None , None | Identify the app associated with a window . |
14,116 | def _assertCALL ( self , url , * , allow_empty = False , check_headers = True , check_status = True , expect_errors = False , name = None , method = 'get' , data = None ) : self . view = resolve ( url ) . func . cls m = getattr ( self . client , method . lower ( ) ) self . filename = self . get_response_filename ( method , name or url ) response = m ( url , data = data ) assert response . accepted_renderer payload = response . data if not allow_empty and not payload : raise ValueError ( f"View {self.view} returned and empty json. Check your test" ) if response . status_code > 299 and not expect_errors : raise ValueError ( f"View {self.view} unexpected response. {response.status_code} - {response.content}" ) if not allow_empty and response . status_code == 404 : raise ValueError ( f"View {self.view} returned 404 status code. Check your test" ) if not os . path . exists ( self . filename ) or os . environ . get ( 'API_CHECKER_RESET' , False ) : _write ( self . filename , serialize_response ( response ) ) stored = load_response ( self . filename ) if ( check_status ) and response . status_code != stored . status_code : raise StatusCodeError ( self . view , response . status_code , stored . status_code ) if check_headers : self . _assert_headers ( response , stored ) self . compare ( payload , stored . data , self . filename , view = self . view ) | check url for response changes |
14,117 | async def reboot ( ) : async with aiohttp . ClientSession ( ) as session : ghlocalapi = DeviceSettings ( LOOP , session , IPADDRESS ) result = await ghlocalapi . reboot ( ) print ( "Reboot info:" , result ) | Reboot a Google Home unit . |
14,118 | def filter ( self , record ) : request = get_request ( ) if request : user = getattr ( request , 'user' , None ) if user and not user . is_anonymous ( ) : record . username = user . username else : record . username = '-' meta = getattr ( request , 'META' , { } ) record . remote_addr = meta . get ( 'REMOTE_ADDR' , '-' ) record . http_user_agent = meta . get ( 'HTTP_USER_AGENT' , '-' ) if not hasattr ( record , 'request' ) : record . request = request else : record . username = '-' record . remote_addr = '-' record . http_user_agent = '-' return True | Adds user and remote_addr to the record . |
14,119 | def status ( self ) : try : r = self . _response except AttributeError : return None else : return r . status_code , r . reason | Status da resposta recebida do Postmon . |
14,120 | def package_theme ( app_name , build_dir , excludes = None , includes = None , path_prefix = None , template_dirs = None ) : templates_dest = os . path . join ( build_dir , 'templates' ) orig_static_url = django_settings . STATIC_URL if ( app_name != settings . APP_NAME and not django_settings . STATIC_URL . startswith ( '/' + app_name ) ) : django_settings . STATIC_URL = '/' + app_name + orig_static_url if not os . path . exists ( templates_dest ) : os . makedirs ( templates_dest ) if template_dirs is None : template_dirs = get_template_search_path ( app_name ) for template_dir in template_dirs : if ( templates_dest and not os . path . samefile ( template_dir , templates_dest ) ) : install_templates ( template_dir , templates_dest , excludes = excludes , includes = includes , path_prefix = path_prefix ) | Package resources and templates for a multi - tier environment into a zip file . |
14,121 | def json_to_response ( self , action = None , json_status = None , success_url = None , json_data = None , ** response_kwargs ) : data = { "status" : self . get_status ( json_status ) , "action" : self . get_action ( action ) , "extra_data" : self . get_json_data ( json_data or { } ) } if self . action == AjaxResponseAction . REDIRECT : data [ "action_url" ] = success_url or self . get_success_url ( ) return JsonResponse ( data , ** response_kwargs ) | Valid response with next action to be followed by the JS |
14,122 | def get_action ( self , action = None ) : if action : self . action = action if self . action not in AjaxResponseAction . choices : raise ValueError ( "Invalid action selected: '{}'" . format ( self . action ) ) return self . action | Returns action to take after call |
14,123 | def get_status ( self , json_status = None ) : if json_status : self . json_status = json_status if self . json_status not in AjaxResponseStatus . choices : raise ValueError ( "Invalid status selected: '{}'" . format ( self . json_status ) ) return self . json_status | Returns status of for json |
14,124 | def form_invalid ( self , form , prefix = None ) : response = super ( FormAjaxMixin , self ) . form_invalid ( form ) if self . request . is_ajax ( ) : data = { "errors_list" : self . add_prefix ( form . errors , prefix ) , } return self . json_to_response ( status = 400 , json_data = data , json_status = AjaxResponseStatus . ERROR ) return response | If form invalid return error list in JSON response |
14,125 | def form_valid ( self , form ) : response = super ( FormAjaxMixin , self ) . form_valid ( form ) if self . request . is_ajax ( ) : return self . json_to_response ( ) return response | If form valid return response with action |
14,126 | def add_prefix ( self , errors , prefix ) : if not prefix : prefix = self . get_prefix ( ) if prefix : return { "%s-%s" % ( prefix , k ) : v for k , v in errors . items ( ) } return errors | Add form prefix to errors |
14,127 | def render_to_response ( self , context , ** response_kwargs ) : if self . request . is_ajax ( ) : data = { "content" : render_to_string ( self . get_template_names ( ) , context , request = self . request ) } return JsonResponse ( data ) if settings . DEBUG : return super ( PartialAjaxMixin , self ) . render_to_response ( context , ** response_kwargs ) raise Http404 ( ) | Returns the rendered template in JSON format |
14,128 | def random_init_map ( interface , state , label , inp ) : import random out = interface . output ( 0 ) centers = { } for row in inp : row = row . strip ( ) . split ( state [ "delimiter" ] ) if len ( row ) > 1 : x = [ ( 0 if row [ i ] in state [ "missing_vals" ] else float ( row [ i ] ) ) for i in state [ "X_indices" ] ] cluster = random . randint ( 0 , state [ 'k' ] - 1 ) vertex = state [ 'create' ] ( x , 1.0 ) centers [ cluster ] = vertex if cluster not in centers else state [ "update" ] ( centers [ cluster ] , vertex ) for cluster , values in centers . iteritems ( ) : out . add ( cluster , values ) | Assign datapoint e randomly to one of the k clusters . |
14,129 | def estimate_map ( interface , state , label , inp ) : out = interface . output ( 0 ) centers = { } for row in inp : row = row . strip ( ) . split ( state [ "delimiter" ] ) if len ( row ) > 1 : x = [ ( 0 if row [ i ] in state [ "missing_vals" ] else float ( row [ i ] ) ) for i in state [ "X_indices" ] ] cluster = min ( ( state [ 'dist' ] ( c , x ) , i ) for i , c in state [ 'centers' ] ) [ 1 ] vertex = state [ 'create' ] ( x , 1.0 ) centers [ cluster ] = vertex if cluster not in centers else state [ "update" ] ( centers [ cluster ] , vertex ) for cluster , values in centers . iteritems ( ) : out . add ( cluster , values ) | Find the cluster i that is closest to the datapoint e . |
14,130 | def estimate_reduce ( interface , state , label , inp ) : centers = { } for i , c in inp : centers [ i ] = c if i not in centers else state [ 'update' ] ( centers [ i ] , c ) out = interface . output ( 0 ) for i , c in centers . items ( ) : out . add ( i , state [ 'finalize' ] ( c ) ) | Estimate the cluster centers for each cluster . |
14,131 | def predict_map ( interface , state , label , inp ) : out = interface . output ( 0 ) for row in inp : if len ( row ) > 1 : row = row . strip ( ) . split ( state [ "delimiter" ] ) x_id = "" if state [ "id_index" ] == - 1 else row [ state [ "id_index" ] ] x = [ ( 0 if row [ i ] in state [ "missing_vals" ] else float ( row [ i ] ) ) for i in state [ "X_indices" ] ] out . add ( x_id , min ( [ ( i , state [ "dist" ] ( c , x ) ) for i , c in state [ "centers" ] ] , key = lambda t : t [ 1 ] ) ) | Determine the closest cluster for the datapoint e . |
14,132 | def fit ( dataset , n_clusters = 5 , max_iterations = 10 , random_state = None , save_results = True , show = False ) : from disco . job import Job from disco . worker . pipeline . worker import Worker , Stage from disco . core import result_iterator try : n_clusters = int ( n_clusters ) max_iterations = int ( max_iterations ) if n_clusters < 2 : raise Exception ( "Parameter n_clusters should be greater than 1." ) if max_iterations < 1 : raise Exception ( "Parameter max_iterations should be greater than 0." ) except ValueError : raise Exception ( "Parameters should be numerical." ) job = Job ( worker = Worker ( save_results = save_results ) ) job . pipeline = [ ( "split" , Stage ( "kmeans_init_map" , input_chain = dataset . params [ "input_chain" ] , init = map_init , process = random_init_map ) ) , ( 'group_label' , Stage ( "kmeans_init_reduce" , process = estimate_reduce , init = simple_init , combine = True ) ) ] job . params = dict ( dataset . params . items ( ) + mean_point_center . items ( ) ) job . params [ 'seed' ] = random_state job . params [ 'k' ] = n_clusters job . run ( input = dataset . params [ "data_tag" ] , name = "kmeans_init" ) init = job . wait ( show = show ) centers = [ ( i , c ) for i , c in result_iterator ( init ) ] for j in range ( max_iterations ) : job = Job ( worker = Worker ( save_results = save_results ) ) job . params = dict ( dataset . params . items ( ) + mean_point_center . items ( ) ) job . params [ 'k' ] = n_clusters job . params [ 'centers' ] = centers job . pipeline = [ ( 'split' , Stage ( "kmeans_map_iter_%s" % ( j + 1 , ) , input_chain = dataset . params [ "input_chain" ] , process = estimate_map , init = simple_init ) ) , ( 'group_label' , Stage ( "kmeans_reduce_iter_%s" % ( j + 1 , ) , process = estimate_reduce , init = simple_init , combine = True ) ) ] job . run ( input = dataset . params [ "data_tag" ] , name = 'kmeans_iter_%d' % ( j + 1 , ) ) fitmodel_url = job . 
wait ( show = show ) centers = [ ( i , c ) for i , c in result_iterator ( fitmodel_url ) ] return { "kmeans_fitmodel" : fitmodel_url } | Optimize k - clustering for iterations iterations with cluster center definitions as given in center . |
14,133 | def predict ( dataset , fitmodel_url , save_results = True , show = False ) : from disco . job import Job from disco . worker . pipeline . worker import Worker , Stage from disco . core import result_iterator if "kmeans_fitmodel" not in fitmodel_url : raise Exception ( "Incorrect fit model." ) job = Job ( worker = Worker ( save_results = save_results ) ) job . params = dict ( dataset . params . items ( ) + mean_point_center . items ( ) ) job . params [ "centers" ] = [ ( i , c ) for i , c in result_iterator ( fitmodel_url [ "kmeans_fitmodel" ] ) ] job . pipeline = [ ( "split" , Stage ( "kmeans_predict" , input_chain = dataset . params [ "input_chain" ] , init = simple_init , process = predict_map ) ) ] job . run ( input = dataset . params [ "data_tag" ] , name = "kmeans_predict" ) return job . wait ( show = show ) | Predict the closest clusters for the datapoints in input . |
14,134 | def load_stdlib ( ) : if _stdlib : return _stdlib prefixes = tuple ( { os . path . abspath ( p ) for p in ( sys . prefix , getattr ( sys , 'real_prefix' , sys . prefix ) , getattr ( sys , 'base_prefix' , sys . prefix ) , ) } ) for sp in sys . path : if not sp : continue _import_paths . append ( os . path . abspath ( sp ) ) stdpaths = tuple ( { p for p in _import_paths if p . startswith ( prefixes ) and 'site-packages' not in p } ) _stdlib . update ( sys . builtin_module_names ) for stdpath in stdpaths : if not os . path . isdir ( stdpath ) : continue for item in os . listdir ( stdpath ) : if item . startswith ( '.' ) or item == 'site-packages' : continue p = os . path . join ( stdpath , item ) if not os . path . isdir ( p ) and not item . endswith ( ( '.py' , '.so' ) ) : continue _stdlib . add ( item . split ( '.' , 1 ) [ 0 ] ) return _stdlib | Scans sys . path for standard library modules . |
14,135 | def import_path_from_file ( filename , as_list = False ) : module_path = [ ] basename = os . path . splitext ( os . path . basename ( filename ) ) [ 0 ] if basename != '__init__' : module_path . append ( basename ) dirname = os . path . dirname ( filename ) while os . path . isfile ( os . path . join ( dirname , '__init__.py' ) ) : dirname , tail = os . path . split ( dirname ) module_path . insert ( 0 , tail ) if as_list : return module_path , dirname return '.' . join ( module_path ) , dirname | Returns a tuple of the import path and root module directory for the supplied file . |
14,136 | def file_containing_import ( import_path , import_root ) : if not _import_paths : load_stdlib ( ) if os . path . isfile ( import_root ) : import_root = os . path . dirname ( import_root ) search_paths = [ import_root ] + _import_paths module_parts = import_path . split ( '.' ) for i in range ( len ( module_parts ) , 0 , - 1 ) : module_path = os . path . join ( * module_parts [ : i ] ) for sp in search_paths : p = os . path . join ( sp , module_path ) if os . path . isdir ( p ) : return os . path . join ( p , '__init__.py' ) elif os . path . isfile ( p + '.py' ) : return p + '.py' return None | Finds the file that might contain the import_path . |
14,137 | def resolve_import ( import_path , from_module ) : if not import_path or not import_path . startswith ( '.' ) : return import_path from_module = from_module . split ( '.' ) dots = 0 for c in import_path : if c == '.' : dots += 1 else : break if dots : from_module = from_module [ : - dots ] import_path = import_path [ dots : ] if import_path : from_module . append ( import_path ) return '.' . join ( from_module ) | Resolves relative imports from a module . |
14,138 | def find_package ( name , installed , package = False ) : if package : name = name . lower ( ) tests = ( lambda x : x . user and name == x . name . lower ( ) , lambda x : x . local and name == x . name . lower ( ) , lambda x : name == x . name . lower ( ) , ) else : tests = ( lambda x : x . user and name in x . import_names , lambda x : x . local and name in x . import_names , lambda x : name in x . import_names , ) for t in tests : try : found = list ( filter ( t , installed ) ) if found and not found [ 0 ] . is_scan : return found [ 0 ] except StopIteration : pass return None | Finds a package in the installed list . |
14,139 | def is_script ( filename ) : if not os . path . isfile ( filename ) : return False try : with open ( filename , 'rb' ) as fp : return fp . read ( 2 ) == b'#!' except IOError : pass return False | Checks if a file has a hashbang . |
14,140 | def is_python_script ( filename ) : if filename . lower ( ) . endswith ( '.py' ) : return True if not os . path . isfile ( filename ) : return False try : with open ( filename , 'rb' ) as fp : if fp . read ( 2 ) != b'#!' : return False return re . match ( r'.*python' , str_ ( fp . readline ( ) ) ) except IOError : pass return False | Checks a file to see if it s a python script of some sort . |
14,141 | def search ( self , key_pattern : str , user_pattern : str ) -> List [ Entry ] : key_pattern = _normalized_key ( key_pattern ) results = [ ] for entry in self . entries : if key_pattern in entry . key and user_pattern in entry . user : results . append ( entry ) return sorted ( results , key = lambda e : e . key ) | Search database for given key and user pattern . |
14,142 | def load ( path : str ) -> "Store" : if _gpg . is_encrypted ( path ) : src_bytes = _gpg . decrypt ( path ) else : src_bytes = open ( path , "rb" ) . read ( ) src = src_bytes . decode ( "utf-8" ) ext = _gpg . unencrypted_ext ( path ) assert ext not in [ ".yml" , ".yaml" , ] , "YAML support was removed in version 0.12.0" entries = _parse_entries ( src ) return Store ( path , entries ) | Load password store from file . |
14,143 | def only_passed_and_wait ( result ) : verdict = result . get ( "verdict" , "" ) . strip ( ) . lower ( ) if verdict in Verdicts . PASS + Verdicts . WAIT : return result return None | Returns PASS and WAIT results only skips everything else . |
14,144 | def insert_source_info ( result ) : comment = result . get ( "comment" ) if comment : return source = result . get ( "source" ) job_name = result . get ( "job_name" ) run = result . get ( "run" ) source_list = [ source , job_name , run ] if not all ( source_list ) : return source_note = "/" . join ( source_list ) source_note = "Source: {}" . format ( source_note ) result [ "comment" ] = source_note | Adds info about source of test result if available . |
14,145 | def setup_parametrization ( result , parametrize ) : if parametrize : title = result . get ( "title" ) if title : result [ "title" ] = TEST_PARAM_RE . sub ( "" , title ) else : if "params" in result : del result [ "params" ] | Modifies result s data according to the parametrization settings . |
14,146 | def include_class_in_title ( result ) : classname = result . get ( "classname" , "" ) if classname : filepath = result . get ( "file" , "" ) title = result . get ( "title" ) if title and "/" in filepath and "." in classname : fname = filepath . split ( "/" ) [ - 1 ] . replace ( ".py" , "" ) last_classname = classname . split ( "." ) [ - 1 ] if fname != last_classname and last_classname not in title : result [ "title" ] = "{}.{}" . format ( last_classname , title ) del result [ "classname" ] | Makes sure that test class is included in title . |
14,147 | def parse_rst_description ( testcase ) : description = testcase . get ( "description" ) if not description : return try : with open ( os . devnull , "w" ) as devnull : testcase [ "description" ] = publish_parts ( description , writer_name = "html" , settings_overrides = { "report_level" : 2 , "halt_level" : 2 , "warning_stream" : devnull } , ) [ "html_body" ] except Exception as exp : testcase_id = testcase . get ( "nodeid" ) or testcase . get ( "id" ) or testcase . get ( "title" ) logger . error ( "%s: description: %s" , str ( exp ) , testcase_id ) | Creates an HTML version of the RST formatted description . |
14,148 | def preformat_plain_description ( testcase ) : description = testcase . get ( "description" ) if not description : return nodeid = testcase . get ( "nodeid" ) or "" indent = None if "::Test" in nodeid : indent = 8 * " " elif "::test_" in nodeid : indent = 4 * " " if indent : orig_lines = description . split ( "\n" ) new_lines = [ ] for line in orig_lines : if line . startswith ( indent ) : line = line . replace ( indent , "" , 1 ) new_lines . append ( line ) description = "\n" . join ( new_lines ) testcase [ "description" ] = "<pre>\n{}\n</pre>" . format ( description ) | Creates a preformatted HTML version of the description . |
14,149 | def add_unique_runid ( testcase , run_id = None ) : testcase [ "description" ] = '{}<br id="{}"/>' . format ( testcase . get ( "description" ) or "" , run_id or id ( add_unique_runid ) ) | Adds run id to the test description . |
14,150 | def add_automation_link ( testcase ) : automation_link = ( '<a href="{}">Test Source</a>' . format ( testcase [ "automation_script" ] ) if testcase . get ( "automation_script" ) else "" ) testcase [ "description" ] = "{}<br/>{}" . format ( testcase . get ( "description" ) or "" , automation_link ) | Appends link to automation script to the test description . |
14,151 | def image ( self ) : r = requests . get ( self . image_url , stream = True ) r . raise_for_status ( ) return r . raw . read ( ) | Return an image of the structure of the compound |
14,152 | def add_numbers ( ) : a = request . params . get ( 'a' , 0 , type = int ) b = request . params . get ( 'b' , 0 , type = int ) return json . dumps ( { 'result' : a + b } ) | Add two numbers server side ridiculous but well ... |
14,153 | def entry_archive_year_url ( ) : entry = Entry . objects . filter ( published = True ) . latest ( ) arg_list = [ entry . published_on . strftime ( "%Y" ) ] return reverse ( 'blargg:entry_archive_year' , args = arg_list ) | Renders the entry_archive_year URL for the latest Entry . |
14,154 | def _extract_parameters_from_properties ( properties ) : new_properties = { } parameters = [ ] for key , value in six . iteritems ( properties ) : if key . startswith ( _PARAMETER_PREFIX ) : parameters . append ( ( key . replace ( _PARAMETER_PREFIX , "" ) , value ) ) else : new_properties [ key ] = value return new_properties , sorted ( parameters ) | Extracts parameters from properties . |
14,155 | def import_junit ( junit_file , ** kwargs ) : xml_root = _get_xml_root ( junit_file ) results = [ ] for test_data in xml_root : if test_data . tag != "testcase" : continue verdict , comment , properties = _parse_testcase_record ( test_data ) properties , parameters = _extract_parameters_from_properties ( properties ) title = test_data . get ( "name" ) classname = test_data . get ( "classname" ) time = test_data . get ( "time" , 0 ) filepath = test_data . get ( "file" ) data = [ ( "title" , title ) , ( "classname" , classname ) , ( "verdict" , verdict ) , ( "comment" , comment ) , ( "time" , time ) , ( "file" , filepath ) , ] for key in sorted ( properties ) : data . append ( ( key , properties [ key ] ) ) if parameters : data . append ( ( "params" , OrderedDict ( parameters ) ) ) results . append ( OrderedDict ( data ) ) return xunit_exporter . ImportedData ( results = results , testrun = None ) | Reads the content of the junit - results file produced by pytest and returns imported data . |
14,156 | def libs ( ) : " Show zeta libs " for name , description , version , url in gen_frameworks ( ) : print name print '' . join ( '-' for _ in xrange ( len ( name ) ) ) print description . strip ( '/*\n ' ) print version . strip ( '/*\n ' ) print url . strip ( '/*\n ' ) print | Show zeta libs |
14,157 | def watch ( args ) : " Watch directory for changes and auto pack sources " assert op . isdir ( args . source ) , "Watch mode allowed only for directories." print 'Zeta-library v. %s watch mode' % VERSION print '================================' print 'Ctrl+C for exit\n' observer = Observer ( ) handler = ZetaTrick ( args = args ) observer . schedule ( handler , args . source , recursive = True ) observer . start ( ) try : while True : time . sleep ( 1 ) except KeyboardInterrupt : observer . stop ( ) print "\nWatch mode stoped." observer . join ( ) | Watch directory for changes and auto pack sources |
14,158 | def pack ( args ) : " Parse file or dir, import css, js code and save with prefix " assert op . exists ( args . source ) , "Does not exists: %s" % args . source zeta_pack ( args ) | Parse file or dir import css js code and save with prefix |
14,159 | def cast ( keys , data ) : matrix = Matrix ( ) matrix . keys = keys matrix . data = data return matrix | Cast a set of keys and an array to a Matrix object . |
14,160 | def connect_to ( self , vertex , weight = 1 ) : for edge in self . edges_out : if vertex == edge . vertex_in : return edge return Edge ( self , vertex , weight ) | Connect this vertex to another one . |
14,161 | def connect_from ( self , vertex , weight = 1 ) : for edge in self . edges_in : if vertex == edge . vertex_out : return edge return Edge ( vertex , self , weight ) | Connect another vertex to this one . |
14,162 | def go_from ( self , vertex ) : if self . vertex_out : self . vertex_out . edges_out . remove ( self ) self . vertex_out = vertex vertex . edges_out . add ( self ) | Tell the edge to go out from this vertex . |
14,163 | def go_in ( self , vertex ) : if self . vertex_in : self . vertex_in . edges_in . remove ( self ) self . vertex_in = vertex vertex . edges_in . add ( self ) | Tell the edge to go into this vertex . |
14,164 | def get_device_type ( device_type = 0 ) : device_types = { 0 : "Unknown" , 1 : "Classic - BR/EDR devices" , 2 : "Low Energy - LE-only" , 3 : "Dual Mode - BR/EDR/LE" } if device_type in [ 0 , 1 , 2 , 3 ] : return_value = device_types [ device_type ] else : return_value = device_types [ 0 ] return return_value | Return the device type from a device_type list . |
14,165 | def fit ( dataset , alpha = 1e-8 , max_iterations = 10 , save_results = True , show = False ) : from disco . worker . pipeline . worker import Worker , Stage from disco . core import Job , result_iterator import numpy as np if dataset . params [ "y_map" ] == [ ] : raise Exception ( "Logistic regression requires a target label mapping parameter." ) try : alpha = float ( alpha ) max_iterations = int ( max_iterations ) if max_iterations < 1 : raise Exception ( "Parameter max_iterations should be greater than 0." ) except ValueError : raise Exception ( "Parameters should be numerical." ) thetas = np . zeros ( len ( dataset . params [ "X_indices" ] ) + 1 ) J = [ 0 ] for i in range ( max_iterations ) : job = Job ( worker = Worker ( save_results = save_results ) ) job . pipeline = [ ( "split" , Stage ( "map" , input_chain = dataset . params [ "input_chain" ] , init = simple_init , process = map_fit ) ) , ( 'group_all' , Stage ( "reduce" , init = simple_init , process = reduce_fit , combine = True ) ) ] job . params = dataset . params job . params [ "thetas" ] = thetas job . run ( name = "logreg_fit_iter_%d" % ( i + 1 ) , input = dataset . params [ "data_tag" ] ) fitmodel_url = job . wait ( show = show ) for k , v in result_iterator ( fitmodel_url ) : if k == "J" : J . append ( v ) else : thetas = v if np . abs ( J [ - 2 ] - J [ - 1 ] ) < alpha : if show : print ( "Converged at iteration %d" % ( i + 1 ) ) break return { "logreg_fitmodel" : fitmodel_url } | Function starts a job for calculation of theta parameters |
14,166 | def save ( self , dest_path ) : r self . save_assets ( dest_path ) self . external_filename = 'externalized-%s' % os . path . basename ( dest_path ) . replace ( ' ' , '_' ) dest_path = self . _add_extension ( 'tex' , dest_path ) with open ( dest_path , 'w' ) as f : f . write ( self . render ( ) ) | r Save the plot as a LaTeX file . |
14,167 | def save_as_pdf ( self , dest_path ) : dest_path = self . _add_extension ( 'pdf' , dest_path ) build_dir = tempfile . mkdtemp ( ) build_path = os . path . join ( build_dir , 'document.tex' ) self . save_assets ( build_path ) with open ( build_path , 'w' ) as f : f . write ( self . render ( ) ) pdf_path = self . _build_document ( build_path ) shutil . copyfile ( pdf_path , dest_path ) shutil . rmtree ( build_dir ) | Save the plot as a PDF file . |
def histogram(self, counts, bin_edges, linestyle='solid'):
    """Plot a histogram as a step line.

    :param counts: bin counts (length N).
    :param bin_edges: bin edges (length N + 1).
    :param linestyle: line style for the outline.
    :raises RuntimeError: when the lengths are inconsistent.
    """
    if len(counts) != len(bin_edges) - 1:
        raise RuntimeError(
            'The length of bin_edges should be length of counts + 1')
    # repeat the final count so the last bin is drawn as a full step
    step_y = list(counts)
    step_y.append(step_y[-1])
    self.plot(bin_edges, step_y, mark=None, linestyle=linestyle,
              use_steps=True)
def scatter(self, x, y, xerr=[], yerr=[], mark='o', markstyle=None):
    """Plot a series of points without a connecting line.

    :param x, y: point coordinates.
    :param xerr, yerr: optional error bar sizes.
    :param mark: marker symbol drawn at each point.
    :param markstyle: optional style options applied to the marks.
    """
    # a scatter plot is just a regular plot with the line suppressed
    self.plot(
        x, y,
        xerr=xerr, yerr=yerr,
        mark=mark, linestyle=None, markstyle=markstyle,
    )
def set_label(self, text, location='upper right', style=None):
    """Set a label for the plot.

    :param text: label text.
    :param location: one of the keys in ``RELATIVE_NODE_LOCATIONS``.
    :param style: optional style options for the label node.
    :raises RuntimeError: if the location is not recognized.
    """
    if location not in RELATIVE_NODE_LOCATIONS:
        raise RuntimeError('Unknown label location: %s' % location)
    label = RELATIVE_NODE_LOCATIONS[location].copy()
    label['text'] = text
    label['style'] = style
    self.label = label
def add_pin(self, text, location='left', x=None, use_arrow=False,
            relative_position=None, style=None):
    """Add pin to most recent data series.

    If ``x`` is given, the pin's y position is interpolated on the last
    plotted series (in log space when the corresponding axis mode is
    logarithmic); otherwise the pin is attached along the whole series
    path via ``add_pin_at_xy``.

    :param text: pin text.
    :param location: pin placement relative to the anchor point.
    :param x: optional x coordinate to pin at.
    :param use_arrow: draw an arrow from the pin text to the anchor.
    :param relative_position: fraction along the path (see add_pin_at_xy).
    :param style: optional style options.
    :raises RuntimeError: if no data series has been plotted yet.
    """
    try:
        series = self.plot_series_list[-1]
    except IndexError:
        raise RuntimeError('First plot a data series, before using this function')
    data = series['data']
    # data rows may carry extra error columns; only x and y are needed
    series_x, series_y = list(zip(*data))[:2]
    if x is not None:
        # interpolate in log space on logarithmic axes
        if self.xmode == 'log':
            series_x = np.log10(np.array(series_x))
            xp = np.log10(x)
        else:
            xp = x
        if self.ymode == 'log':
            series_y = np.log10(np.array(series_y))
            y = 10 ** np.interp(xp, series_x, series_y)
        else:
            y = np.interp(xp, series_x, series_y)
    else:
        x, y = series_x, series_y
    self.add_pin_at_xy(x, y, text, location, relative_position,
                       use_arrow, style)
def add_pin_at_xy(self, x, y, text, location='above right',
                  relative_position=.9, use_arrow=True, style=None):
    """Add pin at x, y location.

    :param x, y: scalars, or equal-length series describing a path; for
        a path, ``relative_position`` selects the anchor along it.
    :param text: pin text.
    :param location: pin placement relative to the anchor point.
    :param relative_position: fraction along the path; ``None`` picks a
        default based on ``location``.
    :param use_arrow: draw an arrow from the pin text to the anchor.
    :param style: optional style options.
    """
    if relative_position is None:
        # anchor near the matching end of the path by default
        defaults = {'left': 0., 'right': 1.}
        relative_position = defaults.get(location, .8)
    x, y = self._calc_position_for_pin(x, y, relative_position)
    self.pin_list.append({'x': x, 'y': y, 'text': text,
                          'location': location, 'use_arrow': use_arrow,
                          'options': style})
def shade_region(self, x, lower, upper, color='lightgray'):
    """Shade a region between upper and lower bounds.

    :param x: x coordinates.
    :param lower, upper: lower and upper bound for each x value.
    :param color: fill color of the shaded region.
    """
    region = {'data': list(zip(x, lower, upper)), 'color': color}
    self.shaded_regions_list.append(region)
def draw_image(self, image, xmin=0, ymin=0, xmax=None, ymax=None):
    """Draw an image.

    Stores the image for rendering and grows the axis limits so the
    full image extent is visible.

    :param image: image object whose ``size`` attribute gives
        (width, height) in pixels.
    :param xmin, ymin: lower-left corner of the image in data coords.
    :param xmax, ymax: upper-right corner; defaults to the corner plus
        the image's pixel size.
    """
    width, height = image.size
    if xmax is None:
        xmax = xmin + width
    if ymax is None:
        ymax = ymin + height
    self.bitmap_list.append({'image': image, 'xmin': xmin, 'xmax': xmax,
                             'ymin': ymin, 'ymax': ymax})
    # combine the image extent with any existing limits, ignoring
    # limits that are still unset (None)
    min_x = [v for v in (xmin, self.limits['xmin']) if v is not None]
    min_y = [v for v in (ymin, self.limits['ymin']) if v is not None]
    max_x = [v for v in (xmax, self.limits['xmax']) if v is not None]
    max_y = [v for v in (ymax, self.limits['ymax']) if v is not None]
    self.set_xlimits(min(min_x), max(max_x))
    self.set_ylimits(min(min_y), max(max_y))
def set_xlimits(self, min=None, max=None):
    """Set limits for the x-axis.

    :param min, max: axis bounds; ``None`` leaves the bound automatic.
    """
    # parameter names intentionally shadow builtins to match the API
    self.limits.update(xmin=min, xmax=max)
def set_ylimits(self, min=None, max=None):
    """Set limits for the y-axis.

    :param min, max: axis bounds; ``None`` leaves the bound automatic.
    """
    # parameter names intentionally shadow builtins to match the API
    self.limits.update(ymin=min, ymax=max)
def set_xtick_labels(self, labels, style=None):
    """Set tick labels for the x-axis.

    :param labels: list of label texts.
    :param style: optional style options applied to the labels.
    """
    self.ticks.update(xlabels=labels, xlabel_style=style)
def set_ytick_labels(self, labels, style=None):
    """Set tick labels for the y-axis.

    :param labels: list of label texts.
    :param style: optional style options applied to the labels.
    """
    self.ticks.update(ylabels=labels, ylabel_style=style)
def set_xtick_suffix(self, suffix):
    """Set the suffix for the ticks of the x-axis.

    The special values ``'degree'`` and ``'percent'`` are translated to
    the corresponding LaTeX symbols.

    :param suffix: suffix text appended to each tick label.
    """
    replacements = {'degree': r'^\circ', 'percent': r'\%'}
    self.ticks['xsuffix'] = replacements.get(suffix, suffix)
def set_ytick_suffix(self, suffix):
    """Set the suffix for the ticks of the y-axis.

    The special values ``'degree'`` and ``'percent'`` are translated to
    the corresponding LaTeX symbols.

    :param suffix: suffix text appended to each tick label.
    """
    replacements = {'degree': r'^\circ', 'percent': r'\%'}
    self.ticks['ysuffix'] = replacements.get(suffix, suffix)
def set_scalebar(self, location='lower right'):
    """Show marker area scale.

    :param location: one of the keys in ``RELATIVE_NODE_LOCATIONS``.
    :raises RuntimeError: if the location is not recognized.
    """
    if location not in RELATIVE_NODE_LOCATIONS:
        raise RuntimeError('Unknown scalebar location: %s' % location)
    self.scalebar = RELATIVE_NODE_LOCATIONS[location].copy()
def _calc_position_for_pin(self, x, y, relative_position):
    """Determine position at fraction of x, y path.

    Scalars are returned unchanged; for coordinate series the point at
    ``relative_position`` (0..1) along the path is found by
    interpolating over the cumulative path length, working in log space
    when the corresponding axis mode is logarithmic.

    :param x, y: scalar coordinates or equal-length coordinate series.
    :param relative_position: fraction of the total path length.
    :returns: the (x, y) anchor position.
    """
    try:
        max_idx_x = len(x) - 1
        max_idx_y = len(y) - 1
    except TypeError:
        # scalars have no len(): the anchor is simply the given point
        return x, y
    else:
        assert max_idx_x == max_idx_y, \
            'If x and y are iterables, they must be the same length'
        if relative_position == 0:
            xs, ys = x[0], y[0]
        elif relative_position == 1:
            xs, ys = x[max_idx_x], y[max_idx_y]
        else:
            if self.xmode == 'log':
                x = np.log10(np.array(x))
            if self.ymode == 'log':
                y = np.log10(np.array(y))
            # cumulative relative length at each vertex, starting at 0
            rel_length = [0]
            rel_length.extend(self._calc_relative_path_lengths(x, y))
            # fractional vertex index at the requested path fraction
            idx = np.interp(relative_position, rel_length,
                            range(len(rel_length)))
            frac, idx = modf(idx)
            idx = int(idx)
            # linear interpolation within the segment (undone from log
            # space where applicable)
            if self.xmode == 'log':
                xs = 10 ** (x[idx] + (x[idx + 1] - x[idx]) * frac)
            else:
                xs = x[idx] + (x[idx + 1] - x[idx]) * frac
            if self.ymode == 'log':
                ys = 10 ** (y[idx] + (y[idx + 1] - y[idx]) * frac)
            else:
                ys = y[idx] + (y[idx + 1] - y[idx]) * frac
        return xs, ys
14,183 | def _calc_relative_path_lengths ( self , x , y ) : path_lengths = np . sqrt ( np . diff ( x ) ** 2 + np . diff ( y ) ** 2 ) total_length = np . sum ( path_lengths ) cummulative_lengths = np . cumsum ( path_lengths ) relative_path_lengths = cummulative_lengths / total_length return relative_path_lengths | Determine the relative path length at each x y position . |
14,184 | def _normalize_histogram2d ( self , counts , type ) : counts = ( 255 * ( counts - np . nanmin ( counts ) ) / ( np . nanmax ( counts ) - np . nanmin ( counts ) ) ) if type == 'reverse_bw' : counts = 255 - counts return counts . astype ( np . uint8 ) | Normalize the values of the counts for a 2D histogram . |
def _write_bitmaps(self, path, suffix=''):
    """Write bitmap file assets.

    Each stored bitmap is saved as a PNG next to the document, named
    after the document prefix, and upscaled by an integer factor so
    each side is at least 1000 px.

    :param path: destination document path used to derive asset names.
    :param suffix: optional suffix inserted into the asset names.
    """
    dir, prefix = os.path.split(path)
    if '.' in prefix:
        prefix = prefix.split('.')[0]
    if prefix == '':
        prefix = 'figure'
    for i, bitmap in enumerate(self.bitmap_list):
        name = '%s%s_%d.png' % (prefix, suffix, i)
        bitmap['name'] = name  # remembered for referencing during render
        img = bitmap['image']
        # smallest integer multiple of the size that reaches 1000 px
        size0 = int(np.ceil(1000. / img.size[0]) * img.size[0])
        size1 = int(np.ceil(1000. / img.size[1]) * img.size[1])
        large_img = img.resize((size0, size1))
        large_img.save(os.path.join(dir, name))
14,186 | def _prepare_data ( self ) : xmin , xmax = self . limits [ 'xmin' ] , self . limits [ 'xmax' ] self . prepared_plot_series_list = [ ] for series in self . plot_series_list : prepared_series = series . copy ( ) data = prepared_series [ 'data' ] x , _ , _ , _ = zip ( * data ) if sorted ( x ) == list ( x ) : x = np . array ( x ) if xmin is not None : min_idx = x . searchsorted ( xmin ) if min_idx > 0 : min_idx -= 1 else : min_idx = None if xmax is not None : max_idx = x . searchsorted ( xmax ) + 1 else : max_idx = None prepared_series [ 'data' ] = data [ min_idx : max_idx ] self . prepared_plot_series_list . append ( prepared_series ) self . prepared_shaded_regions_list = [ ] for series in self . shaded_regions_list : prepared_series = series . copy ( ) data = prepared_series [ 'data' ] x , _ , _ = zip ( * data ) if sorted ( x ) == list ( x ) : x = np . array ( x ) if xmin is not None : min_idx = x . searchsorted ( xmin ) if min_idx > 0 : min_idx -= 1 else : min_idx = None if xmax is not None : max_idx = x . searchsorted ( xmax ) + 1 else : max_idx = None prepared_series [ 'data' ] = data [ min_idx : max_idx ] self . prepared_shaded_regions_list . append ( prepared_series ) | Prepare data before rendering |
def histogram(self, counts, bin_edges, linestyle='solid'):
    """Plot a polar histogram.

    Each bin is drawn as an arc at constant radius (the bin count),
    sampled every 1/1800 of a circle so the arc appears smooth. If the
    last bin edge wraps around to the first, the outline is closed.

    :param counts: bin counts (length N).
    :param bin_edges: bin edges in the plot's angular units (length N + 1).
    :param linestyle: line style for the outline.
    :raises RuntimeError: when the lengths are inconsistent.
    """
    if len(bin_edges) - 1 != len(counts):
        raise RuntimeError(
            'The length of bin_edges should be length of counts + 1')
    x = []
    y = []
    # a full circle in the angular units in use
    if self.use_radians:
        circle = 2 * np.pi
    else:
        circle = 360.
    step = circle / 1800.
    for i in range(len(bin_edges) - 1):
        # sample points along the arc of bin i at radius counts[i]
        for bin_edge in np.arange(bin_edges[i], bin_edges[i + 1], step=step):
            x.append(bin_edge)
            y.append(counts[i])
        x.append(bin_edges[i + 1])
        y.append(counts[i])
    # close the outline when the histogram spans the full circle
    if bin_edges[-1] % circle == bin_edges[0] % circle:
        x.append(bin_edges[0])
        y.append(counts[0])
    self.plot(x, y, mark=None, linestyle=linestyle)
14,188 | def _get_filename ( self , path ) : match = re . search ( "[a-z]{2,3}_[A-Z]{2}" , path ) if match : start = match . start ( 0 ) filename = path [ start : ] else : filename = os . path . basename ( path ) return filename | This function gets the base filename from the path if a language code is present the filename will start from there . |
def _override_payload(self, payload):
    """Transform the payload into a new format using self.override_payload.

    ``self.override_payload`` acts as a template: every non-dict leaf
    value in it is treated as a (possibly dotted) key path into the
    original payload and replaced with the value found there. If a key
    path is not found, the leaf is left as the literal key string
    (``get_value`` falls back to ``data.get(key, key)``).

    :param payload: the original payload dict.
    :returns: the transformed payload, or the original payload when no
        override template is configured.
    """
    if self.override_payload:
        old_payload = payload

        def get_value(data, key):
            # resolve a dotted path like "a.b.c" through nested dicts
            try:
                parent_key, nested_key = key.split(".", 1)
                return get_value(data.get(parent_key, {}), nested_key)
            except ValueError:
                # no dot left in the key: plain lookup, defaulting to
                # the key itself when missing
                return data.get(key, key)

        def set_values(data):
            # walk the template in place, replacing each leaf with the
            # value looked up in the original payload
            for key, value in data.items():
                if isinstance(value, dict):
                    set_values(value)
                else:
                    data[key] = get_value(old_payload, value)

        # deep-copy so the configured template itself is never mutated
        payload = deepcopy(self.override_payload)
        set_values(payload)
    return payload
def fire_failed_contact_lookup(self, msisdn):
    """Fires a webhook in the event of a failed WhatsApp contact lookup.

    Delivers the payload to every Hook registered for the
    ``whatsapp.failed_contact_check`` event.

    :param msisdn: the phone number for which the lookup failed.
    """
    payload = {"address": msisdn}
    hooks = Hook.objects.filter(event="whatsapp.failed_contact_check")
    for hook in hooks:
        hook.deliver_hook(
            None, payload_override={"hook": hook.dict(), "data": payload})
def get_contact(self, msisdn):
    """Returns the WhatsApp ID for the given MSISDN.

    Performs a blocking contact check against the WhatsApp API; when
    the number has no WhatsApp ID, the failed-lookup webhook is fired
    and a falsy value (None or empty) is returned.

    :param msisdn: phone number to look up.
    :returns: the WhatsApp ID, or a falsy value when not found.
    :raises HTTPError: if the API request fails.
    """
    response = self.session.post(
        urllib_parse.urljoin(self.api_url, "/v1/contacts"),
        json={"blocking": "wait", "contacts": [msisdn]},
    )
    response.raise_for_status()
    whatsapp_id = response.json()["contacts"][0].get("wa_id")
    if not whatsapp_id:
        # notify listeners that this number is not on WhatsApp
        self.fire_failed_contact_lookup(msisdn)
    return whatsapp_id
def send_custom_hsm(self, whatsapp_id, template_name, language, variables):
    """Sends an HSM with more customizable fields than the send_hsm function.

    :param whatsapp_id: recipient WhatsApp ID.
    :param template_name: HSM template (element) name.
    :param language: deterministic language code for the template.
    :param variables: values for the template's localizable parameters.
    :returns: the processed API response (via ``return_response``).
    """
    data = {
        "to": whatsapp_id,
        "type": "hsm",
        "hsm": {
            "namespace": self.hsm_namespace,
            "element_name": template_name,
            "language": {"policy": "deterministic", "code": language},
            "localizable_params": [
                {"default": variable} for variable in variables],
        },
    }
    # only attach a time-to-live when one is configured
    if self.ttl is not None:
        data["ttl"] = self.ttl
    response = self.session.post(
        urllib_parse.urljoin(self.api_url, "/v1/messages"), json=data)
    return self.return_response(response)
def load_data(self):
    """Poll live quotes and append them to each ticker's history.

    Overwrite this for new source data structures.

    :raises ValueError: if fetching or parsing the quotes fails; the
        original exception is chained for debugging.
    """
    try:
        df = self.live_quote_arg_func(self.tickers)
        for index, ticker in enumerate(self.tickers):
            ticker_info = df.loc[index]
            self.ticker_dict[ticker].append(
                ticker_info['price'],
                ticker_info['volume'],
                ticker_info['amount'],
                ticker_info['time'])
    except Exception as exc:
        # chain the original error so the root cause is not hidden
        raise ValueError('Polling thread exception') from exc
def get_departures(self, stop_id, route, destination, api_key):
    """Get the latest data from Transport NSW.

    Queries the departure monitor endpoint for a stop and filters the
    results by destination name (preferred) or route number, keeping
    the first matching upcoming departure.

    :param stop_id: Transport NSW stop id to monitor.
    :param route: route number filter ('' for no filter).
    :param destination: destination name filter ('' for no filter).
    :param api_key: Transport NSW API key.
    :returns: info dict with stop, route, due time, delay, realtime
        flag, destination and mode; on any error the previous
        ``self.info`` is returned unchanged.
    """
    self.stop_id = stop_id
    self.route = route
    self.destination = destination
    self.api_key = api_key
    # adjacent string literals are concatenated by the parser
    url = 'https://api.transport.nsw.gov.au/v1/tp/departure_mon?' \
          'outputFormat=rapidJSON&coordOutputFormat=EPSG%3A4326&' \
          'mode=direct&type_dm=stop&name_dm=' + self.stop_id + \
          '&departureMonitorMacro=true&TfNSWDM=true&version=10.2.1.42'
    auth = 'apikey ' + self.api_key
    header = {'Accept': 'application/json', 'Authorization': auth}
    try:
        response = requests.get(url, headers=header, timeout=10)
    except:
        # NOTE(review): bare except deliberately falls back to the last
        # known info on any network error; consider narrowing it to
        # requests.exceptions.RequestException
        logger.warning("Network or Timeout error")
        return self.info
    if response.status_code != 200:
        logger.warning("Error with the request sent; check api key")
        return self.info
    result = response.json()
    try:
        result['stopEvents']
    except KeyError:
        logger.warning("No stop events for this query")
        return self.info
    maxresults = 1
    monitor = []
    if self.destination != '':
        # filter stop events by destination name
        for i in range(len(result['stopEvents'])):
            destination = result['stopEvents'][i]['transportation']['destination']['name']
            if destination == self.destination:
                event = self.parseEvent(result, i)
                if event != None:
                    monitor.append(event)
                if len(monitor) >= maxresults:
                    break
    elif self.route != '':
        # filter stop events by route number
        for i in range(len(result['stopEvents'])):
            number = result['stopEvents'][i]['transportation']['number']
            if number == self.route:
                event = self.parseEvent(result, i)
                if event != None:
                    monitor.append(event)
                if len(monitor) >= maxresults:
                    break
    else:
        # no filter: take the first maxresults events
        for i in range(0, maxresults):
            event = self.parseEvent(result, i)
            if event != None:
                monitor.append(event)
    if monitor:
        # indices into the event list produced by parseEvent
        self.info = {ATTR_STOP_ID: self.stop_id,
                     ATTR_ROUTE: monitor[0][0],
                     ATTR_DUE_IN: monitor[0][1],
                     ATTR_DELAY: monitor[0][2],
                     ATTR_REALTIME: monitor[0][5],
                     ATTR_DESTINATION: monitor[0][6],
                     ATTR_MODE: monitor[0][7]}
    return self.info
def parseEvent(self, result, i):
    """Parse the current event and extract data.

    :param result: decoded departure monitor response.
    :param i: index of the stop event to parse.
    :returns: list ``[number, due, delay, planned, estimated,
        real_time, destination, mode]``, or ``None`` when the departure
        is already in the past.
    """
    fmt = '%Y-%m-%dT%H:%M:%SZ'  # API timestamps, UTC ('Z' suffix)
    due = 0
    delay = 0
    real_time = 'n'
    number = result['stopEvents'][i]['transportation']['number']
    planned = datetime.strptime(result['stopEvents'][i]['departureTimePlanned'], fmt)
    destination = result['stopEvents'][i]['transportation']['destination']['name']
    mode = self.get_mode(result['stopEvents'][i]['transportation']['product']['class'])
    estimated = planned
    if 'isRealtimeControlled' in result['stopEvents'][i]:
        # realtime data available: prefer the estimated departure time
        real_time = 'y'
        estimated = datetime.strptime(result['stopEvents'][i]['departureTimeEstimated'], fmt)
    if estimated > datetime.utcnow():
        due = self.get_due(estimated)
        delay = self.get_delay(planned, estimated)
        return [number, due, delay, planned, estimated, real_time, destination, mode]
    else:
        # departure already passed; caller skips this event
        return None
def get_due(self, estimated):
    """Minutes until the next departure.

    :param estimated: estimated departure time (naive UTC datetime).
    :returns: whole minutes from now until the departure.
    """
    # total_seconds() is correct for deltas of a day or more, where
    # timedelta.seconds would silently wrap around
    return round((estimated - datetime.utcnow()).total_seconds() / 60)
def get_delay(self, planned, estimated):
    """Delay in minutes relative to the planned departure.

    :param planned: scheduled departure time.
    :param estimated: estimated (realtime) departure time.
    :returns: positive minutes when running late, negative when early.
    """
    # total_seconds() handles both signs and deltas over a day, unlike
    # timedelta.seconds which wraps for negative or long deltas; this
    # also removes the duplicated early/late branches
    return round((estimated - planned).total_seconds() / 60)
def create_ethereum_client(uri, timeout=60, *, loop=None):
    """Create client to ethereum node based on schema.

    This is an old-style asyncio coroutine (``yield from``).

    :param uri: node endpoint; ``ipc``/``unix`` schemes connect over a
        unix socket, ``http``/``https`` over TCP.
    :param timeout: request timeout passed to the client.
    :param loop: event loop to use (defaults to the current loop).
    :returns: an ``AsyncIOIPCClient`` or ``AsyncIOHTTPClient`` instance.
    :raises RuntimeError: for unsupported URI schemes.
    """
    if loop is None:
        loop = asyncio.get_event_loop()
    presult = urlparse(uri)
    if presult.scheme in ('ipc', 'unix'):
        reader, writer = yield from asyncio.open_unix_connection(
            presult.path, loop=loop)
        return AsyncIOIPCClient(reader, writer, uri, timeout, loop=loop)
    elif presult.scheme in ('http', 'https'):
        tls = presult.scheme[-1] == 's'
        netloc = presult.netloc.split(':')
        host = netloc.pop(0)
        # explicit port when present, otherwise the scheme default
        port = netloc.pop(0) if netloc else (443 if tls else 80)
        return AsyncIOHTTPClient(host, port, tls, timeout, loop=loop)
    else:
        raise RuntimeError('This scheme does not supported.')
async def get_alarms():
    """Get alarms and timers from GH.

    Opens a throwaway aiohttp session, fetches the device's alarms via
    the ghlocalapi ``Alarms`` helper and prints them.
    """
    async with aiohttp.ClientSession() as session:
        ghlocalapi = Alarms(LOOP, session, IPADDRESS)
        await ghlocalapi.get_alarms()
        print("Alarms:", ghlocalapi.alarms)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.