idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
51,400 | def _has_keep_elt_in_descendants ( self , elt ) : for d in elt . iterdescendants ( ) : if d in self . elts_to_keep : return True return False | Returns whether the element has a descendant to keep or not |
51,401 | def _remove_elements ( self , elts_to_remove ) : for e in elts_to_remove : parent = e . getparent ( ) if e . tail and e . tail . strip ( ) : parent_text = parent . text or '' parent . text = parent_text + e . tail e . getparent ( ) . remove ( e ) | Removes flagged elements from the ElementTree |
51,402 | def getSystemInfo ( self , timeout = 1 ) : namespace = System . getServiceType ( "getSystemInfo" ) uri = self . getControlURL ( namespace ) results = self . execute ( uri , namespace , "GetInfo" , timeout = timeout ) return SystemInfo ( results ) | Execute GetInfo action to get information s about the System on the device . |
51,403 | def reboot ( self , timeout = 1 ) : namespace = System . getServiceType ( "reboot" ) uri = self . getControlURL ( namespace ) self . execute ( uri , namespace , "Reboot" , timeout = timeout ) | Reboot the device |
51,404 | def getTimeInfo ( self , timeout = 1 ) : namespace = System . getServiceType ( "getTimeInfo" ) uri = self . getControlURL ( namespace ) results = self . execute ( uri , namespace , "GetInfo" , timeout = timeout ) return TimeInfo ( results ) | Execute GetInfo action to get information s about the time on the device . |
51,405 | def softwareUpdateAvailable ( self , timeout = 1 ) : namespace = System . getServiceType ( "softwareUpdateAvailable" ) uri = self . getControlURL ( namespace ) results = self . execute ( uri , namespace , "GetInfo" , timeout = timeout ) return bool ( int ( results [ "NewUpgradeAvailable" ] ) ) | Returns if a software update is available |
51,406 | def make_headers ( headers ) : out = { } if 'etag' in headers : out [ 'if-none-match' ] = headers [ 'etag' ] if 'last-modified' in headers : out [ 'if-modified-since' ] = headers [ 'last-modified' ] return out | Make the cache control headers based on a previous request s response headers |
51,407 | def get ( self , prefix , url , schema_version = None ) : if not self . cache_dir : return None filename = self . _get_cache_file ( prefix , url ) try : with open ( filename , 'rb' ) as file : item = pickle . load ( file ) if schema_version and schema_version != item . schema : LOGGER . debug ( "Cache get %s %s: Wanted schema %d, got %d" , prefix , url , schema_version , item . schema ) return None return item except FileNotFoundError : pass except Exception : _ , msg , _ = sys . exc_info ( ) LOGGER . warning ( "Cache get %s %s failed: %s" , prefix , url , msg ) return None | Get the cached object |
51,408 | def set ( self , prefix , url , obj ) : if not self . cache_dir : return filename = self . _get_cache_file ( prefix , url ) try : os . makedirs ( os . path . join ( self . cache_dir , prefix ) ) except OSError : pass with open ( filename , 'wb' ) as file : pickle . dump ( obj , file ) | Add an object into the cache |
51,409 | def dataset_path ( cache = None , cachefile = "~/.io3d_cache.yaml" , get_root = False ) : local_data_dir = local_dir if cachefile is not None : cache = cachef . CacheFile ( cachefile ) if cache is not None : local_data_dir = cache . get_or_save_default ( "local_dataset_dir" , local_dir ) if get_root : local_data_dir else : logger . warning ( "Parameter" ) local_data_dir = op . join ( local_data_dir , "medical" , "orig" ) return op . expanduser ( local_data_dir ) | Get dataset path . |
51,410 | def get_dataset_meta ( label ) : data_url = data_urls [ label ] if type ( data_url ) == str : data_url = [ data_url ] if type ( data_url ) == list : data_url . extend ( [ None , None , None , None ] ) data_url = data_url [ : 4 ] url , expected_hash , hash_path , relative_donwload_dir = data_url if hash_path is None : hash_path = label return data_url , url , expected_hash , hash_path , relative_donwload_dir | Gives you metadata for dataset chosen via label param |
51,411 | def _expand_dataset_packages ( dataset_label_dict ) : new_dataset_label_dict = [ ] for label in dataset_label_dict : dataset_metadata = data_urls [ label ] if type ( dataset_metadata ) == dict and "package" in dataset_metadata : new_dataset_label_dict . extend ( dataset_metadata [ "package" ] ) else : new_dataset_label_dict . append ( label ) return new_dataset_label_dict | Returns list of possible packages contained in dataset in case the dataset is multi dataset eg . lisa . |
51,412 | def get_old ( dataset_label , data_id , destination_dir = None ) : if destination_dir is None : destination_dir = op . join ( dataset_path ( get_root = True ) , "medical" , "orig" ) destination_dir = op . expanduser ( destination_dir ) data_url , url , expected_hash , hash_path , relative_output_path = get_dataset_meta ( dataset_label ) paths = glob . glob ( os . path . join ( destination_dir , hash_path ) ) paths . sort ( ) import fnmatch print ( paths ) print ( data_id ) pathsf = fnmatch . filter ( paths , data_id ) print ( pathsf ) datap = io3d . read ( pathsf [ 0 ] , dataplus_format = True ) return datap | Get the 3D data from specified dataset with specified id . |
51,413 | def checksum ( path , hashfunc = "md5" ) : import checksumdir hash_func = checksumdir . HASH_FUNCS . get ( hashfunc ) if not hash_func : raise NotImplementedError ( "{} not implemented." . format ( hashfunc ) ) if os . path . isdir ( path ) : return checksumdir . dirhash ( path , hashfunc = hashfunc ) hashvalues = [ ] path_list = list ( sorted ( glob . glob ( path ) ) ) logger . debug ( "path_list: len: %i" , len ( path_list ) ) if len ( path_list ) > 0 : logger . debug ( "first ... last: %s ... %s" , str ( path_list [ 0 ] ) , str ( path_list [ - 1 ] ) ) for path in path_list : if os . path . isfile ( path ) : hashvalues . append ( checksumdir . _filehash ( path , hashfunc = hash_func ) ) logger . debug ( "one hash per file: len: %i" , len ( hashvalues ) ) if len ( path_list ) > 0 : logger . debug ( "first ... last: %s ... %s" , str ( hashvalues [ 0 ] ) , str ( hashvalues [ - 1 ] ) ) checksum_hash = checksumdir . _reduce_hash ( hashvalues , hashfunc = hash_func ) logger . debug ( "total hash: {}" . format ( str ( checksum_hash ) ) ) return checksum_hash | Return checksum of files given by path . |
51,414 | def generate_face ( shape = None , face_r = 1.0 , smile_r1 = 0.5 , smile_r2 = 0.7 , eye_r = 0.2 ) : if shape is None : shape = [ 32 , 32 ] nd = len ( shape ) if nd == 2 : sh2 = shape else : sh2 = shape [ 1 : ] fc2 = _get_face2 ( sh2 , face_r = face_r , smile_r1 = smile_r1 , smile_r2 = smile_r2 , eye_r = eye_r ) if nd == 2 : return fc2 else : fc3 = np . zeros ( shape ) for i in range ( fc3 . shape [ 0 ] ) : fc3 [ i , : , : ] = fc2 return fc3 | Create 2D or 3D binar data with smile face . |
51,415 | def remove ( local_file_name ) : try : os . remove ( local_file_name ) except Exception as e : print ( "Cannot remove file '" + local_file_name + "'. Please remove it manually." ) print ( e ) | Function attempts to remove file if failure occures - > print exception |
51,416 | def unzip_recursive ( zip_file_name ) : logger . debug ( "unzipping " + zip_file_name ) fnlist = unzip_one ( zip_file_name ) for fn in fnlist : if zipfile . is_zipfile ( fn ) : local_fnlist = unzip_recursive ( fn ) fnlist . extend ( local_fnlist ) return fnlist | Unzip file with all recursive zip files inside and delete zip files after that . |
51,417 | def upload_asciinema ( filename ) : if os . path . exists ( filename ) : import asciinema . config as aconfig from asciinema . api import Api cfg = aconfig . load ( ) api = Api ( cfg . api_url , os . environ . get ( "USER" ) , cfg . install_id ) uploader = UploadCommand ( api , filename ) try : url , warn = uploader . api . upload_asciicast ( filename ) if warn : uploader . print_warning ( warn ) if url : match = re . search ( 'https://.+' , url ) if match : url = match . group ( ) return url except : bot . error ( 'Problem with upload, skipping' ) else : bot . warning ( 'Cannot find %s, skipping submission.' % filename ) | a wrapper around generation of an asciinema . api . Api to call the upload command given an already existing asciinema file . |
51,418 | def make_tls_config ( app_config ) : if not app_config [ 'DOCKER_TLS' ] : return False cert_path = app_config [ 'DOCKER_TLS_CERT_PATH' ] if cert_path : client_cert = '{0}:{1}' . format ( os . path . join ( cert_path , 'cert.pem' ) , os . path . join ( cert_path , 'key.pem' ) ) ca_cert = os . path . join ( cert_path , 'ca.pem' ) else : client_cert = app_config [ 'DOCKER_TLS_CLIENT_CERT' ] ca_cert = app_config [ 'DOCKER_TLS_CA_CERT' ] client_cert = parse_client_cert_pair ( client_cert ) return TLSConfig ( client_cert = client_cert , ca_cert = ca_cert , verify = app_config [ 'DOCKER_TLS_VERIFY' ] , ssl_version = app_config [ 'DOCKER_TLS_SSL_VERSION' ] , assert_hostname = app_config [ 'DOCKER_TLS_ASSERT_HOSTNAME' ] ) | Creates TLS configuration object . |
51,419 | def parse_client_cert_pair ( config_value ) : if not config_value : return client_cert = config_value . split ( ':' ) if len ( client_cert ) != 2 : tips = ( 'client_cert should be formatted like ' '"/path/to/cert.pem:/path/to/key.pem"' ) raise ValueError ( '{0!r} is invalid.\n{1}' . format ( config_value , tips ) ) return tuple ( client_cert ) | Parses the client cert pair from config item . |
51,420 | async def process_feed ( self , url , send_mentions = True ) : self . _feed_domains . add ( utils . get_domain ( url ) ) if url in self . _processed_feeds : LOGGER . debug ( "Skipping already processed feed %s" , url ) return self . _processed_feeds . add ( url ) LOGGER . debug ( "++WAIT: %s: get feed" , url ) feed , previous , updated = await feeds . get_feed ( self , url ) LOGGER . debug ( "++DONE: %s: get feed" , url ) if updated : LOGGER . info ( "Feed %s has been updated" , url ) if not feed : return LOGGER . debug ( "--- starting process_feed %s %s" , url , send_mentions ) pending = [ ] try : for link in feed . links : href = link [ 'href' ] if not href : continue if self . args . archive and link . get ( 'rel' ) in ( 'prev-archive' , 'next-archive' , 'prev-page' , 'next-page' ) : LOGGER . debug ( "Found archive link %s" , link ) pending . append ( ( "process feed " + href , self . process_feed ( href , send_mentions ) ) ) if updated and link . get ( 'rel' ) == 'hub' and not feed . is_archive : LOGGER . debug ( "Found WebSub hub %s" , link ) pending . append ( ( "update websub " + href , feed . update_websub ( self , href ) ) ) except ( AttributeError , KeyError ) : LOGGER . debug ( "Feed %s has no links" , url ) items = set ( feed . entry_links ) if previous : items |= set ( previous . entry_links ) for entry in items : pending . append ( ( "process entry " + entry , self . process_entry ( entry , send_mentions = send_mentions ) ) ) LOGGER . debug ( "--- finish process_feed %s %s" , url , send_mentions ) if pending : LOGGER . debug ( "+++WAIT: process_feed(%s): %d subtasks" , url , len ( pending ) ) LOGGER . debug ( "%s" , [ name for ( name , _ ) in pending ] ) await asyncio . wait ( [ task for ( _ , task ) in pending ] ) LOGGER . debug ( "+++DONE: process_feed(%s): %d subtasks" , url , len ( pending ) ) | process a feed |
51,421 | async def process_entry ( self , url , add_domain = False , send_mentions = True ) : if add_domain : self . _feed_domains . add ( utils . get_domain ( url ) ) if url in self . _processed_entries : LOGGER . debug ( "Skipping already processed entry %s" , url ) return self . _processed_entries . add ( url ) LOGGER . debug ( "++WAIT: get entry %s" , url ) entry , previous , updated = await entries . get_entry ( self , url ) LOGGER . debug ( "++DONE: get entry %s" , url ) LOGGER . debug ( "--- starting process_entry %s" , url ) pending = [ ] if updated : LOGGER . info ( "Processing entry: %s" , url ) if send_mentions : links = entry . get_targets ( self ) if previous : links = links ^ previous . get_targets ( self ) for link in links : pending . append ( ( "send webmention {} -> {}" . format ( url , link ) , self . send_webmention ( entry , link ) ) ) if self . args . recurse : for feed in entry . feeds : if utils . get_domain ( feed ) in self . _feed_domains : pending . append ( ( "process feed " + feed , self . process_feed ( feed , send_mentions = send_mentions ) ) ) else : LOGGER . info ( "Ignoring non-local feed %s" , feed ) LOGGER . debug ( "--- finish process_entry %s" , url ) if pending : LOGGER . debug ( "+++WAIT: process_entry(%s): %d subtasks" , url , len ( pending ) ) LOGGER . debug ( "%s" , [ name for ( name , _ ) in pending ] ) await asyncio . wait ( [ task for ( _ , task ) in pending ] ) LOGGER . debug ( "+++DONE: process_entry(%s): %d subtasks" , url , len ( pending ) ) | process an entry |
51,422 | async def send_webmention ( self , entry , url ) : if ( entry . url , url ) in self . _processed_mentions : LOGGER . debug ( "Skipping already processed mention %s -> %s" , entry . url , url ) self . _processed_mentions . add ( ( entry . url , url ) ) LOGGER . debug ( "++WAIT: webmentions.get_target %s" , url ) target = await webmentions . get_target ( self , url ) LOGGER . debug ( "++DONE: webmentions.get_target %s" , url ) if target : LOGGER . debug ( "++WAIT: Sending webmention %s -> %s" , entry . url , url ) await target . send ( self , entry ) LOGGER . debug ( "++DONE: Sending webmention %s -> %s" , entry . url , url ) | send a webmention from an entry to a URL |
51,423 | def compilemessages ( application ) : from django . core . management import call_command with work_in ( application ) : if DJANGO_1_11 : call_command ( 'compilemessages' , all = True ) else : call_command ( 'compilemessages' ) | Compiles locale messages |
51,424 | def makemessages ( application , locale ) : from django . core . management import call_command if not locale : locale = 'en' with work_in ( application ) : call_command ( 'makemessages' , locale = ( locale , ) ) | Updates the locale message files |
51,425 | def cms_check ( migrate_cmd = False ) : from django . core . management import call_command try : import cms _create_db ( migrate_cmd ) call_command ( 'cms' , 'check' ) except ImportError : print ( 'cms_check available only if django CMS is installed' ) | Runs the django CMS cms check command |
51,426 | def generate_authors ( ) : print ( 'Generating AUTHORS' ) print ( 'Collecting author names' ) r = subprocess . Popen ( [ 'git' , 'log' , '--use-mailmap' , '--format=%aN' ] , stdout = subprocess . PIPE ) seen_authors = [ ] authors = [ ] for authfile in ( 'AUTHORS' , 'AUTHORS.rst' ) : if os . path . exists ( authfile ) : break with open ( authfile , 'r' ) as f : for line in f . readlines ( ) : if line . startswith ( "*" ) : author = force_text ( line ) . strip ( "* \n" ) if author . lower ( ) not in seen_authors : seen_authors . append ( author . lower ( ) ) authors . append ( author ) for author in r . stdout . readlines ( ) : author = force_text ( author ) . strip ( ) if author . lower ( ) not in seen_authors : seen_authors . append ( author . lower ( ) ) authors . append ( author ) authors = sorted ( authors , key = lambda x : x . lower ( ) ) print ( 'Authors (%s):\n\n\n* %s' % ( len ( authors ) , '\n* ' . join ( authors ) ) ) | Updates the authors list |
51,427 | def static_analisys ( application ) : try : from cms . test_utils . util . static_analysis import pyflakes application_module = __import__ ( application ) report = pyflakes ( ( application_module , ) ) if type ( report ) == tuple : assert report [ 0 ] == 0 else : assert report == 0 except ImportError : print ( 'Static analysis available only if django CMS is installed' ) | Performs a pyflakes static analysis with the same configuration as django CMS testsuite |
51,428 | def build_wheel ( platform ) : if platform in [ 'x86_64' , 'i686' ] : system = 'manylinux1' else : system = 'linux' setuptools . sandbox . run_setup ( 'setup.py' , [ '-q' , 'clean' , '--all' , 'bdist_wheel' , '--plat-name' , '{}_{}' . format ( system , platform ) ] ) | Create a wheel |
51,429 | def print_mem ( unit = "MB" ) : try : import psutil B = float ( psutil . Process ( os . getpid ( ) ) . memory_info ( ) . vms ) KB = B / 1024 MB = KB / 1024 GB = MB / 1024 result = vars ( ) [ unit ] print_info ( "memory usage: %.2f(%s)" % ( result , unit ) ) return result except ImportError : print_info ( "pip install psutil first." ) | Show the proc - mem - cost with psutil use this only for lazinesssss . |
51,430 | def slice_into_pieces ( seq , n ) : length = len ( seq ) if length % n == 0 : size = length // n else : size = length // n + 1 for it in slice_by_size ( seq , size ) : yield it | Slice a sequence into n pieces return a generation of n pieces |
51,431 | def slice_by_size ( seq , size ) : filling = null for it in zip ( * ( itertools_chain ( seq , [ filling ] * size ) , ) * size ) : if filling in it : it = tuple ( i for i in it if i is not filling ) if it : yield it | Slice a sequence into chunks return as a generation of chunks with size . |
51,432 | def unique ( seq , key = None , return_as = None ) : seen = set ( ) add = seen . add if key : generator = ( x for x in seq if key ( x ) not in seen and not add ( key ( x ) ) ) else : generator = ( x for x in seq if x not in seen and not add ( x ) ) if return_as : if return_as == str : return "" . join ( map ( str , generator ) ) else : return return_as ( generator ) else : return generator | Unique the seq and keep the order . |
51,433 | def unparse_qs ( qs , sort = False , reverse = False ) : result = [ ] items = qs . items ( ) if sort : items = sorted ( items , key = lambda x : x [ 0 ] , reverse = reverse ) for keys , values in items : query_name = quote ( keys ) for value in values : result . append ( query_name + "=" + quote ( value ) ) return "&" . join ( result ) | Reverse conversion for parse_qs |
51,434 | def unparse_qsl ( qsl , sort = False , reverse = False ) : result = [ ] items = qsl if sort : items = sorted ( items , key = lambda x : x [ 0 ] , reverse = reverse ) for keys , values in items : query_name = quote ( keys ) result . append ( query_name + "=" + quote ( values ) ) return "&" . join ( result ) | Reverse conversion for parse_qsl |
51,435 | def kill_after ( seconds , timeout = 2 ) : pid = os . getpid ( ) kill = os . kill run_after_async ( seconds , kill , pid , signal . SIGTERM ) run_after_async ( seconds + timeout , kill , pid , 9 ) | Kill self after seconds |
51,436 | def try_import ( module_name , names = None , default = ImportErrorModule , warn = True ) : try : module = importlib . import_module ( module_name ) except ImportError : if warn : if warn is True : Config . utils_logger . warning ( "Module `%s` not found. Install it to remove this warning" % module_name ) else : warn ( module_name , names , default ) module = ( ImportErrorModule ( module_name ) if default is ImportErrorModule else default ) if not names : return module if not isinstance ( names , ( tuple , set , list ) ) : names = [ names ] result = [ ] for name in names : if hasattr ( module , name ) : result . append ( module . __getattribute__ ( name ) ) else : result . append ( ImportErrorModule ( "%s.%s" % ( module_name , name ) ) if default is ImportErrorModule else default ) return result [ 0 ] if len ( result ) == 1 else result | Try import module_name except ImportError and return default sometimes to be used for catch ImportError and lazy - import . |
51,437 | def guess_interval ( nums , accuracy = 0 ) : if not nums : return 0 nums = sorted ( [ int ( i ) for i in nums ] ) if len ( nums ) == 1 : return nums [ 0 ] diffs = [ nums [ i + 1 ] - nums [ i ] for i in range ( len ( nums ) - 1 ) ] diffs = [ item for item in diffs if item >= accuracy ] sorted_diff = sorted ( diffs ) result = sorted_diff [ len ( diffs ) // 2 ] return result | Given a seq of number return the median only calculate interval > = accuracy . |
51,438 | def split_n ( string , seps , reg = False ) : r deep = len ( seps ) if not deep : return string return [ split_n ( i , seps [ 1 : ] ) for i in _re_split_mixin ( string , seps [ 0 ] , reg = reg ) ] | r Split strings into n - dimensional list . |
51,439 | def curlrequests ( curl_string , ** kwargs ) : req = kwargs . pop ( 'req' , tPool ( ) ) kwargs . update ( curlparse ( curl_string ) ) return req . request ( ** kwargs ) | Use tPool to request for curl string . If kwargs contains the req which hasattr request method like req = requests . |
51,440 | def register_function ( self , patterns , instances = None , ** reg_kwargs ) : def wrapper ( function ) : self . register ( patterns , function , instances = instances , ** reg_kwargs ) return function return wrapper | Decorator for register . |
51,441 | def find ( self , string , default = None ) : return self . match ( string ) or self . search ( string ) or default | Return match or search result . |
51,442 | def search ( self , string , default = None ) : default = default if default else [ ] result = [ item [ 1 ] for item in self . container if item [ 0 ] . search ( string ) ] if self . ensure_mapping : assert len ( result ) < 2 , "%s matches more than one pattern: %s" % ( string , result , ) return result if result else default | Use re . search to find the result |
51,443 | def fuzzy ( self , key , limit = 5 ) : instances = [ i [ 2 ] for i in self . container if i [ 2 ] ] if not instances : return instances = sum ( instances , [ ] ) from fuzzywuzzy import process maybe = process . extract ( key , instances , limit = limit ) return maybe | Give suggestion from all instances . |
51,444 | def show_all ( self , as_string = True ) : result = [ ] for item in self . container : pattern = str ( item [ 0 ] ) [ 10 : ] if PY3 else item [ 0 ] . pattern instances = item [ 2 ] or [ ] value = ( '%s "%s"' % ( item [ 1 ] . __name__ , ( item [ 1 ] . __doc__ or "" ) ) if callable ( item [ 1 ] ) else str ( item [ 1 ] ) ) value = "%s %s" % ( type ( item [ 1 ] ) , value ) result . append ( " => " . join ( ( pattern , "," . join ( instances ) , value ) ) ) return "\n" . join ( result ) if as_string else result | python2 will not show flags |
51,445 | def tick ( self ) : string = self . passed if self . rounding : string = round ( string ) if self . readable : string = self . readable ( string ) return string | Return the time cost string as expect . |
51,446 | def watch ( * timer_args , ** timer_kwargs ) : def wrapper ( function ) : @ wraps ( function ) def inner ( * args , ** kwargs ) : args1 = ", " . join ( map ( repr , args ) ) if args else "" kwargs1 = ", " . join ( [ "%s=%s" % ( i , repr ( kwargs [ i ] ) ) for i in sorted ( kwargs . keys ( ) ) ] ) arg = ", " . join ( filter ( None , [ args1 , kwargs1 ] ) ) name = "%s(%s)" % ( function . __name__ , arg ) _ = Timer ( name = name , * timer_args , ** timer_kwargs ) result = function ( * args , ** kwargs ) return result return inner return wrapper | Decorator for Timer . |
51,447 | def default_callback ( self , text ) : text = text . replace ( "\r\n" , "\n" ) text = "%s\n" % text flush_print ( text , sep = "" , end = "" ) return text | Default clean the \\ n in text . |
51,448 | def watch ( self , limit = None , timeout = None ) : start_time = time . time ( ) count = 0 while not timeout or time . time ( ) - start_time < timeout : new = self . read ( ) if new != self . temp : count += 1 self . callback ( new ) if count == limit : break self . temp = new time . sleep ( self . interval ) | Block method to watch the clipboard changing . |
51,449 | def watch_async ( self , limit = None , timeout = None ) : return self . watch ( limit = limit , timeout = timeout ) | Non - block method to watch the clipboard changing . |
51,450 | def find_one ( cls , pattern , string , flags = 0 ) : item = re . search ( pattern , string , flags = flags ) return cls ( item ) | JS - like match object . Use index number to get groups if not match or no group will return . |
51,451 | def create_issue ( title , body , repo , token ) : owner , name = repo . split ( '/' ) url = 'https://api.github.com/repos/%s/%s/issues' % ( owner , name ) data = { 'title' : title , 'body' : body } headers = { "Authorization" : "token %s" % token , "Accept" : "application/vnd.github.symmetra-preview+json" } response = requests . post ( url , data = json . dumps ( data ) , headers = headers ) if response . status_code in [ 201 , 202 ] : url = response . json ( ) [ 'html_url' ] bot . info ( url ) return url elif response . status_code == 404 : bot . error ( 'Cannot create issue. Does your token have scope repo?' ) sys . exit ( 1 ) else : bot . error ( 'Cannot create issue %s' % title ) bot . error ( response . content ) sys . exit ( 1 ) | create a Github issue given a title body repo and token . |
51,452 | async def get_target ( config , url ) : previous = config . cache . get ( 'target' , url , schema_version = SCHEMA_VERSION ) if config . cache else None headers = previous . caching if previous else None request = await utils . retry_get ( config , url , headers = headers ) if not request or not request . success : return previous if request . cached : return previous current = Target ( request ) if config . cache : config . cache . set ( 'target' , url , current ) return current | Given a URL get the webmention endpoint |
51,453 | async def send ( self , config , entry ) : if self . endpoint : LOGGER . debug ( "%s -> %s" , entry . url , self . url ) try : await self . endpoint . send ( config , entry . url , self . url ) except Exception as err : LOGGER . warning ( "Ping %s: got %s: %s" , self . url , err . __class__ . __name__ , err ) | Send a webmention to this target from the specified entry |
51,454 | def get_helper ( name = None , quiet = True , ** kwargs ) : from helpme . defaults import HELPME_CLIENT if name is not None : HELPME_CLIENT = name if HELPME_CLIENT == 'github' : from . github import Helper elif HELPME_CLIENT == 'uservoice' : from . uservoice import Helper elif HELPME_CLIENT == 'discourse' : from . discourse import Helper else : from . github import Helper Helper . name = HELPME_CLIENT Helper . quiet = quiet return Helper ( ) | get the correct helper depending on the environment variable HELPME_CLIENT |
51,455 | def mock_bable ( monkeypatch ) : mocked_bable = MockBaBLE ( ) mocked_bable . set_controllers ( [ Controller ( 0 , '11:22:33:44:55:66' , '#0' ) , Controller ( 1 , '22:33:44:55:66:11' , '#1' , settings = { 'powered' : True , 'low_energy' : True } ) , Controller ( 2 , '33:44:55:66:11:22' , '#2' , settings = { 'powered' : True } ) ] ) monkeypatch . setattr ( bable_interface , 'BaBLEInterface' , lambda : mocked_bable ) return mocked_bable | Mock the BaBLEInterface class with some controllers inside . |
51,456 | def handle ( self ) : self . output = PyStratumStyle ( self . input , self . output ) command = self . get_application ( ) . find ( 'constants' ) ret = command . execute ( self . input , self . output ) if ret : return ret command = self . get_application ( ) . find ( 'loader' ) ret = command . execute ( self . input , self . output ) if ret : return ret command = self . get_application ( ) . find ( 'wrapper' ) ret = command . execute ( self . input , self . output ) self . output . writeln ( '' ) return ret | Executes the actual Stratum program . |
51,457 | def load_from_file ( module_path ) : from imp import load_module , PY_SOURCE imported = None if module_path : with open ( module_path , 'r' ) as openfile : imported = load_module ( 'mod' , openfile , module_path , ( 'imported' , 'r' , PY_SOURCE ) ) return imported | Load a python module from its absolute filesystem path |
51,458 | def ensure_unicoded_and_unique ( args_list , application ) : unicoded_args = [ ] for argument in args_list : argument = ( six . u ( argument ) if not isinstance ( argument , six . text_type ) else argument ) if argument not in unicoded_args or argument == application : unicoded_args . append ( argument ) return unicoded_args | Iterate over args_list make it unicode if needed and ensure that there are no duplicates . Returns list of unicoded arguments in the same order . |
51,459 | def write_two_phases ( filename , data , io ) : write_flag = True if os . path . exists ( filename ) : with open ( filename , 'r' ) as file : old_data = file . read ( ) if data == old_data : write_flag = False if write_flag : tmp_filename = filename + '.tmp' with open ( tmp_filename , 'w+' ) as file : file . write ( data ) os . replace ( tmp_filename , filename ) io . text ( 'Wrote: <fso>{0}</fso>' . format ( filename ) ) else : io . text ( 'File <fso>{0}</fso> is up to date' . format ( filename ) ) | Writes a file in two phase to the filesystem . |
51,460 | async def get_entry ( config , url ) : previous = config . cache . get ( 'entry' , url , schema_version = SCHEMA_VERSION ) if config . cache else None headers = previous . caching if previous else None request = await utils . retry_get ( config , url , headers = headers ) if not request or not request . success : LOGGER . error ( "Could not get entry %s: %d" , url , request . status if request else - 1 ) return None , previous , False if request . cached : return previous , previous , False current = Entry ( request ) if config . cache : config . cache . set ( 'entry' , url , current ) return current , previous , ( not previous or previous . digest != current . digest or previous . status != current . status ) | Given an entry URL return the entry |
51,461 | def _check_rel ( attrs , rel_whitelist , rel_blacklist ) : rels = attrs . get ( 'rel' , [ None ] ) if rel_blacklist : for rel in rels : if rel in rel_blacklist : return False if rel_whitelist : for rel in rels : if rel in rel_whitelist : return True return False return True | Check a link s relations against the whitelist or blacklist . |
51,462 | def _domain_differs ( self , href ) : target = utils . get_domain ( href ) if not target : return False origin = utils . get_domain ( self . url ) return target != origin | Check that a link is not on the same domain as the source URL |
51,463 | def get_targets ( self , config ) : return { urllib . parse . urljoin ( self . url , attrs [ 'href' ] ) for attrs in self . _targets if self . _check_rel ( attrs , config . rel_whitelist , config . rel_blacklist ) and self . _domain_differs ( attrs [ 'href' ] ) } | Given an Entry object return all of the outgoing links . |
51,464 | def handle ( self ) : self . output = PyStratumStyle ( self . input , self . output ) config_file = self . argument ( 'config_file' ) sources = self . argument ( 'file_names' ) status = self . run_command ( config_file , sources ) return status | Executes loader command . |
51,465 | async def create_vm ( self , * , preset_name , image , flavor , security_groups = None , userdata = None , key_name = None , availability_zone = None , subnet = None ) : info = { 'id' : next ( self . _id_it ) , 'name' : preset_name , 'ip' : [ '127.0.0.1' ] , 'created' : 0 , 'state' : VmState . RUNNING , 'flavor' : flavor , 'image' : image , 'metadata' : { 'test-meta' : 'abctest' } , 'timed_shutdown_at' : 1522753481 , 'tags' : [ 'a-tag' , 'b-tag' , 'c-tag' ] } logging . debug ( 'Prepare vm: %s' , info ) vm = Vm ( self , ** info ) self . _vms [ vm . id ] = vm logging . debug ( 'Create: %s' , vm ) return None | Dummy create_vm func . |
51,466 | async def list_vms ( self , preset_name ) : return list ( vm for vm in self . _vms . values ( ) if vm . name == preset_name ) | Dummy list_vms func |
51,467 | async def terminate_vm ( self , vm_id ) : if vm_id not in self . _vms : raise DummyIaasVmNotFound ( ) del self . _vms [ vm_id ] return None | Dummy terminate_vm func |
51,468 | async def get_vm ( self , vm_id ) : if vm_id not in self . _vms : raise DummyIaasVmNotFound ( ) return self . _vms [ vm_id ] | Dummy get_vm func |
51,469 | def suggest_filename ( file_path , exists = None ) : import os . path import re if not isinstance ( exists , bool ) : exists = os . path . exists ( file_path ) if exists : file_path , file_extension = os . path . splitext ( file_path ) m = re . search ( r"_\d+$" , file_path ) if m is None : new_cislo_str = "_2" else : cislostr = ( m . group ( ) ) cislo = int ( cislostr [ 1 : ] ) + 1 file_path = file_path [ : - len ( cislostr ) ] new_cislo_str = "_" + str ( cislo ) file_path = file_path + new_cislo_str + file_extension file_path = suggest_filename ( file_path ) return file_path | Try if exist path and append number to its end . For debug you can set as input if file exists or not . |
51,470 | def obj_from_file ( filename = 'annotation.yaml' , filetype = 'auto' ) : if filetype == 'auto' : _ , ext = os . path . splitext ( filename ) filetype = ext [ 1 : ] if filetype in ( 'yaml' , 'yml' ) : from ruamel . yaml import YAML yaml = YAML ( typ = "unsafe" ) with open ( filename , encoding = "utf-8" ) as f : obj = yaml . load ( f ) if obj is None : obj = { } elif filetype in ( 'pickle' , 'pkl' , 'pklz' , 'picklezip' ) : fcontent = read_pkl_and_pklz ( filename ) if sys . version_info [ 0 ] < 3 : import cPickle as pickle else : import _pickle as pickle if sys . version_info . major == 2 : obj = pickle . loads ( fcontent ) else : obj = pickle . loads ( fcontent , encoding = "latin1" ) else : logger . error ( 'Unknown filetype ' + filetype ) return obj | Read object from file |
51,471 | def read_pkl_and_pklz ( filename ) : fcontent = None try : import gzip f = gzip . open ( filename , 'rb' ) fcontent = f . read ( ) f . close ( ) except IOError as e : logger . info ( "Input gzip exception: " + str ( e ) ) f = open ( filename , 'rb' ) fcontent = f . read ( ) f . close ( ) except Exception as e : import traceback logger . error ( "Input gzip exception: " + str ( e ) ) logger . error ( traceback . format_exc ( ) ) return fcontent | Try read zipped or not zipped pickle file |
51,472 | def obj_to_file ( obj , filename , filetype = 'auto' , ndarray_to_list = False , squeeze = True ) : if ndarray_to_list : obj = ndarray_to_list_in_structure ( obj , squeeze = squeeze ) d = os . path . dirname ( os . path . abspath ( filename ) ) if not os . path . exists ( d ) : os . makedirs ( d ) if filetype == 'auto' : _ , ext = os . path . splitext ( filename ) filetype = ext [ 1 : ] if filetype in ( 'yaml' , 'yml' ) : from ruamel . yaml import YAML yaml = YAML ( typ = "unsafe" ) with open ( filename , 'wt' , encoding = "utf-8" ) as f : yaml . dump ( obj , f ) elif filetype in ( 'pickle' , 'pkl' ) : f = open ( filename , 'wb' ) logger . info ( "filename " + filename ) import pickle pickle . dump ( obj , f , - 1 ) f . close elif filetype in ( 'streamingpicklezip' , 'spklz' ) : import gzip import sPickle as pickle f = gzip . open ( filename , 'wb' , compresslevel = 1 ) pickle . s_dump ( obj , f ) f . close elif filetype in ( 'picklezip' , 'pklz' ) : import gzip if sys . version_info [ 0 ] < 3 : import cPickle as pickle else : import _pickle as pickle f = gzip . open ( filename , 'wb' , compresslevel = 1 ) pickle . dump ( obj , f ) f . close elif filetype in ( 'mat' ) : import scipy . io as sio sio . savemat ( filename , obj ) else : logger . error ( 'Unknown filetype ' + filetype ) | Writes annotation in file . |
51,473 | def resize_to_shape ( data , shape , zoom = None , mode = 'nearest' , order = 0 ) : try : import skimage import skimage . transform segm_orig_scale = skimage . transform . resize ( data , shape , order = 0 , preserve_range = True , mode = "constant" , ) segmentation = segm_orig_scale logger . debug ( 'resize to orig with skimage' ) except : import scipy import scipy . ndimage dtype = data . dtype if zoom is None : zoom = shape / np . asarray ( data . shape ) . astype ( np . double ) segm_orig_scale = scipy . ndimage . zoom ( data , 1.0 / zoom , mode = mode , order = order ) . astype ( dtype ) logger . debug ( 'resize to orig with scipy.ndimage' ) shp = [ np . min ( [ segm_orig_scale . shape [ 0 ] , shape [ 0 ] ] ) , np . min ( [ segm_orig_scale . shape [ 1 ] , shape [ 1 ] ] ) , np . min ( [ segm_orig_scale . shape [ 2 ] , shape [ 2 ] ] ) , ] segmentation = np . zeros ( shape , dtype = dtype ) segmentation [ 0 : shp [ 0 ] , 0 : shp [ 1 ] , 0 : shp [ 2 ] ] = segm_orig_scale [ 0 : shp [ 0 ] , 0 : shp [ 1 ] , 0 : shp [ 2 ] ] del segm_orig_scale return segmentation | Function resize input data to specific shape . |
51,474 | def use_economic_dtype ( data3d , slope = 1 , inter = 0 , dtype = None ) : if dtype is None : dtype = data3d . dtype if issubclass ( dtype . type , np . integer ) : mn = data3d . min ( ) * slope + inter mx = data3d . max ( ) * slope + inter if suits_with_dtype ( mn , mx , dtype = np . uint8 ) : dtype = np . uint8 elif suits_with_dtype ( mn , mx , dtype = np . int8 ) : dtype = np . int8 elif suits_with_dtype ( mn , mx , dtype = np . uint16 ) : dtype = np . uint16 elif suits_with_dtype ( mn , mx , dtype = np . int16 ) : dtype = np . int16 elif suits_with_dtype ( mn , mx , dtype = np . uint32 ) : dtype = np . uint32 elif suits_with_dtype ( mn , mx , dtype = np . int32 ) : dtype = np . int32 if slope == 1 and inter == 0 : new_data3d = data3d . astype ( dtype ) else : new_data3d = ( ( slope * data3d ) + inter ) . astype ( dtype ) return new_data3d | Use more economic integer - like dtype if it is possible . |
51,475 | def get_sitk_image_from_ndarray ( data3d ) : import SimpleITK as sitk rescale_intercept = None if sitk . Version . MajorVersion ( ) > 0 : if data3d . dtype == np . int8 : rescale_intercept = - 2 ** 7 data3d = ( data3d - rescale_intercept ) . astype ( np . uint8 ) elif data3d . dtype == np . int16 : rescale_intercept = - 2 ** 10 data3d = ( data3d - rescale_intercept ) . astype ( np . uint16 ) elif data3d . dtype == np . int32 : rescale_intercept = - 2 ** 31 data3d = ( data3d - rescale_intercept ) . astype ( np . uint16 ) dim = sitk . GetImageFromArray ( data3d ) if sitk . Version . MajorVersion ( ) > 0 : if rescale_intercept is not None : dim . SetMetaData ( "0028|1052" , str ( rescale_intercept ) ) dim . SetMetaData ( "0028|1053" , "1" ) return dim | Prepare SimpleItk Image object and rescale data to unsigned types . |
51,476 | def split_evenly ( n , chunks ) : if n < chunks : raise ChunkingError ( "Number of chunks is greater than number" ) if n % chunks == 0 : return [ n / chunks ] * chunks max_size = n / chunks + 1 return [ max_size ] + split_evenly ( n - max_size , chunks - 1 ) | Split an integer into evenly distributed list |
51,477 | def _generate_keys ( self ) : from helpme . defaults import HELPME_CLIENT_SECRETS keypair_dir = os . path . join ( os . path . dirname ( HELPME_CLIENT_SECRETS ) , 'discourse' ) self . keypair_file = os . path . join ( keypair_dir , 'private.pem' ) if not hasattr ( self , 'key' ) : self . key = generate_keypair ( self . keypair_file ) if not hasattr ( self , 'public_key' ) : self . public_key = load_keypair ( self . keypair_file ) | the discourse API requires the interactions to be signed so we generate a keypair on behalf of the user |
51,478 | def invalidate ( self , cls , id_field , id_val ) : cache_key , flag_key = self . get_keys ( cls , id_field , id_val ) pipeline = self . redis . pipeline ( ) pipeline . delete ( cache_key ) pipeline . delete ( flag_key ) pipeline . execute ( ) | Invalidate the cache for a given Mongo object by deleting the cached data and the cache flag . |
51,479 | async def manage ( self ) : self . _vms = await self . iaas . list_vms ( self . name ) vms_stat = Counter ( [ vm . get_state ( ) for vm in self . _vms ] ) missing = self . count - len ( self . _vms ) if len ( self . _vms ) < self . count else 0 logging . info ( 'VMs Status: %s expected, %s in iaas, %s running, %s nearby shutdown, %s pending, %s after time shutdown, ' '%s terminated, %s error, %s unknown, %s missing' , self . count , len ( self . _vms ) , vms_stat [ VmState . RUNNING . value ] , vms_stat [ VmState . NEARBY_SHUTDOWN . value ] , vms_stat [ VmState . PENDING . value ] , vms_stat [ VmState . AFTER_TIME_SHUTDOWN . value ] , vms_stat [ VmState . TERMINATED . value ] , vms_stat [ VmState . ERROR . value ] , vms_stat [ VmState . UNKNOWN . value ] , missing , extra = self . _extra ) for vm in self . _vms : if vm . is_dead ( ) : logging . info ( "Terminate %s" , vm , extra = self . _extra ) await vm . terminate ( ) self . terminated += 1 to_create = self . count - ( len ( self . _vms ) - self . terminated - vms_stat [ VmState . NEARBY_SHUTDOWN . value ] ) to_create = to_create if to_create > 0 else 0 logging . debug ( "Create %s Vm" , to_create , extra = self . _extra ) await self . _create_vms ( to_create ) await self . _healthcheck ( self . _vms ) logging . info ( 'VMs Status update: %s terminated, %s terminated by healthcheck, %s created, %s failed healthcheck' , self . terminated , self . healthcheck_terminated , to_create , len ( self . runtime . failed_checks ) , extra = self . _extra ) | Manage function docstring |
51,480 | def log_method ( log , level = logging . DEBUG ) : def decorator ( func ) : func_name = func . __name__ @ six . wraps ( func ) def wrapper ( self , * args , ** kwargs ) : if log . isEnabledFor ( level ) : pretty_args = [ ] if args : pretty_args . extend ( str ( a ) for a in args ) if kwargs : pretty_args . extend ( "%s=%s" % ( k , v ) for k , v in six . iteritems ( kwargs ) ) log . log ( level , "%s(%s)" , func_name , ", " . join ( pretty_args ) ) return func ( self , * args , ** kwargs ) return wrapper return decorator | Logs a method and its arguments when entered . |
51,481 | def check_local ( ) -> None : to_check = [ './replay' , './replay/toDo' , './replay/archive' ] for i in to_check : if not os . path . exists ( i ) : os . makedirs ( i ) | Verify required directories exist . |
51,482 | def create_file ( project : str , environment : str , feature : str , state : str ) -> None : check_local ( ) save_path = './replay/toDo/' filename = '{0}.txt' . format ( str ( uuid . uuid1 ( ) ) ) complete_name = os . path . join ( save_path , filename ) with open ( complete_name , 'w' ) as filename : filename . write ( 'rc update-ld-api -p {0} -e {1} -f {2} -s {3}' . format ( project , environment , feature , state ) ) | Create file to replay . |
51,483 | def execute_replay ( ) -> None : files = glob . glob ( './replay/toDo/*' ) sorted_files = sorted ( files , key = os . path . getctime ) if not sorted_files : LOG . debug ( 'Found %s, beginning execution.' , sorted_files ) for command_file in sorted_files : with open ( command_file , 'r' ) as command : cmd = command . read ( ) LOG . debug ( 'executing command: %s' , cmd ) resp = run ( [ cmd , '-v' , 'DEBUG' ] , shell = True , check = True ) LOG . debug ( resp ) LOG . debug ( 'moving %s to archive' , command . name ) move_command = 'mv {0} ./replay/archive/' . format ( command . name ) run ( move_command , shell = True , check = True ) LOG . info ( 'LaunchDarkly is now up to date.' ) else : LOG . warning ( 'No files found, nothing to replay.' ) | Execute all commands . |
51,484 | def choice_prompt ( prompt , choices = None , choice = None ) : if not choices : choices = [ "y" , "n" , "Y" , "N" ] print ( prompt ) get_input = getattr ( __builtins__ , 'raw_input' , input ) pretty_choices = '/' . join ( choices ) message = 'Please enter your choice [%s] : ' % ( pretty_choices ) while choice not in choices : choice = get_input ( message ) . strip ( ) message = "Please enter a valid option in [%s]" % ( pretty_choices ) return choice | Ask the user for a prompt and only return when one of the requested options is provided . |
51,485 | def regexp_prompt ( prompt , regexp = '.' , answer = '' ) : get_input = getattr ( __builtins__ , 'raw_input' , input ) while not re . search ( regexp , answer ) : answer = get_input ( prompt + ': ' ) . strip ( ) message = "Your entry must match the regular expression %s" % regexp return answer | Ask the user for a text entry that matches a regular expression |
51,486 | def which ( software , strip_newline = True ) : if software is None : software = "singularity" cmd = [ 'which' , software ] try : result = run_command ( cmd ) if strip_newline is True : result [ 'message' ] = result [ 'message' ] . strip ( '\n' ) return result except : return None | get_install will return the path to where an executable is installed . |
51,487 | def guess_encoding ( request ) : ctype = request . headers . get ( 'content-type' ) if not ctype : LOGGER . warning ( "%s: no content-type; headers are %s" , request . url , request . headers ) return 'utf-8' match = re . search ( r'charset=([^ ;]*)(;| |$)' , ctype ) if match : return match [ 1 ] if ctype . startswith ( 'text/html' ) : return 'iso-8859-1' return 'utf-8' | Try to guess the encoding of a request without going through the slow chardet process |
51,488 | def _make_headers ( config , kwargs ) : headers = kwargs . get ( 'headers' ) headers = headers . copy ( ) if headers is not None else { } headers [ 'User-Agent' ] = config . args . user_agent kwargs = kwargs . copy ( ) kwargs [ 'headers' ] = headers return kwargs | Replace the kwargs with one where the headers include our user - agent |
51,489 | async def retry_get ( config , url , * args , ** kwargs ) : return await _retry_do ( config . session . get , url , * args , ** _make_headers ( config , kwargs ) ) | aiohttp wrapper for GET |
51,490 | async def retry_post ( config , url , * args , ** kwargs ) : return await _retry_do ( config . session . post , url , * args , ** _make_headers ( config , kwargs ) ) | aiohttp wrapper for POST |
51,491 | def collect_argument ( self , step , message ) : if step not in self . data : self . data [ step ] = regexp_prompt ( message ) | given a key in the configuration collect the runtime argument if provided . Otherwise prompt the user for the value . |
51,492 | def record_environment ( self ) : envars = self . _get_setting ( name = 'whitelist' , section = 'record_environment' , user = False ) if envars is not None : envars = [ x . upper ( ) for x in envars . split ( '\n' ) ] bot . custom ( prefix = "Environment " , message = '|' . join ( envars ) , color = "CYAN" ) keep = [ ( k , v ) for k , v in os . environ . items ( ) if k . upper ( ) in envars ] if confirm_prompt ( 'Is this list ok to share?' ) : self . data [ 'record_environment' ] = keep | collect a limited set of environment variables based on the list under record_envirionment in the configuration file . |
51,493 | def speak ( self ) : if self . quiet is False : bot . info ( '[helper|%s]' % ( self . name ) ) self . _speak ( ) | a function for the helper to announce him or herself depending on the level specified . If you want your client to have additional announced things here then implement the class _speak for your client . |
51,494 | def __log_number_of_constants ( self ) : n_id = len ( self . _labels ) n_widths = len ( self . _constants ) - n_id self . _io . writeln ( '' ) self . _io . text ( 'Number of constants based on column widths: {0}' . format ( n_widths ) ) self . _io . text ( 'Number of constants based on database IDs : {0}' . format ( n_id ) ) | Logs the number of constants generated . |
51,495 | def _check_env_var ( envvar : str ) -> bool : if os . getenv ( envvar ) is None : raise KeyError ( "Required ENVVAR: {0} is not set" . format ( envvar ) ) if not os . getenv ( envvar ) : raise KeyError ( "Required ENVVAR: {0} is empty" . format ( envvar ) ) return True | Check Environment Variable to verify that it is set and not empty . |
51,496 | def valid_state ( state : str ) -> bool : lower_case_state = state . lower ( ) if lower_case_state in _VALID_STATES : return True return False | Validate State Argument |
51,497 | def valid_env_vars ( ) -> bool : for envvar in _REQUIRED_ENV_VARS : try : _check_env_var ( envvar ) except KeyError as ex : LOG . error ( ex ) sys . exit ( 1 ) return True | Validate that required env vars exist . |
51,498 | def configure_panel ( self ) : webroot = os . path . dirname ( __file__ ) self . template_path = os . path . join ( webroot , 'templates' ) aiohttp_jinja2 . setup ( self , loader = jinja2 . FileSystemLoader ( self . template_path ) , filters = { 'sorted' : sorted , 'int' : int } ) self [ 'static_root_url' ] = '/static' self . router . add_view ( '/' , Panel ) self . router . add_static ( '/static/' , path = os . path . join ( webroot , 'static' ) , name = 'static' ) | Configure templates and routing |
51,499 | async def start ( self ) : logging . info ( 'Starting server, listening on %s.' , self . port ) runner = web . AppRunner ( self ) await runner . setup ( ) site = web . TCPSite ( runner , '' , self . port ) await site . start ( ) | Initialize and start WebServer |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.