idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
14,500
def subscribe(self, topic=b''):
    """Subscribe the SUB socket to *topic* and return its poller stream.

    The returned poller can be listened to for incoming variables.
    """
    sub_socket = self.sockets[zmq.SUB]
    sub_socket.setsockopt(zmq.SUBSCRIBE, topic)
    return self.pollers[zmq.SUB]
Subscribe to the SUB socket to listen for incoming variables. Returns a stream that can be listened to.
14,501
def download ( remote_location , remotes = None , prefix = "" , dry_run = False ) : if remotes is None : remotes , _ = _resources_files ( abs_paths = remote_location . startswith ( 's3://' ) ) if remote_location . startswith ( 's3://' ) : from . s3 import S3Backend backend = S3Backend ( remote_location , dry_run = dry_...
Download resources from a stage server .
14,502
def upload ( remote_location , remotes = None , ignores = None , static_root = "/static/" , prefix = "" , dry_run = False ) : if remotes is None : remotes , ignores = _resources_files ( abs_paths = remote_location . startswith ( 's3://' ) ) if remote_location . startswith ( 's3://' ) : from deployutils . s3 import S3Ba...
Upload resources to a stage server .
14,503
def json_dumps ( obj ) : try : return json . dumps ( obj , indent = 2 , sort_keys = True , allow_nan = False ) except ValueError : pass json_str = json . dumps ( obj , indent = 2 , sort_keys = True , allow_nan = True ) json_obj = json . loads ( json_str ) def do_map ( obj ) : if obj is None : return None if isinstance ...
A safe JSON dump function that provides correct diverging numbers for a ECMAscript consumer .
14,504
def msg ( message , * args , ** kwargs ) : global log_file if log_file is None : log_file = sys . stderr if long_msg : file_name , line = caller_trace ( ) file_name , file_type = os . path . splitext ( file_name ) if file_name . endswith ( '/__init__' ) : file_name = os . path . basename ( os . path . dirname ( file_na...
Prints a message from the server to the log file .
14,505
def setup_restart ( ) : exit_code = os . environ . get ( 'QUICK_SERVER_RESTART' , None ) if exit_code is None : try : atexit . unregister ( _on_exit ) except AttributeError : atexit . _exithandlers = filter ( lambda exit_hnd : exit_hnd [ 0 ] != _on_exit , atexit . _exithandlers ) _start_restart_loop ( None , in_atexit ...
Sets up restart functionality that doesn t keep the first process alive . The function needs to be called before the actual process starts but after loading the program . It will restart the program in a child process and immediately returns in the child process . The call in the parent process never returns . Calling ...
14,506
def convert_argmap ( self , query ) : res = { } if isinstance ( query , bytes ) : query = query . decode ( 'utf8' ) for section in query . split ( '&' ) : eqs = section . split ( '=' , 1 ) name = urlparse_unquote ( eqs [ 0 ] ) if len ( eqs ) > 1 : res [ name ] = urlparse_unquote ( eqs [ 1 ] ) else : res [ name ] = True...
Converts the query string of an URL to a map .
14,507
def convert_args ( self , rem_path , args ) : fragment_split = rem_path . split ( '#' , 1 ) query_split = fragment_split [ 0 ] . split ( '?' , 1 ) segs = filter ( lambda p : len ( p ) and p != '.' , os . path . normpath ( query_split [ 0 ] ) . split ( '/' ) ) paths = [ urlparse_unquote ( p ) for p in segs ] query = sel...
Splits the rest of a URL into its argument parts . The URL is assumed to start with the dynamic request prefix already removed .
14,508
def handle_special ( self , send_body , method_str ) : ongoing = True if self . server . report_slow_requests : path = self . path def do_report ( ) : if not ongoing : return msg ( "request takes longer than expected: \"{0} {1}\"" , method_str , path ) alarm = threading . Timer ( 5.0 , do_report ) alarm . start ( ) els...
Handles a dynamic request . If this method returns False the request is interpreted as static file request . Methods can be registered using the add_TYPE_METHOD_mask methods of QuickServer .
14,509
def check_cache(self, e_tag, match):
    """Compare *e_tag* against *match*; on a hit send a 304 response.

    Returns True when a cache-match response was sent, False otherwise.
    """
    if e_tag != match:
        return False
    # ETag matched -- reply "Not Modified" with caching headers only.
    self.send_response(304)
    self.send_header("ETag", e_tag)
    self.send_header(
        "Cache-Control", "max-age={0}".format(self.server.max_age))
    self.end_headers()
    thread_local.size = 0  # no body is sent for a 304
    return True
Checks the ETag and sends a cache match response if it matches .
14,510
def handle_error ( self ) : if self . server . can_ignore_error ( self ) : return if thread_local . status_code is None : msg ( "ERROR: Cannot send error status code! " + "Header already sent!\n{0}" , traceback . format_exc ( ) ) else : msg ( "ERROR: Error while processing request:\n{0}" , traceback . format_exc ( ) ) ...
Tries to send a 500 error after encountering an exception.
14,511
def cross_origin_headers ( self ) : if not self . is_cross_origin ( ) : return False self . send_header ( "Access-Control-Allow-Methods" , "GET, POST, PUT, DELETE, HEAD" ) allow_headers = _getheader ( self . headers , 'access-control-request-headers' ) if allow_headers is not None : self . send_header ( "Access-Control...
Sends cross origin headers .
14,512
def do_OPTIONS ( self ) : thread_local . clock_start = get_time ( ) thread_local . status_code = 200 thread_local . message = None thread_local . headers = [ ] thread_local . end_headers = [ ] thread_local . size = - 1 thread_local . method = 'OPTIONS' self . send_response ( 200 ) if self . is_cross_origin ( ) : no_cac...
Handles an OPTIONS request .
14,513
def do_GET ( self ) : thread_local . clock_start = get_time ( ) thread_local . status_code = 200 thread_local . message = None thread_local . headers = [ ] thread_local . end_headers = [ ] thread_local . size = - 1 thread_local . method = 'GET' try : self . cross_origin_headers ( ) if self . handle_special ( True , 'GE...
Handles a GET request .
14,514
def log_request ( self , code = '-' , size = '-' ) : print_size = getattr ( thread_local , 'size' , - 1 ) if size != '-' : size_str = ' (%s)' % size elif print_size >= 0 : size_str = self . log_size_string ( print_size ) + ' ' else : size_str = '' if not self . server . suppress_noise or ( code != 200 and code != 304 )...
Logs the current request .
14,515
def _process_request(self, request, client_address):
    """Run one request through finish_request, reporting failures.

    The request is always shut down, whether handling succeeded or not.
    """
    try:
        self.finish_request(request, client_address)
    except Exception:
        self.handle_error(request, client_address)
    finally:
        self.shutdown_request(request)
Actually processes the request .
14,516
def process_request(self, request, client_address):
    """Dispatch a request, inline or on a daemon worker thread.

    In non-parallel mode the request is handled on the calling thread;
    otherwise a daemon thread from the thread factory handles it.
    """
    if not self._parallel:
        self._process_request(request, client_address)
        return
    worker = self._thread_factory(
        target=self._process_request, args=(request, client_address))
    worker.daemon = True
    worker.start()
Processes the request by delegating to _process_request .
14,517
def add_file_patterns(self, patterns, blacklist):
    """Append *patterns* to the black-list or white-list.

    Patterns are matched against the absolute path of delivered files;
    folder in-/exclusion is configured elsewhere.
    """
    target = self._pattern_black if blacklist else self._pattern_white
    target.extend(patterns)
Adds a list of file patterns to either the black - or white - list . Note that this pattern is applied to the absolute path of the file that will be delivered . For including or excluding folders use add_folder_mask or add_folder_fallback .
14,518
def bind_path(self, name, folder):
    """Prepend a mask mapping *name* to *folder* (relative to base_path).

    Raises:
        ValueError: if *name* does not start and end with '/'.
    """
    if not name or not name.startswith('/') or not name.endswith('/'):
        raise ValueError(
            "name must start and end with '/': {0}".format(name))
    self._folder_masks.insert(0, (name, folder))
Adds a mask that maps to a given folder relative to base_path .
14,519
def bind_path_fallback(self, name, folder):
    """Append a fallback mask mapping *name* to *folder*.

    Raises:
        ValueError: if *name* does not start and end with '/'.
    """
    if not name or not name.startswith('/') or not name.endswith('/'):
        raise ValueError(
            "name must start and end with '/': {0}".format(name))
    self._folder_masks.append((name, folder))
Adds a fallback for a given folder relative to base_path .
14,520
def bind_proxy(self, name, proxy):
    """Prepend a mask mapping *name* to the given *proxy*.

    Raises:
        ValueError: if *name* does not start and end with '/'.
    """
    if not name or not name.startswith('/') or not name.endswith('/'):
        raise ValueError(
            "name must start and end with '/': {0}".format(name))
    self._folder_proxys.insert(0, (name, proxy))
Adds a mask that maps to a given proxy .
14,521
def add_cmd_method(self, name, method, argc=None, complete=None):
    """Register *method* as command *name* in the CLI loop.

    Args:
        argc: expected argument count, or None for unchecked.
        complete: optional completion callback.

    Raises:
        ValueError: if the command name contains a space.
    """
    if ' ' in name:
        raise ValueError(
            "' ' cannot be in command name {0}".format(name))
    self._cmd_methods[name] = method
    self._cmd_argc[name] = argc
    self._cmd_complete[name] = complete
Adds a command to the command line interface loop .
14,522
def _add_file_mask(self, start, method_str, method):
    """Register a raw file-mask handler for *method_str* requests.

    Masks are kept sorted by prefix length, longest first, so the most
    specific mask wins during lookup.
    """
    masks = self._f_mask.get(method_str, [])
    masks.append((start, method))
    masks.sort(key=lambda entry: len(entry[0]), reverse=True)
    self._f_mask[method_str] = masks
    self._f_argc[method_str] = None
Adds a raw file mask for dynamic requests .
14,523
def add_json_mask ( self , start , method_str , json_producer ) : def send_json ( drh , rem_path ) : obj = json_producer ( drh , rem_path ) if not isinstance ( obj , Response ) : obj = Response ( obj ) ctype = obj . get_ctype ( "application/json" ) code = obj . code obj = obj . response if obj is None : drh . send_erro...
Adds a handler that produces a JSON response .
14,524
def add_text_mask ( self , start , method_str , text_producer ) : def send_text ( drh , rem_path ) : text = text_producer ( drh , rem_path ) if not isinstance ( text , Response ) : text = Response ( text ) ctype = text . get_ctype ( "text/plain" ) code = text . code text = text . response if text is None : drh . send_e...
Adds a handler that produces a plain text response .
14,525
def add_special_file ( self , mask , path , from_quick_server , ctype = None ) : full_path = path if not from_quick_server else os . path . join ( os . path . dirname ( __file__ ) , path ) def read_file ( _req , _args ) : with open ( full_path , 'rb' ) as f_out : return Response ( f_out . read ( ) , ctype = ctype ) sel...
Adds a special file that might have a different actual path than its address .
14,526
def mirror_file ( self , path_to , path_from , from_quick_server = True ) : full_path = path_from if not from_quick_server else os . path . join ( os . path . dirname ( __file__ ) , path_from ) if self . _mirror is None : if not self . _symlink_mirror ( path_to , full_path , init = True ) : self . _poll_mirror ( path_t...
Mirrors a file to a different location . Each time the file changes while the process is running it will be copied to path_to overwriting the destination .
14,527
def link_empty_favicon_fallback(self):
    """Use the empty favicon shipped next to this module as fallback."""
    self.favicon_fallback = os.path.join(
        os.path.dirname(__file__), 'favicon.ico')
Links the empty favicon as default favicon .
14,528
def get_token_obj ( self , token , expire = _token_default ) : if expire == _token_default : expire = self . get_default_token_expiration ( ) now = get_time ( ) until = now + expire if expire is not None else None with self . _token_lock : first_valid = None for ( pos , k ) in enumerate ( self . _token_timings ) : t = ...
Returns or creates the object associated with the given token.
14,529
def handle_cmd ( self , cmd ) : cmd = cmd . strip ( ) segments = [ ] for s in cmd . split ( ) : if s . startswith ( '#' ) : break segments . append ( s ) args = [ ] if not len ( segments ) : return while segments : cur_cmd = "_" . join ( segments ) if cur_cmd in self . _cmd_methods : argc = self . _cmd_argc [ cur_cmd ]...
Handles a single server command .
14,530
def handle_request ( self ) : timeout = self . socket . gettimeout ( ) if timeout is None : timeout = self . timeout elif self . timeout is not None : timeout = min ( timeout , self . timeout ) ctime = get_time ( ) done_req = False shutdown_latency = self . shutdown_latency if timeout is not None : shutdown_latency = m...
Handles an HTTP request . The actual HTTP request is handled using a different thread .
14,531
def serve_forever(self):
    """Run the command loop and serve HTTP requests until done.

    Loops until *done* is set or a KeyboardInterrupt arrives; the
    clean-up callback (if any) always runs on exit.
    """
    self.start_cmd_loop()
    try:
        while not self.done:
            self.handle_request()
    except KeyboardInterrupt:
        # Keep the log tidy when interrupting an interactive session.
        if log_file == sys.stderr:
            log_file.write("\n")
    finally:
        if self._clean_up_call is not None:
            self._clean_up_call()
        self.done = True
Starts the server handling commands and HTTP requests . The server will loop until done is True or a KeyboardInterrupt is received .
14,532
def can_ignore_error ( self , reqhnd = None ) : value = sys . exc_info ( ) [ 1 ] try : if isinstance ( value , BrokenPipeError ) or isinstance ( value , ConnectionResetError ) : return True except NameError : pass if not self . done : return False if not isinstance ( value , socket . error ) : return False need_close =...
Tests if the error is worth reporting .
14,533
def handle_error(self, request, client_address):
    """Log an unexpected request-handling error unless it is ignorable."""
    if self.can_ignore_error():
        return
    thread = threading.current_thread()
    msg("Error in request ({0}): {1} in {2}\n{3}",
        client_address, repr(request), thread.name,
        traceback.format_exc())
Handle an error gracefully .
14,534
def _findRow(subNo, model):
    """Return the row of the single model item matching *subNo*.

    Returns None when nothing matches.

    Raises:
        IndexError: if more than one item matches.
    """
    matches = model.findItems(str(subNo))
    if not matches:
        return None
    if len(matches) > 1:
        raise IndexError("Too many items with sub number %s" % subNo)
    return matches[0].row()
Finds a row in a given model which has a column with a given number .
14,535
def _subtitlesAdded(self, path, subNos):
    """Shift syncPoints up after subtitles were inserted.

    Every syncPoint >= an inserted subtitle number is incremented by
    the number of insertions at or below it.
    """
    def action(current, count, model, row):
        _setSubNo(current + count, model, row)

    def count(current, nos):
        # Count inserted numbers at or below current; each hit shifts
        # the effective current number up by one.
        shift = 0
        for no in nos:
            if current >= no:
                shift += 1
                current += 1
        return shift

    self._changeSubNos(path, subNos, count, action)
When a subtitle is added, all syncPoints greater than or equal to the new subtitle are incremented.
14,536
def _subtitlesRemoved ( self , path , subNos ) : def action ( current , count , model , row ) : if count . equal > 0 : model . removeRow ( row ) else : _setSubNo ( current - count . greater_equal , model , row ) def count ( current , nos ) : return _GtEqCount ( current , nos ) self . _changeSubNos ( path , subNos , cou...
When a subtitle is removed, all syncPoints greater than the removed subtitle are decremented. A syncPoint equal to the removed subtitle is also removed.
14,537
def _get_csv_fieldnames ( csv_reader ) : fieldnames = [ ] for row in csv_reader : for col in row : field = ( col . strip ( ) . replace ( '"' , "" ) . replace ( " " , "" ) . replace ( "(" , "" ) . replace ( ")" , "" ) . lower ( ) ) fieldnames . append ( field ) if "id" in fieldnames : break else : del fieldnames [ : ] i...
Finds fieldnames in Polarion exported csv file .
14,538
def _get_results ( csv_reader , fieldnames ) : fieldnames_count = len ( fieldnames ) results = [ ] for row in csv_reader : for col in row : if col : break else : continue record = OrderedDict ( list ( zip ( fieldnames , row ) ) ) if record . get ( "exported" ) == "yes" : continue row_len = len ( row ) if fieldnames_cou...
Maps data to fieldnames .
14,539
def get_imported_data ( csv_file , ** kwargs ) : open_args = [ ] open_kwargs = { } try : unicode open_args . append ( "rb" ) except NameError : open_kwargs [ "encoding" ] = "utf-8" with open ( os . path . expanduser ( csv_file ) , * open_args , ** open_kwargs ) as input_file : reader = _get_csv_reader ( input_file ) fi...
Reads the content of the Polarion exported csv file and returns imported data .
14,540
def import_csv(csv_file, **kwargs):
    """Import Polarion CSV data, verifying required columns are present."""
    records = get_imported_data(csv_file, **kwargs)
    _check_required_columns(csv_file, records.results)
    return records
Imports data and checks that all required columns are there .
14,541
def load_config ( filename ) : if filename is None : filename = '' abs_filename = os . path . join ( os . getcwd ( ) , filename ) global FILE if os . path . isfile ( filename ) : FILE = filename elif os . path . isfile ( abs_filename ) : FILE = abs_filename elif os . path . isfile ( FILE ) : pass else : if os . path . ...
Load data from config file to cfg that can be accessed by get set afterwards .
14,542
def init(FILE):
    """Read the config file *FILE* into the global parser.

    Sets the module-level _loaded flag on success; on failure reports a
    file-not-found message instead of raising.
    """
    global _loaded
    try:
        cfg.read(FILE)
        _loaded = True
    except Exception:
        # BUG FIX: was a bare "except:", which also swallows SystemExit
        # and KeyboardInterrupt; narrowed to Exception.
        file_not_found_message(FILE)
Read config file
14,543
def get(section, key):
    """Return the config value for (section, key), type-coerced.

    Tries float first, then int, then boolean, finally falling back to
    the raw string. Loads the config file on first use.
    """
    if not _loaded:
        init(FILE)
    # BUG FIX: the original used bare "except:" clauses, which also
    # swallow SystemExit/KeyboardInterrupt; narrowed to Exception.
    try:
        return cfg.getfloat(section, key)
    except Exception:
        pass
    try:
        return cfg.getint(section, key)
    except Exception:
        pass
    try:
        return cfg.getboolean(section, key)
    except Exception:
        return cfg.get(section, key)
returns the value of a given key of a given section of the main config file .
14,544
def to_internal_value(self, data):
    """Copy session_event into helper_metadata before deserializing."""
    if "session_event" in data:
        data["helper_metadata"]["session_event"] = data["session_event"]
    return super(InboundSerializer, self).to_internal_value(data)
Adds extra data to the helper_metadata field .
14,545
def acceptAlias ( decoratedFunction ) : def wrapper ( self , * args , ** kwargs ) : SubAssert ( isinstance ( self , AliasBase ) ) if len ( args ) > 0 : key = args [ 0 ] if args [ 0 ] in self . _aliases . keys ( ) : key = self . _aliases [ args [ 0 ] ] return decoratedFunction ( self , key , * args [ 1 : ] , ** kwargs )...
This function should be used as a decorator . Each class method that is decorated will be able to accept alias or original names as a first function positional parameter .
14,546
def h(values):
    """Shannon entropy (in bits) of the distribution given by *values*."""
    probs = np.true_divide(values, np.sum(values))
    return -np.sum(np.multiply(probs, np.log2(probs)))
Function calculates entropy .
14,547
def info_gain_nominal ( x , y , separate_max ) : x_vals = np . unique ( x ) if len ( x_vals ) < 3 : return None y_dist = Counter ( y ) h_y = h ( y_dist . values ( ) ) dist , splits = nominal_splits ( x , y , x_vals , y_dist , separate_max ) indices , repeat = ( range ( 1 , len ( dist ) ) , 1 ) if len ( dist ) < 50 else...
Function calculates information gain for discrete features . If feature is continuous it is firstly discretized .
14,548
def multinomLog2 ( selectors ) : ln2 = 0.69314718055994528622 noAll = sum ( selectors ) lgNf = math . lgamma ( noAll + 1.0 ) / ln2 lgnFac = [ ] for selector in selectors : if selector == 0 or selector == 1 : lgnFac . append ( 0.0 ) elif selector == 2 : lgnFac . append ( 1.0 ) elif selector == noAll : lgnFac . append ( ...
Function calculates logarithm 2 of a kind of multinom .
14,549
def calc_mdl ( yx_dist , y_dist ) : prior = multinomLog2 ( y_dist . values ( ) ) prior += multinomLog2 ( [ len ( y_dist . keys ( ) ) - 1 , sum ( y_dist . values ( ) ) ] ) post = 0 for x_val in yx_dist : post += multinomLog2 ( [ x_val . get ( c , 0 ) for c in y_dist . keys ( ) ] ) post += multinomLog2 ( [ len ( y_dist ....
Function calculates mdl with given label distributions .
14,550
def mdl_nominal ( x , y , separate_max ) : x_vals = np . unique ( x ) if len ( x_vals ) == 1 : return None y_dist = Counter ( y ) dist , splits = nominal_splits ( x , y , x_vals , y_dist , separate_max ) prior_mdl = calc_mdl ( dist , y_dist ) max_mdl , max_i = 0 , 1 for i in range ( 1 , len ( dist ) ) : dist0_x = [ el ...
Function calculates minimum description length for discrete features . If feature is continuous it is firstly discretized .
14,551
def url ( section = "postGIS" , config_file = None ) : cfg . load_config ( config_file ) try : pw = keyring . get_password ( cfg . get ( section , "database" ) , cfg . get ( section , "username" ) ) except NoSectionError as e : print ( "There is no section {section} in your config file. Please " "choose one available s...
Retrieve the URL used to connect to the database .
14,552
def get_endpoints_using_raw_json_emission(domain):
    """Fetch and parse the portal-wide data.json listing for *domain*.

    Raises on any non-success HTTP status.
    """
    uri = "http://{0}/data.json".format(domain)
    resp = requests.get(uri)
    resp.raise_for_status()
    return resp.json()
Implements a raw HTTP GET against the entire Socrata portal for the domain in question . This method uses the first of the two ways of getting this information the raw JSON endpoint .
14,553
def get_endpoints_using_catalog_api ( domain , token ) : headers = { "X-App-Token" : token } uri = "http://api.us.socrata.com/api/catalog/v1?domains={0}&offset={1}&limit=1000" ret = [ ] endpoints_thus_far = set ( ) offset = 0 while True : try : r = requests . get ( uri . format ( domain , offset ) , headers = headers )...
Implements a raw HTTP GET against the entire Socrata portal for the domain in question . This method uses the second of the two ways of getting this information the catalog API .
14,554
def count_resources(domain, token):
    """Count the domain's resources per type, excluding stories."""
    resources = get_resources(domain, token)
    kinds = [r['resource']['type'] for r in resources
             if r['resource']['type'] != 'story']
    return dict(Counter(kinds))
Given the domain in question generates counts for that domain of each of the different data types .
14,555
def stratify_by_features(features, n_strata, **kwargs):
    """Stratify items by k-means clustering in feature space.

    Extra keyword arguments are forwarded to KMeans.
    """
    clusterer = KMeans(n_clusters=n_strata, **kwargs)
    labels = clusterer.fit_predict(X=features)
    return Strata(labels)
Stratify by clustering the items in feature space
14,556
def _heuristic_bin_width(obs):
    """Optimal histogram bin width per the Freedman-Diaconis rule."""
    iqr = sp.percentile(obs, 75) - sp.percentile(obs, 25)
    n_obs = len(obs)
    return 2 * iqr * n_obs ** (-1 / 3)
Optimal histogram bin width based on the Freedman - Diaconis rule
14,557
def stratify_by_scores ( scores , goal_n_strata = 'auto' , method = 'cum_sqrt_F' , n_bins = 'auto' ) : available_methods = [ 'equal_size' , 'cum_sqrt_F' ] if method not in available_methods : raise ValueError ( "method argument is invalid" ) if ( method == 'cum_sqrt_F' ) or ( goal_n_strata == 'auto' ) : if n_bins == 'a...
Stratify by binning the items based on their scores
14,558
def auto_stratify ( scores , ** kwargs ) : if 'stratification_method' in kwargs : method = kwargs [ 'stratification_method' ] else : method = 'cum_sqrt_F' if 'stratification_n_strata' in kwargs : n_strata = kwargs [ 'stratification_n_strata' ] else : n_strata = 'auto' if 'stratification_n_bins' in kwargs : n_bins = kwa...
Generate Strata instance automatically
14,559
def _sample_stratum ( self , pmf = None , replace = True ) : if pmf is None : pmf = self . weights_ if not replace : empty = ( self . _n_sampled >= self . sizes_ ) if np . any ( empty ) : pmf = copy . copy ( pmf ) pmf [ empty ] = 0 if np . sum ( pmf ) == 0 : raise ( RuntimeError ) pmf /= np . sum ( pmf ) return np . ra...
Sample a stratum
14,560
def _sample_in_stratum ( self , stratum_idx , replace = True ) : if replace : stratum_loc = np . random . choice ( self . sizes_ [ stratum_idx ] ) else : stratum_locs = np . where ( ~ self . _sampled [ stratum_idx ] ) [ 0 ] stratum_loc = np . random . choice ( stratum_locs ) self . _sampled [ stratum_idx ] [ stratum_lo...
Sample an item uniformly from a stratum
14,561
def intra_mean(self, values):
    """Mean of *values* within each stratum (row-wise for 2-D input)."""
    if values.ndim > 1:
        return np.array([np.mean(values[idx, :], axis=0)
                         for idx in self.allocations_])
    return np.array([np.mean(values[idx]) for idx in self.allocations_])
Calculate the mean of a quantity within strata
14,562
def reset(self):
    """Clear all sampling bookkeeping so sampling starts from scratch."""
    self._sampled = [np.repeat(False, size) for size in self.sizes_]
    self._n_sampled = np.zeros(self.n_strata_, dtype=int)
Reset the instance to begin sampling from scratch
14,563
async def bluetooth_scan ( ) : devices = { } async with aiohttp . ClientSession ( ) as session : ghlocalapi = NetworkScan ( LOOP , session ) result = await ghlocalapi . scan_for_units ( IPRANGE ) for host in result : if host [ 'assistant_supported' ] : async with aiohttp . ClientSession ( ) as session : ghlocalapi = De...
Get devices from all GH units on the network .
14,564
def get_queue_obj ( session , queue_url , log_url ) : skip = False if not queue_url : logger . error ( "The queue url is not configured, skipping submit verification" ) skip = True if not session : logger . error ( "Missing requests session, skipping submit verification" ) skip = True queue = QueueSearch ( session = se...
Checks that all the data that is needed for submit verification is available .
14,565
def download_queue ( self , job_ids ) : if self . skip : return None url = "{}?jobtype=completed&jobIds={}" . format ( self . queue_url , "," . join ( str ( x ) for x in job_ids ) ) try : response = self . session . get ( url , headers = { "Accept" : "application/json" } ) if response : response = response . json ( ) e...
Downloads data of completed jobs .
14,566
def find_jobs ( self , job_ids ) : matched_jobs = [ ] if self . skip : return matched_jobs json_data = self . download_queue ( job_ids ) if not json_data : return matched_jobs jobs = json_data [ "jobs" ] for job in jobs : if ( job . get ( "id" ) in job_ids and job . get ( "status" , "" ) . lower ( ) not in _NOT_FINISHE...
Finds the jobs in the completed job queue .
14,567
def wait_for_jobs ( self , job_ids , timeout , delay ) : if self . skip : return logger . debug ( "Waiting up to %d sec for completion of the job IDs %s" , timeout , job_ids ) remaining_job_ids = set ( job_ids ) found_jobs = [ ] countdown = timeout while countdown > 0 : matched_jobs = self . find_jobs ( remaining_job_i...
Waits until the jobs appears in the completed job queue .
14,568
def _check_outcome ( self , jobs ) : if self . skip : return False if not jobs : logger . error ( "Import failed!" ) return False failed_jobs = [ ] for job in jobs : status = job . get ( "status" ) if not status : failed_jobs . append ( job ) continue if status . lower ( ) != "success" : failed_jobs . append ( job ) fo...
Parses returned messages and checks submit outcome .
14,569
def _download_log ( self , url , output_file ) : logger . info ( "Saving log %s to %s" , url , output_file ) def _do_log_download ( ) : try : return self . session . get ( url ) except Exception as err : logger . error ( err ) for __ in range ( 5 ) : log_data = _do_log_download ( ) if log_data or log_data is None : bre...
Saves log returned by the message bus .
14,570
def get_logs ( self , jobs , log_file = None ) : if not ( jobs and self . log_url ) : return for job in jobs : url = "{}?jobId={}" . format ( self . log_url , job . get ( "id" ) ) if log_file : self . _download_log ( "{}&download" . format ( url ) , log_file ) else : logger . info ( "Submit log for job %s: %s" , job . ...
Get log or log url of the jobs .
14,571
def submodules(self):
    """All sub-modules of this node, collected recursively."""
    collected = list(self.modules)
    for pkg in self.packages:
        collected.extend(pkg.submodules)
    return collected
Property to return all sub - modules of the node recursively .
14,572
def get_target(self, target):
    """Resolve *target* via _get_target, memoizing the result."""
    cache = self._target_cache
    if target not in cache:
        cache[target] = self._get_target(target)
    return cache[target]
Get the result of _get_target cache it and return it .
14,573
def _get_target ( self , target ) : depth = target . count ( '.' ) + 1 parts = target . split ( '.' , 1 ) for m in self . modules : if parts [ 0 ] == m . name : if depth < 3 : return m for p in self . packages : if parts [ 0 ] == p . name : if depth == 1 : return p target = p . _get_target ( parts [ 1 ] ) if target : r...
Get the Package or Module related to given target .
14,574
def build_dependencies(self):
    """Recursively build dependencies for sub-modules and sub-packages."""
    for child in self.modules:
        child.build_dependencies()
    for child in self.packages:
        child.build_dependencies()
Recursively build the dependencies for sub - modules and sub - packages .
14,575
def print_graph(self, format=None, output=sys.stdout, depth=0, **kwargs):
    """Build the graph at *depth* and print it to *output*.

    Extra keyword arguments are forwarded to the graph's print method.
    """
    self.as_graph(depth=depth).print(
        format=format, output=output, **kwargs)
Print the graph for self's nodes.
14,576
def as_graph(self, depth=0):
    """Return (and cache per depth) a Graph rooted at this node."""
    try:
        return self._graph_cache[depth]
    except KeyError:
        graph = Graph(self, depth=depth)
        self._graph_cache[depth] = graph
        return graph
Create a graph with self as node, cache it, and return it.
14,577
def as_matrix(self, depth=0):
    """Return (and cache per depth) a Matrix with self as node."""
    try:
        return self._matrix_cache[depth]
    except KeyError:
        matrix = Matrix(self, depth=depth)
        self._matrix_cache[depth] = matrix
        return matrix
Create a matrix with self as node, cache it, and return it.
14,578
def as_treemap(self):
    """Return (and cache) the dependencies as a TreeMap."""
    if not self._treemap_cache:
        self._treemap_cache = TreeMap(self)
    return self._treemap_cache
Return the dependencies as a TreeMap .
14,579
def root(self):
    """The top-most package this node belongs to (possibly self)."""
    current = self
    while current.package is not None:
        current = current.package
    return current
Property to return the root of this node .
14,580
def depth(self):
    """Depth of this node in the package tree (root == 1), cached."""
    if self._depth_cache is None:
        level, current = 1, self
        while current.package is not None:
            level += 1
            current = current.package
        self._depth_cache = level
    return self._depth_cache
Property to tell the depth of the node in the tree .
14,581
def absolute_name ( self , depth = 0 ) : node , node_depth = self , self . depth if depth < 1 : depth = node_depth while node_depth > depth and node . package is not None : node = node . package node_depth -= 1 names = [ ] while node is not None : names . append ( node . name ) node = node . package return '.' . join (...
Return the absolute name of the node .
14,582
def color_msg(msg, color):
    "Return *msg* wrapped in the ANSI codes for *color*."
    start = COLORS.get(color, COLORS['endc'])
    return '{0}{1}{2}'.format(start, msg, COLORS['endc'])
Return colored message
14,583
def gen_files(path, prefix="_"):
    """Yield absolute paths of parsed files under *path*.

    NOTE(review): *prefix* is unused in this body -- confirm whether
    prefix-based filtering was intended.
    """
    if op.isdir(path):
        for entry in listdir(path):
            candidate = op.join(path, entry)
            if is_parsed_file(candidate):
                yield op.abspath(candidate)
    elif is_parsed_file(path):
        yield op.abspath(path)
Return file generator
14,584
def pack(args):
    "Pack all matching source files with the zetalibrary Packer."
    from zetalibrary.packer import Packer
    args = parse_config(args)
    for path in gen_files(args.source, prefix=args.prefix):
        Packer(path, args).pack()
Pack files .
14,585
def nonzero_monies(self):
    """Shallow copies of the underlying Money objects with amount != 0."""
    return [copy.copy(money) for money in self._money_obs
            if money.amount != 0]
Get a list of the underlying Money instances that are not zero
14,586
def index ( pc ) : click . echo ( "Format Version: {0}" . format ( pc . idx [ 'formatVersion' ] ) ) click . echo ( "Publication Date: {0}" . format ( pc . idx [ 'publicationDate' ] ) ) olist = '' for i , o in enumerate ( pc . idx [ 'offers' ] ) : if i < len ( pc . idx [ 'offers' ] ) - 1 : olist += o + ", " else : olist...
Show details about the Pricing API Index .
14,587
def product ( pc , service , attrib , sku ) : pc . service = service . lower ( ) pc . sku = sku pc . add_attributes ( attribs = attrib ) click . echo ( "Service Alias: {0}" . format ( pc . service_alias ) ) click . echo ( "URL: {0}" . format ( pc . service_url ) ) click . echo ( "Region: {0}" . format ( pc . region ) )...
Get a list of a service s products . The list will be in the given region matching the specific terms and any given attribute filters or a SKU .
14,588
def price ( pc , service , attrib , sku ) : pc . service = service . lower ( ) pc . sku = sku pc . add_attributes ( attribs = attrib ) click . echo ( "Service Alias: {0}" . format ( pc . service_alias ) ) click . echo ( "URL: {0}" . format ( pc . service_url ) ) click . echo ( "Region: {0}" . format ( pc . region ) ) c...
Get a list of a service s prices . The list will be in the given region matching the specific terms and any given attribute filters or a SKU .
14,589
def map_init(interface, params):
    """Seed both numpy's and the stdlib RNG from params['seed'].

    Returns *params* unchanged.
    """
    import random

    import numpy as np
    np.random.seed(params['seed'])
    random.seed(params['seed'])
    return params
Initialize the random number generators with the given seed params['seed'].
14,590
def create_graph_name(suffix='', dirname=None):
    """Derive a graph file name from the calling function's name."""
    if suffix:
        suffix = '-%s' % suffix
    caller = get_callers_name(level=3)
    name = '%s%s%s%s' % (__prefix, caller, suffix, __suffix)
    if dirname:
        name = os.path.join(dirname, name)
    return name
Create a graph name using the name of the caller .
14,591
def save_graph(graph, suffix='', dirname=None, pdf=False):
    """Save *graph* under a caller-derived name, optionally also as PDF."""
    name = create_graph_name(suffix, dirname)
    graph.save(name)
    if pdf:
        graph.save_as_pdf(name)
Save a graph using caller s name .
14,592
def save_data(data, suffix='', dirname=None):
    """Save *data* as a text file under a caller-derived name."""
    if type(data) == list:
        # Lists are transposed so each series becomes a column.
        data = np.array(data).T
    name = create_graph_name(suffix, dirname) + '.txt'
    np.savetxt(name, data)
Save a dataset using caller s name .
14,593
def read ( path , savedir ) : " Read file from path " if path . startswith ( 'http://' ) : name = op . basename ( path ) save_path = op . join ( savedir , name ) if not op . exists ( save_path ) : src = urllib2 . urlopen ( path ) . read ( ) try : open ( save_path , 'w' ) . write ( src ) except IOError : return src path...
Read file from path
14,594
def parse_imports(self, src):
    """Strip import statements from *src*, collecting their targets.

    Returns:
        (src, result): the source with imports removed and the list of
        captured import names (group 1 of each match).
    """
    result = []

    def child(match):
        result.append(match.group(1))
        # BUG FIX: re.sub requires a function replacement to return a
        # string; the original returned None (append's result), which
        # raises TypeError at the first match.
        return ''

    src = self.import_re.sub(child, src)
    return src, result
Parse imports from source .
14,595
def __get_users(self):
    """Yield the uid value of every posixAccount found via LDAP search."""
    ldap_filter = ['(objectclass=posixAccount)']
    for entry in self.client.search(ldap_filter, ['uid']):
        yield entry.uid.value
Get user list .
14,596
def by_user(config):
    """Print LDAP group membership grouped by user."""
    client = Client()
    client.prepare_connection()
    audit_api = API(client)
    CLI.parse_membership('Groups by User', audit_api.by_user())
Display LDAP group membership sorted by user .
14,597
def raw(config):
    """Dump the raw contents of LDAP to the console."""
    client = Client()
    client.prepare_connection()
    audit_api = API(client)
    print(audit_api.raw())
Dump the contents of LDAP to console in raw format .
14,598
def get_sql_state(self, state):
    """Return state.sql_state, creating an empty SQLStateGraph if absent."""
    if not hasattr(state, 'sql_state'):
        state.sql_state = SQLStateGraph()
    return state.sql_state
Get SQLStateGraph from state .
14,599
def get_sortkey(table):
    """Return the alphabetically-first schema property to sort by."""
    wfs = WebFeatureService(url=bcdata.OWS_URL, version="2.0.0")
    properties = wfs.get_schema("pub:" + table)["properties"]
    return sorted(properties.keys())[0]
Get a field to sort by