idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
5,800
def extract(obj, pointer, bypass_ref=False):
    """Extract the member or element of *obj* addressed by *pointer*."""
    resolver = Pointer(pointer)
    return resolver.extract(obj, bypass_ref)
Extract member or element of obj according to pointer .
5,801
def aa_counts(aln, weights=None, gap_chars='-.'):
    """Tally amino acid occurrences across a set of aligned SeqRecords.

    With *weights* omitted, raw character counts are returned.  With
    ``weights=True`` per-sequence weights are computed first; otherwise
    *weights* must supply one weight per record.  Gap characters are
    removed from the result before returning.
    """
    if weights is None:
        tally = Counter()
        for record in aln:
            tally.update(str(record.seq))
    else:
        if weights == True:
            weights = sequence_weights(aln)
        else:
            assert len(weights) == len(aln), (
                "Length mismatch: weights = %d, alignment = %d"
                % (len(weights), len(aln)))
        tally = defaultdict(float)
        for column in zip(*aln):
            for residue, wt in zip(column, weights):
                tally[residue] += wt
    for gap in gap_chars:
        if gap in tally:
            del tally[gap]
    return tally
Calculate the amino acid frequencies in a set of SeqRecords .
5,802
def aa_frequencies(aln, weights=None, gap_chars='-.'):
    """Frequency of each residue type in an alignment.

    Returns a dict mapping residue -> relative frequency; the values sum
    to 1.0.  Gap characters are excluded by aa_counts.
    """
    counts = aa_counts(aln, weights, gap_chars)
    scale = 1.0 / sum(counts.values())
    # dict.iteritems() was removed in Python 3; items() works on 2 and 3.
    return dict((aa, cnt * scale) for aa, cnt in counts.items())
Frequency of each residue type in an alignment .
5,803
def blocks(aln, threshold=0.5, weights=None):
    """Remove gappy columns from an alignment.

    Keeps only columns whose (optionally weighted) fraction of non-gap
    characters is at least *threshold*, and returns a new
    MultipleSeqAlignment built from deep copies of the input records.
    """
    assert len(aln)
    if weights == False:
        # Unweighted: plain fraction of non-gap characters in the column.
        def pct_nongaps(col):
            return 1 - (float(col.count('-')) / len(col))
    else:
        if weights in (None, True):
            # Derive per-sequence weights from the alignment itself.
            weights = sequence_weights(aln, 'avg1')
        def pct_nongaps(col):
            assert len(col) == len(weights)
            ngaps = sum(wt * (c == '-') for wt, c in zip(weights, col))
            return 1 - (ngaps / len(col))
    seqstrs = [str(rec.seq) for rec in aln]
    # Transpose to columns and keep the sufficiently non-gappy ones.
    clean_cols = [col for col in zip(*seqstrs)
                  if pct_nongaps(col) >= threshold]
    alphabet = aln[0].seq.alphabet
    # Transpose back to rows and rebuild Seq objects.
    clean_seqs = [Seq(''.join(row), alphabet) for row in zip(*clean_cols)]
    clean_recs = []
    for rec, seq in zip(aln, clean_seqs):
        newrec = deepcopy(rec)
        newrec.seq = seq
        clean_recs.append(newrec)
    return MultipleSeqAlignment(clean_recs, alphabet=alphabet)
Remove gappy columns from an alignment .
5,804
def col_counts(col, weights=None, gap_chars='-.'):
    """Absolute counts of each residue type in a single column.

    Bug fix: the old code passed the default weights=None straight into
    zip(), raising TypeError.  A missing *weights* now means every
    residue counts with weight 1.0.
    """
    if weights is None:
        weights = [1.0] * len(col)
    cnt = defaultdict(float)
    for aa, wt in zip(col, weights):
        if aa not in gap_chars:
            cnt[aa] += wt
    return cnt
Absolute counts of each residue type in a single column .
5,805
def remove_empty_cols(records):
    """Drop alignment columns consisting solely of gap characters.

    Generator: yields new SeqRecord objects whose sequences retain only
    the columns containing at least one non-gap character.
    """
    records = list(records)
    rows = [str(rec.seq) for rec in records]
    kept_cols = [col for col in zip(*rows) if any(c != '-' for c in col)]
    rebuilt = [''.join(chars) for chars in zip(*kept_cols)]
    for rec, new_seq in zip(records, rebuilt):
        yield SeqRecord(Seq(new_seq, rec.seq.alphabet),
                        id=rec.id, name=rec.name,
                        description=rec.description,
                        dbxrefs=rec.dbxrefs, features=rec.features,
                        annotations=rec.annotations,
                        letter_annotations=rec.letter_annotations)
Remove all-gap columns from aligned SeqRecords.
5,806
def sequence_weights(aln, scaling='none', gap_chars='-.'):
    """Weight aligned sequences to emphasize more divergent members.

    Position-based weighting: each column distributes weight
    1/(n_residues * count) per residue, and per-sequence weights are the
    sums over columns.  *scaling* selects the normalization: 'none',
    'max1', 'sum1', 'avg1' or 'andy' (like 'avg1' but capped at 1.0).
    """
    # expectk[k]: expected number of distinct residue types in a column of
    # k sequences (precomputed lookup table; treated as 20 beyond its end).
    expectk = [0.0, 1.0, 1.953, 2.861, 3.705, 4.524, 5.304, 6.026, 6.724,
               7.397, 8.04, 8.622, 9.191, 9.739, 10.264, 10.758, 11.194,
               11.635, 12.049, 12.468, 12.806, 13.185, 13.539, 13.863,
               14.177, 14.466, 14.737, 15.005, 15.245, 15.491, 15.681,
               15.916, 16.12, 16.301, 16.485, 16.671, 16.831, 16.979,
               17.151, 17.315, 17.427, 17.559, 17.68, 17.791, 17.914,
               18.009, 18.113, 18.203, 18.298, 18.391, 18.46, 18.547,
               18.617, 18.669, 18.77, 18.806, 18.858, 18.934, 18.978,
               19.027, 19.085, 19.119, 19.169, 19.202, 19.256, 19.291,
               19.311, 19.357, 19.399, 19.416, 19.456, 19.469, 19.5,
               19.53, 19.553, 19.562, 19.602, 19.608, 19.629, 19.655,
               19.67, 19.681, 19.7, 19.716, 19.724, 19.748, 19.758,
               19.765, 19.782, 19.791, 19.799, 19.812, 19.82, 19.828,
               19.844, 19.846, 19.858, 19.863, 19.862, 19.871, 19.882]
    def col_weight(column):
        # Skip columns that are mostly gaps: require at least max(2, 20%)
        # non-gap characters, otherwise contribute nothing.
        min_nongap = max(2, .2 * len(column))
        if len([c for c in column if c not in gap_chars]) < min_nongap:
            return ([0] * len(column), 0)
        counts = Counter(column)
        n_residues = len(counts)
        # NOTE(review): Counter.iteritems() exists only on Python 2 --
        # confirm this module still targets Python 2.
        freqs = dict((aa, 1.0 / (n_residues * count))
                     for aa, count in counts.iteritems())
        weights = [freqs[aa] for aa in column]
        return (weights, n_residues)
    seq_weights = [0] * len(aln)
    tot_nres = 0.0  # running total of expected distinct residues per column
    for col in zip(*aln):
        wts, nres = col_weight(col)
        assert sum(wts) <= 20
        tot_nres += expectk[nres] if nres < len(expectk) else 20
        for idx, wt in enumerate(wts):
            seq_weights[idx] += wt
    if scaling == 'none':
        # Normalize by the average expected sequence length.
        avg_seq_len = tot_nres / len(aln)
        return [wt / avg_seq_len for wt in seq_weights]
    if scaling == 'max1':
        scale = 1.0 / max(seq_weights)
    elif scaling == 'sum1':
        scale = 1.0 / sum(seq_weights)
    elif scaling == 'avg1':
        scale = len(aln) / sum(seq_weights)
    elif scaling == 'andy':
        # Like 'avg1' but each weight is capped at 1.0.
        scale = len(aln) / sum(seq_weights)
        return [min(scale * wt, 1.0) for wt in seq_weights]
    else:
        raise ValueError("Unknown scaling scheme '%s'" % scaling)
    return [scale * wt for wt in seq_weights]
Weight aligned sequences to emphasize more divergent members .
5,807
def to_graph(alnfname, weight_func):
    """Create a NetworkX graph from a FASTA sequence alignment.

    Every pair of records becomes an edge whose weight is
    weight_func(seq_a, seq_b).
    """
    import networkx
    graph = networkx.Graph()
    aln = AlignIO.read(alnfname, 'fasta')
    for i, first in enumerate(aln):
        for second in aln[i + 1:]:
            pair_weight = weight_func(str(first.seq), str(second.seq))
            graph.add_edge(first.id, second.id, weight=pair_weight)
    return graph
Create a NetworkX graph from a sequence alignment .
5,808
def guidance_UV(index):
    """Return Met Office guidance text for a UV *index*.

    Bands are checked in ascending order; values outside every band
    (e.g. 0 or below) yield None.
    """
    if 0 < index < 3:
        return ("Low exposure. No protection required. "
                "You can safely stay outside")
    if 2 < index < 6:
        return ("Moderate exposure. Seek shade during midday hours, "
                "cover up and wear sunscreen")
    if 5 < index < 8:
        return ("High exposure. Seek shade during midday hours, "
                "cover up and wear sunscreen")
    if 7 < index < 11:
        return ("Very high. Avoid being outside during midday hours. "
                "Shirt, sunscreen and hat are essential")
    if index > 10:
        return ("Extreme. Avoid being outside during midday hours. "
                "Shirt, sunscreen and hat essential.")
    return None
Return Met Office guidance regarding UV exposure based on UV index
5,809
def parse_sitelist(sitelist):
    """Return a list of Site instances from retrieved sitelist data.

    Handles both plain ("id"/"name") and attribute-prefixed
    ("@id"/"@name") key styles; lat/lon default to None when absent.
    """
    sites = []
    for entry in sitelist["Locations"]["Location"]:
        try:
            ident = entry["id"]
            name = entry["name"]
        except KeyError:
            ident = entry["@id"]
            name = entry["@name"]
        if "latitude" in entry:
            lat = float(entry["latitude"])
            lon = float(entry["longitude"])
        else:
            lat = lon = None
        sites.append(Site(ident, name, lat, lon))
    return sites
Return list of Site instances from retrieved sitelist data
5,810
def _query(self, data_category, resource_category, field, request, step, isotime=None):
    """Request and return raw data from the DataPoint RESTful API.

    Builds <HOST>/<data_category>/<resource_category>/<field>/<DATA_TYPE>/
    <request>?res=<step>[&time=<isotime>]&key=<key> and returns the
    response body as bytes.
    """
    rest_url = "/".join([HOST, data_category, resource_category, field,
                         DATA_TYPE, request])
    # Only include the time parameter when one is supplied; the previous
    # code joined an empty string and produced a stray "&&" segment.
    params = ["res=" + step]
    if isotime is not None:
        params.append("time=" + isotime)
    params.append("key=" + self.key)
    url = rest_url + "?" + "&".join(params)
    page = url_lib.urlopen(url)
    try:
        return page.read()
    finally:
        # The old code never closed the response; avoid leaking sockets.
        page.close()
Request and return data from DataPoint RESTful API .
5,811
def stand_alone_imagery(self):
    """Return capabilities data for stand-alone imagery, including image URIs."""
    raw = self._query(IMAGE, FORECAST, SURFACE_PRESSURE, CAPABILITIES, "")
    return json.loads(raw.decode(errors="replace"))
Returns capabilities data for stand alone imagery and includes URIs for the images .
5,812
def map_overlay_forecast(self):
    """Return capabilities data for forecast map overlays."""
    raw = self._query(LAYER, FORECAST, ALL, CAPABILITIES, "")
    return json.loads(raw.decode(errors="replace"))
Returns capabilities data for forecast map overlays .
5,813
def map_overlay_obs(self):
    """Return capabilities data for observation map overlays."""
    raw = self._query(LAYER, OBSERVATIONS, ALL, CAPABILITIES, "")
    return json.loads(raw.decode(errors="replace"))
Returns capabilities data for observation map overlays .
5,814
def load_and_append(instrument_dict, instruments=None, raise_errors=False):
    """Load instruments named in *instrument_dict* and append to *instruments*.

    Each value may be a class name dict (with 'filepath', 'class' and
    optional 'settings'), an Instrument instance (not implemented), or an
    Instrument subclass.  Returns (updated_instruments, loaded_failed).
    """
    if instruments is None:
        instruments = {}
    updated_instruments = {}
    updated_instruments.update(instruments)
    loaded_failed = {}
    for instrument_name, instrument_class_name in instrument_dict.items():
        instrument_settings = None
        module = None
        # Skip instruments already loaded under the same class name.
        if instrument_name in list(instruments.keys()) and instrument_class_name == instruments[instrument_name].__name__:
            print(('WARNING: instrument {:s} already exists. Did not load!'.format(instrument_name)))
            loaded_failed[instrument_name] = instrument_name
        else:
            instrument_instance = None
            if isinstance(instrument_class_name, dict):
                # Dict spec: import the class from the given file path.
                if 'settings' in instrument_class_name:
                    instrument_settings = instrument_class_name['settings']
                instrument_filepath = str(instrument_class_name['filepath'])
                instrument_class_name = str(instrument_class_name['class'])
                path_to_module, _ = module_name_from_path(instrument_filepath)
                module = import_module(path_to_module)
                class_of_instrument = getattr(module, instrument_class_name)
                try:
                    if instrument_settings is None:
                        instrument_instance = class_of_instrument(name=instrument_name)
                    else:
                        instrument_instance = class_of_instrument(name=instrument_name, settings=instrument_settings)
                except Exception as e:
                    # Record the failure; optionally re-raise, otherwise
                    # skip this instrument entirely.
                    loaded_failed[instrument_name] = e
                    if raise_errors:
                        raise e
                    continue
            elif isinstance(instrument_class_name, Instrument):
                # Passing an existing instance is not supported yet.
                instrument_class_name = instrument_class_name.__class__
                instrument_filepath = os.path.dirname(inspect.getfile(instrument_class_name))
                raise NotImplementedError
            elif issubclass(instrument_class_name, Instrument):
                # Direct class reference: instantiate it.
                class_of_instrument = instrument_class_name
                if instrument_settings is None:
                    instrument_instance = class_of_instrument(name=instrument_name)
                else:
                    instrument_instance = class_of_instrument(name=instrument_name, settings=instrument_settings)
            updated_instruments[instrument_name] = instrument_instance
    return updated_instruments, loaded_failed
load instrument from instrument_dict and append to instruments
5,815
def fail(self, reason, obj, pointer=None):
    """Record a validation failure; raise immediately in fail-fast mode."""
    err = ValidationError(reason, obj, pointer_join(pointer))
    if self.fail_fast:
        raise err
    self.errors.append(err)
    return err
Called when validation fails .
5,816
def get_locus(sequences, kir=False, verbose=False, refdata=None, evalue=10):
    """Get the locus of the sequence by running blastn.

    Writes the sequences to a temp FASTA, blasts them against
    refdata.blastdb, and returns the consensus locus of the top hits
    ('' when no hits or the top hits disagree).
    """
    if not refdata:
        refdata = ReferenceData()
    file_id = str(randomid())
    input_fasta = file_id + ".fasta"
    output_xml = file_id + ".xml"
    SeqIO.write(sequences, input_fasta, "fasta")
    blastn_cline = NcbiblastnCommandline(query=input_fasta, db=refdata.blastdb,
                                         evalue=evalue, outfmt=5, reward=1,
                                         penalty=-3, gapopen=5, gapextend=2,
                                         dust='yes', out=output_xml)
    stdout, stderr = blastn_cline()
    blast_qresult = SearchIO.read(output_xml, 'blast-xml')
    cleanup(file_id)
    if len(blast_qresult.hits) == 0:
        return ''
    # The kir and non-kir branches were byte-identical, so one expression
    # suffices; also guard against fewer than three hits (the old fixed
    # range(0, 3) raised IndexError on 1-2 hits).
    n_top = min(3, len(blast_qresult.hits))
    loci = [blast_qresult[i].id.split("*")[0] for i in range(n_top)]
    if len(set(loci)) == 1:
        if has_hla(loci[0]) or kir:
            return loci[0]
        return "HLA-" + loci[0]
    return ''
Gets the locus of the sequence by running blastn
5,817
def address(self) -> str:
    """Generate an address string from the public key for this network."""
    network = net_query(self.network)
    return str(self._public_key.to_address(network))
generate an address from pubkey
5,818
def sign_transaction(self, txins: Union[TxOut], tx: MutableTransaction) -> MutableTransaction:
    """Sign *tx*, spending the given P2PKH outputs with this key."""
    key_solver = P2pkhSolver(self._private_key)
    solvers = [key_solver for _ in txins]
    return tx.spend(txins, solvers)
sign the parent txn outputs P2PKH
5,819
def format_name(net: str) -> str:
    """Take care of specifics of the cryptoid naming system.

    Names starting with 't' or containing 'testnet' are mapped to
    '<name-minus-first-char>-test'; everything else passes through.
    """
    is_testnet = net.startswith('t') or 'testnet' in net
    return net[1:] + '-test' if is_testnet else net
take care of specifics of cryptoid naming system
5,820
def get_url(url: str) -> Union[dict, int, float, str]:
    """Perform a GET request for *url* and return the parsed JSON response.

    Raises Exception(reason) on a non-200 status.
    """
    request = Request(url, headers={"User-Agent": "pypeerassets"})
    response = cast(HTTPResponse, urlopen(request))
    try:
        if response.status != 200:
            raise Exception(response.reason)
        return json.loads(response.read().decode())
    finally:
        # The old code leaked the connection; always close the response.
        response.close()
Perform a GET request for the url and return a dictionary parsed from the JSON response .
5,821
def _scan_nodes(nodelist, context, instance_types, current_block=None, ignore_blocks=None):
    """Loop through all nodes of a single template scope level.

    Collects nodes that are instances of *instance_types*, recursing into
    {% include %}, {% extends %}, {{ block.super }} references and any
    child nodelists; blocks named in *ignore_blocks* are skipped.
    """
    results = []
    for node in nodelist:
        if isinstance(node, instance_types):
            results.append(node)
        elif isinstance(node, IncludeNode):
            if node.template:
                # Resolve the template when the attribute is a name rather
                # than an already-loaded template object.
                if not callable(getattr(node.template, 'render', None)):
                    template = get_template(node.template.var)
                else:
                    template = node.template
                if TemplateAdapter is not None and isinstance(template, TemplateAdapter):
                    # Unwrap Django's backend adapter to the raw template.
                    template = template.template
                results += _scan_nodes(template.nodelist, context, instance_types, current_block)
        elif isinstance(node, ExtendsNode):
            results += _extend_nodelist(node, context, instance_types)
        elif isinstance(node, VariableNode) and current_block:
            if node.filter_expression.token == 'block.super':
                if not hasattr(current_block.parent, 'nodelist'):
                    raise TemplateSyntaxError(
                        "Cannot read {{{{ block.super }}}} for {{% block {0} %}}, "
                        "the parent template doesn't have this block.".format(current_block.name))
                # Recurse into the parent template's version of this block.
                results += _scan_nodes(current_block.parent.nodelist, context, instance_types, current_block.parent)
        elif isinstance(node, BlockNode) and ignore_blocks and node.name in ignore_blocks:
            continue
        elif hasattr(node, 'child_nodelists'):
            # Standard node: walk its declared child nodelists.
            for nodelist_name in node.child_nodelists:
                if hasattr(node, nodelist_name):
                    subnodelist = getattr(node, nodelist_name)
                    if isinstance(subnodelist, NodeList):
                        if isinstance(node, BlockNode):
                            current_block = node
                        results += _scan_nodes(subnodelist, context, instance_types, current_block)
        else:
            # Fallback: probe every attribute for NodeList children.
            for attr in dir(node):
                obj = getattr(node, attr)
                if isinstance(obj, NodeList):
                    if isinstance(node, BlockNode):
                        current_block = node
                    results += _scan_nodes(obj, context, instance_types, current_block)
    return results
Loop through all nodes of a single scope level .
5,822
def get_node_instances(nodelist, instances):
    """Find the nodes of the given instance types in a template nodelist."""
    context = _get_main_context(nodelist)
    # Unwrap Django's TemplateAdapter when present.
    if TemplateAdapter is not None and isinstance(nodelist, TemplateAdapter):
        nodelist = nodelist.template
    return _scan_nodes(nodelist, context, instances)
Find the nodes of a given instance .
5,823
def get_config_file():
    """Get the model configuration file name from the '-ini' argv option."""
    parser = argparse.ArgumentParser(description="Read configuration file.")
    parser.add_argument('-ini', help="Full path of configuration file")
    ini_file = parser.parse_args().ini
    if not FileClass.is_file_exists(ini_file):
        print("Usage: -ini <full path to the configuration file.>")
        exit(-1)
    return ini_file
Get model configuration file name from argv
5,824
def isnumerical(x):
    """Check whether *x* can be interpreted as a float.

    Returns True for ints, floats and numeric strings; False otherwise
    (including None and non-numeric strings).  The original had an
    unused local and three redundant except clauses all returning False.
    """
    try:
        float(x)
    except Exception:  # any failure of float() means "not numerical"
        return False
    return True
Check the input x is numerical or not .
5,825
def rsquare(obsvalues, simvalues):
    """Calculate the coefficient of determination (R-square)."""
    if len(obsvalues) != len(simvalues):
        raise ValueError("The size of observed and simulated values must be "
                         "the same for R-square calculation!")
    if not isinstance(obsvalues, numpy.ndarray):
        obsvalues = numpy.array(obsvalues)
    if not isinstance(simvalues, numpy.ndarray):
        simvalues = numpy.array(simvalues)
    obs_dev = obsvalues - numpy.mean(obsvalues)
    sim_dev = simvalues - numpy.mean(simvalues)
    ss_obs = numpy.sum(obs_dev ** 2)
    ss_sim = numpy.sum(sim_dev ** 2)
    ss_cross = numpy.sum(obs_dev * sim_dev)
    denominator = ss_obs ** 0.5 * ss_sim ** 0.5
    # Degenerate case: zero variance in either series.
    if MathClass.floatequal(denominator, 0.):
        return 1.
    return (ss_cross / denominator) ** 2.
Calculate Coefficient of determination .
5,826
def rmse(obsvalues, simvalues):
    """Calculate root mean square error (RMSE).

    Raises ValueError when the two inputs differ in length.
    """
    if len(obsvalues) != len(simvalues):
        # Fixed copy/paste bug: the message previously said "R-square".
        raise ValueError("The size of observed and simulated values must be "
                         "the same for RMSE calculation!")
    if not isinstance(obsvalues, numpy.ndarray):
        obsvalues = numpy.array(obsvalues)
    if not isinstance(simvalues, numpy.ndarray):
        simvalues = numpy.array(simvalues)
    return numpy.sqrt(numpy.mean((obsvalues - simvalues) ** 2.))
Calculate RMSE .
5,827
def pbias(obsvalues, simvalues):
    """Calculate PBIAS, i.e. percent model bias."""
    if len(obsvalues) != len(simvalues):
        raise ValueError("The size of observed and simulated values must be"
                         " the same for PBIAS calculation!")
    deviations = ((obs - sim) * 100 for obs, sim in zip(obsvalues, simvalues))
    return sum(deviations) / sum(obsvalues)
Calculate PBIAS or percent model bias .
5,828
def convert_str2num(unicode_str):
    """Convert a string to int or float when numerical.

    Recurses into tuples and lists; non-numeric strings are coerced to
    str; all other values pass through unchanged.
    """
    if MathClass.isnumerical(unicode_str):
        number = float(unicode_str)
        if number % 1. == 0.:
            number = int(number)
        return number
    if is_string(unicode_str):
        return str(unicode_str)
    if isinstance(unicode_str, tuple):
        return tuple(StringClass.convert_str2num(v) for v in unicode_str)
    if isinstance(unicode_str, list):
        return list(StringClass.convert_str2num(v) for v in unicode_str)
    return unicode_str
Convert string to string integer or float . Support tuple or list .
5,829
def string_in_list(tmp_str, strlist):
    """Case-insensitive membership test of *tmp_str* in *strlist*."""
    lowered = [item.lower() for item in strlist]
    return tmp_str.lower() in lowered
Is tmp_str in strlist case insensitive .
5,830
def is_file_exists(filename):
    """Check that *filename* is a non-None path to an existing regular file."""
    if filename is None:
        return False
    return os.path.exists(filename) and os.path.isfile(filename)
Check the existence of file path .
5,831
def is_dir_exists(dirpath):
    """Check that *dirpath* is a non-None path to an existing directory."""
    if dirpath is None:
        return False
    return os.path.exists(dirpath) and os.path.isdir(dirpath)
Check the existence of folder path .
5,832
def copy_files(filename, dstfilename):
    """Copy every same-rooted file (e.g. ESRI Shapefile parts) to the dst root."""
    FileClass.remove_files(dstfilename)
    dst_prefix = os.path.splitext(dstfilename)[0]
    src_pattern = os.path.splitext(filename)[0] + '.*'
    for src in glob.iglob(src_pattern):
        suffix = os.path.splitext(src)[1]
        copy(src, dst_prefix + suffix)
Copy files with the same name and different suffixes such as ESRI Shapefile .
5,833
def remove_files(filename):
    """Delete every file sharing *filename*'s root, regardless of suffix."""
    root_pattern = os.path.splitext(filename)[0] + '.*'
    for matched in glob.iglob(root_pattern):
        os.remove(matched)
Delete all files with the same root as filename, i.e. regardless of suffix, such as ESRI Shapefile components.
5,834
def is_up_to_date(outfile, basedatetime):
    """Return True if *outfile* exists and is no older than *basedatetime*."""
    return bool(os.path.exists(outfile)
                and os.path.getmtime(outfile) >= basedatetime)
Return true if outfile exists and is no older than base datetime .
5,835
def get_executable_fullpath(name, dirname=None):
    """Get the full path of a given executable name.

    *dirname* is searched first (when given), then the system PATH via
    'where' (Windows) or 'which'.  Exits the process when not found.
    """
    if name is None:
        return None
    if is_string(name):
        name = str(name)
    else:
        raise RuntimeError('The input function name or path must be string!')
    if dirname is not None:
        candidate = os.path.abspath(dirname) + os.sep + name
        if os.path.isfile(candidate):
            return candidate
    if sysstr == 'Windows':
        findout = UtilClass.run_command('where %s' % name)
    else:
        findout = UtilClass.run_command('which %s' % name)
    if not findout or len(findout) == 0:
        print("%s is not included in the env path" % name)
        exit(-1)
    first_path = findout[0].split('\n')[0]
    if os.path.exists(first_path):
        return first_path
    return None
get the full path of a given executable name
5,836
def get_file_fullpath(name, dirname=None):
    """Return a full path for *name*, resolving it relative to *dirname*.

    A name that already contains a path separator is made absolute as-is.
    """
    if name is None:
        return None
    if is_string(name):
        name = str(name)
    else:
        raise RuntimeError('The input function name or path must be string!')
    for sep in ['\\', '/', os.sep]:
        if sep in name:
            return os.path.abspath(name)
    if dirname is not None:
        name = os.path.abspath(dirname) + os.sep + name
    return name
Return full path if available .
5,837
def get_filename_by_suffixes(dir_src, suffixes):
    """List file names in *dir_src* whose extension matches *suffixes*.

    *suffixes* may be a single string or a list; leading dots are added
    when missing.  Returns None for unsupported suffix types.
    """
    entries = os.listdir(dir_src)
    if is_string(suffixes):
        suffixes = [suffixes]
    if not isinstance(suffixes, list):
        return None
    for i, suffix in enumerate(suffixes):
        if len(suffix) >= 1 and suffix[0] != '.':
            suffixes[i] = '.' + suffix
    matched = []
    for entry in entries:
        ext = os.path.splitext(entry)[1]
        if StringClass.string_in_list(ext, suffixes):
            matched.append(entry)
    return matched
get file names with the given suffixes in the given directory
5,838
def get_full_filename_by_suffixes(dir_src, suffixes):
    """Full paths of files in *dir_src* matching the given suffixes."""
    names = FileClass.get_filename_by_suffixes(dir_src, suffixes)
    if names is None:
        return None
    return [dir_src + os.sep + name for name in names]
get full file names with the given suffixes in the given directory
5,839
def get_core_name_without_suffix(file_path):
    """Return the file's base name with its final suffix stripped."""
    normalized = file_path.replace('\\', '/') if '\\' in file_path else file_path
    base = os.path.basename(normalized)
    pieces = base.split('.')
    if len(pieces) > 1:
        pieces = pieces[:-1]
    if isinstance(pieces, list):
        return str('.'.join(pieces))
    return str(pieces)
Return core file name without suffix .
5,840
def add_postfix(file_path, postfix):
    """Insert '_<postfix>' before the suffix of a full file path."""
    cur_sep = ''
    for sep in ['\\', '/', os.sep]:
        if sep in file_path:
            cur_sep = sep
            break
    corename = FileClass.get_core_name_without_suffix(file_path)
    pieces = os.path.basename(file_path).split('.')
    suffix = pieces[-1] if len(pieces) > 1 else ''
    newname = os.path.dirname(file_path) + cur_sep + corename + '_' + postfix
    if suffix != '':
        newname += '.' + suffix
    return str(newname)
Add postfix for a full file path .
5,841
def day_of_year(dt):
    """Day index of year, from 1 to 365 (or 366 in leap years).

    The old implementation round-tripped through time.mktime/localtime,
    which can fail for dates outside the platform's epoch range;
    timetuple() already carries tm_yday directly.
    """
    return dt.timetuple().tm_yday
Day index of year from 1 to 365 or 366
5,842
def run_command(commands):
    """Execute an external command and return its output lines as a list.

    *commands* may be a string or a list of arguments.  On Windows the
    list is joined into one string, GPF error dialogs are suppressed and
    the console window is hidden; elsewhere a string implies shell=True.
    Raises subprocess.CalledProcessError on a non-zero return code.
    """
    use_shell = False
    subprocess_flags = 0
    startupinfo = None
    if sysstr == 'Windows':
        if isinstance(commands, list):
            commands = ' '.join(str(c) for c in commands)
        import ctypes
        # Suppress the "program has stopped working" GUI dialog.
        SEM_NOGPFAULTERRORBOX = 0x0002
        ctypes.windll.kernel32.SetErrorMode(SEM_NOGPFAULTERRORBOX)
        subprocess_flags = 0x8000000
        startupinfo = subprocess.STARTUPINFO()
        startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
    else:
        if is_string(commands):
            use_shell = True
        elif isinstance(commands, list):
            # Strip quotes around the executable path and stringify
            # numeric arguments.
            if commands[0][0] == commands[0][-1] == '"' or commands[0][0] == commands[0][-1] == "'":
                commands[0] = commands[0][1:-1]
            for idx, v in enumerate(commands):
                if isinstance(v, int) or isinstance(v, float):
                    commands[idx] = repr(v)
    print(commands)
    process = subprocess.Popen(commands, shell=use_shell,
                               stdout=subprocess.PIPE,
                               stdin=open(os.devnull),
                               stderr=subprocess.STDOUT,
                               universal_newlines=True,
                               startupinfo=startupinfo,
                               creationflags=subprocess_flags)
    out, err = process.communicate()
    recode = process.returncode
    if out is None:
        return ['']
    if recode is not None and recode != 0:
        raise subprocess.CalledProcessError(-1, commands,
                                            "ERROR occurred when running subprocess!")
    if '\n' in out:
        return out.split('\n')
    return [out]
Execute an external command and return the output lines as a list. On Windows, refer to "handling subprocess crash in Windows".
5,843
def current_path(local_function):
    """Get the directory containing the source file of *local_function*."""
    from inspect import getsourcefile
    src = getsourcefile(local_function)
    if src is None:
        return None
    return os.path.dirname(os.path.abspath(src))
Get the current path; refer to "how do I get the path of the currently executed file in Python".
5,844
def mkdir(dir_path):
    """Make directory (including parents) if it does not already exist.

    exist_ok covers the directory-already-exists case atomically,
    avoiding the check-then-create race in the original.
    """
    os.makedirs(dir_path, exist_ok=True)
Make directory if not existed
5,845
def rmmkdir(dir_path):
    """Ensure a fresh, empty directory at *dir_path*.

    An existing directory is removed first (ignoring removal errors),
    then the directory is (re)created.
    """
    if os.path.isdir(dir_path) and os.path.exists(dir_path):
        rmtree(dir_path, True)
    os.makedirs(dir_path)
If directory existed then remove and make ; else make it .
5,846
def print_msg(contentlist):
    """Concatenate a message list into a single newline-joined string.

    A plain string longer than one character gets a trailing newline
    appended when missing.
    """
    if isinstance(contentlist, (list, tuple)):
        return '\n'.join(contentlist)
    if len(contentlist) > 1 and contentlist[-1] != '\n':
        contentlist += '\n'
    return contentlist
concatenate message list as single string with line feed .
5,847
def decode_strs_in_dict(unicode_dict):
    """Decode dictionary keys and values (unicode strings or numerics).

    Nested dicts are converted recursively.
    """
    converted = {StringClass.convert_str2num(k): StringClass.convert_str2num(v)
                 for k, v in iteritems(unicode_dict)}
    for key, value in iteritems(converted):
        if isinstance(value, dict):
            converted[key] = UtilClass.decode_strs_in_dict(value)
    return converted
Decode strings in dictionary which may contains unicode strings or numeric values .
5,848
def undo(self):
    """Rewind the game to the previous state and refresh observers."""
    manager = self.undo_manager
    manager.undo()
    self.notify_observers()
    logging.debug('undo_manager undo stack={}'.format(manager._undo_stack))
Rewind the game to the previous state .
5,849
def redo(self):
    """Redo the latest undone command and refresh observers."""
    manager = self.undo_manager
    manager.redo()
    self.notify_observers()
    logging.debug('undo_manager redo stack={}'.format(manager._redo_stack))
Redo the latest undone command .
5,850
def net_query(name: str) -> Constants:
    """Find the NetworkParams for a network by its long or short name.

    Raises UnsupportedNetwork when no match is found.
    """
    for candidate in networks:
        if name in (candidate.name, candidate.shortname):
            return candidate
    raise UnsupportedNetwork
Find the NetworkParams for a network by its long or short name . Raises UnsupportedNetwork if no NetworkParams is found .
5,851
def get_port_at(self, tile_id, direction):
    """Return the port at (tile_id, direction).

    If no port is found, a new 'none' port is created, appended to
    self.ports, and returned.
    """
    for existing in self.ports:
        if existing.tile_id == tile_id and existing.direction == direction:
            return existing
    created = Port(tile_id, direction, PortType.none)
    self.ports.append(created)
    return created
If no port is found a new none port is made and added to self . ports .
5,852
def rotate_ports(self):
    """Rotate all ports 90 degrees.

    Useful when using the default port setup but the spectator is
    watching at a rotated angle from true north.
    """
    for port in self.ports:
        port.tile_id = ((port.tile_id + 1)
                        % len(hexgrid.coastal_tile_ids())) + 1
        port.direction = hexgrid.rotate_direction(hexgrid.EDGE,
                                                  port.direction, ccw=True)
    self.notify_observers()
Rotates the ports 90 degrees . Useful when using the default port setup but the spectator is watching at a rotated angle from true north .
5,853
def intersect_keys(keys, reffile, cache=False, clean_accs=False):
    """Extract SeqRecords from an indexed FASTA file by matching keys.

    Generator; keys with no match are logged and skipped.  With
    cache=True an on-disk SQLite index (reffile + '.sqlite') is reused
    when newer than reffile, otherwise rebuilt.  clean_accs runs
    accessions through clean_accession on both sides.
    """
    index = None
    if cache:
        refcache = reffile + '.sqlite'
        if os.path.exists(refcache):
            if os.stat(refcache).st_mtime < os.stat(reffile).st_mtime:
                # Stale cache: fall through and rebuild it below.
                logging.warn("Outdated cache; rebuilding index")
            else:
                try:
                    index = (SeqIO.index_db(refcache, key_function=clean_accession)
                             if clean_accs
                             else SeqIO.index_db(refcache))
                except Exception:
                    logging.warn("Skipping corrupted cache; rebuilding index")
                    index = None
    else:
        # No caching requested: build the index in memory.
        refcache = ':memory:'
    if index is None:
        # (Re)build the index, on disk or in memory.
        index = (SeqIO.index_db(refcache, [reffile], 'fasta',
                                key_function=clean_accession)
                 if clean_accs
                 else SeqIO.index_db(refcache, [reffile], 'fasta'))
    if clean_accs:
        keys = (clean_accession(k) for k in keys)
    for key in keys:
        try:
            record = index[key]
        except LookupError:
            # Unmatched key: report and move on.
            logging.info("No match: %s", repr(key))
            continue
        yield record
Extract SeqRecords from the index by matching keys .
5,854
def aa_frequencies(seq, gap_chars='-.'):
    """Calculate relative amino acid frequencies in a sequence.

    Gap characters are ignored; the returned dict's values sum to 1.0.
    """
    aa_counts = Counter(seq)
    for gap_char in gap_chars:
        if gap_char in aa_counts:
            del aa_counts[gap_char]
    scale = 1.0 / sum(aa_counts.values())
    # Counter.iteritems() does not exist on Python 3; items() works on both.
    return dict((aa, cnt * scale) for aa, cnt in aa_counts.items())
Calculate the amino acid frequencies in a sequence set .
5,855
def giving(self):
    """(number, type) tuples for each resource given, giver -> getter."""
    logging.debug('give={}'.format(self._give))
    tallies = Counter(self._give.copy())
    return [(count, resource) for resource, count in tallies.items()]
Returns tuples corresponding to the number and type of each resource in the trade from giver - > getter
5,856
def getting(self):
    """(number, type) tuples for each resource received, getter -> giver."""
    tallies = Counter(self._get.copy())
    return [(count, resource) for resource, count in tallies.items()]
Returns tuples corresponding to the number and type of each resource in the trade from getter - > giver
5,857
def family_check(self):
    """Check that family members do not break the family structure.

    E.g. nonexistent parent or wrong sex on a parent.  Also collects
    affected individuals and all trios/duos found (useful since GATK can
    only phase trios and duos), plus sibling relations.
    """
    self.logger.info("Checking family relations for {0}".format(self.family_id))
    for individual_id in self.individuals:
        self.logger.debug("Checking individual {0}".format(individual_id))
        individual = self.individuals[individual_id]
        self.logger.debug("Checking if individual {0} is affected".format(individual_id))
        if individual.affected:
            self.logger.debug("Found affected individual {0}".format(individual_id))
            self.affected_individuals.add(individual_id)
        father = individual.father
        mother = individual.mother
        if individual.has_parents:
            self.logger.debug("Individual {0} has parents".format(individual_id))
            self.no_relations = False
            # Validate parent existence and sex; re-raise after logging.
            try:
                self.check_parent(father, father=True)
                self.check_parent(mother, father=False)
            except PedigreeError as e:
                self.logger.error(e.message)
                raise e
            # Classify the unit: both parents -> trio, one parent -> duo.
            if individual.has_both_parents:
                self.trios.append(set([individual_id, father, mother]))
            elif father != '0':
                self.duos.append(set([individual_id, father]))
            else:
                self.duos.append(set([individual_id, mother]))
        # NOTE(review): nesting reconstructed from flattened source -- the
        # sibling scan is taken to run for every individual; confirm.
        for individual_2_id in self.individuals:
            if individual_id != individual_2_id:
                if self.check_siblings(individual_id, individual_2_id):
                    individual.siblings.add(individual_2_id)
Check if the family members break the structure of the family . eg . nonexistent parent wrong sex on parent etc . Also extracts all trios found this is of help for many at the moment since GATK can only do phasing of trios and duos .
5,858
def check_parent(self, parent_id, father=False):
    """Validate a parent's presence in the family and its sex.

    parent_id '0' denotes "no parent" and is always accepted; otherwise
    PedigreeError is raised for a missing parent or a sex mismatch.
    """
    self.logger.debug("Checking parent {0}".format(parent_id))
    if parent_id == '0':
        return
    if parent_id not in self.individuals:
        raise PedigreeError(self.family_id, parent_id,
                            'Parent is not in family.')
    if father:
        if self.individuals[parent_id].sex != 1:
            raise PedigreeError(self.family_id, parent_id,
                                'Father is not specified as male.')
    else:
        if self.individuals[parent_id].sex != 2:
            raise PedigreeError(self.family_id, parent_id,
                                'Mother is not specified as female.')
    return
Check if the parent info is correct . If an individual is not present in file raise exeption .
5,859
def to_ped(self, outfile=None):
    """Print the individuals of the family in ped format.

    The header is the standard six ped columns plus any recognised headers
    found in the individuals' extra info.

    Args:
        outfile: optional writable file object; if None, rows are printed
            to stdout.
    """
    ped_header = [
        '#FamilyID',
        'IndividualID',
        'PaternalID',
        'MaternalID',
        'Sex',
        'Phenotype',
    ]
    # Only these extra-info keys are promoted into header columns.
    extra_headers = [
        'InheritanceModel',
        'Proband',
        'Consultand',
        'Alive'
    ]
    for individual_id in self.individuals:
        individual = self.individuals[individual_id]
        for info in individual.extra_info:
            if info in extra_headers:
                if info not in ped_header:
                    ped_header.append(info)
    self.logger.debug("Ped headers found: {0}".format(', '.join(ped_header)))
    if outfile:
        outfile.write('\t'.join(ped_header) + '\n')
    else:
        print('\t'.join(ped_header))
    for individual in self.to_json():
        ped_info = []
        ped_info.append(individual['family_id'])
        ped_info.append(individual['id'])
        ped_info.append(individual['father'])
        ped_info.append(individual['mother'])
        ped_info.append(individual['sex'])
        ped_info.append(individual['phenotype'])
        # Fill extra columns; '.' marks a missing value.
        if len(ped_header) > 6:
            for header in ped_header[6:]:
                ped_info.append(individual['extra_info'].get(header, '.'))
        if outfile:
            outfile.write('\t'.join(ped_info) + '\n')
        else:
            print('\t'.join(ped_info))
Print the individuals of the family in ped format The header will be the original ped header plus all headers found in extra info of the individuals
5,860
def find_deck(provider: Provider, key: str, version: int, prod: bool = True) -> Optional[Deck]:
    """Find a specific deck by its deck id (the spawn transaction id)."""
    pa_params = param_query(provider.network)
    p2th = pa_params.P2TH_addr if prod else pa_params.test_P2TH_addr
    raw_tx = provider.getrawtransaction(key, 1)
    return deck_parser((provider, raw_tx, 1, p2th))
Find specific deck by deck id .
5,861
def deck_spawn(provider: Provider, deck: Deck, inputs: dict,
               change_address: str, locktime: int = 0) -> Transaction:
    """Create an unsigned Deck spawn raw transaction.

    Args:
        provider: blockchain data provider.
        deck: the Deck to register; its metainfo goes into an OP_RETURN output.
        inputs: dict with 'utxos' (inputs to spend) and 'total' (their sum).
        change_address: address receiving the remaining funds.
        locktime: optional transaction locktime.

    Returns:
        An unsigned Transaction with three outputs:
        n=0 P2TH tag fee, n=1 OP_RETURN metadata, n=2 change.
    """
    network_params = net_query(deck.network)
    pa_params = param_query(deck.network)
    # Production vs. test decks are tagged to different P2TH addresses.
    if deck.production:
        p2th_addr = pa_params.P2TH_addr
    else:
        p2th_addr = pa_params.test_P2TH_addr
    change_sum = Decimal(inputs['total'] - network_params.min_tx_fee - pa_params.P2TH_fee)
    txouts = [
        # n=0: P2TH fee output tagging the deck registry address.
        tx_output(network=deck.network, value=pa_params.P2TH_fee, n=0,
                  script=p2pkh_script(address=p2th_addr,
                                      network=deck.network)),
        # n=1: zero-value OP_RETURN output carrying the deck metainfo.
        tx_output(network=deck.network, value=Decimal(0), n=1,
                  script=nulldata_script(deck.metainfo_to_protobuf)),
        # n=2: change back to the spender.
        tx_output(network=deck.network, value=change_sum, n=2,
                  script=p2pkh_script(address=change_address,
                                      network=deck.network))
    ]
    unsigned_tx = make_raw_transaction(network=deck.network,
                                       inputs=inputs['utxos'],
                                       outputs=txouts,
                                       locktime=Locktime(locktime))
    return unsigned_tx
Creates Deck spawn raw transaction .
5,862
def get_card_transfer(provider: Provider, deck: Deck, txid: str,
                      debug: bool = False) -> Iterator:
    """Fetch a single card transfer by its transaction id."""
    raw_tx = provider.getrawtransaction(txid, 1)
    bundle = card_bundler(provider, deck, raw_tx)
    return card_bundle_parser(bundle, debug)
Get a single card transfer by its transaction id.
5,863
def find_all_valid_cards(provider: Provider, deck: Deck) -> Generator:
    """Yield all valid cards on this deck, dropping cards that do not
    comply with the deck's issue mode."""
    candidates = [card
                  for bundle in get_card_bundles(provider, deck)
                  for card in bundle]
    yield from validate_card_issue_modes(deck.issue_mode, candidates)
Find all the valid cards on this deck, filtering out cards which don't play nice with the deck issue mode.
5,864
def card_transfer(provider: Provider, card: CardTransfer, inputs: dict,
                  change_address: str, locktime: int = 0) -> Transaction:
    """Prepare an unsigned CardTransfer Transaction object.

    Outputs: n=0 P2TH deck tag, n=1 OP_RETURN card metainfo, then one
    zero-value output per receiver, and finally the change output.

    Raises:
        Exception: if ``card.deck_p2th`` is not set.
    """
    network_params = net_query(provider.network)
    pa_params = param_query(provider.network)
    if card.deck_p2th is None:
        raise Exception("card.deck_p2th required for tx_output")
    outs = [
        # n=0: P2TH fee output tagging the deck's P2TH address.
        tx_output(network=provider.network, value=pa_params.P2TH_fee, n=0,
                  script=p2pkh_script(address=card.deck_p2th,
                                      network=provider.network)),
        # n=1: zero-value OP_RETURN output carrying the card metainfo.
        tx_output(network=provider.network, value=Decimal(0), n=1,
                  script=nulldata_script(card.metainfo_to_protobuf))
    ]
    # One zero-value marker output per receiver, starting at index 2.
    for addr, index in zip(card.receiver, range(len(card.receiver))):
        outs.append(
            tx_output(network=provider.network, value=Decimal(0), n=index + 2,
                      script=p2pkh_script(address=addr,
                                          network=provider.network)))
    change_sum = Decimal(inputs['total'] - network_params.min_tx_fee - pa_params.P2TH_fee)
    # NOTE(review): n=len(outs)+1 skips one index (the next free index is
    # len(outs)); looks like an off-by-one unless make_raw_transaction
    # renumbers outputs — TODO confirm.
    outs.append(
        tx_output(network=provider.network, value=change_sum, n=len(outs) + 1,
                  script=p2pkh_script(address=change_address,
                                      network=provider.network)))
    unsigned_tx = make_raw_transaction(network=provider.network,
                                       inputs=inputs['utxos'],
                                       outputs=outs,
                                       locktime=Locktime(locktime))
    return unsigned_tx
Prepare the CardTransfer Transaction object
5,865
def rfc3339_to_datetime(data):
    """Convert an RFC 3339 date representation into a Python object.

    Bare dates ('YYYY-MM-DD') yield ``datetime.date``; full timestamps
    yield timezone-aware ``datetime.datetime``.

    Raises:
        ValueError: if *data* is not a valid RFC 3339 representation.
    """
    # First, try the date-only form.
    try:
        parsed = time.strptime(data, '%Y-%m-%d')
    except ValueError:
        pass
    else:
        return date(*parsed[:3])
    # Otherwise parse a full timestamp; a trailing 'Z' means UTC.
    try:
        stamp, _, zone = data.partition('Z')
        tz = offset(zone) if zone else offset('00:00')
        fmt = '%Y-%m-%dT%H:%M:%S'
        if '.' in stamp and stamp.rsplit('.', 1)[-1].isdigit():
            fmt += '.%f'
        parsed = time.strptime(stamp, fmt)
        # NOTE: fractional seconds are parsed but not carried into the
        # result (only the first six struct_time fields are used).
        return datetime(*parsed[:6], tzinfo=tz)
    except ValueError:
        raise ValueError(
            'date-time {!r} is not a valid rfc3339 date representation'.format(data))
convert a rfc3339 date representation into a Python datetime
5,866
def log_config(verbose=1):
    """Set up logging the way I like it.

    verbose=0 -> WARNING, verbose=1 -> INFO, anything else -> DEBUG,
    each with a progressively more detailed format string.
    """
    presets = {
        0: (logging.WARNING, "%(module)s: %(message)s"),
        1: (logging.INFO, "%(module)s [@%(lineno)s]: %(message)s"),
    }
    level, fmt = presets.get(
        verbose,
        (logging.DEBUG, "%(module)s [%(lineno)s]: %(levelname)s: %(message)s"),
    )
    logging.basicConfig(format=fmt, level=level)
Set up logging the way I like it .
5,867
def refresh_instruments(self):
    """If self.tree_settings has been expanded, ask the instruments for
    their actual (hardware) values and refresh the tree items."""
    def list_access_nested_dict(dict, somelist):
        # Walk a nested dict by the successive keys in somelist.
        return reduce(operator.getitem, somelist, dict)

    def update(item):
        # Recursively refresh only expanded branches; leaves hold values.
        if item.isExpanded():
            for index in range(item.childCount()):
                child = item.child(index)
                if child.childCount() == 0:
                    instrument, path_to_instrument = child.get_instrument()
                    path_to_instrument.reverse()
                    try:
                        value = instrument.read_probes(path_to_instrument[-1])
                    except AssertionError:
                        # Probe not readable: fall back to the stored setting.
                        value = list_access_nested_dict(instrument.settings, path_to_instrument)
                    child.value = value
                else:
                    update(child)

    # Block signals so programmatic updates don't trigger change handlers.
    self.tree_settings.blockSignals(True)
    for index in range(self.tree_settings.topLevelItemCount()):
        instrument = self.tree_settings.topLevelItem(index)
        update(instrument)
    self.tree_settings.blockSignals(False)
if self . tree_settings has been expanded ask instruments for their actual values
5,868
def update_parameters(self, treeWidget):
    """Push an edited tree value into the corresponding instrument or
    script settings and log what changed.

    Args:
        treeWidget: the tree (self.tree_settings or self.tree_scripts)
            whose current item was edited.
    """
    if treeWidget == self.tree_settings:
        item = treeWidget.currentItem()
        instrument, path_to_instrument = item.get_instrument()
        # Wrap the new value in nested dicts keyed by the path, innermost first.
        dictator = item.value
        for element in path_to_instrument:
            dictator = {element: dictator}
        # Read the old value before updating, walking the path root-first.
        old_value = instrument.settings
        path_to_instrument.reverse()
        for element in path_to_instrument:
            old_value = old_value[element]
        instrument.update(dictator)
        new_value = item.value
        # Bug fix: compare by value (!=) instead of identity (is not);
        # identity reported spurious "changed" for equal but distinct objects.
        if new_value != old_value:
            msg = "changed parameter {:s} from {:s} to {:s} on {:s}".format(
                item.name, str(old_value), str(new_value), instrument.name)
        else:
            msg = "did not change parameter {:s} on {:s}".format(
                item.name, instrument.name)
        self.log(msg)
    elif treeWidget == self.tree_scripts:
        item = treeWidget.currentItem()
        script, path_to_script, _ = item.get_script()
        # NOTE(review): the original branched on get_instrument() returning
        # None, but both branches were identical; the call is kept in case
        # the lookup has side effects — TODO confirm it can be dropped.
        instrument, path_to_instrument = item.get_instrument()
        new_value = item.value
        msg = "changed parameter {:s} to {:s} in {:s}".format(
            item.name, str(new_value), script.name)
        self.log(msg)
updates the internal dictionaries for scripts and instruments with values from the respective trees
5,869
def script_finished(self):
    """Slot invoked when the running script signals completion: tear down
    signal connections, plot the result, and re-enable the start button."""
    finished_script = self.current_script
    finished_script.updateProgress.disconnect(self.update_status)
    self.script_thread.started.disconnect()
    finished_script.finished.disconnect()
    self.current_script = None
    self.plot_script(finished_script)
    self.progressBar.setValue(100)
    self.btn_start_script.setEnabled(True)
    self.btn_skip_subscript.setEnabled(False)
Waits for the script to emit the script_finished signal.
5,870
def update_probes(self, progress):
    """Update the probe tree with the latest probe readings, refresh the
    plot of the selected probe, and optionally append a CSV log line."""
    new_values = self.read_probes.probes_values
    probe_count = len(self.read_probes.probes)
    # If probes were added since the last refresh, rebuild the tree;
    # otherwise update existing items in place.
    if probe_count > self.tree_probes.topLevelItemCount():
        self.fill_treewidget(self.tree_probes, new_values)
    else:
        for x in range(probe_count):
            topLvlItem = self.tree_probes.topLevelItem(x)
            for child_id in range(topLvlItem.childCount()):
                child = topLvlItem.child(child_id)
                child.value = new_values[topLvlItem.name][child.name]
                child.setText(1, str(child.value))
    if self.probe_to_plot is not None:
        self.probe_to_plot.plot(self.matplotlibwidget_1.axes)
        self.matplotlibwidget_1.draw()
    if self.chk_probe_log.isChecked():
        # Flatten all probe values of all instruments into one CSV row.
        data = ','.join(list(np.array([[str(p) for p in list(p_dict.values())] for instr, p_dict in new_values.items()]).flatten()))
        self.probe_file.write('{:s}\n'.format(data))
update the probe tree
5,871
def update_script_from_item(self, item):
    """Update the script based on the information provided in *item*.

    Instrument settings and sub-scripts are updated explicitly (and removed
    from the dict) so that ``script.update`` only receives plain parameters.
    """
    script, path_to_script, script_item = item.get_script()
    # The item's dict has a single top-level entry: the script's own settings.
    dictator = list(script_item.to_dict().values())[0]
    for instrument in list(script.instruments.keys()):
        script.instruments[instrument]['settings'] = dictator[instrument]['settings']
        del dictator[instrument]
    # Recurse into sub-scripts before applying the remaining parameters.
    for sub_script_name in list(script.scripts.keys()):
        sub_script_item = script_item.get_subscript(sub_script_name)
        self.update_script_from_item(sub_script_item)
        del dictator[sub_script_name]
    script.update(dictator)
    script.data_path = self.gui_settings['data_folder']
updates the script based on the information provided in item
5,872
def message_search(self, text, on_success, peer=None, min_date=None,
                   max_date=None, max_id=None, offset=0, limit=255):
    """Unsupported in the Bot API; always raises TWXUnsupportedMethod."""
    raise TWXUnsupportedMethod()
Unsupported in the Bot API
5,873
def remove(self, pointer):
    """Remove an element from a sequence, or a member from a mapping,
    at the location given by the JSON pointer.

    Works on a deep copy of the document and returns a new Target.

    Raises:
        Error: wrapping any failure during resolution or removal.
    """
    doc = deepcopy(self.document)
    parent, obj = None, doc
    try:
        # Resolve the pointer; after the loop, `token` is the last segment
        # and `parent` is the container it addresses.
        for token in Pointer(pointer):
            parent, obj = obj, token.extract(obj, bypass_ref=True)
        if isinstance(parent, Mapping):
            del parent[token]
        if isinstance(parent, MutableSequence):
            parent.pop(int(token))
    except Exception as error:
        raise Error(*error.args)
    return Target(doc)
Remove element from sequence member from mapping .
5,874
def _netname(name: str) -> dict:
    """Resolve a network name into its long and short forms.

    Required because some providers use short names and others long names.

    Raises:
        UnsupportedNetwork: if the network cannot be resolved.
    """
    try:
        # Query once instead of twice (the original called net_query for
        # each attribute).
        net = net_query(name)
        return {'long': net.name, 'short': net.shortname}
    except AttributeError:
        raise UnsupportedNetwork()
Resolve the network name; required because some providers use shortnames and others use longnames.
5,875
def sendrawtransaction(cls, rawtxn: str) -> str:
    """Broadcast a raw transaction via the remote explorer API.

    Args:
        rawtxn: hex-encoded raw transaction.

    Returns:
        The API response body decoded as UTF-8.
    """
    if cls.is_testnet:
        url = 'https://testnet-explorer.peercoin.net/api/sendrawtransaction?hex={0}'.format(rawtxn)
    else:
        url = 'https://explorer.peercoin.net/api/sendrawtransaction?hex={0}'.format(rawtxn)
    # Bug fix: close the HTTP response (the original leaked it).
    with urllib.request.urlopen(url) as resp:
        return resp.read().decode('utf-8')
sendrawtransaction remote API
5,876
def validateaddress(self, address: str) -> bool:
    """Return True if *address* parses as a valid address for this
    network, False otherwise."""
    try:
        Address.from_string(address, self.network_properties)
    except InvalidAddress:
        return False
    else:
        return True
Returns True if the passed address is valid False otherwise .
5,877
def chunker(l, n):
    """Yield successive n-sized chunks from the list *l*.

    Modernization: uses the builtin ``range`` instead of the nonstandard
    ``ranger`` alias (a Python 2 compatibility remnant); on Python 3,
    ``range`` is already lazy.
    """
    for i in range(0, len(l), n):
        yield l[i:i + n]
Generates n - sized chunks from the list l
5,878
def post(self, endpoint, data, parallelism=5):
    """Execute batched POST requests against the API.

    Splits *data* into chunks of 100 records (the API's per-request limit),
    posts them concurrently, and collects the returned addresses.

    Args:
        endpoint: API path appended to BASE_URL.
        data: sequence of address records to submit.
        parallelism: number of concurrent requests.

    Returns:
        (AddressCollection, dict of status_code -> count).

    Raises:
        SmartyStreetsError subclasses on authentication failure or when
        every response shares a single non-200 status code.
    """
    headers = {
        "Content-Type": "application/json",
        "Accept": "application/json",
        "x-standardize-only": "true" if self.standardize else "false",
        "x-include-invalid": "true" if self.invalid else "false",
        "x-accept-keypair": "true" if self.accept_keypair else "false",
    }
    if not self.logging:
        # Bug fix: suppressing logging requires the header value "true";
        # the previous value "false" left API logging enabled.
        headers["x-suppress-logging"] = "true"
    params = {"auth-id": self.auth_id, "auth-token": self.auth_token}
    url = self.BASE_URL + endpoint
    rs = (
        grequests.post(
            url=url,
            data=json.dumps(stringify(data_chunk)),
            params=params,
            headers=headers,
        )
        for data_chunk in chunker(data, 100)
    )
    responses = grequests.imap(rs, size=parallelism)
    status_codes = {}
    addresses = AddressCollection([])
    for response in responses:
        status_codes[response.status_code] = status_codes.get(response.status_code, 0) + 1
        if response.status_code == 200:
            # Prepend this batch's addresses.
            addresses[0:0] = AddressCollection(response.json())
        elif response.status_code == 401:
            raise ERROR_CODES[401]
    if len(status_codes) == 1:
        if 200 in status_codes:
            return addresses, status_codes
        else:
            # Bug fix: dict.keys() is not subscriptable on Python 3;
            # take the single status code via next(iter(...)).
            raise ERROR_CODES.get(next(iter(status_codes)), SmartyStreetsError)
    return addresses, status_codes
Executes most of the request .
5,879
def _cache_init(self):
    """Initialize the local cache from the Django cache backend; start
    empty when nothing is stored yet."""
    cached = cache.get(self.CACHE_KEY)
    if cached is None:
        cached = defaultdict(dict)
    self._cache = cached
Initializes local cache from Django cache .
5,880
def get_contents_static(self, block_alias, context):
    """Return contents of a static block for the current request.

    Builds (and caches) two lookup tables — one for guests, one for
    authenticated users — mapping URL patterns to candidate contents,
    then picks a random candidate matching the current URL.
    """
    if 'request' not in context:
        return ''
    current_url = context['request'].path
    # Resolve the current URL into a ':namespace:view_name' key.
    try:
        resolver_match = resolve(current_url)
        namespace = ''
        if resolver_match.namespaces:
            namespace = resolver_match.namespaces[0]
        resolved_view_name = ':%s:%s' % (namespace, resolver_match.url_name)
    except Resolver404:
        resolved_view_name = None
    self._cache_init()
    cache_entry_name = cache_get_key(block_alias)
    siteblocks_static = self._cache_get(cache_entry_name)
    if not siteblocks_static:
        blocks = Block.objects.filter(alias=block_alias, hidden=False).only('url', 'contents')
        # Index 0: guest-visible entries; index 1: authenticated-visible.
        siteblocks_static = [defaultdict(list), defaultdict(list)]
        for block in blocks:
            if block.url == '*':
                # Wildcard entry, kept as a plain string key.
                url_re = block.url
            elif block.url.startswith(':'):
                url_re = block.url
                # Normalize ':view' (no namespace) into '::view'.
                if url_re.count(':') == 1:
                    url_re = ':%s' % url_re
            else:
                url_re = re.compile(r'%s' % block.url)
            if block.access_guest:
                siteblocks_static[self.IDX_GUEST][url_re].append(block.contents)
            elif block.access_loggedin:
                siteblocks_static[self.IDX_AUTH][url_re].append(block.contents)
            else:
                # No access restriction: visible to both audiences.
                siteblocks_static[self.IDX_GUEST][url_re].append(block.contents)
                siteblocks_static[self.IDX_AUTH][url_re].append(block.contents)
        self._cache_set(cache_entry_name, siteblocks_static)
        self._cache_save()
    user = getattr(context['request'], 'user', None)
    is_authenticated = getattr(user, 'is_authenticated', False)
    # Before Django 2, is_authenticated was a method, not a property.
    if not DJANGO_2:
        is_authenticated = is_authenticated()
    if is_authenticated:
        lookup_area = siteblocks_static[self.IDX_AUTH]
    else:
        lookup_area = siteblocks_static[self.IDX_GUEST]
    static_block_contents = ''
    if '*' in lookup_area:
        static_block_contents = choice(lookup_area['*'])
    elif resolved_view_name in lookup_area:
        static_block_contents = choice(lookup_area[resolved_view_name])
    else:
        # Fall back to regex keys; assumes remaining keys are compiled
        # patterns — string keys here would lack .match (TODO confirm).
        for url, contents in lookup_area.items():
            if url.match(current_url):
                static_block_contents = choice(contents)
                break
    return static_block_contents
Returns contents of a static block .
5,881
def get_contents_dynamic(self, block_alias, context):
    """Return contents of a dynamic block; empty string when no dynamic
    source is registered for the alias."""
    candidates = get_dynamic_blocks().get(block_alias, [])
    if not candidates:
        return ''
    renderer = choice(candidates)
    return renderer(block_alias=block_alias, block_context=context)
Returns contents of a dynamic block .
5,882
def hash_full_tree(self, leaves):
    """Hash a set of leaves representing a valid full tree and return
    the root hash."""
    root_hash, subtree_hashes = self._hash_full(leaves, 0, len(leaves))
    # A full tree decomposes into one perfect subtree per set bit of the
    # leaf count.
    assert len(subtree_hashes) == count_bits_set(len(leaves))
    if subtree_hashes:
        assert self._hash_fold(subtree_hashes) == root_hash
    else:
        assert root_hash == self.hash_empty()
    return root_hash
Hash a set of leaves representing a valid full tree .
5,883
def cal_model_performance(obsl, siml):
    """Calculate and print model performance indexes (NSE, R2, PBIAS,
    RMSE, RSR) for observed vs. simulated series."""
    metrics = (
        MathClass.nashcoef(obsl, siml),
        MathClass.rsquare(obsl, siml),
        MathClass.pbias(obsl, siml),
        MathClass.rmse(obsl, siml),
        MathClass.rsr(obsl, siml),
    )
    print('NSE: %.2f, R-square: %.2f, PBIAS: %.2f%%, RMSE: %.2f, RSR: %.2f' % metrics)
Calculate model performance indexes .
5,884
def load_features(self):
    """Load all known features for every configured locus from the
    feature service into self.all_feats."""
    for loc in self.loci:
        if self.verbose:
            self.logger.info(self.logname + "Loading features for " + loc)
        self.all_feats.update({loc: self.locus_features(loc)})
        if self.verbose:
            self.logger.info(self.logname + "Finished loading features for " + loc)
    if self.verbose:
        # Shallow size only; contained objects are not counted.
        mem = "{:4.4f}".format(sys.getsizeof(self.all_feats) / 1000000)
        self.logger.info(self.logname + "Finished loading all features * all_feats = " + mem + " MB *")
Loads all the known features from the feature service
5,885
def locus_features(self, locus):
    """Return all features associated with *locus*, keyed by
    'locus:rank:term:sequence' and mapped to their accession."""
    return {
        ":".join([feat.locus, str(feat.rank), feat.term, feat.sequence]): feat.accession
        for feat in self.api.list_features(locus=locus)
    }
Returns all features associated with a locus
5,886
def tarfile_to_pif(filename, temp_root_dir='', verbose=0):
    """Extract a tar file containing DFT data into a scratch directory,
    parse it, and clean up afterwards."""
    scratch = temp_root_dir + str(uuid.uuid4())
    os.makedirs(scratch)
    try:
        with tarfile.open(filename, 'r') as archive:
            archive.extractall(path=scratch)
        for entry in os.listdir(scratch):
            candidate = scratch + '/' + entry
            if os.path.isdir(candidate):
                # The archive wrapped its contents in a top-level directory.
                return directory_to_pif(candidate, verbose=verbose)
        return directory_to_pif(scratch, verbose=verbose)
    finally:
        shutil.rmtree(scratch)
Process a tar file that contains DFT data .
5,887
def archive_to_pif(filename, verbose=0):
    """Parse DFT output contained in an archive file and return a PIF object.

    Raises:
        Exception: if the file is not a supported archive type.
    """
    if tarfile.is_tarfile(filename):
        # Bug fix: verbose was previously passed positionally, landing in
        # tarfile_to_pif's temp_root_dir parameter instead of verbose.
        return tarfile_to_pif(filename, verbose=verbose)
    raise Exception('Cannot process file type')
Given a archive file that contains output from a DFT calculation parse the data and return a PIF object .
5,888
def files_to_pif(files, verbose=0, quality_report=True, inline=True):
    """Given files containing output from a DFT calculation, parse the
    data and return a pif ChemicalSystem object.

    Args:
        files: files to hand to the candidate parsers.
        verbose: > 0 prints progress information.
        quality_report: attach a quality report for VASP runs.
        inline: skip conditions/properties that reference external files.

    Raises:
        Exception: if no parser accepts the files.
    """
    # Try each known parser; the first that accepts the files wins.
    found_parser = False
    for possible_parser in [PwscfParser, VaspParser]:
        try:
            parser = possible_parser(files)
            found_parser = True
            break
        except InvalidIngesterException:
            pass
    if not found_parser:
        raise Exception('Directory is not in correct format for an existing parser')
    if verbose > 0:
        print("Found a {} directory".format(parser.get_name()))
    chem = ChemicalSystem()
    chem.chemical_formula = parser.get_composition()
    software = Software(name=parser.get_name(), version=parser.get_version_number())
    method = Method(name='Density Functional Theory', software=[software])
    # Collect calculation settings as conditions shared by all properties.
    conditions = []
    for name, func in parser.get_setting_functions().items():
        cond = getattr(parser, func)()
        if cond is None:
            continue
        # Skip file-backed values when inlining.
        if inline and cond.files is not None:
            continue
        cond.name = name
        conditions.append(cond)
    chem.properties = []
    for name, func in parser.get_result_functions().items():
        prop = getattr(parser, func)()
        if prop is None:
            continue
        if inline and prop.files is not None:
            continue
        prop.name = name
        prop.methods = [method, ]
        prop.data_type = 'COMPUTATIONAL'
        if verbose > 0 and isinstance(prop, Value):
            print(name)
        # Attach the shared conditions, merging with any the property has.
        if prop.conditions is None:
            prop.conditions = conditions
        else:
            if not isinstance(prop.conditions, list):
                prop.conditions = [prop.conditions]
            prop.conditions.extend(conditions)
        chem.properties.append(prop)
    if quality_report and isinstance(parser, VaspParser):
        _add_quality_report(parser, chem)
    return chem
Given a directory that contains output from a DFT calculation parse the data and return a pif object
5,889
def wait_for_confirmation(provider, transaction_id):
    """Block until the transaction has at least one confirmation,
    polling the provider every 10 seconds."""
    while provider.gettransaction(transaction_id)["confirmations"] < 1:
        time.sleep(10)
Sleep on a loop until we see a confirmation of the transaction .
5,890
def validate_card_issue_modes(issue_mode: int, cards: list) -> list:
    """Filter *cards* through the parser of every issue-mode flag set in
    *issue_mode*; return [] if the mode is unsupported or any parser
    rejects everything."""
    supported_mask = 63  # bitmask covering all supported issue modes
    if not issue_mode & supported_mask:
        return []
    for flag in (1 << bit for bit in range(len(IssueMode))):
        if not flag & issue_mode:
            continue
        try:
            parser_fn = cast(Callable[[list], Optional[list]],
                             parsers[IssueMode(flag).name])
        except ValueError:
            continue
        parsed = parser_fn(cards)
        if not parsed:
            return []
        cards = parsed
    return cards
validate cards against deck_issue modes
5,891
def p2th_address(self) -> Optional[str]:
    """Return the P2TH address of this deck, or None when the deck has
    no id yet."""
    if not self.id:
        return None
    return Kutil(network=self.network,
                 privkey=bytearray.fromhex(self.id)).address
P2TH address of this deck
5,892
def p2th_wif(self) -> Optional[str]:
    """Return the P2TH private key in WIF format, or None when the deck
    has no id yet."""
    if not self.id:
        return None
    return Kutil(network=self.network,
                 privkey=bytearray.fromhex(self.id)).wif
P2TH privkey in WIF format
5,893
def metainfo_to_dict(self) -> dict:
    """Encode the deck metainfo into a dictionary; asset_specific_data is
    included only when set."""
    meta = {
        "version": self.version,
        "name": self.name,
        "number_of_decimals": self.number_of_decimals,
        "issue_mode": self.issue_mode,
    }
    if self.asset_specific_data:
        meta["asset_specific_data"] = self.asset_specific_data
    return meta
encode deck into dictionary
5,894
def to_json(self) -> dict:
    """Export the Deck object to a JSON-ready dict.

    Bug fix: operate on a copy of ``self.__dict__`` — the original
    assigned into the live attribute dict, permanently injecting a
    'p2th_wif' entry into the instance as a side effect.
    """
    d = dict(self.__dict__)
    d['p2th_wif'] = self.p2th_wif
    return d
export the Deck object to json - ready format
5,895
def metainfo_to_dict(self) -> dict:
    """Encode the card metainfo into a dictionary; asset_specific_data is
    included only when set."""
    meta = {
        "version": self.version,
        "amount": self.amount,
        "number_of_decimals": self.number_of_decimals,
    }
    if self.asset_specific_data:
        meta["asset_specific_data"] = self.asset_specific_data
    return meta
encode card into dictionary
5,896
def _sort_cards ( self , cards : Generator ) -> list : return sorted ( [ card . __dict__ for card in cards ] , key = itemgetter ( 'blocknum' , 'blockseq' , 'cardseq' ) )
sort cards by blocknum and blockseq
5,897
def main():
    """Read GeoTiff raster data, log-transform its valid values, and write
    the result to a new GeoTiff."""
    input_tif = "../tests/data/Jamaica_dem.tif"
    output_tif = "../tests/data/tmp_results/log_dem.tif"
    raster = RasterUtilClass.read_raster(input_tif)
    transformed = np.log(raster.validValues)
    RasterUtilClass.write_gtiff_file(output_tif, raster.nRows, raster.nCols,
                                     transformed, raster.geotrans, raster.srs,
                                     raster.noDataValue, raster.dataType)
Read GeoTiff raster data and perform log transformation .
5,898
def val_factory(val, datatypes):
    """Return *val* as an instance of one of *datatypes*, trying each type
    in order; collected failures are included in the final error.

    Raises:
        ValueError: if no datatype accepts the value.
    """
    failures = []
    for candidate in datatypes:
        try:
            if isinstance(val, candidate):
                return val
            return type_handler_object(val, candidate)
        except Exception as exc:
            failures.append(str(exc))
    raise ValueError(
        'val_factory: Unable to instantiate {val} from types {types}. Exceptions: {excs}'.format(
            val=val, types=datatypes, excs=failures))
return an instance of val that is of type datatype . keep track of exceptions so we can produce meaningful error messages .
5,899
def handler_for(obj):
    """Return the registered handler for obj: first by instance type,
    then (if obj is a class) by subclass; None when nothing matches."""
    for registered_type, handler in handlers.items():
        if isinstance(obj, registered_type):
            return handler
    try:
        for registered_type, handler in handlers.items():
            if issubclass(obj, registered_type):
                return handler
    except TypeError:
        # obj is not a class; no subclass-based handler applies.
        pass
return the handler for the object type