idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
5,100 | def _format_variant ( self , case_id , gemini_variant , individual_objs , index = 0 , add_all_info = False ) : chrom = gemini_variant [ 'chrom' ] if chrom . startswith ( 'chr' ) or chrom . startswith ( 'CHR' ) : chrom = chrom [ 3 : ] variant_dict = { 'CHROM' : chrom , 'POS' : str ( gemini_variant [ 'start' ] ) , 'ID' : gemini_variant [ 'rs_ids' ] , 'REF' : gemini_variant [ 'ref' ] , 'ALT' : gemini_variant [ 'alt' ] , 'QUAL' : gemini_variant [ 'qual' ] , 'FILTER' : gemini_variant [ 'filter' ] } variant = Variant ( ** variant_dict ) variant . update_variant_id ( gemini_variant [ 'variant_id' ] ) logger . debug ( "Creating a variant object of variant {0}" . format ( variant . variant_id ) ) variant [ 'index' ] = index self . _add_most_severe_consequence ( variant , gemini_variant ) self . _add_impact_severity ( variant , gemini_variant ) variant . start = int ( gemini_variant [ 'start' ] ) variant . stop = int ( gemini_variant [ 'end' ] ) if self . variant_type == 'sv' : variant . sv_type = gemini_variant [ 'sub_type' ] variant . stop = int ( gemini_variant [ 'end' ] ) self . _add_sv_coordinates ( variant ) else : self . _add_transcripts ( variant , gemini_variant ) self . _add_thousand_g ( variant , gemini_variant ) self . _add_exac ( variant , gemini_variant ) self . _add_gmaf ( variant , gemini_variant ) if gemini_variant [ 'cadd_scaled' ] : variant . cadd_score = gemini_variant [ 'cadd_scaled' ] polyphen = gemini_variant [ 'polyphen_pred' ] if polyphen : variant . add_severity ( 'Polyphen' , polyphen ) sift = gemini_variant [ 'sift_pred' ] if sift : variant . add_severity ( 'SIFT' , sift ) self . _add_hgnc_symbols ( variant ) if self . variant_type == 'snv' : self . _add_genes ( variant ) self . _add_consequences ( variant ) if add_all_info : self . _add_genotypes ( variant , gemini_variant , case_id , individual_objs ) if self . variant_type == 'sv' : self . _add_genes ( variant ) return variant | Make a puzzle variant from a gemini variant |
5,101 | def _is_variant ( self , gemini_variant , ind_objs ) : indexes = ( ind . ind_index for ind in ind_objs ) for index in indexes : gt_call = gemini_variant [ 'gt_types' ] [ index ] if ( gt_call == 1 or gt_call == 3 ) : return True return False | Check if the variant is a variation in any of the individuals |
5,102 | def is_affected ( self ) : phenotype = self . phenotype if phenotype == '1' : return False elif phenotype == '2' : return True else : return False | Boolean for telling if the sample is affected . |
5,103 | def gene_list ( self , list_id ) : return self . query ( GeneList ) . filter_by ( list_id = list_id ) . first ( ) | Get a gene list from the database . |
5,104 | def add_genelist ( self , list_id , gene_ids , case_obj = None ) : new_genelist = GeneList ( list_id = list_id ) new_genelist . gene_ids = gene_ids if case_obj : new_genelist . cases . append ( case_obj ) self . session . add ( new_genelist ) self . save ( ) return new_genelist | Create a new gene list and optionally link to cases . |
5,105 | def remove_genelist ( self , list_id , case_obj = None ) : gene_list = self . gene_list ( list_id ) if case_obj : case_ids = [ case_obj . id ] else : case_ids = [ case . id for case in gene_list . cases ] self . session . delete ( gene_list ) case_links = self . query ( CaseGenelistLink ) . filter ( CaseGenelistLink . case_id . in_ ( case_ids ) , CaseGenelistLink . genelist_id == gene_list . id ) for case_link in case_links : self . session . delete ( case_link ) self . save ( ) | Remove a gene list and links to cases . |
5,106 | def case_genelist ( self , case_obj ) : list_id = "{}-HPO" . format ( case_obj . case_id ) gene_list = self . gene_list ( list_id ) if gene_list is None : gene_list = GeneList ( list_id = list_id ) case_obj . gene_lists . append ( gene_list ) self . session . add ( gene_list ) return gene_list | Get or create a new case specific gene list record . |
5,107 | def add_bigger_box ( self ) : start1 = "width='" + str ( int ( self . molecule . molsize1 ) ) + "px' height='" + str ( int ( self . molecule . molsize2 ) ) + "px' >" start2 = "<rect style='opacity:1.0;fill:#FFFFFF;stroke:none' width='" + str ( int ( self . molecule . molsize1 ) ) + "' height='" + str ( int ( self . molecule . molsize2 ) ) + "' x='0' y='0'> </rect>" bigger_box = "width='100%' height='100%' viewbox='0 0 " + str ( int ( self . molecule . x_dim ) ) + " " + str ( int ( self . molecule . y_dim ) ) + "' > " big_box2 = "<rect style='opacity:1.0;fill:white;stroke:none' width='" + str ( int ( self . molecule . x_dim ) ) + "px' height='" + str ( int ( self . molecule . y_dim ) ) + "px' x='0' y='0'> </rect> <g id='molecularDrawing' transform='translate(" + str ( ( self . molecule . x_dim - self . molecule . molsize1 ) / 2 ) + "," + str ( ( self . molecule . y_dim - self . molecule . molsize2 ) / 2 ) + ")'>'<rect style='opacity:1.0;fill:#ffffff;stroke:none' width='" + str ( self . molecule . molsize1 ) + "' height='" + str ( self . molecule . molsize2 ) + "' x='0' y='0' /> " self . end_symbol = "</svg>" no_end_symbol = "</g>" linewidth1 = "stroke-width:2px" linewidth2 = "stroke-width:5px" self . change_lines_in_svg ( "molecule.svg" , linewidth1 , linewidth2 ) self . change_lines_in_svg ( "molecule.svg" , start1 , bigger_box ) self . change_lines_in_svg ( "molecule.svg" , start2 , big_box2 ) self . change_lines_in_svg ( "molecule.svg" , self . end_symbol , no_end_symbol ) with open ( "molecule.svg" , "r" ) as f : lines = f . readlines ( ) self . filestart = " " . join ( map ( str , lines [ 0 : 8 ] ) ) self . draw_molecule = "" . join ( map ( str , lines [ 8 : ] ) ) f . close ( ) | Sets the size of the figure by expanding the space of molecule . svg file . These dimension have been previously determined . Also makes the lines of the molecule thicker . |
5,108 | def extend_with ( func ) : if not func . __name__ in ArgParseInator . _plugins : ArgParseInator . _plugins [ func . __name__ ] = func | Extends with class or function |
5,109 | def arg ( * args , ** kwargs ) : def decorate ( func ) : func . __cmd_name__ = kwargs . pop ( 'cmd_name' , getattr ( func , '__cmd_name__' , func . __name__ ) ) func . __cls__ = utils . check_class ( ) if not hasattr ( func , '__arguments__' ) : func . __arguments__ = utils . get_functarguments ( func ) if len ( args ) or len ( kwargs ) : arg_name = kwargs . get ( 'dest' , args [ - 1 ] . lstrip ( '-' ) . replace ( '-' , '_' ) ) try : idx = func . __named__ . index ( arg_name ) del func . __named__ [ idx ] del func . __arguments__ [ idx ] except ValueError : pass func . __arguments__ . append ( ( args , kwargs , ) ) if func . __cls__ is None and isinstance ( func , types . FunctionType ) : ap_ = ArgParseInator ( skip_init = True ) if func . __cmd_name__ not in ap_ . commands : ap_ . commands [ func . __cmd_name__ ] = func return func return decorate | Decorates a function or a class method to add to the argument parser |
5,110 | def class_args ( cls ) : ap_ = ArgParseInator ( skip_init = True ) utils . collect_appendvars ( ap_ , cls ) cls . __cls__ = cls cmds = { } cls . __arguments__ = getattr ( cls , '__arguments__' , [ ] ) for func in [ f for f in cls . __dict__ . values ( ) if hasattr ( f , '__cmd_name__' ) and not inspect . isclass ( f ) ] : func . __subcommands__ = None func . __cls__ = cls cmds [ func . __cmd_name__ ] = func if hasattr ( cls , '__cmd_name__' ) and cls . __cmd_name__ not in ap_ . commands : cls . __subcommands__ = cmds ap_ . commands [ cls . __cmd_name__ ] = cls else : for name , func in cmds . items ( ) : if name not in ap_ . commands : ap_ . commands [ name ] = func return cls | Decorates a class to handle the arguments parser . |
5,111 | def cmd_auth ( auth_phrase = None ) : def decorate ( func ) : ap_ = ArgParseInator ( skip_init = True ) auth_name = id ( func ) if auth_phrase is None : ap_ . auths [ auth_name ] = True else : ap_ . auths [ auth_name ] = str ( auth_phrase ) return func return decorate | Set authorization for a command or subcommand . |
5,112 | def parse_args ( self ) : self . _compile ( ) self . args = None self . _self_event ( 'before_parse' , 'parse' , * sys . argv [ 1 : ] , ** { } ) cmds = [ cmd for cmd in sys . argv [ 1 : ] if not cmd . startswith ( "-" ) ] if ( len ( cmds ) > 0 and not utils . check_help ( ) and self . default_cmd and cmds [ 0 ] not in self . commands ) : sys . argv . insert ( 1 , self . default_cmd ) self . args = self . parser . parse_args ( ) if self . args : if self . add_output and self . args . output is not None : self . encoding = self . args . encoding if self . args . encoding . lower ( ) == 'raw' : self . _output = open ( self . args . output , self . args . write_mode ) else : import codecs self . _output = codecs . open ( self . args . output , self . args . write_mode , encoding = self . args . encoding ) if self . _cfg_factory : self . cfg_file = self . args . config self . _is_parsed = True return self | Parse our arguments . |
5,113 | def check_auth ( self , name ) : if name in self . auths : auth = self . auths [ name ] if self . args . auth is None : raise exceptions . ArgParseInatorAuthorizationRequired elif ( ( auth is True and self . args . auth != self . auth_phrase ) or ( auth is not True and self . args . auth != auth ) ) : raise exceptions . ArgParseInatorNotValidAuthorization return True | Check the authorization for the command |
5,114 | def check_command ( self , ** new_attributes ) : if not self . _is_parsed : self . parse_args ( ) if not self . commands : raise exceptions . ArgParseInatorNoCommandsFound elif self . _single : func = self . _single else : if not self . args . command : self . parser . error ( "too few arguments" ) func = self . commands [ self . args . command ] if hasattr ( func , '__subcommands__' ) and func . __subcommands__ : command = func . __subcommands__ [ self . args . subcommand ] else : command = func self . cmd_name = command . __cmd_name__ if not self . check_auth ( id ( command ) ) : return 0 return self . _execute ( func , command , ** new_attributes ) | Check if was passed a valid action in the command line and if so executes it by passing parameters and returning the result . |
5,115 | def _call_event ( self , event_name , cmd , pargs , kwargs , ** kws ) : def get_result_params ( res ) : if not isinstance ( res , ( list , tuple ) ) : return res , pargs , kwargs elif len ( res ) == 2 : return res , pargs , kwargs return res [ 0 ] , ( pargs [ 0 ] , ) + tuple ( res [ 1 ] ) , kwargs if hasattr ( cmd , event_name ) : return get_result_params ( getattr ( cmd , event_name ) ( pargs [ 0 ] , * pargs [ 1 : ] , ** kwargs ) ) elif hasattr ( cmd . __cls__ , event_name ) : return get_result_params ( getattr ( cmd . __cls__ , event_name ) ( pargs [ 0 ] , cmd . __cmd_name__ or cmd . __name__ , * pargs [ 1 : ] , ** kwargs ) ) return None , pargs , kwargs | Try to call events for cmd . |
5,116 | def _self_event ( self , event_name , cmd , * pargs , ** kwargs ) : if hasattr ( self , event_name ) : getattr ( self , event_name ) ( cmd , * pargs , ** kwargs ) | Call self event |
5,117 | def write ( self , * string ) : self . _output . write ( ' ' . join ( [ six . text_type ( s ) for s in string ] ) ) return self | Writes to the output |
5,118 | def exit ( self , status = EXIT_OK , message = None ) : if not self . parser : self . parser = argparse . ArgumentParser ( ) if self . msg_on_error_only : if status != EXIT_OK : self . parser . exit ( status , message ) else : self . parser . exit ( status , None ) else : self . parser . exit ( status , message ) | Terminate the script . |
5,119 | def analyse_ligand_sasa ( self ) : i = 0 start = timer ( ) if self . trajectory == [ ] : self . trajectory = [ self . topology_data . universe . filename ] try : for traj in self . trajectory : new_traj = mdtraj . load ( traj , top = self . topology_data . universe . filename ) ligand_slice = new_traj . atom_slice ( atom_indices = self . topology_data . universe . ligand_noH . ids ) self . sasa = mdtraj . shrake_rupley ( ligand_slice ) self . atom_sasa [ i ] = self . assign_per_atom_sasa ( ) i += 1 self . total_sasa = self . get_total_per_atom_sasa ( ) except KeyError as e : print "WARNING: SASA analysis cannot be performed due to incorrect atom names in" print "the topology " , e print "SASA: " + str ( timer ( ) - start ) | Analysis of ligand SASA . |
5,120 | def assign_per_atom_sasa ( self ) : atom_names = [ atom . name for atom in self . topology_data . universe . ligand_noH . atoms ] sasa_dict = { } for atom in range ( 0 , self . topology_data . universe . ligand_noH . n_atoms ) : sasa_dict [ atom_names [ atom ] ] = [ self . sasa [ i ] [ atom ] for i in range ( len ( self . sasa ) ) ] return sasa_dict | Make a dictionary with SASA assigned to each ligand atom stored as list of SASA values over the simulation time . |
5,121 | def get_total_per_atom_sasa ( self ) : total_sasa = defaultdict ( int ) for traj in range ( len ( self . atom_sasa ) ) : for atom in self . atom_sasa [ traj ] : total_sasa [ atom ] += float ( sum ( ( self . atom_sasa [ traj ] [ atom ] ) ) ) / len ( self . atom_sasa [ traj ] [ atom ] ) for atom in total_sasa : total_sasa [ atom ] = float ( total_sasa [ atom ] ) / len ( self . atom_sasa ) return total_sasa | Return average SASA of the atoms . |
5,122 | def run ( self , * args ) : if self . running : return self self . _mut_finished ( False ) self . _mut_running ( True ) stream = self . target ( * args ) def subr ( ) : self . _mut_running ( True ) try : for each in stream : self . _product = each desc = self . descriptor_mapping ( each ) event = self . events . get ( desc ) if event : event ( self , each , globals ) self . _mut_finished ( True ) except ThreadExit : pass finally : self . _mut_running ( False ) self . _thread = thread = threading . Thread ( target = subr , args = ( ) ) thread . start ( ) return self | You can choose whether to use lock method when running threads . |
5,123 | def _add_consequences ( self , variant_obj ) : consequences = set ( ) for transcript in variant_obj . transcripts : for consequence in transcript . consequence . split ( '&' ) : consequences . add ( consequence ) variant_obj . consequences = list ( consequences ) | Add the consequences found in all transcripts |
5,124 | def _add_hgnc_symbols ( self , variant_obj ) : hgnc_symbols = set ( ) if variant_obj . transcripts : for transcript in variant_obj . transcripts : if transcript . hgnc_symbol : hgnc_symbols . add ( transcript . hgnc_symbol ) else : chrom = variant_obj . CHROM start = variant_obj . start stop = variant_obj . stop hgnc_symbols = get_gene_symbols ( chrom , start , stop ) variant_obj . gene_symbols = list ( hgnc_symbols ) | Add hgnc symbols to the variant If there are transcripts use the symbols found here otherwise use phizz to get the gene ids . |
5,125 | def _add_genes ( self , variant_obj ) : genes = [ ] ensembl_ids = [ ] hgnc_symbols = [ ] if variant_obj . transcripts : for transcript in variant_obj . transcripts : if transcript . ensembl_id : ensembl_ids . append ( transcript . ensembl_id ) if transcript . hgnc_symbol : hgnc_symbols . append ( transcript . hgnc_symbol ) else : hgnc_symbols = variant_obj . gene_symbols genes = get_gene_info ( ensembl_ids = ensembl_ids , hgnc_symbols = hgnc_symbols ) for gene in genes : variant_obj . add_gene ( gene ) | Add the Gene objects for a variant |
5,126 | def _redis_strict_pc ( func ) : phase = "session_%s" % func . __name__ @ functools . wraps ( func ) def wrapper ( self , session , * args , ** kwargs ) : try : func ( self , session , * args , ** kwargs ) self . logger . debug ( "%s -> %s" % ( session . meepo_unique_id , phase ) ) return True except Exception as e : if self . strict : raise if isinstance ( e , redis . ConnectionError ) : self . logger . warn ( "redis connection error in %s: %s" % ( phase , session . meepo_unique_id ) ) else : self . logger . exception ( e ) return False return wrapper | Strict deco for RedisPrepareCommit |
5,127 | def phase ( self , session ) : sp_key , _ = self . _keygen ( session ) if self . r . sismember ( sp_key , session . meepo_unique_id ) : return "prepare" else : return "commit" | Determine the session phase in prepare commit . |
5,128 | def prepare ( self , session , event ) : if not event : self . logger . warn ( "event empty!" ) return sp_key , sp_hkey = self . _keygen ( session ) def _pk ( obj ) : pk_values = tuple ( getattr ( obj , c . name ) for c in obj . __mapper__ . primary_key ) if len ( pk_values ) == 1 : return pk_values [ 0 ] return pk_values def _get_dump_value ( value ) : if hasattr ( value , '__mapper__' ) : return _pk ( value ) return value pickled_event = { k : pickle . dumps ( { _get_dump_value ( obj ) for obj in objs } ) for k , objs in event . items ( ) } with self . r . pipeline ( transaction = False ) as p : p . sadd ( sp_key , session . meepo_unique_id ) p . hmset ( sp_hkey , pickled_event ) p . execute ( ) | Prepare phase for session . |
5,129 | def commit ( self , session ) : sp_key , sp_hkey = self . _keygen ( session ) with self . r . pipeline ( transaction = False ) as p : p . srem ( sp_key , session . meepo_unique_id ) p . expire ( sp_hkey , 60 * 60 ) p . execute ( ) | Commit phase for session . |
5,130 | def clear ( self , ts = None ) : sp_key = "%s:session_prepare" % self . namespace ( ts or int ( time . time ( ) ) ) return self . r . delete ( sp_key ) | Clear all session in prepare phase . |
5,131 | def cases ( ctx , root ) : root = root or ctx . obj . get ( 'root' ) or os . path . expanduser ( "~/.puzzle" ) if os . path . isfile ( root ) : logger . error ( "'root' can't be a file" ) ctx . abort ( ) logger . info ( "Root directory is: {}" . format ( root ) ) db_path = os . path . join ( root , 'puzzle_db.sqlite3' ) logger . info ( "db path is: {}" . format ( db_path ) ) if not os . path . exists ( db_path ) : logger . warn ( "database not initialized, run 'puzzle init'" ) ctx . abort ( ) store = SqlStore ( db_path ) for case in store . cases ( ) : click . echo ( case ) | Show all cases in the database . |
5,132 | def init ( name , subnames , dest , skeleton , description , project_type , skip_core ) : dest = dest or CUR_DIR skeleton = join ( skeleton or SKEL_PATH , project_type ) project = join ( dest , name ) script = join ( project , name + '.py' ) core = join ( project , name ) if project_type == 'standalone' : renames = [ ( join ( project , 'project.py' ) , script ) , ( join ( project , 'project' ) , core ) ] copy_skeleton ( name , skeleton , project , renames = renames , description = description , ignore = False ) else : renames = [ ( join ( project , 'project.py' ) , script ) , ( join ( project , 'project' ) , core ) ] exclude_dirs = [ 'submodule' ] + ( [ 'project' ] if skip_core else [ ] ) copy_skeleton ( name , skeleton , project , renames = renames , description = description , exclude_dirs = exclude_dirs , ignore = True ) for subname in subnames : renames = [ ( join ( project , 'submodule' ) , join ( project , subname ) ) ] copy_skeleton ( subname , skeleton , project , renames = renames , description = description , ignore = True , exclude_dirs = [ 'project' ] , exclude_files = [ 'project.py' ] ) return 0 , "\n{}\n" . format ( project ) | Creates a standalone subprojects or submodules script structure |
5,133 | def _check ( self ) : for k , ix in six . iteritems ( self . _indices ) : assert k is not None , 'null key' assert ix , 'Key does not map to any indices' assert ix == sorted ( ix ) , "Key's indices are not in order" for i in ix : assert i in self . _lines , 'Key index does not map to line' assert self . _lines [ i ] . key is not None , 'Key maps to comment' assert self . _lines [ i ] . key == k , 'Key does not map to itself' assert self . _lines [ i ] . value is not None , 'Key has null value' prev = None for i , line in six . iteritems ( self . _lines ) : assert prev is None or prev < i , 'Line indices out of order' prev = i if line . key is None : assert line . value is None , 'Comment/blank has value' assert line . source is not None , 'Comment source not stored' assert loads ( line . source ) == { } , 'Comment source is not comment' else : assert line . value is not None , 'Key has null value' if line . source is not None : assert loads ( line . source ) == { line . key : line . value } , 'Key source does not deserialize to itself' assert line . key in self . _indices , 'Key is missing from map' assert i in self . _indices [ line . key ] , 'Key does not map to itself' | Assert the internal consistency of the instance s data structures . This method is for debugging only . |
5,134 | def load ( cls , fp ) : obj = cls ( ) for i , ( k , v , src ) in enumerate ( parse ( fp ) ) : if k is not None : obj . _indices . setdefault ( k , [ ] ) . append ( i ) obj . _lines [ i ] = PropertyLine ( k , v , src ) return obj | Parse the contents of the ~io . IOBase . readline - supporting file - like object fp as a simple line - oriented . properties file and return a PropertiesFile instance . |
5,135 | def loads ( cls , s ) : if isinstance ( s , six . binary_type ) : fp = six . BytesIO ( s ) else : fp = six . StringIO ( s ) return cls . load ( fp ) | Parse the contents of the string s as a simple line - oriented . properties file and return a PropertiesFile instance . |
5,136 | def dump ( self , fp , separator = '=' ) : for line in six . itervalues ( self . _lines ) : if line . source is None : print ( join_key_value ( line . key , line . value , separator ) , file = fp ) else : fp . write ( line . source ) | Write the mapping to a file in simple line - oriented . properties format . |
5,137 | def dumps ( self , separator = '=' ) : s = six . StringIO ( ) self . dump ( s , separator = separator ) return s . getvalue ( ) | Convert the mapping to a text string in simple line - oriented . properties format . |
5,138 | def copy ( self ) : dup = type ( self ) ( ) dup . _indices = OrderedDict ( ( k , list ( v ) ) for k , v in six . iteritems ( self . _indices ) ) dup . _lines = self . _lines . copy ( ) return dup | Create a copy of the mapping including formatting information |
5,139 | def prepare_normal_vectors ( atomselection ) : ring_atomselection = [ atomselection . coordinates ( ) [ a ] for a in [ 0 , 2 , 4 ] ] vect1 = self . vector ( ring_atomselection [ 0 ] , ring_atomselection [ 1 ] ) vect2 = self . vector ( ring_atomselection [ 2 ] , ring_atomselection [ 0 ] ) return self . normalize_vector ( np . cross ( vect1 , vect2 ) ) | Create and normalize a vector across ring plane . |
5,140 | def refresh_session_if_necessary ( f ) : @ functools . wraps ( f ) def wrapped ( self , * args , ** kwargs ) : try : result = f ( self , * args , ** kwargs ) except Exception as ex : if hasattr ( ex , 'code' ) and ex . code in ( 401 , 403 ) : self . refresh_session ( ) result = f ( self , * args , ** kwargs ) else : raise ex return result return wrapped | Decorator to use on methods that are allowed to retry the request after reauthenticating the client . |
5,141 | def init_db ( db_path ) : logger . info ( "Creating database" ) with closing ( connect_database ( db_path ) ) as db : with open ( SCHEMA , 'r' ) as f : db . cursor ( ) . executescript ( f . read ( ) ) db . commit ( ) return | Build the sqlite database |
5,142 | def merge ( self ) : self . merged_root = self . _recursive_merge ( self . root , self . head , self . update ) if self . conflicts : raise MergeError ( 'Conflicts Occurred in Merge Process' , self . conflicts ) | Populates result members . |
5,143 | def hpo_genes ( phenotype_ids , username , password ) : if phenotype_ids : try : results = query_phenomizer . query ( username , password , phenotype_ids ) return [ result for result in results if result [ 'p_value' ] is not None ] except SystemExit , RuntimeError : pass return None | Return list of HGNC symbols matching HPO phenotype ids . |
5,144 | def mangle_form ( form ) : "Utility to monkeypatch forms into paperinputs, untested" for field , widget in form . fields . iteritems ( ) : if type ( widget ) is forms . widgets . TextInput : form . fields [ field ] . widget = PaperTextInput ( ) form . fields [ field ] . label = '' if type ( widget ) is forms . widgets . PasswordInput : field . widget = PaperPasswordInput ( ) field . label = '' return form | Utility to monkeypatch forms into paperinputs untested |
5,145 | def _keygen ( self , event , ts = None ) : return "%s:%s" % ( self . namespace ( ts or time . time ( ) ) , event ) | Generate redis key for event at timestamp . |
5,146 | def _zadd ( self , key , pk , ts = None , ttl = None ) : return self . r . eval ( self . LUA_ZADD , 1 , key , ts or self . _time ( ) , pk ) | Redis lua func to add an event to the corresponding sorted set . |
5,147 | def add ( self , event , pk , ts = None , ttl = None ) : key = self . _keygen ( event , ts ) try : self . _zadd ( key , pk , ts , ttl ) return True except redis . ConnectionError as e : self . logger . error ( "redis event store failed with connection error %r" % e ) return False | Add an event to event store . |
5,148 | def replay ( self , event , ts = 0 , end_ts = None , with_ts = False ) : key = self . _keygen ( event , ts ) end_ts = end_ts if end_ts else "+inf" elements = self . r . zrangebyscore ( key , ts , end_ts , withscores = with_ts ) if not with_ts : return [ s ( e ) for e in elements ] else : return [ ( s ( e [ 0 ] ) , int ( e [ 1 ] ) ) for e in elements ] | Replay events based on timestamp . |
5,149 | def query ( self , event , pk , ts = None ) : key = self . _keygen ( event , ts ) pk_ts = self . r . zscore ( key , pk ) return int ( pk_ts ) if pk_ts else None | Query the last update timestamp of an event pk . |
5,150 | def clear ( self , event , ts = None ) : return self . r . delete ( self . _keygen ( event , ts ) ) | Clear all stored record of event . |
5,151 | def add_configuration_file ( self , file_name ) : logger . info ( 'adding %s to configuration files' , file_name ) if file_name not in self . configuration_files and self . _inotify : self . _watch_manager . add_watch ( file_name , pyinotify . IN_MODIFY ) if os . access ( file_name , os . R_OK ) : self . configuration_files [ file_name ] = SafeConfigParser ( ) self . configuration_files [ file_name ] . read ( file_name ) else : logger . warn ( 'could not read %s' , file_name ) warnings . warn ( 'could not read {}' . format ( file_name ) , ResourceWarning ) | Register a file path from which to read parameter values . |
5,152 | def add_parameter ( self , ** kwargs ) : parameter_name = max ( kwargs [ 'options' ] , key = len ) . lstrip ( '-' ) if 'dest' in kwargs : parameter_name = kwargs [ 'dest' ] group = kwargs . pop ( 'group' , 'default' ) self . groups . add ( group ) parameter_name = '.' . join ( [ group , parameter_name ] ) . lstrip ( '.' ) . replace ( '-' , '_' ) logger . info ( 'adding parameter %s' , parameter_name ) if self . parsed : logger . warn ( 'adding parameter %s after parse' , parameter_name ) warnings . warn ( 'adding parameter {} after parse' . format ( parameter_name ) , RuntimeWarning ) self . parameters [ parameter_name ] = copy . copy ( kwargs ) self . parameters [ parameter_name ] [ 'group' ] = group self . parameters [ parameter_name ] [ 'type' ] = kwargs . get ( 'type' , str ) self . parameters [ parameter_name ] [ 'environment_prefix' ] = kwargs . pop ( 'environment_prefix' , os . path . basename ( sys . argv [ 0 ] ) ) if self . parameters [ parameter_name ] [ 'environment_prefix' ] is not None : self . parameters [ parameter_name ] [ 'environment_prefix' ] = self . parameters [ parameter_name ] [ 'environment_prefix' ] . upper ( ) . replace ( '-' , '_' ) logger . info ( 'group: %s' , group ) self . grouped_parameters . setdefault ( group , { } ) . setdefault ( parameter_name . replace ( group + '.' , '' ) , self . parameters [ parameter_name ] ) action_defaults = { 'store' : kwargs . get ( 'default' ) , 'store_const' : kwargs . get ( 'const' ) , 'store_true' : False , 'store_false' : True , 'append' : [ ] , 'append_const' : [ ] , 'count' : 0 , } self . defaults [ parameter_name ] = action_defaults [ kwargs . get ( 'action' , 'store' ) ] logger . info ( 'default value: %s' , kwargs . get ( 'default' ) ) if 'argument' in kwargs . pop ( 'only' , [ 'argument' ] ) : if group not in self . _group_parsers : self . _group_parsers [ group ] = self . _group_parsers [ 'default' ] . add_argument_group ( group ) if self . 
_group_prefix and group != 'default' : long_option = max ( kwargs [ 'options' ] , key = len ) kwargs [ 'options' ] . remove ( long_option ) kwargs [ 'options' ] . append ( long_option . replace ( '--' , '--' + group . replace ( '_' , '-' ) + '-' ) ) logger . debug ( 'options: %s' , kwargs [ 'options' ] ) self . _group_parsers [ group ] . add_argument ( * kwargs . pop ( 'options' ) , ** kwargs ) | Add the parameter to Parameters . |
5,153 | def parse ( self , only_known = False ) : self . parsed = not only_known or self . parsed logger . info ( 'parsing parameters' ) logger . debug ( 'sys.argv: %s' , sys . argv ) if only_known : args = [ _ for _ in copy . copy ( sys . argv ) if not re . match ( '-h|--help' , _ ) ] self . _group_parsers [ 'default' ] . parse_known_args ( args = args , namespace = self . _argument_namespace ) else : self . _group_parsers [ 'default' ] . parse_args ( namespace = self . _argument_namespace ) | Ensure all sources are ready to be queried . |
5,154 | def read_configuration_files ( self ) : for file_name , configuration_parser in self . configuration_files . items ( ) : if os . access ( file_name , os . R_OK ) : configuration_parser . read ( file_name ) else : logger . warn ( 'could not read %s' , file_name ) warnings . warn ( 'could not read {}' . format ( file_name ) , ResourceWarning ) | Explicitly read the configuration files . |
5,155 | def nr_genes ( self ) : if self [ 'genes' ] : nr_genes = len ( self [ 'genes' ] ) else : nr_genes = len ( self [ 'gene_symbols' ] ) return nr_genes | Return the number of genes |
5,156 | def display_name ( self ) : if self . is_snv : gene_ids = self . gene_symbols [ : 2 ] return ', ' . join ( gene_ids ) else : return "{this.cytoband_start} ({this.sv_len})" . format ( this = self ) | Readable name for the variant . |
5,157 | def md5 ( self ) : return hashlib . md5 ( '_' . join ( [ self . CHROM , str ( self . POS ) , self . REF , self . ALT ] ) ) . hexdigest ( ) | Return a md5 key string based on position ref and alt |
5,158 | def add_frequency ( self , name , value ) : logger . debug ( "Adding frequency {0} with value {1} to variant {2}" . format ( name , value , self [ 'variant_id' ] ) ) self [ 'frequencies' ] . append ( { 'label' : name , 'value' : value } ) | Add a frequency that will be displayed on the variant level |
5,159 | def set_max_freq ( self , max_freq = None ) : if max_freq : self [ 'max_freq' ] = max_freq else : for frequency in self [ 'frequencies' ] : if self [ 'max_freq' ] : if frequency [ 'value' ] > self [ 'max_freq' ] : self [ 'max_freq' ] = frequency [ 'value' ] else : self [ 'max_freq' ] = frequency [ 'value' ] return | Set the max frequency for the variant |
5,160 | def add_severity ( self , name , value ) : logger . debug ( "Adding severity {0} with value {1} to variant {2}" . format ( name , value , self [ 'variant_id' ] ) ) self [ 'severities' ] . append ( { name : value } ) | Add a severity to the variant |
5,161 | def add_individual ( self , genotype ) : logger . debug ( "Adding genotype {0} to variant {1}" . format ( genotype , self [ 'variant_id' ] ) ) self [ 'individuals' ] . append ( genotype ) | Add the information for a individual |
5,162 | def add_transcript ( self , transcript ) : logger . debug ( "Adding transcript {0} to variant {1}" . format ( transcript , self [ 'variant_id' ] ) ) self [ 'transcripts' ] . append ( transcript ) | Add the information transcript |
5,163 | def add_gene ( self , gene ) : logger . debug ( "Adding gene {0} to variant {1}" . format ( gene , self [ 'variant_id' ] ) ) self [ 'genes' ] . append ( gene ) | Add the information of a gene |
5,164 | def add_compound ( self , compound ) : logger . debug ( "Adding compound {0} to variant {1}" . format ( compound , self [ 'variant_id' ] ) ) self [ 'compounds' ] . append ( compound ) | Add the information of a compound variant |
5,165 | def _set_variant_id ( self , variant_id = None ) : if not variant_id : variant_id = '_' . join ( [ self . CHROM , str ( self . POS ) , self . REF , self . ALT ] ) logger . debug ( "Updating variant id to {0}" . format ( variant_id ) ) self [ 'variant_id' ] = variant_id | Set the variant id for this variant |
5,166 | def move_to_result ( self , lst_idx ) : self . in_result_idx . add ( lst_idx ) if lst_idx in self . not_in_result_root_match_idx : self . not_in_result_root_match_idx . remove ( lst_idx ) | Moves element from lst available at lst_idx . |
5,167 | def add_root_match ( self , lst_idx , root_idx ) : self . root_matches [ lst_idx ] = root_idx if lst_idx in self . in_result_idx : return self . not_in_result_root_match_idx . add ( lst_idx ) | Adds a match for the elements avaialble at lst_idx and root_idx . |
5,168 | def _add_transcripts ( self , variant_obj , gemini_variant ) : query = "SELECT * from variant_impacts WHERE variant_id = {0}" . format ( gemini_variant [ 'variant_id' ] ) gq = GeminiQuery ( self . db ) gq . run ( query ) for gemini_transcript in gq : transcript = Transcript ( hgnc_symbol = gemini_transcript [ 'gene' ] , transcript_id = gemini_transcript [ 'transcript' ] , consequence = gemini_transcript [ 'impact_so' ] , biotype = gemini_transcript [ 'biotype' ] , polyphen = gemini_transcript [ 'polyphen_pred' ] , sift = gemini_transcript [ 'sift_pred' ] , HGVSc = gemini_transcript [ 'codon_change' ] , HGVSp = ', ' . join ( [ gemini_transcript [ 'aa_change' ] or '' , gemini_transcript [ 'aa_length' ] or '' ] ) ) variant_obj . add_transcript ( transcript ) | Add all transcripts for a variant |
5,169 | def mysql_pub ( mysql_dsn , tables = None , blocking = False , ** kwargs ) : parsed = urlparse ( mysql_dsn ) mysql_settings = { "host" : parsed . hostname , "port" : parsed . port or 3306 , "user" : parsed . username , "passwd" : parsed . password } stream = pymysqlreplication . BinLogStreamReader ( mysql_settings , server_id = random . randint ( 1000000000 , 4294967295 ) , blocking = blocking , only_events = [ DeleteRowsEvent , UpdateRowsEvent , WriteRowsEvent ] , ** kwargs ) def _pk ( values ) : if isinstance ( event . primary_key , str ) : return values [ event . primary_key ] return tuple ( values [ k ] for k in event . primary_key ) for event in stream : if not event . primary_key : continue if tables and event . table not in tables : continue try : rows = event . rows except ( UnicodeDecodeError , ValueError ) as e : logger . exception ( e ) continue timestamp = datetime . datetime . fromtimestamp ( event . timestamp ) if isinstance ( event , WriteRowsEvent ) : sg_name = "%s_write" % event . table sg = signal ( sg_name ) sg_raw = signal ( "%s_raw" % sg_name ) for row in rows : pk = _pk ( row [ "values" ] ) sg . send ( pk ) sg_raw . send ( row ) logger . debug ( "%s -> %s, %s" % ( sg_name , pk , timestamp ) ) elif isinstance ( event , UpdateRowsEvent ) : sg_name = "%s_update" % event . table sg = signal ( sg_name ) sg_raw = signal ( "%s_raw" % sg_name ) for row in rows : pk = _pk ( row [ "after_values" ] ) sg . send ( pk ) sg_raw . send ( row ) logger . debug ( "%s -> %s, %s" % ( sg_name , pk , timestamp ) ) elif isinstance ( event , DeleteRowsEvent ) : sg_name = "%s_delete" % event . table sg = signal ( sg_name ) sg_raw = signal ( "%s_raw" % sg_name ) for row in rows : pk = _pk ( row [ "values" ] ) sg . send ( pk ) sg_raw . send ( row ) logger . debug ( "%s -> %s, %s" % ( sg_name , pk , timestamp ) ) signal ( "mysql_binlog_pos" ) . send ( "%s:%s" % ( stream . log_file , stream . log_pos ) ) | MySQL row - based binlog events pub . |
5,170 | def load_molecule_in_rdkit_smiles ( self , molSize , kekulize = True , bonds = [ ] , bond_color = None , atom_color = { } , size = { } ) : mol_in_rdkit = self . topology_data . mol try : mol_in_rdkit = Chem . RemoveHs ( mol_in_rdkit ) self . topology_data . smiles = Chem . MolFromSmiles ( Chem . MolToSmiles ( mol_in_rdkit ) ) except ValueError : mol_in_rdkit = Chem . RemoveHs ( mol_in_rdkit , sanitize = False ) self . topology_data . smiles = Chem . MolFromSmiles ( Chem . MolToSmiles ( mol_in_rdkit ) , sanitize = False ) self . atom_identities = { } i = 0 for atom in self . topology_data . smiles . GetAtoms ( ) : self . atom_identities [ mol_in_rdkit . GetProp ( '_smilesAtomOutputOrder' ) [ 1 : ] . rsplit ( "," ) [ i ] ] = atom . GetIdx ( ) i += 1 mc = Chem . Mol ( self . topology_data . smiles . ToBinary ( ) ) if kekulize : try : Chem . Kekulize ( mc ) except : mc = Chem . Mol ( self . topology_data . smiles . ToBinary ( ) ) if not mc . GetNumConformers ( ) : rdDepictor . Compute2DCoords ( mc ) atoms = [ ] colors = { } for i in range ( mol_in_rdkit . GetNumAtoms ( ) ) : atoms . append ( i ) if len ( atom_color ) == 0 : colors [ i ] = ( 1 , 1 , 1 ) else : colors = atom_color drawer = rdMolDraw2D . MolDraw2DSVG ( int ( molSize [ 0 ] ) , int ( molSize [ 1 ] ) ) drawer . DrawMolecule ( mc , highlightAtoms = atoms , highlightBonds = bonds , highlightAtomColors = colors , highlightAtomRadii = size , highlightBondColors = bond_color ) drawer . FinishDrawing ( ) self . svg = drawer . GetDrawingText ( ) . replace ( 'svg:' , '' ) filesvg = open ( "molecule.svg" , "w+" ) filesvg . write ( self . svg ) | Loads mol file in rdkit without the hydrogens - they do not have to appear in the final figure . Once loaded the molecule is converted to SMILES format which RDKit appears to draw best - since we do not care about the actual coordinates of the original molecule it is sufficient to have just 2D information . 
Some molecules can be problematic to import and steps such as stopping sanitize function can be taken . This is done automatically if problems are observed . However better solutions can also be implemented and need more research . The molecule is then drawn from SMILES in 2D representation without hydrogens . The drawing is saved as an SVG file . |
5,171 | def calc_2d_forces ( self , x1 , y1 , x2 , y2 , width ) : if x1 > x2 : a = x1 - x2 else : a = x2 - x1 a_sq = a * a if y1 > y2 : b = y1 - y2 else : b = y2 - y1 b_sq = b * b from math import sqrt c_sq = a_sq + b_sq c = sqrt ( c_sq ) if c > width : return 0 , 0 else : overlap = width - c return - overlap / 2 , overlap / 2 | Calculate overlap in 2D space |
5,172 | def do_step ( self , values , xy_values , coeff , width ) : forces = { k : [ ] for k , i in enumerate ( xy_values ) } for ( index1 , value1 ) , ( index2 , value2 ) in combinations ( enumerate ( xy_values ) , 2 ) : f = self . calc_2d_forces ( value1 [ 0 ] , value1 [ 1 ] , value2 [ 0 ] , value2 [ 1 ] , width ) if coeff [ index1 ] < coeff [ index2 ] : if self . b_lenght - coeff [ index2 ] < self . b_lenght / 10 : forces [ index1 ] . append ( f [ 1 ] ) forces [ index2 ] . append ( f [ 0 ] ) else : forces [ index1 ] . append ( f [ 0 ] ) forces [ index2 ] . append ( f [ 1 ] ) else : if self . b_lenght - coeff [ index1 ] < self . b_lenght / 10 : forces [ index1 ] . append ( f [ 0 ] ) forces [ index2 ] . append ( f [ 1 ] ) else : forces [ index1 ] . append ( f [ 1 ] ) forces [ index2 ] . append ( f [ 0 ] ) forces = { k : sum ( v ) for k , v in forces . items ( ) } energy = sum ( [ abs ( x ) for x in forces . values ( ) ] ) return [ ( forces [ k ] / 10 + v ) for k , v in enumerate ( values ) ] , energy | Calculates forces between two diagrams and pushes them apart by tenth of width |
5,173 | def variants ( self , case_id , skip = 0 , count = 1000 , filters = None ) : filters = filters or { } logger . debug ( "Fetching case with case_id: {0}" . format ( case_id ) ) case_obj = self . case ( case_id ) plugin , case_id = self . select_plugin ( case_obj ) self . filters = plugin . filters gene_lists = ( self . gene_list ( list_id ) for list_id in filters . get ( 'gene_lists' , [ ] ) ) nested_geneids = ( gene_list . gene_ids for gene_list in gene_lists ) gene_ids = set ( itertools . chain . from_iterable ( nested_geneids ) ) if filters . get ( 'gene_ids' ) : filters [ 'gene_ids' ] . extend ( gene_ids ) else : filters [ 'gene_ids' ] = gene_ids variants = plugin . variants ( case_id , skip , count , filters ) return variants | Fetch variants for a case . |
5,174 | def variant ( self , case_id , variant_id ) : case_obj = self . case ( case_id ) plugin , case_id = self . select_plugin ( case_obj ) variant = plugin . variant ( case_id , variant_id ) return variant | Fetch a single variant from variant source . |
5,175 | def redis_es_sub ( session , tables , redis_dsn , strict = False , namespace = None , ttl = 3600 * 24 * 3 , socket_timeout = 1 ) : logger = logging . getLogger ( "meepo.sub.redis_es_sub" ) if not isinstance ( tables , ( list , set ) ) : raise ValueError ( "tables should be list or set" ) event_store = RedisEventStore ( redis_dsn , namespace = namespace , ttl = ttl , socket_timeout = socket_timeout ) def _es_event_sub ( pk , event ) : if event_store . add ( event , str ( pk ) ) : logger . info ( "%s: %s -> %s" % ( event , pk , datetime . datetime . now ( ) ) ) else : logger . error ( "event sourcing failed: %s" % pk ) events = ( "%s_%s" % ( tb , action ) for tb , action in itertools . product ( * [ tables , [ "write" , "update" , "delete" ] ] ) ) for event in events : sub_func = functools . partial ( _es_event_sub , event = event ) signal ( event ) . connect ( sub_func , weak = False ) prepare_commit = RedisPrepareCommit ( redis_dsn , strict = strict , namespace = namespace , socket_timeout = socket_timeout ) signal ( "session_prepare" ) . connect ( prepare_commit . prepare , sender = session , weak = False ) signal ( "session_commit" ) . connect ( prepare_commit . commit , sender = session , weak = False ) signal ( "session_rollback" ) . connect ( prepare_commit . rollback , sender = session , weak = False ) return event_store , prepare_commit | Redis EventSourcing sub . |
5,176 | def setup_parser ( ) : parser = argparse . ArgumentParser ( ) parser . add_argument ( '-p' , '--port' , type = int , default = 5005 ) parser . add_argument ( '-i' , '--interval' , type = int , default = 480 ) parser . add_argument ( 'host' , type = str , help = 'hostname' ) return parser | Setup an ArgumentParser . |
5,177 | def main ( ) : args = setup_parser ( ) . parse_args ( ) host = getattr ( args , "host" ) port = getattr ( args , "port" ) ipv4 = socket . gethostbyname ( host ) interval = getattr ( args , "interval" ) receiver = McDevice ( ipv4 , udp_port = port , mc_interval = interval ) receiver . handle_status ( ) while True : time . sleep ( 0.2 ) | Connect to a McDevice |
5,178 | def from_sqlite ( cls , database_path , base_url , version = 'auto' , client_id = 'ghost-admin' ) : import os import sqlite3 fd = os . open ( database_path , os . O_RDONLY ) connection = sqlite3 . connect ( '/dev/fd/%d' % fd ) os . close ( fd ) try : row = connection . execute ( 'SELECT secret FROM clients WHERE slug = ?' , ( client_id , ) ) . fetchone ( ) if row : return cls ( base_url , version = version , client_id = client_id , client_secret = row [ 0 ] ) else : raise GhostException ( 401 , [ { 'errorType' : 'InternalError' , 'message' : 'No client_secret found for client_id: %s' % client_id } ] ) finally : connection . close ( ) | Initialize a new Ghost API client reading the client ID and secret from the SQlite database . |
5,179 | def login ( self , username , password ) : data = self . _authenticate ( grant_type = 'password' , username = username , password = password , client_id = self . _client_id , client_secret = self . _client_secret ) self . _username = username self . _password = password return data | Authenticate with the server . |
5,180 | def refresh_session ( self ) : if not self . _refresh_token : if self . _username and self . _password : return self . login ( self . _username , self . _password ) return return self . _authenticate ( grant_type = 'refresh_token' , refresh_token = self . _refresh_token , client_id = self . _client_id , client_secret = self . _client_secret ) | Re - authenticate using the refresh token if available . Otherwise log in using the username and password if it was used to authenticate initially . |
5,181 | def revoke_access_token ( self ) : if not self . _access_token : return self . execute_post ( 'authentication/revoke' , json = dict ( token_type_hint = 'access_token' , token = self . _access_token ) ) self . _access_token = None | Revoke the access token currently in use . |
5,182 | def revoke_refresh_token ( self ) : if not self . _refresh_token : return self . execute_post ( 'authentication/revoke' , json = dict ( token_type_hint = 'refresh_token' , token = self . _refresh_token ) ) self . _refresh_token = None | Revoke the refresh token currently active . |
5,183 | def logout ( self ) : self . revoke_refresh_token ( ) self . revoke_access_token ( ) self . _username , self . _password = None , None | Log out revoking the access tokens and forgetting the login details if they were given . |
5,184 | def upload ( self , file_obj = None , file_path = None , name = None , data = None ) : close = False if file_obj : file_name , content = os . path . basename ( file_obj . name ) , file_obj elif file_path : file_name , content = os . path . basename ( file_path ) , open ( file_path , 'rb' ) close = True elif name and data : file_name , content = name , data else : raise GhostException ( 400 , 'Either `file_obj` or `file_path` or ' '`name` and `data` needs to be specified' ) try : content_type , _ = mimetypes . guess_type ( file_name ) file_arg = ( file_name , content , content_type ) response = self . execute_post ( 'uploads/' , files = { 'uploadimage' : file_arg } ) return response finally : if close : content . close ( ) | Upload an image and return its path on the server . Either file_obj or file_path or name and data has to be specified . |
5,185 | def execute_get ( self , resource , ** kwargs ) : url = '%s/%s' % ( self . base_url , resource ) headers = kwargs . pop ( 'headers' , dict ( ) ) headers [ 'Accept' ] = 'application/json' headers [ 'Content-Type' ] = 'application/json' if kwargs : separator = '&' if '?' in url else '?' for key , value in kwargs . items ( ) : if hasattr ( value , '__iter__' ) and type ( value ) not in six . string_types : url = '%s%s%s=%s' % ( url , separator , key , ',' . join ( value ) ) else : url = '%s%s%s=%s' % ( url , separator , key , value ) separator = '&' if self . _access_token : headers [ 'Authorization' ] = 'Bearer %s' % self . _access_token else : separator = '&' if '?' in url else '?' url = '%s%sclient_id=%s&client_secret=%s' % ( url , separator , self . _client_id , self . _client_secret ) response = requests . get ( url , headers = headers ) if response . status_code // 100 != 2 : raise GhostException ( response . status_code , response . json ( ) . get ( 'errors' , [ ] ) ) return response . json ( ) | Execute an HTTP GET request against the API endpoints . This method is meant for internal use . |
5,186 | def execute_post ( self , resource , ** kwargs ) : return self . _request ( resource , requests . post , ** kwargs ) . json ( ) | Execute an HTTP POST request against the API endpoints . This method is meant for internal use . |
5,187 | def execute_put ( self , resource , ** kwargs ) : return self . _request ( resource , requests . put , ** kwargs ) . json ( ) | Execute an HTTP PUT request against the API endpoints . This method is meant for internal use . |
5,188 | def execute_delete ( self , resource , ** kwargs ) : self . _request ( resource , requests . delete , ** kwargs ) | Execute an HTTP DELETE request against the API endpoints . This method is meant for internal use . Does not return anything but raises an exception when failed . |
5,189 | def token_distance ( t1 , t2 , initial_match_penalization ) : if isinstance ( t1 , NameInitial ) or isinstance ( t2 , NameInitial ) : if t1 . token == t2 . token : return 0 if t1 == t2 : return initial_match_penalization return 1.0 return _normalized_edit_dist ( t1 . token , t2 . token ) | Calculates the edit distance between two tokens . |
5,190 | def simple_tokenize ( name ) : last_names , first_names = name . split ( ',' ) last_names = _RE_NAME_TOKEN_SEPARATOR . split ( last_names ) first_names = _RE_NAME_TOKEN_SEPARATOR . split ( first_names ) first_names = [ NameToken ( n ) if len ( n ) > 1 else NameInitial ( n ) for n in first_names if n ] last_names = [ NameToken ( n ) if len ( n ) > 1 else NameInitial ( n ) for n in last_names if n ] return { 'lastnames' : last_names , 'nonlastnames' : first_names } | Simple tokenizer function to be used with the normalizers . |
5,191 | def calculate_descriptors ( self , mol ) : self . ligand_atoms = { index : { "name" : x . name } for index , x in enumerate ( self . topology_data . universe . ligand_noH . atoms ) } contribs = self . calculate_logP ( mol ) self . calculate_Gasteiger_charges ( mol ) fcharges = self . calculate_formal_charge ( mol ) for atom in self . ligand_atoms . keys ( ) : self . ligand_atoms [ atom ] [ "logP" ] = contribs [ atom ] [ 0 ] self . ligand_atoms [ atom ] [ "MR" ] = contribs [ atom ] [ 1 ] self . ligand_atoms [ atom ] [ "Gasteiger_ch" ] = mol . GetAtomWithIdx ( atom ) . GetProp ( "_GasteigerCharge" ) self . ligand_atoms [ atom ] [ "Formal charges" ] = fcharges [ atom ] self . rot_bonds = self . get_rotatable_bonds ( mol ) | Calculates descriptors such as logP charges and MR and saves that in a dictionary . |
5,192 | def variants ( self , case_id , skip = 0 , count = 1000 , filters = None ) : filters = filters or { } case_obj = self . case ( case_id = case_id ) limit = count + skip genes = set ( ) if filters . get ( 'gene_ids' ) : genes = set ( [ gene_id . strip ( ) for gene_id in filters [ 'gene_ids' ] ] ) frequency = None if filters . get ( 'frequency' ) : frequency = float ( filters [ 'frequency' ] ) cadd = None if filters . get ( 'cadd' ) : cadd = float ( filters [ 'cadd' ] ) genetic_models = None if filters . get ( 'genetic_models' ) : genetic_models = set ( filters [ 'genetic_models' ] ) sv_len = None if filters . get ( 'sv_len' ) : sv_len = float ( filters [ 'sv_len' ] ) impact_severities = None if filters . get ( 'impact_severities' ) : impact_severities = set ( filters [ 'impact_severities' ] ) vcf_file_path = case_obj . variant_source self . head = get_header ( vcf_file_path ) self . vep_header = self . head . vep_columns self . snpeff_header = self . head . snpeff_columns variants = self . _get_filtered_variants ( vcf_file_path , filters ) result = [ ] skip_index = 0 for index , variant in enumerate ( variants ) : index += 1 if skip_index >= skip : variant_obj = self . _format_variants ( variant = variant , index = index , case_obj = case_obj , ) if genes and variant_obj : if not set ( variant_obj [ 'gene_symbols' ] ) . intersection ( genes ) : variant_obj = None if impact_severities and variant_obj : if not variant_obj [ 'impact_severity' ] in impact_severities : variant_obj = None if frequency and variant_obj : if variant_obj . max_freq > frequency : variant_obj = None if cadd and variant_obj : if variant_obj [ 'cadd_score' ] < cadd : variant_obj = None if genetic_models and variant_obj : models = set ( variant_obj . genetic_models ) if not models . intersection ( genetic_models ) : variant_obj = None if sv_len and variant_obj : if variant_obj . sv_len < sv_len : variant_obj = None if variant_obj : skip_index += 1 if skip_index <= limit : result . 
append ( variant_obj ) else : break else : skip_index += 1 return Results ( result , len ( result ) ) | Return all variants in the VCF . |
5,193 | def _get_filtered_variants ( self , vcf_file_path , filters = { } ) : genes = set ( ) consequences = set ( ) sv_types = set ( ) if filters . get ( 'gene_ids' ) : genes = set ( [ gene_id . strip ( ) for gene_id in filters [ 'gene_ids' ] ] ) if filters . get ( 'consequence' ) : consequences = set ( filters [ 'consequence' ] ) if filters . get ( 'sv_types' ) : sv_types = set ( filters [ 'sv_types' ] ) logger . info ( "Get variants from {0}" . format ( vcf_file_path ) ) if filters . get ( 'range' ) : range_str = "{0}:{1}-{2}" . format ( filters [ 'range' ] [ 'chromosome' ] , filters [ 'range' ] [ 'start' ] , filters [ 'range' ] [ 'end' ] ) vcf = VCF ( vcf_file_path ) handle = vcf ( range_str ) else : handle = VCF ( vcf_file_path ) for variant in handle : variant_line = str ( variant ) keep_variant = True if genes and keep_variant : keep_variant = False for gene in genes : if "{0}" . format ( gene ) in variant_line : keep_variant = True break if consequences and keep_variant : keep_variant = False for consequence in consequences : if consequence in variant_line : keep_variant = True break if sv_types and keep_variant : keep_variant = False for sv_type in sv_types : if sv_type in variant_line : keep_variant = True break if keep_variant : yield variant | Check if variants follows the filters |
5,194 | def fnv ( data , hval_init , fnv_prime , fnv_size ) : assert isinstance ( data , bytes ) hval = hval_init for byte in data : hval = ( hval * fnv_prime ) % fnv_size hval = hval ^ _get_byte ( byte ) return hval | Core FNV hash algorithm used in FNV0 and FNV1 . |
5,195 | def session_prepare ( self , session , _ ) : if not hasattr ( session , 'meepo_unique_id' ) : self . _session_init ( session ) evt = collections . defaultdict ( set ) for action in ( "write" , "update" , "delete" ) : objs = getattr ( session , "pending_%s" % action ) if self . tables : objs = [ o for o in objs if o . __table__ . fullname in self . tables ] for obj in objs : evt_name = "%s_%s" % ( obj . __table__ . fullname , action ) evt [ evt_name ] . add ( obj ) self . logger . debug ( "%s - session_prepare: %s -> %s" % ( session . meepo_unique_id , evt_name , evt ) ) if evt : signal ( "session_prepare" ) . send ( session , event = evt ) | Send session_prepare signal in session before_commit . |
5,196 | def session_commit ( self , session ) : if not hasattr ( session , 'meepo_unique_id' ) : self . logger . debug ( "skipped - session_commit" ) return self . logger . debug ( "%s - session_commit" % session . meepo_unique_id ) self . _session_pub ( session ) signal ( "session_commit" ) . send ( session ) self . _session_del ( session ) | Send session_commit signal in sqlalchemy before_commit . |
5,197 | def session_rollback ( self , session ) : if not hasattr ( session , 'meepo_unique_id' ) : self . logger . debug ( "skipped - session_rollback" ) return self . logger . debug ( "%s - after_rollback" % session . meepo_unique_id ) signal ( "session_rollback" ) . send ( session ) self . _session_del ( session ) | Send session_rollback signal in sqlalchemy after_rollback . |
5,198 | def process_fig_and_ax_argument ( fig , ax , default_figsize = None ) : if default_figsize is not None : assert type ( default_figsize ) in [ tuple , list ] assert len ( default_figsize ) == 2 if ( fig is None ) and ( ax is None ) : fig , ax = plt . subplots ( figsize = default_figsize ) else : assert ( is_figure ( fig ) ) and ( is_axes ( ax ) ) return fig , ax | Process fig and ax arguments . |
5,199 | def get_square_axes_limits ( coords , margin = 0.05 ) : try : coords = [ np . array ( coord ) for coord in coords ] except : raise Exception ( "Failed to convert elements of 'coords' into numpy.array" ) lims = [ ( coord . min ( ) , coord . max ( ) ) for coord in coords ] mids = [ 0.5 * ( lim [ 0 ] + lim [ 1 ] ) for lim in lims ] widths = [ 0.5 * ( lim [ 1 ] - lim [ 0 ] ) for lim in lims ] max_width = max ( widths ) max_width += max_width * margin ax_lims = tuple ( ( mid - max_width , mid + max_width ) for mid in mids ) return ax_lims | Return N - dimensional square s limits |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.