idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
50,900
def load_orthologs ( fo : IO , metadata : dict ) : version = metadata [ "metadata" ] [ "version" ] with timy . Timer ( "Load Orthologs" ) as timer : arango_client = arangodb . get_client ( ) belns_db = arangodb . get_belns_handle ( arango_client ) arangodb . batch_load_docs ( belns_db , orthologs_iterator ( fo , versio...
Load orthologs into ArangoDB
50,901
def orthologs_iterator ( fo , version ) : species_list = config [ "bel_resources" ] . get ( "species_list" , [ ] ) fo . seek ( 0 ) with gzip . open ( fo , "rt" ) as f : for line in f : edge = json . loads ( line ) if "metadata" in edge : source = edge [ "metadata" ] [ "source" ] continue if "ortholog" in edge : edge = ...
Ortholog node and edge iterator
50,902
def migrate(belstr: str) -> str:
    """Migrate a BEL 1 assertion to BEL 2.0.0.

    Parses the assertion string into an AST, migrates the AST, and
    renders the migrated AST back to a string.
    """
    bo.ast = bel.lang.partialparse.get_ast_obj(belstr, "2.0.0")
    migrated = migrate_ast(bo.ast)
    return migrated.to_string()
Migrate BEL 1 to 2 . 0 . 0
50,903
def migrate_into_triple(belstr: str) -> str:
    """Migrate a BEL1 assertion into a BEL 2.0.0 SRO triple.

    Same parse/migrate pipeline as migrate(), but renders the result as
    a subject-relation-object triple instead of a single string.
    """
    bo.ast = bel.lang.partialparse.get_ast_obj(belstr, "2.0.0")
    migrated = migrate_ast(bo.ast)
    return migrated.to_triple()
Migrate BEL1 assertion into BEL 2 . 0 . 0 SRO triple
50,904
def convert ( ast ) : if ast and ast . type == "Function" : if ( ast . name != "molecularActivity" and ast . name in spec [ "namespaces" ] [ "Activity" ] [ "list" ] ) : print ( "name" , ast . name , "type" , ast . type ) ast = convert_activity ( ast ) return ast elif ast . name in [ "tloc" , "translocation" ] : ast = c...
Convert BEL1 AST Function to BEL2 AST Function
50,905
def get_client ( host = None , port = None , username = None , password = None , enable_logging = True ) : host = utils . first_true ( [ host , config [ "bel_api" ] [ "servers" ] [ "arangodb_host" ] , "localhost" ] ) port = utils . first_true ( [ port , config [ "bel_api" ] [ "servers" ] [ "arangodb_port" ] , 8529 ] ) ...
Get arango client and edgestore db handle
50,906
def get_edgestore_handle ( client : arango . client . ArangoClient , username = None , password = None , edgestore_db_name : str = edgestore_db_name , edgestore_edges_name : str = edgestore_edges_name , edgestore_nodes_name : str = edgestore_nodes_name , edgestore_pipeline_name : str = edgestore_pipeline_name , edgesto...
Get Edgestore arangodb database handle
50,907
def get_belns_handle ( client , username = None , password = None ) : ( username , password ) = get_user_creds ( username , password ) sys_db = client . db ( "_system" , username = username , password = password ) try : if username and password : belns_db = sys_db . create_database ( name = belns_db_name , users = [ { ...
Get BEL namespace arango db handle
50,908
def get_belapi_handle ( client , username = None , password = None ) : ( username , password ) = get_user_creds ( username , password ) sys_db = client . db ( "_system" , username = username , password = password ) try : if username and password : belapi_db = sys_db . create_database ( name = belapi_db_name , users = [...
Get BEL API arango db handle
50,909
def delete_database ( client , db_name , username = None , password = None ) : ( username , password ) = get_user_creds ( username , password ) sys_db = client . db ( "_system" , username = username , password = password ) try : return sys_db . delete_database ( db_name ) except Exception : log . warn ( "No arango data...
Delete Arangodb database
50,910
def batch_load_docs ( db , doc_iterator , on_duplicate = "replace" ) : batch_size = 100 counter = 0 collections = { } docs = { } if on_duplicate not in [ "error" , "update" , "replace" , "ignore" ] : log . error ( f"Bad parameter for on_duplicate: {on_duplicate}" ) return for ( collection_name , doc ) in doc_iterator :...
Batch load documents
50,911
def load_resource ( resource_url : str , forceupdate : bool = False ) : log . info ( f"Loading resource {resource_url}" ) try : fo = bel . utils . download_file ( resource_url ) if not fo : log . error ( f"Could not download and open file {resource_url}" ) return "Failed to download resource_url" fo . seek ( 0 ) with g...
Load BEL Resource file
50,912
def get_normalized_term ( term_id : str , equivalents : list , namespace_targets : dict ) -> str : if equivalents and len ( equivalents ) > 0 : for start_ns in namespace_targets : if re . match ( start_ns , term_id ) : for target_ns in namespace_targets [ start_ns ] : for e in equivalents : if e and target_ns in e [ "n...
Get normalized term
50,913
def get_labels(term_ids: list) -> dict:
    """Get term labels given term ids.

    Args:
        term_ids: term ids to look up

    Returns:
        dict mapping each term id to its label; "" when the term has no
        label or cannot be found
    """
    term_labels = {}
    for term_id in term_ids:
        terms = get_terms(term_id)
        # Guard against an unknown term id -- the original indexed
        # terms[0] unconditionally and raised IndexError on a miss.
        if terms:
            term_labels[term_id] = terms[0].get("label", "")
        else:
            term_labels[term_id] = ""
    return term_labels
Get term labels given term ids
50,914
def _get_params ( self ) : params = { 'accountNumber' : self . _service . accountNumber } for key , val in self . __dict__ . iteritems ( ) : if key in self . field_order : if isinstance ( val , str , ) : val = val . decode ( 'utf8' ) params [ key ] = val for key in self . field_order : if key not in params : params [ k...
Generate SOAP parameters .
50,915
def _generate_hash(self):
    """Generate a hash based on the specific fields for the method.

    Concatenates the stringified parameter values with the service's
    encryption key and stores the MD5 hex digest on ``self.hash``.
    """
    self.hash = None
    str_hash = ''
    # dict.iteritems() is Python 2 only; .items() works on 2 and 3.
    for key, val in self._get_params().items():
        str_hash += smart_str(val)
    str_hash += self._service.encryption_key
    # hashlib.md5 requires bytes under Python 3.
    self.hash = hashlib.md5(str_hash.encode('utf-8')).hexdigest()
Generates a hash based on the specific fields for the method .
50,916
def _send_request ( self ) : self . _generate_hash ( ) params = self . _get_params ( ) try : resp = self . _endpoint ( ** params ) logger . debug ( resp ) except WebFault , e : logger . exception ( 'An error occurred while making the SOAP request.' ) return None self . response = XmlDictConfig ( ElementTree . XML ( sma...
Make the SOAP request and convert the result to a dictionary .
50,917
def client_factory ( self ) : if self . _service . production : url = self . production_url else : url = self . testing_url proxy_options = dict ( ) https_proxy_setting = os . environ . get ( 'PAYEX_HTTPS_PROXY' ) or os . environ . get ( 'https_proxy' ) http_proxy_setting = os . environ . get ( 'PAYEX_HTTP_PROXY' ) or ...
Custom client factory to set proxy options .
50,918
def get_combination_action(combination):
    """Prepare the action for a keyboard combination.

    Filters out any unexpected actions declared by the user; only
    'link' and 'js' actions are honored, checked in that order.
    Returns {} when no accepted action is present.
    """
    for candidate in ('link', 'js'):
        if candidate in combination:
            return {candidate: combination[candidate]}
    return {}
Prepares the action for a keyboard combination also filters another strange actions declared by the user .
50,919
def get_processed_hotkeys ( hotkeys = None ) : hotkeys = hotkeys or ks_settings . HOTKEYS processed_hotkeys = AutoVivification ( ) if not hotkeys : return processed_hotkeys for combination in hotkeys : key_codes = get_key_codes ( combination [ 'keys' ] ) if len ( key_codes ) == 1 : processed_hotkeys [ key_codes [ 0 ] ]...
Process passed dict with key combinations or the HOTKEYS dict from settings .
50,920
def parse ( self , assertion : Union [ str , Mapping [ str , str ] ] , strict : bool = False , parseinfo : bool = False , rule_name : str = "start" , error_level : str = "WARNING" , ) -> "BEL" : self . ast = None self . parse_valid = False self . parse_visualize_error = "" self . validation_messages = [ ] if isinstance...
Parse and semantically validate BEL statement
50,921
def canonicalize(self, namespace_targets: Mapping[str, List[str]] = None) -> "BEL":
    """Canonicalize the BEL statement AST in place.

    Ensures NSArg normalization data has been collected first, then
    canonicalizes the AST. Returns self for chaining; a missing AST is
    a no-op. (namespace_targets is currently unused by this body.)
    """
    if self.ast:
        if not self.ast.collected_nsarg_norms:
            self = self.collect_nsarg_norms()
        self.ast.canonicalize()
    return self
Takes an AST and returns a canonicalized BEL statement string .
50,922
def collect_nsarg_norms ( self ) : start_time = datetime . datetime . now ( ) self . ast = bel_utils . populate_ast_nsarg_defaults ( self . ast , self . ast ) self . ast . collected_nsarg_norms = True if ( hasattr ( self . ast , "bel_object" ) and self . ast . bel_object and self . ast . bel_object . type == "BELAst" )...
Adds canonical and decanonical values to NSArgs in AST
50,923
def orthologize(self, species_id: str) -> "BEL":
    """Orthologize the BEL AST to the given species_id.

    Collects ortholog data on demand, resets the AST species set, and
    rewrites the AST via bel_utils.orthologize. Returns self for
    chaining; a missing AST is a no-op.
    """
    if self.ast:
        if not self.ast.collected_orthologs:
            self = self.collect_orthologs([species_id])
        self.ast.species = set()
        self.ast = bel_utils.orthologize(self.ast, self, species_id)
    return self
Orthologize BEL AST to given species_id
50,924
def compute_edges ( self , rules : List [ str ] = None , ast_result = False , fmt = "medium" ) -> List [ Mapping [ str , Any ] ] : if not self . ast : return self edges_asts = bel . edge . computed . compute_edges ( self . ast , self . spec ) if ast_result : return edges_asts edges = [ ] for ast in edges_asts : edges ....
Computed edges from primary BEL statement
50,925
def ast_dict_to_objects ( ast_dict : Mapping [ str , Any ] , bel_obj ) -> BELAst : ast_subject = ast_dict . get ( "subject" , None ) ast_object = ast_dict . get ( "object" , None ) bel_subject = None bel_object = None bel_relation = ast_dict . get ( "relation" ) if ast_subject : bel_subject = function_ast_to_objects ( ...
Convert Tatsu AST dictionary to BEL AST object
50,926
def subcomponents(self, subcomponents):
    """Generate subcomponents of the BEL subject or object.

    Appends the string form of every argument to the given list and
    recurses into arguments that are primary Functions. Returns the
    (mutated) list.
    """
    for component in self.args:
        subcomponents.append(component.to_string())
        if component.__class__.__name__ == "Function" and component.function_type == "primary":
            component.subcomponents(subcomponents)
    return subcomponents
Generate subcomponents of the BEL subject or object
50,927
def update_nsval ( self , * , nsval : str = None , ns : str = None , val : str = None ) -> None : if not ( ns and val ) and nsval : ( ns , val ) = nsval . split ( ":" , 1 ) elif not ( ns and val ) and not nsval : log . error ( "Did not update NSArg - no ns:val or nsval provided" ) self . namespace = ns self . value = v...
Update Namespace and value .
50,928
def orthologize ( self , ortho_species_id , belast ) : if ( self . orthologs and ortho_species_id in self . orthologs and ortho_species_id != self . species_id ) : self . orthology_species = ortho_species_id self . canonical = self . orthologs [ ortho_species_id ] [ "canonical" ] self . decanonical = self . orthologs [...
Decanonical ortholog name used
50,929
def convert_csv_str_to_list ( csv_str : str ) -> list : csv_str = re . sub ( "^\s*{" , "" , csv_str ) csv_str = re . sub ( "}\s*$" , "" , csv_str ) r = csv . reader ( [ csv_str ] ) row = list ( r ) [ 0 ] new = [ ] for col in row : col = re . sub ( '^\s*"?\s*' , "" , col ) col = re . sub ( '\s*"?\s*$' , "" , col ) new ....
Convert CSV str to list
50,930
def split_bel_stmt ( stmt : str , line_num ) -> tuple : m = re . match ( f"^(.*?\))\s+([a-zA-Z=\->\|:]+)\s+([\w(]+.*?)$" , stmt , flags = 0 ) if m : return ( m . group ( 1 ) , m . group ( 2 ) , m . group ( 3 ) ) else : log . info ( f"Could not parse bel statement into components at line number: {line_num} assertion: {s...
Split bel statement into subject relation object tuple
50,931
def yield_nanopub ( assertions , annotations , line_num ) : if not assertions : return { } anno = copy . deepcopy ( annotations ) evidence = anno . pop ( "evidence" , None ) stmt_group = anno . pop ( "statement_group" , None ) citation = anno . pop ( "citation" , None ) anno_list = [ ] for anno_type in anno : if isinst...
Yield nanopub object
50,932
def process_documentline(line, nanopubs_metadata):
    """Process a SET DOCUMENT line in a BEL script.

    Parses ``SET DOCUMENT <key> = <value>`` (value optionally quoted)
    and records the key/value pair in nanopubs_metadata.

    Returns:
        the (possibly updated) nanopubs_metadata dict
    """
    matches = re.match(r'SET DOCUMENT\s+(\w+)\s+=\s+"?(.*?)"?$', line)
    # Guard against malformed lines -- the original called .group() on
    # None and raised AttributeError when the regex did not match.
    if matches:
        nanopubs_metadata[matches.group(1)] = matches.group(2)
    return nanopubs_metadata
Process SET DOCUMENT line in BEL script
50,933
def process_definition ( line , nanopubs_metadata ) : matches = re . match ( 'DEFINE\s+(\w+)\s+(\w+)\s+AS\s+URL\s+"(.*?)"\s*$' , line ) if matches : def_type = matches . group ( 1 ) . lower ( ) if def_type == "namespace" : def_type = "namespaces" elif def_type == "annotation" : def_type == "annotations" key = matches ....
Process DEFINE line in BEL script
50,934
def process_unset ( line , annotations ) : matches = re . match ( 'UNSET\s+"?(.*?)"?\s*$' , line ) if matches : val = matches . group ( 1 ) if val == "ALL" or val == "STATEMENT_GROUP" : annotations = { } elif re . match ( "{" , val ) : vals = convert_csv_str_to_list ( val ) for val in vals : annotations . pop ( val , N...
Process UNSET lines in BEL Script
50,935
def process_set ( line , annotations ) : matches = re . match ( 'SET\s+(\w+)\s*=\s*"?(.*?)"?\s*$' , line ) key = None if matches : key = matches . group ( 1 ) val = matches . group ( 2 ) if key == "STATEMENT_GROUP" : annotations [ "statement_group" ] = val elif key == "Citation" : annotations [ "citation" ] = process_c...
Convert annotations into nanopub_bel annotations format
50,936
def preprocess_belscript ( lines ) : set_flag = False for line in lines : if set_flag is False and re . match ( "SET" , line ) : set_flag = True set_line = [ line . rstrip ( ) ] elif set_flag and re . match ( "SET" , line ) : yield f"{' '.join(set_line)}\n" set_line = [ line . rstrip ( ) ] elif set_flag and re . match ...
Convert any multi - line SET statements into single line SET statements
50,937
def parse_belscript ( lines ) : nanopubs_metadata = { } annotations = { } assertions = [ ] line_num = 0 for line in set_single_line ( lines ) : line_num += 1 line = re . sub ( "\/\/.*?$" , "" , line ) line = line . rstrip ( ) while re . search ( "\\\s*$" , line ) : line = line . replace ( "\\" , "" ) + next ( lines ) i...
Lines from the BELScript - can be an iterator or list
50,938
def __stringify_body ( self , request_or_response ) : headers = self . __track_info [ '{}_headers' . format ( request_or_response ) ] body = self . __track_info . get ( '{}_body' . format ( request_or_response ) ) if isinstance ( body , CaseInsensitiveDict ) : body = json . dumps ( dict ( body ) , ensure_ascii = False ...
this method reference from httprunner
50,939
def read_nanopubs ( fn : str ) -> Iterable [ Mapping [ str , Any ] ] : jsonl_flag , json_flag , yaml_flag = False , False , False if fn == "-" or "jsonl" in fn : jsonl_flag = True elif "json" in fn : json_flag = True elif re . search ( "ya?ml" , fn ) : yaml_flag = True else : log . error ( "Do not recognize nanopub fil...
Read file and generate nanopubs
50,940
def create_nanopubs_fh ( output_fn : str ) : json_flag , jsonl_flag , yaml_flag = False , False , False if output_fn : if re . search ( "gz$" , output_fn ) : out_fh = gzip . open ( output_fn , "wt" ) else : out_fh = click . open_file ( output_fn , mode = "wt" ) if re . search ( "ya?ml" , output_fn ) : yaml_flag = True ...
Create Nanopubs output filehandle
50,941
def write_edges(
    edges: Mapping[str, Any],
    filename: str,
    jsonlines: bool = False,
    gzipflag: bool = False,
    yaml: bool = False,
):
    """Write edges to file.

    Not yet implemented: accepts the edges, target filename and format
    flags (jsonlines / gzip / yaml) but currently performs no work.
    """
    pass
Write edges to file
50,942
def add_index_alias(es, index_name, alias_name):
    """Add an index alias pointing at index_name.

    Args:
        es: Elasticsearch client
        index_name: index the alias should point to
        alias_name: name of the alias to create
    """
    # Bug fix: the original passed the module-global ``terms_alias``
    # instead of the ``alias_name`` parameter, so the argument was
    # silently ignored (or raised NameError).
    es.indices.put_alias(index=index_name, name=alias_name)
Add index alias to index_name
50,943
def delete_index(es, index_name: str):
    """Delete the terms index.

    Logs a warning and returns None when no index name is supplied;
    otherwise returns the Elasticsearch delete result.
    """
    if index_name:
        return es.indices.delete(index=index_name)
    log.warn("No index name given to delete")
    return None
Delete the terms index
50,944
def create_terms_index ( es , index_name : str ) : with open ( mappings_terms_fn , "r" ) as f : mappings_terms = yaml . load ( f , Loader = yaml . SafeLoader ) try : es . indices . create ( index = index_name , body = mappings_terms ) except Exception as e : log . error ( f"Could not create elasticsearch terms index: {...
Create terms index
50,945
def delete_terms_indexes(es, index_name: str = "terms_*"):
    """Delete all terms indexes matching index_name (default: terms_*).

    Failures are logged rather than raised (best-effort cleanup).
    """
    try:
        es.indices.delete(index=index_name)
    except Exception as e:
        log.error(f"Could not delete all terms indices: {e}")
Delete all terms indexes
50,946
def bulk_load_docs ( es , docs ) : chunk_size = 200 try : results = elasticsearch . helpers . bulk ( es , docs , chunk_size = chunk_size ) log . debug ( f"Elasticsearch documents loaded: {results[0]}" ) if len ( results [ 1 ] ) > 0 : log . error ( "Bulk load errors {}" . format ( results ) ) except elasticsearch . Elas...
Bulk load docs
50,947
def validate ( bo , error_level : str = "WARNING" ) -> Tuple [ bool , List [ Tuple [ str , str ] ] ] : if bo . ast : bo = validate_functions ( bo . ast , bo ) if error_level == "WARNING" : bo = validate_arg_values ( bo . ast , bo ) else : bo . validation_messages . append ( ( "ERROR" , "Invalid BEL Statement - cannot p...
Semantically validate BEL AST
50,948
def validate_functions ( ast : BELAst , bo ) : if isinstance ( ast , Function ) : log . debug ( f"Validating: {ast.name}, {ast.function_type}, {ast.args}" ) function_signatures = bo . spec [ "functions" ] [ "signatures" ] [ ast . name ] [ "signatures" ] function_name = ast . name ( valid_function , messages ) = check_f...
Recursively validate function signatures
50,949
def get_belbio_conf_files ( ) : home = os . path . expanduser ( "~" ) cwd = os . getcwd ( ) belbio_conf_fp , belbio_secrets_fp = "" , "" env_conf_dir = os . getenv ( "BELBIO_CONF" , "" ) . rstrip ( "/" ) conf_paths = [ f"{cwd}/belbio_conf.yaml" , f"{cwd}/belbio_conf.yml" , f"{env_conf_dir}/belbio_conf.yaml" , f"{env_co...
Get belbio configuration from files
50,950
def load_configuration ( ) : ( belbio_conf_fp , belbio_secrets_fp ) = get_belbio_conf_files ( ) log . info ( f"Using conf: {belbio_conf_fp} and secrets files: {belbio_secrets_fp} " ) config = { } if belbio_conf_fp : with open ( belbio_conf_fp , "r" ) as f : config = yaml . load ( f , Loader = yaml . SafeLoader ) config...
Load the configuration
50,951
def get_versions ( config ) -> dict : try : import bel . __version__ config [ "bel" ] [ "version" ] = bel . __version__ . __version__ except KeyError : config [ "bel" ] = { "version" : bel . __version__ . __version__ } except ModuleNotFoundError : pass try : import tools . __version__ config [ "bel_resources" ] [ "vers...
Get versions of bel modules and tools
50,952
def add_environment_vars ( config : MutableMapping [ str , Any ] ) : for e in os . environ : if re . match ( "BELBIO_" , e ) : val = os . environ . get ( e ) if val : e . replace ( "BELBIO_" , "" ) env_keys = e . lower ( ) . split ( "__" ) if len ( env_keys ) > 1 : joined = '"]["' . join ( env_keys ) eval_config = f'co...
Override config with environment variables
50,953
def merge_config ( config : Mapping [ str , Any ] , override_config : Mapping [ str , Any ] = None , override_config_fn : str = None , ) -> Mapping [ str , Any ] : if override_config_fn : with open ( override_config_fn , "r" ) as f : override_config = yaml . load ( f , Loader = yaml . SafeLoader ) if not override_confi...
Override config with additional configuration in override_config or override_config_fn
50,954
def rec_merge(d1, d2):
    """Recursively merge two dictionaries, with d2 winning conflicts.

    Nested mappings are merged recursively; any other conflicting value
    is taken from d2. Note: d2 is mutated when nested merges occur.

    Returns:
        a new dict with the merged content
    """
    for k, v in d1.items():
        # collections.MutableMapping was removed in Python 3.10; the
        # ABC lives in collections.abc.
        if k in d2 and all(
            isinstance(e, collections.abc.MutableMapping) for e in (v, d2[k])
        ):
            d2[k] = rec_merge(v, d2[k])
    d3 = d1.copy()
    d3.update(d2)
    return d3
Recursively merge two dictionaries
50,955
def load_terms ( fo : IO , metadata : dict , forceupdate : bool ) : version = metadata [ "metadata" ] [ "version" ] with timy . Timer ( "Load Terms" ) as timer : es = bel . db . elasticsearch . get_client ( ) es_version = version . replace ( "T" , "" ) . replace ( "-" , "" ) . replace ( ":" , "" ) index_prefix = f"term...
Load terms into Elasticsearch and ArangoDB
50,956
def terms_iterator_for_elasticsearch ( fo : IO , index_name : str ) : species_list = config [ "bel_resources" ] . get ( "species_list" , [ ] ) fo . seek ( 0 ) with gzip . open ( fo , "rt" ) as f : for line in f : term = json . loads ( line ) if "term" not in term : continue term = term [ "term" ] species_id = term . ge...
Add index_name to term documents for bulk load
50,957
def get_pubtator ( pmid ) : r = get_url ( PUBTATOR_TMPL . replace ( "PMID" , pmid ) , timeout = 10 ) if r and r . status_code == 200 : pubtator = r . json ( ) [ 0 ] else : log . error ( f"Cannot access Pubtator, status: {r.status_code} url: {PUBTATOR_TMPL.replace('PMID', pmid)}" ) return None known_types = [ "CHEBI" , ...
Get Pubtator Bioconcepts from Pubmed Abstract
50,958
def process_pub_date(year, mon, day):
    """Create pub_date from what Pubmed provides in the Journal PubDate entry.

    A month given as a name (e.g. "Jan") is converted via strptime into
    a zero-padded ISO date; otherwise the pieces are joined as-is.
    Returns None when no year is available.
    """
    if not year:
        return None
    if re.match("[a-zA-Z]+", mon):
        parsed = datetime.datetime.strptime(f"{year}-{mon}-{day}", "%Y-%b-%d")
        return parsed.strftime("%Y-%m-%d")
    return f"{year}-{mon}-{day}"
Create pub_date from what Pubmed provides in Journal PubDate entry
50,959
def enhance_pubmed_annotations ( pubmed : Mapping [ str , Any ] ) -> Mapping [ str , Any ] : text = pubmed [ "title" ] + pubmed [ "abstract" ] annotations = { } for nsarg in pubmed [ "annotations" ] : url = f'{config["bel_api"]["servers"]["api_url"]}/terms/{url_path_param_quoting(nsarg)}' log . info ( f"URL: {url}" ) r...
Enhance pubmed namespace IDs
50,960
def get_orthologs ( canonical_gene_id : str , species : list = [ ] ) -> List [ dict ] : gene_id_key = bel . db . arangodb . arango_id_to_key ( canonical_gene_id ) orthologs = { } if species : query_filter = f"FILTER vertex.tax_id IN {species}" query = f cursor = belns_db . aql . execute ( query , batch_size = 20 ) resu...
Get orthologs for given gene_id and species
50,961
def normalize_value(val):
    """Normalize strings with booleans into Python types.

    'true'/'false' (any case) become True/False; every other value is
    returned unchanged.
    """
    # Only strings can be boolean words; the original called .lower()
    # on any non-None value and raised AttributeError for e.g. ints.
    if isinstance(val, str):
        if val.lower() == 'false':
            return False
        if val.lower() == 'true':
            return True
    return val
Normalize strings with booleans into Python types .
50,962
def normalize_dictionary_values(dictionary):
    """Normalize the values in a dictionary recursively.

    Dict values are normalized recursively, list values are shallow
    copied, and all other values go through normalize_value().
    Mutates and returns the same dictionary.
    """
    # dict.iteritems() is Python 2 only; .items() is the Python 3 form.
    for key, val in dictionary.items():
        if isinstance(val, dict):
            dictionary[key] = normalize_dictionary_values(val)
        elif isinstance(val, list):
            dictionary[key] = list(val)
        else:
            dictionary[key] = normalize_value(val)
    return dictionary
Normalizes the values in a dictionary recursively .
50,963
def timespan(start_time):
    """Return elapsed time in milliseconds since start_time."""
    delta = datetime.datetime.now() - start_time
    return delta.total_seconds() * 1000
Return time in milliseconds from start_time
50,964
def first_true(iterable, default=False, pred=None):
    """Return the first "true" value in the iterable.

    If no true value is found, return *default*. If *pred* is given,
    return the first item for which pred(item) is truthy.
    """
    for item in filter(pred, iterable):
        return item
    return default
Returns the first true value in the iterable .
50,965
def _create_hash_from_doc(doc: Mapping[str, Any]) -> str:
    """Create hash id from an edge record.

    Serializes the doc deterministically (sorted keys) before hashing
    so equal docs always produce the same id.
    """
    canonical_json = json.dumps(doc, sort_keys=True)
    return _create_hash(canonical_json)
Create hash Id from edge record
50,966
def elapsed(self):
    """Return the elapsed time scaled by self.factor.

    While self.end is unset (inside the context-manager scope) this is
    the time from start until now; once self.end is set it is the time
    between entering and exiting the scope.
    """
    end_point = self() if self.end is None else self.end
    return (end_point - self.start) * self.factor
Return the current elapsed time since start . If the elapsed property is called in the context manager scope the elapsed time between start and property access is returned . However if it is accessed outside of the context manager scope it returns the elapsed time between entering and exiting the scope . The elapsed prop...
50,967
def load_edges_into_db ( nanopub_id : str , nanopub_url : str , edges : list = [ ] , edges_coll_name : str = edges_coll_name , nodes_coll_name : str = nodes_coll_name , ) : start_time = datetime . datetime . now ( ) query = f try : edgestore_db . aql . execute ( query ) except Exception as e : log . debug ( f"Could not...
Load edges into Edgestore
50,968
def edge_iterator ( edges = [ ] , edges_fn = None ) : for edge in itertools . chain ( edges , files . read_edges ( edges_fn ) ) : subj = copy . deepcopy ( edge [ "edge" ] [ "subject" ] ) subj_id = str ( utils . _create_hash_from_doc ( subj ) ) subj [ "_key" ] = subj_id obj = copy . deepcopy ( edge [ "edge" ] [ "object"...
Yield documents from edge for loading into ArangoDB
50,969
def update_nanopubstore_start_dt ( url : str , start_dt : str ) : hostname = urllib . parse . urlsplit ( url ) [ 1 ] start_dates_doc = state_mgmt . get ( start_dates_doc_key ) if not start_dates_doc : start_dates_doc = { "_key" : start_dates_doc_key , "start_dates" : [ { "nanopubstore" : hostname , "start_dt" : start_d...
Add nanopubstore start_dt to belapi . state_mgmt collection
50,970
def get_nanopubstore_start_dt ( url : str ) : hostname = urllib . parse . urlsplit ( url ) [ 1 ] start_dates_doc = state_mgmt . get ( start_dates_doc_key ) if start_dates_doc and start_dates_doc . get ( "start_dates" ) : date = [ dt [ "start_dt" ] for dt in start_dates_doc [ "start_dates" ] if dt [ "nanopubstore" ] == ...
Get last start_dt recorded for getting new nanopub ID s
50,971
def get_nanopub_urls ( ns_root_url : str = None , start_dt : str = None ) -> dict : if not ns_root_url : ns_root_url = config [ "bel_api" ] [ "servers" ] [ "nanopubstore" ] url = f"{ns_root_url}/nanopubs/timed" if not start_dt : start_dt = get_nanopubstore_start_dt ( ns_root_url ) params = { "startTime" : start_dt , "p...
Get modified and deleted nanopub urls
50,972
def get_nanopub(url):
    """Get Nanopub from nanopubstore given url.

    Returns the decoded JSON body, or {} when the request fails or the
    body is empty/falsy.
    """
    response = bel.utils.get_url(url, cache=False)
    if not response or not response.json():
        return {}
    return response.json()
Get Nanopub from nanopubstore given url
50,973
def convert_belscript ( ctx , input_fn , output_fn ) : try : ( out_fh , yaml_flag , jsonl_flag , json_flag , ) = bel . nanopub . files . create_nanopubs_fh ( output_fn ) if yaml_flag or json_flag : docs = [ ] if re . search ( "gz$" , input_fn ) : f = gzip . open ( input_fn , "rt" ) else : f = open ( input_fn , "rt" ) f...
Convert belscript to nanopubs_bel format
50,974
def reformat ( ctx , input_fn , output_fn ) : try : ( out_fh , yaml_flag , jsonl_flag , json_flag , ) = bel . nanopub . files . create_nanopubs_fh ( output_fn ) if yaml_flag or json_flag : docs = [ ] if re . search ( "gz$" , input_fn ) : f = gzip . open ( input_fn , "rt" ) else : f = open ( input_fn , "rt" ) for np in ...
Reformat between JSON YAML JSONLines formats
50,975
def nanopub_stats ( ctx , input_fn ) : counts = { "nanopubs" : 0 , "assertions" : { "total" : 0 , "subject_only" : 0 , "nested" : 0 , "relations" : { } } , } for np in bnf . read_nanopubs ( input_fn ) : if "nanopub" in np : counts [ "nanopubs" ] += 1 counts [ "assertions" ] [ "total" ] += len ( np [ "nanopub" ] [ "asse...
Collect statistics on nanopub file
50,976
def edges ( ctx , statement , rules , species , namespace_targets , version , api , config_fn ) : if config_fn : config = bel . db . Config . merge_config ( ctx . config , override_config_fn = config_fn ) else : config = ctx . config if namespace_targets : namespace_targets = json . loads ( namespace_targets ) if rules...
Create BEL Edges from BEL Statement
50,977
def elasticsearch(delete, index_name):
    """Setup Elasticsearch namespace indexes.

    When delete is truthy the client is fetched with delete=True
    (presumably triggering index deletion inside get_client -- confirm
    in bel.db.elasticsearch); otherwise a plain client fetch runs.
    NOTE(review): index_name is accepted but unused by this body.
    """
    if delete:
        bel.db.elasticsearch.get_client(delete=True)
    else:
        bel.db.elasticsearch.get_client()
Setup Elasticsearch namespace indexes
50,978
def arangodb(delete, db_name):
    """Setup ArangoDB database.

    Optionally deletes the named database first, then (re)creates the
    handle for the 'belns' or 'edgestore' database.
    """
    # Bug fix: ``client`` was only bound inside the delete branch, so
    # running without delete raised NameError on the handle calls below.
    client = bel.db.arangodb.get_client()
    if delete:
        bel.db.arangodb.delete_database(client, db_name)
    if db_name == "belns":
        bel.db.arangodb.get_belns_handle(client)
    elif db_name == "edgestore":
        bel.db.arangodb.get_edgestore_handle(client)
Setup ArangoDB database
50,979
def validate_to_schema ( nanopub , schema ) -> Tuple [ bool , List [ Tuple [ str , str ] ] ] : v = jsonschema . Draft4Validator ( schema ) messages = [ ] errors = sorted ( v . iter_errors ( nanopub ) , key = lambda e : e . path ) for error in errors : for suberror in sorted ( error . context , key = lambda e : e . sche...
Validate nanopub against jsonschema for nanopub
50,980
def hash_nanopub ( nanopub : Mapping [ str , Any ] ) -> str : hash_list = [ ] hash_list . append ( nanopub [ "nanopub" ] [ "type" ] . get ( "name" , "" ) . strip ( ) ) hash_list . append ( nanopub [ "nanopub" ] [ "type" ] . get ( "version" , "" ) . strip ( ) ) if nanopub [ "nanopub" ] [ "citation" ] . get ( "database" ...
Create CityHash64 from nanopub for duplicate check
50,981
def validate ( self , nanopub : Mapping [ str , Any ] ) -> Tuple [ bool , List [ Tuple [ str , str ] ] ] : ( is_valid , messages ) = validate_to_schema ( nanopub , self . nanopub_schema ) if not is_valid : return messages if nanopub [ "nanopub" ] [ "type" ] [ "name" ] . upper ( ) == "BEL" : bel_version = nanopub [ "nan...
Validates using the nanopub schema
50,982
def bel_edges ( self , nanopub : Mapping [ str , Any ] , namespace_targets : Mapping [ str , List [ str ] ] = { } , rules : List [ str ] = [ ] , orthologize_target : str = None , ) -> List [ Mapping [ str , Any ] ] : edges = bel . edge . edges . create_edges ( nanopub , self . endpoint , namespace_targets = namespace_t...
Create BEL Edges from BEL nanopub
50,983
def main_hrun ( ) : parser = argparse . ArgumentParser ( description = "Tools for http(s) test. Base on rtsf." ) parser . add_argument ( '--log-level' , default = 'INFO' , help = "Specify logging level, default is INFO." ) parser . add_argument ( '--log-file' , help = "Write logs to specified file path." ) parser . add...
parse command line options and run commands .
50,984
def _void_array_to_nested_list(res, _func, _args):
    """Dereference the FFI result to a list of coordinates.

    Reads res.coords (res.coords.len pairs of doubles) into an (n, 2)
    numpy view over the C buffer and copies it out as nested Python
    lists. The FFI-owned buffer is always released via drop_array,
    even when dereferencing fails.
    """
    try:
        n_coords = res.coords.len
        double_ptr = cast(res.coords.data, POINTER(c_double))
        view = np.ctypeslib.as_array(double_ptr, (n_coords, 2))
        return view.tolist()
    finally:
        # free the foreign buffer regardless of success
        drop_array(res.coords)
Dereference the FFI result to a list of coordinates
50,985
def lower_ext(abspath):
    """Convert the file extension of abspath to lowercase.

    The rest of the path is left untouched.
    """
    stem, extension = os.path.splitext(abspath)
    return stem + extension.lower()
Convert file extension to lowercase .
50,986
def pretty_dumps(data):
    """Return a JSON string in pretty format (sorted keys, 4-space indent).

    Tries ensure_ascii=False first to keep non-ASCII text readable,
    falling back to ASCII-escaped output if serialization fails.
    """
    try:
        return json.dumps(data, sort_keys=True, indent=4, ensure_ascii=False)
    except Exception:
        # A bare except here also swallowed KeyboardInterrupt and
        # SystemExit; Exception is the widest sensible net.
        return json.dumps(data, sort_keys=True, indent=4, ensure_ascii=True)
Return json string in pretty format .
50,987
def _get_pad_left_right ( small , large ) : assert small < large , "Can only pad when new size larger than old size" padsize = large - small if padsize % 2 != 0 : leftpad = ( padsize - 1 ) / 2 else : leftpad = padsize / 2 rightpad = padsize - leftpad return int ( leftpad ) , int ( rightpad )
Compute left and right padding values .
50,988
def pad_add ( av , size = None , stlen = 10 ) : if size is None : size = list ( ) for s in av . shape : size . append ( int ( 2 * s ) ) elif not hasattr ( size , "__len__" ) : size = [ size ] assert len ( av . shape ) in [ 1 , 2 ] , "Only 1D and 2D arrays!" assert len ( av . shape ) == len ( size ) , "`size` must have ...
Perform linear padding for complex array
50,989
def _pad_add_1d ( av , size , stlen ) : assert len ( size ) == 1 padx = _get_pad_left_right ( av . shape [ 0 ] , size [ 0 ] ) mask = np . zeros ( av . shape , dtype = bool ) mask [ stlen : - stlen ] = True border = av [ ~ mask ] if av . dtype . name . count ( "complex" ) : padval = np . average ( np . abs ( border ) ) ...
1D component of pad_add
50,990
def pad_rem ( pv , size = None ) : if size is None : size = list ( ) for s in pv . shape : assert s % 2 == 0 , "Uneven size; specify correct size of output!" size . append ( int ( s / 2 ) ) elif not hasattr ( size , "__len__" ) : size = [ size ] assert len ( pv . shape ) in [ 1 , 2 ] , "Only 1D and 2D arrays!" assert l...
Removes linear padding from array
50,991
def rate_limit ( self ) : rate_limited_msg = False while True : is_rate_limited = self . limit . is_rate_limited ( uuid ) if is_rate_limited : time . sleep ( 0.3 ) if not rate_limited_msg : self . logger . info ( 'Rate limit active..please wait...' ) rate_limited_msg = True if not is_rate_limited : self . logger . info...
Simple rate limit function using redis
50,992
def scan_file ( self , filename , apikey ) : url = self . base_url + "file/scan" params = { 'apikey' : apikey } scanfile = { "file" : open ( filename , 'rb' ) } response = requests . post ( url , files = scanfile , params = params ) rate_limit_clear = self . rate_limit ( ) if rate_limit_clear : if response . status_cod...
Sends a file to virus total for assessment
50,993
def rescan_file ( self , filename , sha256hash , apikey ) : url = self . base_url + "file/rescan" params = { 'apikey' : apikey , 'resource' : sha256hash } rate_limit_clear = self . rate_limit ( ) if rate_limit_clear : response = requests . post ( url , params = params ) if response . status_code == self . HTTP_OK : sel...
Just send the hash and check the date .
50,994
def binary_report ( self , sha256sum , apikey ) : url = self . base_url + "file/report" params = { "apikey" : apikey , "resource" : sha256sum } rate_limit_clear = self . rate_limit ( ) if rate_limit_clear : response = requests . post ( url , data = params ) if response . status_code == self . HTTP_OK : json_response = ...
retrieve report from file scan
50,995
def send_ip ( self , ipaddr , apikey ) : url = self . base_url + "ip-address/report" parameters = { "ip" : ipaddr , "apikey" : apikey } rate_limit_clear = self . rate_limit ( ) if rate_limit_clear : response = requests . get ( url , params = parameters ) if response . status_code == self . HTTP_OK : json_response = res...
Send IP address for list of past malicious domain associations
50,996
def url_report ( self , scan_url , apikey ) : url = self . base_url + "url/report" params = { "apikey" : apikey , 'resource' : scan_url } rate_limit_clear = self . rate_limit ( ) if rate_limit_clear : response = requests . post ( url , params = params , headers = self . headers ) if response . status_code == self . HTT...
Send URLs for list of past malicious associations
50,997
def _read_requirements ( filename , extra_packages ) : requirements_file = open ( filename ) . read ( ) hard_requirements = [ ] for line in requirements_file . splitlines ( ) : if _is_requirement ( line ) : if line . find ( ';' ) > - 1 : dep , condition = tuple ( line . split ( ';' ) ) extra_packages [ condition . stri...
Returns a list of package requirements read from the file .
50,998
def field ( ctx , text , index , delimiter = ' ' ) : splits = text . split ( delimiter ) splits = [ f for f in splits if f != delimiter and len ( f . strip ( ) ) > 0 ] index = conversions . to_integer ( index , ctx ) if index < 1 : raise ValueError ( 'Field index cannot be less than 1' ) if index <= len ( splits ) : re...
Reference a field in string separated by a delimiter
50,999
def epoch(ctx, datetime):
    """Convert the given date to the number of seconds since January 1st 1970 UTC."""
    as_datetime = conversions.to_datetime(datetime, ctx)
    return conversions.to_decimal(str(as_datetime.timestamp()), ctx)
Converts the given date to the number of seconds since January 1st 1970 UTC