idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
50,800
def _load_resources(self):
    """Rewrite geometry columns as empty text columns, then delegate to the
    base class loader.

    Every ``Table.Column`` whose datatype is ``geometry`` gets an
    ``^empty_str`` transform and a ``text`` datatype, so the geometry data
    is dropped from the generated files.
    """
    for table in self.doc.find('Root.Table'):
        for column in table.find('Table.Column'):
            if column.get_value('datatype') != 'geometry':
                continue
            column['transform'] = '^empty_str'
            column['datatype'] = 'text'
    return super()._load_resources()
Remove the geography from the files since it isn't particularly useful in Excel
50,801
def make_bucket_policy_statements ( bucket ) : import yaml from os . path import dirname , join , abspath import copy import metatab with open ( join ( dirname ( abspath ( metatab . __file__ ) ) , 'support' , 'policy_parts.yaml' ) ) as f : parts = yaml . load ( f ) statements = { } cl = copy . deepcopy ( parts [ 'list'...
Return the statements in a bucket policy as a dict of dicts
50,802
def bucket_dict_to_policy ( args , bucket_name , d ) : import json iam = get_resource ( args , 'iam' ) statements = make_bucket_policy_statements ( bucket_name ) user_stats = set ( ) for ( user , prefix ) , mode in d . items ( ) : user_stats . add ( ( user , 'list' ) ) user_stats . add ( ( user , 'bucket' ) ) if mode =...
Create a bucket policy document from a permissions dict .
50,803
def bucket_policy_to_dict ( policy ) : import json if not isinstance ( policy , dict ) : policy = json . loads ( policy ) statements = { s [ 'Sid' ] : s for s in policy [ 'Statement' ] } d = { } for rw in ( 'Read' , 'Write' ) : for prefix in TOP_LEVEL_DIRS : sid = rw . title ( ) + prefix . title ( ) if sid in statement...
Produce a dictionary of read write permissions for an existing bucket policy document
50,804
def get_iam_account(l, args, user_name):
    """Return the local Account for *user_name*.

    Fetches the IAM ``User`` resource, loads its attributes, and looks up
    (or creates) the account record keyed by the user's ARN.
    """
    iam_resource = get_resource(args, 'iam')
    iam_user = iam_resource.User(user_name)
    iam_user.load()
    return l.find_or_new_account(iam_user.arn)
Return the local Account for a user name by fetching User and looking up the arn .
50,805
def getSVD ( data , k , getComponents = False , getS = False , normalization = 'mean' ) : if normalization == 'nanmean' : data2 = data . tordd ( ) . sortByKey ( ) . values ( ) . map ( lambda x : _convert_to_vector ( x . flatten ( ) - np . nanmean ( x ) ) ) elif normalization == 'mean' : data2 = data . tordd ( ) . sortB...
Wrapper for computeSVD that will normalize and handle a Thunder Images object
50,806
def human_duration(duration_seconds: float) -> str:
    """Convert a duration in seconds into a short human-friendly string.

    Sub-millisecond durations render as ``'0 ms'``, sub-second durations in
    whole milliseconds, and anything else in whole seconds (truncated).
    """
    if duration_seconds < 0.001:
        return '0 ms'
    if duration_seconds < 1:
        millis = int(duration_seconds * 1000)
        return '{} ms'.format(millis)
    return '{} s'.format(int(duration_seconds))
Convert a duration in seconds into a human friendly string .
50,807
def call_with_retry ( func : Callable , exceptions , max_retries : int , logger : Logger , * args , ** kwargs ) : attempt = 0 while True : try : return func ( * args , ** kwargs ) except exceptions as e : attempt += 1 if attempt >= max_retries : raise delay = exponential_backoff ( attempt , cap = 60 ) logger . warning ...
Call a function and retry it on failure .
50,808
def exponential_backoff(attempt: int, cap: int = 1200) -> timedelta:
    """Calculate a retry delay using capped exponential backoff with jitter.

    The raw delay doubles each attempt (``3 * 2**attempt``) up to *cap*
    seconds; the returned delay is half of that plus random jitter of up to
    the same half, so it lies in roughly ``[temp/2, temp]``.

    Fix: ``random.randint`` requires integer bounds — ``temp / 2`` is a
    float in Python 3 (deprecated in 3.10, a TypeError since 3.12), so the
    jitter bound now uses integer division ``temp // 2``.

    :param attempt: 0-based retry attempt number.
    :param cap: maximum raw delay in seconds before halving/jitter.
    :return: the delay to wait before the next retry.
    """
    base = 3
    temp = min(base * 2 ** attempt, cap)
    return timedelta(seconds=temp / 2 + random.randint(0, temp // 2))
Calculate a delay to retry using an exponential backoff algorithm .
50,809
def float_to_fp(signed, n_bits, n_frac):
    """Return a converter from float to saturating fixed-point integers.

    The returned function scales its argument by ``2**n_frac``, truncates
    toward zero, and clamps the result to the representable range of
    *n_bits* bits (two's complement when *signed* is true).
    """
    if signed:
        max_v = (1 << (n_bits - 1)) - 1
        min_v = -max_v - 1
    else:
        min_v = 0
        max_v = (1 << n_bits) - 1
    scale = 2.0 ** n_frac

    def bitsk(value):
        # Scale, truncate, then saturate into [min_v, max_v].
        scaled = int(scale * value)
        return max(min(max_v, scaled), min_v)

    return bitsk
Return a function to convert a floating point value to a fixed point value .
50,810
def get_metadata_path(name):
    """Return the path of the packaged reference-metadata JSON for *name*.

    The file is resolved inside the installed ``voobly`` package under
    ``METADATA_PATH``.
    """
    relative_path = os.path.join(METADATA_PATH, '{}.json'.format(name))
    return pkg_resources.resource_filename('voobly', relative_path)
Get reference metadata file path .
50,811
def _make_request ( session , url , argument = None , params = None , raw = False ) : if not params : params = { } params [ 'key' ] = session . auth . key try : if argument : request_url = '{}{}{}{}' . format ( session . auth . base_url , VOOBLY_API_URL , url , argument ) else : request_url = '{}{}' . format ( VOOBLY_A...
Make a request to API endpoint .
50,812
def make_scrape_request ( session , url , mode = 'get' , data = None ) : try : html = session . request ( mode , url , data = data ) except RequestException : raise VooblyError ( 'failed to connect' ) if SCRAPE_FETCH_ERROR in html . text : raise VooblyError ( 'not logged in' ) if html . status_code != 200 or SCRAPE_PAG...
Make a request to URL .
50,813
def get_ladder ( session , ladder_id , user_id = None , user_ids = None , start = 0 , limit = LADDER_RESULT_LIMIT ) : params = { 'start' : start , 'limit' : limit } if isinstance ( ladder_id , str ) : ladder_id = lookup_ladder_id ( ladder_id ) if limit > LADDER_RESULT_LIMIT : raise VooblyError ( 'limited to 40 rows' ) ...
Get ladder .
50,814
def get_lobbies(session, game_id):
    """Get lobbies for a game.

    Accepts either a numeric game id or a game name, which is resolved via
    ``lookup_game_id``. The API returns each lobby's ladders as a single
    delimited string, which is split into a list here.
    """
    if isinstance(game_id, str):
        game_id = lookup_game_id(game_id)
    lobbies = _make_request(session, LOBBY_URL, game_id)
    for lobby in lobbies:
        if len(lobby['ladders']) > 0:
            # NOTE(review): the [:-1] presumably strips a trailing '|'
            # delimiter before splitting — confirm against the API payload.
            lobby['ladders'] = lobby['ladders'][:-1].split('|')
    return lobbies
Get lobbies for a game .
50,815
def get_user(session, user_id):
    """Get user data by numeric id or by name.

    A non-numeric *user_id* is first resolved to a numeric id with
    ``find_user``.

    :raises VooblyError: when the id does not match any user.
    """
    try:
        uid = int(user_id)
    except ValueError:
        uid = find_user(session, user_id)
    users = _make_request(session, USER_URL, uid)
    if not users:
        raise VooblyError('user id not found')
    return users[0]
Get user .
50,816
def find_user(session, username):
    """Find a user by name and return their numeric user ID.

    :raises VooblyError: when no user matches or the API returns a
        non-numeric uid.
    """
    matches = _make_request(session, FIND_USER_URL, username)
    if not matches:
        raise VooblyError('user not found')
    try:
        return int(matches[0]['uid'])
    except ValueError:
        raise VooblyError('user not found')
Find user by name - returns user ID .
50,817
def find_users(session, *usernames):
    """Find multiple users by name with a single comma-joined API call."""
    return _make_request(session, FIND_USERS_URL, ','.join(usernames))
Find multiple users by name .
50,818
def user ( session , uid , ladder_ids = None ) : data = get_user ( session , uid ) resp = dict ( data ) if not ladder_ids : return resp resp [ 'ladders' ] = { } for ladder_id in ladder_ids : if isinstance ( ladder_id , str ) : ladder_id = lookup_ladder_id ( ladder_id ) try : ladder_data = dict ( get_ladder ( session , ...
Get all possible user info by name .
50,819
def ladders(session, game_id):
    """Return the list of distinct ladder IDs across all lobbies of a game."""
    if isinstance(game_id, str):
        game_id = lookup_game_id(game_id)
    ladder_ids = set()
    for lobby in get_lobbies(session, game_id):
        ladder_ids.update(lobby['ladders'])
    return list(ladder_ids)
Get a list of ladder IDs .
50,820
def get_clan_matches(session, subdomain, clan_id, from_timestamp=None, limit=None):
    """Get recent matches played by a clan on the given subdomain."""
    url = 'https://{}.voobly.com/{}/{}/0'.format(
        subdomain, TEAM_MATCHES_URL, clan_id)
    return get_recent_matches(session, url, from_timestamp, limit)
Get recent matches by clan .
50,821
def get_user_matches(session, user_id, from_timestamp=None, limit=None):
    """Get recent matches played by a user."""
    url = '{}{}/{}/Matches/games/matches/user/{}/0'.format(
        session.auth.base_url, PROFILE_URL, user_id, user_id)
    return get_recent_matches(session, url, from_timestamp, limit)
Get recent matches by user .
50,822
def get_recent_matches ( session , init_url , from_timestamp , limit ) : if not from_timestamp : from_timestamp = datetime . datetime . now ( ) - datetime . timedelta ( days = 1 ) matches = [ ] page_id = 0 done = False while not done and page_id < MAX_MATCH_PAGE_ID : url = '{}/{}' . format ( init_url , page_id ) parsed...
Get recently played user matches .
50,823
def get_ladder_matches ( session , ladder_id , from_timestamp = None , limit = LADDER_MATCH_LIMIT ) : if not from_timestamp : from_timestamp = datetime . datetime . now ( ) - datetime . timedelta ( days = 1 ) matches = [ ] page_id = 0 done = False i = 0 while not done and page_id < MAX_LADDER_PAGE_ID : url = '{}{}/{}/{...
Get recently played ladder matches .
50,824
def get_match ( session , match_id ) : url = '{}{}/{}' . format ( session . auth . base_url , MATCH_URL , match_id ) parsed = make_scrape_request ( session , url ) game = parsed . find ( 'h3' ) . text if game != GAME_AOC : raise ValueError ( 'not an aoc match' ) date_played = parsed . find ( text = MATCH_DATE_PLAYED ) ...
Get match metadata .
50,825
def download_rec ( session , rec_url , target_path ) : try : resp = session . get ( session . auth . base_url + rec_url ) except RequestException : raise VooblyError ( 'failed to connect for download' ) try : downloaded = zipfile . ZipFile ( io . BytesIO ( resp . content ) ) downloaded . extractall ( target_path ) exce...
Download and extract a recorded game .
50,826
def login ( session ) : if not session . auth . username or not session . auth . password : raise VooblyError ( 'must supply username and password' ) _LOGGER . info ( "logging in (no valid cookie found)" ) session . cookies . clear ( ) try : session . get ( session . auth . base_url + LOGIN_PAGE ) resp = session . post...
Login to Voobly .
50,827
def get_session ( key = None , username = None , password = None , cache = True , cache_expiry = datetime . timedelta ( days = 7 ) , cookie_path = COOKIE_PATH , backend = 'memory' , version = VERSION_GLOBAL ) : class VooblyAuth ( AuthBase ) : def __init__ ( self , key , username , password , cookie_path , version ) : s...
Get Voobly API session .
50,828
def main():
    """Entry point for the Hyper-V Neutron Agent: initialise config and
    logging, then run the agent's daemon loop."""
    neutron_config.register_agent_state_opts_helper(CONF)
    common_config.init(sys.argv[1:])
    neutron_config.setup_logging()
    agent = HyperVNeutronAgent()
    LOG.info("Agent initialized successfully, now running... ")
    agent.daemon_loop()
The entry point for the Hyper - V Neutron Agent .
50,829
def _setup_qos_extension(self):
    """Set up the QoS agent extension when enabled in configuration.

    A no-op unless ``CONF.AGENT.enable_qos_extension`` is set; otherwise the
    extension is created, wired to this agent, and initialised for the
    ``hyperv`` driver.
    """
    # Guard clause: QoS support is opt-in.
    if not CONF.AGENT.enable_qos_extension:
        return
    self._qos_ext = qos_extension.QosAgentExtension()
    self._qos_ext.consume_api(self)
    self._qos_ext.initialize(self._connection, 'hyperv')
Setup the QOS extension if it is required .
50,830
def run_cell_magic(self, magic_name, line, cell):
    """Run a limited set of cell magics from scripts, without IPython.

    Only ``bash`` and ``metatab`` are supported; any other magic name is
    silently ignored.
    """
    dispatch = {
        'bash': lambda: self.shebang("bash", cell),
        'metatab': lambda: self.mm.metatab(line, cell),
    }
    handler = dispatch.get(magic_name)
    if handler is not None:
        handler()
Run a limited number of magics from scripts without IPython
50,831
def var_expand(self, cmd, depth=0, formatter=DollarFormatter()):
    """Expand python variables in a string.

    The expansion namespace is a copy of ``self.user_ns`` overlaid with the
    local variables of the caller's frame at the requested *depth*. Any
    failure to access the frame or to format leaves *cmd* unchanged.
    """
    namespace = self.user_ns.copy()
    try:
        caller = sys._getframe(depth + 1)
    except ValueError:
        # Depth exceeds the stack; expand from user_ns only.
        pass
    else:
        namespace.update(caller.f_locals)
    try:
        return formatter.vformat(cmd, args=[], kwargs=namespace)
    except Exception:
        # Best-effort expansion: return the command untouched on error.
        return cmd
Expand python variables in a string .
50,832
def shebang ( self , line , cell ) : argv = arg_split ( line , posix = not sys . platform . startswith ( 'win' ) ) args , cmd = self . shebang . parser . parse_known_args ( argv ) try : p = Popen ( cmd , stdout = PIPE , stderr = PIPE , stdin = PIPE ) except OSError as e : if e . errno == errno . ENOENT : print ( "Could...
Run a cell via a shell command
50,833
def _get_vertices_neighbours ( nets ) : zero_fn = ( lambda : 0 ) vertices_neighbours = defaultdict ( lambda : defaultdict ( zero_fn ) ) for net in nets : if net . weight != 0 : for sink in net . sinks : vertices_neighbours [ net . source ] [ sink ] += net . weight vertices_neighbours [ sink ] [ net . source ] += net . ...
Generate a listing of each vertex's immediate neighbours in an undirected interpretation of a graph .
50,834
def _dfs ( vertex , vertices_neighbours ) : visited = set ( ) to_visit = deque ( [ vertex ] ) while to_visit : vertex = to_visit . pop ( ) if vertex not in visited : yield vertex visited . add ( vertex ) to_visit . extend ( vertices_neighbours [ vertex ] )
Generate all the vertices connected to the supplied vertex in depth - first - search order .
50,835
def _get_connected_subgraphs ( vertices , vertices_neighbours ) : remaining_vertices = set ( vertices ) subgraphs = [ ] while remaining_vertices : subgraph = set ( _dfs ( remaining_vertices . pop ( ) , vertices_neighbours ) ) remaining_vertices . difference_update ( subgraph ) subgraphs . append ( subgraph ) return sub...
Break a graph containing unconnected subgraphs into a list of connected subgraphs .
50,836
def _cuthill_mckee ( vertices , vertices_neighbours ) : vertices_degrees = { v : sum ( itervalues ( vertices_neighbours [ v ] ) ) for v in vertices } peripheral_vertex = min ( vertices , key = ( lambda v : vertices_degrees [ v ] ) ) visited = set ( [ peripheral_vertex ] ) cm_order = [ peripheral_vertex ] previous_layer...
Yield the Cuthill - McKee order for a connected undirected graph .
50,837
def rcm_vertex_order(vertices_resources, nets):
    """Iterate over all vertices in Reverse-Cuthill-McKee order.

    Each connected subgraph is ordered with Cuthill-McKee and emitted in
    reverse, one subgraph after another.
    """
    neighbours = _get_vertices_neighbours(nets)
    for subgraph in _get_connected_subgraphs(vertices_resources, neighbours):
        yield from reversed(_cuthill_mckee(subgraph, neighbours))
A generator which iterates over the vertices in Reverse - Cuthill - McKee order .
50,838
def rcm_chip_order ( machine ) : vertices = list ( machine ) nets = [ ] for ( x , y ) in vertices : neighbours = [ ] for link in Links : if ( x , y , link ) in machine : dx , dy = link . to_vector ( ) neighbour = ( ( x + dx ) % machine . width , ( y + dy ) % machine . height ) if neighbour in machine : neighbours . app...
A generator which iterates over a set of chips in a machine in Reverse - Cuthill - McKee order .
50,839
def register_datadog ( tracer = None , namespace : Optional [ str ] = None , service : str = 'spinach' ) : if tracer is None : from ddtrace import tracer @ signals . job_started . connect_via ( namespace ) def job_started ( namespace , job , ** kwargs ) : tracer . trace ( 'spinach.task' , service = service , span_type ...
Register the Datadog integration .
50,840
def copy_reference(resource, doc, env, *args, **kwargs):
    """Row-generating function that yields rows from the reference of the
    same name.

    This permits an upstream package to be copied and modified by this
    package while being formally referenced as a dependency.
    """
    for row in doc.reference(resource.name):
        yield row
A row - generating function that yields from a reference . This permits an upstream package to be copied and modified by this package while being formally referenced as a dependency
50,841
def copy_reference_group ( resource , doc , env , * args , ** kwargs ) : all_headers = [ ] for ref in doc . references ( ) : if ref . get_value ( 'Group' ) == resource . get_value ( 'Group' ) : for row in ref . iterrowproxy ( ) : all_headers . append ( list ( row . keys ( ) ) ) break headers = [ ] for e in zip ( * all_...
A Row generating function that copies all of the references that have the same Group argument as this reference
50,842
def is_older_than_metadata(self):
    """Return True if the package save file is older than the metadata.

    When True the package should be rebuilt. Returns False when either
    modification time cannot be determined.
    """
    # ``doc_file`` may be an object with a ``path`` attribute or a bare path.
    save_path = getattr(self.doc_file, 'path', self.doc_file)
    metadata_path = self._doc.ref.path
    try:
        return getmtime(metadata_path) > getmtime(save_path)
    except (FileNotFoundError, OSError):
        return False
Return True if the package save file is older than the metadata . If it is , it should be rebuilt . Returns False if the time of either can't be determined
50,843
def _load_resource ( self , source_r , abs_path = False ) : from itertools import islice from metapack . exc import MetapackError from os . path import splitext r = self . datafile ( source_r . name ) if self . reuse_resources : self . prt ( "Re-using data for '{}' " . format ( r . name ) ) else : self . prt ( "Loading...
The CSV package has no resources so we just need to resolve the URLs to them . Usually the CSV package is built from a file system package on a publicly accessible server .
50,844
def _load_documentation ( self , term , contents , file_name ) : try : title = term [ 'title' ] . value except KeyError : self . warn ( "Documentation has no title, skipping: '{}' " . format ( term . value ) ) return if term . term_is ( 'Root.Readme' ) : package_sub_dir = 'docs' else : try : eu = term . expanded_url pa...
Load a single documentation entry
50,845
def _getPayload ( self , record ) : try : d = record . __dict__ pid = d . pop ( 'process' , 'nopid' ) tid = d . pop ( 'thread' , 'notid' ) payload = { k : v for ( k , v ) in d . items ( ) if k in TOP_KEYS } payload [ 'meta' ] = { k : v for ( k , v ) in d . items ( ) if k in META_KEYS } payload [ 'details' ] = { k : sim...
The data that will be sent to the RESTful API
50,846
def _getEndpoint ( self , add_tags = None ) : return 'https://logs-01.loggly.com/bulk/{0}/tag/{1}/' . format ( self . custom_token , self . _implodeTags ( add_tags = add_tags ) )
Override : build Loggly's RESTful API endpoint
50,847
def _getPayload(self, record):
    """Return the base payload for *record*, augmented with this handler's
    imploded tag list under the ``tags`` key."""
    data = super(LogglyHandler, self)._getPayload(record)
    data['tags'] = self._implodeTags()
    return data
The data that will be sent to loggly .
50,848
def nonver_name(self):
    """Return the non-versioned name.

    Prefers ``as_version(None)``; when that is falsy, falls back to
    stripping the trailing ``-<version>`` component from ``self.name``.
    """
    unversioned = self.as_version(None)
    if unversioned:
        return unversioned
    import re
    return re.sub(r'-[^-]+$', '', self.name)
Return the non versioned name
50,849
def set_wrappable_term(self, v, term):
    """Set *term* in the Root section to value *v*, splitting long values
    across multiple 80-column terms."""
    import textwrap
    # Remove every existing instance of the term before re-adding it.
    for existing in self['Root'].find(term):
        self.remove_term(existing)
    for chunk in textwrap.wrap(v, 80):
        self['Root'].new_term(term, chunk)
Set the Root . Description possibly splitting long descriptions across multiple terms .
50,850
def get_lib_module_dict ( self ) : from importlib import import_module if not self . ref : return { } u = parse_app_url ( self . ref ) if u . scheme == 'file' : if not self . set_sys_path ( ) : return { } for module_name in self . lib_dir_names : try : m = import_module ( module_name ) return { k : v for k , v in m . _...
Load the lib directory as a python module so it can be used to provide functions for rowpipe transforms . This only works for filesystem packages
50,851
def _repr_html_(self, **kwargs):
    """Produce HTML for display in a Jupyter Notebook by converting the
    markdown representation.

    Fix: removed the unused ``jinja2.Template`` import — nothing in the
    body referenced it.
    """
    from markdown import markdown as convert_markdown
    extensions = [
        'markdown.extensions.extra',
        'markdown.extensions.admonition',
    ]
    return convert_markdown(self.markdown, extensions)
Produce HTML for Jupyter Notebook
50,852
def write_csv ( self , path = None ) : self . sort_sections ( [ 'Root' , 'Contacts' , 'Documentation' , 'References' , 'Resources' , 'Citations' , 'Schema' ] ) if self . description : self . description = self . description if self . abstract : self . description = self . abstract t = self [ 'Root' ] . get_or_new_term ...
Write CSV file . Sorts the sections before calling the superclass write_csv
50,853
def dimensions_wizard ( ) : option = yield MultipleChoice ( "What type of SpiNNaker system to you have?" , [ "A single four-chip 'SpiNN-3' board" , "A single forty-eight-chip 'SpiNN-5' board" , "Multiple forty-eight-chip 'SpiNN-5' boards" , "Other" ] , None ) assert 0 <= option < 4 if option == 0 : raise Success ( { "d...
A wizard which attempts to determine the dimensions of a SpiNNaker system .
50,854
def ip_address_wizard ( ) : option = yield MultipleChoice ( "Would you like to auto-detect the SpiNNaker system's IP address?" , [ "Auto-detect" , "Manually Enter IP address or hostname" ] , 0 ) assert 0 <= option < 2 if option == 0 : yield Prompt ( "Make sure the SpiNNaker system is switched on and is not booted." ) y...
A wizard which attempts to determine the IP of a SpiNNaker system .
50,855
def cat(*wizards):
    """A higher-order wizard that is the concatenation of other wizards.

    Runs each wizard to completion in turn, forwarding responses, merging
    the ``Success`` data from each, and finally raising one ``Success``
    with the combined data.
    """
    combined = {}
    for wizard in wizards:
        response = None
        try:
            while True:
                response = yield wizard.send(response)
        except Success as outcome:
            combined.update(outcome.data)
    raise Success(combined)
A higher - order wizard which is the concatenation of a number of other wizards .
50,856
def cli_wrapper ( generator ) : first = True response = None while True : if not first : print ( ) first = False try : message = generator . send ( response ) if isinstance ( message , MultipleChoice ) : print ( message . question ) for num , choice in enumerate ( message . options ) : print ( " {}: {}" . format ( n...
Given a wizard , implements an interactive , command - line , human - friendly interface for it .
50,857
def _net_cost ( net , placements , has_wrap_around_links , machine ) : if has_wrap_around_links : x , y = placements [ net . source ] num_vertices = len ( net . sinks ) + 1 xs = [ x ] * num_vertices ys = [ y ] * num_vertices i = 1 for v in net . sinks : x , y = placements [ v ] xs [ i ] = x ys [ i ] = y i += 1 xs . sor...
Get the cost of a given net .
50,858
def _vertex_net_cost(vertex, v2n, placements, has_wrap_around_links, machine):
    """Return the total cost of the nets connected to *vertex*.

    ``v2n`` maps each vertex to the nets incident on it; each net's cost
    is computed by ``_net_cost``. The 0.0 start value keeps the result a
    float even when the vertex has no nets.
    """
    return sum(
        (_net_cost(net, placements, has_wrap_around_links, machine)
         for net in v2n[vertex]),
        0.0,
    )
Get the total cost of the nets connected to the given vertex .
50,859
def _get_candidate_swap ( resources , location , l2v , vertices_resources , fixed_vertices , machine ) : chip_resources = machine [ location ] vertices = l2v [ location ] to_move = [ ] i = 0 while overallocated ( subtract_resources ( chip_resources , resources ) ) : if i >= len ( vertices ) : return None elif vertices ...
Given a chip location select a set of vertices which would have to be moved elsewhere to accommodate the arrival of the specified set of resources .
50,860
def _swap ( vas , vas_location , vbs , vbs_location , l2v , vertices_resources , placements , machine ) : vas_location2v = l2v [ vas_location ] vbs_location2v = l2v [ vbs_location ] vas_resources = machine [ vas_location ] vbs_resources = machine [ vbs_location ] for va in vas : placements [ va ] = vbs_location vas_loc...
Swap the positions of two sets of vertices .
50,861
def _load_script ( self , filename : str ) -> Script : with open ( path . join ( here , 'redis_scripts' , filename ) , mode = 'rb' ) as f : script_data = f . read ( ) rv = self . _r . register_script ( script_data ) if script_data . startswith ( b'-- idempotency protected script' ) : self . _idempotency_protected_scrip...
Load a Lua script .
50,862
def main():
    """Entry point for the HNV Agent: initialise config and logging, then
    run the agent's daemon loop."""
    neutron_config.register_agent_state_opts_helper(CONF)
    common_config.init(sys.argv[1:])
    neutron_config.setup_logging()
    agent = HNVAgent()
    LOG.info("Agent initialized successfully, now running... ")
    agent.daemon_loop()
The entry point for the HNV Agent .
50,863
def send_scp ( self , buffer_size , x , y , p , cmd , arg1 = 0 , arg2 = 0 , arg3 = 0 , data = b'' , expected_args = 3 , timeout = 0.0 ) : class Callback ( object ) : def __init__ ( self ) : self . packet = None def __call__ ( self , packet ) : self . packet = SCPPacket . from_bytestring ( packet , n_args = expected_arg...
Transmit a packet to the SpiNNaker machine and block until an acknowledgement is received .
50,864
def get_specification ( version : str ) -> Mapping [ str , Any ] : spec_dir = config [ "bel" ] [ "lang" ] [ "specifications" ] spec_dict = { } bel_versions = get_bel_versions ( ) if version not in bel_versions : log . error ( "Cannot get unknown version BEL specification" ) return { "error" : "unknown version of BEL" }...
Get BEL Specification
50,865
def get_bel_versions() -> List[str]:
    """Return the supported BEL language versions, read from the
    ``versions.json`` file in the configured specifications directory."""
    spec_dir = config["bel"]["lang"]["specifications"]
    versions_fn = f"{spec_dir}/versions.json"
    with open(versions_fn, "r") as f:
        return json.load(f)
Get BEL Language versions supported
50,866
def update_specifications ( force : bool = False ) : spec_dir = config [ "bel" ] [ "lang" ] [ "specifications" ] if not os . path . isdir ( spec_dir ) : os . mkdir ( spec_dir ) log . info ( f"Updating BEL Specifications - stored in {spec_dir}" ) if config [ "bel" ] [ "lang" ] [ "specification_github_repo" ] : github_be...
Update BEL specifications
50,867
def github_belspec_files ( spec_dir , force : bool = False ) : if not force : dtnow = datetime . datetime . utcnow ( ) delta = datetime . timedelta ( 1 ) yesterday = dtnow - delta for fn in glob . glob ( f"{spec_dir}/bel*yaml" ) : if datetime . datetime . fromtimestamp ( os . path . getmtime ( fn ) ) > yesterday : log ...
Get belspec files from Github repo
50,868
def belspec_yaml2json ( yaml_fn : str , json_fn : str ) -> str : try : spec_dict = yaml . load ( open ( yaml_fn , "r" ) . read ( ) , Loader = yaml . SafeLoader ) spec_dict [ "admin" ] = { } spec_dict [ "admin" ] [ "version_underscored" ] = spec_dict [ "version" ] . replace ( "." , "_" ) spec_dict [ "admin" ] [ "parser_...
Enhance BEL specification and save as JSON file
50,869
def add_relations ( spec_dict : Mapping [ str , Any ] ) -> Mapping [ str , Any ] : spec_dict [ "relations" ] [ "list" ] = [ ] spec_dict [ "relations" ] [ "list_short" ] = [ ] spec_dict [ "relations" ] [ "list_long" ] = [ ] spec_dict [ "relations" ] [ "to_short" ] = { } spec_dict [ "relations" ] [ "to_long" ] = { } for ...
Add relation keys to spec_dict
50,870
def add_functions ( spec_dict : Mapping [ str , Any ] ) -> Mapping [ str , Any ] : spec_dict [ "functions" ] [ "list" ] = [ ] spec_dict [ "functions" ] [ "list_long" ] = [ ] spec_dict [ "functions" ] [ "list_short" ] = [ ] spec_dict [ "functions" ] [ "primary" ] = { } spec_dict [ "functions" ] [ "primary" ] [ "list_lon...
Add function keys to spec_dict
50,871
def enhance_function_signatures ( spec_dict : Mapping [ str , Any ] ) -> Mapping [ str , Any ] : for func in spec_dict [ "functions" ] [ "signatures" ] : for i , sig in enumerate ( spec_dict [ "functions" ] [ "signatures" ] [ func ] [ "signatures" ] ) : args = sig [ "arguments" ] req_args = [ ] pos_args = [ ] opt_args ...
Enhance function signatures
50,872
def create_ebnf_parser ( files ) : flag = False for belspec_fn in files : if config [ "bel" ] [ "lang" ] [ "specification_github_repo" ] : tmpl_fn = get_ebnf_template ( ) ebnf_fn = belspec_fn . replace ( ".yaml" , ".ebnf" ) if not os . path . exists ( ebnf_fn ) or os . path . getmtime ( belspec_fn ) > os . path . getmt...
Create EBNF files and EBNF - based parsers
50,873
def get_function_help ( function : str , bel_spec : BELSpec ) : function_long = bel_spec [ "functions" ] [ "to_long" ] . get ( function ) function_help = [ ] if function_long : for signature in bel_spec [ "functions" ] [ "signatures" ] [ function_long ] [ "signatures" ] : function_help . append ( { "function_summary" :...
Get function_help given function name
50,874
def in_span(loc: int, span: Span) -> bool:
    """Return True when *loc* lies within the inclusive bounds of *span*."""
    return span[0] <= loc <= span[1]
Checks if loc is inside span
50,875
def relation_completions ( completion_text : str , bel_spec : BELSpec , bel_fmt : str , size : int ) -> list : if bel_fmt == "short" : relation_list = bel_spec [ "relations" ] [ "list_short" ] else : relation_list = bel_spec [ "relations" ] [ "list_long" ] matches = [ ] for r in relation_list : if re . match ( completi...
Filter BEL relations by prefix
50,876
def function_completions ( completion_text : str , bel_spec : BELSpec , function_list : list , bel_fmt : str , size : int , ) -> list : if isinstance ( function_list , list ) : if bel_fmt in [ "short" , "medium" ] : function_list = [ bel_spec [ "functions" ] [ "to_short" ] [ fn ] for fn in function_list ] else : functi...
Filter BEL functions by prefix
50,877
def add_completions ( replace_list : list , belstr : str , replace_span : Span , completion_text : str ) -> List [ Mapping [ str , Any ] ] : completions = [ ] for r in replace_list : if len ( belstr ) > 0 : belstr_end = len ( belstr ) - 1 else : belstr_end = 0 log . debug ( f'Replace list {r} Replace_span {replace_spa...
Create completions to return given replacement list
50,878
def get_completions ( belstr : str , cursor_loc : int , bel_spec : BELSpec , bel_comp : str , bel_fmt : str , species_id : str , size : int , ) : ast , errors = pparse . get_ast_dict ( belstr ) spans = pparse . collect_spans ( ast ) completion_text = "" completions = [ ] function_help = [ ] log . debug ( f"Cursor locat...
Get BEL Assertion completions
50,879
def parse_functions ( bels : list , char_locs : CharLocs , parsed : Parsed , errors : Errors ) -> Tuple [ Parsed , Errors ] : parens = char_locs [ "parens" ] if not parens : bels_len = len ( bels ) - 1 span = ( 0 , bels_len ) parsed [ span ] = { "name" : "" . join ( bels ) , "type" : "Function" , "span" : span , "name_...
Parse functions from BEL using paren comma quote character locations
50,880
def parse_args ( bels : list , char_locs : CharLocs , parsed : Parsed , errors : Errors ) -> Tuple [ Parsed , Errors ] : commas = char_locs [ "commas" ] for span in parsed : if parsed [ span ] [ "type" ] != "Function" or "parens_span" not in parsed [ span ] : continue sp , ep = parsed [ span ] [ "parens_span" ] if ep =...
Parse arguments from functions
50,881
def arg_types ( parsed : Parsed , errors : Errors ) -> Tuple [ Parsed , Errors ] : func_pattern = re . compile ( r"\s*[a-zA-Z]+\(" ) nsarg_pattern = re . compile ( r"^\s*([A-Z]+):(.*?)\s*$" ) for span in parsed : if parsed [ span ] [ "type" ] != "Function" or "parens_span" not in parsed [ span ] : continue for i , arg ...
Add argument types to parsed function data structure
50,882
def parse_relations ( belstr : str , char_locs : CharLocs , parsed : Parsed , errors : Errors ) -> Tuple [ Parsed , Errors ] : quotes = char_locs [ "quotes" ] quoted_range = set ( [ i for start , end in quotes . items ( ) for i in range ( start , end ) ] ) for match in relations_pattern_middle . finditer ( belstr ) : (...
Parse relations from BEL string
50,883
def parse_nested ( bels : list , char_locs : CharLocs , parsed : Parsed , errors : Errors ) -> Tuple [ Parsed , Errors ] : for sp in char_locs [ "nested_parens" ] : ep , level = char_locs [ "nested_parens" ] [ sp ] if ep == - 1 : ep = len ( bels ) + 1 parsed [ ( sp , ep ) ] = { "type" : "Nested" , "span" : ( sp , ep ) ...
Parse nested BEL object
50,884
def dump_json(d: dict) -> str:
    """Serialize *d* to an indented JSON string, stringifying keys so that
    tuple (and other non-string) dictionary keys survive serialization.

    Fix: the return annotation said ``None`` although the function returns
    the JSON string.
    """
    import json
    stringified = {str(key): value for key, value in d.items()}
    return json.dumps(stringified, indent=4)
Dump json when using tuples for dictionary keys
50,885
def collect_spans ( ast : AST ) -> List [ Tuple [ str , Tuple [ int , int ] ] ] : spans = [ ] if ast . get ( "subject" , False ) : spans . extend ( collect_spans ( ast [ "subject" ] ) ) if ast . get ( "object" , False ) : spans . extend ( collect_spans ( ast [ "object" ] ) ) if ast . get ( "nested" , False ) : spans . ...
Collect flattened list of spans of BEL syntax types
50,886
def print_spans ( spans , max_idx : int ) -> None : bel_spans = [ " " ] * ( max_idx + 3 ) for val , span in spans : if val in [ "Nested" , "NSArg" ] : continue for i in range ( span [ 0 ] , span [ 1 ] + 1 ) : bel_spans [ i ] = val [ 0 ] bel_spans = [ " " ] * ( max_idx + 3 ) for val , span in spans : if val not in [ "Ne...
Quick test to show how character spans match original BEL String
50,887
def parsed_function_to_ast ( parsed : Parsed , parsed_key ) : sub = parsed [ parsed_key ] subtree = { "type" : "Function" , "span" : sub [ "span" ] , "function" : { "name" : sub [ "name" ] , "name_span" : sub [ "name_span" ] , "parens_span" : sub . get ( "parens_span" , [ ] ) , } , } args = [ ] for arg in parsed [ pars...
Create AST for top - level functions
50,888
def parsed_top_level_errors ( parsed , errors , component_type : str = "" ) -> Errors : fn_cnt = 0 rel_cnt = 0 nested_cnt = 0 for key in parsed : if parsed [ key ] [ "type" ] == "Function" : fn_cnt += 1 if parsed [ key ] [ "type" ] == "Relation" : rel_cnt += 1 if parsed [ key ] [ "type" ] == "Nested" : nested_cnt += 1 ...
Check full parse for errors
50,889
def parsed_to_ast ( parsed : Parsed , errors : Errors , component_type : str = "" ) : ast = { } sorted_keys = sorted ( parsed . keys ( ) ) for key in sorted_keys : if parsed [ key ] [ "type" ] == "Nested" : nested_component_stack = [ "subject" , "object" ] if component_type : component_stack = [ component_type ] else :...
Convert parsed data struct to AST dictionary
50,890
def get_ast_dict ( belstr , component_type : str = "" ) : errors = [ ] parsed = { } bels = list ( belstr ) char_locs , errors = parse_chars ( bels , errors ) parsed , errors = parse_functions ( belstr , char_locs , parsed , errors ) parsed , errors = parse_args ( bels , char_locs , parsed , errors ) parsed , errors = a...
Convert BEL string to AST dictionary
50,891
def get_ast_obj ( belstr , bel_version , component_type : str = "" ) : ast_dict , errors = get_ast_dict ( belstr , component_type ) spec = bel_specification . get_specification ( bel_version ) subj = ast_dict [ "subject" ] subj_ast = add_ast_fn ( subj , spec ) relation = None obj = None if "relation" in ast_dict : rela...
Convert AST partialparse dict to BELAst
50,892
def add_ast_fn ( d , spec , parent_function = None ) : if d [ "type" ] == "Function" : ast_fn = Function ( d [ "function" ] [ "name" ] , spec , parent_function = parent_function ) for arg in d [ "args" ] : if arg [ "type" ] == "Function" : ast_fn . add_argument ( add_ast_fn ( arg , spec , parent_function = ast_fn ) ) e...
Convert dict AST to object AST Function
50,893
def convert_namespaces_str ( bel_str : str , api_url : str = None , namespace_targets : Mapping [ str , List [ str ] ] = None , canonicalize : bool = False , decanonicalize : bool = False , ) -> str : matches = re . findall ( r'([A-Z]+:"(?:\\.|[^"\\])*"|[A-Z]+:(?:[^\),\s]+))' , bel_str ) for nsarg in matches : if "DEFA...
Convert namespace in string
50,894
def convert_namespaces_ast ( ast , api_url : str = None , namespace_targets : Mapping [ str , List [ str ] ] = None , canonicalize : bool = False , decanonicalize : bool = False , ) : if isinstance ( ast , NSArg ) : given_term_id = "{}:{}" . format ( ast . namespace , ast . value ) if ( canonicalize and not ast . canon...
Recursively convert namespaces of BEL Entities in BEL AST using API endpoint
50,895
def orthologize ( ast , bo , species_id : str ) : if not species_id : bo . validation_messages . append ( ( "WARNING" , "No species id was provided for orthologization" ) ) return ast if isinstance ( ast , NSArg ) : if ast . orthologs : if ast . orthologs . get ( species_id , None ) : orthologized_nsarg_val = ast . ort...
Recursively orthologize BEL Entities in BEL AST using API endpoint
50,896
def populate_ast_nsarg_orthologs ( ast , species ) : ortholog_namespace = "EG" if isinstance ( ast , NSArg ) : if re . match ( ortholog_namespace , ast . canonical ) : orthologs = bel . terms . orthologs . get_orthologs ( ast . canonical , list ( species . keys ( ) ) ) for species_id in species : if species_id in ortho...
Recursively collect NSArg orthologs for BEL AST
50,897
def preprocess_bel_stmt(stmt: str) -> str:
    """Clean up the basic formatting of a BEL statement: trim the ends,
    collapse repeated commas, ensure one space after each comma, and
    collapse runs of spaces."""
    cleaned = stmt.strip()
    cleaned = re.sub(r",+", ",", cleaned)   # collapse duplicate commas
    cleaned = re.sub(r",", ", ", cleaned)   # one space after every comma
    cleaned = re.sub(r" +", " ", cleaned)   # collapse space runs
    return cleaned
Clean up basic formatting of BEL statement
50,898
def _dump_spec(spec):
    """Write the BEL specification dictionary to ``spec.yaml`` in block
    (non-flow) YAML style using the module's custom dumper."""
    with open("spec.yaml", "w") as outfile:
        yaml.dump(spec, outfile, Dumper=MyDumper, default_flow_style=False)
Dump bel specification dictionary using YAML
50,899
def process_rule ( edges : Edges , ast : Function , rule : Mapping [ str , Any ] , spec : BELSpec ) : ast_type = ast . __class__ . __name__ trigger_functions = rule . get ( "trigger_function" , [ ] ) trigger_types = rule . get ( "trigger_type" , [ ] ) rule_subject = rule . get ( "subject" ) rule_relation = rule . get (...
Process computed edge rule