idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
10,400
def filter(self, drop_duplicates=False, drop_improper_mate_pairs=False,
           min_mapping_quality=None, min_base_quality=None, filters=None):
    """
    Return a new PileupCollection that includes only pileup elements
    satisfying the specified criteria.

    Parameters
    ----------
    drop_duplicates : drop elements whose alignment is flagged duplicate.
    drop_improper_mate_pairs : keep only properly-paired alignments.
    min_mapping_quality : minimum alignment mapping quality, when given.
    min_base_quality : minimum per-element base quality, when given.
    filters : optional list of extra predicates ``element -> bool``.

    Returns
    -------
    A new PileupCollection whose parent is ``self``.
    """
    # Work on a copy: the original implementation appended to the
    # caller-supplied `filters` list, mutating it in place.
    predicates = [] if filters is None else list(filters)
    if drop_duplicates:
        predicates.append(lambda e: not e.alignment.is_duplicate)
    if drop_improper_mate_pairs:
        predicates.append(lambda e: e.alignment.is_proper_pair)
    if min_mapping_quality is not None:
        predicates.append(
            lambda e: e.alignment.mapping_quality >= min_mapping_quality)
    if min_base_quality is not None:
        predicates.append(
            lambda e: e.min_base_quality >= min_base_quality)
    pileups = OrderedDict(
        (locus, pileup.filter(predicates))
        for (locus, pileup) in self.pileups.items())
    return PileupCollection(pileups=pileups, parent=self)
Return a new PileupCollection that includes only pileup elements satisfying the specified criteria .
10,401
def merge(self, *others):
    """
    Return a new PileupCollection that is the union of ``self`` and all
    of the other specified collections.
    """
    combined = {}
    for collection in (self,) + others:
        for locus, pileup in collection.pileups.items():
            existing = combined.get(locus)
            if existing is None:
                combined[locus] = Pileup(locus, pileup.elements)
            else:
                existing.update(pileup)
    return PileupCollection(combined, parent=self)
Return a new PileupCollection that is the union of self and the other specified collections .
10,402
def from_bam(pysam_samfile, loci, normalized_contig_names=True):
    """
    Create a PileupCollection for a set of loci from a BAM file.

    Parameters
    ----------
    pysam_samfile : an open ``Samfile``, or a path to one (in which case
        the file is opened here and closed again on completion).
    loci : iterable of locus-like objects, converted with ``to_locus``.
    normalized_contig_names : when True, contig names in `loci` are
        assumed normalized and are mapped back to the BAM's own names.

    Returns
    -------
    PileupCollection with one (possibly empty) Pileup per single-base
    locus covered by the input intervals.
    """
    loci = [to_locus(obj) for obj in loci]
    close_on_completion = False
    if typechecks.is_string(pysam_samfile):
        pysam_samfile = Samfile(pysam_samfile)
        close_on_completion = True
    try:
        if normalized_contig_names:
            # Map both normalized and raw contig names to the BAM's names.
            chromosome_name_map = {}
            for name in pysam_samfile.references:
                normalized = pyensembl.locus.normalize_chromosome(name)
                chromosome_name_map[normalized] = name
                chromosome_name_map[name] = name
        else:
            chromosome_name_map = None
        result = PileupCollection({})
        # Expand each interval into single-position loci, sorted order.
        locus_iterator = itertools.chain.from_iterable(
            (Locus.from_interbase_coordinates(locus_interval.contig, pos)
             for pos in locus_interval.positions)
            for locus_interval in sorted(loci))
        for locus in locus_iterator:
            result.pileups[locus] = Pileup(locus, [])
            if normalized_contig_names:
                try:
                    chromosome = chromosome_name_map[locus.contig]
                except KeyError:
                    # logging.warn is a deprecated alias of warning.
                    logging.warning(
                        "No such contig in bam: %s" % locus.contig)
                    continue
            else:
                chromosome = locus.contig
            columns = pysam_samfile.pileup(
                chromosome,
                locus.position,
                locus.position + 1,
                truncate=True,
                stepper="nofilter")
            try:
                column = next(columns)
            except StopIteration:
                # No reads overlap this locus.
                continue
            pileups = column.pileups
            # truncate=True guarantees at most one column was produced.
            assert list(columns) == []
            for pileup_read in pileups:
                if not pileup_read.is_refskip:
                    element = PileupElement.from_pysam_alignment(
                        locus, pileup_read)
                    result.pileups[locus].append(element)
        return result
    finally:
        if close_on_completion:
            pysam_samfile.close()
Create a PileupCollection for a set of loci from a BAM file .
10,403
def invenio_query_factory(parser=None, walkers=None):
    """
    Create a parser returning an Elastic Search DSL query instance.

    Parameters
    ----------
    parser : pypeg2 grammar to use; defaults to ``Main``.
    walkers : optional list of AST walkers applied in order; defaults to
        a single ``PypegConverter``. An ``ElasticSearchDSL`` walker is
        always appended last.
    """
    parser = parser or Main
    # Copy the walker list: the original implementation appended
    # ElasticSearchDSL() to the caller-supplied list, so every call to
    # this factory mutated (and grew) the caller's list.
    walkers = list(walkers) if walkers else [PypegConverter()]
    walkers.append(ElasticSearchDSL())

    def invenio_query(pattern):
        # Parse the pattern, then run each walker over the AST in turn.
        query = pypeg2.parse(pattern, parser, whitespace="")
        for walker in walkers:
            query = query.accept(walker)
        return query

    return invenio_query
Create a parser returning Elastic Search DSL query instance .
10,404
def check_dimensions(self, dataset):
    """
    Checks that the feature types of this dataset are consistent with a
    timeseries-profile-orthogonal dataset.

    Returns a single-element list with the aggregated check result.
    """
    results = []
    required_ctx = TestCtx(BaseCheck.HIGH, 'All geophysical variables are timeseries-profile-orthogonal feature types')
    message = '{} must be a valid profile-orthogonal feature type. It must have dimensions of (station, time, z).'
    message += ' If it\'s a single station, it must have dimensions (time, z). x and y dimensions must be scalar or have'
    message += ' dimensions (station). time must be a coordinate variable with dimension (time) and z must be a'
    # Fixed typo in user-facing message: "variabel" -> "variable".
    message += ' coordinate variable with dimension (z).'
    for variable in util.get_geophysical_variables(dataset):
        # Either single-station or multi-station layout is acceptable.
        is_valid = util.is_timeseries_profile_single_station(dataset, variable)
        is_valid = is_valid or util.is_timeseries_profile_multi_station(dataset, variable)
        required_ctx.assert_true(
            is_valid,
            message.format(variable)
        )
    results.append(required_ctx.to_result())
    return results
Checks that the feature types of this dataset are consistent with a timeseries - profile - orthogonal dataset .
10,405
def theme(name='readthedocs'):
    """
    Select the Sphinx theme by copying the matching layout template.

    Set ``name`` to 'bootstrap' to use bootstrap (requires the templates
    to be in the main dir); 'readthedocs' leaves the layout untouched.
    """
    os.environ['SPHINX_THEME'] = name
    if name == 'bootstrap':
        local('cp docs/source/_templates/layout_bootstrap.html docs/source/_templates/layout.html')
    elif name == 'readthedocs':
        # BUG FIX: original used `name is 'readthedocs'`, an identity
        # comparison on a string, which is not guaranteed to be True.
        return
    else:
        local('cp docs/source/_templates/layout_simple.html docs/source/_templates/layout.html')
Set name to 'bootstrap' in case you want to use bootstrap. This also requires the templates to be in the main dir.
10,406
def html(theme_name='readthedocs'):
    """Build the documentation locally as HTML and run the checks."""
    os.environ['RSTPAGES'] = 'FALSE'
    theme(theme_name)
    api()
    man()
    clean()
    for command in ("cd docs; make html",
                    "fab security.check",
                    "touch docs/build/html/.nojekyll"):
        local(command)
build the doc locally and view
10,407
def sign_message(body: ByteString, secret: Text) -> Text:
    """Return the SHA1-HMAC signature of *body* keyed with *secret*,
    prefixed with ``sha1=`` as Facebook expects."""
    digest = hmac.new(secret.encode(), body, sha1).hexdigest()
    return 'sha1={}'.format(digest)
Compute a message's signature.
10,408
async def _get_user(self):
    """Return the cached user dict, querying the platform on first
    access; an API failure caches an empty dict."""
    if self._cache is None:
        try:
            user = await self.facebook.get_user(self.fbid, self.page_id)
        except PlatformOperationError:
            user = {}
        self._cache = user
    return self._cache
Get the user dict from cache or query it from the platform if missing .
10,409
async def get_friendly_name(self) -> Text:
    """Return the user's first name (stripped); fall back to the last
    name when the first name is missing or blank."""
    user = await self._get_user()
    first = user.get('first_name', '').strip()
    last = user.get('last_name', '').strip()
    return first or last
The friendly name is mapped to Facebook s first name . If the first name is missing use the last name .
10,410
async def get_gender(self) -> User.Gender:
    """Return the user's gender as reported by Facebook, or ``unknown``
    when the value is absent or not a valid Gender."""
    user = await self._get_user()
    try:
        return User.Gender(user.get('gender'))
    except ValueError:
        return User.Gender.unknown
Get the gender from Facebook .
10,411
def get_user(self) -> FacebookUser:
    """Build a FacebookUser instance for the sender of this event."""
    sender_id = self._event['sender']['id']
    return FacebookUser(sender_id, self.get_page_id(), self._facebook, self)
Generate a Facebook user instance
10,412
def get_layers(self) -> List[BaseLayer]:
    """Extract every content layer present in this event: text, media
    attachments, location, quick reply, postback and opt-in."""
    # Attachment type -> media layer class (unknown types are skipped).
    media_layers = {
        'image': lyr.Image,
        'audio': lyr.Audio,
        'file': lyr.File,
        'video': lyr.Video,
    }
    out = []
    msg = self._event.get('message', {})
    if 'text' in msg:
        out.append(lyr.RawText(msg['text']))
    for attachment in msg.get('attachments') or []:
        kind = attachment['type']
        if kind in media_layers:
            out.append(
                media_layers[kind](UrlMedia(attachment['payload']['url'])))
        elif kind == 'location':
            coords = attachment['payload']['coordinates']
            out.append(lyr.Location(lyr.Location.Point(
                lat=coords['lat'],
                lon=coords['long'],
            )))
    if 'quick_reply' in msg:
        out.append(QuickReply(msg['quick_reply']['payload']))
    if 'postback' in self._event:
        payload = ujson.loads(self._event['postback']['payload'])
        out.append(lyr.Postback(payload))
    if 'optin' in self._event:
        out.append(OptIn(self._event['optin']['ref']))
    return out
Return all layers that can be found in the message .
10,413
def verify_token(self):
    """Derive a deterministic, secure verify token from the app access
    token (its SHA-256 hex digest)."""
    digest = sha256(self.app_access_token.encode())
    return digest.hexdigest()
Automatically generated secure verify token
10,414
def hook_up(self, router: UrlDispatcher):
    """Register the webhook routes: GET for Facebook's verification
    handshake, POST for incoming events."""
    path = self.webhook_path
    router.add_get(path, self.check_hook)
    router.add_post(path, self.receive_events)
Dynamically hooks the right webhook paths
10,415
async def check_hook(self, request: HttpRequest):
    """Answer Facebook's webhook verification: echo the challenge when
    the verify token matches, otherwise return an error payload."""
    token = request.query.get('hub.verify_token')
    if not token:
        return json_response({
            'error': 'No verification token was provided',
        }, status=400)
    if token == self.verify_token:
        return Response(text=request.query.get('hub.challenge', ''))
    return json_response({
        'error': 'could not find the page token in the configuration',
    })
Called when Facebook checks the hook
10,416
async def receive_events(self, request: HttpRequest):
    """
    Handle an events POST from Facebook.

    Verifies the ``X-Hub-Signature`` HMAC of the raw body before
    dispatching each messaging entry to ``handle_event()``.
    """
    body = await request.read()
    s = self.settings()
    try:
        content = ujson.loads(body)
    except ValueError:
        return json_response({
            'error': True,
            'message': 'Cannot decode body',
        }, status=400)
    secret = s['app_secret']
    # BUG FIX: a missing header used to raise KeyError (HTTP 500);
    # treat it as an invalid signature instead.
    actual_sig = request.headers.get('X-Hub-Signature', '')
    expected_sig = sign_message(body, secret)
    if not hmac.compare_digest(actual_sig, expected_sig):
        return json_response({
            'error': True,
            'message': 'Invalid signature',
        }, status=401)
    for entry in content['entry']:
        for raw_message in entry.get('messaging', []):
            message = FacebookMessage(raw_message, self)
            await self.handle_event(message)
    return json_response({
        'ok': True,
    })
Events received from Facebook
10,417
async def _deferred_init ( self ) : await self . _check_subscriptions ( ) await self . _set_whitelist ( ) await self . _set_get_started ( ) await self . _set_greeting_text ( ) await self . _set_persistent_menu ( )
Run those things in separate tasks, as they are not required for the bot to work and they take a lot of time to run.
10,418
async def _send_to_messenger_profile(self, page, content):
    """
    Submit `content` to the Messenger Profile API for `page`.

    The messenger profile API handles all meta-information about the
    bot, like the menu. The POST is skipped when the remote profile
    already contains `content` (dict-subset comparison). Failures are
    logged and reported but never raised: this is best-effort setup.
    """
    # Human-readable list of the profile keys being set, for logging.
    log_name = ', '.join(repr(x) for x in content.keys())
    page_id = page['page_id']
    current = await self._get_messenger_profile(page, content.keys())
    if dict_is_subset(content, current):
        logger.info('Page %s: %s is already up to date', page_id, log_name)
        return
    params = {
        'access_token': page['page_token'],
    }
    headers = {
        'content-type': 'application/json',
    }
    post = self.session.post(
        PROFILE_ENDPOINT,
        params=params,
        headers=headers,
        data=ujson.dumps(content),
    )
    try:
        async with post as r:
            await self._handle_fb_response(r)
    except Exception:
        # Swallow the error on purpose: profile setup is not critical.
        logger.exception('Page %s: %s could not be set', page_id, log_name)
        reporter.report()
    else:
        logger.info('Page %s: %s was updated', page_id, log_name)
The messenger profile API handles all meta - information about the bot like the menu . This allows to submit data to this API endpoint .
10,419
async def _set_get_started(self):
    """Set the "get started" button payload for the configured page,
    defaulting to ``{'action': 'get_started'}``."""
    page = self.settings()
    payload = page.get('get_started', {'action': 'get_started'})
    await self._send_to_messenger_profile(page, {
        'get_started': {
            'payload': ujson.dumps(payload),
        },
    })
    logger.info('Get started set for page %s', page['page_id'])
Set the get started action for all configured pages .
10,420
async def _set_greeting_text(self):
    """Set the page's greeting text, when one is configured."""
    page = self.settings()
    if 'greeting' not in page:
        return
    await self._send_to_messenger_profile(page, {
        'greeting': page['greeting'],
    })
    logger.info('Greeting text set for page %s', page['page_id'])
Set the greeting text of the page
10,421
async def _set_persistent_menu(self):
    """Define the page's persistent menu, when one is configured."""
    page = self.settings()
    if 'menu' not in page:
        return
    await self._send_to_messenger_profile(page, {
        'persistent_menu': page['menu'],
    })
    logger.info('Set menu for page %s', page['page_id'])
Define the persistent menu for all pages
10,422
async def _set_whitelist(self):
    """Whitelist domains for the Messenger extensions, when a whitelist
    is configured for the page."""
    page = self.settings()
    if 'whitelist' not in page:
        return
    await self._send_to_messenger_profile(page, {
        'whitelisted_domains': page['whitelist'],
    })
    logger.info('Whitelisted %s for page %s', page['whitelist'],
                page['page_id'])
Whitelist domains for the messenger extensions
10,423
def _get_subscriptions_endpoint(self):
    """Return ``(url, params)`` for the app's webhook subscriptions
    endpoint on the Graph API."""
    conf = self.settings()
    params = {
        'access_token': self.app_access_token,
    }
    url = GRAPH_ENDPOINT.format(f'{conf["app_id"]}/subscriptions')
    return url, params
Generates the URL and tokens for the subscriptions endpoint
10,424
async def _get_subscriptions(self) -> Tuple[Set[Text], Text]:
    """
    List the subscriptions currently active for this app.

    Returns a tuple of (set of subscribed field names, callback URL)
    for the "page" object; (empty set, '') when no page subscription
    exists.
    """
    url, params = self._get_subscriptions_endpoint()
    get = self.session.get(url, params=params)
    async with get as r:
        await self._handle_fb_response(r)
        data = await r.json()
        for scope in data['data']:
            # Only the "page" scope is relevant to this platform.
            if scope['object'] == 'page':
                return (
                    set(x['name'] for x in scope['fields']),
                    scope['callback_url'],
                )
    return set(), ''
List the subscriptions currently active
10,425
async def _set_subscriptions(self, subscriptions):
    """POST the desired page-event subscription list (plus callback URL
    and verify token) to the Graph API."""
    url, params = self._get_subscriptions_endpoint()
    payload = ujson.dumps({
        'object': 'page',
        'callback_url': self.webhook_url,
        'fields': ', '.join(subscriptions),
        'verify_token': self.verify_token,
    })
    headers = {
        'Content-Type': 'application/json',
    }
    post = self.session.post(url, params=params, data=payload,
                             headers=headers)
    async with post as r:
        await self._handle_fb_response(r)
        # Drain the body; the response content itself is unused.
        await r.json()
Set the subscriptions to a specific list of values
10,426
async def _check_subscriptions(self):
    """Ensure the webhook subscriptions cover the configured list and
    point at our webhook URL, updating them if not."""
    subscribed, url = await self._get_subscriptions()
    expect = set(settings.FACEBOOK_SUBSCRIPTIONS)
    missing = expect - subscribed
    if missing or url != self.webhook_url:
        await self._set_subscriptions(expect | subscribed)
        logger.info('Updated webhook subscriptions')
    else:
        logger.info('No need to update webhook subscriptions')
Checks that all subscriptions are subscribed
10,427
async def handle_event(self, event: FacebookMessage):
    """Forward an incoming Facebook message to the notification
    pipeline, paired with a fresh responder."""
    await self._notify(event, FacebookResponder(self))
Handle an incoming message from Facebook .
10,428
def _access_token(self, request: Request = None, page_id: Text = ''):
    """Resolve the page access token for a request (or an explicitly
    given page id); raise if the page is not the configured one."""
    if not page_id:
        page_id = request.message.get_page_id()
    page = self.settings()
    if page['page_id'] == page_id:
        return page['page_token']
    raise PlatformOperationError(
        'Trying to get access token of the '
        'page "{}", which is not configured.'.format(page_id))
Guess the access token for that specific request .
10,429
async def _make_qr(self,
                   qr: QuickRepliesList.BaseOption,
                   request: Request):
    """Serialize a single quick-reply option into the Messenger wire
    format (text options carry a rendered title and payload)."""
    if isinstance(qr, QuickRepliesList.TextOption):
        return {
            'content_type': 'text',
            'title': await render(qr.text, request),
            'payload': qr.slug,
        }
    if isinstance(qr, QuickRepliesList.LocationOption):
        return {
            'content_type': 'location',
        }
Generate a single quick reply s content .
10,430
async def _send_text(self, request: Request, stack: Stack):
    """
    Send text layers to the user; each wrapped 320-char part goes into
    its own Messenger bubble. Quick replies, if any, are attached to
    the last bubble only.
    """
    parts = []
    for layer in stack.layers:
        if isinstance(layer, lyr.MultiText):
            lines = await render(layer.text, request, multi_line=True)
            for line in lines:
                parts.extend(wrap(line, 320))
        elif isinstance(layer, (lyr.Text, lyr.RawText)):
            text = await render(layer.text, request)
            parts.extend(wrap(text, 320))

    # BUG FIX: with no renderable text (e.g. empty/whitespace-only
    # layers) the original crashed on `parts[-1]` with an IndexError.
    if not parts:
        return

    for part in parts[:-1]:
        await self._send(request, {
            'text': part,
        }, stack)

    msg = {
        'text': parts[-1],
    }
    await self._add_qr(stack, msg, request)
    await self._send(request, msg, stack)
Send text layers to the user . Each layer will go in its own bubble .
10,431
async def _send_generic_template(self, request: Request, stack: Stack):
    """Serialize the stack's GenericTemplate layer and send it, with
    any quick replies attached."""
    template = stack.get_layer(GenericTemplate)
    msg = {
        'attachment': {
            'type': 'template',
            'payload': await template.serialize(request),
        },
    }
    await self._add_qr(stack, msg, request)
    await self._send(request, msg, stack)
Generates and send a generic template .
10,432
async def _send_button_template(self, request: Request, stack: Stack):
    """Serialize the stack's ButtonTemplate layer and send it, with any
    quick replies attached."""
    template = stack.get_layer(ButtonTemplate)
    buttons = [await b.serialize(request) for b in template.buttons]
    payload = {
        'template_type': 'button',
        'text': await render(template.text, request),
        'buttons': buttons,
    }
    msg = {
        'attachment': {
            'type': 'template',
            'payload': payload,
        },
    }
    await self._add_qr(stack, msg, request)
    await self._send(request, msg, stack)
Generates and send a button template .
10,433
async def _send_typing(self, request: Request, stack: Stack):
    """
    Send a typing indication to Facebook.

    The `lyr.Typing` layer's `active` flag selects between the
    "typing_on" and "typing_off" sender actions.
    """
    active = stack.get_layer(lyr.Typing).active
    msg = ujson.dumps({
        'recipient': {
            'id': request.conversation.fbid,
        },
        'sender_action': 'typing_on' if active else 'typing_off',
    })
    headers = {
        'content-type': 'application/json',
    }
    params = {
        'access_token': self._access_token(request),
    }
    post = self.session.post(
        MESSAGES_ENDPOINT,
        params=params,
        data=msg,
        headers=headers,
    )
    logger.debug('Sending: %s', msg)
    async with post as r:
        await self._handle_fb_response(r)
Send to Facebook typing indications
10,434
async def _handle_fb_response(self, response: aiohttp.ClientResponse):
    """Raise PlatformOperationError when Facebook rejected the API call
    we just made; a 200 status passes through silently."""
    if response.status == 200:
        return
    try:
        error = (await response.json())['error']['message']
    except Exception:
        error = '(nothing)'
    raise PlatformOperationError('Facebook says: "{}"'.format(error))
Check that Facebook was OK with the API call we just made and raise an exception if it failed .
10,435
async def _send(self,
                request: Request,
                content: Dict[Text, Any],
                stack: Stack):
    """
    Actually proceed to sending the message to the Facebook API.

    Wraps `content` in the recipient envelope, applies the stack's
    MessagingType layer (defaulting to a "response" type) and POSTs it.
    """
    msg = {
        'recipient': {
            'id': request.conversation.fbid,
        },
        'message': content,
    }
    if stack and stack.has_layer(MessagingType):
        mt = stack.get_layer(MessagingType)
    else:
        # Without an explicit messaging type, mark it as a response.
        mt = MessagingType(response=True)
    msg.update(mt.serialize())
    msg_json = ujson.dumps(msg)
    headers = {
        'content-type': 'application/json',
    }
    params = {
        'access_token': self._access_token(request),
    }
    post = self.session.post(
        MESSAGES_ENDPOINT,
        params=params,
        data=msg_json,
        headers=headers,
    )
    logger.debug('Sending: %s', msg_json)
    async with post as r:
        await self._handle_fb_response(r)
Actually proceed to sending the message to the Facebook API .
10,436
async def get_user(self, user_id, page_id):
    """Query a user's profile from the Graph API and return its JSON."""
    params = {
        'fields': 'first_name,last_name,profile_pic,locale,timezone,gender',
        'access_token': self._access_token(page_id=page_id),
    }
    url = GRAPH_ENDPOINT.format(user_id)
    get = self.session.get(url, params=params)
    async with get as r:
        await self._handle_fb_response(r)
        return await r.json()
Query a user from the API and return its JSON
10,437
async def ensure_usable_media(self, media: BaseMedia) -> UrlMedia:
    """Accept only URL-based media for the Facebook platform; raise
    ValueError for anything else."""
    if isinstance(media, UrlMedia):
        return media
    raise ValueError('Facebook platform only accepts URL media')
So far, let's just accept URL media. We'll see in the future how it goes.
10,438
def _make_fake_message(self, user_id, page_id, payload):
    """Create a fake message for `user_id` containing a postback with
    the given payload."""
    event = {
        'sender': {'id': user_id},
        'recipient': {'id': page_id},
        'postback': {'payload': ujson.dumps(payload)},
    }
    return FacebookMessage(event, self, False)
Creates a fake message for the given user_id . It contains a postback with the given payload .
10,439
def _message_from_sr(self, token: Text, payload: Any) \
        -> Optional[BaseMessage]:
    """
    Verify a Facebook signed request and build the matching fake
    postback message.

    Returns None when the signed request cannot be parsed or verified.
    """
    page = self.settings()
    try:
        sr_data = SignedRequest.parse(token, page['app_secret'])
    except (TypeError, ValueError, SignedRequestError):
        # The exception value was bound but unused in the original;
        # also make the None return explicit.
        return None
    return self._make_fake_message(
        sr_data['psid'],
        page['page_id'],
        payload,
    )
Tries to verify the signed request
10,440
def _message_from_token(self, token: Text, payload: Any) \
        -> Optional[BaseMessage]:
    """Decode a webview JWT and, when it carries a valid user/page pair
    for the configured page, build the matching fake message."""
    try:
        claims = jwt.decode(token, settings.WEBVIEW_SECRET_KEY)
    except jwt.InvalidTokenError:
        return None
    user_id = claims.get('fb_psid')
    page_id = claims.get('fb_pid')
    if not isinstance(user_id, Text) or not isinstance(page_id, Text):
        return None
    if self.settings()['page_id'] == page_id:
        return self._make_fake_message(user_id, page_id, payload)
Analyzes a signed token and generates the matching message
10,441
def get_trans_reg(self, name: Text, default: Any = None) -> Any:
    """Convenience accessor for one entry of the transition register,
    with a caller-supplied default."""
    transitions = self.register.get(Register.TRANSITION, {})
    return transitions.get(name, default)
Convenience function to access the transition register of a specific kind .
10,442
async def get_locale(self) -> Text:
    """Return the overridden locale when one is set, otherwise the
    locale reported by the platform user."""
    if self._locale_override:
        return self._locale_override
    return await self.user.get_locale()
Get the locale to use for this request . It s either the overridden locale or the locale provided by the platform .
10,443
async def get_trans_flags(self) -> 'Flags':
    """
    Gives a chance to middlewares to make the translation flags.

    The default hook (`make_flags`) produces no flags; the
    'make_trans_flags' middleware, when registered, replaces or wraps
    it.
    """
    # Imported here, not at module level — presumably to avoid a
    # circular import with bernard.middleware; verify before moving.
    from bernard.middleware import MiddlewareManager

    async def make_flags(request: Request) -> 'Flags':
        # Default implementation: no flags at all.
        return {}

    mf = MiddlewareManager.instance().get('make_trans_flags', make_flags)
    return await mf(self)
Gives a chance to middlewares to make the translation flags
10,444
async def sign_url(self, url, method=HASH):
    """Sign *url* with this request's auth token, either appended to
    the query string or placed in the fragment, per *method*."""
    token = await self.get_token()
    if method == self.QUERY:
        return patch_qs(url, {
            settings.WEBVIEW_TOKEN_KEY: token,
        })
    if method == self.HASH:
        parts = list(urlparse(url))
        parts[5] = quote(token)  # component 5 is the fragment
        return urlunparse(parts)
    raise ValueError(f'Invalid signing method "{method}"')
Sign an URL with this request s auth token
10,445
def layers(self, value: List['BaseLayer']):
    """Setter: store a private copy of the layer list (so outside
    mutations can't desync the index) and rebuild derived state."""
    self._layers = [*value]
    self._index = self._make_index()
    self._transformed = {}
Perform a copy of the layers list in order to avoid the list changing without updating the index .
10,446
def _make_index ( self ) : out = { } for layer in self . _layers : cls = layer . __class__ out [ cls ] = out . get ( cls , [ ] ) + [ layer ] return out
Perform the index computation . It groups layers by type into a dictionary to allow quick access .
10,447
def has_layer(self, class_: Type[L], became: bool = True) -> bool:
    """Test the presence of a layer type; when `became` is True, layers
    produced by transformation also count."""
    if class_ in self._index:
        return True
    return became and class_ in self._transformed
Test the presence of a given layer type .
10,448
def get_layer(self, class_: Type[L], became: bool = True) -> L:
    """Return the first layer of `class_`; transformed layers are
    consulted when `became` is True. Raises KeyError when absent."""
    if class_ in self._index:
        return self._index[class_][0]
    if became:
        return self._transformed[class_][0]
    raise KeyError(class_)
Return the first layer of a given class . If that layer is not present then raise a KeyError .
10,449
def get_layers(self, class_: Type[L], became: bool = True) -> List[L]:
    """
    Return the list of layers of `class_` (possibly empty), including
    transformed layers when `became` is True.

    The result is a fresh list: the original implementation did
    ``out += transformed`` directly on the list fetched from
    ``self._index``, mutating the index in place.
    """
    out = list(self._index.get(class_, []))
    if became:
        out += self._transformed.get(class_, [])
    return out
Returns the list of layers of a given class . If no layers are present then the list will be empty .
10,450
def check_trajectory_id(self, dataset):
    """Check that a trajectory_id variable exists and carries the
    recommended attributes (a non-empty long_name)."""
    exists_ctx = TestCtx(BaseCheck.MEDIUM,
                         'Variable defining "trajectory_id" exists')
    trajectory_ids = dataset.get_variables_by_attributes(
        cf_role='trajectory_id')
    exists_ctx.assert_true(
        trajectory_ids,
        'variable defining cf_role="trajectory_id" exists')
    if not trajectory_ids:
        # Keep the historical contract: a bare result, not a list.
        return exists_ctx.to_result()
    results = [exists_ctx.to_result()]
    traj_var = trajectory_ids[0]
    test_ctx = TestCtx(
        BaseCheck.MEDIUM,
        'Recommended attributes for the {} variable'.format(traj_var.name))
    test_ctx.assert_true(
        getattr(traj_var, 'long_name', '') != "",
        "long_name attribute should exist and not be empty")
    results.append(test_ctx.to_result())
    return results
Checks that if a variable exists for the trajectory id it has the appropriate attributes
10,451
def check_required_attributes(self, dataset):
    """Feature-type-specific check of global required and highly
    recommended attributes for a Trajectory dataset."""
    required_ctx = TestCtx(
        BaseCheck.HIGH,
        'Required Global Attributes for Trajectory dataset')
    template = self.valid_templates[0]
    required_ctx.assert_true(
        getattr(dataset, 'nodc_template_version', '').lower() == template.lower(),
        'nodc_template_version attribute must be {}'.format(template))
    required_ctx.assert_true(
        getattr(dataset, 'cdm_data_type', '') == 'Trajectory',
        'cdm_data_type attribute must be set to Trajectory')
    required_ctx.assert_true(
        getattr(dataset, 'featureType', '') == 'trajectory',
        'featureType attribute must be set to trajectory')
    return [required_ctx.to_result()]
Feature type specific check of global required and highly recommended attributes .
10,452
def login(self, user, remember=True, session=None):
    """Store the current user's UHMAC in the session, marking the
    session permanent when `remember` is set."""
    logging.getLogger(__name__).debug(
        u'User `{0}` logged in'.format(user.login))
    if session is None:
        session = self.session
    session['permanent'] = remember
    session[self.session_key] = user.get_uhmac()
    # Some session backends need an explicit save to persist.
    save = getattr(session, 'save', None)
    if callable(save):
        save()
Sets the current user UID in the session .
10,453
def index(elem):
    """
    Return the index position of *elem* among its parent's children,
    or -1 when not found.

    Uses enumerate over a single children snapshot instead of the
    original index loop that re-fetched getchildren() per iteration.
    """
    parent = elem.getparent()
    for pos, child in enumerate(parent.getchildren()):
        if child == elem:
            return pos
    return -1
Return the index position of an element in the children of a parent .
10,454
def replaceelement(oldelem, newelem):
    """
    Replace *oldelem* with *newelem* at the same position under
    oldelem's parent; a no-op when oldelem has no parent.

    Returns as soon as the replacement is done — the original kept
    looping over a stale size after the swap.
    """
    parent = oldelem.getparent()
    if parent is None:
        return
    for pos, child in enumerate(parent.getchildren()):
        if child == oldelem:
            parent.remove(oldelem)
            parent.insert(pos, newelem)
            return
Given a parent element replace oldelem with newelem .
10,455
def parseelement(elem):
    """Re-parse an element's text content as XML and substitute the
    parsed tree for the element. We do this because sometimes we want
    to set XML as the content of an element."""
    raw = '<{tag}>{content}</{tag}>'.format(tag=elem.tag,
                                            content=elem.text)
    replaceelement(elem, etree.fromstring(raw))
Convert the content of an element into more ElementTree structures . We do this because sometimes we want to set xml as the content of an element .
10,456
def _check_min_max_range(self, var, test_ctx):
    """
    Checks that either both valid_min and valid_max exist, or
    valid_range exists, on `var`, accumulating outcomes into
    `test_ctx`. Returns the same `test_ctx`.
    """
    if 'valid_range' in var.ncattrs():
        test_ctx.assert_true(var.valid_range.dtype == var.dtype and
                             len(var.valid_range) == 2 and
                             var.valid_range[0] <= var.valid_range[1],
                             "valid_range must be a two element vector of min followed by max with the same data type as {}".format(var.name))
    else:
        for bound in ('valid_min', 'valid_max'):
            v_bound = getattr(var, bound, '')
            warn_msg = '{} attribute should exist, have the same type as {}, and not be empty or valid_range should be defined'.format(bound, var.name)
            # String attributes have no dtype to compare; instead check
            # non-emptiness and that the variable itself is a char type.
            if isinstance(v_bound, six.string_types):
                test_ctx.assert_true(v_bound != '' and var.dtype.char == 'S', warn_msg)
            else:
                test_ctx.assert_true(v_bound.dtype == var.dtype, warn_msg)
    return test_ctx
Checks that either both valid_min and valid_max exist or valid_range exists .
10,457
def check_base_required_attributes(self, dataset):
    """
    Check the global required and highly recommended attributes for
    1.1 templates. These go an extra step besides just checking that
    they exist: each value is validated against its accepted value(s).
    """
    test_ctx = TestCtx(BaseCheck.HIGH, 'Required global attributes')
    conventions = getattr(dataset, 'Conventions', '')
    metadata_conventions = getattr(dataset, 'Metadata_Conventions', '')
    feature_type = getattr(dataset, 'featureType', '')
    cdm_data_type = getattr(dataset, 'cdm_data_type', '')
    standard_name_vocab = getattr(dataset, 'standard_name_vocabulary', '')
    accepted_conventions = 'CF-1.6'
    test_ctx.assert_true(conventions == accepted_conventions,
                         'Conventions attribute is missing or is not equal to CF-1.6: {}'.format(conventions))
    test_ctx.assert_true(metadata_conventions == 'Unidata Dataset Discovery v1.0',
                         "Metadata_Conventions attribute is required to be 'Unidata Dataset Discovery v1.0': {}".format(metadata_conventions))
    test_ctx.assert_true(feature_type in ['point', 'timeSeries', 'trajectory', 'profile', 'timeSeriesProfile', 'trajectoryProfile'],
                         'Feature type must be one of point, timeSeries, trajectory, profile, timeSeriesProfile, trajectoryProfile: {}'.format(feature_type))
    test_ctx.assert_true(cdm_data_type.lower() in ['grid', 'image', 'point', 'radial', 'station', 'swath', 'trajectory'],
                         'cdm_data_type must be one of Grid, Image, Point, Radial, Station, Swath, Trajectory: {}'.format(cdm_data_type))
    # Accept any capitalization variant of "Standard Name Table".
    regex = re.compile(r'[sS]tandard [nN]ame [tT]able')
    test_ctx.assert_true(regex.search(standard_name_vocab),
                         "standard_name_vocabulary doesn't contain 'Standard Name Table': {}".format(standard_name_vocab))
    return test_ctx.to_result()
Check the global required and highly recommended attributes for 1 . 1 templates . These go an extra step besides just checking that they exist .
10,458
def check_base_required_attributes(self, dataset):
    """
    Check the global required and highly recommended attributes for
    2.0 templates. These go an extra step besides just checking that
    they exist: each value is validated against its accepted value(s).
    """
    test_ctx = TestCtx(BaseCheck.HIGH, 'Required global attributes')
    conventions = getattr(dataset, 'Conventions', '')
    feature_type = getattr(dataset, 'featureType', '')
    accepted_conventions = ['CF-1.6', 'ACDD-1.3']
    # Conventions is a comma-separated list; both CF-1.6 and ACDD-1.3
    # must appear for the check to pass.
    dataset_conventions = conventions.replace(' ', '').split(',')
    for accepted_convention in accepted_conventions:
        if accepted_convention not in dataset_conventions:
            test_ctx.assert_true(False, 'Conventions attribute is missing or is not equal to "CF-1.6, ACDD-1.3": {}'.format(conventions))
            break
    else:
        # All accepted conventions were found: record a passing check.
        test_ctx.assert_true(True, '')
    test_ctx.assert_true(feature_type in ['point', 'timeSeries', 'trajectory', 'profile', 'timeSeriesProfile', 'trajectoryProfile'],
                         'Feature type must be one of point, timeSeries, trajectory, profile, timeSeriesProfile, trajectoryProfile: {}'.format(feature_type))
    return test_ctx.to_result()
Check the global required and highly recommended attributes for 2 . 0 templates . These go an extra step besides just checking that they exist .
10,459
def check_recommended_global_attributes(self, dataset):
    """
    Check the global recommended attributes for 2.0 templates. These
    go an extra step besides just checking that they exist: values are
    validated against controlled vocabularies and formats.
    """
    recommended_ctx = TestCtx(BaseCheck.MEDIUM, 'Recommended global attributes')
    # sea_name entries must come from the NODC sea names list
    # (compared case-insensitively).
    sea_names = [sn.lower() for sn in util.get_sea_names()]
    sea_name = getattr(dataset, 'sea_name', '')
    sea_name = sea_name.replace(', ', ',')
    sea_name = sea_name.split(',') if sea_name else []
    for sea in sea_name:
        recommended_ctx.assert_true(
            sea.lower() in sea_names,
            'sea_name attribute should exist and should be from the NODC sea names list: {} is not a valid sea name'.format(sea)
        )
    # Each time/date attribute must parse as ISO-8601.
    for attr in ['time_coverage_start', 'time_coverage_end', 'date_created', 'date_modified']:
        attr_value = getattr(dataset, attr, '')
        try:
            parse_datetime(attr_value)
            recommended_ctx.assert_true(True, '')
        except ISO8601Error:
            recommended_ctx.assert_true(False, '{} should exist and be ISO-8601 format (example: PT1M30S), currently: {}'.format(attr, attr_value))
    value = getattr(dataset, 'geospatial_vertical_positive', '')
    recommended_ctx.assert_true(value.lower() in ['up', 'down'], 'geospatial_vertical_positive attribute should be up or down: {}'.format(value))
    # Either spelling (acknowledgment/acknowledgement) is accepted.
    ack_exists = any((getattr(dataset, attr, '') != '' for attr in ['acknowledgment', 'acknowledgement']))
    recommended_ctx.assert_true(ack_exists, 'acknowledgement attribute should exist and not be empty')
    standard_name_vocab = getattr(dataset, 'standard_name_vocabulary', '')
    regex = re.compile(r'[sS]tandard [nN]ame [tT]able')
    recommended_ctx.assert_true(regex.search(standard_name_vocab), "standard_name_vocabulary doesn't contain 'Standard Name Table': {}".format(standard_name_vocab))
    # comment is optional, but must be non-empty when present.
    if hasattr(dataset, 'comment'):
        recommended_ctx.assert_true(getattr(dataset, 'comment', '') != '', 'comment attribute should not be empty if specified')
    return recommended_ctx.to_result()
Check the global recommended attributes for 2 . 0 templates . These go an extra step besides just checking that they exist .
10,460
def check_base_suggested_attributes(self, dataset):
    """Check the global suggested attributes for 2.0 templates.

    Goes a step beyond existence checks: validates value formats
    (ISO-8601 dates, unit spellings, cdm_data_type vocabulary) and that
    contributor names and roles line up one-to-one.

    :param dataset: open netCDF-like dataset (attributes read via getattr)
    :returns: aggregated check result
    """
    suggested_ctx = TestCtx(BaseCheck.LOW, 'Suggested global attributes')
    platform_name = getattr(dataset, 'platform', '')
    suggested_ctx.assert_true(platform_name != '', 'platform should exist and point to a term in :platform_vocabulary.')
    cdm_data_type = getattr(dataset, 'cdm_data_type', '')
    suggested_ctx.assert_true(
        cdm_data_type.lower() in ['grid', 'image', 'point', 'radial', 'station', 'swath', 'trajectory'],
        'cdm_data_type must be one of Grid, Image, Point, Radial, Station, Swath, Trajectory: {}'.format(cdm_data_type))
    # every date attribute must parse as ISO-8601
    for attr in ['date_modified', 'date_issued', 'date_metadata_modified']:
        attr_value = getattr(dataset, attr, '')
        try:
            parse_datetime(attr_value)
            suggested_ctx.assert_true(True, '')  # record a success
        except ISO8601Error:
            suggested_ctx.assert_true(
                False,
                '{} should exist and be ISO-8601 format (example: PT1M30S), currently: {}'.format(attr, attr_value))
    units = getattr(dataset, 'geospatial_lat_units', '').lower()
    suggested_ctx.assert_true(units == 'degrees_north', 'geospatial_lat_units attribute should be degrees_north: {}'.format(units))
    units = getattr(dataset, 'geospatial_lon_units', '').lower()
    suggested_ctx.assert_true(units == 'degrees_east', 'geospatial_lon_units attribute should be degrees_east: {}'.format(units))
    contributor_name = getattr(dataset, 'contributor_name', '')
    contributor_role = getattr(dataset, 'contributor_role', '')
    # BUG FIX: names must come from contributor_name -- the original split
    # contributor_role twice, so the length comparison could never fail.
    names = contributor_name.split(',')
    roles = contributor_role.split(',')
    suggested_ctx.assert_true(contributor_name != '', 'contributor_name should exist and not be empty.')
    suggested_ctx.assert_true(contributor_role != '', 'contributor_role should exist and not be empty.')
    # single length check (the original asserted this twice)
    suggested_ctx.assert_true(len(names) == len(roles), 'length of contributor names matches length of roles')
    return suggested_ctx.to_result()
Check the global suggested attributes for 2 . 0 templates . These go an extra step besides just checking that they exist .
10,461
def _configure(self):
    """Configure the core of sirbot.

    Loads the bundled default ``config.yml``, merges it under the user
    config, and applies the logging configuration (falling back to INFO
    on the 'sirbot' logger when none is provided).
    """
    path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config.yml')
    with open(path) as file:
        # safe_load: the config file must not be able to construct
        # arbitrary Python objects (yaml.load without a Loader is unsafe
        # and deprecated)
        defaultconfig = yaml.safe_load(file)
    self.config = merge_dict(self.config, defaultconfig)
    if 'logging' in self.config:
        logging.config.dictConfig(self.config['logging'])
    else:
        logging.getLogger('sirbot').setLevel('INFO')
Configure the core of sirbot
10,462
def _import_plugins(self) -> None:
    """Import and register plugins in the plugin manager.

    Plugin module paths come from ``config['sirbot']['plugins']``.  If an
    import fails, the current working directory is added to ``sys.path``
    once and the import retried (supports local plugin modules);
    otherwise the original error is re-raised.
    """
    logger.debug('Importing plugins')
    self._pm = pluggy.PluginManager('sirbot')
    self._pm.add_hookspecs(hookspecs)
    for plugin in self.config['sirbot']['plugins']:
        try:
            p = importlib.import_module(plugin)
        except (ModuleNotFoundError, ):
            if os.getcwd() not in sys.path:
                # retry with the cwd on sys.path
                sys.path.append(os.getcwd())
                p = importlib.import_module(plugin)
            else:
                raise
        self._pm.register(p)
Import and register plugin in the plugin manager .
10,463
def _initialize_plugins(self):
    """Initialize the plugins advertised by the pluggy ``plugins`` hook.

    Each plugin is recorded with its config, factory registry name and a
    start priority (default 50) used later to order startup.
    """
    logger.debug('Initializing plugins')
    plugins = self._pm.hook.plugins(loop=self._loop)
    if plugins:
        for plugin in plugins:
            name = plugin.__name__
            registry_name = plugin.__registry__ or plugin.__name__
            config = self.config.get(name, {})
            priority = config.get('priority', 50)
            # NOTE(review): a falsy priority (e.g. 0) silently skips the
            # plugin entirely -- confirm this is intentional
            if priority:
                self._plugins[name] = {
                    'plugin': plugin,
                    'config': config,
                    'priority': priority,
                    'factory': registry_name
                }
                self._start_priority[priority].append(name)
    else:
        logger.error('No plugins found')
Initialize the plugins
10,464
def _register_factory(self):
    """Index the available plugin factories into the global registry.

    Only plugins with a truthy priority and a callable ``factory``
    attribute are registered; the registry is frozen afterwards.
    """
    for info in self._plugins.values():
        if not info['priority']:
            continue
        factory = getattr(info['plugin'], 'factory', None)
        if callable(factory):
            registry[info['factory']] = factory
    registry.freeze()
Index the available factories
10,465
async def _configure_plugins(self) -> None:
    """Configure all plugins concurrently.

    Each plugin's ``configure`` coroutine receives its own config, the
    shared HTTP session and the aiohttp router.
    """
    logger.debug('Configuring plugins')
    funcs = [
        info['plugin'].configure(
            config=info['config'],
            session=self._session,
            router=self.app.router)
        for info in self._plugins.values()
    ]
    if funcs:
        # run every configure() concurrently
        await asyncio.gather(*funcs, loop=self._loop)
    logger.debug('Plugins configured')
Configure the plugins
10,466
async def _start_plugins(self) -> None:
    """Start the plugins grouped by priority (highest first).

    Within one priority group all plugins are started as tasks, then we
    poll until every started plugin reports ``started``.  A task that
    finished early has its result fetched so any startup exception
    propagates immediately.
    """
    logger.debug('Starting plugins')
    for priority in sorted(self._start_priority, reverse=True):
        logger.debug('Starting plugins %s', ', '.join(self._start_priority[priority]))
        for name in self._start_priority[priority]:
            plugin = self._plugins[name]
            self._tasks[name] = self._loop.create_task(plugin['plugin'].start())
        while not all(self._plugins[name]['plugin'].started for name in self._tasks):
            for task in self._tasks.values():
                if task.done():
                    # re-raise any exception from a plugin's start()
                    task.result()
            await asyncio.sleep(0.2, loop=self._loop)
        else:
            # while/else: runs once the group has fully started
            logger.debug('Plugins %s started', ', '.join(self._start_priority[priority]))
Start the plugins by priority
10,467
def _create_settings(self):
    """Build the settings object sent to the frontend visualization.

    Replaces ``self.settings`` (a list of column names) with a dict of
    column descriptors, the serving port and a trie of the docs.
    """
    column_specs = [{"Header": column, "accessor": column} for column in self.settings]
    self.settings = {
        "columns": column_specs,
        "port": self.port,
        "docs": construct_trie(self.docs),
    }
Creates the settings object that will be sent to the frontend vizualization
10,468
def run_server(self):
    """Run a server answering index queries directly.

    No javascript table is created; this serves the index over HTTP on
    localhost at ``self.port`` (blocking call).
    """
    app = build_app()
    run(app, host='localhost', port=self.port)
Runs a server to handle queries to the index without creating the javascript table .
10,469
def strip_spaces(value, sep=None, join=True):
    """Clean leading/trailing whitespace and collapse repeated separators.

    :param value: input string
    :param sep: separator to split on (None means any whitespace run)
    :param join: when True return a re-joined string, else the list of parts
    """
    parts = [piece.strip() for piece in value.strip().split(sep)]
    if not join:
        return parts
    return (sep or ' ').join(parts)
Strips leading and trailing whitespace and also collapses runs of whitespace into a single space.
10,470
async def rank(self, request, origin: Optional[Text]) -> Tuple[float, Optional[BaseTrigger], Optional[type], Optional[bool], ]:
    """Compute the rank of this transition for a given request.

    Returns ``(score, trigger, destination, do_not_register)``; trigger
    and destination are None when the transition cannot apply at all.
    """
    if self.origin_name == origin:
        score = 1.0
    elif self.origin_name is None:
        # transition from "anywhere" is allowed but penalized
        score = settings.JUMPING_TRIGGER_PENALTY
    else:
        # wrong origin: this transition does not apply
        return 0.0, None, None, None
    trigger = self.factory(request)
    # trigger.rank() may be sync or async; run_or_return handles both
    rank = await run_or_return(trigger.rank())
    score *= self.weight * (rank or 0.0)
    return score, trigger, self.dest, self.do_not_register
Computes the rank of this transition for a given request .
10,471
def check_dimensions(self, dataset):
    """Verify every geophysical variable uses the point feature type.

    Fails outright when no time variable can be found, since a time
    dimension is required for point feature types.
    """
    required_ctx = TestCtx(BaseCheck.HIGH, 'All geophysical variables are point feature types')
    time_var = util.get_time_variable(dataset)
    if not time_var:
        required_ctx.assert_true(False, 'A dimension representing time is required for point feature types')
        return required_ctx.to_result()
    time_dims = dataset.variables[time_var].dimensions
    # first dimension of the time variable (the original's `None or x`
    # was just `x`); falsy when the time variable is dimensionless
    first_dim = time_dims and time_dims[0]
    template = ('{} must be a valid timeseries feature type. It must have dimensions of ({}), '
                'and all coordinates must have dimensions of ({})')
    for variable in util.get_geophysical_variables(dataset):
        required_ctx.assert_true(
            util.is_point(dataset, variable),
            template.format(variable, first_dim, first_dim))
    return required_ctx.to_result()
Checks that the feature types of this dataset are consistent with a point dataset.
10,472
def settings(cls):
    """Find the settings for the current class inside the platforms config.

    Returns the matching platform's ``settings`` dict ({} when the key is
    absent); implicitly returns None when no configured platform class
    matches ``cls``.
    """
    from bernard.platforms.management import get_platform_settings
    for platform in get_platform_settings():
        candidate = import_class(platform['class'])
        if candidate == cls:
            return platform.get('settings', {})
Find the settings for the current class inside the platforms configuration .
10,473
async def _notify(self, message: BaseMessage, responder: Responder):
    """Notify all registered callbacks that a message was received.

    When ``fsm_creates_task`` is set the returned coroutine is NOT
    awaited here -- presumably the callback schedules its own task in
    that mode; TODO confirm ownership of the coroutine with the FSM.
    """
    for cb in self._listeners:
        coro = cb(message, responder, self.fsm_creates_task)
        if not self.fsm_creates_task:
            self._register = await coro
Notify all callbacks that a message was received .
10,474
async def async_init(self):
    """Create the HTTP session and schedule deferred initialization.

    The session keeps outgoing connexions to the platform alive; the
    remaining setup runs later in ``_deferred_init``.
    """
    self.session = aiohttp.ClientSession()
    asyncio.get_event_loop().create_task(self._deferred_init())
During async init we just need to create a HTTP session so we can keep outgoing connexions to the platform alive .
10,475
def accept(self, stack: Stack):
    """Check whether the stack matches one of the PATTERNS.

    On the first match, annotate the stack with the pattern name and
    return True; return False when nothing matches.
    """
    for name, pattern in self.PATTERNS.items():
        if not stack.match_exp(pattern):
            continue
        stack.annotation = name
        return True
    return False
Checks that the stack can be accepted according to the PATTERNS .
10,476
def send(self, request: Request, stack: Stack) -> Coroutine:
    """Send a stack to the platform.

    The stack must carry a known annotation; if not, ``accept()`` is
    attempted once and UnacceptableStack is raised on failure.  The
    matching ``_send_<annotation>`` method produces the coroutine.
    """
    if stack.annotation not in self.PATTERNS:
        if not self.accept(stack):
            raise UnacceptableStack('Cannot accept stack {}'.format(stack))
    func = getattr(self, '_send_' + stack.annotation)
    return func(request, stack)
Send a stack to the platform .
10,477
def to_unit_memory(number):
    """Return a human-readable memory-size string for *number* bytes.

    Scales through Kb / Mb / Gb, rounding to two decimals; values below
    100 Kb stay in Kb, below 300 Mb stay in Mb.
    """
    kb = 1024
    number /= kb
    if number < 100:
        return '{} Kb'.format(round(number, 2))
    number /= kb
    if number < 300:
        return '{} Mb'.format(round(number, 2))
    return '{} Gb'.format(round(number / kb, 2))
Creates a string representation of memory size given number .
10,478
def to_percentage(number, rounding=2):
    """Return a percentage string for *number* (multiplied by 100 first).

    Integral results drop the decimal part ('50%'); others are rounded
    to *rounding* decimals ('12.34%').
    """
    scaled = float(number) * 100
    truncated = int(scaled)
    rounded = round(scaled, rounding)
    shown = truncated if truncated == rounded else rounded
    return '{}%'.format(shown)
Creates a percentage string representation from the given number . The number is multiplied by 100 before adding a % character .
10,479
def set_editor(self, editor):
    """Set the associated editor.

    When the editor's offset calculator emits ``pic_infos_available`` the
    table refreshes automatically.  Passing None detaches the current
    editor.  A weak proxy is stored so this object does not keep the
    editor alive.
    """
    if self._editor is not None:
        try:
            self._editor.offset_calculator.pic_infos_available.disconnect(self._update)
        except (AttributeError, RuntimeError, ReferenceError):
            # signal already disconnected, or the editor was destroyed
            pass
    self._editor = weakref.proxy(editor) if editor else editor
    try:
        self._editor.offset_calculator.pic_infos_available.connect(self._update)
    except AttributeError:
        # no editor (None) or editor without an offset calculator
        pass
Sets the associated editor when the editor s offset calculator mode emit the signal pic_infos_available the table is automatically refreshed .
10,480
def patch_conf(settings_patch=None, settings_file=None):
    """Reload the configuration from scratch and patch it.

    Only the default config is loaded, not the environment-specified
    config.  Generator meant for context-manager use (presumably wrapped
    in ``contextlib.contextmanager`` at the decoration site -- confirm).
    """
    if settings_patch is None:
        settings_patch = {}
    reload_config()
    os.environ[ENVIRONMENT_VARIABLE] = settings_file if settings_file else ''
    from bernard.conf import settings as l_settings
    r_settings = l_settings._settings  # the real underlying settings dict
    r_settings.update(settings_patch)
    if 'bernard.i18n' in modules:
        # i18n caches derive from the config; refresh them after patching
        from bernard.i18n import translate, intents
        translate._regenerate_word_dict()
        intents._refresh_intents_db()
    yield
Reload the configuration form scratch . Only the default config is loaded not the environment - specified config .
10,481
def resolve(self, key):
    """Resolve *key* to an object instance.

    :raises KeyError: when no registration exists for *key*
    """
    entry = self._registrations.get(key)
    if entry is None:
        raise KeyError("Unknown key: '{0}'".format(key))
    return entry.resolve(self, key)
Resolves the requested key to an object instance raising a KeyError if the key is missing
10,482
def dispose(self):
    """Dispose every performed registration and reset the container.

    The container can be used again afterwards.
    """
    for entry in self._registrations.values():
        entry.dispose()
    self._registrations = {}
Disposes every performed registration ; the container can then be used again
10,483
def build_workspace_path(user_id, workflow_id=None):
    """Build the relative workspace path for a user.

    :param user_id: owner of the workspace
    :param workflow_id: optional workflow appended as a subdirectory
    :returns: relative path 'users/<user_id>/workflows[/<workflow_id>]'
    """
    segments = ['users', str(user_id), 'workflows']
    if workflow_id:
        segments.append(str(workflow_id))
    return os.path.join(*segments)
Build user s workspace relative path .
10,484
def _get_workflow_with_uuid_or_name(uuid_or_name, user_uuid):
    """Get a Workflow from the database by UUIDv4 or by name.

    Accepted name forms: plain name (resolves to the latest run) or
    ``name.run_number`` for a specific run.

    :raises ValueError: on empty/non-ascii input or when nothing matches
    """
    from reana_db.models import Workflow
    if not uuid_or_name:
        raise ValueError('No Workflow was specified.')
    # workflow identifiers are restricted to ascii
    try:
        uuid_or_name.encode('ascii')
    except UnicodeEncodeError:
        raise ValueError('Workflow name {} is not valid.'.format(uuid_or_name))
    try:
        is_uuid = UUID('{' + uuid_or_name + '}', version=4)
    except (TypeError, ValueError):
        is_uuid = None
    if is_uuid:
        return _get_workflow_by_uuid(uuid_or_name)
    else:
        # try the "name.run_number" form; fall back to plain-name lookup
        try:
            workflow_name, run_number = uuid_or_name.rsplit('.', maxsplit=1)
        except ValueError:
            # no '.' at all: plain name
            return _get_workflow_by_name(uuid_or_name, user_uuid)
        if not run_number:
            return _get_workflow_by_name(workflow_name, user_uuid)
        if not run_number.isdigit():
            # the suffix is part of the name itself (e.g. "my.workflow")
            return _get_workflow_by_name(uuid_or_name, user_uuid)
        workflow = Workflow.query.filter(
            Workflow.name == workflow_name,
            Workflow.run_number == run_number,
            Workflow.owner_id == user_uuid).one_or_none()
        if not workflow:
            raise ValueError(
                'REANA_WORKON is set to {0}, but '
                'that workflow does not exist. '
                'Please set your REANA_WORKON environment '
                'variable appropriately.'.format(workflow_name, run_number))
        return workflow
Get Workflow from database with uuid or name .
10,485
def _get_workflow_by_name(workflow_name, user_uuid):
    """Return the latest run of the Workflow named *workflow_name*.

    :raises ValueError: when the user owns no workflow by that name
    """
    from reana_db.models import Workflow
    query = Workflow.query.filter(
        Workflow.name == workflow_name,
        Workflow.owner_id == user_uuid)
    workflow = query.order_by(Workflow.run_number.desc()).first()
    if not workflow:
        raise ValueError('REANA_WORKON is set to {0}, but '
                         'that workflow does not exist. '
                         'Please set your REANA_WORKON environment '
                         'variable appropriately.'.format(workflow_name))
    return workflow
From the Workflows named workflow_name, return the one with the latest run_number.
10,486
def _get_workflow_by_uuid(workflow_uuid):
    """Return the Workflow whose id matches *workflow_uuid* (UUIDv4).

    :raises ValueError: when no such workflow exists
    """
    from reana_db.models import Workflow
    workflow = Workflow.query.filter(Workflow.id_ == workflow_uuid).first()
    if not workflow:
        raise ValueError('REANA_WORKON is set to {0}, but '
                         'that workflow does not exist. '
                         'Please set your REANA_WORKON environment '
                         'variable appropriately.'.format(workflow_uuid))
    return workflow
Get Workflow with UUIDv4 .
10,487
async def _watch(self):
    """Watching loop: reload whenever an inotify event names our file."""
    file_name = os.path.basename(self._file_path)
    logger.info('Watching %s "%s"', self.THING, self._file_path, )
    while self._running:
        evt = await self._watcher.get_event()
        # the watcher covers the whole directory; only react to our file
        if evt.name == file_name:
            await self._load()
            logger.info('Reloading changed %s from "%s"', self.THING, self._file_path)
Start the watching loop .
10,488
async def start(self, file_path, locale=None, kwargs=None):
    """Set up the watching utilities, start the loop and load data once.

    With ``I18N_LIVE_RELOAD`` enabled, an aionotify watcher on the file's
    directory triggers reloads on MOVED_TO/MODIFY events; otherwise the
    file is loaded a single time.
    """
    self._file_path = os.path.realpath(file_path)
    self._locale = locale
    if kwargs:
        self._kwargs = kwargs
    if settings.I18N_LIVE_RELOAD:
        loop = asyncio.get_event_loop()
        self._running = True
        self._watcher = aionotify.Watcher()
        # watch the directory (MOVED_TO catches atomic rename-into-place)
        self._watcher.watch(
            path=os.path.dirname(self._file_path),
            flags=aionotify.Flags.MOVED_TO | aionotify.Flags.MODIFY,
        )
        await self._watcher.setup(loop)
        await self._load()
        loop.create_task(self._watch())
    else:
        await self._load()
Setup the watching utilities start the loop and load data a first time .
10,489
def _update(self, data: TransDict, *args, **kwargs):
    """Propagate an update to every registered listener."""
    for listener in self.listeners:
        listener(data, *args, **kwargs)
Propagate updates to listeners
10,490
def print_info(self):
    """Print some info that the user may find useful.

    Discovers every ``info_*`` method on this object, records the names
    in ``self.plugins``, and invokes each one (echoing a header line
    first when ``self.echo`` is set).
    """
    self.plugins = [key for key in dir(self) if key.startswith("info_")]
    for key in self.plugins:
        if self.echo:
            Console.ok("> {0}".format(key.replace("_", " ", 1)))
        # call the method directly instead of building a string for
        # exec() -- same behavior, no dynamic code evaluation
        getattr(self, key)()
prints some info that the user may find useful
10,491
def load_from_args_as_dataframe(args):
    """Given parsed variant-loading arguments, return a pandas DataFrame.

    Returns None when neither ``--variants`` nor ``--single-variant`` was
    specified.  Merges all variant sources on the standard columns, adds
    a space-separated ``sources`` column, and applies ref/alt/loci
    filters.

    :raises ValueError: when the sources mix reference genomes
    """
    if not args.variants and not args.single_variant:
        return None
    if args.variant_source_name:
        variant_source_names = util.expand(
            args.variant_source_name,
            'variant_source_name',
            'variant source',
            len(args.variants))
    else:
        variant_source_names = util.drop_prefix(args.variants)
    variant_to_sources = collections.defaultdict(list)
    dfs = []
    for i in range(len(args.variants)):
        name = variant_source_names[i]
        # with a single source the metadata prefix omits the source name
        prefix = (
            'metadata:' if len(args.variants) == 1
            else "metadata:%s:" % name)
        df = load_as_dataframe(
            args.variants[i],
            name=name,
            genome=args.genome,
            max_variants=args.max_variants_per_source,
            only_passing=not args.include_failing_variants,
            metadata_column_prefix=prefix)
        if df.shape[0] == 0:
            # logging.warn is a deprecated alias of logging.warning
            logging.warning("No variants loaded from: %s" % args.variants[i])
        else:
            for variant in df.variant:
                variant_to_sources[variant].append(name)
            dfs.append(df)
    if args.single_variant:
        variants = []
        extra_args = {}
        if args.genome:
            extra_args = {
                'ensembl': varcode.reference.infer_genome(args.genome)
            }
        for (locus_str, ref, alt) in args.single_variant:
            locus = Locus.parse(locus_str)
            variant = varcode.Variant(
                locus.contig, locus.inclusive_start, ref, alt, **extra_args)
            variants.append(variant)
            variant_to_sources[variant].append("commandline")
        dfs.append(variants_to_dataframe(variants))
    df = dfs.pop(0)
    for other_df in dfs:
        df = pandas.merge(
            df, other_df, how='outer',
            on=["variant"] + STANDARD_DATAFRAME_COLUMNS)
    genomes = df["genome"].unique()
    if len(genomes) > 1:
        raise ValueError(
            "Mixing references is not supported. "
            "Reference genomes: %s" % (", ".join(genomes)))
    df["sources"] = [" ".join(variant_to_sources[v]) for v in df.variant]
    # .ix was removed in pandas 1.0; .loc handles these boolean masks
    if args.ref:
        df = df.loc[df.ref.isin(args.ref)]
    if args.alt:
        df = df.loc[df.alt.isin(args.alt)]
    loci = loci_util.load_from_args(
        util.remove_prefix_from_parsed_args(args, "variant"))
    if loci is not None:
        df = df.loc[
            [loci.intersects(pileup_collection.to_locus(v)) for v in df.variant]]
    return df
Given parsed variant - loading arguments return a pandas DataFrame .
10,492
def request(self, cmd, *args, **kwargs):
    """Request data from the server.

    :param cmd: value sent as the 'action' parameter
    :param kwargs: extra request parameters (may override 'action')
    """
    payload = {'action': cmd, **kwargs}
    return self.__request(self.url, payload)
Request data from the server.
10,493
def __request(self, url, params):
    """Make an HTTP POST request to the server and return JSON data.

    For 'data' actions the raw response body is returned; otherwise the
    body is decoded as JSON.

    :raises ServerError: on type or I/O failures during the request
    """
    log.debug('request: %s %s' % (url, str(params)))
    try:
        response = urlopen(url, urlencode(params)).read()
        # 'data' responses can be large/binary: don't log the body
        if params.get('action') != 'data':
            log.debug('response: %s' % response)
        if params.get('action', None) == 'data':
            return response
        else:
            return json.loads(response)
    # Python 3 syntax: "except X as e" (the Python 2 "except X, e"
    # comma form is a SyntaxError on Python 3)
    except TypeError as e:
        log.exception('request error')
        raise ServerError(e)
    except IOError as e:
        log.error('request error: %s' % str(e))
        raise ServerError(e)
Make an HTTP POST request to the server and return JSON data .
10,494
def position(self):
    """Return the position of this locus when it spans a single base.

    :raises ValueError: when the locus covers more than one base
    """
    if self.end == self.start + 1:
        return self.start
    raise ValueError("Not a single base: %s" % str(self))
If this locus spans a single base this property gives that position . Otherwise raises a ValueError .
10,495
def from_interbase_coordinates(contig, start, end=None):
    """Return a Locus from 0-based interbase coordinates.

    When *end* is omitted the locus covers a single base at *start*.
    The contig name is normalized before constructing the Locus.
    """
    typechecks.require_string(contig)
    typechecks.require_integer(start)
    end = start + 1 if end is None else end
    typechecks.require_integer(end)
    normalized_contig = pyensembl.locus.normalize_chromosome(contig)
    return Locus(normalized_contig, start, end)
Given coordinates in 0 - based interbase coordinates return a Locus instance .
10,496
def variant_context(reference_fasta, contig, inclusive_start, inclusive_end, alt, context_length):
    """Retrieve the reference region surrounding a variant.

    Returns ``(context_5prime, context_mutation, context_3prime)``.  When
    the first reference base is a purine (A/G), all three pieces are
    reverse-complemented so the mutation is always expressed relative to
    the pyrimidine strand.

    :param reference_fasta: pyfaidx-style mapping of contig -> sequence
    :param inclusive_start: 1-based inclusive start (converted below)
    :param context_length: number of flanking bases on each side
    """
    start = int(inclusive_start) - 1  # convert to 0-based
    end = int(inclusive_end)
    full_sequence = reference_fasta[contig]
    left = str(full_sequence[start - context_length:start].seq).upper()
    middle = str(full_sequence[start:end].seq).upper()
    right = str(full_sequence[end:end + context_length].seq).upper()
    if middle[0] in ('A', 'G'):
        # purine reference base: reverse-complement and swap the flanks
        context_5prime = pyfaidx.complement(right)[::-1]
        context_3prime = pyfaidx.complement(left)[::-1]
        context_mutation = "%s>%s" % (pyfaidx.complement(middle)[::-1], pyfaidx.complement(alt)[::-1])
    else:
        context_5prime = left
        context_3prime = right
        context_mutation = "%s>%s" % (middle, alt)
    return (context_5prime, context_mutation, context_3prime)
Retrieve the surronding reference region from a variant .
10,497
def similarity(self, other: 'Trigram') -> float:
    """Compute the trigram similarity with *other*.

    Jaccard-style ratio: |intersection| / |union|; 0 when either side
    has no trigrams at all.
    """
    mine = self._trigrams
    theirs = other._trigrams
    if not mine or not theirs:
        return 0
    shared = float(len(mine & theirs))
    return shared / (len(mine) + len(theirs) - shared)
Compute the similarity with the provided other trigram .
10,498
def _match(self, local: Tuple[Trigram, ...], other: Trigram) -> float:
    """Match a trigram tuple against another trigram.

    The first local trigram is the positive pattern, the rest are
    negatives; when the strongest negative beats the positive match the
    score collapses to 0.0.
    """
    positive = local[0] % other
    negative = max((neg % other for neg in local[1:]), default=0)
    return 0.0 if negative > positive else positive
Match a trigram with another one . If the negative matching wins returns an inverted matching .
10,499
def similarity(self, other: Trigram) -> float:
    """Return the best similarity of *other* against all known trigrams."""
    scores = (self._match(known, other) for known in self.trigrams)
    return max(scores, default=0)
Find the best similarity within known trigrams .