idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
1,900
def entrypoint(section, option):
    """Return the entry point object for the given (section, option) pair.

    Raises KeyError when the option cannot be resolved within the section.
    """
    try:
        return entrypoints(section)[option]
    except KeyError:
        raise KeyError('Cannot resolve type "{}" to a recognised vsgen "{}" type.'.format(option, section))
Returns the entry point object given a (section, option) pair.
1,901
def infer_declared(ms, namespace=None):
    """Return declared key/value info from the macaroons' first-party caveats.

    A caveat is first-party when its location is empty; see declared_caveat.
    """
    conditions = [
        cav.caveat_id_bytes.decode('utf-8')
        for m in ms
        for cav in m.caveats
        if cav.location is None or cav.location == ''
    ]
    return infer_declared_from_conditions(conditions, namespace)
Retrieves any declared information from the given macaroons and returns it as a key - value map . Information is declared with a first party caveat as created by declared_caveat .
1,902
def infer_declared_from_conditions(conds, namespace=None):
    """Like infer_declared, but takes first-party caveat condition strings."""
    if namespace is None:
        namespace = Namespace()
    prefix = namespace.resolve(STD_NAMESPACE)
    if prefix is None:
        prefix = ''
    declared_cond = prefix + COND_DECLARED
    info = {}
    conflicts = []
    for cond in conds:
        try:
            name, rest = parse_caveat(cond)
        except ValueError:
            name, rest = '', ''
        if name != declared_cond:
            continue
        parts = rest.split(' ', 1)
        if len(parts) != 2:
            continue
        key, val = parts
        old_val = info.get(key)
        if old_val is not None and old_val != val:
            conflicts.append(key)
            continue
        info[key] = val
    # Keys declared with conflicting values are dropped entirely.
    for key in set(conflicts):
        del info[key]
    return info
like infer_declared except that it is passed a set of first party caveat conditions as a list of string rather than a set of macaroons .
1,903
def _pre_activate_injection(self):
    """Run registration and signalling before child-class activation."""
    registry = self.app.plugins.classes
    if not registry.exist(self.__class__.__name__):
        registry.register([self.__class__])
    self._load_needed_plugins()
    self.app.signals.send("plugin_activate_pre", self)
Injects functions before the activation routine of child classes gets called
1,904
def register(self, signal, description):
    """Register a new signal; only registered signals may be sent."""
    return self.__app.signals.register(signal, self._plugin, description)
Registers a new signal . Only registered signals are allowed to be send .
1,905
def get(self, signal=None):
    """Return one signal, or a dict of this plugin's signals when signal is None."""
    return self.__app.signals.get(signal, self._plugin)
Returns a single signal or a dictionary of signals for this plugin .
1,906
def get_receiver(self, receiver=None):
    """Return one receiver, or a dict of this plugin's receivers when None."""
    return self.__app.signals.get_receiver(receiver, self._plugin)
Returns a single receiver or a dictionary of receivers for this plugin .
1,907
def validate(mcs, bases, attributes):
    """Run all attribute checks; skip classes deriving directly from object."""
    if bases[0] is object:
        return None
    mcs.check_model_cls(attributes)
    mcs.check_include_exclude(attributes)
    mcs.check_properties(attributes)
Check attributes .
1,908
def get_properties(attributes):
    """Return names of attributes that are ``property`` instances."""
    return [name for name, attr in six.iteritems(attributes)
            if isinstance(attr, property)]
Return tuple of names of defined properties .
1,909
def check_properties(mcs, attributes):
    """Raise AttributeError if __include__/__exclude__ mention defined properties."""
    include, exclude = mcs.get_prepared_include_exclude(attributes)
    properties = mcs.get_properties(attributes)
    intersections = list(set(properties).intersection(include if include else exclude))
    if not intersections:
        return None
    attr_name = '__include__' if include else '__exclude__'
    raise AttributeError(
        "It is not allowed to mention already defined properties: "
        "{0} in {1} attributes.".format(", ".join(intersections), attr_name))
Check whether intersections exist .
1,910
def on_deleted(self, event):
    """Broadcast a 'filesystem:file_deleted' message for the removed path."""
    payload = {
        'filepath': event.src_path,
        'is_directory': event.is_directory,
        'dirpath': os.path.dirname(event.src_path),
    }
    message = BroadcastMessage(key='filesystem:file_deleted', data=payload)
    BroadcastManager.broadcast(message)
Event Handler when a file is deleted
1,911
def read_auth_info(agent_file_content):
    """Parse agent-file JSON content into an AuthInfo.

    The result is suitable for passing to the AgentInteractor constructor.
    Raises AgentFileFormatError on malformed content.
    """
    try:
        data = json.loads(agent_file_content)
        return AuthInfo(
            key=bakery.PrivateKey.deserialize(data['key']['private']),
            agents=[Agent(url=a['url'], username=a['username'])
                    for a in data.get('agents', [])],
        )
    except (KeyError, ValueError, TypeError) as e:
        raise AgentFileFormatError('invalid agent file', e)
Loads agent authentication information from the specified content string as read from an agents file . The returned information is suitable for passing as an argument to the AgentInteractor constructor .
1,912
def interact(self, client, location, interaction_required_err):
    """Obtain an agent macaroon from the discharger, discharge it with the
    local private key, and return the result as a discharge token."""
    info = interaction_required_err.interaction_method('agent', InteractionInfo)
    if info.login_url is None or info.login_url == '':
        raise httpbakery.InteractionError(
            'no login-url field found in agent interaction method')
    agent = self._find_agent(location)
    if not location.endswith('/'):
        location += '/'
    login_url = urljoin(location, info.login_url)
    response = requests.get(
        login_url,
        params={'username': agent.username,
                'public-key': str(self._auth_info.key.public_key)},
        auth=client.auth())
    if response.status_code != 200:
        raise httpbakery.InteractionError(
            'cannot acquire agent macaroon: {} {}'.format(
                response.status_code, response.text))
    mac = response.json().get('macaroon')
    if mac is None:
        raise httpbakery.InteractionError('no macaroon in response')
    mac = bakery.Macaroon.from_dict(mac)
    discharged = bakery.discharge_all(mac, None, self._auth_info.key)
    buf = bytearray()
    for m in discharged:
        buf.extend(utils.b64decode(m.serialize()))
    return httpbakery.DischargeToken(kind='agent', value=bytes(buf))
Implement Interactor.interact by obtaining a macaroon from the discharger, discharging it with the local private key, and using the discharged macaroon as a discharge token.
1,913
def legacy_interact(self, client, location, visit_url):
    """Acquire the discharge macaroon by POSTing the agent's username and
    public key to *visit_url* (legacy protocol)."""
    agent = self._find_agent(location)
    client = copy.copy(client)
    client.key = self._auth_info.key
    resp = client.request(
        method='POST',
        url=visit_url,
        json={
            'username': agent.username,
            'public_key': str(self._auth_info.key.public_key),
        },
    )
    if resp.status_code != 200:
        raise httpbakery.InteractionError(
            'cannot acquire agent macaroon from {}: {} (response body: {!r})'.format(
                visit_url, resp.status_code, resp.text))
    if not resp.json().get('agent_login', False):
        raise httpbakery.InteractionError('agent login failed')
Implement LegacyInteractor.legacy_interact by obtaining the discharge macaroon using the client's private key.
1,914
def expiry_time(ns, cavs):
    """Return the earliest time-before caveat in *cavs*, or None when absent."""
    prefix = ns.resolve(STD_NAMESPACE)
    time_before_cond = condition_with_prefix(prefix, COND_TIME_BEFORE)
    earliest = None
    for cav in cavs:
        if not cav.first_party():
            continue
        condition = cav.caveat_id_bytes.decode('utf-8')
        name, rest = parse_caveat(condition)
        if name != time_before_cond:
            continue
        try:
            # Timestamps are RFC 3339; comparisons use naive UTC datetimes.
            et = pyrfc3339.parse(rest, utc=True).replace(tzinfo=None)
        except ValueError:
            continue
        if earliest is None or et < earliest:
            earliest = et
    return earliest
Returns the minimum time of any time - before caveats found in the given list or None if no such caveats were found .
1,915
def replace_all(text, replace_dict):
    """Apply every old->new substitution in *replace_dict* to *text*."""
    for old, new in replace_dict.items():
        text = text.replace(old, new)
    return text
Replace multiple strings in a text .
1,916
def map_or_apply(function, param):
    """Apply *function* to *param*, or map it over *param* when it is a list.

    Returns the first element of each result; None if any result is empty.
    """
    try:
        if isinstance(param, list):
            return [next(iter(function(item))) for item in param]
        return next(iter(function(param)))
    except StopIteration:
        return None
Map the function over param or apply it once, depending on whether param is a list or a single item.
1,917
def batch(iterable, size):
    """Yield successive *size*-item iterators over *iterable*."""
    source = iter(iterable)
    while True:
        chunk = islice(source, size)
        try:
            first = next(chunk)
        except StopIteration:
            return
        # Re-attach the element consumed by the emptiness probe.
        yield chain([first], chunk)
Get items from a sequence a batch at a time .
1,918
def slugify(value):
    """Normalize *value* to an ASCII, filesystem-friendly slug."""
    try:
        text_type = unicode  # Python 2
    except NameError:
        text_type = str  # Python 3
    if not isinstance(value, text_type):
        value = text_type(value)
    value = (unicodedata.normalize('NFKD', value)
             .encode('ascii', 'ignore')
             .decode('ascii'))
    value = text_type(_SLUGIFY_STRIP_RE.sub('', value).strip())
    return _SLUGIFY_HYPHENATE_RE.sub('_', value)
Normalizes string converts to lowercase removes non - alpha characters and converts spaces to hyphens to have nice filenames .
1,919
def get_plaintext_citations(arxiv_id):
    """Fetch the .bbl files for *arxiv_id* and return all plaintext citations."""
    citations = []
    for bbl_file in arxiv.get_bbl(arxiv_id):
        citations.extend(bbl.get_plaintext_citations(bbl_file))
    return citations
Get the citations of a given preprint in plain text .
1,920
def get_cited_dois(arxiv_id):
    """Return the DOIs of papers cited in the preprint's .bbl files."""
    dois = {}
    for bbl_file in arxiv.get_bbl(arxiv_id):
        dois.update(bbl.get_cited_dois(bbl_file))
    return dois
Get the DOIs of the papers cited in a . bbl file .
1,921
def get_subcommand_kwargs(mgr, name, namespace):
    """Return (subcommand, kwargs) with values pulled from parsed *namespace*."""
    subcmd = mgr.get(name)
    subcmd_kwargs = {}
    for opt in list(subcmd.args.values()) + list(subcmd.options.values()):
        if hasattr(namespace, opt.dest):
            subcmd_kwargs[opt.dest] = getattr(namespace, opt.dest)
    return subcmd, subcmd_kwargs
Get subcommand options from global parsed arguments .
1,922
def get_plaintext_citations(file):
    """Return cleaned citation lines from a file path or a string of text.

    The input should contain one citation per line.  (`file` shadows the
    builtin but is kept — the parameter name is part of the interface.)
    """
    if os.path.isfile(file):
        with open(file, 'r') as fh:
            content = fh.readlines()
    else:
        content = file.splitlines()
    return [tools.clean_whitespaces(line) for line in content]
Parse a plaintext file to get a clean list of plaintext citations. The file should have one citation per line.
1,923
def get_cited_dois(file):
    """Map each plaintext citation to a DOI (or None).

    Citations are resolved locally (DOI / arXiv id in the text) first;
    the remainder is resolved in batches via the CrossRef links API.
    """
    if isinstance(file, list):
        plaintext_citations = file
    else:
        plaintext_citations = get_plaintext_citations(file)
    dois = {}
    crossref_queue = []
    # First pass: direct extraction from the citation text.
    for citation in plaintext_citations[:]:
        matched_dois = doi.extract_from_text(citation)
        if len(matched_dois) > 0:
            dois[citation] = next(iter(matched_dois))
            continue
        matched_arxiv = arxiv.extract_from_text(citation)
        if len(matched_arxiv) > 0:
            dois[citation] = arxiv.to_doi(next(iter(matched_arxiv)))
            continue
        crossref_queue.append(tools.remove_urls(citation))
    # Second pass: CrossRef lookup for everything still unresolved.
    for batch in tools.batch(crossref_queue, CROSSREF_MAX_BATCH_SIZE):
        batch = [i for i in batch]
        try:
            request = requests.post(CROSSREF_LINKS_API_URL, json=batch)
            for result in request.json()["results"]:
                try:
                    dois[result["text"]] = result["doi"]
                except KeyError:
                    dois[result["text"]] = None
        except (RequestException, ValueError, KeyError):
            # Best-effort: mark the whole failed batch as unresolved.
            for i in batch:
                dois[i] = None
    return dois
Get the DOIs of the papers cited in a plaintext file. The file should have one citation per line.
1,924
def is_valid(isbn_id):
    """Check that *isbn_id* is a valid canonically- or mask-formatted ISBN."""
    if isbnlib.notisbn(isbn_id):
        return False
    canonical = isbnlib.get_canonical_isbn(isbn_id)
    return canonical == isbn_id or isbnlib.mask(canonical) == isbn_id
Check that a given string is a valid ISBN .
1,925
def extract_from_text(text):
    """Extract canonical ISBNs found in *text*."""
    candidates = (isbnlib.get_canonical_isbn(token)
                  for token in isbnlib.get_isbnlike(text))
    return [isbn for isbn in candidates if isbn is not None]
Extract ISBNs from a text .
1,926
def get_bibtex(isbn_identifier):
    """Return a BibTeX entry for the ISBN, falling back to isbnlib metadata."""
    bibtex = doi.get_bibtex(to_doi(isbn_identifier))
    if bibtex is None:
        meta = isbnlib.meta(isbn_identifier, 'default')
        bibtex = isbnlib.registry.bibformatters['bibtex'](meta)
    return bibtex
Get a BibTeX string for the given ISBN .
1,927
def used_options(self):
    """Yield option objects already present on the command line."""
    for word in self.words:
        if not word.startswith('-'):
            continue
        for option in self.cmd.options.values():
            if word in option.option_strings:
                yield option
Return options already used in the command line
1,928
def available_options(self):
    """Yield options still usable on the current command line."""
    for option in self.cmd.options.values():
        # NOTE(review): `self.used_options` is used uncalled — presumably a
        # property in the enclosing class; confirm before refactoring.
        if option.is_multiple or option not in list(self.used_options):
            yield option
Return options that can be used given the current cmd line
1,929
def used_args(self):
    """Yield arg objects already consumed on the command line."""
    values = []
    for idx, word in enumerate(self.words[1:]):
        if word.startswith('-'):
            continue
        option_str = self.words[1:][idx - 1]
        option = self.get_option(option_str)
        if option is None or not option.need_value:
            values.append((word, word == self.document.get_word_before_cursor(WORD=True)))
    logger.debug("Found args values %s" % values)
    for arg in self.cmd.args.values():
        if not values:
            # BUG FIX: `raise StopIteration` inside a generator becomes a
            # RuntimeError since Python 3.7 (PEP 479); use `return` instead.
            return
        if arg.is_multiple:
            values = []
            yield arg
        elif type(arg.nargs) is int:
            for _ in range(arg.nargs):
                value = values.pop(0)
                if value[1] is False:
                    yield arg
                if not values:
                    return  # BUG FIX: was `raise StopIteration` (PEP 479)
Return args already used in the command line
1,930
def available_args(self):
    """Yield args that can still be provided on the command line."""
    used = list(self.used_args)
    logger.debug('Found used args: %s' % used)
    for arg in self.cmd.args.values():
        if arg.is_multiple or arg not in used:
            yield arg
        elif (type(arg.nargs) is int and arg.nargs > 1
              and not arg.nargs == used.count(arg)):
            yield arg
Return args that can be used given the current cmd line
1,931
def is_elem_ref(elem_ref):
    """True when *elem_ref* is an (ElemRefObj|ElemRefArr, container, key) triple."""
    if not elem_ref or not isinstance(elem_ref, tuple):
        return False
    return len(elem_ref) == 3 and (elem_ref[0] == ElemRefObj or elem_ref[0] == ElemRefArr)
Returns true if the elem_ref is an element reference
1,932
def get_elem(elem_ref, default=None):
    """Dereference *elem_ref*; non-references are returned unchanged."""
    if not is_elem_ref(elem_ref):
        return elem_ref
    kind, container, key = elem_ref
    if kind == ElemRefObj:
        return getattr(container, key, default)
    if kind == ElemRefArr:
        return container[key]
Gets the element referenced by elem_ref or returns the elem_ref directly if its not a reference .
1,933
def set_elem(elem_ref, elem):
    """Store *elem* through *elem_ref* and return *elem*."""
    if elem_ref is None or elem_ref == elem or not is_elem_ref(elem_ref):
        return elem
    kind, container, key = elem_ref
    if kind == ElemRefObj:
        setattr(container, key, elem)
    elif kind == ElemRefArr:
        container[key] = elem
    return elem
Sets element referenced by the elem_ref . Returns the elem .
1,934
def _preprocess(inp):
    """Rewrite spoken math wording into the canonical operator vocabulary."""
    inp = re.sub(r'(\b)a(\b)', r'\g<1>one\g<2>', inp)
    inp = re.sub(r'to the (.*) power', r'to \g<1>', inp)
    inp = re.sub(r'to the (.*?)(\b)', r'to \g<1>\g<2>', inp)
    inp = re.sub(r'log of', r'log', inp)
    inp = re.sub(r'(square )?root( of)?', r'sqrt', inp)
    inp = re.sub(r'squared', r'to two', inp)
    inp = re.sub(r'cubed', r'to three', inp)
    inp = re.sub(r'divided?( by)?', r'divide', inp)
    inp = re.sub(r'(\b)over(\b)', r'\g<1>divide\g<2>', inp)
    inp = re.sub(r'(\b)EE(\b)', r'\g<1>e\g<2>', inp)
    inp = re.sub(r'(\b)E(\b)', r'\g<1>e\g<2>', inp)
    inp = re.sub(r'(\b)pie(\b)', r'\g<1>pi\g<2>', inp)
    inp = re.sub(r'(\b)PI(\b)', r'\g<1>pi\g<2>', inp)

    def findImplicitMultiplications(inp):
        """Insert 'times' where multiplication is implied."""

        def findConstantMultiplications(inp):
            # "two pi" -> "two times pi"
            split = inp.split(' ')
            revision = ""
            converter = NumberService()
            for i, w in enumerate(split):
                if i > 0 and w in MathService.__constants__:
                    if converter.isValid(split[i - 1]):
                        revision += " times"
                if not revision:
                    revision = w
                else:
                    revision += " " + w
            return revision

        def findUnaryMultiplications(inp):
            # "five sqrt two" -> "five times sqrt two"
            split = inp.split(' ')
            revision = ""
            for i, w in enumerate(split):
                if i > 0 and w in MathService.__unaryOperators__:
                    last_op = split[i - 1]
                    binary = last_op in MathService.__binaryOperators__
                    unary = last_op in MathService.__unaryOperators__
                    if last_op and not (binary or unary):
                        revision += " times"
                if not revision:
                    revision = w
                else:
                    revision += " " + w
            return revision

        return findUnaryMultiplications(findConstantMultiplications(inp))

    return findImplicitMultiplications(inp)
Revise wording to match canonical and expected forms .
1,935
def _calculate(numbers, symbols):
    """Reduce *numbers* joined by binary-operator *symbols* to a single value,
    honoring precedence (pow, then mul/div, then add/sub)."""
    # BUG FIX: the original used `len(numbers) is 1`, an identity comparison
    # with an int literal (implementation-dependent, SyntaxWarning on 3.8+);
    # equality is the correct test.
    if len(numbers) == 1:
        return numbers[0]
    precedence = [[pow], [mul, div], [add, sub]]
    for op_group in precedence:
        for i, op in enumerate(symbols):
            if op in op_group:
                a = numbers[i]
                b = numbers[i + 1]
                result = MathService._applyBinary(a, b, op)
                new_numbers = numbers[:i] + [result] + numbers[i + 2:]
                new_symbols = symbols[:i] + symbols[i + 1:]
                # Recurse on the reduced expression.
                return MathService._calculate(new_numbers, new_symbols)
Calculates a final value given a set of numbers and symbols .
1,936
def parseEquation(self, inp):
    """Solve the spoken equation in *inp*; unary operators are reduced first."""
    inp = MathService._preprocess(inp)
    split = inp.split(' ')
    # Reduce the first unary operator by recursing on its operand and
    # substituting the numeric result back into the expression.
    for i, w in enumerate(split):
        if w in self.__unaryOperators__:
            op = self.__unaryOperators__[w]
            eq1 = ' '.join(split[:i])
            eq2 = ' '.join(split[i + 1:])
            result = MathService._applyUnary(self.parseEquation(eq2), op)
            return self.parseEquation(eq1 + " " + str(result))

    def extractNumbersAndSymbols(inp):
        numbers = []
        symbols = []
        next_number = ""
        for w in inp.split(' '):
            if w in self.__binaryOperators__:
                symbols.append(self.__binaryOperators__[w])
                if next_number:
                    numbers.append(next_number)
                    next_number = ""
            else:
                if next_number:
                    next_number += " "
                next_number += w
        if next_number:
            numbers.append(next_number)

        def convert(n):
            if n in self.__constants__:
                return self.__constants__[n]
            converter = NumberService()
            return converter.parse(n)

        numbers = [convert(n) for n in numbers]
        return numbers, symbols

    numbers, symbols = extractNumbersAndSymbols(inp)
    return MathService._calculate(numbers, symbols)
Solves the equation specified by the input string .
1,937
def register(self, command, description, function, params=[]):
    """Register a new command for this plugin.

    The mutable default is kept for interface compatibility; it is never
    mutated here.
    """
    return self.app.commands.register(command, description, function, params, self.plugin)
Registers a new command for a plugin .
1,938
def get(self, name=None):
    """Return this plugin's commands, optionally filtered by *name*."""
    return self.app.commands.get(name, self.plugin)
Returns commands which can be filtered by name .
1,939
def get(self, name=None, plugin=None):
    """Return registered commands, optionally filtered by name and/or plugin.

    Returns a dict when no name is given, a single command (or None) when
    a name is given.
    """
    if plugin is None:
        if name is None:
            return self._commands
        return self._commands.get(name)
    if name is None:
        return {key: cmd for key, cmd in self._commands.items()
                if cmd.plugin == plugin}
    command = self._commands.get(name)
    if command is not None and command.plugin == plugin:
        return command
    return None
Returns commands which can be filtered by name or plugin .
1,940
def unregister(self, command):
    """Remove *command* from the registry and the click root command."""
    if command in self._commands.keys():
        del self._click_root_command.commands[command]
        del self._commands[command]
        self.log.debug("Command %s got unregistered" % command)
    else:
        self.log.warning("Can not unregister command %s" % command)
Unregisters an existing command so that this command is no longer available on the command line interface .
1,941
def declared_caveat(key, value):
    """Return a first-party caveat declaring key=value.

    Keys containing spaces (or empty keys) produce an error caveat.
    """
    if ' ' in key or key == '':
        return error_caveat('invalid caveat \'declared\' key "{}"'.format(key))
    return _first_party(COND_DECLARED, key + ' ' + value)
Returns a declared caveat asserting that the given key is set to the given value .
1,942
def _operation_caveat(cond, ops):
    """Helper for allow_caveat and deny_caveat: join *ops* into one caveat."""
    for op in ops:
        if ' ' in op:
            return error_caveat('invalid operation name "{}"'.format(op))
    return _first_party(cond, ' '.join(ops))
Helper for allow_caveat and deny_caveat .
1,943
def to_bytes(s):
    """Return *s* as bytes, encoding str with UTF-8 when necessary.

    Raises TypeError for any other type.
    """
    if isinstance(s, six.binary_type):
        return s
    if isinstance(s, six.string_types):
        return s.encode('utf-8')
    # BUG FIX: the original passed the format template and the type as two
    # separate TypeError args, so the placeholder was never filled.
    raise TypeError('want string or bytes, got {}'.format(type(s)))
Return s as a bytes type using utf - 8 encoding if necessary .
1,944
def b64decode(s):
    """Base64-decode *s* (URL-safe or standard alphabet, padded or not).

    Accepts str or bytes; raises ValueError on malformed input.
    """
    s = to_bytes(s)
    if not s.endswith(b'='):
        s = s + b'=' * (-len(s) % 4)
    try:
        # BUG FIX: the original tested `if '_' or '-' in s`, which is always
        # truthy (non-empty string literal), so the standard-alphabet branch
        # was unreachable.  Test membership of both URL-safe bytes instead.
        if b'_' in s or b'-' in s:
            return base64.urlsafe_b64decode(s)
        return base64.b64decode(s)
    except (TypeError, binascii.Error) as e:
        raise ValueError(str(e))
Base64 decodes a base64 - encoded string in URL - safe or normal format with or without padding . The argument may be string or bytes .
1,945
def raw_urlsafe_b64encode(b):
    """URL-safe base64 encode *b* with trailing padding stripped."""
    encoded = base64.urlsafe_b64encode(to_bytes(b))
    return encoded.rstrip(b'=')
Base64 encode using URL - safe encoding with padding removed .
1,946
def cookie(url, name, value, expires=None):
    """Build an http_cookiejar.Cookie for *url* with a friendlier API.

    *expires* must be a naive datetime (interpreted as UTC) or None.
    """
    u = urlparse(url)
    domain = u.hostname
    # Cookie domains need at least one dot unless they are IP addresses.
    if '.' not in domain and not _is_ip_addr(domain):
        domain += ".local"
    port = str(u.port) if u.port is not None else None
    secure = u.scheme == 'https'
    if expires is not None:
        if expires.tzinfo is not None:
            raise ValueError('Cookie expiration must be a naive datetime')
        expires = (expires - datetime(1970, 1, 1)).total_seconds()
    return http_cookiejar.Cookie(
        version=0,
        name=name,
        value=value,
        port=port,
        port_specified=port is not None,
        domain=domain,
        domain_specified=True,
        domain_initial_dot=False,
        path=u.path,
        path_specified=True,
        secure=secure,
        expires=expires,
        discard=False,
        comment=None,
        comment_url=None,
        rest=None,
        rfc2109=False,
    )
Return a new Cookie using a slightly more friendly API than that provided by six . moves . http_cookiejar
1,947
def _login(self):
    """Authenticate against IM by POSTing the Spring security login form.

    The session cookie obtained here is used for subsequent API calls.
    """
    self.logger.debug("Logging into " + "{}/{}".format(
        self._im_api_url, "j_spring_security_check"))
    self._im_session.headers.update({
        'Content-Type': 'application/x-www-form-urlencoded',
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.99 Safari/537.36'})
    self.j_username = self._username
    self.j_password = self._password
    requests.packages.urllib3.disable_warnings()
    payload = {
        'j_username': self.j_username,
        'j_password': self.j_password,
        'submit': 'Login',
    }
    r = self._im_session.post(
        "{}/{}".format(self._im_api_url, "j_spring_security_check"),
        verify=self._im_verify_ssl,
        data=payload)
    self.logger.debug("Login POST response: " + "{}".format(r.text))
    self._im_logged_in = True
LOGIN CAN ONLY BE DONE BY POSTING TO A HTTP FORM . A COOKIE IS THEN USED FOR INTERACTING WITH THE API
1,948
def _do_get(self, uri, **kwargs):
    """Issue a GET against the gateway API and return the decoded JSON body.

    Raises RuntimeError on a non-OK status or a communication failure.
    """
    self.logger.debug("_do_get() " + "{}/{}".format(self._api_url, uri))
    try:
        response = self._im_session.get(
            "{}/{}".format(self._api_url, uri), **kwargs)
    except Exception as e:
        # BUG FIX: the original used a bare `except` that swallowed the
        # cause; chain it so the real error is visible.
        raise RuntimeError(
            "_do_get() - Communication error with ScaleIO gateway") from e
    # BUG FIX: the original called .json() first and then read
    # .status_code off the resulting dict, which always raised
    # AttributeError; check the response object, then decode.
    if response.status_code != requests.codes.ok:
        raise RuntimeError(
            "_do_get() - HTTP response error" + str(response.status_code))
    return response.json()
Convenient method for GET requests. Returns the HTTP response of the GET request.
1,949
def uploadFileToIM(self, directory, filename, title):
    """Upload a package file to IM, mimicking the browser upload form."""
    self.logger.debug("uploadFileToIM(" + "{},{},{})".format(directory, filename, title))
    parameters = {
        'data-filename-placement': 'inside',
        'title': str(filename),
        'filename': str(filename),
        'type': 'file',
        'name': 'files',
        'id': 'fileToUpload',
        'multiple': '',
    }
    file_dict = {'files': (str(filename), open(directory + filename, 'rb'), 'application/x-rpm')}
    # NOTE(review): this encoder is built but never sent — presumably a
    # leftover from an earlier approach; kept for fidelity.
    m = MultipartEncoder(fields=file_dict)
    temp_username = self._username
    temp_password = self._password
    temp_im_api_url = self._im_api_url
    temp_im_session = requests.Session()
    temp_im_session.mount('https://', TLS1Adapter())
    temp_im_verify_ssl = self._im_verify_ssl
    resp = temp_im_session.post(
        "{}/{}".format(temp_im_api_url, "types/InstallationPackage/instances/uploadPackage"),
        auth=HTTPBasicAuth(temp_username, temp_password),
        files=file_dict,
        verify=False,
        data=parameters)
    self.logger.info("Uploaded: " + "{}".format(filename))
    self.logger.debug("HTTP Response: " + "{}".format(resp.status_code))
Parameters as they look in the form for uploading packages to IM
1,950
async def dump_varint_t(writer, type_or, pv):
    """Write *pv* tagged with size mark *type_or*, little-endian; return width."""
    width = int_mark_to_size(type_or)
    # The 2 low bits carry the size mark; the value is shifted above them.
    n = (pv << 2) | type_or
    buffer = _UINT_BUFFER
    for _ in range(width):
        buffer[0] = n & 0xFF
        await writer.awrite(buffer)
        n >>= 8
    return width
Binary dump of the integer of given type
1,951
async def dump_varint(writer, val):
    """Dump *val* as a variable-size integer, choosing the smallest width."""
    if val <= 63:
        return await dump_varint_t(writer, PortableRawSizeMark.BYTE, val)
    if val <= 16383:
        return await dump_varint_t(writer, PortableRawSizeMark.WORD, val)
    if val <= 1073741823:
        return await dump_varint_t(writer, PortableRawSizeMark.DWORD, val)
    if val > 4611686018427387903:
        raise ValueError('Int too big')
    return await dump_varint_t(writer, PortableRawSizeMark.INT64, val)
Binary dump of the variable size integer
1,952
async def load_varint(reader):
    """Load a variable-size integer serialized by dump_varint."""
    buffer = _UINT_BUFFER
    await reader.areadinto(buffer)
    width = int_mark_to_size(buffer[0] & PortableRawSizeMark.MASK)
    result = buffer[0]
    shift = 8
    for _ in range(width - 1):
        await reader.areadinto(buffer)
        result += buffer[0] << shift
        shift += 8
    # Drop the 2-bit size mark carried in the low bits.
    return result >> 2
Binary load of variable size integer serialized by dump_varint
1,953
async def dump_string(writer, val):
    """Write *val* length-prefixed to the binary stream."""
    await dump_varint(writer, len(val))
    await writer.awrite(val)
Binary string dump
1,954
async def load_string(reader):
    """Read a length-prefixed byte string from the binary stream."""
    length = await load_varint(reader)
    data = bytearray(length)
    await reader.areadinto(data)
    return bytes(data)
Loads string from binary stream
1,955
async def dump_blob(writer, elem, elem_type, params=None):
    """Write a blob (BlobType or raw bytes-like) length-prefixed to the stream."""
    elem_is_blob = isinstance(elem, x.BlobType)
    data = bytes(getattr(elem, x.BlobType.DATA_ATTR) if elem_is_blob else elem)
    # NOTE(review): the length prefix uses len(elem), not len(data); for a
    # BlobType the two could differ — confirm this is intended.
    await dump_varint(writer, len(elem))
    await writer.awrite(data)
Dumps blob to a binary stream
1,956
async def container_load(self, container_type, params=None, container=None, obj=None):
    """Load a container of elements from the reader.

    Supports loading into an existing *container* via element references.
    For non-fixed-size containers the element count is derived from the
    remaining buffer size (XMRRPC blob arrays carry no size prefix).
    """
    elem_type = x.container_elem_type(container_type, params)
    elem_size = await self.get_element_size(elem_type=elem_type, params=params)
    data_left = len(self.iobj.buffer)
    c_len = container_type.SIZE
    if not container_type.FIX_SIZE:
        if data_left == 0:
            return None
        if data_left % elem_size != 0:
            raise helpers.ArchiveException('Container size mod elem size not 0')
        c_len = data_left // elem_size
    res = container if container else []
    for i in range(c_len):
        try:
            self.tracker.push_index(i)
            fvalue = await self._load_field(
                elem_type,
                params[1:] if params else None,
                x.eref(res, i) if container else None)
            self.tracker.pop()
        except Exception as e:
            # Wrap with tracker context so the failing index is reported.
            raise helpers.ArchiveException(e, tracker=self.tracker) from e
        if not container:
            res.append(fvalue)
    return res
Loads container of elements from the reader . Supports the container ref . Returns loaded container . Blob array writer as in XMRRPC is serialized without size serialization .
1,957
def make_index_for(package, index_dir, verbose=True):
    """Create an index.html for one package under *index_dir*."""
    # BUG FIX: the original aliased index_template to the '<li>...' item
    # template and then formatted it with title=/packages= keywords, which
    # raised IndexError (the item template only has positional fields).
    # Use a real page template.  TODO confirm against upstream markup.
    index_template = (
        '<html><head><title>{title}</title></head>\n'
        '<body><h1>{title}</h1>\n<ul>\n{packages}\n</ul>\n</body></html>'
    )
    item_template = '<li><a href="{1}">{0}</a></li>'
    index_filename = os.path.join(index_dir, "index.html")
    if not os.path.isdir(index_dir):
        os.makedirs(index_dir)
    parts = []
    for pkg_filename in package.files:
        pkg_name = os.path.basename(pkg_filename)
        if pkg_name == "index.html":
            # Link to the package directory rather than its index page.
            pkg_name = os.path.basename(os.path.dirname(pkg_filename))
        else:
            pkg_name = package.splitext(pkg_name)
        pkg_relpath_to = os.path.relpath(pkg_filename, index_dir)
        parts.append(item_template.format(pkg_name, pkg_relpath_to))
    if not parts:
        print("OOPS: Package %s has no files" % package.name)
        return
    if verbose:
        root_index = not Package.isa(package.files[0])
        if root_index:
            info = "with %d package(s)" % len(package.files)
        else:
            package_versions = sorted(set(package.versions))
            info = ", ".join(reversed(package_versions))
        print("%-30s %s" % (package.name, info))
    with open(index_filename, "w") as f:
        packages = "\n".join(parts)
        text = index_template.format(title=package.name, packages=packages)
        f.write(text.strip())
        # (redundant f.close() inside the with-block removed)
Create an index . html for one package .
1,958
def make_package_index(download_dir):
    """Create a pypi-server-like "simple" index tree below *download_dir*."""
    if not os.path.isdir(download_dir):
        raise ValueError("No such directory: %r" % download_dir)
    pkg_rootdir = os.path.join(download_dir, "simple")
    if os.path.isdir(pkg_rootdir):
        shutil.rmtree(pkg_rootdir, ignore_errors=True)
    os.mkdir(pkg_rootdir)
    # Group downloaded package files by package name.
    package_map = {}
    packages = []
    for filename in sorted(os.listdir(download_dir)):
        if not Package.isa(filename):
            continue
        pkg_filepath = os.path.join(download_dir, filename)
        package_name = Package.get_pkgname(pkg_filepath)
        package = package_map.get(package_name, None)
        if not package:
            package = Package(pkg_filepath)
            package_map[package.name] = package
            packages.append(package)
        else:
            package.files.append(pkg_filepath)
    # Root index links to each package's own index page.
    root_package = Package(None, "Python Package Index")
    root_package.files = [os.path.join(pkg_rootdir, pkg.name, "index.html")
                          for pkg in packages]
    make_index_for(root_package, pkg_rootdir)
    for package in packages:
        make_index_for(package, os.path.join(pkg_rootdir, package.name))
Create a pypi server like file structure below download directory .
1,959
def _convert_to_list ( self , value , delimiters ) : if not value : return [ ] if delimiters : return [ l . strip ( ) for l in value . split ( delimiters ) ] return [ l . strip ( ) for l in value . split ( ) ]
Return a list value translating from other types if necessary .
1,960
def getlist(self, section, option, raw=False, vars=None, fallback=[], delimiters=','):
    """Coerce the option in *section* to a list of stripped strings."""
    value = self.get(section, option, raw=raw, vars=vars, fallback=fallback)
    return self._convert_to_list(value, delimiters=delimiters)
A convenience method which coerces the option in the specified section to a list of strings .
1,961
def getfile(self, section, option, raw=False, vars=None, fallback="", validate=False):
    """Coerce the option to a file path; with validate, fall back unless it exists."""
    path = self.get(section, option, raw=raw, vars=vars, fallback=fallback)
    path = self._convert_to_path(path)
    if validate and not os.path.isfile(path):
        return fallback
    return path
A convenience method which coerces the option in the specified section to a file .
1,962
def getdir(self, section, option, raw=False, vars=None, fallback="", validate=False):
    """Coerce the option to a directory path; with validate, fall back unless it exists."""
    path = self.get(section, option, raw=raw, vars=vars, fallback=fallback)
    path = self._convert_to_path(path)
    if validate and not os.path.isdir(path):
        return fallback
    return path
A convenience method which coerces the option in the specified section to a directory .
1,963
def getdirs(self, section, option, raw=False, vars=None, fallback=[]):
    """Coerce the option to a list of existing directories.

    The option is read as a comma-separated list of glob patterns; only
    matches that are directories are returned.
    """
    # BUG FIX: the original ignored the raw/vars/fallback parameters and
    # always passed fallback=[]; forward them so callers' arguments apply.
    globs = self.getlist(section, option, raw=raw, vars=vars, fallback=fallback)
    return [f for g in globs for f in glob.glob(g) if os.path.isdir(f)]
A convenience method which coerces the option in the specified section to a list of directories .
1,964
def register(self, name, content, description=None):
    """Register a new document under *name* in the app's document registry.

    The owning plugin is attached automatically; the registry's return
    value is passed through.
    """
    registry = self.__app.documents
    return registry.register(name, content, self._plugin, description)
Register a new document .
1,965
def unregister(self, document):
    """Remove *document* from the registry so it is no longer available.

    Unknown document names are logged as a warning and otherwise ignored.
    """
    if document not in self.documents.keys():
        self.log.warning("Can not unregister document %s" % document)
    else:
        del (self.documents[document])
        # NOTE(review): this branch logs via self.__log while the branch
        # above uses self.log - confirm both attributes exist on the class.
        self.__log.debug("Document %s got unregistered" % document)
Unregisters an existing document so that this document is no longer available .
1,966
def get(self, document=None, plugin=None):
    """Fetch registered documents.

    * no arguments: the full document dict.
    * *document* only: that document, or None if unknown.
    * *plugin* only: dict of documents registered by that plugin.
    * both: the document, or None if unknown or owned by another plugin.
    """
    if document is None:
        if plugin is None:
            return self.documents
        return {key: doc for key, doc in self.documents.items()
                if doc.plugin == plugin}
    doc = self.documents.get(document)
    if doc is None:
        return None
    if plugin is not None and doc.plugin != plugin:
        return None
    return doc
Get one or more documents .
1,967
def initialise_by_names(self, plugins=None):
    """Initialise the given plugins by name without activating them.

    :param plugins: list of plugin-name strings (defaults to empty).
    :raises AttributeError: if *plugins* is not a list or holds non-strings.
    """
    if plugins is None:
        plugins = []
    self._log.debug("Plugins Initialisation started")
    if not isinstance(plugins, list):
        raise AttributeError("plugins must be a list, not %s" % type(plugins))
    self._log.debug("Plugins to initialise: %s" % ", ".join(plugins))
    plugin_initialised = []
    for plugin_name in plugins:
        if not isinstance(plugin_name, str):
            raise AttributeError("plugin name must be a str, not %s" % type(plugin_name))
        # Look up the registered class wrapper and initialise its class.
        plugin_class = self.classes.get(plugin_name)
        self.initialise(plugin_class.clazz, plugin_name)
        plugin_initialised.append(plugin_name)
    self._log.info("Plugins initialised: %s" % ", ".join(plugin_initialised))
Initialises given plugins but does not activate them .
1,968
def activate(self, plugins=[]):
    """Activate the given plugins by name, initialising them on demand.

    Plugins that are registered but not yet initialised are initialised
    first. In strict mode (``self._app.strict``) failures raise;
    otherwise they are logged and skipped.

    :param plugins: list of plugin-name strings.
    :raises AttributeError: if *plugins* is not a list of strings.
    :raises PluginNotActivatableException: if a plugin's activate() fails.
    """
    self._log.debug("Plugins Activation started")
    if not isinstance(plugins, list):
        raise AttributeError("plugins must be a list, not %s" % type(plugins))
    self._log.debug("Plugins to activate: %s" % ", ".join(plugins))
    plugins_activated = []
    for plugin_name in plugins:
        if not isinstance(plugin_name, str):
            raise AttributeError("plugin name must be a str, not %s" % type(plugin_name))
        # Registered but not initialised yet -> initialise on the fly.
        if plugin_name not in self._plugins.keys() and plugin_name in self.classes._classes.keys():
            self._log.debug("Initialisation needed before activation.")
            try:
                self.initialise_by_names([plugin_name])
            except Exception as e:
                self._log.error("Couldn't initialise plugin %s. Reason %s" % (plugin_name, e))
                if self._app.strict:
                    error = "Couldn't initialise plugin %s" % plugin_name
                    if sys.version_info[0] < 3:
                        # Python 2 has no exception chaining, so embed the cause.
                        error += "Reason: %s" % e
                    raise_from(Exception(error), e)
                else:
                    continue
        if plugin_name in self._plugins.keys():
            self._log.debug("Activating plugin %s" % plugin_name)
            if not self._plugins[plugin_name].active:
                try:
                    self._plugins[plugin_name].activate()
                except Exception as e:
                    raise_from(PluginNotActivatableException("Plugin %s could not be activated: %s" % (plugin_name, e)), e)
                else:
                    self._log.debug("Plugin %s activated" % plugin_name)
                    plugins_activated.append(plugin_name)
            else:
                # Already active: warn, and in strict mode raise.
                # NOTE(review): raising PluginNotInitialisableException here
                # looks inconsistent with the "already activated" message -
                # confirm the intended exception type.
                self._log.warning("Plugin %s got already activated." % plugin_name)
                if self._app.strict:
                    raise PluginNotInitialisableException()
    self._log.info("Plugins activated: %s" % ", ".join(plugins_activated))
Activates given plugins .
1,969
def deactivate(self, plugins=None):
    """Deactivate the given plugins by name.

    Unknown plugins and plugins that are already inactive are logged and
    skipped; a failing deactivate() is re-raised.

    :param plugins: list of plugin-name strings. The shared mutable
        default argument ([]) was replaced by None (same behaviour,
        avoids the mutable-default pitfall).
    :raises AttributeError: if *plugins* is not a list of strings.
    :raises PluginNotDeactivatableException: if deactivate() fails.
    """
    if plugins is None:
        plugins = []
    self._log.debug("Plugins Deactivation started")
    if not isinstance(plugins, list):
        raise AttributeError("plugins must be a list, not %s" % type(plugins))
    self._log.debug("Plugins to deactivate: %s" % ", ".join(plugins))
    plugins_deactivated = []
    for plugin_name in plugins:
        if not isinstance(plugin_name, str):
            raise AttributeError("plugin name must be a str, not %s" % type(plugin_name))
        if plugin_name not in self._plugins.keys():
            self._log.info("Unknown activated plugin %s" % plugin_name)
            continue
        self._log.debug("Deactivating plugin %s" % plugin_name)
        if not self._plugins[plugin_name].active:
            self._log.warning("Plugin %s seems to be already deactivated" % plugin_name)
        else:
            try:
                self._plugins[plugin_name].deactivate()
            except Exception as e:
                raise_from(PluginNotDeactivatableException(
                    "Plugin %s could not be deactivated" % plugin_name), e)
            else:
                self._log.debug("Plugin %s deactivated" % plugin_name)
                plugins_deactivated.append(plugin_name)
    self._log.info("Plugins deactivated: %s" % ", ".join(plugins_deactivated))
Deactivates given plugins .
1,970
def get(self, name=None):
    """Return the plugin registered under *name*.

    With no *name* the whole plugin dict is returned; an unknown name
    yields None.
    """
    if name is None:
        return self._plugins
    return self._plugins.get(name)
Returns the plugin object with the given name . Or if a name is not given the complete plugin dictionary is returned .
1,971
def is_active(self, name):
    """Return the plugin's ``active`` flag, or None if *name* is unknown.

    Bug fix: the original indexed ``self._plugins["name"]`` with the
    literal string "name" instead of the *name* argument, raising
    KeyError for every plugin not literally called "name".
    """
    if name in self._plugins.keys():
        return self._plugins[name].active
    return None
Returns True if plugin exists and is active . If plugin does not exist it returns None
1,972
def register(self, classes=None):
    """Register new plugin classes under their class names.

    :param classes: list of plugin classes. The shared mutable default
        argument ([]) was replaced by None (same behaviour, avoids the
        mutable-default pitfall).
    :raises AttributeError: if *classes* is not a list.
    """
    if classes is None:
        classes = []
    if not isinstance(classes, list):
        raise AttributeError("plugins must be a list, not %s." % type(classes))
    plugin_registered = []
    for plugin_class in classes:
        plugin_name = plugin_class.__name__
        self.register_class(plugin_class, plugin_name)
        self._log.debug("Plugin %s registered" % plugin_name)
        plugin_registered.append(plugin_name)
    self._log.info("Plugins registered: %s" % ", ".join(plugin_registered))
Registers new plugins .
1,973
def get(self, name=None):
    """Return the plugin class object registered under *name*.

    With no *name* the whole class dict is returned; an unknown name
    yields None.
    """
    if name is None:
        return self._classes
    return self._classes.get(name)
Returns the plugin class object with the given name . Or if a name is not given the complete plugin dictionary is returned .
1,974
def write(self):
    """Render the solution's Jinja template and write the .sln to disk."""
    filters = {
        # Uppercased "{...}"-wrapped GUID, as Visual Studio expects.
        'MSGUID': lambda x: ('{%s}' % x).upper(),
        # Path made relative to the solution file's directory.
        'relslnfile': lambda x: os.path.relpath(x, os.path.dirname(self.FileName)),
    }
    context = {'sln': self}
    return self.render(self.__jinja_template__, self.FileName, context, filters)
Writes the . sln file to disk .
1,975
def load_annotations(self, aname, sep=','):
    """Load per-cell annotations from a CSV file into the AnnData objects.

    Tries several CSV layouts (with/without header row and index column)
    until the row count matches the number of cells in the raw data.

    :param aname: path to the annotation CSV file.
    :param sep: separator used when re-reading with an index column.
    """
    ann = pd.read_csv(aname)
    # Cell names of the filtered and the raw data sets.
    cell_names = np.array(list(self.adata.obs_names))
    all_cell_names = np.array(list(self.adata_raw.obs_names))
    if (ann.shape[1] > 1):
        # Multiple columns: assume the first column is the cell index.
        ann = pd.read_csv(aname, index_col=0, sep=sep)
        if (ann.shape[0] != all_cell_names.size):
            # Row count mismatch: retry without a header row.
            ann = pd.read_csv(aname, index_col=0, header=None, sep=sep)
    else:
        if (ann.shape[0] != all_cell_names.size):
            # Single column but wrong length: retry without a header row.
            ann = pd.read_csv(aname, header=None, sep=sep)
    ann.index = np.array(list(ann.index.astype('<U100')))
    # ann1 = annotations restricted to the filtered cells; ann2 = all cells.
    ann1 = np.array(list(ann.T[cell_names].T.values.flatten()))
    ann2 = np.array(list(ann.values.flatten()))
    self.adata_raw.obs['annotations'] = pd.Categorical(ann2)
    self.adata.obs['annotations'] = pd.Categorical(ann1)
Loads cell annotations .
1,976
def dispersion_ranking_NN(self, nnm, num_norm_avg=50):
    """Compute spatial dispersion factors (gene weights) from a kNN graph.

    :param nnm: nearest-neighbour matrix used for kNN expression averaging.
    :param num_norm_avg: number of top dispersions averaged to form the
        clipping ceiling.
    :returns: flat array of per-gene weights in [0, 1], also stored in
        adata.var['weights'].
    """
    self.knn_avg(nnm)
    D_avg = self.adata.layers['X_knn_avg']
    # Fano factor (variance / mean) per gene; zero where the mean is zero.
    mu, var = sf.mean_variance_axis(D_avg, axis=0)
    dispersions = np.zeros(var.size)
    dispersions[mu > 0] = var[mu > 0] / mu[mu > 0]
    self.adata.var['spatial_dispersions'] = dispersions.copy()
    # Clip at the mean of the num_norm_avg largest dispersions to damp outliers.
    ma = np.sort(dispersions)[-num_norm_avg:].mean()
    dispersions[dispersions >= ma] = ma
    # Square-root of max-normalised dispersion gives the final weight.
    weights = ((dispersions / dispersions.max()) ** 0.5).flatten()
    self.adata.var['weights'] = weights
    return weights
Computes the spatial dispersion factors for each gene .
1,977
def plot_correlated_groups(self, group=None, n_genes=5, **kwargs):
    """Plot expression patterns for correlated gene groups.

    With no *group*, shows the first gene of every group; otherwise shows
    the top *n_genes* genes of the selected group. Extra keyword
    arguments are forwarded to show_gene_expression().
    """
    geneID_groups = self.adata.uns['gene_groups']
    if group is None:
        for member_genes in geneID_groups:
            self.show_gene_expression(member_genes[0], **kwargs)
    else:
        selected = geneID_groups[group]
        for rank in range(n_genes):
            self.show_gene_expression(selected[rank], **kwargs)
Plots orthogonal expression patterns .
1,978
def plot_correlated_genes(self, name, n_genes=5, number_of_features=1000, **kwargs):
    """Plot genes most correlated with the input gene *name*.

    Returns the ranked correlated gene names (excluding *name* itself),
    or None when the gene is absent from the filtered data set.
    """
    all_gene_names = np.array(list(self.adata.var_names))
    if (all_gene_names == name).sum() == 0:
        print("Gene not found in the filtered dataset. Note that genes "
              "are case sensitive.")
        return
    sds = self.corr_bin_genes(input_gene=name,
                              number_of_features=number_of_features)
    # Show at most n_genes correlated genes; index 0 is the gene itself.
    upper = min(n_genes + 1, sds.size)
    for rank in range(1, upper):
        self.show_gene_expression(sds[rank], **kwargs)
    return sds[1:]
Plots gene expression patterns correlated with the input gene .
1,979
def run_tsne(self, X=None, metric='correlation', **kwargs):
    """Wrapper around sklearn's t-SNE.

    With an explicit *X*, returns its 2-D embedding; otherwise embeds the
    stored PCA coordinates with metric=self.distance and saves the result
    in adata.obsm['X_tsne'].
    """
    if X is not None:
        return man.TSNE(metric=metric, **kwargs).fit_transform(X)
    embedding = man.TSNE(metric=self.distance, **kwargs).fit_transform(
        self.adata.obsm['X_pca'])
    self.adata.obsm['X_tsne'] = embedding
Wrapper for sklearn s t - SNE implementation .
1,980
def run_umap(self, X=None, metric=None, **kwargs):
    """Wrapper around umap-learn.

    With an explicit *X*, returns its 2-D embedding; otherwise embeds the
    stored PCA coordinates and saves the result in adata.obsm['X_umap'].
    The metric defaults to self.distance.
    """
    import umap as umap
    if metric is None:
        metric = self.distance
    reducer = umap.UMAP(metric=metric, **kwargs)
    if X is not None:
        return reducer.fit_transform(X)
    self.adata.obsm['X_umap'] = reducer.fit_transform(self.adata.obsm['X_pca'])
Wrapper for umap - learn .
1,981
def scatter(self, projection=None, c=None, cmap='rainbow', linewidth=0.0,
            edgecolor='k', axes=None, colorbar=True, s=10, **kwargs):
    """Display a 2-D scatter plot of a stored or supplied projection.

    :param projection: key into adata.obsm (e.g. 'X_umap'), an explicit
        coordinate array, or None to fall back to UMAP then t-SNE.
    :param c: colour spec - an obs column name, an array of string labels
        or numeric values, or a single colour; None plots plain points.
    :param axes: matplotlib axes to draw into (new figure otherwise).
    :param colorbar: attach a colorbar for label/value colourings.
    Remaining keyword arguments are forwarded to matplotlib's scatter.
    """
    if (not PLOTTING):
        print("matplotlib not installed!")
    else:
        # Resolve the 2-D coordinates to plot.
        if (isinstance(projection, str)):
            try:
                dt = self.adata.obsm[projection]
            except KeyError:
                print('Please create a projection first using run_umap or'
                      'run_tsne')
        elif (projection is None):
            # Default preference: UMAP first, then t-SNE.
            try:
                dt = self.adata.obsm['X_umap']
            except KeyError:
                try:
                    dt = self.adata.obsm['X_tsne']
                except KeyError:
                    print("Please create either a t-SNE or UMAP projection"
                          "first.")
                    return
        else:
            dt = projection
        if (axes is None):
            plt.figure()
            axes = plt.gca()
        if (c is None):
            plt.scatter(dt[:, 0], dt[:, 1], s=s, linewidth=linewidth,
                        edgecolor=edgecolor, **kwargs)
        else:
            if isinstance(c, str):
                try:
                    # Interpret a string as an obs column name.
                    c = self.adata.obs[c].get_values()
                except KeyError:
                    # NOTE(review): bare `0` looks like a placeholder for
                    # `pass`; the raw string is kept as the colour spec.
                    0
            if ((isinstance(c[0], str) or isinstance(c[0], np.str_))
                    and (isinstance(c, np.ndarray) or isinstance(c, list))):
                # String labels: convert to integer codes and label the
                # colorbar ticks with the original strings.
                i = ut.convert_annotations(c)
                ui, ai = np.unique(i, return_index=True)
                cax = axes.scatter(dt[:, 0], dt[:, 1], c=i, cmap=cmap, s=s,
                                   linewidth=linewidth, edgecolor=edgecolor,
                                   **kwargs)
                if (colorbar):
                    cbar = plt.colorbar(cax, ax=axes, ticks=ui)
                    cbar.ax.set_yticklabels(c[ai])
            else:
                # Numeric values, or a single colour spec (no colorbar).
                if not (isinstance(c, np.ndarray) or isinstance(c, list)):
                    colorbar = False
                i = c
                cax = axes.scatter(dt[:, 0], dt[:, 1], c=i, cmap=cmap, s=s,
                                   linewidth=linewidth, edgecolor=edgecolor,
                                   **kwargs)
                if (colorbar):
                    plt.colorbar(cax, ax=axes)
Display a scatter plot .
1,982
def show_gene_expression(self, gene, avg=True, axes=None, **kwargs):
    """Display a scatter plot coloured by a gene's expression.

    :param gene: gene name (case sensitive) in the filtered data set.
    :param avg: use the kNN-averaged layer when it is non-zero for this
        gene; otherwise fall back to log2(raw counts + 1).
    :param axes: matplotlib axes to draw into (new figure otherwise).
    Remaining keyword arguments are forwarded to scatter().

    Fix: corrected the "Gene note found" typo in the error message.
    """
    all_gene_names = np.array(list(self.adata.var_names))
    cell_names = np.array(list(self.adata.obs_names))
    all_cell_names = np.array(list(self.adata_raw.obs_names))
    idx = np.where(all_gene_names == gene)[0]
    name = gene
    if (idx.size == 0):
        print("Gene not found in the filtered dataset. Note that genes "
              "are case sensitive.")
        return
    if (avg):
        a = self.adata.layers['X_knn_avg'][:, idx].toarray().flatten()
        if a.sum() == 0:
            # Averaged layer is all-zero for this gene: fall back to raw.
            a = np.log2(self.adata_raw.X[np.in1d(all_cell_names, cell_names), :][:, idx].toarray().flatten() + 1)
    else:
        a = np.log2(self.adata_raw.X[np.in1d(all_cell_names, cell_names), :][:, idx].toarray().flatten() + 1)
    if axes is None:
        plt.figure()
        axes = plt.gca()
    self.scatter(c=a, axes=axes, **kwargs)
    axes.set_title(name)
Display a gene's expression.
1,983
def louvain_clustering(self, X=None, res=1, method='modularity'):
    """Cluster cells with Louvain community detection (vtraag/louvain).

    :param X: optional sparse affinity matrix; when None the stored
        neighbour connectivities are used and the labels are saved to
        adata.obs['louvain_clusters'].
    :param res: resolution parameter (unused for method='significance').
    :param method: 'significance' or anything else for the
        RBConfiguration (modularity-style) partition.
    :returns: membership array when an explicit *X* was given, else None.
    """
    if X is None:
        X = self.adata.uns['neighbors']['connectivities']
        save = True
    else:
        if not sp.isspmatrix_csr(X):
            X = sp.csr_matrix(X)
        save = False
    import igraph as ig
    import louvain
    # Build a kNN-sparsified similarity graph from X * X^T, scaled by k.
    adjacency = sparse_knn(X.dot(X.T) / self.k, self.k).tocsr()
    sources, targets = adjacency.nonzero()
    weights = adjacency[sources, targets]
    if isinstance(weights, np.matrix):
        weights = weights.A1  # flatten a 1xN matrix to a 1-D array
    g = ig.Graph(directed=True)
    g.add_vertices(adjacency.shape[0])
    g.add_edges(list(zip(sources, targets)))
    try:
        g.es['weight'] = weights
    except BaseException:
        # Best effort: fall back to an unweighted graph.
        pass
    if method == 'significance':
        cl = louvain.find_partition(g, louvain.SignificanceVertexPartition)
    else:
        cl = louvain.find_partition(g, louvain.RBConfigurationVertexPartition,
                                    resolution_parameter=res)
    if save:
        self.adata.obs['louvain_clusters'] = pd.Categorical(np.array(cl.membership))
    else:
        return np.array(cl.membership)
Runs Louvain clustering using the vtraag implementation . Assumes that louvain optional dependency is installed .
1,984
def kmeans_clustering(self, numc, X=None, npcs=15):
    """Cluster cells with k-means on (projected) PCA coordinates.

    :param numc: number of clusters.
    :param X: optional feature matrix; when None, the processed data is
        projected onto the first *npcs* principal components and the
        labels are saved to adata.obs['kmeans_clusters'].
    :param npcs: number of principal components used when X is None.
    :returns: label array when an explicit *X* was given, else None.
    """
    from sklearn.cluster import KMeans
    if X is None:
        # Project mean-centred processed data onto the top npcs PCs.
        D_sub = self.adata.uns['X_processed']
        X = (D_sub - D_sub.mean(0)).dot(
            self.adata.uns['pca_obj'].components_[:npcs, :].T)
        save = True
    else:
        save = False
    # L2-normalise each sample before clustering.
    cl = KMeans(n_clusters=numc).fit_predict(Normalizer().fit_transform(X))
    if save:
        self.adata.obs['kmeans_clusters'] = pd.Categorical(cl)
    else:
        return cl
Performs k - means clustering .
1,985
def identify_marker_genes_rf(self, labels=None, clusters=None, n_genes=4000):
    """Rank marker genes per cluster with one-vs-rest random forests.

    :param labels: cluster labels - None (use the first '*_clusters' obs
        column), an obs column name, or an explicit label array.
    :param clusters: optional subset of cluster ids; when None all
        clusters are processed and the result is also stored in
        adata.uns['marker_genes_rf'].
    :param n_genes: number of top-weighted genes used as features.
    :returns: dict mapping cluster id to genes ranked by importance.

    Fixes: `clusters == None` replaced with the idiomatic
    `clusters is None`; leftover debug `print(K)` removed.
    """
    if (labels is None):
        try:
            keys = np.array(list(self.adata.obs_keys()))
            lbls = self.adata.obs[ut.search_string(keys, '_clusters')[0][0]].get_values()
        except KeyError:
            print("Please generate cluster labels first or set the "
                  "'labels' keyword argument.")
            return
    elif isinstance(labels, str):
        lbls = self.adata.obs[labels].get_values().flatten()
    else:
        lbls = labels
    from sklearn.ensemble import RandomForestClassifier
    markers = {}
    if clusters is None:
        lblsu = np.unique(lbls)
    else:
        lblsu = np.unique(clusters)
    # Restrict features to the n_genes highest-weight genes.
    indices = np.argsort(-self.adata.var['weights'].values)
    X = self.adata.layers['X_disp'][:, indices[:n_genes]].toarray()
    for K in range(lblsu.size):
        # One-vs-rest binary target for the current cluster.
        y = np.zeros(lbls.size)
        y[lbls == lblsu[K]] = 1
        clf = RandomForestClassifier(n_estimators=100, max_depth=None,
                                     random_state=0)
        clf.fit(X, y)
        idx = np.argsort(-clf.feature_importances_)
        markers[lblsu[K]] = self.adata.uns['ranked_genes'][idx]
    if clusters is None:
        self.adata.uns['marker_genes_rf'] = markers
    return markers
Ranks marker genes for each cluster using a random forest classification approach .
1,986
def identify_marker_genes_corr(self, labels=None, n_genes=4000):
    """Rank marker genes per cluster by correlation with each cluster's
    mean expression profile.

    :param labels: cluster labels - None (use the first '*_clusters' obs
        column), an obs column name, or an explicit label array.
    :param n_genes: number of top-weighted genes considered.
    :returns: dict mapping cluster id to correlation-ranked gene names;
        also stored in adata.uns['marker_genes_corr'].
    """
    if (labels is None):
        try:
            keys = np.array(list(self.adata.obs_keys()))
            lbls = self.adata.obs[ut.search_string(keys, '_clusters')[0][0]].get_values()
        except KeyError:
            print("Please generate cluster labels first or set the "
                  "'labels' keyword argument.")
            return
    elif isinstance(labels, str):
        lbls = self.adata.obs[labels].get_values().flatten()
    else:
        lbls = labels
    w = self.adata.var['weights'].values
    s = StandardScaler()
    # Standardise the n_genes highest-weight genes, rescaled by weight.
    idxg = np.argsort(-w)[:n_genes]
    y1 = s.fit_transform(self.adata.layers['X_disp'][:, idxg].A) * w[idxg]
    all_gene_names = np.array(list(self.adata.var_names))[idxg]
    markers = {}
    lblsu = np.unique(lbls)
    for i in lblsu:
        # Cells belonging to cluster i.
        Gcells = np.array(list(self.adata.obs_names[lbls == i]))
        z1 = y1[np.in1d(self.adata.obs_names, Gcells), :]
        # Z-score each cell, then correlate against the standardised
        # cluster-mean reference profile.
        m1 = (z1 - z1.mean(1)[:, None]) / z1.std(1)[:, None]
        ref = z1.mean(0)
        ref = (ref - ref.mean()) / ref.std()
        g2 = (m1 * ref).mean(0)
        markers[i] = all_gene_names[np.argsort(-g2)]
    self.adata.uns['marker_genes_corr'] = markers
    return markers
Ranking marker genes based on their respective magnitudes in the correlation dot products with cluster - specific reference expression profiles .
1,987
def add(self, action=None, subject=None, **conditions):
    """Allow *action* on *subject*, optionally restricted by conditions."""
    rule = Rule(True, action, subject, **conditions)
    self.add_rule(rule)
Add ability are allowed using two arguments .
1,988
def addnot(self, action=None, subject=None, **conditions):
    """Forbid *action* on *subject*, optionally restricted by conditions."""
    rule = Rule(False, action, subject, **conditions)
    self.add_rule(rule)
Defines an ability which cannot be done .
1,989
def can(self, action, subject, **conditions):
    """Return whether the user may perform *action* on *subject*.

    Relevant rules are consulted in order; the first rule whose
    conditions match decides via its base behaviour (True = allow,
    False = forbid). No match means not permitted.
    """
    for candidate in self.relevant_rules_for_match(action, subject):
        if candidate.matches_conditions(action, subject, **conditions):
            return candidate.base_behavior
    return False
Check if the user has permission to perform a given action on an object
1,990
def relevant_rules_for_match(self, action, subject):
    """Collect rules relevant to (action, subject), most recent first.

    Each rule's actions are expanded through the alias table before the
    relevance test; the reversed match list is passed through optimize().
    """
    matches = []
    for rule in self.rules:
        rule.expanded_actions = self.expand_actions(rule.actions)
        if rule.is_relevant(action, subject):
            matches.append(rule)
    matches.reverse()
    return self.optimize(matches)
Retrieve the rules relevant to the given action and subject.
1,991
def expand_actions(self, actions):
    """Expand each action with its aliases from the alias table.

    Every input action is kept; its aliased targets (if any) are appended
    immediately after it, preserving input order.
    """
    expanded = []
    for action in actions:
        expanded.append(action)
        aliases = self.aliased_actions.get(action)
        if aliases:
            expanded.extend(aliases)
    return expanded
Accepts an array of actions and returns an array of actions which match
1,992
def alias_action(self, *args, **kwargs):
    """Alias one or more actions onto the action named by ``to``.

    A missing/empty ``to`` is a no-op. Raises if ``to`` is itself an
    aliased-to target already, which would make the alias ambiguous.
    """
    to = kwargs.pop('to', None)
    if not to:
        return
    existing_targets = itertools.chain(*self.aliased_actions.values())
    if to in list(existing_targets):
        raise Exception("You can't specify target ({}) as alias "
                        "because it is real action name".format(to))
    self.aliased_actions.setdefault(to, []).extend(args)
Alias one or more actions into another one .
1,993
def fetch(table, cols="*", where=(), group="", order=(), limit=(), **kwargs):
    """Run a SELECT via select() and fetch all result rows."""
    cursor = select(table, cols, where, group, order, limit, **kwargs)
    return cursor.fetchall()
Convenience wrapper for database SELECT and fetch all .
1,994
def fetchone(table, cols="*", where=(), group="", order=(), limit=(), **kwargs):
    """Run a SELECT via select() and fetch a single result row."""
    cursor = select(table, cols, where, group, order, limit, **kwargs)
    return cursor.fetchone()
Convenience wrapper for database SELECT and fetch one .
1,995
def insert(table, values=(), **kwargs):
    """Build and execute an INSERT; returns the new row's id.

    Values may be given as a mapping/pair sequence and/or as keyword
    arguments (keywords win on conflict).
    """
    merged = dict(values, **kwargs).items()
    sql, args = makeSQL("INSERT", table, values=merged)
    return execute(sql, args).lastrowid
Convenience wrapper for database INSERT .
1,996
def select(table, cols="*", where=(), group="", order=(), limit=(), **kwargs):
    """Build and execute a SELECT; returns the database cursor.

    Keyword arguments are merged into the WHERE criteria.
    """
    criteria = dict(where, **kwargs).items()
    sql, args = makeSQL("SELECT", table, cols, criteria, group, order, limit)
    return execute(sql, args)
Convenience wrapper for database SELECT .
1,997
def update(table, values, where=(), **kwargs):
    """Build and execute an UPDATE; returns the number of affected rows.

    Keyword arguments are merged into the WHERE criteria.
    """
    criteria = dict(where, **kwargs).items()
    sql, args = makeSQL("UPDATE", table, values=values, where=criteria)
    return execute(sql, args).rowcount
Convenience wrapper for database UPDATE .
1,998
def delete(table, where=(), **kwargs):
    """Build and execute a DELETE; returns the number of affected rows.

    Keyword arguments are merged into the WHERE criteria.
    """
    criteria = dict(where, **kwargs).items()
    sql, args = makeSQL("DELETE", table, where=criteria)
    return execute(sql, args).rowcount
Convenience wrapper for database DELETE .
1,999
def make_cursor(path, init_statements=(), _connectioncache={}):
    """Return a cursor to the SQLite database at *path*.

    Connections are cached per path in the intentionally-mutable default
    `_connectioncache`, so repeated calls reuse one connection. For new
    databases, *init_statements* are executed and (for on-disk files) a
    permissive file mode is applied best-effort. Rows come back as dicts.

    Bug fix: the permission constant used the Python 2 octal literal
    ``0707``, a syntax error on Python 3; it is now ``0o707`` (valid on
    Python 2.6+ and 3).
    """
    connection = _connectioncache.get(path)
    if not connection:
        is_new = not os.path.exists(path) or not os.path.getsize(path)
        try:
            # Best-effort creation of the parent directory for new files.
            is_new and os.makedirs(os.path.dirname(path))
        except OSError:
            pass
        connection = sqlite3.connect(path, isolation_level=None,
                                     check_same_thread=False,
                                     detect_types=sqlite3.PARSE_DECLTYPES)
        for x in init_statements or ():
            connection.execute(x)
        try:
            # rwx---rwx, best effort; skipped for in-memory databases.
            is_new and ":memory:" not in path.lower() and os.chmod(path, 0o707)
        except OSError:
            pass
        connection.row_factory = lambda cur, row: dict(sqlite3.Row(cur, row))
        _connectioncache[path] = connection
    return connection.cursor()
Returns a cursor to the database making new connection if not cached .