idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
10,800
def run_server(chatrooms, use_default_logging=True):
    """Sets up and serves specified chatrooms. Main entrypoint to Hermes.

    chatrooms: mapping of chatroom name -> params dict.  The optional
        'CLASS' param selects the bot class (default 'hermes.Chatroom'),
        given either as a class object or as a dotted-path string.
    use_default_logging: when True, installs the default logging config.

    NOTE: Python 2 syntax (`except socket.error, ex`).
    """
    if use_default_logging:
        configure_logging()
    logger.info('Starting Hermes chatroom server...')
    bots = []
    for name, params in chatrooms.items():
        bot_class = params.get('CLASS', 'hermes.Chatroom')
        if type(bot_class) == type:
            # Already a class object; nothing to resolve.
            pass
        else:
            # Dotted-path string: resolve module and class name.  A bare
            # name is looked up in __main__.
            bot_class_path = bot_class.split('.')
            if len(bot_class_path) == 1:
                module, classname = '__main__', bot_class_path[-1]
            else:
                module, classname = '.'.join(bot_class_path[:-1]), bot_class_path[-1]
            _ = __import__(module, globals(), locals(), [classname])
            bot_class = getattr(_, classname)
        bot = bot_class(name, params)
        bots.append(bot)
    # Reconnect loop: any socket failure or unexpected exception tears the
    # connections down and retries after a short pause.
    while True:
        try:
            logger.info("Connecting to servers...")
            sockets = _get_sockets(bots)
            if len(sockets.keys()) == 0:
                logger.info('No chatrooms defined. Exiting.')
                return
            _listen(sockets)
        except socket.error, ex:
            if ex.errno == 9:
                logger.exception('broken socket detected')
            else:
                logger.exception('Unknown socket error %d' % (ex.errno,))
        except Exception:
            logger.exception('Unexpected exception')
        # Brief backoff before reconnecting.
        time.sleep(1)
Sets up and serves specified chatrooms . Main entrypoint to Hermes .
10,801
def _get_sockets ( bots ) : sockets = { } for bot in bots : bot . connect ( ) sockets [ bot . client . Connection . _sock ] = bot return sockets
Connects and gathers sockets for all chatrooms
10,802
def _listen(sockets):
    """Main server loop: poll chatroom sockets and dispatch incoming events.

    sockets: dict of {socket: Chatroom} as built by _get_sockets().
    Raises when a peer disconnects (Process returns None/0) or when a
    socket maps to something that is not a Chatroom.
    """
    while True:
        # 1-second select timeout keeps the loop responsive.
        readable, _, _ = select.select(sockets.keys(), [], [], 1)
        # Fixed: the loop variable was named `socket`, shadowing the
        # socket module used elsewhere in this file.
        for sock in readable:
            handler = sockets[sock]
            if isinstance(handler, Chatroom):
                data_len = handler.client.Process(1)
                if data_len is None or data_len == 0:
                    raise Exception('Disconnected from server')
            else:
                raise Exception("Unknown socket type: %s" % repr(handler))
Main server loop . Listens for incoming events and dispatches them to appropriate chatroom
10,803
def _send ( self , method , path , data , filename ) : if filename is None : return self . _send_json ( method , path , data ) else : return self . _send_file ( method , path , data , filename )
Send data to a remote server either with a POST or a PUT request .
10,804
def get_platform_settings():
    """Return the content of settings.PLATFORMS with a twist.

    If a legacy `FACEBOOK` setting is present, a Facebook platform entry
    is appended to the result.

    Fix: return a *copy* of settings.PLATFORMS.  The original appended the
    Facebook entry to the settings list itself, so every call would
    accumulate another duplicate entry in the shared settings object.
    """
    platforms = list(settings.PLATFORMS)
    if getattr(settings, 'FACEBOOK', None):
        platforms.append({
            'class': 'bernard.platforms.facebook.platform.Facebook',
            'settings': settings.FACEBOOK,
        })
    return platforms
Returns the content of settings . PLATFORMS with a twist .
10,805
async def run_checks(self):
    """Run checks on itself and on the FSM.

    Async generator: yields every check produced by the FSM's health
    check, then this object's own self_check, then the middleware
    manager's checks (the latter is a regular synchronous iterable).
    """
    async for check in self.fsm.health_check():
        yield check
    async for check in self.self_check():
        yield check
    for check in MiddlewareManager.health_check():
        yield check
Run checks on itself and on the FSM
10,806
async def self_check(self):
    """Check that the platforms configuration is all right.

    Yields a HealthCheckFail for each problem found:
      - 00004: a platform entry is missing its `class` key
      - 00003: the configured class cannot be imported
      - 00002: the same class is configured more than once
    Successfully imported platforms also get to run their own async
    self_check, whose failures are yielded through.
    """
    platforms = set()
    for platform in get_platform_settings():
        try:
            name = platform['class']
            cls: Type[Platform] = import_class(name)
        except KeyError:
            yield HealthCheckFail(
                '00004',
                'Missing platform `class` name in configuration.'
            )
        except (AttributeError, ImportError, ValueError):
            # `name` is always bound here: import_class can only raise
            # after the platform['class'] lookup succeeded.
            yield HealthCheckFail(
                '00003',
                f'Platform "{name}" cannot be imported.'
            )
        else:
            if cls in platforms:
                yield HealthCheckFail(
                    '00002',
                    f'Platform "{name}" is imported more than once.'
                )
            platforms.add(cls)
            async for check in cls.self_check():
                yield check
Checks that the platforms configuration is all right .
10,807
def _index_classes(self) -> Dict[Text, Type[Platform]]:
    """Build a name index for all configured platform classes.

    An explicit 'name' key in the settings entry wins; otherwise the
    class's NAME attribute is used as the index key.
    """
    index = {}
    for entry in get_platform_settings():
        cls: Type[Platform] = import_class(entry['class'])
        key = entry['name'] if 'name' in entry else cls.NAME
        index[key] = cls
    return index
Build a name index for all platform classes
10,808
async def build_platform(self, cls: Type[Platform], custom_id):
    """Build and wire up a platform instance of the given class.

    custom_id, when truthy, overrides the platform's internal `_id`.
    The instance is async-initialized, hooked to the FSM's message
    handler, and attached to the HTTP router.
    """
    # Imported here to avoid a circular import at module load time
    # (the server module also imports platforms) — TODO confirm.
    from bernard.server.http import router
    p = cls()
    if custom_id:
        p._id = custom_id
    await p.async_init()
    p.on_message(self.fsm.handle_message)
    p.hook_up(router)
    return p
Build a platform instance of the given class. Nothing fancy.
10,809
def get_class(self, platform) -> Type[Platform]:
    """For a given platform name, get the matching class.

    Raises PlatformDoesNotExist when the name is not configured.
    """
    try:
        return self._classes[platform]
    except KeyError:
        raise PlatformDoesNotExist(
            'Platform "{}" is not in configuration'.format(platform)
        )
For a given platform name gets the matching class
10,810
async def get_platform(self, name: Text):
    """Get a valid instance of the specified platform.

    Lazily initializes the manager on first use, then builds and caches
    the platform instance under its name in self.platforms.  Callers
    should not hold on to the returned object across configuration
    changes.
    """
    if not self._is_init:
        await self.init()
    if name not in self.platforms:
        self.platforms[name] = await self.build_platform(self.get_class(name), name)
    return self.platforms[name]
Get a valid instance of the specified platform. Do not cache this object; it might change with configuration changes.
10,811
async def get_all_platforms(self) -> AsyncIterator[Platform]:
    """Yield an instance of every configured platform, one by one."""
    for platform_name in self._classes:
        yield await self.get_platform(platform_name)
Returns all platform instances
10,812
async def message_from_token(self, token: Text, payload: Any) \
        -> Tuple[Optional[BaseMessage], Optional[Platform]]:
    """Find the platform that recognizes this auth token and build its message.

    Returns (message, platform) for the first platform whose
    message_from_token yields a truthy message, or (None, None) when no
    platform accepts the token.
    """
    async for candidate in self.get_all_platforms():
        message = await candidate.message_from_token(token, payload)
        if message:
            return message, candidate
    return None, None
Given an authentication token find the right platform that can recognize this token and create a message for this platform .
10,813
def add_args(parser, positional=False):
    """Extend a commandline argument parser with arguments for specifying
    read sources.

    parser: an argparse-compatible parser to extend in place.
    positional: when True, `reads` is a positional argument; otherwise it
        becomes the `--reads` option.
    """
    group = parser.add_argument_group("read loading")
    group.add_argument("reads" if positional else "--reads", nargs="+", default=[],
                       help="Paths to bam files. Any number of paths may be specified.")
    group.add_argument("--read-source-name", nargs="+",
                       help="Names for each read source. The number of names specified "
                       "must match the number of bam files. If not specified, filenames are "
                       "used for names.")
    group = parser.add_argument_group(
        "read filtering",
        "A number of read filters are available. See the pysam "
        "documentation (http://pysam.readthedocs.org/en/latest/api.html) "
        "for details on what these fields mean. When multiple filter "
        "options are specified, reads must match *all* filters.")
    # Generate one CLI flag per entry in READ_FILTERS; the argparse kwargs
    # depend on the filter value's kind (bool flag, int, or free string).
    for (name, (kind, message, function)) in READ_FILTERS.items():
        extra = {}
        if kind is bool:
            extra["action"] = "store_true"
            extra["default"] = None
        elif kind is int:
            extra["type"] = int
            extra["metavar"] = "N"
        elif kind is str:
            extra["metavar"] = "STRING"
        group.add_argument("--" + name.replace("_", "-"), help=message, **extra)
Extends a commandline argument parser with arguments for specifying read sources .
10,814
def load_from_args(args):
    """Given parsed commandline arguments, return a list of ReadSource
    objects, or None when no reads were specified.

    Filter options set on `args` (one attribute per READ_FILTERS entry)
    are bound into partials and applied to every read source.
    """
    if not args.reads:
        return None
    if args.read_source_name:
        # Validate/expand user-provided names against the number of BAMs.
        read_source_names = util.expand(args.read_source_name, 'read_source_name',
                                        'read source', len(args.reads))
    else:
        read_source_names = util.drop_prefix(args.reads)
    filters = []
    for (name, info) in READ_FILTERS.items():
        value = getattr(args, name)
        if value is not None:
            # info[-1] is the filter function; bind the CLI value to it.
            filters.append(functools.partial(info[-1], value))
    return [load_bam(filename, name, filters)
            for (filename, name) in zip(args.reads, read_source_names)]
Given parsed commandline arguments returns a list of ReadSource objects
10,815
def get_int(config, key, default):
    """Retrieve an integer from a dict of string values.

    Returns `default` when the key is missing or the value cannot be
    converted to an int.
    """
    try:
        raw = config[key]
    except KeyError:
        return default
    try:
        return int(raw)
    except ValueError:
        return default
A helper to retrieve an integer value from a given dictionary containing string values . If the requested value is not present in the dictionary or if it cannot be converted to an integer a default value will be returned instead .
10,816
def compact_bucket(db, buck_key, limit):
    """Perform the compaction operation.

    Reads the bucket's update records from the database, builds a single
    compacted bucket record, inserts that record right after the last
    summarized update, then trims the now-obsolete updates off the list.

    db: Redis-like handle (lrange / linsert / ltrim).
    buck_key: bucket key object; str() of it is the Redis list key.
    limit: limit object providing bucket_class for the loader.
    """
    records = db.lrange(str(buck_key), 0, -1)
    # Summarize the existing updates into one bucket state.
    loader = limits.BucketLoader(limit.bucket_class, db, limit, str(buck_key),
                                 records, stop_summarize=True)
    # The uuid makes the compacted record unique within the list.
    buck_record = msgpack.dumps(dict(bucket=loader.bucket.dehydrate(),
                                     uuid=str(uuid.uuid4())))
    result = db.linsert(str(buck_key), 'after', loader.last_summarize_rec,
                        buck_record)
    if result < 0:
        # linsert reports a negative result when the pivot record vanished;
        # leave the list untouched so a later pass can retry.
        LOG.warning("Bucket compaction on %s failed; will retry" % buck_key)
        return
    # Drop everything up to and including the records we just summarized.
    db.ltrim(str(buck_key), loader.last_summarize_idx + 1, -1)
Perform the compaction operation . This reads in the bucket information from the database builds a compacted bucket record inserts that record in the appropriate place in the database then removes outdated updates .
10,817
def compactor(conf):
    """The compactor daemon.

    Watches the sorted set containing bucket keys that need to be
    compacted, performing the necessary compaction in an endless loop.
    Individual failures are logged and never abort the daemon.
    """
    db = conf.get_database('compactor')
    limit_map = LimitContainer(conf, db)
    config = conf['compactor']
    if get_int(config, 'max_updates', 0) <= 0:
        LOG.warning("Compaction is not enabled. Enable it by "
                    "setting a positive integer value for "
                    "'compactor.max_updates' in the configuration.")
    key_getter = GetBucketKey.factory(config, db)
    LOG.info("Compactor initialized")
    while True:
        try:
            buck_key = limits.BucketKey.decode(key_getter())
        except ValueError as exc:
            LOG.warning("Error interpreting bucket key: %s" % exc)
            continue
        # Only version-2+ keys carry the uuid needed to look up the limit.
        if buck_key.version < 2:
            continue
        try:
            limit = limit_map[buck_key.uuid]
        except KeyError:
            LOG.warning("Unable to compact bucket for limit %s" % buck_key.uuid)
            continue
        LOG.debug("Compacting bucket %s" % buck_key)
        try:
            compact_bucket(db, buck_key, limit)
        except Exception:
            LOG.exception("Failed to compact bucket %s" % buck_key)
        else:
            LOG.debug("Finished compacting bucket %s" % buck_key)
The compactor daemon. This function watches the sorted set containing bucket keys that need to be compacted, performing the necessary compaction.
10,818
def factory(cls, config, db):
    """Select and return an appropriate GetBucketKey subclass instance.

    Uses the Lua-script strategy only when both the Redis client and the
    server support it; otherwise falls back to a lock-based strategy.
    """
    if not hasattr(db, 'register_script'):
        # Client-side support missing entirely.
        LOG.debug("Redis client does not support register_script()")
        return GetBucketKeyByLock(config, db)
    info = db.info()
    # NOTE(review): the argument order of version_greater('2.6', ver)
    # suggests "server version >= 2.6 supports scripting" — confirm
    # against version_greater's definition.
    if version_greater('2.6', info['redis_version']):
        LOG.debug("Redis server supports register_script()")
        return GetBucketKeyByScript(config, db)
    LOG.debug("Redis server does not support register_script()")
    return GetBucketKeyByLock(config, db)
Given a configuration and database select and return an appropriate instance of a subclass of GetBucketKey . This will ensure that both client and server support are available for the Lua script feature of Redis and if not a lock will be used .
10,819
def get(self, now):
    """Get a bucket key to compact; returns None when none are eligible.

    Uses the configured lock to pop the oldest eligible key off the
    sorted set atomically.

    Fix: redis `zrem` takes the sorted-set key *and* the member; the
    original called `self.db.zrem(item)` without the set key.
    """
    with self.lock:
        items = self.db.zrangebyscore(self.key, 0, now - self.min_age,
                                      start=0, num=1)
        if not items:
            return None
        item = items[0]
        # zrem(name, member): remove the popped entry from the sorted set.
        self.db.zrem(self.key, item)
        return item
Get a bucket key to compact . If none are available returns None . This uses a configured lock to ensure that the bucket key is popped off the sorted set in an atomic fashion .
10,820
def get(self, now):
    """Get a bucket key to compact, or None when none are eligible.

    Atomicity is delegated to a Lua script registered with the Redis
    server; the script pops at most one sufficiently old key.
    """
    popped = self.script(keys=[self.key], args=[now - self.min_age])
    if popped:
        return popped[0]
    return None
Get a bucket key to compact . If none are available returns None . This uses a Lua script to ensure that the bucket key is popped off the sorted set in an atomic fashion .
10,821
def parse(text, elements, fallback):
    """Parse the given text and produce a list of inline elements.

    Collects candidate tokens from every element type, orders them,
    drops overlapping matches, then converts the survivors (and the gaps
    between them) into elements.
    """
    tokens = [
        Token(element_type, match, text, fallback)
        for element_type in elements
        for match in element_type.find(text)
    ]
    tokens.sort()
    tokens = _resolve_overlap(tokens)
    return make_elements(tokens, text, fallback=fallback)
Parse given text and produce a list of inline elements .
10,822
def make_elements(tokens, text, start=0, end=None, fallback=None):
    """Make elements from a list of parsed tokens.

    Any stretch of text between tokens (a "hole") is wrapped in a
    fallback element.  `end` defaults to the end of the text.
    """
    end = end or len(text)
    elements = []
    cursor = start
    for token in tokens:
        if cursor < token.start:
            elements.append(fallback(text[cursor:token.start]))
        elements.append(token.as_element())
        cursor = token.end
    if cursor < end:
        elements.append(fallback(text[cursor:end]))
    return elements
Make elements from a list of parsed tokens . It will turn all unmatched holes into fallback elements .
10,823
def main_cli():
    """CLI minimal interface.

    With a sensor selected, polls it every `delay` seconds and prints its
    state until interrupted or an error occurs; otherwise runs the
    i2cdetect utility on the chosen bus and reports detected addresses.
    """
    args = _cli_argument_parser()
    delta_secs = args.delay
    i2cbus = args.bus
    i2c_address = args.address
    sensor_key = args.sensor
    sensor_params = args.params
    params = {}
    if sensor_params:
        def _parse_param(str_param):
            # "key=value"; values that parse as int are converted.
            key, value = str_param.split('=')
            try:
                value = int(value)
            except ValueError:
                pass
            return {key.strip(): value}

        # Fixed: was a list comprehension used only for its side effects.
        for sp in sensor_params:
            params.update(_parse_param(sp))
    if sensor_key:
        from time import sleep
        try:
            import smbus
            bus_handler = smbus.SMBus(i2cbus)
        except ImportError as exc:
            print(exc, "\n", "Please install smbus-cffi before.")
            sys.exit(-1)
        try:
            sensor_handler, i2c_default_address = SENSORS[sensor_key]
        except KeyError:
            print("'%s' is not recognized as an implemented i2c sensor."
                  % sensor_key)
            sys.exit(-1)
        if i2c_address:
            # NOTE(review): hex() yields a *string* while the default is
            # taken straight from SENSORS — confirm what type the sensor
            # handlers expect.
            i2c_address = hex(int(i2c_address, 0))
        else:
            i2c_address = i2c_default_address
        sensor = sensor_handler(bus_handler, i2c_address, **params)
        try:
            while True:
                sensor.update()
                if not sensor.sample_ok:
                    print("An error has occured.")
                    break
                print(sensor.current_state_str)
                sleep(delta_secs)
        except KeyboardInterrupt:
            print("Bye!")
    else:
        from subprocess import check_output
        cmd = '/usr/sbin/i2cdetect -y {}'.format(i2cbus)
        try:
            output = check_output(cmd.split())
            print("Running i2cdetect utility in i2c bus {}:\n"
                  "The command '{}' has returned:\n{}"
                  .format(i2cbus, cmd, output.decode()))
        except FileNotFoundError:
            print("Please install i2cdetect before.")
            sys.exit(-1)
        # Skip the header row; '--' marks an empty address slot.
        addresses = ['0x' + l
                     for line in output.decode().splitlines()[1:]
                     for l in line.split()[1:]
                     if l != '--']
        if addresses:
            print("{} sensors detected in {}".format(len(addresses),
                                                     ', '.join(addresses)))
        else:
            print("No i2c sensors detected.")
CLI minimal interface .
10,824
def extract_domain(var_name, output):
    """Append the hostname of the URL held in env var `var_name` to output.

    No-op when the environment variable is unset or empty.
    """
    url = getenv(var_name)
    if not url:
        return
    output.append(urlparse(url).hostname)
Extracts just the domain name from an URL and adds it to a list
10,825
def numchannels(samples: np.ndarray) -> int:
    """Return the number of channels present in samples.

    A 1-D array is mono (one channel); otherwise the channel count is the
    second axis of the array.
    """
    return 1 if samples.ndim == 1 else samples.shape[1]
return the number of channels present in samples
10,826
def turnstile_filter(global_conf, **local_conf):
    """Paste filter factory for turnstile.

    Selects the middleware class from the 'turnstile' entry in the local
    configuration (resolved via entrypoints) or defaults to
    TurnstileMiddleware, and returns a wrapper that instantiates it
    around the wrapped WSGI app.
    """
    if 'turnstile' in local_conf:
        klass = utils.find_entrypoint('turnstile.middleware',
                                      local_conf['turnstile'], required=True)
    else:
        klass = TurnstileMiddleware

    def wrapper(app):
        return klass(app, local_conf)

    return wrapper
Factory function for turnstile .
10,827
def format_delay(self, delay, limit, bucket, environ, start_response):
    """Format the over-limit response for the request.

    May be overridden in subclasses to allow alternate responses.  Sets a
    Retry-After header (delay rounded up) and lets the limit object
    finalize the status line and body.
    """
    retry_after = "%d" % math.ceil(delay)
    headers = HeadersDict([('Retry-After', retry_after)])
    status, entity = limit.format(self.conf.status, headers, environ,
                                  bucket, delay)
    start_response(status, headers.items())
    return entity
Formats the over - limit response for the request . May be overridden in subclasses to allow alternate responses .
10,828
def find_entrypoint(group, name, compat=True, required=False):
    """Find the first available entrypoint with the given name in the group.

    When `group` is None, or `compat` is set and `name` looks like a
    "module:attr" path, the name is loaded directly instead of being
    searched among registered entry points.  Returns None when nothing
    loadable is found, unless `required` is set, in which case an
    ImportError is raised.
    """
    if group is None or (compat and ':' in name):
        try:
            # Direct "module:attr" load; load(False) skips the requires
            # check (deprecated in newer setuptools — TODO confirm the
            # targeted setuptools version).
            return pkg_resources.EntryPoint.parse("x=" + name).load(False)
        except (ImportError, pkg_resources.UnknownExtra) as exc:
            pass
    else:
        for ep in pkg_resources.iter_entry_points(group, name):
            try:
                return ep.load()
            except (ImportError, pkg_resources.UnknownExtra):
                # Try the next candidate entry point.
                continue
    if required:
        raise ImportError("Cannot import %r entrypoint %r" % (group, name))
    return None
Finds the first available entrypoint with the given name in the given group .
10,829
def transfer(self, transfer_payload=None, *, from_user, to_user):
    """Transfer this entity to another owner on the backing persistence
    layer.

    Raises EntityNotYetPersistedError when the entity has no persist_id
    yet; otherwise delegates the transfer to the plugin.
    """
    if self.persist_id is None:
        raise EntityNotYetPersistedError(
            ('Entities cannot be transferred '
             'until they have been '
             'persisted')
        )
    return self.plugin.transfer(self.persist_id, transfer_payload,
                                from_user=from_user, to_user=to_user)
Transfer this entity to another owner on the backing persistence layer
10,830
def transfer(self, rights_assignment_data=None, *, from_user, to_user,
             rights_assignment_format='jsonld'):
    """Transfer this Right to another owner on the backing persistence
    layer.

    Wraps the given rights-assignment data into a RightsAssignment,
    serializes it in the requested format as the transfer payload, and
    records the resulting transfer id on the assignment.
    """
    assignment = RightsAssignment.from_data(rights_assignment_data or {},
                                            plugin=self.plugin)
    payload = assignment._to_format(data_format=rights_assignment_format)
    transfer_id = super().transfer(payload, from_user=from_user,
                                   to_user=to_user)
    assignment.persist_id = transfer_id
    return assignment
Transfer this Right to another owner on the backing persistence layer .
10,831
def _set_repo(self, url):
    """Set the underlying repo object for `url`.

    HTTP(S) URLs become a Proxy repo; anything else is treated as a local
    path.  All failures are logged and re-raised wrapped in GritError.

    NOTE: Python 2 except syntax (`except X, e`).
    """
    if url.startswith('http'):
        try:
            self.repo = Proxy(url)
        except ProxyError, e:
            log.exception('Error setting repo: %s' % url)
            raise GritError(e)
    else:
        try:
            self.repo = Local(url)
        except NotGitRepository:
            raise GritError('Invalid url: %s' % url)
        except Exception, e:
            log.exception('Error setting repo: %s' % url)
            raise GritError(e)
sets the underlying repo object
10,832
def new(self, url, clone_from=None, bare=True):
    """Creates a new Repo instance at `url`, optionally by cloning.

    NOTE(review): `clone_from` is only checked for truthiness — the clone
    call receives `url` as the path and never uses `clone_from` as the
    source; verify whether `self.clone` should be passed `clone_from`.
    """
    if clone_from:
        self.clone(path=url, bare=bare)
    else:
        if url.startswith('http'):
            # Remote repository: create through the HTTP proxy.
            proxy = Proxy(url)
            proxy.new(path=url, bare=bare)
        else:
            local = Local.new(path=url, bare=bare)
    return Repo(url)
Creates a new Repo instance .
10,833
def CheckEmails(self, checkTypo=False, fillWrong=True):
    """Check every email in self.emails, collecting failures.

    Resets self.wrong_emails and fills it with every address for which
    CheckEmail returns False.

    NOTE: `fillWrong` is accepted for API compatibility but unused here.
    """
    self.wrong_emails = []
    for address in self.emails:
        if self.CheckEmail(address, checkTypo) is False:
            self.wrong_emails.append(address)
Checks emails in the list, determining whether each is correct or not
10,834
def CheckEmail(self, email, checkTypo=False):
    """Check a single email address.

    True iff the address contains exactly one '@' and its domain is in
    self.valid.

    NOTE: `checkTypo` is accepted for API compatibility but unused here.
    """
    parts = email.split('@')
    return len(parts) == 2 and parts[1] in self.valid
Checks a Single email if it is correct
10,835
def CorrectWrongEmails(self, askInput=True):
    """Replace each address in self.wrong_emails with its correction.

    Each wrong email is corrected via CorrectEmail and written back into
    self.emails in place; wrong_emails is cleared afterwards.

    NOTE: `askInput` is accepted for API compatibility but unused here.
    """
    for bad_address in self.wrong_emails:
        fixed = self.CorrectEmail(bad_address)
        self.emails[self.emails.index(bad_address)] = fixed
    self.wrong_emails = []
Corrects Emails in wrong_emails
10,836
def CorrectEmail(self, email):
    """Return a corrected email address.  USER INPUT REQUIRED.

    Tries to guess the intended valid domain (via permutations of each
    valid domain's first label, or a valid-domain suffix when '@' is
    missing), confirming each guess interactively; otherwise prompts the
    user to type a corrected address until it passes CheckEmail.
    """
    print("Wrong Email : " + email)
    contents = email.split('@')
    if len(contents) == 2:
        domain_data = contents[1].split('.')
        for vemail in self.valid:
            # Permutations of the valid domain's first label catch
            # transposition typos like "gmial" for "gmail".
            alters = perms(vemail.split('.', 1)[0])
            if domain_data[0] in alters and qyn.query_yes_no("Did you mean : " + contents[0] + '@' + vemail) is True:
                return contents[0] + '@' + vemail
        corrected = input('Enter Corrected Email : ')
        while self.CheckEmail(corrected) is False:
            corrected = input('PLEASE Enter "Corrected" Email : ')
        return corrected
    else:
        print('Looks like you missed/overused `@`')
        if len(contents) == 1:
            # No '@' at all: check whether the address simply ends with a
            # valid domain and offer to insert the missing '@'.
            for vemail in self.valid:
                if email[len(email) - len(vemail):] == vemail and qyn.query_yes_no("Did you mean : " + email[:len(email) - len(vemail)] + '@' + vemail) is True:
                    return email[:len(email) - len(vemail)] + '@' + vemail
        corrected = input('Enter Corrected Email : ')
        while self.CheckEmail(corrected) is False:
            corrected = input('PLEASE Enter "Corrected" Email : ')
        return corrected
Returns a Corrected email USER INPUT REQUIRED
10,837
def add_element(self, element, override=False):
    """Add an element class to the parser.

    The element is registered under its own name in the inline or block
    registry depending on its base class.  With override=True, if the
    element subclasses an already-registered element it replaces that
    parent's registration instead.
    """
    if issubclass(element, inline.InlineElement):
        registry = self.inline_elements
    elif issubclass(element, block.BlockElement):
        registry = self.block_elements
    else:
        raise TypeError(
            'The element should be a subclass of either `BlockElement` or '
            '`InlineElement`.'
        )
    if not override:
        registry[element.__name__] = element
        return
    for base in element.__bases__:
        if base in registry.values():
            registry[base.__name__] = element
            return
    registry[element.__name__] = element
Add an element to the parser .
10,838
def parse(self, source_or_text):
    """Do the actual parsing; returns an AST or a parsed element.

    Given plain text, installs this parser on the block/inline modules
    and hands off to the Document block element.  Given a source object,
    repeatedly matches block elements against it until it is exhausted
    or nothing matches.
    """
    if isinstance(source_or_text, string_types):
        # Fresh text: make this parser globally visible to the element
        # classes, then let Document drive the parse.
        block.parser = self
        inline.parser = self
        return self.block_elements['Document'](source_or_text)
    element_list = self._build_block_element_list()
    ast = []
    while not source_or_text.exhausted:
        for ele_type in element_list:
            if ele_type.match(source_or_text):
                result = ele_type.parse(source_or_text)
                if not hasattr(result, 'priority'):
                    # parse() returned raw data rather than an element
                    # instance: wrap it in the element type.
                    result = ele_type(result)
                ast.append(result)
                break
        else:
            # No element matched; stop to avoid spinning forever.
            break
    return ast
Do the actual parsing and returns an AST or parsed element .
10,839
def parse_inline(self, text):
    """Parse text into inline elements.

    RawText is not matched directly during parsing; it is used as the
    wrapper for any holes that no other element claims.
    """
    candidates = self._build_inline_element_list()
    raw_text = self.inline_elements['RawText']
    return inline_parser.parse(text, candidates, fallback=raw_text)
Parses text into inline elements . RawText is not considered in parsing but created as a wrapper of holes that don t match any other elements .
10,840
def _build_block_element_list ( self ) : return sorted ( [ e for e in self . block_elements . values ( ) if not e . virtual ] , key = lambda e : e . priority , reverse = True )
Return a list of block elements ordered from highest priority to lowest .
10,841
def make_app(*args, **kw):
    """Assembles a basic WSGI-compatible application providing the
    functionality of git-http-backend.

    Positional args fill `content_path` then `uri_marker` (both also
    accepted as keywords).  Routes for the git smart-HTTP protocol,
    static files, file downloads, JSON API and UI views are registered
    on a WSGIHandlerSelector, which is returned as the WSGI app.

    NOTE: Python 2 code — `.decode('utf8')` is called on str options.
    """
    default_options = [['content_path', '.'], ['uri_marker', '']]
    args = list(args)
    options = dict(default_options)
    options.update(kw)
    # Positional arguments override the defaults in declared order.
    while default_options and args:
        _d = default_options.pop(0)
        _a = args.pop(0)
        options[_d[0]] = _a
    options['content_path'] = os.path.abspath(options['content_path'].decode('utf8'))
    options['uri_marker'] = options['uri_marker'].decode('utf8')
    selector = WSGIHandlerSelector()
    git_inforefs_handler = GitHTTPBackendInfoRefs(**options)
    git_rpc_handler = GitHTTPBackendSmartHTTP(**options)
    static_handler = StaticServer(**options)
    file_handler = FileServer(**options)
    json_handler = JSONServer(**options)
    ui_handler = UIServer(**options)
    # A uri_marker lets the app be mounted below an arbitrary path prefix.
    if options['uri_marker']:
        marker_regex = r'(?P<decorative_path>.*?)(?:/' + options['uri_marker'] + ')'
    else:
        marker_regex = ''
    selector.add(
        marker_regex + r'(?P<working_path>.*?)/info/refs\?.*?service=(?P<git_command>git-[^&]+).*$',
        GET=git_inforefs_handler, HEAD=git_inforefs_handler)
    selector.add(
        marker_regex + r'(?P<working_path>.*)/(?P<git_command>git-[^/]+)$',
        POST=git_rpc_handler)
    selector.add(
        marker_regex + r'/static/(?P<working_path>.*)$',
        GET=static_handler, HEAD=static_handler)
    selector.add(
        marker_regex + r'(?P<working_path>.*)/file$',
        GET=file_handler, HEAD=file_handler)
    # Catch-all route: UI for GET/HEAD, JSON API for POST.
    selector.add(
        marker_regex + r'(?P<working_path>.*)$',
        GET=ui_handler, POST=json_handler, HEAD=ui_handler)
    return selector
Assembles basic WSGI - compatible application providing functionality of git - http - backend .
10,842
def now(tzinfo=True):
    """Return an aware or naive datetime.datetime, depending on
    settings.USE_TZ.

    Delegates to Django's timezone-aware now() when available; otherwise
    returns UTC-aware or local naive time based on `tzinfo`.
    """
    if dj_now:
        return dj_now()
    if not tzinfo:
        return datetime.now()
    return datetime.utcnow().replace(tzinfo=utc)
Return an aware or naive datetime . datetime depending on settings . USE_TZ .
10,843
def match_unit(data, p, m='a'):
    """Match `data` against a basic match unit `p`.

    p may be None (matches only None), a (low, high) tuple for an
    inclusive range test, a string, or a compiled regex pattern.
    m: 'e' for exact string equality; any other value means regex search.
    Sequences (except strings) and mappings match if any of their
    elements / values match recursively.
    """
    if data is None:
        return p is None
    if m != 'e' and isinstance(p, six.string_types):
        # Compile string patterns up-front for the regex modes.
        p = re.compile(p)
    if isinstance(data, Sequence) and not isinstance(data, six.string_types):
        return any([match_unit(field, p, m=m) for field in data])
    elif isinstance(data, MutableMapping):
        return any([match_unit(field, p, m=m) for field in data.values()])
    if isinstance(p, tuple):
        # Inclusive range match.
        left, right = p
        return (left <= data) and (data <= right)
    if m == 'e':
        return six.text_type(data) == p
    return p.search(six.text_type(data)) is not None
Match data to basic match unit .
10,844
def generate_ppi_network(
        ppi_graph_path: str,
        dge_list: List[Gene],
        max_adj_p: float,
        max_log2_fold_change: float,
        min_log2_fold_change: float,
        ppi_edge_min_confidence: Optional[float] = None,
        current_disease_ids_path: Optional[str] = None,
        disease_associations_path: Optional[str] = None,
) -> Network:
    """Generate the protein-protein interaction network.

    ppi_graph_path: path to the PPI graph file.
    dge_list: differentially expressed genes to overlay on the network.
    max_adj_p / max_log2_fold_change / min_log2_fold_change: thresholds
        forwarded to the Network constructor.
    ppi_edge_min_confidence: optional minimum edge confidence filter.
    current_disease_ids_path / disease_associations_path: both must be
        given for disease associations to be loaded; otherwise they are
        skipped.
    """
    protein_interactions = parsers.parse_ppi_graph(ppi_graph_path,
                                                   ppi_edge_min_confidence)
    # Collapse multi-edges / self-loops.
    protein_interactions = protein_interactions.simplify()
    if disease_associations_path is not None and current_disease_ids_path is not None:
        current_disease_ids = parsers.parse_disease_ids(current_disease_ids_path)
        disease_associations = parsers.parse_disease_associations(
            disease_associations_path, current_disease_ids)
    else:
        disease_associations = None
    network = Network(
        protein_interactions,
        max_adj_p=max_adj_p,
        max_l2fc=max_log2_fold_change,
        min_l2fc=min_log2_fold_change,
    )
    network.set_up_network(dge_list, disease_associations=disease_associations)
    return network
Generate the protein - protein interaction network .
10,845
def parse_dge(dge_path: str,
              entrez_id_header: str,
              log2_fold_change_header: str,
              adj_p_header: str,
              entrez_delimiter: str,
              base_mean_header: Optional[str] = None) -> List[Gene]:
    """Parse a differential expression file.

    Dispatches on the file extension: .xlsx goes to the Excel parser,
    .csv and .tsv go to the CSV parser (the latter tab-separated).
    Raises ValueError for any other extension.
    """
    shared_kwargs = dict(
        entrez_id_header=entrez_id_header,
        log_fold_change_header=log2_fold_change_header,
        adjusted_p_value_header=adj_p_header,
        entrez_delimiter=entrez_delimiter,
        base_mean_header=base_mean_header,
    )
    if dge_path.endswith('.xlsx'):
        return parsers.parse_excel(dge_path, **shared_kwargs)
    if dge_path.endswith('.csv'):
        return parsers.parse_csv(dge_path, **shared_kwargs)
    if dge_path.endswith('.tsv'):
        return parsers.parse_csv(dge_path, sep="\t", **shared_kwargs)
    raise ValueError(f'Unsupported extension: {dge_path}')
Parse a differential expression file .
10,846
def _load(self, file_path: Text) -> None:
    """Load the configuration from a plain Python file.

    The file is executed on its own inside a throwaway `settings` module
    namespace; every top-level name matching CONFIG_ATTR is then copied
    into this object.  IOErrors are re-raised with a clarified message.
    """
    module_ = types.ModuleType('settings')
    module_.__file__ = file_path
    try:
        with open(file_path, encoding='utf-8') as f:
            exec(compile(f.read(), file_path, 'exec'), module_.__dict__)
    except IOError as e:
        e.strerror = 'Unable to load configuration file ({})'.format(e.strerror)
        raise
    for key in dir(module_):
        if CONFIG_ATTR.match(key):
            self[key] = getattr(module_, key)
Load the configuration from a plain Python file . This file is executed on its own .
10,847
def _settings(self) -> Settings:
    """Return the actual settings object, creating and loading it on
    first access.

    The instance is cached in self.__dict__ under '__settings'; every
    non-empty path from _get_files() is loaded into it once.
    """
    cached = self.__dict__['__settings']
    if cached is None:
        cached = Settings()
        self.__dict__['__settings'] = cached
        for file_path in self._get_files():
            if file_path:
                cached._load(file_path)
    return cached
Return the actual settings object or create it if missing .
10,848
async def _start(self, key: Text) -> None:
    """Start (acquire) the register lock for `key`.

    Polls Redis with SET-if-not-exist plus an expiry, sleeping a fixed
    interval between attempts, for up to 1000 tries.

    NOTE(review): gives up silently after 1000 attempts — confirm that
    proceeding without the lock is intended.
    """
    for _ in range(0, 1000):
        with await self.pool as r:
            just_set = await r.set(
                self.lock_key(key),
                '',
                expire=settings.REGISTER_LOCK_TIME,
                exist=r.SET_IF_NOT_EXIST,
            )
            if just_set:
                break
        await asyncio.sleep(settings.REDIS_POLL_INTERVAL)
Start the lock .
10,849
async def _get(self, key: Text) -> Dict[Text, Any]:
    """Get the value stored for the key.

    The value is JSON-deserialized automatically; an absent key or
    undecodable payload yields an empty dictionary.
    """
    try:
        with await self.pool as conn:
            stored = await conn.get(self.register_key(key))
            return ujson.loads(stored)
    except (ValueError, TypeError):
        return {}
Get the value for the key . It is automatically deserialized from JSON and returns an empty dictionary by default .
10,850
async def _replace(self, key: Text, data: Dict[Text, Any]) -> None:
    """Replace the register entry for `key` with JSON-serialized data."""
    serialized = ujson.dumps(data)
    with await self.pool as conn:
        await conn.set(self.register_key(key), serialized)
Replace the register with a new value .
10,851
def getFileName(self, suffix=None, extension="jar"):
    """Return the basename of the artifact's file using Maven conventions.

    Format: "<artifactId>-<version>[-<suffix>][.<extension>]"; leading
    dashes/dots on suffix/extension are normalized, and a None extension
    produces no extension at all.
    """
    assert (self._artifactId is not None)
    assert (self._version is not None)
    suffix_part = "-" + suffix.lstrip("-") if suffix is not None else ""
    ext_part = "." + extension.lstrip(".") if extension is not None else ""
    return "{0}-{1}{2}{3}".format(self._artifactId,
                                  self._version.getRawString(),
                                  suffix_part, ext_part)
Returns the basename of the artifact s file using Maven s conventions .
10,852
def getPath(self, suffix=None, extension="jar", separator=os.sep):
    """Return the full path, relative to a Maven repository root, of the
    current artifact using Maven conventions.

    Layout: group-as-dirs / artifactId / version / filename; artifactId
    and version segments are skipped when unset.
    """
    assert (self._groupId is not None)
    segments = [self._groupId.replace(".", separator)]
    if self._artifactId is not None:
        segments.append(self._artifactId)
    if self._version is not None:
        segments.append(self._version.getRawString())
    segments.append(self.getFileName(suffix, extension))
    return separator.join(segments)
Returns the full path relative to the root of a Maven repository of the current artifact using Maven s conventions .
10,853
def allele_support_df(loci, sources):
    """Return a DataFrame of allele counts for all given loci in the read
    sources, with the standard EXPECTED_COLUMNS layout.
    """
    rows = allele_support_rows(loci, sources)
    return pandas.DataFrame(rows, columns=EXPECTED_COLUMNS)
Returns a DataFrame of allele counts for all given loci in the read sources
10,854
def variant_support(variants, allele_support_df, ignore_missing=False):
    """Collect the read-evidence support for the given variants.

    variants: iterable of variant objects (contig/start/end/ref/alt).
    allele_support_df: DataFrame with EXPECTED_COLUMNS (per-source allele
        counts over interbase intervals).
    ignore_missing: log a warning instead of raising when a variant has
        no allele counts in some source.

    Returns a pandas.Panel keyed by metric (num_alt, num_ref, num_other,
    total_depth, alt_fraction, any_alt_fraction), with variants as the
    row index and sources as columns.
    NOTE(review): pandas.Panel was removed in pandas >= 1.0 — this code
    requires an older pandas.
    """
    missing = [c for c in EXPECTED_COLUMNS if c not in allele_support_df.columns]
    if missing:
        raise ValueError("Missing columns: %s" % " ".join(missing))
    # Coerce coordinates to int so they form stable dict keys below.
    allele_support_df[["interbase_start", "interbase_end"]] = (
        allele_support_df[["interbase_start", "interbase_end"]].astype(int))
    sources = sorted(allele_support_df["source"].unique())
    # (source, contig, start, end) -> {allele: count}
    allele_support_dict = collections.defaultdict(dict)
    for (i, row) in allele_support_df.iterrows():
        key = (row['source'], row.contig, row.interbase_start, row.interbase_end)
        allele_support_dict[key][row.allele] = row["count"]
    allele_support_dict = dict(allele_support_dict)
    dataframe_dicts = collections.defaultdict(lambda: collections.defaultdict(list))
    for variant in variants:
        for source in sources:
            # Variant start is 1-based; the support table is interbase,
            # hence the -1 on start.
            key = (source, variant.contig, variant.start - 1, variant.end)
            try:
                alleles = allele_support_dict[key]
            except KeyError:
                message = ("No allele counts in source %s for variant %s"
                           % (source, str(variant)))
                if ignore_missing:
                    logging.warning(message)
                    alleles = {}
                else:
                    raise ValueError(message)
            alt = alleles.get(variant.alt, 0)
            ref = alleles.get(variant.ref, 0)
            total = sum(alleles.values())
            # Reads supporting neither ref nor alt.
            other = total - alt - ref
            dataframe_dicts["num_alt"][source].append(alt)
            dataframe_dicts["num_ref"][source].append(ref)
            dataframe_dicts["num_other"][source].append(other)
            dataframe_dicts["total_depth"][source].append(total)
            # max(1, total) guards against division by zero at depth 0.
            dataframe_dicts["alt_fraction"][source].append(float(alt) / max(1, total))
            dataframe_dicts["any_alt_fraction"][source].append(
                float(alt + other) / max(1, total))
    dataframes = dict(
        (label, pandas.DataFrame(value, index=variants))
        for (label, value) in dataframe_dicts.items())
    return pandas.Panel(dataframes)
Collect the read evidence support for the given variants .
10,855
def sndinfo(path: str) -> SndInfo:
    """Get info about a soundfile, delegating to the backend selected for
    this path.
    """
    chosen = _getBackend(path)
    logger.debug(f"sndinfo: using backend {chosen.name}")
    return chosen.getinfo(path)
Get info about a soundfile
10,856
def asmono(samples: np.ndarray, channel: Union[int, str] = 0) -> np.ndarray:
    """Convert *samples* to a mono (1-D) array if not mono already.

    Args:
        samples: a 1-D mono array, an (N, 1) array, or a multichannel
            (N, channels) array
        channel: which channel to extract from multichannel input, or
            'mix' to mix all channels down

    Returns:
        a 1-D numpy array of samples

    Raises:
        TypeError: if the samples are not numeric
        ValueError: if *channel* is neither an int nor 'mix'
    """
    if numchannels(samples) == 1:
        if isinstance(samples[0], float):
            # Already a flat 1-D array of floats.
            return samples
        elif isinstance(samples[0], np.ndarray):
            # BUG FIX: was np.dnarray, which raised AttributeError for
            # (N, 1)-shaped input. Flatten the single column to 1-D.
            return np.reshape(samples, (len(samples),))
        else:
            raise TypeError("Samples should be numeric, found: %s" % str(type(samples[0])))
    if isinstance(channel, int):
        return samples[:, channel]
    elif channel == 'mix':
        return _mix(samples, scale_by_numchannels=True)
    else:
        raise ValueError("channel has to be an integer indicating a channel,"
                         " or 'mix' to mix down all channels")
convert samples to mono if they are not mono already .
10,857
def getchannel(samples: np.ndarray, ch: int) -> np.ndarray:
    """Return a view of channel *ch* of *samples* (the array itself if mono)."""
    nch = numchannels(samples)
    if ch >= nch:
        raise ValueError("channel %d out of range" % ch)
    return samples if nch == 1 else samples[:, ch]
Returns a view into a channel of samples .
10,858
def bitdepth(data: np.ndarray, snap: bool = True) -> int:
    """Return the number of bits actually used to represent *data*.

    Inspects at most the first 4096 samples. When *snap* is True the
    result is rounded up to a standard depth (8/16/24/32, else 64).
    """
    data = asmono(data)
    nsamples = min(4096, data.shape[0])
    # The denominator of the exact float ratio reveals the used precision.
    used = max(x.as_integer_ratio()[1] for x in data[:nsamples]).bit_length()
    if not snap:
        return used
    for depth in (8, 16, 24, 32):
        if used <= depth:
            return depth
    return 64
returns the number of bits actually used to represent the data .
10,859
def sndwrite_like(samples: np.ndarray, likefile: str, outfile: str) -> None:
    """Write *samples* to *outfile*, copying samplerate and encoding from *likefile*."""
    reference = sndinfo(likefile)
    sndwrite(samples, reference.samplerate, outfile, encoding=reference.encoding)
Write samples to outfile with samplerate and encoding taken from likefile
10,860
def _wavReadData ( fid , size : int , channels : int , encoding : str , bigendian : bool ) -> np . ndarray : bits = int ( encoding [ 3 : ] ) if bits == 8 : data = np . fromfile ( fid , dtype = np . ubyte , count = size ) if channels > 1 : data = data . reshape ( - 1 , channels ) else : bytes = bits // 8 if encoding in ( 'pcm16' , 'pcm32' , 'pcm64' ) : if bigendian : dtype = '>i%d' % bytes else : dtype = '<i%d' % bytes data = np . fromfile ( fid , dtype = dtype , count = size // bytes ) if channels > 1 : data = data . reshape ( - 1 , channels ) elif encoding [ : 3 ] == 'flt' : print ( "flt32!" ) if bits == 32 : if bigendian : dtype = '>f4' else : dtype = '<f4' else : raise NotImplementedError data = np . fromfile ( fid , dtype = dtype , count = size // bytes ) if channels > 1 : data = data . reshape ( - 1 , channels ) elif encoding == 'pcm24' : data = _numpy24to32bit ( np . fromfile ( fid , dtype = np . ubyte , count = size ) , bigendian = False ) if channels > 1 : data = data . reshape ( - 1 , channels ) return data
adapted from scipy . io . wavfile . _read_data_chunk
10,861
def _wavGetInfo(f: Union[IO, str]) -> Tuple[SndInfo, Dict[str, Any]]:
    """Read the info of a wav file. Taken mostly from scipy.io.wavfile.

    Args:
        f: an open binary file, or a path to open

    Returns:
        a tuple (SndInfo, extra) where extra holds 'fsize', 'bigendian'
        and 'datasize'.

    NOTE(review): assumes the 'fmt ' chunk appears before the 'data'
    chunk; a file with 'data' first would fail on the unbound
    chans/bits locals -- confirm against real-world files.
    """
    if isinstance(f, (str, bytes)):
        # Accept a path: open it ourselves and remember to close it.
        f = open(f, 'rb')
        needsclosing = True
    else:
        needsclosing = False
    fsize, bigendian = _wavReadRiff(f)
    # Endianness-dependent struct format for 32-bit chunk sizes.
    fmt = ">i" if bigendian else "<i"
    while (f.tell() < fsize):
        chunk_id = f.read(4)
        if chunk_id == b'fmt ':
            chunksize, sampfmt, chans, sr, byterate, align, bits = _wavReadFmt(f, bigendian)
        elif chunk_id == b'data':
            datasize = _struct.unpack(fmt, f.read(4))[0]
            # Bytes per frame = channels * bytes-per-sample.
            nframes = int(datasize / (chans * (bits / 8)))
            break
        else:
            # Unknown chunk: warn and skip its payload.
            _warnings.warn("chunk not understood: %s" % chunk_id)
            data = f.read(4)
            size = _struct.unpack(fmt, data)[0]
            f.seek(size, 1)
    encoding = _encoding(sampfmt, bits)
    if needsclosing:
        f.close()
    info = SndInfo(sr, nframes, chans, encoding, "wav")
    return info, {'fsize': fsize, 'bigendian': bigendian, 'datasize': datasize}
Read the info of a wav file . taken mostly from scipy . io . wavfile
10,862
def connect(self):
    """Create the connection to the server.

    Resolves the best address for (host, port), opens a socket with the
    configured timeout and connects it.
    """
    params = self.best_connection_params(self.host, self.port)
    family, stype, _proto, _cname, sockaddr = params
    self.sock = socket.socket(family, stype)
    self.sock.settimeout(self.timeout)
    self.sock.connect(sockaddr)
Create connection to server
10,863
def getchallenge(self):
    """Request and return the server challenge."""
    self.sock.send(CHALLENGE_PACKET)
    for reply in self.read_iterator(self.CHALLENGE_TIMEOUT):
        if reply.startswith(CHALLENGE_RESPONSE_HEADER):
            return parse_challenge_response(reply)
Return server challenge
10,864
def send(self, command):
    """Send an rcon *command* to the server, using the configured security mode."""
    mode = self.secure_rcon
    if mode == self.RCON_NOSECURE:
        packet = rcon_nosecure_packet(self.password, command)
    elif mode == self.RCON_SECURE_TIME:
        packet = rcon_secure_time_packet(self.password, command)
    elif mode == self.RCON_SECURE_CHALLENGE:
        # Fetch a fresh challenge from the server before building the packet.
        packet = rcon_secure_challenge_packet(self.password, self.getchallenge(), command)
    else:
        raise ValueError("Bad value of secure_rcon")
    self.sock.send(packet)
Send rcon command to server
10,865
def parse(html_string, wrapper=Parser, *args, **kwargs):
    """Parse *html_string* and wrap the root element with *wrapper*.

    BUG FIX: the *wrapper* argument was previously ignored and
    ``Parser`` was always used regardless of what the caller passed.
    Extra positional and keyword arguments are forwarded to the wrapper.
    """
    return wrapper(lxml.html.fromstring(html_string), *args, **kwargs)
Parse html with wrapper
10,866
def str2int(string_with_int):
    """Collect the ASCII digits from *string_with_int* and return them as an int (0 if none)."""
    digits = "".join(filter(string.digits.__contains__, string_with_int))
    return int(digits) if digits else 0
Collect digits from a string
10,867
def to_unicode(obj, encoding='utf-8'):
    """Decode *obj* to a unicode string if it is a (byte) string; pass through otherwise."""
    is_stringlike = isinstance(obj, (string_types, binary_type))
    if is_stringlike and not isinstance(obj, text_type):
        obj = text_type(obj, encoding)
    return obj
Convert string to unicode string
10,868
def strip_spaces(s):
    """Collapse runs of spaces in *s* into single spaces (also trims the ends)."""
    return u" ".join(filter(None, s.split(u' ')))
Strip excess spaces from a string
10,869
def strip_linebreaks(s):
    """Collapse runs of newlines in *s* into single newlines (also trims the ends)."""
    return u"\n".join(filter(None, s.split(u'\n')))
Strip excess line breaks from a string
10,870
def get(self, selector, index=0, default=None):
    """Return the *index*-th element matching the CSS *selector*, or *default*."""
    matches = self(selector)
    if not matches:
        return default
    try:
        return matches[index]
    except IndexError:
        return default
Get first element from CSSSelector
10,871
def html(self, unicode=False):
    """Return this element serialized as HTML (bytes by default, str when *unicode*)."""
    markup = lxml.html.tostring(self.element, encoding=self.encoding)
    return markup.decode(self.encoding) if unicode else markup
Return HTML of element
10,872
def parse(self, func, *args, **kwargs):
    """Recursively transform this element's child nodes with *func*.

    Parser-wrapped child elements are parsed depth-first; *func* is
    called as func(element, parsed_children, *args, **kwargs) and its
    truthy result replaces the element (falsy results drop it).
    Non-element child nodes (e.g. text) are kept as-is.

    Returns the joined unicode string of all results.
    """
    result = []
    for element in self.xpath('child::node()'):
        if isinstance(element, Parser):
            # Parse children first so func sees the already-converted body.
            children = element.parse(func, *args, **kwargs)
            element_result = func(element, children, *args, **kwargs)
            if element_result:
                result.append(element_result)
        else:
            result.append(element)
    return u"".join(result)
Parse element with given function
10,873
def _wrap_result(self, func):
    """Decorate *func* so its return value(s) are wrapped via _wrap_element."""
    def wrapped(*args):
        value = func(*args)
        # Single elements are wrapped directly; iterables element-wise.
        if isinstance(value, etree._Element) or not hasattr(value, '__iter__'):
            return self._wrap_element(value)
        return [self._wrap_element(item) for item in value]
    return wrapped
Wrap result in Parser instance
10,874
def _wrap_element(self, result):
    """Wrap *result* in a Parser when it is an HtmlElement; otherwise return it unchanged."""
    wrap = isinstance(result, lxml.html.HtmlElement)
    return Parser(result) if wrap else result
Wrap single element in Parser instance
10,875
def parse_inline(self):
    """Run the postponed inline-parsing pass on this block element.

    Inline parsing is postponed so that all link references are seen
    before it happens.
    """
    if self.inline_children:
        self.children = parser.parse_inline(self.children)
        return
    children = getattr(self, 'children', None)
    if isinstance(children, list):
        # Recurse into nested block elements only.
        for child in children:
            if isinstance(child, BlockElement):
                child.parse_inline()
Inline parsing is postponed so that all link references are seen before that .
10,876
def work_model_factory(*, validator=validators.is_work_model, **kwargs):
    """Generate a Work model (ld_type is forced to 'AbstractWork')."""
    return _model_factory(validator=validator,
                          **dict(kwargs, ld_type='AbstractWork'))
Generate a Work model .
10,877
def manifestation_model_factory(*, validator=validators.is_manifestation_model,
                                ld_type='CreativeWork', **kwargs):
    """Generate a Manifestation model."""
    kwargs.update(validator=validator, ld_type=ld_type)
    return _model_factory(**kwargs)
Generate a Manifestation model .
10,878
def right_model_factory(*, validator=validators.is_right_model,
                        ld_type='Right', **kwargs):
    """Generate a Right model."""
    kwargs.update(validator=validator, ld_type=ld_type)
    return _model_factory(**kwargs)
Generate a Right model .
10,879
def copyright_model_factory(*, validator=validators.is_copyright_model, **kwargs):
    """Generate a Copyright model (ld_type is forced to 'Copyright')."""
    return _model_factory(validator=validator,
                          **dict(kwargs, ld_type='Copyright'))
Generate a Copyright model .
10,880
def mark_error_retryable(error):
    """Mark an exception instance or type as retryable.

    If a marked exception is caught by pyramid_retry it may retry the
    request. Raises ValueError for anything that is not an exception
    object or exception type.
    """
    if isinstance(error, Exception):
        alsoProvides(error, IRetryableError)
        return
    if inspect.isclass(error) and issubclass(error, Exception):
        classImplements(error, IRetryableError)
        return
    raise ValueError('only exception objects or types may be marked retryable')
Mark an exception instance or type as retryable . If this exception is caught by pyramid_retry then it may retry the request .
10,881
def is_last_attempt(request):
    """Return True when pyramid_retry will issue no further attempts for *request*.

    Also True when the retry metadata is absent from the WSGI environ
    (i.e. the policy is not active).
    """
    env = request.environ
    attempt = env.get('retry.attempt')
    attempts = env.get('retry.attempts')
    if attempt is not None and attempts is not None:
        return attempt + 1 == attempts
    return True
Return True if the request is on its last attempt meaning that pyramid_retry will not be issuing any new attempts regardless of what happens when executing this request .
10,882
def includeme(config):
    """Activate the pyramid_retry execution policy in your application.

    Registers the ``last_retry_attempt`` and ``retryable_error`` view
    predicates, and installs a RetryableExecutionPolicy configured from
    the ``retry.attempts`` (int, default 3) and ``retry.activate_hook``
    (dotted name or callable) settings.
    """
    settings = config.get_settings()
    config.add_view_predicate('last_retry_attempt', LastAttemptPredicate)
    config.add_view_predicate('retryable_error', RetryableErrorPredicate)

    def register():
        # Read settings here (not at include time) so values set after
        # config.include() are honored -- deferred via config.action below.
        attempts = int(settings.get('retry.attempts') or 3)
        # Normalize so other code sees the effective integer value.
        settings['retry.attempts'] = attempts
        activate_hook = settings.get('retry.activate_hook')
        activate_hook = config.maybe_dotted(activate_hook)
        policy = RetryableExecutionPolicy(
            attempts,
            activate_hook=activate_hook,
        )
        config.set_execution_policy(policy)
    config.action(None, register, order=PHASE1_CONFIG)
Activate the pyramid_retry execution policy in your application .
10,883
def filter_butter_coeffs(filtertype, freq, samplerate, order=5):
    """Calculate the (b, a) coefficients for a digital Butterworth filter.

    *freq* is a cutoff in Hz, or a (low, high) tuple for a band filter;
    frequencies are normalized against the Nyquist frequency.
    """
    assert filtertype in ('low', 'high', 'band')
    nyquist = samplerate * 0.5
    if isinstance(freq, tuple):
        assert filtertype == 'band'
        lo, hi = freq
        return signal.butter(order, [lo / nyquist, hi / nyquist], btype='band')
    return signal.butter(order, freq / nyquist, btype=filtertype)
calculates the coefficients for a digital butterworth filter
10,884
def filter_butter(samples, samplerate, filtertype, freq, order=5):
    """Filter *samples* with a digital Butterworth filter (applied per channel)."""
    assert filtertype in ('low', 'high', 'band')
    b, a = filter_butter_coeffs(filtertype, freq, samplerate, order=order)

    def _apply(data):
        return signal.lfilter(b, a, data)

    return apply_multichannel(samples, _apply)
Filters the samples with a digital butterworth filter
10,885
def token_middleware(ctx, get_response):
    """Middleware factory: reinject ctx.token into request params when absent.

    The token is only filled in when the request's 'token' param is
    missing or explicitly None; an existing token is left untouched.
    """
    async def _with_token(request):
        params = request.setdefault('params', {})
        if params.get("token") is None:
            params['token'] = ctx.token
        return await get_response(request)
    return _with_token
Reinject token and consistency into requests .
10,886
def rebuild(self):
    """Rebuild the dependency path for this item.

    Draws an elbow (vertical then horizontal leg) from the right side
    of the source view item to the left edge of the target view item,
    terminated with a small triangular arrowhead.
    """
    scene = self.scene()
    if (not scene):
        return
    # Geometry of the two connected view items.
    sourcePos = self.sourceItem().viewItem().pos()
    sourceRect = self.sourceItem().viewItem().rect()
    targetPos = self.targetItem().viewItem().pos()
    targetRect = self.targetItem().viewItem().rect()
    cellWidth = scene.ganttWidget().cellWidth()
    # Start half a cell inside the source item's right edge, vertically centered.
    startX = sourcePos.x() + sourceRect.width() - (cellWidth / 2.0)
    startY = sourcePos.y() + (sourceRect.height() / 2.0)
    # End just left of the target item, vertically centered.
    endX = targetPos.x() - 2
    endY = targetPos.y() + (targetRect.height() / 2.0)
    path = QPainterPath()
    path.moveTo(startX, startY)
    path.lineTo(startX, endY)  # vertical leg
    path.lineTo(endX, endY)    # horizontal leg
    # Arrowhead triangle pointing at the target (closed back to its start).
    a = QPointF(endX - 10, endY - 3)
    b = QPointF(endX, endY)
    c = QPointF(endX - 10, endY + 3)
    self._polygon = QPolygonF([a, b, c, a])
    path.addPolygon(self._polygon)
    self.setPath(path)
Rebuilds the dependency path for this item .
10,887
def _writeBlock ( block , blockID ) : with open ( "blockIDs.txt" , "a" ) as fp : fp . write ( "blockID: " + str ( blockID ) + "\n" ) sentences = "" for sentence in block : sentences += sentence + "," fp . write ( "block sentences: " + sentences [ : - 1 ] + "\n" ) fp . write ( "\n" )
writes the block to a file with the id
10,888
def _writeSentenceInBlock ( sentence , blockID , sentenceID ) : with open ( "sentenceIDs.txt" , "a" ) as fp : fp . write ( "sentenceID: " + str ( blockID ) + "_" + str ( sentenceID ) + "\n" ) fp . write ( "sentence string: " + sentence + "\n" ) fp . write ( "\n" )
writes the sentence in a block to a file with the id
10,889
def _writeWordFromSentenceInBlock ( word , blockID , sentenceID , wordID ) : with open ( "wordIDs.txt" , "a" ) as fp : fp . write ( "wordID: " + str ( blockID ) + "_" + str ( sentenceID ) + "_" + str ( wordID ) + "\n" ) fp . write ( "wordString: " + word + "\n" ) fp . write ( "\n" )
writes the word from a sentence in a block to a file with the id
10,890
def _writeBk ( target = "sentenceContainsTarget(+SID,+WID)." , treeDepth = "3" , nodeSize = "3" , numOfClauses = "8" ) : with open ( 'bk.txt' , 'w' ) as bk : bk . write ( "useStdLogicVariables: true\n" ) bk . write ( "setParam: treeDepth=" + str ( treeDepth ) + '.\n' ) bk . write ( "setParam: nodeSize=" + str ( nodeSize ) + '.\n' ) bk . write ( "setParam: numOfClauses=" + str ( numOfClauses ) + '.\n' ) bk . write ( "mode: nextSentenceInBlock(+BID,+SID,-SID).\n" ) bk . write ( "mode: nextSentenceInBlock(+BID,-SID,+SID).\n" ) bk . write ( "mode: earlySentenceInBlock(+BID,-SID).\n" ) bk . write ( "mode: midWaySentenceInBlock(+BID,-SID).\n" ) bk . write ( "mode: lateSentenceInBlock(+BID,-SID).\n" ) bk . write ( "mode: sentenceInBlock(-SID,+BID).\n" ) bk . write ( "mode: wordString(+WID,#WSTR).\n" ) bk . write ( "mode: partOfSpeechTag(+WID,#WPOS).\n" ) bk . write ( "mode: nextWordInSentence(+SID,+WID,-WID).\n" ) bk . write ( "mode: earlyWordInSentence(+SID,-WID).\n" ) bk . write ( "mode: midWayWordInSentence(+SID,-WID).\n" ) bk . write ( "mode: lateWordInSentence(+SID,-WID).\n" ) bk . write ( "mode: wordInSentence(-WID,+SID).\n" ) bk . write ( "mode: " + target + "\n" ) return
Writes a background file to disk .
10,891
def traverse_depth_first_pre_order(self, callback):
    """Visit the internal nodes of the enhanced suffix array in
    depth-first pre-order (parents before children).

    *callback* receives each interval as a list [lcp, lb, rb, label].
    NOTE(review): the 4th element is "" for the root and is used only
    as the child sort key here; confirm its exact meaning against
    _get_child_intervals.
    """
    n = len(self.suftab)
    # Root interval spans the entire suffix array.
    root = [0, 0, n - 1, ""]

    def _traverse_top_down(interval):
        callback(interval)  # visit the parent first (pre-order)
        i, j = interval[1], interval[2]
        if i != j:  # singleton intervals have no children
            children = self._get_child_intervals(i, j)
            # Deterministic order: sort children by their label.
            children.sort(key=lambda child: child[3])
            for child in children:
                _traverse_top_down(child)
    _traverse_top_down(root)
Visits the internal nodes of the enhanced suffix array in depth - first pre - order .
10,892
def traverse_depth_first_post_order(self, callback):
    """Visit the internal nodes of the enhanced suffix array in
    depth-first post-order (children before parents).

    Uses the classic lcp-table stack algorithm: intervals are mutable
    lists [lcp, lb, rb, children]; rb is filled in when an interval is
    closed. Python 2 only (uses xrange).
    """
    last_interval = None
    n = len(self.suftab)
    # Root interval; its right bound is unknown until the end.
    stack = [[0, 0, None, []]]
    for i in xrange(1, n):
        lb = i - 1
        # Close every interval that is deeper than the current lcp value.
        while self.lcptab[i] < stack[-1][0]:
            stack[-1][2] = i - 1
            last_interval = stack.pop()
            callback(last_interval)
            lb = last_interval[1]
            if self.lcptab[i] <= stack[-1][0]:
                # The closed interval is a child of the new stack top.
                stack[-1][3].append(last_interval)
                last_interval = None
        if self.lcptab[i] > stack[-1][0]:
            # Open a new, deeper interval; it adopts the interval just
            # closed (if any) as its first child.
            if last_interval:
                stack.append([self.lcptab[i], lb, None, [last_interval]])
                last_interval = None
            else:
                stack.append([self.lcptab[i], lb, None, []])
    # Close and report the root interval.
    stack[-1][2] = n - 1
    callback(stack[-1])
Visits the internal nodes of the enhanced suffix array in depth - first post - order .
10,893
def _DecodeKey(self, key):
    """Map an attribute key back to its dictionary name when known, else return it as-is."""
    index = self.dict.attrindex
    return index.GetBackward(key) if index.HasBackward(key) else key
Turn a key into a string if possible
10,894
def AddAttribute(self, key, value):
    """Add an attribute (single value or list of values) to the packet."""
    values = value if isinstance(value, list) else [value]
    # Encode the key and values into their wire representation.
    key, values = self._EncodeKeyValues(key, values)
    self.setdefault(key, []).extend(values)
Add an attribute to the packet .
10,895
def CreateAuthenticator():
    """Create a packet authenticator.

    All RADIUS packets contain a sixteen-byte authenticator, used to
    authenticate replies from the RADIUS server and in the password
    hiding algorithm. Returns sixteen random bytes (a byte string on
    Python 2).
    """
    data = [random_generator.randrange(0, 256) for _ in range(16)]
    if six.PY3:
        return bytes(data)
    return ''.join(chr(b) for b in data)
Create a packet authenticator . All RADIUS packets contain a sixteen byte authenticator which is used to authenticate replies from the RADIUS server and in the password hiding algorithm . This function returns a suitable random string that can be used as an authenticator .
10,896
def DecodePacket(self, packet):
    """Initialize the object from raw packet data.

    Decode a packet as received from the network: parse the 20-byte
    header (code, id, length, authenticator), then walk the attribute
    TLV list that follows, collecting values per attribute key.
    Vendor-Specific attributes (key 26) are unpacked into
    (vendor, subkey) entries where possible.

    Raises PacketError on any structural problem.
    """
    try:
        (self.code, self.id, length, self.authenticator) = struct.unpack('!BBH16s', packet[0:20])
    except struct.error:
        raise PacketError('Packet header is corrupt')
    if len(packet) != length:
        raise PacketError('Packet has invalid length')
    if length > 8192:
        raise PacketError('Packet length is too long (%d)' % length)
    # Drop any previously-decoded attributes before refilling.
    self.clear()
    packet = packet[20:]
    while packet:
        try:
            (key, attrlen) = struct.unpack('!BB', packet[0:2])
        except struct.error:
            raise PacketError('Attribute header is corrupt')
        if attrlen < 2:
            # attrlen includes the 2-byte attribute header itself.
            raise PacketError('Attribute length is too small (%d)' % attrlen)
        value = packet[2:attrlen]
        if key == 26:
            # Vendor-Specific attribute: try to decode sub-attributes.
            (vendor, subattrs) = self._PktDecodeVendorAttribute(value)
            if vendor is None:
                # Unknown vendor format: keep the raw value under key 26.
                self.setdefault(key, []).append(value)
            else:
                for (k, v) in subattrs:
                    self.setdefault((vendor, k), []).append(v)
        else:
            self.setdefault(key, []).append(value)
        packet = packet[attrlen:]
Initialize the object from raw packet data . Decode a packet as received from the network and decode it .
10,897
def PwDecrypt(self, password):
    """Unobfuscate a RADIUS password.

    RADIUS hides passwords in packets by using an algorithm based on
    the MD5 hash of the packet authenticator and RADIUS secret. This
    function reverses the obfuscation process: each 16-byte block is
    XORed with MD5(secret + previous ciphertext block), where the
    "previous block" for the first iteration is the authenticator.

    :param password: obfuscated password (length a multiple of 16)
    :return: plaintext password decoded as UTF-8
    """
    buf = password
    pw = six.b('')
    last = self.authenticator
    while buf:
        hash = md5_constructor(self.secret + last).digest()
        if six.PY3:
            for i in range(16):
                pw += bytes((hash[i] ^ buf[i],))
        else:
            for i in range(16):
                pw += chr(ord(hash[i]) ^ ord(buf[i]))
        # Next round keys off the current ciphertext block.
        (last, buf) = (buf[:16], buf[16:])
    # Strip the NUL padding added during encryption.
    while pw.endswith(six.b('\x00')):
        pw = pw[:-1]
    return pw.decode('utf-8')
Unobfuscate a RADIUS password . RADIUS hides passwords in packets by using an algorithm based on the MD5 hash of the packet authenticator and RADIUS secret . This function reverses the obfuscation process .
10,898
def PwCrypt(self, password):
    """Obfuscate password.

    RADIUS hides passwords in packets by using an algorithm based on
    the MD5 hash of the packet authenticator and RADIUS secret. If no
    authenticator has been set before calling PwCrypt one is created
    automatically. Changing the authenticator after setting a password
    that has been encrypted using this function will not work.

    :param password: plaintext password (str or bytes)
    :return: obfuscated password (length a multiple of 16)
    """
    if self.authenticator is None:
        self.authenticator = self.CreateAuthenticator()
    if isinstance(password, six.text_type):
        password = password.encode('utf-8')
    buf = password
    # Pad with NUL bytes to a multiple of 16 (the MD5 block granularity).
    if len(password) % 16 != 0:
        buf += six.b('\x00') * (16 - (len(password) % 16))
    hash = md5_constructor(self.secret + self.authenticator).digest()
    result = six.b('')
    last = self.authenticator
    while buf:
        # Each block is XORed with MD5(secret + previous ciphertext block);
        # the first "previous block" is the authenticator.
        hash = md5_constructor(self.secret + last).digest()
        if six.PY3:
            for i in range(16):
                result += bytes((hash[i] ^ buf[i],))
        else:
            for i in range(16):
                result += chr(ord(hash[i]) ^ ord(buf[i]))
        last = result[-16:]
        buf = buf[16:]
    return result
Obfuscate password . RADIUS hides passwords in packets by using an algorithm based on the MD5 hash of the packet authenticator and RADIUS secret . If no authenticator has been set before calling PwCrypt one is created automatically . Changing the authenticator after setting a password that has been encrypted using this function will not work .
10,899
def clear(self):
    """Clears out this toolbar from the system.

    After the base-class clear, recreates the collapse button when the
    toolbar is collapsable, or disposes of any existing one otherwise.
    """
    super(XToolBar, self).clear()
    if self.isCollapsable():
        # Recreate the collapse/expand toggle button.
        self._collapseButton = QToolButton(self)
        self._collapseButton.setAutoRaise(True)
        self._collapseButton.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
        self.addWidget(self._collapseButton)
        self.refreshButton()
        # Wire the button to toggle the collapsed state.
        self._collapseButton.clicked.connect(self.toggleCollapsed)
    elif self._collapseButton:
        # Not collapsable anymore: detach and schedule deletion.
        self._collapseButton.setParent(None)
        self._collapseButton.deleteLater()
        self._collapseButton = None
Clears out this toolbar from the system .