idx
int64 0
63k
| question
stringlengths 61
4.03k
| target
stringlengths 6
1.23k
|
|---|---|---|
6,900
|
def status(*args, **kwargs):
    """Print a status message at the start and finish of a function.

    Usable either as a decorator factory (``@status("msg")``) or called
    directly as ``status(msg, func, [args], [kwargs])``.  The ``fin``
    keyword sets the completion message (default "DONE.").
    """
    def decor(func):
        @wraps(func)
        def wrapper(*wargs, **wkrgs):
            echo("[!] " + msg, end=" ", flush=True)
            result = func(*wargs, **wkrgs)
            echo(fin, flush=True)
            return result
        return wrapper
    fin = kwargs.pop('fin', "DONE.")
    args = list(args)
    if len(args) > 1 and callable(args[1]):
        # Direct-call form: status(msg, func, [fargs], [fkrgs]).
        msg = args.pop(0)
        func = args.pop(0)
        # Narrow the exception type: only a missing positional argument
        # should fall back to the default (bare except hid real errors).
        try:
            fargs = args.pop(0)
        except IndexError:
            fargs = []
        try:
            fkrgs = args.pop(0)
        except IndexError:
            fkrgs = {}
        return decor(func)(*fargs, **fkrgs)
    # Decorator-factory form: status(msg).
    msg = args.pop(0)
    return decor
|
Prints a status message at the start and finish of an associated function . Can be used as a function decorator or as a function that accepts another function as the first parameter .
|
6,901
|
def fatal(msg, exitcode=1, **kwargs):
    """Print a message then exit the program with ``exitcode``.

    Pass ``pause=True`` to wait for user input before exiting; remaining
    keyword arguments are forwarded to ``echo()``.
    """
    # pop() with a default replaces the redundant `in kwargs.keys()` test.
    pause_before_exit = kwargs.pop("pause", False)
    echo("[FATAL] " + msg, **kwargs)
    if pause_before_exit:
        pause()
    sys.exit(exitcode)
|
Prints a message then exits the program . Optionally pause before exit with pause = True kwarg .
|
6,902
|
def hrule(width=None, char=None):
    """Print a horizontal rule of ``char`` repeated ``width`` times.

    Falls back to the module defaults HRCHAR/HRWIDTH and returns the
    printed string (the return value of ``echo``).
    """
    if not width:
        width = HRWIDTH
    if not char:
        char = HRCHAR
    return echo(getline(char, width))
|
Outputs or returns a horizontal line of the given character and width . Returns printed string .
|
6,903
|
def title(msg):
    """Set the console window title (Windows only; a no-op elsewhere)."""
    on_windows = sys.platform.startswith("win")
    if on_windows:
        ctypes.windll.kernel32.SetConsoleTitleW(tounicode(msg))
|
Sets the title of the console window .
|
6,904
|
def wrap(item, args=None, krgs=None, **kwargs):
    """Wrap the given item content between horizontal rules.

    ``item`` may be a plain string (echoed) or a callable (invoked with
    the optional ``args``/``krgs``); extra keyword arguments configure
    the Wrap context manager.
    """
    with Wrap(**kwargs):
        if not callable(item):
            echo(item)
        else:
            item(*(args or []), **(krgs or {}))
|
Wraps the given item content between horizontal lines . Item can be a string or a function .
|
6,905
|
def _guess_name ( desc , taken = None ) : taken = taken or [ ] name = "" for word in desc . split ( ) : c = word [ 0 ] . lower ( ) if not c . isalnum ( ) : continue name += c if name not in taken : break count = 2 while name in taken : name = name + str ( count ) count += 1 return name
|
Attempts to guess the menu entry name from the entry description .
|
6,906
|
def add(self, name, desc, func=None, args=None, krgs=None):
    """Append a new entry to this menu's entry list."""
    entry = MenuEntry(name, desc, func, args or [], krgs or {})
    self.entries.append(entry)
|
Add a menu entry .
|
6,907
|
def enum(self, desc, func=None, args=None, krgs=None):
    """Append a menu entry named with the next auto-incremented index."""
    auto_name = str(len(self.entries) + 1)
    entry = MenuEntry(auto_name, desc, func, args or [], krgs or {})
    self.entries.append(entry)
|
Add a menu entry whose name will be an auto indexed number .
|
6,908
|
def run(self, name):
    """Run the function associated with the first entry matching ``name``."""
    matching = (entry for entry in self.entries if entry.name == name)
    entry = next(matching, None)
    if entry is not None:
        run_func(entry)
|
Runs the function associated with the given entry name .
|
6,909
|
def partition_scripts(scripts, start_type1, start_type2):
    """Split ``scripts`` into (matching, other) lists.

    A script matches when its start type equals either ``start_type1``
    or ``start_type2``.
    """
    match, other = [], []
    for script in scripts:
        # Compute the start type once instead of twice per script.
        start_type = HairballPlugin.script_start_type(script)
        if start_type in (start_type1, start_type2):
            match.append(script)
        else:
            other.append(script)
    return match, other
|
Return two lists of scripts out of the original scripts list .
|
6,910
|
def attribute_result(cls, sprites):
    """Return a mapping of attribute name -> whether it stayed initialized.

    An attribute is reported True unless any sprite left it in the
    STATE_MODIFIED state.
    """
    result = {attr: True for attr in cls.ATTRIBUTES}
    for properties in sprites.values():
        for attr, state in properties.items():
            result[attr] = result[attr] and state != cls.STATE_MODIFIED
    return result
|
Return mapping of attributes to if they were initialized or not .
|
6,911
|
def attribute_state(cls, scripts, attribute):
    """Return the state of the scripts for the given attribute.

    Green-flag (and clone) scripts are scanned first: a top-level
    "absolute" set before any 'broadcast %s and wait' counts as
    initialized; any other matching use counts as modified.  Matching
    blocks in the remaining scripts always count as modified.
    """
    green_flag, other = partition_scripts(scripts, cls.HAT_GREEN_FLAG, cls.HAT_CLONE)
    block_set = cls.BLOCKMAPPING[attribute]
    state = cls.STATE_NOT_MODIFIED
    for script in green_flag:
        in_zone = True  # still before the first 'broadcast %s and wait'
        for name, level, _ in cls.iter_blocks(script.blocks):
            if name == 'broadcast %s and wait':
                in_zone = False
            if (name, 'absolute') in block_set:
                if in_zone and level == 0:  # top-level set, pre-broadcast
                    if state == cls.STATE_NOT_MODIFIED:
                        state = cls.STATE_INITIALIZED
                    else:
                        state = cls.STATE_MODIFIED
                elif in_zone:
                    continue  # nested absolute set inside the zone: ignore
                else:
                    state = cls.STATE_MODIFIED
                    break
            elif (name, 'relative') in block_set:
                state = cls.STATE_MODIFIED
                break
        if state != cls.STATE_NOT_MODIFIED:
            return state
    # Any matching block in non-green-flag scripts marks the attribute
    # as modified, regardless of its position.
    for script in other:
        for name, _, _ in cls.iter_blocks(script.blocks):
            if name in [x[0] for x in block_set]:
                return cls.STATE_MODIFIED
    return cls.STATE_NOT_MODIFIED
|
Return the state of the scripts for the given attribute .
|
6,912
|
def output_results(cls, sprites):
    """Print a header row of attribute names, then their init results."""
    print(' '.join(cls.ATTRIBUTES))
    cells = ['{{{}!s:^{}}}'.format(attr, len(attr)) for attr in cls.ATTRIBUTES]
    row_template = ' '.join(cells)
    print(row_template.format(**cls.attribute_result(sprites)))
|
Output whether or not each attribute was correctly initialized .
|
6,913
|
def sprite_changes(cls, sprite):
    """Return attribute -> initialization state for a single sprite.

    The 'background' attribute is stage-only and is skipped here.
    """
    return {
        attr: cls.attribute_state(sprite.scripts, attr)
        for attr in cls.ATTRIBUTES
        if attr != 'background'
    }
|
Return a mapping of attributes to their initialization state .
|
6,914
|
def analyze(self, scratch, **kwargs):
    """Run the AttributeInitialization plugin and return its results.

    Produces a per-sprite mapping of attribute states plus a 'stage'
    entry tracking the background (costume) initialization.
    """
    changes = {sprite.name: self.sprite_changes(sprite)
               for sprite in scratch.sprites}
    stage_state = self.attribute_state(scratch.stage.scripts, 'costume')
    changes['stage'] = {'background': stage_state}
    return {'initialized': changes}
|
Run and return the results of the AttributeInitialization plugin .
|
6,915
|
def variable_state(cls, scripts, variables):
    """Return the initialization state for each variable in ``variables``.

    Mirrors attribute_state: a green-flag script may initialize a
    variable with a top-level set block before any broadcast-and-wait;
    any other set/change use marks the variable modified.
    """
    def conditionally_set_not_modified():
        # Demote a not-yet-modified variable to modified on a change block.
        state = variables.get(block.args[0], None)
        if state == cls.STATE_NOT_MODIFIED:
            variables[block.args[0]] = cls.STATE_MODIFIED
    # NOTE(review): partition_scripts elsewhere in this file takes two
    # start types; this call passes only one — confirm the intended API.
    green_flag, other = partition_scripts(scripts, cls.HAT_GREEN_FLAG)
    # Re-key the incoming variable names to the NOT_MODIFIED start state.
    variables = dict((x, cls.STATE_NOT_MODIFIED) for x in variables)
    for script in green_flag:
        in_zone = True  # before the first 'broadcast %s and wait'
        for name, level, block in cls.iter_blocks(script.blocks):
            if name == 'broadcast %s and wait':
                in_zone = False
            if name == 'set %s effect to %s':
                state = variables.get(block.args[0], None)
                if state is None:
                    continue  # not a tracked variable
                if in_zone and level == 0:  # top-level, pre-broadcast
                    if state == cls.STATE_NOT_MODIFIED:
                        state = cls.STATE_INITIALIZED
                    else:
                        state = cls.STATE_MODIFIED
                elif in_zone:
                    continue  # nested set inside the zone: ignore
                elif state == cls.STATE_NOT_MODIFIED:
                    state = cls.STATE_MODIFIED
                variables[block.args[0]] = state
            elif name == 'change %s effect by %s':
                conditionally_set_not_modified()
    # Any set/change in non-green-flag scripts marks the variable modified.
    for script in other:
        for name, _, block in cls.iter_blocks(script.blocks):
            if name in ('change %s effect by %s', 'set %s effect to %s'):
                conditionally_set_not_modified()
    return variables
|
Return the initialization state for each variable in variables .
|
6,916
|
def analyze(self, scratch, **kwargs):
    """Run the VariableInitialization plugin and return its results.

    Maps each sprite (plus a 'global' key for stage-level variables) to
    the initialization state of its variables.
    """
    variables = dict((sprite, self.variable_state(sprite.scripts, sprite.variables))
                     for sprite in scratch.sprites)
    variables['global'] = self.variable_state(self.iter_scripts(scratch),
                                              scratch.stage.variables)
    # Removed leftover debug output (`import pprint; pprint.pprint(...)`);
    # the data is already returned to the caller.
    return {'variables': variables}
|
Run and return the results of the VariableInitialization plugin .
|
6,917
|
def finalize(self):
    """Report the default sprite names discovered in the project."""
    print('{} default sprite names found:'.format(self.total_default))
    for sprite_name in self.list_default:
        print(sprite_name)
|
Output the default sprite names found in the project .
|
6,918
|
def analyze(self, scratch, **kwargs):
    """Tally sprites whose names contain any of the default names.

    A sprite is counted once per default name that appears in its name
    (matching the original accumulation behavior).
    """
    for sprite in self.iter_sprites(scratch):
        hits = [d for d in self.default_names if d in sprite.name]
        for _ in hits:
            self.total_default += 1
            self.list_default.append(sprite.name)
|
Run and return the results from the SpriteNaming plugin .
|
6,919
|
def prepare_request_params(self, _query_params, _json_params):
    """Prepare query, JSON and combined request params on the view.

    Query params default to the request's mixed params; JSON bodies are
    merged in for POST/PUT/PATCH requests; dotted keys are expanded and
    ``self._params`` is the union with JSON params taking precedence.
    """
    self._query_params = dictset(
        _query_params or self.request.params.mixed())
    self._json_params = dictset(_json_params)
    ctype = self.request.content_type
    if self.request.method in ['POST', 'PUT', 'PATCH']:
        if ctype == 'application/json':
            try:
                self._json_params.update(self.request.json)
            except simplejson.JSONDecodeError:
                # Malformed JSON body: log it and keep the params we have.
                log.error("Expecting JSON. Received: '{}'. "
                          "Request: {} {}".format(
                              self.request.body, self.request.method,
                              self.request.url))
    self._json_params = BaseView.convert_dotted(self._json_params)
    self._query_params = BaseView.convert_dotted(self._query_params)
    self._params = self._query_params.copy()
    self._params.update(self._json_params)
|
Prepare query and update params .
|
6,920
|
def set_override_rendered(self):
    """Pick ``self.request.override_renderer`` from the Accept header.

    Leaves the renderer untouched when no known content type matches.
    """
    accept = self.request.accept
    if '' in accept:
        renderer = self._default_renderer
    elif 'application/json' in accept:
        renderer = 'nefertari_json'
    elif 'text/plain' in accept:
        renderer = 'string'
    else:
        return
    self.request.override_renderer = renderer
|
Set self . request . override_renderer if needed .
|
6,921
|
def _setup_aggregation(self, aggregator=None):
    """Wrap ``self.index`` with an ES aggregator when aggregations are on.

    Does nothing when aggregations are disabled in the ES settings or
    when no usable ``index`` action is defined on the view.
    """
    from nefertari.elasticsearch import ES
    if aggregator is None:
        aggregator = ESAggregator
    enabled = ES.settings and ES.settings.asbool('enable_aggregations')
    if not enabled:
        log.debug('Elasticsearch aggregations are not enabled')
        return
    index_view = getattr(self, 'index', None)
    if index_view and index_view != self.not_allowed_action:
        self.index = aggregator(self).wrap(self.index)
|
Wrap self . index method with ESAggregator .
|
6,922
|
def get_collection_es(self):
    """Query the model's ES collection with the current query params."""
    from nefertari.elasticsearch import ES
    es = ES(self.Model.__name__)
    return es.get_collection(**self._query_params)
|
Query ES collection and return results .
|
6,923
|
def set_public_limits(self):
    """Apply public limits when auth is on and the user is anonymous.

    Read requests also get a default ``_limit`` of 20.
    """
    if self.request.method.upper() in ['GET', 'HEAD']:
        self._query_params.process_int_param('_limit', 20)
    unauthenticated = not getattr(self.request, 'user', None)
    if self._auth_enabled and unauthenticated:
        wrappers.set_public_limits(self)
|
Set public limits if auth is enabled and user is not authenticated .
|
6,924
|
def convert_ids2objects(self):
    """Convert relationship-field IDs in ``_json_params`` into objects."""
    if not self.Model:
        log.info("%s has no model defined" % self.__class__.__name__)
        return
    for field in self._json_params.keys():
        if not engine.is_relationship_field(field, self.Model):
            continue
        related_cls = engine.get_relationship_cls(field, self.Model)
        self.id2obj(field, related_cls)
|
Convert object IDs from self . _json_params to objects if needed .
|
6,925
|
def setup_default_wrappers(self):
    """Set up the default after/before call wrappers for CRUD actions.

    Every read/write action gets the same response-shaping wrappers
    (previously five identical hand-written lists); privacy wrappers
    are added only when auth is enabled.
    """
    actions = ('index', 'show', 'create', 'update', 'replace')
    for action in actions:
        # Fresh wrapper instances per action, exactly as before.
        self._after_calls[action] = [
            wrappers.wrap_in_dict(self.request),
            wrappers.add_meta(self.request),
            wrappers.add_object_url(self.request),
        ]
    if self._auth_enabled:
        for action in actions:
            self._after_calls[action] += [
                wrappers.apply_privacy(self.request),
            ]
        for action in ('update', 'replace', 'update_many'):
            self._before_calls[action] += [
                wrappers.apply_request_privacy(
                    self.Model, self._json_params),
            ]
|
Set up the default wrappers .
|
6,926
|
def register(self):
    """Register a new user by POSTing all required data."""
    user, created = self.Model.create_account(self._json_params)
    if not created:
        raise JHTTPConflict('Looks like you already have an account.')
    self.request._user = user
    pk_value = getattr(user, user.pk_field())
    headers = remember(self.request, pk_value)
    return JHTTPOk('Registered', headers=headers)
|
Register new user by POSTing all required data .
|
6,927
|
def register(self):
    """Register a new user, ensuring an ApiKey was generated for them."""
    user, created = self.Model.create_account(self._json_params)
    if user.api_key is None:
        raise JHTTPBadRequest('Failed to generate ApiKey for user')
    if not created:
        raise JHTTPConflict('Looks like you already have an account.')
    self.request._user = user
    headers = remember(self.request, user.username)
    return JHTTPOk('Registered', headers=headers)
|
Register a new user by POSTing all required data .
|
6,928
|
def claim_token(self, **params):
    """Claim the current token by POSTing login and password."""
    self._json_params.update(params)
    success, self.user = self.Model.authenticate_by_password(self._json_params)
    if success:
        headers = remember(self.request, self.user.username)
        return JHTTPOk('Token claimed', headers=headers)
    if self.user:
        raise JHTTPUnauthorized('Wrong login or password')
    raise JHTTPNotFound('User not found')
|
Claim current token by POSTing login and password .
|
6,929
|
def reset_token(self, **params):
    """Reset the current token by POSTing login and password."""
    response = self.claim_token(**params)
    if not self.user:
        # claim_token failed; propagate its response untouched.
        return response
    self.user.api_key.reset_token()
    headers = remember(self.request, self.user.username)
    return JHTTPOk('Registered', headers=headers)
|
Reset current token by POSTing login and password .
|
6,930
|
def _apply_nested_privacy(self, data):
    """Apply privacy filtering to nested documents inside ``data``.

    Handles both single nested documents and homogeneous lists of them;
    other values pass through unchanged.
    """
    options = {
        'is_admin': self.is_admin,
        'drop_hidden': self.drop_hidden,
    }
    for key, value in data.items():
        if is_document(value):
            data[key] = apply_privacy(self.request)(result=value, **options)
        elif isinstance(value, list) and value and is_document(value[0]):
            data[key] = [apply_privacy(self.request)(result=doc, **options)
                         for doc in value]
    return data
|
Apply privacy to nested documents .
|
6,931
|
def get_root_resource(config):
    """Return (creating and registering if needed) the app's root resource."""
    app_name = get_app_package_name(config)
    roots = config.registry._root_resources
    return roots.setdefault(app_name, Resource(config))
|
Returns the root resource .
|
6,932
|
def get_default_view_path(resource):
    """Return the dotted path to the default view class for ``resource``."""
    parts = [ancestor.member_name for ancestor in resource.ancestors]
    parts.append(resource.collection_name or resource.member_name)
    if resource.prefix:
        # The prefix goes just before the final name component.
        parts.insert(-1, resource.prefix)
    view_file = '_'.join(parts)
    view = '%s:%sView' % (view_file, snake2camel(view_file))
    app_name = get_app_package_name(resource.config)
    return '%s.views.%s' % (app_name, view)
|
Returns the dotted path to the default view class .
|
6,933
|
def get_ancestors(self):
    """Return (and cache) the list of ancestor resources, root first."""
    if self._ancestors:
        return self._ancestors
    if not self.parent:
        return []
    node = self.resource_map.get(self.parent.uid)
    while node and node.member_name:
        self._ancestors.append(node)
        node = node.parent
    self._ancestors.reverse()
    return self._ancestors
|
Returns the list of ancestor resources .
|
6,934
|
def add_from_child(self, resource, **kwargs):
    """Recursively add ``resource`` and all of its children under self."""
    added = self.add(resource.member_name,
                     resource.collection_name, **kwargs)
    for child_resource in resource.children:
        added.add_from_child(child_resource, **kwargs)
|
Add a resource with its all children resources to the current resource .
|
6,935
|
def add(self, path):
    """Register a data set path under its base file name (sans extension)."""
    filename = os.path.split(path)[1]
    dataset_name = filename.split('.')[0]
    self.list[dataset_name] = path
|
Add the path of a data set to the list of available sets
|
6,936
|
def unpack(self, name):
    """Load the named (gzip-pickled) data set into a pandas DataFrame."""
    dataset_path = self.list[name]
    return pd.read_pickle(dataset_path, compression='gzip')
|
Unpacks a data set to a Pandas DataFrame
|
6,937
|
def six_frame(genome, table, minimum=10):
    """Translate each fasta sequence into six reading frames.

    Yields [header, protein] for every stop-codon-delimited fragment of
    at least ``minimum`` residues, over both strands ('f'/'rc') and the
    three frame offsets of each.
    """
    for seq in parse_fasta(genome):
        # Normalise to DNA (RNA 'U' -> 'T'); ambiguous alphabet allowed.
        dna = Seq(seq[1].upper().replace('U', 'T'), IUPAC.ambiguous_dna)
        counter = 0
        for sequence in ['f', dna], ['rc', dna.reverse_complement()]:
            direction, sequence = sequence
            for frame in range(0, 3):
                # Split the translation on stop codons ('*').
                for prot in sequence[frame:].translate(table=table, to_stop=False).split('*'):
                    if len(prot) < minimum:
                        continue
                    counter += 1
                    header = '%s_%s table=%s frame=%s-%s %s' % (
                        seq[0].split()[0], counter, table,
                        frame + 1, direction, ' '.join(seq[0].split()[1:]))
                    yield [header, prot]
|
translate each sequence into six reading frames
|
6,938
|
def check_gaps(matches, gap_threshold=0):
    """Return [end, start] spans between consecutive alignment windows
    whose separation meets ``gap_threshold`` (windows ordered by start)."""
    ordered = sorted(matches, key=itemgetter(0))
    gap_spans = []
    for previous, current in zip(ordered, ordered[1:]):
        if current[0] - previous[1] >= gap_threshold:
            gap_spans.append([previous[1], current[0]])
    return gap_spans
|
check for large gaps between alignment windows
|
6,939
|
def check_overlap(current, hit, overlap=200):
    """Return True when ``hit`` overlaps any previous hit's model
    coordinates by at least ``overlap`` positions (i.e. this hit likely
    belongs to another 16S rRNA gene copy)."""
    hit_coords = hit[2:4]
    return any(get_overlap(hit_coords, prev[2:4]) >= overlap
               for prev in current)
|
determine if sequence has already hit the same part of the model indicating that this hit is for another 16S rRNA gene
|
6,940
|
def find_coordinates(hmms, bit_thresh):
    """Find 16S rRNA gene sequence coordinates from hmm search hits.

    Hits are parsed, reduced to the best model per sequence, grouped,
    and each group summarised as [model, strand, coords, matches, gaps].
    """
    seq2hmm = parse_hmm(hmms, bit_thresh)
    seq2hmm = best_model(seq2hmm)
    group2hmm = {}
    for seq, info in list(seq2hmm.items()):
        group2hmm[seq] = {}
        # info = [model, [hit, ...]]
        for group_num, group in enumerate(hit_groups(info[1])):
            # The strand of the highest-scoring hit decides the group strand.
            best = sorted(group, reverse=True, key=itemgetter(-1))[0]
            strand = best[5]
            coordinates = [i[0] for i in group] + [i[1] for i in group]
            coordinates = [min(coordinates), max(coordinates), strand]
            # Only hits on the chosen strand participate in gap checks.
            matches = [i for i in group if i[5] == strand]
            gaps = check_gaps(matches)
            group2hmm[seq][group_num] = [info[0], strand, coordinates, matches, gaps]
    return group2hmm
|
find 16S rRNA gene sequence coordinates
|
6,941
|
def get_info(line, bit_thresh):
    """Parse one tabular hit line from cmsearch (>=18 cols) or
    ssu-cmsearch (9 cols).

    Returns (id, model, bit, sstart, send, mstart, mend, strand, inc)
    with sequence and model coordinates normalised to min/max order.
    Exits with an error message on unsupported formats.
    """
    if len(line) >= 18:
        id, model, bit, inc = line[0].split()[0], line[2], float(line[14]), line[16]
        sstart, send, strand = int(line[7]), int(line[8]), line[9]
        mstart, mend = int(line[5]), int(line[6])
    elif len(line) == 9:
        if bit_thresh == 0:
            print('# ssu-cmsearch does not include a model-specific inclusion threshold, ', file=sys.stderr)
            print('# please specify a bit score threshold', file=sys.stderr)
            exit()
        id, model, bit = line[1].split()[0], line[0], float(line[6])
        inc = '!'
        sstart, send = int(line[2]), int(line[3])
        # BUG FIX: this previously hard-coded int(4), int(5); read the
        # model coordinates from the line like every other field.
        mstart, mend = int(line[4]), int(line[5])
        if send >= sstart:
            strand = '+'
        else:
            strand = '-'
    else:
        print('# unsupported hmm format:', file=sys.stderr)
        print('# provide tabular output from ssu-cmsearch and cmsearch supported', file=sys.stderr)
        exit()
    coords = [sstart, send]
    sstart, send = min(coords), max(coords)
    mcoords = [mstart, mend]
    mstart, mend = min(mcoords), max(mcoords)
    return id, model, bit, sstart, send, mstart, mend, strand, inc
|
get info from either ssu - cmsearch or cmsearch output
|
6,942
|
def check_buffer(coords, length, buffer):
    """Return [left, right]: how much buffer fits on each side of ``coords``
    within a sequence of the given ``length``."""
    left = min(coords[0], buffer)
    right = min(length - coords[1], buffer)
    return [left, right]
|
check to see how much of the buffer is being used
|
6,943
|
def _import_parsers():
    """Lazily import the metadata parser modules.

    Avoids circular imports between this module and utils; each group
    of module-level globals is imported only once (subsequent calls are
    no-ops for already-populated groups).
    """
    global ARCGIS_NODES
    global ARCGIS_ROOTS
    global ArcGISParser
    global FGDC_ROOT
    global FgdcParser
    global ISO_ROOTS
    global IsoParser
    global VALID_ROOTS
    if ARCGIS_NODES is None or ARCGIS_ROOTS is None or ArcGISParser is None:
        from gis_metadata.arcgis_metadata_parser import ARCGIS_NODES
        from gis_metadata.arcgis_metadata_parser import ARCGIS_ROOTS
        from gis_metadata.arcgis_metadata_parser import ArcGISParser
    if FGDC_ROOT is None or FgdcParser is None:
        from gis_metadata.fgdc_metadata_parser import FGDC_ROOT
        from gis_metadata.fgdc_metadata_parser import FgdcParser
    if ISO_ROOTS is None or IsoParser is None:
        from gis_metadata.iso_metadata_parser import ISO_ROOTS
        from gis_metadata.iso_metadata_parser import IsoParser
    if VALID_ROOTS is None:
        # FGDC has a single root; ArcGIS and ISO contribute lists.
        VALID_ROOTS = {FGDC_ROOT}.union(ARCGIS_ROOTS + ISO_ROOTS)
|
Lazy imports to prevent circular dependencies between this module and utils
|
6,944
|
def _init_metadata(self):
    """Populate instance attributes from the data map.

    Each mapped property is parsed from the XML tree and set on self;
    ``has_data`` records whether any property produced a value.
    """
    if self._data_map is None:
        self._init_data_map()
    validate_properties(self._data_map, self._metadata_props)
    for prop in self._data_map:
        value = parse_property(self._xml_tree, None, self._data_map, prop)
        setattr(self, prop, value)
    self.has_data = any(getattr(self, prop) for prop in self._data_map)
|
Dynamically sets attributes from a Dictionary passed in by children . The Dictionary will contain the name of each attribute as keys and either an XPATH mapping to a text value in _xml_tree or a function that takes no parameters and returns the intended value .
|
6,945
|
def _parse_complex(self, prop):
    """Parse a single complex structure for ``prop`` (no xpath root)."""
    xpath_map = self._data_structures[prop]
    return parse_complex(self._xml_tree, None, xpath_map, prop)
|
Default parsing operation for a complex struct
|
6,946
|
def _parse_complex_list(self, prop):
    """Parse a list of complex structures for ``prop``."""
    root = self._get_xroot_for(prop)
    structure = self._data_structures[prop]
    return parse_complex_list(self._xml_tree, root, structure, prop)
|
Default parsing operation for lists of complex structs
|
6,947
|
def _parse_dates(self, prop=DATES):
    """Parse the Date Types structure from the metadata tree."""
    date_map = self._data_structures[prop]
    return parse_dates(self._xml_tree, date_map)
|
Creates and returns a Date Types data structure parsed from the metadata
|
6,948
|
def _update_complex(self, **update_props):
    """Update a single complex structure from ``update_props``."""
    prop = update_props['prop']
    return update_complex(
        xpath_root=self._get_xroot_for(prop),
        xpath_map=self._data_structures[prop],
        **update_props
    )
|
Default update operation for a complex struct
|
6,949
|
def _update_complex_list(self, **update_props):
    """Update a list of complex structures from ``update_props``."""
    prop = update_props['prop']
    return update_complex_list(
        xpath_root=self._get_xroot_for(prop),
        xpath_map=self._data_structures[prop],
        **update_props
    )
|
Default update operation for lists of complex structs
|
6,950
|
def spec(self, postf_un_ops: str) -> list:
    """Return the prefix unary operators spec list.

    One entry per (style, bracket) combination; only the first entry
    carries the compiled regex pattern covering the whole family.
    """
    spec = [(l + op, {'pat': self.pat(pat),
                      'postf': self.postf(r, postf_un_ops),
                      'regex': None})
            for op, pat in self.styles.items()
            for l, r in self.brackets]
    # Only the first spec entry gets the combined regex.
    spec[0][1]['regex'] = self.regex_pat.format(
        _ops_regex(l for l, r in self.brackets),
        _ops_regex(self.styles.keys()))
    return spec
|
Return prefix unary operators list
|
6,951
|
def one_symbol_ops_str(self) -> str:
    """Return a regex-escaped string of all single-character operators."""
    single_chars = [op for op in self.ops.keys() if len(op) == 1]
    return re.escape(''.join(single_chars))
|
Regex - escaped string with all one - symbol operators
|
6,952
|
def plot_gaps(plot, columns):
    """Plot the percentage of gaps at each alignment position."""
    from plot_window import window_plot_convolve as plot_window
    gap_percentages = [100 - value for value in columns]
    plot_window([gap_percentages], len(columns) * .01, plot)
|
plot % of gaps at each position
|
6,953
|
def sample_group(sid, groups):
    """Return the name of the first group whose sids contain ``sid``.

    Returns None when the sample id belongs to no group.
    """
    for group_name, group in groups.items():
        if sid in group.sids:
            return group_name
|
Iterate through all categories in an OrderedDict and return category name if SampleID present in that category .
|
6,954
|
def combine_sets(*sets):
    """Union any number of sets into a single new set."""
    merged = set()
    merged.update(*sets)
    return merged
|
Combine multiple sets to create a single larger set .
|
6,955
|
def unique_otuids(groups):
    """Return the OTU ids unique to each group.

    Each group name maps to its id set minus all ids present in any
    other group.
    """
    uniques = {key: set() for key in groups}
    # list() makes the slicing below valid on Python 3, where
    # dict.values() returns a non-sliceable view.
    all_values = list(groups.values())
    for i, group in enumerate(groups):
        others = all_values[:i] + all_values[i + 1:]
        combined = set().union(*others)
        uniques[group] = groups[group].difference(combined)
    return uniques
|
Get unique OTUIDs of each category .
|
6,956
|
def shared_otuids(groups):
    """Return shared OTU ids for every combination (size >= 2) of groups.

    Also prints the per-group OTU counts, as before.  Keys of the
    returned mapping are " & "-joined group names.
    """
    for group in sorted(groups):
        print("Number of OTUs in {0}: {1}".format(
            group, len(groups[group].results["otuids"])))
    number_of_categories = len(groups)
    shared = defaultdict()
    for size in range(2, number_of_categories + 1):
        for combo in combinations(sorted(groups), size):
            combo_name = " & ".join(list(combo))
            # Start from a copy of the first group's ids, then intersect
            # with the rest.  (The original re-assigned the same copy
            # once per combo member; one assignment is equivalent.)
            shared[combo_name] = groups[combo[0]].results["otuids"].copy()
            for member in combo[1:]:
                shared[combo_name].intersection_update(
                    groups[member].results["otuids"])
    return shared
|
Get shared OTUIDs between all unique combinations of groups .
|
6,957
|
def write_uniques(path, prefix, uniques):
    """Write one '<prefix>_<group>.txt' file per group under ``path``,
    with one OTU id per line."""
    for group_name, otu_ids in uniques.items():
        out_path = osp.join(path, "{}_{}.txt".format(prefix, group_name))
        with open(out_path, "w") as handle:
            handle.write("\n".join(otu_ids))
|
Given a path the method writes out one file for each group name in the uniques dictionary , with each file named in the pattern prefix_group . txt .
|
6,958
|
def storeFASTA(fastaFNH):
    """Parse FASTA records after reading the entire file into memory."""
    contents = file_handle(fastaFNH).read()
    records = []
    for chunk in contents.split(">")[1:]:
        lines = chunk.strip().split("\n")
        header = lines[0]
        records.append(FASTARecord(header.split()[0],
                                   header.split(None, 1)[1],
                                   "".join(lines[1:])))
    return records
|
Parse the records in a FASTA - format file by first reading the entire file into memory .
|
6,959
|
def parseFASTA(fastaFNH):
    """Parse FASTA records line by line, keeping the file open.

    Comment lines starting with ';' are ignored.  Blank lines are now
    skipped (the original indexed line[0] and raised IndexError on
    them), and headers without a description get an empty one instead
    of raising.
    """
    recs = []
    seq = []
    seqID = ""
    descr = ""
    for line in file_handle(fastaFNH):
        line = line.strip()
        if not line:
            continue  # skip blank lines instead of crashing on line[0]
        if line[0] == ";":
            continue
        if line[0] == ">":
            if seq:
                recs.append(FASTARecord(seqID, descr, "".join(seq)))
                seq = []
            parts = line[1:].split(None, 1)
            seqID = parts[0]
            descr = parts[1] if len(parts) > 1 else ""
        else:
            seq.append(line)
    if seq:
        recs.append(FASTARecord(seqID, descr, "".join(seq)))
    return recs
|
Parse the records in a FASTA - format file keeping the file open and reading through one line at a time .
|
6,960
|
def get(self, test_id):
    """Fetch a stored report by its test id.

    Raises KeyError when no report with that id exists in the DB.
    """
    self.select('*', 'test_id=?', [test_id])
    row = self._cursor.fetchone()
    if not row:
        raise KeyError('No report with test id %s in the DB' % test_id)
    values = self.row_to_dict(row)
    content = self._deserialize_dict(values['content'])
    return Report.from_dict(content)
|
get report by the test id
|
6,961
|
def _serialize_dict(cls, data):
    """Serialize ``data``: pickle -> zlib-compress -> base64 text."""
    pickled = cPickle.dumps(data, protocol=2)
    return b64encode(zlib.compress(pickled)).decode()
|
serializes a dictionary
|
6,962
|
def _deserialize_dict(cls, data):
    """Inverse of _serialize_dict: base64 -> decompress -> unpickle."""
    compressed = b64decode(data.encode())
    return cPickle.loads(zlib.decompress(compressed))
|
deserializes a dictionary
|
6,963
|
def _run_sequence(self, sequence):
    """Run a single fuzzing sequence edge by edge.

    Each edge's callback (if any) is passed the previous response;
    session data is re-read after callbacks (they may mutate target
    state) and pushed into the destination node before transmission.
    """
    self._check_pause()
    self._pre_test()
    session_data = self.target.get_session_data()
    self._test_info()
    resp = None
    for edge in sequence:
        if edge.callback:
            edge.callback(self, edge, resp)
        # Callbacks may have changed target state; refresh session data.
        session_data = self.target.get_session_data()
        node = edge.dst
        node.set_session_data(session_data)
        resp = self._transmit(node)
    return self._post_test()
|
Run a single sequence
|
6,964
|
def _transmit ( self , node ) : payload = node . render ( ) . tobytes ( ) self . _last_payload = payload try : return self . target . transmit ( payload ) except Exception as e : self . logger . error ( 'Error in transmit: %s' , e ) raise
|
Transmit node data to target .
|
6,965
|
def _generate_rpc_method(self, method):
    """Build a callable that performs a JSON-RPC 2.0 call for ``method``."""
    def rpc_call(**kwargs):
        msg_id = self.get_unique_msg_id()
        payload = {
            'method': method,
            'params': encode_data(kwargs),
            'jsonrpc': '2.0',
            'id': msg_id,
        }
        response = requests.post(self.url, data=json.dumps(payload),
                                 headers=self.headers).json()
        if 'error' in response:
            # A "no result" error code means the call simply had
            # nothing to return.
            if response['error']['code'] == JSONRPC_NO_RESULT:
                return None
            raise Exception('Got error from RPC server when called "%s" error: %s' % (method, response['error']))
        if 'result' in response:
            return decode_data(response['result'])
    return rpc_call
|
Generate a function that performs rpc call
|
6,966
|
def _parse_request(self):
    """Parse the JSON-RPC request body into instance attributes."""
    # Defaults in case parsing fails midway.
    self.req_method = 'unknown'
    self.req_params = {}
    self.req_rpc_version = '2.0'
    self.req_id = 0
    length = int(self.headers.get('content-length'))
    self.data = self.rfile.read(length)
    parsed = json.loads(self.data)
    self.req_method = parsed['method']
    self.req_params = decode_data(parsed['params'])
    self.req_rpc_version = parsed['jsonrpc']
    self.req_id = parsed['id']
|
Parse the request
|
6,967
|
def send_result(self, additional_dict):
    """Send a JSON-RPC result back to the client with HTTP 200."""
    self.send_response(200)
    self.send_header("Content-type", "application/json")
    response = {
        'jsonrpc': self.req_rpc_version,
        'id': self.req_id,
    }
    response.update(additional_dict)
    body = json.dumps(response)
    self.send_header("Content-length", len(body))
    self.end_headers()
    self.wfile.write(body.encode())
|
Send a result to the RPC client
|
6,968
|
def set_current_value(self, value):
    """Set the field's current value and return its encoded rendering."""
    self._current_value = value
    self._current_rendered = self._encode_value(value)
    return self._current_rendered
|
Sets the current value of the field
|
6,969
|
def mutate(self):
    """Advance the field to its next mutation.

    Returns False once all mutations have been exhausted, True otherwise.
    """
    self._initialize()
    if self._exhausted():
        return False
    self._current_index += 1
    self._mutate()
    return True
|
Mutate the field
|
6,970
|
def render(self, ctx=None):
    """Render and return the field's current encoded value.

    Default values keep their cached rendering; mutated values are
    re-encoded on every render.
    """
    self._initialize()
    if self.is_default():
        return self._current_rendered
    self._current_rendered = self._encode_value(self._current_value)
    return self._current_rendered
|
Render the current value of the field
|
6,971
|
def reset(self):
    """Restore the field to its pristine, un-mutated default state."""
    self.offset = None
    self._current_index = -1
    self._current_value = self._default_value
    self._current_rendered = self._default_rendered
|
Reset the field to its default state
|
6,972
|
def resolve_field(self, field):
    """Resolve a field reference into a field object.

    Accepts a BaseField instance (returned as-is), an absolute '/name'
    path, or a plain name searched here and then in enclosing
    containers.  Raises KittyException when nothing matches.
    """
    if isinstance(field, BaseField):
        return field
    name = field
    if name.startswith('/'):
        return self.resolve_absolute_name(name)
    resolved = self.scan_for_field(name)
    if not resolved:
        container = self.enclosing
        if container:
            resolved = container.resolve_field(name)
    if not resolved:
        raise KittyException('Could not resolve field by name (%s)' % name)
    return resolved
|
Resolve a field from name
|
6,973
|
def stop(self):
    """Stop the thread; returns after the thread has stopped.

    Joins with a 1-second timeout and reports a failure when the
    thread is still running afterwards.
    """
    self._stop_event.set()
    if self._func_stop_event is not None:
        self._func_stop_event.set()
    self.join(timeout=1)
    # is_alive() replaces isAlive(), which was removed in Python 3.9
    # (is_alive exists on Python 2.6+ as well).
    if self.is_alive():
        print('Failed to stop thread')
|
stop the thread return after thread stopped
|
6,974
|
def _initialize ( self ) : if self . _field_name : self . _field = self . resolve_field ( self . _field_name ) if not self . _field : raise KittyException ( 'Could not resolve field name %s' % self . _field_name )
|
We override _initialize as we want to resolve the field each time
|
6,975
|
def _calculate(self, field):
    """Return the index of ``field`` among its container's rendered fields.

    Fields without an enclosing container yield 0; fields missing from
    the rendered list yield the list length.
    """
    container = field.enclosing
    if not container:
        return 0
    rendered = container.get_rendered_fields(RenderContext(self))
    if field in rendered:
        return rendered.index(field)
    return len(rendered)
|
We want to avoid trouble so if the field is not enclosed by any other field we just return 0 .
|
6,976
|
def _calculate ( self , field ) : base_offset = 0 if self . base_field is not None : base_offset = self . base_field . offset target_offset = self . _field . offset if ( target_offset is None ) or ( base_offset is None ) : return 0 return target_offset - base_offset
|
If the offset is unknown return 0
|
6,977
|
def get_report(self):
    """Return a parsed Report object.

    Wraps the meta call, since the raw report dict needs to be parsed.
    """
    raw = self._meta_get_report()
    return Report.from_dict(raw)
|
Wraps get_report, since the raw report dict needs to be parsed into a Report object.
|
6,978
|
def setup ( self ) : if self . controller : self . controller . setup ( ) for monitor in self . monitors : monitor . setup ( )
|
Make sure the target is ready for fuzzing including monitors and controllers
|
6,979
|
def teardown ( self ) : if self . controller : self . controller . teardown ( ) for monitor in self . monitors : monitor . teardown ( )
|
Clean up the target once all tests are completed
|
6,980
|
def stop(self):
    """Stop the fuzzing session."""
    self.logger.info('Stopping client fuzzer')
    # Shut down the control thread first, then release anyone blocked
    # waiting for a mutation, before delegating to the base class.
    self._target_control_thread.stop()
    self.target.signal_mutated()
    super(ClientFuzzer, self).stop()
|
Stop the fuzzing session
|
6,981
|
def _should_fuzz_node(self, fuzz_node, stage):
    """Check whether the given node should be fuzzed at this stage.

    The node matches if the stage is ClientFuzzer.STAGE_ANY, or if the
    stage name matches the node's name *and* the node is the last one
    in the fuzzed path.

    :param fuzz_node: node to check
    :param stage: current stage name (or ClientFuzzer.STAGE_ANY)
    :return: True if the node should be fuzzed, False otherwise
    """
    if stage == ClientFuzzer.STAGE_ANY:
        return True
    if fuzz_node.name.lower() != stage.lower():
        # Fix: the original fell through here and implicitly returned
        # None; now we always return an explicit bool.
        return False
    # Only fuzz when we have reached the last node in the path.
    return self._index_in_path == len(self._fuzz_path) - 1
|
The matching stage is either the name of the last node or ClientFuzzer . STAGE_ANY .
|
6,982
|
def get_mutation(self, stage, data):
    """Get the next mutation, if we are in the correct stage.

    :param stage: current stage name
    :param data: session data to push into the fuzzed node
    :return: the mutated payload, or None if this stage is not fuzzed
    """
    payload = None
    if self._keep_running():
        fuzz_node = self._fuzz_path[self._index_in_path].dst
        if self._should_fuzz_node(fuzz_node, stage):
            fuzz_node.set_session_data(data)
            payload = fuzz_node.render().tobytes()
            self._last_payload = payload
        else:
            # Not our stage - advance along the fuzzed path instead.
            self._update_path_index(stage)
    if payload:
        self._notify_mutated()
    # Record every requested stage, fuzzed or not, for later reporting.
    self._requested_stages.append((stage, payload))
    return payload
|
Get the next mutation if in the correct stage
|
6,983
|
def SInt64(value, min_value=None, max_value=None, encoder=ENC_INT_DEFAULT, fuzzable=True, name=None, full_range=False):
    """Signed 64-bit field."""
    opts = dict(
        signed=True,
        min_value=min_value,
        max_value=max_value,
        encoder=encoder,
        fuzzable=fuzzable,
        name=name,
        full_range=full_range,
    )
    return BitField(value, 64, **opts)
|
Signed 64 - bit field
|
6,984
|
def BE8(value, min_value=None, max_value=None, fuzzable=True, name=None, full_range=False):
    """8-bit field, big endian encoded."""
    opts = dict(
        min_value=min_value,
        max_value=max_value,
        encoder=ENC_INT_BE,
        fuzzable=fuzzable,
        name=name,
        full_range=full_range,
    )
    return UInt8(value, **opts)
|
8 - bit field Big endian encoded
|
6,985
|
def BE16(value, min_value=None, max_value=None, fuzzable=True, name=None, full_range=False):
    """16-bit field, big endian encoded."""
    opts = dict(
        min_value=min_value,
        max_value=max_value,
        encoder=ENC_INT_BE,
        fuzzable=fuzzable,
        name=name,
        full_range=full_range,
    )
    return UInt16(value, **opts)
|
16 - bit field Big endian encoded
|
6,986
|
def BE32(value, min_value=None, max_value=None, fuzzable=True, name=None, full_range=False):
    """32-bit field, big endian encoded."""
    opts = dict(
        min_value=min_value,
        max_value=max_value,
        encoder=ENC_INT_BE,
        fuzzable=fuzzable,
        name=name,
        full_range=full_range,
    )
    return UInt32(value, **opts)
|
32 - bit field Big endian encoded
|
6,987
|
def BE64(value, min_value=None, max_value=None, fuzzable=True, name=None, full_range=False):
    """64-bit field, big endian encoded."""
    opts = dict(
        min_value=min_value,
        max_value=max_value,
        encoder=ENC_INT_BE,
        fuzzable=fuzzable,
        name=name,
        full_range=full_range,
    )
    return UInt64(value, **opts)
|
64 - bit field Big endian encoded
|
6,988
|
def LE8(value, min_value=None, max_value=None, fuzzable=True, name=None, full_range=False):
    """8-bit field, little endian encoded."""
    opts = dict(
        min_value=min_value,
        max_value=max_value,
        encoder=ENC_INT_LE,
        fuzzable=fuzzable,
        name=name,
        full_range=full_range,
    )
    return UInt8(value, **opts)
|
8 - bit field Little endian encoded
|
6,989
|
def LE16(value, min_value=None, max_value=None, fuzzable=True, name=None, full_range=False):
    """16-bit field, little endian encoded."""
    opts = dict(
        min_value=min_value,
        max_value=max_value,
        encoder=ENC_INT_LE,
        fuzzable=fuzzable,
        name=name,
        full_range=full_range,
    )
    return UInt16(value, **opts)
|
16 - bit field Little endian encoded
|
6,990
|
def LE32(value, min_value=None, max_value=None, fuzzable=True, name=None, full_range=False):
    """32-bit field, little endian encoded."""
    opts = dict(
        min_value=min_value,
        max_value=max_value,
        encoder=ENC_INT_LE,
        fuzzable=fuzzable,
        name=name,
        full_range=full_range,
    )
    return UInt32(value, **opts)
|
32 - bit field Little endian encoded
|
6,991
|
def LE64(value, min_value=None, max_value=None, fuzzable=True, name=None, full_range=False):
    """64-bit field, little endian encoded."""
    opts = dict(
        min_value=min_value,
        max_value=max_value,
        encoder=ENC_INT_LE,
        fuzzable=fuzzable,
        name=name,
        full_range=full_range,
    )
    return UInt64(value, **opts)
|
64 - bit field Little endian encoded
|
6,992
|
def _eintr_retry(func, *args):
    """Call ``func(*args)``, restarting it if interrupted by EINTR."""
    while True:
        try:
            return func(*args)
        except (OSError, select.error) as exc:
            # Only EINTR is retried; every other error propagates.
            if exc.args[0] == errno.EINTR:
                continue
            raise
|
restart a system call interrupted by EINTR
|
6,993
|
def serve_forever(self, poll_interval=0.5):
    """Handle one request at a time until shutdown.

    Polls for shutdown every poll_interval seconds. Ignores
    self.timeout. If you need to do periodic tasks, do them in
    another thread.
    """
    self.__is_shut_down.clear()
    try:
        while not self.__shutdown_request:
            # select() is wrapped so a stray signal (EINTR) does not
            # break the serve loop.
            r, w, e = _eintr_retry(select.select, [self], [], [], poll_interval)
            if self in r:
                self._handle_request_noblock()
    finally:
        # Always acknowledge the shutdown request, even on error,
        # so shutdown() callers are released.
        self.__shutdown_request = False
        self.__is_shut_down.set()
|
Handle one request at a time until shutdown . Polls for shutdown every poll_interval seconds . Ignores self . timeout . If you need to do periodic tasks do them in another thread .
|
6,994
|
def _handle_request_noblock(self):
    """Handle one request without blocking.

    Assumes the socket was reported readable (by select) before this
    was called, so get_request() should not block.
    """
    try:
        request, client_address = self.get_request()
    except socket.error:
        return
    if self.verify_request(request, client_address):
        try:
            self.process_request(request, client_address)
        except Exception:
            # Fix: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit.
            self.handle_error(request, client_address)
            self.shutdown_request(request)
|
Handle one request without blocking .
|
6,995
|
def process_request(self, request, client_address):
    """Call finish_request() to handle the request, then shut it down."""
    self.finish_request(request, client_address)
    self.shutdown_request(request)
|
Call finish_request .
|
6,996
|
def process_request_thread(self, request, client_address):
    """Process the request (thread variant).

    Same as process_request(), but any exception is logged and routed
    through handle_error() instead of propagating out of the thread.
    """
    try:
        self.finish_request(request, client_address)
        self.shutdown_request(request)
    except Exception as e:
        self.logger.error(e)
        self.handle_error(request, client_address)
        # Make sure the request is shut down even on failure.
        self.shutdown_request(request)
|
Process the request .
|
6,997
|
def handle_error(self, request, client_address):
    """Log the active exception for a failed request, then stop the server.

    Adds self.stop() so the whole server stops on error.
    """
    log = self.logger.error
    sep = '-' * 40
    log(sep)
    log('Exception happened during processing of request from %s:%s' % (client_address[0], client_address[1]))
    log(traceback.format_exc())
    log(sep)
    self.stop()
|
Also calls self.stop() so the server stops after an error.
|
6,998
|
def _restart_target(self):
    """Restart our target server process.

    Kills the previous server (if it is still running), then spawns a
    fresh one; short sleeps give the process time to die / come up.
    """
    if self._server:
        if self._server.returncode is None:
            # Still running - kill it before starting a new instance.
            self._server.kill()
            time.sleep(0.2)
    self._server = subprocess.Popen("python session_server.py", stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    time.sleep(0.2)
|
Restart our Target .
|
6,999
|
def clear(self):
    """Set the report to its defaults.

    This will clear the report, keeping only the name and setting the
    failure status to the default.
    """
    self._data_fields = {}
    self._sub_reports = {}
    self.set_status(Report.FAILED if self._default_failed else Report.PASSED)
    # Re-add the two fields that always survive a clear.
    self.add('name', self._name)
    self.add('sub_reports', [])
|
Set the report to its defaults . This will clear the report keeping only the name and setting the failure status to the default .
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.