idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
56,100
def _getUserAuthObject ( self , user , connection ) : credentials = self . _getCredentials ( user ) userAuthObject = AutomaticUserAuthClient ( user , connection , ** credentials ) return userAuthObject
Get a SSHUserAuthClient object to use for authentication
56,101
def _verifyHostKey ( self , hostKey , fingerprint ) : if fingerprint in self . knownHosts : return defer . succeed ( True ) return defer . fail ( UnknownHostKey ( hostKey , fingerprint ) )
Called when ssh transport requests us to verify a given host key . Return a deferred that callback if we accept the key or errback if we decide to reject it .
56,102
def yield_once ( iterator ) : @ wraps ( iterator ) def yield_once_generator ( * args , ** kwargs ) : yielded = set ( ) for item in iterator ( * args , ** kwargs ) : if item not in yielded : yielded . add ( item ) yield item return yield_once_generator
Decorator to make an iterator returned by a method yield each result only once .
56,103
def _to_list ( var ) : if isinstance ( var , list ) : return var elif var is None : return [ ] elif isinstance ( var , str ) or isinstance ( var , dict ) : return [ var ] else : try : return list ( var ) except TypeError : return [ var ]
Make variable to list .
56,104
def arguments_to_lists ( function ) : def l_function ( * args , ** kwargs ) : l_args = [ _to_list ( arg ) for arg in args ] l_kwargs = { } for key , value in kwargs . items ( ) : l_kwargs [ key ] = _to_list ( value ) return function ( * l_args , ** l_kwargs ) return l_function
Decorator for a function that converts all arguments to lists .
56,105
def generate_eq ( * members ) : def decorator ( cls ) : def eq ( self , other ) : if not isinstance ( other , cls ) : return False return all ( getattr ( self , member ) == getattr ( other , member ) for member in members ) def ne ( self , other ) : return not eq ( self , other ) cls . __eq__ = eq cls . __ne__ = ne return cls return decorator
Decorator that generates equality and inequality operators for the decorated class . The given members as well as the type of self and other will be taken into account .
56,106
def enforce_signature ( function ) : argspec = inspect . getfullargspec ( function ) annotations = argspec . annotations argnames = argspec . args unnamed_annotations = { } for i , arg in enumerate ( argnames ) : if arg in annotations : unnamed_annotations [ i ] = ( annotations [ arg ] , arg ) def decorated ( * args , ** kwargs ) : for i , annotation in unnamed_annotations . items ( ) : if i < len ( args ) : assert_right_type ( args [ i ] , annotation [ 0 ] , annotation [ 1 ] ) for argname , argval in kwargs . items ( ) : if argname in annotations : assert_right_type ( argval , annotations [ argname ] , argname ) return function ( * args , ** kwargs ) return decorated
Enforces the signature of the function by throwing TypeError s if invalid arguments are provided . The return value is not checked .
56,107
def as_string ( self ) : if self . headers_only : self . msgobj = self . _get_content ( ) from email . generator import Generator fp = StringIO ( ) g = Generator ( fp , maxheaderlen = 60 ) g . flatten ( self . msgobj ) text = fp . getvalue ( ) return text
Get the underlying message object as a string
56,108
def iteritems ( self ) : for n , v in self . msgobj . __dict__ [ "_headers" ] : yield n . lower ( ) , v return
Present the email headers
56,109
def _set_flag ( self , flag ) : self . folder . _invalidate_cache ( ) def replacer ( m ) : return "%s/%s.%s%s" % ( joinpath ( self . folder . base , self . folder . folder , "cur" ) , m . group ( "key" ) , m . group ( "hostname" ) , ":2,%s" % ( "%s%s" % ( m . group ( "flags" ) , flag ) if m . group ( "flags" ) else flag ) ) newfilename = self . msgpathre . sub ( replacer , self . filename ) self . filesystem . rename ( self . filename , newfilename ) self . filename = newfilename
Turns the specified flag on
56,110
def _get_message ( self , key , since = None ) : stored = self . store [ key ] if isinstance ( stored , dict ) : filename = stored [ "path" ] folder = stored [ "folder" ] if since and since > 0.0 : st = stat ( filename ) if st . st_mtime < since : return None stored = MdMessage ( key , filename = filename , folder = folder , filesystem = folder . filesystem ) self . store [ key ] = stored else : if since and since > 0.0 : st = stat ( stored . filename ) if st . st_mtime < since : return None return stored
Return the MdMessage object for the key .
56,111
def _foldername ( self , additionalpath = "" ) : if not self . _foldername_cache . get ( additionalpath ) : fn = joinpath ( self . base , self . folder , additionalpath ) if not self . is_subfolder else joinpath ( self . base , ".%s" % self . folder , additionalpath ) self . _foldername_cache [ additionalpath ] = fn return self . _foldername_cache [ additionalpath ]
Dot decorate a folder name .
56,112
def folders ( self ) : entrys = self . filesystem . listdir ( abspath ( self . _foldername ( ) ) ) regex = re . compile ( "\\..*" ) just_dirs = dict ( [ ( d , d ) for d in entrys if regex . match ( d ) ] ) folder = self . _foldername ( ) filesystem = self . filesystem class FolderList ( object ) : def __iter__ ( self ) : dirs = list ( just_dirs . keys ( ) ) dirs . sort ( ) dirs . reverse ( ) for dn in dirs : yield MdFolder ( dn [ 1 : ] , base = folder , subfolder = True , filesystem = filesystem ) return def __list__ ( self ) : return [ dn [ 1 : ] for dn in just_dirs ] def __contains__ ( self , name ) : return just_dirs . __contains__ ( ".%s" % name ) def __getitem__ ( self , name ) : return MdFolder ( just_dirs [ ".%s" % name ] [ 1 : ] , base = folder , subfolder = True , filesystem = filesystem ) f = FolderList ( ) return f
Return a map of the subfolder objects for this folder .
56,113
def move ( self , key , folder ) : path , host , flags = self . _exists ( key ) self . _invalidate_cache ( ) newpath = joinpath ( folder . base , folder . get_name ( ) , "cur" , basename ( path ) ) self . filesystem . rename ( path , newpath ) folder . _invalidate_cache ( )
Move the specified key to folder .
56,114
def _muaprocessnew ( self ) : foldername = self . _foldername ( "new" ) files = self . filesystem . listdir ( foldername ) for filename in files : if filename == "" : continue curfilename = self . _foldername ( joinpath ( "new" , filename ) ) newfilename = joinpath ( self . _cur , "%s:2,%s" % ( filename , "" ) ) self . filesystem . rename ( curfilename , newfilename )
Moves all new files into cur correctly flagging
56,115
def _exists ( self , key ) : filecache , keycache = self . _fileslist ( ) msg = keycache . get ( key , None ) if msg : path = msg . filename meta = filecache [ path ] return path , meta [ "hostname" ] , meta . get ( "flags" , "" ) raise KeyError ( "not found %s" % key )
Find a key in a particular section
56,116
def __put_slice_in_slim ( slim , dataim , sh , i ) : a , b = np . unravel_index ( int ( i ) , sh ) st0 = int ( dataim . shape [ 0 ] * a ) st1 = int ( dataim . shape [ 1 ] * b ) sp0 = int ( st0 + dataim . shape [ 0 ] ) sp1 = int ( st1 + dataim . shape [ 1 ] ) slim [ st0 : sp0 , st1 : sp1 ] = dataim return slim
put one small slice as a tile in a big image
56,117
def _import_data ( data , axis , slice_step , first_slice_offset = 0 ) : try : import SimpleITK as sitk if type ( data ) is sitk . SimpleITK . Image : data = sitk . GetArrayFromImage ( data ) except : pass data = __select_slices ( data , axis , slice_step , first_slice_offset = first_slice_offset ) return data
import ndarray or SimpleITK data
56,118
def index_to_coords ( index , shape ) : coords = [ ] for i in xrange ( 1 , len ( shape ) ) : divisor = int ( np . product ( shape [ i : ] ) ) value = index // divisor coords . append ( value ) index -= value * divisor coords . append ( index ) return tuple ( coords )
convert index to coordinates given the shape
56,119
def on_scroll ( self , event ) : if event . button == 'up' : self . next_slice ( ) if event . button == 'down' : self . prev_slice ( ) self . actual_slice_slider . set_val ( self . actual_slice )
mouse wheel is used for setting slider value
56,120
def on_press ( self , event ) : 'on but-ton press we will see if the mouse is over us and store data' if event . inaxes != self . ax : return self . press = [ event . xdata ] , [ event . ydata ] , event . button
on but - ton press we will see if the mouse is over us and store data
56,121
def on_motion ( self , event ) : 'on motion we will move the rect if the mouse is over us' if self . press is None : return if event . inaxes != self . ax : return x0 , y0 , btn = self . press x0 . append ( event . xdata ) y0 . append ( event . ydata )
on motion we will move the rect if the mouse is over us
56,122
def on_release ( self , event ) : 'on release we reset the press data' if self . press is None : return x0 , y0 , btn = self . press if btn == 1 : color = 'r' elif btn == 2 : color = 'b' btn = self . button_map [ btn ] self . set_seeds ( y0 , x0 , self . actual_slice , btn ) self . press = None self . update_slice ( )
on release we reset the press data
56,123
def get_seed_sub ( self , label ) : sx , sy , sz = np . nonzero ( self . seeds == label ) return sx , sy , sz
Return list of all seeds with specific label
56,124
def push ( self , item ) : hash ( item ) heapq . heappush ( self . _items , item )
Push the value item onto the heap maintaining the heap invariant . If the item is not hashable a TypeError is raised .
56,125
def format_field_by_match ( self , value , match ) : groups = match . groups ( ) fill , align , sign , sharp , zero , width , comma , prec , type_ = groups if not comma and not prec and type_ not in list ( 'fF%' ) : return None if math . isnan ( value ) or math . isinf ( value ) : return None locale = self . numeric_locale prefix = get_prefix ( sign ) if type_ == 'd' : if prec is not None : raise ValueError ( 'precision not allowed in ' 'integer format specifier' ) string = format_number ( value , 0 , prefix , locale ) elif type_ in 'fF%' : format_ = format_percent if type_ == '%' else format_number string = format_ ( value , int ( prec or DEFAULT_PREC ) , prefix , locale ) else : return None if not comma : string = remove_group_symbols ( string , locale ) if not ( fill or align or zero or width ) : return string spec = '' . join ( [ fill or u'' , align or u'>' , zero or u'' , width or u'' ] ) return format ( string , spec )
Formats a field by a Regex match of the format spec pattern .
56,126
def reset ( self ) : self . _start = 0 self . _first_start = 0 self . _stop = time . perf_counter ( ) self . _array = None self . _array_len = 0 self . intervals = [ ] self . _intervals_len = 0
Resets the time intervals
56,127
def read ( filename ) : import os here = os . path . dirname ( os . path . abspath ( __file__ ) ) with open ( os . path . join ( here , filename ) ) as fd : return fd . read ( )
Read a file relative to setup . py location .
56,128
def find_version ( filename ) : import re content = read ( filename ) version_match = re . search ( r"^__version__ = ['\"]([^'\"]*)['\"]" , content , re . M ) if version_match : return version_match . group ( 1 ) raise RuntimeError ( 'Unable to find version string.' )
Find package version in file .
56,129
def find_requirements ( filename ) : import string content = read ( filename ) requirements = [ ] for line in content . splitlines ( ) : line = line . strip ( ) if line and line [ : 1 ] in string . ascii_letters : requirements . append ( line ) return requirements
Find requirements in file .
56,130
def generate_uuid ( basedata = None ) : if basedata is None : return str ( uuid . uuid4 ( ) ) elif isinstance ( basedata , str ) : checksum = hashlib . md5 ( basedata ) . hexdigest ( ) return '%8s-%4s-%4s-%4s-%12s' % ( checksum [ 0 : 8 ] , checksum [ 8 : 12 ] , checksum [ 12 : 16 ] , checksum [ 16 : 20 ] , checksum [ 20 : 32 ] )
Provides a _random_ UUID with no input or a UUID4 - format MD5 checksum of any input data provided
56,131
def from_unix ( cls , seconds , milliseconds = 0 ) : base = list ( time . gmtime ( seconds ) ) [ 0 : 6 ] base . append ( milliseconds * 1000 ) return cls ( * base )
Produce a full |datetime . datetime| object from a Unix timestamp
56,132
def to_unix ( cls , timestamp ) : if not isinstance ( timestamp , datetime . datetime ) : raise TypeError ( 'Time.milliseconds expects a datetime object' ) base = time . mktime ( timestamp . timetuple ( ) ) return base
Wrapper over time module to produce Unix epoch time as a float
56,133
def fixUTF8 ( cls , data ) : for key in data : if isinstance ( data [ key ] , str ) : data [ key ] = data [ key ] . encode ( 'utf-8' ) return data
Convert all strings to UTF - 8
56,134
def consume_options ( cls , data , hittype , args ) : opt_position = 0 data [ 't' ] = hittype if hittype in cls . option_sequence : for expected_type , optname in cls . option_sequence [ hittype ] : if opt_position < len ( args ) and isinstance ( args [ opt_position ] , expected_type ) : data [ optname ] = args [ opt_position ] opt_position += 1
Interpret sequential arguments related to known hittypes based on declared structures
56,135
def set_timestamp ( self , data ) : if 'hittime' in data : data [ 'qt' ] = self . hittime ( timestamp = data . pop ( 'hittime' , None ) ) if 'hitage' in data : data [ 'qt' ] = self . hittime ( age = data . pop ( 'hitage' , None ) )
Interpret time - related options apply queue - time parameter as needed
56,136
async def send ( self , hittype , * args , ** data ) : if hittype not in self . valid_hittypes : raise KeyError ( 'Unsupported Universal Analytics Hit Type: {0}' . format ( repr ( hittype ) ) ) self . set_timestamp ( data ) self . consume_options ( data , hittype , args ) for item in args : if isinstance ( item , dict ) : for key , val in self . payload ( item ) : data [ key ] = val for k , v in self . params . items ( ) : if k not in data : data [ k ] = v data = dict ( self . payload ( data ) ) if self . hash_client_id : data [ 'cid' ] = generate_uuid ( data [ 'cid' ] ) await self . http . send ( data )
Transmit HTTP requests to Google Analytics using the measurement protocol
56,137
def get_process_log ( self , pid = None , start = 0 , limit = 1000 ) : pid = self . _get_pid ( pid ) data = self . _call_rest_api ( 'get' , '/processes/' + pid + '/log?start={}&limit={}' . format ( start , limit ) , error = 'Failed to fetch process log' ) return data [ 'list' ]
get_process_log ( self pid = None start = 0 limit = 1000
56,138
def write ( self ) : for entry in self . _instream : if isinstance ( entry , Feature ) : for feature in entry : if feature . num_children > 0 or feature . is_multi : if feature . is_multi and feature != feature . multi_rep : continue self . feature_counts [ feature . type ] += 1 fid = '{}{}' . format ( feature . type , self . feature_counts [ feature . type ] ) feature . add_attribute ( 'ID' , fid ) else : feature . drop_attribute ( 'ID' ) if isinstance ( entry , Sequence ) and not self . _seq_written : print ( '##FASTA' , file = self . outfile ) self . _seq_written = True print ( repr ( entry ) , file = self . outfile )
Pull features from the instream and write them to the output .
56,139
def date2doy ( time : Union [ str , datetime . datetime ] ) -> Tuple [ int , int ] : T = np . atleast_1d ( time ) year = np . empty ( T . size , dtype = int ) doy = np . empty_like ( year ) for i , t in enumerate ( T ) : yd = str ( datetime2yeardoy ( t ) [ 0 ] ) year [ i ] = int ( yd [ : 4 ] ) doy [ i ] = int ( yd [ 4 : ] ) assert ( ( 0 < doy ) & ( doy < 366 ) ) . all ( ) , 'day of year must be 0 < doy < 366' return doy , year
< 366 for leap year too . normal year 0 .. 364 . Leap 0 .. 365 .
56,140
def randomdate ( year : int ) -> datetime . date : if calendar . isleap ( year ) : doy = random . randrange ( 366 ) else : doy = random . randrange ( 365 ) return datetime . date ( year , 1 , 1 ) + datetime . timedelta ( days = doy )
gives random date in year
56,141
def function_to_serializable_representation ( fn ) : if type ( fn ) not in ( FunctionType , BuiltinFunctionType ) : raise ValueError ( "Can't serialize %s : %s, must be globally defined function" % ( fn , type ( fn ) , ) ) if hasattr ( fn , "__closure__" ) and fn . __closure__ is not None : raise ValueError ( "No serializable representation for closure %s" % ( fn , ) ) return { "__module__" : get_module_name ( fn ) , "__name__" : fn . __name__ }
Converts a Python function into a serializable representation . Does not currently work for methods or functions with closure data .
56,142
def from_serializable_dict ( x ) : if "__name__" in x : return _lookup_value ( x . pop ( "__module__" ) , x . pop ( "__name__" ) ) non_string_key_objects = [ from_json ( serialized_key ) for serialized_key in x . pop ( SERIALIZED_DICTIONARY_KEYS_FIELD , [ ] ) ] converted_dict = type ( x ) ( ) for k , v in x . items ( ) : serialized_key_index = parse_serialized_keys_index ( k ) if serialized_key_index is not None : k = non_string_key_objects [ serialized_key_index ] converted_dict [ k ] = from_serializable_repr ( v ) if "__class__" in converted_dict : class_object = converted_dict . pop ( "__class__" ) if "__value__" in converted_dict : return class_object ( converted_dict [ "__value__" ] ) elif hasattr ( class_object , "from_dict" ) : return class_object . from_dict ( converted_dict ) else : return class_object ( ** converted_dict ) return converted_dict
Reconstruct a dictionary by recursively reconstructing all its keys and values .
56,143
def to_serializable_repr ( x ) : t = type ( x ) if isinstance ( x , list ) : return list_to_serializable_repr ( x ) elif t in ( set , tuple ) : return { "__class__" : class_to_serializable_representation ( t ) , "__value__" : list_to_serializable_repr ( x ) } elif isinstance ( x , dict ) : return dict_to_serializable_repr ( x ) elif isinstance ( x , ( FunctionType , BuiltinFunctionType ) ) : return function_to_serializable_representation ( x ) elif type ( x ) is type : return class_to_serializable_representation ( x ) else : state_dictionary = to_serializable_repr ( to_dict ( x ) ) state_dictionary [ "__class__" ] = class_to_serializable_representation ( x . __class__ ) return state_dictionary
Convert an instance of Serializable or a primitive collection containing such instances into serializable types .
56,144
def _combine_rest_push ( self ) : new = [ ] change = 0 i = 0 examinetypes = self . quickresponse_types [ 3 ] for state in examinetypes : if state . type == 3 : for nextstate_id in state . trans . keys ( ) : found = 0 if nextstate_id in self . quickresponse : examines = self . quickresponse [ nextstate_id ] for examine in examines : if examine . id == nextstate_id and examine . type == 1 : temp = PDAState ( ) temp . type = 1 temp . sym = examine . sym temp . id = state . id for nextnextstate_id in examine . trans : for x_char in state . trans [ nextstate_id ] : for z_char in examine . trans [ nextnextstate_id ] : if nextnextstate_id not in temp . trans : temp . trans [ nextnextstate_id ] = [ ] if x_char != 0 and z_char != 0 : temp . trans [ nextnextstate_id ] . append ( x_char + z_char ) elif x_char != 0 and z_char == 0 : temp . trans [ nextnextstate_id ] . append ( x_char ) elif x_char == 0 and z_char != 0 : temp . trans [ nextnextstate_id ] . append ( z_char ) elif x_char == 0 and z_char == 0 : temp . trans [ nextnextstate_id ] . append ( 0 ) else : pass found = 1 new . append ( temp ) if found == 1 : change = 1 i = i + 1 if change == 0 : return [ ] else : return new
Combining Rest and Push States
56,145
def _check ( self , accepted ) : total = [ ] if 1 in self . quickresponse : total = total + self . quickresponse [ 1 ] if ( 1 , 0 ) in self . quickresponse : total = total + self . quickresponse [ ( 1 , 0 ) ] for key in total : if ( key . id == 1 or key . id == ( 1 , 0 ) ) and key . type == 3 : if accepted is None : if 2 in key . trans : return key . trans [ 2 ] else : for state in accepted : if ( 2 , state ) in key . trans : return key . trans [ ( 2 , state ) ] return - 1
_check for string existence
56,146
def _stage ( self , accepted , count = 0 ) : new5 = self . _combine_rest_push ( ) new1 = self . _combine_push_pop ( ) new2 = self . _combine_push_rest ( ) new3 = self . _combine_pop_rest ( ) new4 = self . _combine_rest_rest ( ) new = new1 + new2 + new3 + new4 + new5 del new1 del new2 del new3 del new4 del new5 if len ( new ) == 0 : return None self . statediag = self . statediag + new del new newstates = [ ] for key in self . statediag : if len ( key . trans ) == 0 or key . trans == { } : pass else : newstates . append ( key ) del self . statediag self . statediag = newstates self . quickresponse = { } self . quickresponse_types = { } self . quickresponse_types [ 0 ] = [ ] self . quickresponse_types [ 1 ] = [ ] self . quickresponse_types [ 2 ] = [ ] self . quickresponse_types [ 3 ] = [ ] self . quickresponse_types [ 4 ] = [ ] for state in self . statediag : if state . id not in self . quickresponse : self . quickresponse [ state . id ] = [ state ] else : self . quickresponse [ state . id ] . append ( state ) self . quickresponse_types [ state . type ] . append ( state ) exists = self . _check ( accepted ) if exists == - 1 : return self . _stage ( accepted , count + 1 ) else : print exists return exists
This is a repeated state in the state removal algorithm
56,147
def printer ( self ) : for key in self . statediag : if key . trans is not None and len ( key . trans ) > 0 : print '****** ' + repr ( key . id ) + '(' + repr ( key . type ) + ' on sym ' + repr ( key . sym ) + ') ******' print key . trans
Visualizes the current state
56,148
def init ( self , states , accepted ) : self . statediag = [ ] for key in states : self . statediag . append ( states [ key ] ) self . quickresponse = { } self . quickresponse_types = { } self . quickresponse_types [ 0 ] = [ ] self . quickresponse_types [ 1 ] = [ ] self . quickresponse_types [ 2 ] = [ ] self . quickresponse_types [ 3 ] = [ ] self . quickresponse_types [ 4 ] = [ ] for state in self . statediag : if state . id not in self . quickresponse : self . quickresponse [ state . id ] = [ state ] else : self . quickresponse [ state . id ] . append ( state ) self . quickresponse_types [ state . type ] . append ( state ) return self . _stage ( accepted , 0 )
Initialization of the indexing dictionaries
56,149
def execute ( filelocation , outpath , executable , args = None , switchArgs = None ) : procArgs = [ 'java' , '-jar' , executable ] procArgs . extend ( [ '-output_path' , outpath ] ) if args is not None : for arg in args : procArgs . extend ( [ '-' + arg [ 0 ] , arg [ 1 ] ] ) if switchArgs is not None : procArgs . extend ( [ '-' + arg for arg in switchArgs ] ) procArgs . extend ( aux . toList ( filelocation ) ) proc = subprocess . Popen ( procArgs , stderr = subprocess . PIPE ) while True : out = proc . stderr . read ( 1 ) if out == '' and proc . poll ( ) != None : break if out != '' : sys . stdout . write ( out ) sys . stdout . flush ( )
Executes the dinosaur tool on Windows operating systems .
56,150
def generate_example ( ) : cmd_args = sys . argv [ 1 : ] parser = argparse . ArgumentParser ( description = 'Confpy example generator.' ) parser . add_argument ( '--module' , action = 'append' , help = 'A python module which should be imported.' , ) parser . add_argument ( '--file' , action = 'append' , help = 'A python file which should be evaled.' , ) parser . add_argument ( '--format' , default = 'JSON' , choices = ( 'JSON' , 'INI' ) , help = 'The output format of the configuration file.' , ) args = parser . parse_args ( cmd_args ) for module in args . module or ( ) : __import__ ( module ) for source_file in args . file or ( ) : cfg = pyfile . PythonFile ( path = source_file ) . config cfg = config . Configuration ( ) print ( example . generate_example ( cfg , ext = args . format ) )
Generate a configuration file example .
56,151
def count ( self , val = True ) : return sum ( ( elem . count ( val ) for elem in self . _iter_components ( ) ) )
Get the number of bits in the array with the specified value .
56,152
def _api_group_for_type ( cls ) : _groups = { ( u"v1beta1" , u"Deployment" ) : u"extensions" , ( u"v1beta1" , u"DeploymentList" ) : u"extensions" , ( u"v1beta1" , u"ReplicaSet" ) : u"extensions" , ( u"v1beta1" , u"ReplicaSetList" ) : u"extensions" , } key = ( cls . apiVersion , cls . __name__ . rsplit ( u"." ) [ - 1 ] , ) group = _groups . get ( key , None ) return group
Determine which Kubernetes API group a particular PClass is likely to belong with .
56,153
def response ( request , status , obj ) : request . setResponseCode ( status ) request . responseHeaders . setRawHeaders ( u"content-type" , [ u"application/json" ] , ) body = dumps_bytes ( obj ) return body
Generate a response .
56,154
def create ( self , collection_name , obj ) : obj = self . agency . before_create ( self , obj ) new = self . agency . after_create ( self , obj ) updated = self . transform ( [ collection_name ] , lambda c : c . add ( new ) , ) return updated
Create a new object in the named collection .
56,155
def replace ( self , collection_name , old , new ) : self . agency . before_replace ( self , old , new ) updated = self . transform ( [ collection_name ] , lambda c : c . replace ( old , new ) , ) return updated
Replace an existing object with a new version of it .
56,156
def execution_timer ( value ) : def _invoke ( method , key_arg_position , * args , ** kwargs ) : start_time = time . time ( ) result = method ( * args , ** kwargs ) duration = time . time ( ) - start_time key = [ method . func_name ] if key_arg_position is not None : key . append ( args [ key_arg_position ] ) add_timing ( '.' . join ( key ) , value = duration ) return result if type ( value ) is types . FunctionType : def wrapper ( * args , ** kwargs ) : return _invoke ( value , None , * args , ** kwargs ) return wrapper else : def duration_decorator ( func ) : def wrapper ( * args , ** kwargs ) : return _invoke ( func , value , * args , ** kwargs ) return wrapper return duration_decorator
The execution_timer decorator allows for easy instrumentation of the duration of function calls using the method name in the key .
56,157
def _get_lang ( self , * args , ** kwargs ) : if "lang" in kwargs : if kwargs [ "lang" ] in self . _available_languages : self . lang = kwargs [ "lang" ]
Let users select language
56,158
def notify ( self , msg , color = 'green' , notify = 'true' , message_format = 'text' ) : self . message_dict = { 'message' : msg , 'color' : color , 'notify' : notify , 'message_format' : message_format , } if not self . debug : return requests . post ( self . notification_url , json . dumps ( self . message_dict ) , headers = self . headers ) else : print ( 'HipChat message: <{}>' . format ( msg ) ) return [ ]
Send notification to specified HipChat room
56,159
def trial ( path = TESTS_PATH , coverage = False ) : args = [ 'trial' ] if coverage : args . append ( '--coverage' ) args . append ( path ) print args local ( ' ' . join ( args ) )
Run tests using trial
56,160
def process_result_value ( self , value , dialect ) : if value is not None : cmd = "value = {}" . format ( value ) exec ( cmd ) return value
When SQLAlchemy gets the string representation from a ReprObjType column it converts it to the python equivalent via exec .
56,161
def make_regex ( separator ) : return re . compile ( r'(?:' + re . escape ( separator ) + r')?((?:[^' + re . escape ( separator ) + r'\\]|\\.)+)' )
Utility function to create regexp for matching escaped separators in strings .
56,162
def strip_comments ( text ) : regex = r'\s*(#|\/{2}).*$' regex_inline = r'(:?(?:\s)*([A-Za-z\d\.{}]*)|((?<=\").*\"),?)(?:\s)*(((#|(\/{2})).*)|)$' lines = text . split ( '\n' ) for index , line in enumerate ( lines ) : if re . search ( regex , line ) : if re . search ( r'^' + regex , line , re . IGNORECASE ) : lines [ index ] = "" elif re . search ( regex_inline , line ) : lines [ index ] = re . sub ( regex_inline , r'\1' , line ) return '\n' . join ( lines )
Comment stripper for JSON .
56,163
def register ( action ) : if isinstance ( action , str ) : Action . register ( Action ( action ) ) elif isinstance ( action , Action ) : Action . registered . add ( action ) else : for a in action : Action . register ( a )
Action registration is used to support generating lists of permitted actions from a permission set and an object pattern . Only registered actions will be returned by such queries .
56,164
def allow ( self , act , obj = None ) : objc = obj . components if obj is not None else [ ] try : return self . tree [ act . components + objc ] == 'allow' except KeyError : return False
Determine where a given action on a given object is allowed .
56,165
def permitted_actions ( self , obj = None ) : return [ a for a in Action . registered if self . allow ( a , obj ( str ( a ) ) if obj is not None else None ) ]
Determine permitted actions for a given object pattern .
56,166
def subscribe ( ws ) : while ws is not None : gevent . sleep ( 0.1 ) try : message = ws . receive ( ) if message : stream . register ( ws , message ) except WebSocketError : ws = None
WebSocket endpoint used for liveupdates
56,167
def could_scope_out ( self ) : return not self . waiting_for or isinstance ( self . waiting_for , callable . EndOfStory ) or self . is_breaking_a_loop ( )
could bubble up from current scope
56,168
def alias ( self ) : if self . _alias is None : if self . name in self . aliases_fix : self . _alias = self . aliases_fix [ self . name ] else : self . _alias = self . name . lower ( ) . replace ( ' ' , '-' ) . replace ( '(' , '' ) . replace ( ')' , '' ) return self . _alias
If the _alias cache is None just build the alias from the item name .
56,169
def load_configs ( self , conf_file ) : with open ( conf_file ) as stream : lines = itertools . chain ( ( "[global]" , ) , stream ) self . _config . read_file ( lines ) return self . _config [ 'global' ]
Assumes that the config file does not have any sections so throw it all in global
56,170
def remove_quotes ( self , configs ) : for key in configs : value = configs [ key ] if value [ 0 ] == "'" and value [ - 1 ] == "'" : configs [ key ] = value [ 1 : - 1 ] return configs
Because some values are wraped in single quotes
56,171
def chunks_of ( max_chunk_size , list_to_chunk ) : for i in range ( 0 , len ( list_to_chunk ) , max_chunk_size ) : yield list_to_chunk [ i : i + max_chunk_size ]
Yields the list with a max size of max_chunk_size
56,172
def split_into ( max_num_chunks , list_to_chunk ) : max_chunk_size = math . ceil ( len ( list_to_chunk ) / max_num_chunks ) return chunks_of ( max_chunk_size , list_to_chunk )
Yields the list with a max total size of max_num_chunks
56,173
def get_proxy_parts ( proxy ) : proxy_parts = { 'schema' : None , 'user' : None , 'password' : None , 'host' : None , 'port' : None , } results = re . match ( proxy_parts_pattern , proxy ) if results : matched = results . groupdict ( ) for key in proxy_parts : proxy_parts [ key ] = matched . get ( key ) else : logger . error ( "Invalid proxy format `{proxy}`" . format ( proxy = proxy ) ) if proxy_parts [ 'port' ] is None : proxy_parts [ 'port' ] = '80' return proxy_parts
Take a proxy url and break it up to its parts
56,174
def remove_html_tag ( input_str = '' , tag = None ) : result = input_str if tag is not None : pattern = re . compile ( '<{tag}[\s\S]+?/{tag}>' . format ( tag = tag ) ) result = re . sub ( pattern , '' , str ( input_str ) ) return result
Returns a string with the html tag and all its contents from a string
56,175
def ip_between ( ip , start , finish ) : if is_IPv4Address ( ip ) and is_IPv4Address ( start ) and is_IPv4Address ( finish ) : return IPAddress ( ip ) in IPRange ( start , finish ) else : return False
Checks to see if IP is between start and finish
56,176
def is_rfc1918(ip):
    """Return True when ip belongs to one of the RFC 1918 private blocks."""
    private_blocks = (
        ("10.0.0.0", "10.255.255.255"),
        ("172.16.0.0", "172.31.255.255"),
        ("192.168.0.0", "192.168.255.255"),
    )
    return any(ip_between(ip, low, high) for low, high in private_blocks)
Checks to see if an IP address is used for local communications within a private network as specified by RFC 1918
56,177
def is_reserved(ip):
    """Return True when ip lies in any special-purpose reserved block.

    Covers the RFC 1918 private ranges plus the other IETF/IANA
    special-use blocks checked by the original implementation.
    """
    reserved_blocks = (
        ("0.0.0.0", "0.255.255.255"),
        ("10.0.0.0", "10.255.255.255"),
        ("100.64.0.0", "100.127.255.255"),
        ("127.0.0.0", "127.255.255.255"),
        ("169.254.0.0", "169.254.255.255"),
        ("172.16.0.0", "172.31.255.255"),
        ("192.0.0.0", "192.0.0.255"),
        ("192.0.2.0", "192.0.2.255"),
        ("192.88.99.0", "192.88.99.255"),
        ("192.168.0.0", "192.168.255.255"),
        ("198.18.0.0", "198.19.255.255"),
        ("198.51.100.0", "198.51.100.255"),
        ("203.0.113.0", "203.0.113.255"),
        ("224.0.0.0", "255.255.255.255"),
    )
    return any(ip_between(ip, low, high) for low, high in reserved_blocks)
Checks to see if an IP address is reserved for special purposes . This includes all of the RFC 1918 addresses as well as other blocks that are reserved by IETF and IANA for various reasons .
56,178
def is_hash(fhash):
    """Return True when fhash matches a known hash format.

    Recognised formats: md5, sha1, sha256, sha512 and ssdeep.
    """
    known_formats = (re_md5, re_sha1, re_sha256, re_sha512, re_ssdeep)
    return any(re.match(fmt, fhash) is not None for fmt in known_formats)
Returns true for valid hashes false for invalid .
56,179
def reverse_dns_sna(ipaddress):
    """Return the DNS names pointing at ipaddress via the StatDNS API.

    Returns None when the API reports a 503 error code; any other
    non-200 response also falls through to an implicit None.
    """
    reply = requests.get("http://api.statdns.com/x/%s" % ipaddress)
    if reply.status_code == 200:
        return [str(entry['rdata']).strip(".") for entry in reply.json()['answer']]
    elif reply.json()['code'] == 503:
        return None
Returns a list of the dns names that point to a given ipaddress using StatDNS API
56,180
def vt_ip_check(ip, vt_api):
    """Look up an IP address report on VirusTotal.

    Returns the decoded JSON report, or None for an invalid address
    or an unparsable response body.
    """
    if not is_IPv4Address(ip):
        return None
    report_url = 'https://www.virustotal.com/vtapi/v2/ip-address/report'
    payload = {'ip': ip, 'apikey': vt_api}
    reply = requests.get(report_url, params=payload)
    try:
        return reply.json()
    except ValueError:
        return None
Checks VirusTotal for occurrences of an IP address
56,181
def vt_name_check(domain, vt_api):
    """Look up a domain report on VirusTotal.

    Returns the decoded JSON report, or None for an invalid domain
    or an unparsable response body.
    """
    if not is_fqdn(domain):
        return None
    report_url = 'https://www.virustotal.com/vtapi/v2/domain/report'
    payload = {'domain': domain, 'apikey': vt_api}
    reply = requests.get(report_url, params=payload)
    try:
        return reply.json()
    except ValueError:
        return None
Checks VirusTotal for occurrences of a domain name
56,182
def vt_hash_check(fhash, vt_api):
    """Look up a file-hash report on VirusTotal.

    Returns the decoded JSON report, or None for an unrecognised hash
    format or an unparsable response body.
    """
    if not is_hash(fhash):
        return None
    report_url = 'https://www.virustotal.com/vtapi/v2/file/report'
    payload = {'resource': fhash, 'apikey': vt_api}
    reply = requests.get(report_url, params=payload)
    try:
        return reply.json()
    except ValueError:
        return None
Checks VirusTotal for occurrences of a file hash
56,183
def ipinfo_ip_check(ip):
    """Fetch basic WHOIS-style data for ip from ipinfo.io (None if invalid)."""
    if not is_IPv4Address(ip):
        return None
    return requests.get('http://ipinfo.io/%s/json' % ip).json()
Checks ipinfo . io for basic WHOIS - type data on an IP address
56,184
def dshield_ip_check(ip):
    """Query the SANS ISC (dshield) API for information about ip."""
    if not is_IPv4Address(ip):
        return None
    request_headers = {'User-Agent': useragent}
    base_url = 'https://isc.sans.edu/api/ip/'
    reply = requests.get('{0}{1}?json'.format(base_url, ip), headers=request_headers)
    return reply.json()
Checks dshield for info on an IP address
56,185
def cli(ctx, amount, index, stage):
    """Push data for a configured stage to its Target Service Client.

    Loads previously 'push'-ready (or raw 'pulled', when no TRANSFORM is
    configured) data, pushes it through the stage's TARGET client, dumps
    the results as the 'pushed' step and records push statistics.
    Aborts with a message when the bubble, stage config or target is missing.
    """
    if not ctx.bubble:
        ctx.say_yellow('There is no bubble present, will not push')
        raise click.Abort()

    TGT = None
    transformed = True
    STAGE = None
    # Resolve the stage section from the loaded configuration.
    if stage in STAGES and stage in ctx.cfg.CFG:
        STAGE = ctx.cfg.CFG[stage]
    if not STAGE:
        ctx.say_red('There is no STAGE in CFG:' + stage)
        ctx.say_yellow('please check configuration in ' +
                       ctx.home + '/config/config.yaml')
        raise click.Abort()
    if 'TARGET' in STAGE:
        TGT = STAGE.TARGET
    # Without a TRANSFORM step the raw pulled data is pushed instead.
    if 'TRANSFORM' in STAGE:
        transformed = True
    else:
        transformed = False
    if not transformed:
        # NOTE(review): called with no message — presumably just emits a
        # blank/colored line; confirm against ctx.say_yellow's signature.
        ctx.say_yellow()
    if not TGT:
        ctx.say_red('There is no TARGET in: ' + stage)
        ctx.say_yellow('please check configuration in ' +
                       ctx.home + '/config/config.yaml')
        raise click.Abort()

    # Instantiate the target client and wire it to the current context.
    tgt_client = get_client(ctx.gbc, TGT.CLIENT, ctx.home)
    try:
        tclient = tgt_client.BubbleClient(cfg=TGT)
        tclient.set_parent(ctx.gbc)
        tclient.set_verbose(ctx.get_verbose())
    except Exception as e:
        ctx.say_red('cannot create bubble client:' + TGT.CLIENT)
        ctx.say_red(str(e))
        raise click.Abort('can not push')

    # Choose which prior step's data to load.
    step_to_load = 'push'
    if not transformed:
        step_to_load = 'pulled'
    data_gen = bubble_lod_load(ctx, step_to_load, stage)

    # amount == -1 and index == -1 means "push everything".
    full_data = False
    if amount == -1 and index == -1:
        full_data = True
    to_push = get_gen_slice(ctx.gbc, data_gen, amount, index)

    error_count = Counter()
    total_count = Counter()
    pushres = do_yielding_push(ctx=ctx,
                               to_push=to_push,
                               tclient=tclient,
                               total_counter=total_count,
                               error_counter=error_count)
    # Persist the pushed results as the 'pushed' step for this stage.
    pfr = bubble_lod_dump(ctx=ctx,
                          step='pushed',
                          stage=stage,
                          full_data=full_data,
                          reset=True,
                          data_gen=pushres)
    ctx.say('pushed [%d] objects' % pfr['total'])

    stats = {}
    stats['pushed_stat_error_count'] = error_count.get_total()
    stats['pushed_stat_total_count'] = total_count.get_total()
    update_stats(ctx, stage, stats)
    return True
Push data to Target Service Client
56,186
def RemoveEmptyDirectoryTree(path, silent=False, recursion=0):
    """Delete a tree of empty directories, walking upwards from path.

    After each successful removal the parent directory is attempted;
    recursion stops at the first directory that cannot be removed
    (typically because it is not empty).

    :param path: directory to start deleting from
    :param silent: suppress all log output when True
    :param recursion: internal recursion depth counter (leave at 0)
    """
    # '== 0' instead of the original 'is 0': identity comparison on
    # integers is an implementation detail and a SyntaxWarning on 3.8+.
    if not silent and recursion == 0:
        goodlogging.Log.Info("UTIL", "Starting removal of empty directory tree at: {0}".format(path))
    try:
        os.rmdir(path)
    except OSError:
        # Directory is not empty (or not removable) - stop here.
        if not silent:
            goodlogging.Log.Info("UTIL", "Removal of empty directory tree terminated at: {0}".format(path))
        return
    else:
        if not silent:
            goodlogging.Log.Info("UTIL", "Directory deleted: {0}".format(path))
        RemoveEmptyDirectoryTree(os.path.dirname(path), silent, recursion + 1)
Delete tree of empty directories .
56,187
def ValidUserResponse(response, validList):
    """Return response once it is one of the entries in validList.

    An invalid response triggers a re-prompt; the function does not
    return until a valid entry has been given.
    """
    # Iterative form of the original tail-recursive re-prompt.
    while response not in validList:
        prompt = "Unknown response given - please reenter one of [{0}]: ".format('/'.join(validList))
        response = goodlogging.Log.Input("DM", prompt)
    return response
Check if user response is in a list of valid entries . If an invalid response is given re - prompt user to enter one of the valid options . Do not proceed until a valid entry is given .
56,188
def UserAcceptance(matchList, recursiveLookup=True, promptComment=None, promptOnly=False, xStrOverride="to skip this selection"):
    """Prompt the user to accept a match, pick from several, or enter a
    new lookup string.

    :param matchList: candidate matches (may be empty)
    :param recursiveLookup: when True any unrecognised response is
        returned as a new lookup string
    :param promptComment: optional extra text appended to the prompt
    :param promptOnly: suppress the "No match found" log line when True
    :param xStrOverride: wording for the 'x' (skip) option
    :returns: the accepted match, a new lookup string, or None when the
        user skips; terminates the program on 'exit'
    """
    matchString = ', '.join(matchList)

    # Build the prompt piecewise depending on how many candidates exist.
    if len(matchList) == 1:
        goodlogging.Log.Info("UTIL", "Match found: {0}".format(matchString))
        prompt = "Enter 'y' to accept this match or e"
    elif len(matchList) > 1:
        goodlogging.Log.Info("UTIL", "Multiple possible matches found: {0}".format(matchString))
        prompt = "Enter correct match from list or e"
    else:
        if promptOnly is False:
            goodlogging.Log.Info("UTIL", "No match found")
        prompt = "E"
        if not recursiveLookup:
            # Nothing to accept and no re-lookup allowed: give up.
            return None

    if recursiveLookup:
        prompt = prompt + "nter a different string to look up or e"

    prompt = prompt + "nter 'x' {0} or enter 'exit' to quit this program".format(xStrOverride)

    if promptComment is None:
        prompt = prompt + ": "
    else:
        prompt = prompt + " ({0}): ".format(promptComment)

    # Keep prompting until a recognised response is given.
    while (1):
        response = goodlogging.Log.Input('UTIL', prompt)

        if response.lower() == 'exit':
            goodlogging.Log.Fatal("UTIL", "Program terminated by user 'exit'")
        if response.lower() == 'x':
            return None
        elif response.lower() == 'y' and len(matchList) == 1:
            return matchList[0]
        elif len(matchList) > 1:
            # Case-insensitive selection from the candidate list.
            for match in matchList:
                if response.lower() == match.lower():
                    return match
        if recursiveLookup:
            # Any other input is treated as a fresh lookup string.
            return response
Prompt user to select a entry from a given match list or to enter a new string to look up . If the match list is empty user must enter a new string or exit .
56,189
def GetBestMatch(target, matchList):
    """Return the elements of matchList that best match the target string.

    A perfect match (ratio 1 with identical length) is returned on its
    own; otherwise every entry sharing the highest ratio above 0.8 is
    returned. An empty list is returned when nothing scores high enough.
    """
    bestMatchList = []
    if not matchList:
        return bestMatchList
    ratios = [GetBestStringMatchValue(target, candidate) for candidate in matchList]
    topRatio = max(ratios)
    if topRatio > 0.8:
        for position, ratio in enumerate(ratios):
            if ratio != topRatio:
                continue
            candidate = matchList[position]
            # Exact match short-circuits everything else.
            if topRatio == 1 and len(candidate) == len(target):
                return [candidate, ]
            bestMatchList.append(candidate)
    return bestMatchList
Finds the elements of matchList which best match the target string .
56,190
def GetBestStringMatchValue(string1, string2):
    """Return the highest matching ratio between two strings.

    Comparison is case-insensitive and ignores non-alphanumeric
    characters. For strings of different length, each matching-block
    substring of the longer string is also compared against the shorter
    one and the best ratio wins. Returns 0 when either string is empty
    after normalisation.
    """
    first = ''.join(ch for ch in string1.lower() if ch.isalnum())
    second = ''.join(ch for ch in string2.lower() if ch.isalnum())

    if not first or not second:
        return 0

    if len(first) == len(second):
        return difflib.SequenceMatcher(None, first, second).ratio()

    # Unequal lengths: compare short against long and against each
    # matching-block substring of the long string.
    shortString, longString = sorted((first, second), key=len)
    overall = difflib.SequenceMatcher(None, shortString, longString)
    bestRatio = overall.ratio()
    for block in overall.get_matching_blocks():
        subString = longString[block[1]:block[1] + block[2]]
        subRatio = difflib.SequenceMatcher(None, shortString, subString).ratio()
        if subRatio > bestRatio:
            bestRatio = subRatio
    return (bestRatio)
Return the value of the highest matching substrings between two strings .
56,191
def WebLookup(url, urlQuery=None, utf8=True):
    """GET the page at url (with optional query params) and return its text.

    Raises via Response.raise_for_status on an error status code.
    """
    log_kwargs = {'verbosity': goodlogging.Verbosity.MINIMAL}
    goodlogging.Log.Info("UTIL", "Looking up info from URL:{0} with QUERY:{1})".format(url, urlQuery), **log_kwargs)
    response = requests.get(url, params=urlQuery)
    goodlogging.Log.Info("UTIL", "Full url: {0}".format(response.url), **log_kwargs)
    if utf8 is True:
        response.encoding = 'utf-8'
    if (response.status_code == requests.codes.ok):
        return (response.text)
    else:
        response.raise_for_status()
Look up webpage at given url with optional query string
56,192
def ArchiveProcessedFile(filePath, archiveDir):
    """Move filePath into archiveDir.

    The archive directory is resolved relative to the file's own
    directory and created if missing; a failed move is logged rather
    than raised.
    """
    targetDir = os.path.join(os.path.dirname(filePath), archiveDir)
    destination = os.path.join(targetDir, os.path.basename(filePath))
    goodlogging.Log.Info("UTIL", "Moving file to archive directory:")
    goodlogging.Log.IncreaseIndent()
    goodlogging.Log.Info("UTIL", "FROM: {0}".format(filePath))
    goodlogging.Log.Info("UTIL", "TO: {0}".format(destination))
    goodlogging.Log.DecreaseIndent()
    os.makedirs(targetDir, exist_ok=True)
    try:
        shutil.move(filePath, targetDir)
    except shutil.Error as moveError:
        goodlogging.Log.Info("UTIL", "Move to archive directory failed - Shutil Error: {0}".format(moveError.args[0]))
Move file from given file path to archive directory . Note the archive directory is relative to the file path directory .
56,193
def send_wait(self, text):
    """Send a string to the PiLite, then block until it has scrolled past.

    The wait is an estimate from the message length, columns per
    character and the configured speed; since the font is not monospaced
    it usually waits slightly too long.
    """
    self.send(text)
    estimated_ms = len(text) * PiLite.COLS_PER_CHAR * self.speed
    time.sleep(estimated_ms / 1000.0)
Send a string to the PiLite , then sleep until the message has been displayed ( based on an estimate of the speed of the display ) . Because the font is not monospaced this will wait too long in most cases .
56,194
def set_speed(self, speed):
    """Set the display speed.

    The parameter is the number of milliseconds between each column
    scrolling off the display.
    """
    self.speed = speed
    self.send_cmd("SPEED" + str(speed))
Set the display speed . The parameters is the number of milliseconds between each column scrolling off the display
56,195
def set_fb_random(self):
    """Fill the 14x9 frame buffer with a random on/off pattern."""
    # range, not the Python-2-only xrange the original used; one random
    # '0'/'1' bit per pixel of the 14x9 display.
    pattern = ''.join(random.choice('01') for _ in range(14 * 9))
    self.set_fb(pattern)
Set the frame buffer to a random pattern
56,196
def set_pixel(self, x, y, state):
    """Set the pixel at (x, y) to state: one of ON, OFF or TOGGLE."""
    # The PiLite protocol is 1-based while this API is 0-based.
    coords = str(x + 1) + "," + str(y + 1)
    self.send_cmd("P" + coords + "," + state)
Set pixel at x y to state where state can be one of ON OFF or TOGGLE
56,197
def display_char(self, x, y, char):
    """Display character char with its top-left corner at (x, y)."""
    # The PiLite protocol is 1-based while this API is 0-based.
    position = str(x + 1) + "," + str(y + 1)
    self.send_cmd("T" + position + "," + char)
Display character char with its top left at x y
56,198
def the_magic_mapping_function(peptides, fastaPath, importAttributes=None, ignoreUnmapped=True):
    """Map peptides to the leading proteins of their inferred protein groups.

    :param peptides: iterable of peptide sequence strings
    :param fastaPath: path of the fasta file used to build the protein db
    :param importAttributes: optional dict overriding the default protein
        database import attributes
    :param ignoreUnmapped: when False a KeyError is raised for peptides
        absent from the database; otherwise they are silently skipped
    :returns: dict mapping each peptide to a set of group-leader strings
        (';'-joined sorted leading protein ids)
    """
    # Derive digestion parameters from the input peptides themselves.
    missedCleavage = max([p.count('K') + p.count('R') for p in peptides]) - 1
    minLength = min([len(p) for p in peptides])
    maxLength = max([len(p) for p in peptides])
    defaultAttributes = {
        'cleavageRule': '[KR]',
        'minLength': minLength,
        'maxLength': maxLength,
        'removeNtermM': True,
        'ignoreIsoleucine': True,
        'missedCleavage': missedCleavage,
        'forceId': True,
        'headerParser': PROTEINDB.fastaParserSpectraClusterPy,
    }
    if importAttributes is not None:
        defaultAttributes.update(importAttributes)
    proteindb = PROTEINDB.importProteinDatabase(fastaPath, **defaultAttributes)

    # Invert the peptide -> proteins mapping for protein inference.
    proteinToPeptides = ddict(set)
    for peptide in peptides:
        try:
            peptideDbEntry = proteindb.peptides[peptide]
        except KeyError as exception:
            if ignoreUnmapped:
                continue
            else:
                exceptionText = 'No protein mappings for peptide "' + peptide + '"'
                raise KeyError(exceptionText)
        for protein in peptideDbEntry.proteins:
            proteinToPeptides[protein].add(peptide)
    inference = INFERENCE.mappingBasedGrouping(proteinToPeptides)

    # Collect the group leaders for each peptide's mapped proteins.
    peptideGroupMapping = dict()
    for peptide in peptides:
        groupLeaders = set()
        for proteinId in inference.pepToProts[peptide]:
            for proteinGroup in inference.getGroups(proteinId):
                groupLeaders.add(';'.join(sorted(proteinGroup.leading)))
        peptideGroupMapping[peptide] = groupLeaders
    return peptideGroupMapping
Returns a dictionary mapping peptides to protein group leading proteins .
56,199
def truncate(text, length=50, ellipsis='...'):
    """Return text cut to at most length characters.

    The ellipsis is appended only when something was actually trimmed.
    """
    text = nativestring(text)
    head = text[:length]
    trimmed = text[length:]
    return head + (ellipsis if trimmed else '')
Returns a truncated version of the inputted text .