idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
56,900
def line_type(line: str) -> str:
    """Return the type of *line* as defined in the LINE_TYPES mapping.

    Raises:
        ValueError: if no LINE_TYPES regex fully matches *line*.
    """
    for regex, ltype in LINE_TYPES.items():
        if re.fullmatch(regex, line):
            return ltype
    raise ValueError("Input line \"{}\" is not bubble formatted".format(line))
Give type of input line as defined in LINE_TYPES
56,901
def line_data(line: str) -> tuple:
    """Return the capture groups of the first LINE_TYPES regex fully
    matching *line*.

    Raises:
        ValueError: if no LINE_TYPES pattern matches.
    """
    # Only the patterns (keys) are needed; the original iterated
    # .items() and discarded every value.
    for regex in LINE_TYPES:
        match = re.fullmatch(regex, line)
        if match:
            return match.groups()
    raise ValueError("Input line \"{}\" is not bubble formatted".format(line))
Return groups found in given line
56,902
def _initBuffer(self, bufferColorMode, bufferSize):
    """Initialize the buffer instance, using a PIL Image as the buffer.

    Args:
        bufferColorMode: PIL color mode (e.g. '1', 'RGB') for the buffer.
        bufferSize: (width, height) tuple, or None to use the display size.
    """
    self._buffer_color_mode = bufferColorMode
    # Fix: identity test with `is None` instead of `== None`.
    if bufferSize is None:
        self._buffer = Image.new(bufferColorMode, self._display_size)
    else:
        self._buffer = Image.new(bufferColorMode, bufferSize)
    self.Canvas = ImageDraw.Draw(self._buffer)
    self.View = SSRect(0, 0, self._display_size[0], self._display_size[1])
! \ ~english Initialize the buffer object instance use PIL Image as for buffer
56,903
def clearCanvas(self, fillColor=0):
    """Clear the whole canvas, filling it with *fillColor*."""
    width = self._display_size[0]
    height = self._display_size[1]
    self.Canvas.rectangle((0, 0, width, height), outline=0, fill=fillColor)
! \ ~english Clear the canvas and fill it with a color at the same time
56,904
def resize(self, newWidth=0, newHeight=0):
    """Set the rectangle's width and height to the given values."""
    self.width = newWidth
    self.height = newHeight
! \ ~english Resize width and height of rectangles
56,905
def rotateDirection(self, displayDirection):
    """Rotate the screen to *displayDirection*.

    If the new direction requires swapping width and height, the display
    size is transposed and the buffer redefined before the view is resized.
    NOTE(review): nesting reconstructed from a flattened source line —
    confirm the final direction assignment is unconditional.
    """
    if self._needSwapWH(self._display_direction, displayDirection):
        self._display_size = (self._display_size[1], self._display_size[0])
        if self.redefineBuffer({"size": self._display_size, "color_mode": self._buffer_color_mode}):
            self.View.resize(self._display_size[0], self._display_size[1])
    self._display_direction = displayDirection
! \ ~english rotate screen direction
56,906
def autodiscover_modules(packages, related_name_re='.+', ignore_exceptions=False):
    """Autodiscover function following the pattern used by Celery.

    Args:
        packages: iterable of package names to scan.
        related_name_re: regex the module's last name component must match.
        ignore_exceptions: passed through to find_related_modules().

    Returns:
        list: discovered module objects (None entries filtered out).
    """
    warnings.warn('autodiscover_modules has been deprecated. '
                  'Use Flask-Registry instead.', DeprecationWarning)
    global _RACE_PROTECTION
    if _RACE_PROTECTION:
        return []
    _RACE_PROTECTION = True
    modules = []
    try:
        # try/finally replaces the original bare `except:` plus the
        # duplicated flag reset: the flag now clears on every exit path.
        for pkg in packages:
            for module in find_related_modules(pkg, related_name_re, ignore_exceptions):
                if module is not None:
                    modules.append(module)
    finally:
        _RACE_PROTECTION = False
    return modules
Autodiscover function follows the pattern used by Celery .
56,907
def find_related_modules(package, related_name_re='.+', ignore_exceptions=False):
    """Find matching modules using a package and a module name pattern.

    Deprecated; emits a DeprecationWarning on every call.
    """
    warnings.warn('find_related_modules has been deprecated.', DeprecationWarning)
    package_elements = package.rsplit(".", 1)
    try:
        # Import the (sub)package; only real packages (with __path__)
        # can contain related modules.
        if len(package_elements) == 2:
            pkg = __import__(package_elements[0], globals(), locals(), [package_elements[1]])
            pkg = getattr(pkg, package_elements[1])
        else:
            pkg = __import__(package_elements[0], globals(), locals(), [])
        # NOTE(review): pkg_path is unused beyond probing for the
        # AttributeError; presumably kept only as an existence check.
        pkg_path = pkg.__path__
    except AttributeError:
        return []
    p = re.compile(related_name_re)
    modules = []
    for name in find_modules(package, include_packages=True):
        # Match the pattern against the last dotted component only.
        if p.match(name.split('.')[-1]):
            try:
                modules.append(import_string(name, silent=ignore_exceptions))
            except Exception as e:
                if not ignore_exceptions:
                    raise e
    return modules
Find matching modules using a package and a module name pattern .
56,908
def import_related_module(package, pkg_path, related_name, ignore_exceptions=False):
    """Import module *related_name* from *package*, searching *pkg_path*.

    Returns the module attribute, or None when the module does not exist
    (or, with ignore_exceptions, when importing it fails).
    """
    try:
        # imp.find_module only probes for existence; ImportError = absent.
        imp.find_module(related_name, pkg_path)
    except ImportError:
        return
    try:
        return getattr(__import__('%s' % (package), globals(), locals(), [related_name]), related_name)
    except Exception as e:
        if ignore_exceptions:
            current_app.logger.exception('Can not import "{}" package'.format(package))
        else:
            raise e
Import module from given path .
56,909
def ansi(string, *args):
    """Prefix *string* with the given ANSI escape sequences, then reset.

    Each extra argument must itself be an ANSI escape string (validated
    against ANSI_PATTERN); a trailing colorama reset is always appended.

    Raises:
        ValueError: if any extra argument is not an ANSI escape string.
    """
    ansi = ''
    for arg in args:
        arg = str(arg)
        if not re.match(ANSI_PATTERN, arg):
            raise ValueError('Additional arguments must be ansi strings')
        ansi += arg
    return ansi + string + colorama.Style.RESET_ALL
Convenience function to chain multiple ColorWrappers to a string
56,910
def puts(*args, **kwargs):
    """Full-featured print supporting trimming and padding, for both
    files and ttys.

    Options popped from **kwargs:
        trim: trim each line to the terminal width, appending '...'.
        padding: pad character used to fill each line up to the width.
        stream: output stream (default sys.stdout).
    NOTE(review): nesting reconstructed from a flattened source line.
    """
    trim = kwargs.pop('trim', False)
    padding = kwargs.pop('padding', None)
    stream = kwargs.pop('stream', sys.stdout)
    indent = getattr(stream, 'indent', 0)
    args = [str(i) for i in args]

    def trimstr(ansi, width):
        # Copy ANSI escapes verbatim (zero visible width) while counting
        # printable characters up to *width*.
        string = ''
        size = 0
        i = 0
        while i < len(ansi):
            mobj = re.match(ANSI_PATTERN, ansi[i:])
            if mobj:
                string = string + mobj.group(0)
                i += len(mobj.group(0))
            else:
                size += 1
                if size > width:
                    break
                string = string + ansi[i]
                i += 1
        return (string, size)

    if not stream.isatty():
        # Non-tty targets get the ANSI escapes stripped entirely.
        for string in args:
            stream.write(re.sub(ANSI_PATTERN, '', string) + '\n')
    else:
        try:
            curses.setupterm()
        except:
            trim = False
            padding = None
        else:
            width = curses.tigetnum('cols') - indent
        for string in args:
            if trim or padding:
                trimmed, size = trimstr(string, width)
                if trim:
                    if len(trimmed) < len(string):
                        trimmed = trimstr(string, width - 3)[0] + colorama.Style.RESET_ALL + '...'
                    string = trimmed
                if padding:
                    string += padding * (width - size)
            stream.write(string + '\n')
Full feature printing function featuring trimming and padding for both files and ttys
56,911
def char_between(lower, upper, func_name):
    """Parser: consume the current char if it lies between *lower* and
    *upper* (inclusive).

    The predicate is registered under *func_name* via register_function.
    """
    function = register_function(func_name, lambda char: lower <= char <= upper)
    return char_on_predicate(function)
return current char and step if char is between lower and upper
56,912
def char_in(string, func_name):
    """Parser: consume the current char if it occurs in *string*.

    The predicate is registered under *func_name* via register_function.
    """
    function = register_function(func_name, lambda char: char in string)
    return char_on_predicate(function)
return current char and step if char is in string
56,913
def update_version(self, version, step=1):
    "Compute a new version and write it as a tag"
    # Each enabled component is bumped by *step*; an explicit
    # config.build_number overrides the incremented value afterwards.
    if self.config.patch:
        version.patch += step
    if self.config.minor:
        version.minor += step
    if self.config.major:
        version.major += step
    if self.config.build:
        version.build_number += step
    if self.config.build_number:
        version.build_number = self.config.build_number
    if self.config.dry_run:
        log.info('Not updating repo to version {0}, because of --dry-run'.format(version))
    else:
        version = self.call_plugin_function('set_version', version)
    return version
Compute a new version and write it as a tag
56,914
def _get_id ( self ) : ret = None row = self . row if row : ret = row [ "id" ] return ret
Getter because using the id property from within was not working
56,915
def time_pipeline(iterable, *steps):
    """Time the steps in a pipeline.

    Give it an iterable (or a zero-argument callable producing one) to
    test against, followed by the pipeline steps as individual functions.
    Prints one line per step with the time attributable to that step.
    """
    if callable(iterable):
        try:
            iter(iterable())
            callable_base = True
        except:
            raise TypeError('time_pipeline needs the first argument to be an iterable or a function that produces an iterable.')
    else:
        try:
            iter(iterable)
            callable_base = False
        except:
            raise TypeError('time_pipeline needs the first argument to be an iterable or a function that produces an iterable.')
    if not callable_base:
        # Snapshot so every timing round iterates the same data.
        iterable = tuple(iterable)
    durations = []
    results = []
    for i, _ in enumerate(steps):
        current_tasks = steps[:i + 1]
        duration = 0.0
        for t in range(100000):
            test_generator = iter(iterable()) if callable_base else iter(iterable)
            start = ts()
            for task in current_tasks:
                test_generator = task(test_generator)
            # NOTE(review): the last step is applied a second time here
            # (current_tasks[-1] was already applied in the loop above);
            # confirm whether draining `test_generator` alone was intended.
            for i in current_tasks[-1](test_generator):
                pass
            duration += ts() - start
        durations.append(duration)
        # First step keeps its full time; later steps get the delta.
        if len(durations) == 1:
            results.append(durations[0])
        else:
            results.append(durations[-1] - durations[-2])
    assert sum(results) > 0
    resultsum = sum(results)
    ratios = [i / resultsum for i in results]
    for i in range(len(ratios)):
        try:
            s = getsource(steps[i]).splitlines()[0].strip()
        except:
            s = repr(steps[i]).strip()
        print('step {} | {:2.4f}s | {}'.format(i + 1, durations[i], s))
This times the steps in a pipeline. Give it an iterable to test against followed by the steps of the pipeline separated into individual functions.
56,916
def runs_per_second(generator, seconds=3):
    """Profile a function or generator: iterations completed per second.

    *generator* may be an iterable/generator, or a zero-argument callable
    (wrapped via iter(callable, sentinel) so each call counts as one run).
    """
    assert isinstance(seconds, int), 'runs_per_second needs seconds to be an int, not {}'.format(repr(seconds))
    assert seconds > 0, 'runs_per_second needs seconds to be positive, not {}'.format(repr(seconds))
    if callable(generator) and not any(i in ('next', '__next__', '__iter__') for i in dir(generator)):
        try:
            output = generator()
        except:
            raise Exception('runs_per_second needs a working function that accepts no arguments')
        else:
            # Pick a sentinel the function will not return so iteration
            # keeps calling it until the clock runs out.
            generator = iter(generator, (1 if output is None else None))
            del output
    c = 0
    entire_test_time_used = False
    start = ts()
    end = start + seconds
    for _ in generator:
        if ts() > end:
            entire_test_time_used = True
            break
        else:
            c += 1
    duration = (ts()) - start
    return int(c / (seconds if entire_test_time_used else duration))
use this function as a profiler for both functions and generators to see how many iterations or cycles they can run per second
56,917
def fermion_avg(efermi, norm_hopping, func):
    """Calculate, for every slave, its average over the desired observable.

    *func* selects the observable: 'ekin' or 'ocupation' (zero-T Bethe
    lattice helpers).
    """
    if func == 'ekin':
        func = bethe_ekin_zeroT
    elif func == 'ocupation':
        func = bethe_filling_zeroT
    return np.asarray([func(ef, tz) for ef, tz in zip(efermi, norm_hopping)])
calculates for every slave its average over the desired observable
56,918
def spinflipandhop(slaves):
    """Calculate the interaction term of a spin flip and pair hopping.

    Builds per-slave lowering (Sdw) and raising (Sup) matrices and sums
    the spin-flip and pair-hopping terms over all orbital pairs.
    """
    Sdw = [csr_matrix(spin_gen(slaves, i, 0)) for i in range(slaves)]
    Sup = [mat.T for mat in Sdw]
    sfh = np.zeros_like(Sup[0])
    orbitals = slaves // 2
    for n in range(orbitals):
        for m in range(n + 1, orbitals):
            # Spin flip between orbitals n and m (both directions).
            sfh += Sup[2 * n] * Sdw[2 * n + 1] * Sup[2 * m + 1] * Sdw[2 * m]
            sfh += Sup[2 * n + 1] * Sdw[2 * n] * Sup[2 * m] * Sdw[2 * m + 1]
            # Pair hopping between orbitals n and m (both directions).
            sfh += Sup[2 * n] * Sup[2 * n + 1] * Sdw[2 * m] * Sdw[2 * m + 1]
            sfh += Sup[2 * m] * Sup[2 * m + 1] * Sdw[2 * n] * Sdw[2 * n + 1]
    return sfh
Calculates the interaction term of a spin flip and pair hopping
56,919
def spin_z_op(param, oper):
    """Generate the required Sz operators into *oper*, given the system
    parameter setup *param*.

    Populates 'Sz', 'Sz+1/2', 'sumSz2', 'sumSz-sp2' and 'sumSz-or2'.
    """
    slaves = param['slaves']
    oper['Sz'] = np.array([spin_z(slaves, spin) for spin in range(slaves)])
    oper['Sz+1/2'] = oper['Sz'] + 0.5 * np.eye(2 ** slaves)
    oper['sumSz2'] = oper['Sz'].sum(axis=0) ** 2
    # Regroup Sz by (orbital, spin) to build per-spin and per-orbital sums.
    Sz_mat_shape = oper['Sz'].reshape(param['orbitals'], 2, 2 ** slaves, 2 ** slaves)
    oper['sumSz-sp2'] = (Sz_mat_shape.sum(axis=1) ** 2).sum(axis=0)
    oper['sumSz-or2'] = (Sz_mat_shape.sum(axis=0) ** 2).sum(axis=0)
Generates the required Sz operators given the system parameter setup and the operator dictionary
56,920
def spin_gen_op(oper, gauge):
    """Generate the generic spin matrices for the system into *oper*."""
    slaves = len(gauge)
    oper['O'] = np.array([spin_gen(slaves, i, c) for i, c in enumerate(gauge)])
    oper['O_d'] = np.transpose(oper['O'], (0, 2, 1))
    oper['O_dO'] = np.einsum('...ij,...jk->...ik', oper['O_d'], oper['O'])
    oper['Sfliphop'] = spinflipandhop(slaves)
Generates the generic spin matrices for the system
56,921
def set_filling(self, populations):
    """Set the orbital energies relative to the free case for the desired
    local populations, then generate the operators for that configuration.
    """
    populations = np.asarray(populations)
    efermi = -bethe_find_crystalfield(populations, self.param['hopping'])
    self.param['populations'] = populations
    self.param['ekin'] = fermion_avg(efermi, self.param['hopping'], 'ekin')
    spin_gen_op(self.oper, estimate_gauge(populations))
Sets the orbital energies relative to the free case by setting the desired local populations on every orbital, then generates the necessary operators to respect such configuration
56,922
def reset(self, populations, lag, mu, u_int, j_coup, mean_f):
    """Reset the system to the last known state given by the inputs."""
    self.set_filling(populations)
    self.param['lambda'] = lag
    self.param['orbital_e'] = mu
    self.selfconsistency(u_int, j_coup, mean_f)
Resets the system into the last known state as given by the input values
56,923
def update_H(self, mean_field, l):
    """Update the spin Hamiltonian and recalculate its eigenbasis.

    On diagonalization failure the offending Hamiltonian is dumped to
    'errorhamil' before the exception is re-raised.
    """
    self.H_s = self.spin_hamiltonian(mean_field, l)
    try:
        self.eig_energies, self.eig_states = diagonalize(self.H_s)
    except np.linalg.linalg.LinAlgError:
        np.savez('errorhamil', H=self.H_s, fiel=mean_field, lamb=l)
        raise
    except ValueError:
        np.savez('errorhamil', H=self.H_s, fiel=mean_field, lamb=l)
        print(mean_field, l)
        raise
Updates the spin hamiltonian and recalculates its eigenbasis
56,924
def spin_hamiltonian(self, h, l):
    """Construct the single-site spin Hamiltonian.

    h: pair of mean-field vectors contracted with O and O_d;
    l: Lagrange multipliers contracted with Sz+1/2; Hint is added last.
    """
    h_spin = np.einsum('i,ijk', h[1], self.oper['O'])
    h_spin += np.einsum('i,ijk', h[0], self.oper['O_d'])
    h_spin += np.einsum('i,ijk', l, self.oper['Sz+1/2'])
    h_spin += self.oper['Hint']
    return h_spin
Constructs the single site spin Hamiltonian
56,925
def inter_spin_hamiltonian(self, u_int, J_coup):
    """Calculate the interaction Hamiltonian.

    The Hund coupling J_coup is given as a fraction of the Coulomb
    interaction u_int.
    """
    hund = J_coup * u_int
    density_term = (u_int - 2 * hund) / 2. * self.oper['sumSz2']
    spin_term = hund * self.oper['sumSz-sp2']
    orbital_term = hund / 2. * self.oper['sumSz-or2']
    flip_hop_term = hund * self.oper['Sfliphop']
    return density_term + spin_term - orbital_term - flip_hop_term
Calculates the interaction Hamiltonian. The Hund coupling is a fraction of the Coulomb interaction
56,926
def expected(self, observable, beta=1e5):
    """Wrapper over expected_value, fixing the current eigenbasis."""
    return expected_value(observable, self.eig_energies, self.eig_states, beta)
Wrapper to the expected_value function to fix the eigenbasis
56,927
def quasiparticle_weight(self):
    """Calculate the quasiparticle weight, <O>**2 per slave."""
    return np.array([self.expected(op) ** 2 for op in self.oper['O']])
Calculates quasiparticle weight
56,928
def mean_field(self):
    """Calculate the mean field from <O> and <O_d>, scaled by ekin.

    Expectation values with magnitude below 1e-10 are zeroed so numerical
    noise does not feed back into the self-consistency loop.
    """
    mean_field = []
    for sp_oper in [self.oper['O'], self.oper['O_d']]:
        avgO = np.array([self.expected(op) for op in sp_oper])
        avgO[abs(avgO) < 1e-10] = 0.
        mean_field.append(avgO * self.param['ekin'])
    return np.array(mean_field)
Calculates mean field
56,929
def selfconsistency(self, u_int, J_coup, mean_field_prev=None):
    """Iterate the Hamiltonian to reach the stable self-consistent one.

    Returns the history (hlog) of mean fields over the iterations.
    NOTE(review): nesting reconstructed from a flattened source line.
    """
    if mean_field_prev is None:
        mean_field_prev = np.array([self.param['ekin']] * 2)
    hlog = [mean_field_prev]
    self.oper['Hint'] = self.inter_spin_hamiltonian(u_int, J_coup)
    converging = True
    half_fill = (self.param['populations'] == 0.5).all()
    while converging:
        if half_fill:
            # At half filling the Lagrange multipliers stay fixed.
            self.update_H(hlog[-1], self.param['lambda'])
        else:
            res = root(self.restriction, self.param['lambda'], (hlog[-1]))
            if not res.success:
                # Damp the multipliers and fields and retry from there.
                res.x = res.x * 0.5 + 0.5 * self.param['lambda']
                self.update_H(self.mean_field() * 0.5 + 0.5 * hlog[-1], res.x)
                print('fail', self.param['populations'][3:5])
            # NOTE(review): placement of this early return (all Z ~ 0,
            # i.e. insulating solution) is reconstructed — confirm whether
            # it belongs only to the failure branch.
            if (self.quasiparticle_weight() < 0.001).all():
                return hlog
            self.param['lambda'] = res.x
        hlog.append(self.mean_field())
        converging = (abs(hlog[-1] - hlog[-2]) > self.param['tol']).all() or (abs(self.restriction(self.param['lambda'], hlog[-1])) > self.param['tol']).all()
    return hlog
Iterates over the hamiltonian to get the stable selfcosistent one
56,930
def restriction(self, lam, mean_field):
    """Lagrange multiplier constraint in lattice slave-spin.

    Returns the deviation of <Sz+1/2> from the target populations.
    """
    self.update_H(mean_field, lam)
    restric = np.array([self.expected(op) - n for op, n in zip(self.oper['Sz+1/2'], self.param['populations'])])
    return restric
Lagrange multiplier in lattice slave spin
56,931
def run_cmd(cmd, out=os.path.devnull, err=os.path.devnull):
    """Run an external command, writing stdout/stderr to files.

    Raises:
        RuntimeError: on non-zero exit, with the captured stderr included.
    """
    logger.debug(' '.join(cmd))
    with open(out, 'w') as hout:
        proc = subprocess.Popen(cmd, stdout=hout, stderr=subprocess.PIPE)
        err_msg = proc.communicate()[1].decode()
    with open(err, 'w') as herr:
        herr.write(str(err_msg))
    msg = '({}) {}'.format(' '.join(cmd), err_msg)
    if proc.returncode != 0:
        logger.error(msg)
        raise RuntimeError(msg)
Runs an external command
56,932
def run_cmd_if_file_missing(cmd, fname, out=os.path.devnull, err=os.path.devnull):
    """Run *cmd* only when *fname* is None or absent.

    Returns:
        bool: True when the command was run, False otherwise.
    """
    if fname is None or not os.path.exists(fname):
        run_cmd(cmd, out, err)
        return True
    else:
        return False
Runs an external command if file is absent .
56,933
def merge_files(sources, destination):
    """Copy the content of multiple files into a single file.

    Missing source files are skipped with a warning.
    """
    with open(destination, 'w') as hout:
        for f in sources:
            if os.path.exists(f):
                with open(f) as hin:
                    shutil.copyfileobj(hin, hout)
            else:
                logger.warning('File is missing: {}'.format(f))
Copy content of multiple files into a single file .
56,934
def add_path(self, path):
    """Add *path* to the set of searchable paths, if it exists.

    Returns:
        The path when it was added, None when it does not exist.
    """
    if not os.path.exists(path):
        return None
    self.paths.add(path)
    return path
Adds a new path to the list of searchable paths
56,935
def get(self, name):
    """Look for *name* in the registered paths.

    Returns:
        The full path of the first match, or None when not found.
    """
    for d in self.paths:
        if os.path.exists(d) and name in os.listdir(d):
            return os.path.join(d, name)
    logger.debug('File not found {}'.format(name))
    return None
Looks for a name in the path .
56,936
def __handle_request(self, request, *args, **kw):
    """Intercept the request and response.

    Pipeline: authenticate -> check permission -> resolve handler ->
    read and clean input -> execute -> post-process the response.
    """
    self._authenticate(request)
    self._check_permission(request)
    method = self._get_method(request)
    data = self._get_input_data(request)
    data = self._clean_input_data(data, request)
    response = self._exec_method(method, request, data, *args, **kw)
    return self._process_response(response, request)
Intercept the request and response .
56,937
def _exec_method ( self , method , request , data , * args , ** kw ) : if self . _is_data_method ( request ) : return method ( data , request , * args , ** kw ) else : return method ( request , * args , ** kw )
Execute appropriate request handler .
56,938
def _format_response(self, request, response):
    """Format the response using the appropriate datamapper.

    A 0 status code from the mapper means "unset" and is promoted to 200.
    """
    res = datamapper.format(request, response, self)
    # Fix: compare with `==`, not `is` — identity against small ints is a
    # CPython caching artifact and must not be relied on.
    if res.status_code == 0:
        res.status_code = 200
    self._add_resposne_headers(res, response)
    return res
Format response using appropriate datamapper .
56,939
def _add_resposne_headers ( self , django_response , devil_response ) : try : headers = devil_response . headers except AttributeError : pass else : for k , v in headers . items ( ) : django_response [ k ] = v return django_response
Add response headers .
56,940
def _get_input_data ( self , request ) : if not self . _is_data_method ( request ) : return None content = [ row for row in request . read ( ) ] content = '' . join ( content ) if content else None return self . _parse_input_data ( content , request ) if content else None
If there is data parse it otherwise return None .
56,941
def _clean_input_data(self, data, request):
    """Clean input data: validate and optionally build domain objects.

    Non-data methods pass through untouched.
    (Python 2 `except X, e` syntax preserved from the original.)
    """
    if not self._is_data_method(request):
        return data
    try:
        if self.representation:
            # Validate against the representation when one is defined.
            self._validate_input_data(data, request)
        if self.factory:
            return self._create_object(data, request)
        else:
            return data
    except ValidationError, exc:
        return self._input_validation_failed(exc, data, request)
Clean input data .
56,942
def _get_input_validator ( self , request ) : method = request . method . upper ( ) if method != 'POST' : return self . representation elif self . post_representation : return self . post_representation else : return self . representation
Return appropriate input validator .
56,943
def _validate_input_data(self, data, request):
    """Validate input data, item by item for lists/tuples.

    NOTE(review): `map` here relies on eager Python 2 semantics; under
    Python 3 the returned map object is lazy and validation would not
    actually run unless consumed. Confirm the target interpreter.
    """
    validator = self._get_input_validator(request)
    if isinstance(data, (list, tuple)):
        return map(validator.validate, data)
    else:
        return validator.validate(data)
Validate input data .
56,944
def _validate_output_data(self, original_res, serialized_res, formatted_res, request):
    """Validate the response data against the representation, if any.

    (Python 2 `except X, e` syntax; `map` is eager only under Python 2.)
    """
    validator = self.representation
    if not validator:
        return
    try:
        if isinstance(serialized_res, (list, tuple)):
            map(validator.validate, serialized_res)
        else:
            validator.validate(serialized_res)
    except ValidationError, exc:
        self._output_validation_failed(exc, serialized_res, request)
Validate the response data .
56,945
def _create_object(self, data, request):
    """Create a python object (or list of objects) from the given data.

    POST requests use the dedicated post_factory when one is defined.
    """
    if request.method.upper() == 'POST' and self.post_factory:
        fac_func = self.post_factory.create
    else:
        fac_func = self.factory.create
    # NOTE(review): Python 2 eager `map`; lazy under Python 3.
    if isinstance(data, (list, tuple)):
        return map(fac_func, data)
    else:
        return fac_func(data)
Create a python object from the given data .
56,946
def _serialize_object(self, response_data, request):
    """Create a plain python datatype from the given python object(s)."""
    if not self.factory:
        return response_data
    # NOTE(review): Python 2 eager `map`; lazy under Python 3.
    if isinstance(response_data, (list, tuple)):
        return map(lambda item: self.factory.serialize(item, request), response_data)
    else:
        return self.factory.serialize(response_data, request)
Create a python datatype from the given python object .
56,947
def _get_unknown_error_response(self, request, exc):
    """Generate an HttpResponse for unknown (non-HTTP) exceptions.

    In DEBUG mode the exception is re-raised for the framework error page.
    """
    logging.getLogger('devil').error('while doing %s on %s with [%s], devil caught: %s' % (request.method, request.path_info, str(request.GET), str(exc)), exc_info=True)
    if settings.DEBUG:
        raise
    else:
        # NOTE(review): codes.INTERNAL_SERVER_ERROR[1] — presumably index
        # 1 is the numeric status in the codes table; confirm.
        return HttpResponse(status=codes.INTERNAL_SERVER_ERROR[1])
Generate HttpResponse for unknown exceptions .
56,948
def _get_error_response(self, exc):
    """Generate an HttpResponse based on the HttpStatusCodeError."""
    if exc.has_code(codes.UNAUTHORIZED):
        return self._get_auth_challenge(exc)
    else:
        # Server errors get a stack trace in the log; others do not.
        if exc.has_code(codes.INTERNAL_SERVER_ERROR):
            logging.getLogger('devil').error('devil caught http error: ' + str(exc), exc_info=True)
        else:
            logging.getLogger('devil').error('devil caught http error: ' + str(exc))
        content = exc.content or ''
        return HttpResponse(content=content, status=exc.get_code_num())
Generate HttpResponse based on the HttpStatusCodeError .
56,949
def _get_auth_challenge(self, exc):
    """Return an HttpResponse carrying a Basic-auth challenge header."""
    response = HttpResponse(content=exc.content, status=exc.get_code_num())
    response['WWW-Authenticate'] = 'Basic realm="%s"' % REALM
    return response
Returns HttpResponse for the client .
56,950
def _get_method(self, request):
    """Figure out the requested method and return the callable handler.

    Raises:
        errors.MethodNotAllowed: when no matching handler exists on self.
    """
    methodname = request.method.lower()
    method = getattr(self, methodname, None)
    if not method or not callable(method):
        raise errors.MethodNotAllowed()
    return method
Figure out the requested method and return the callable .
56,951
def _authenticate(self, request):
    """Perform authentication, honouring anonymous access when allowed.

    (Python 2 `except X, e` syntax; `is_authenticated()` called as a
    method as in older Django versions.)
    """
    def ensure_user_obj():
        # Guarantee request.user exists even without auth middleware.
        try:
            if request.user:
                return
        except AttributeError:
            pass
        request.user = AnonymousUser()

    def anonymous_access(exc_obj):
        # Already authenticated: nothing to do; otherwise allow an
        # anonymous user only when the resource permits it.
        if request.user and request.user.is_authenticated():
            pass
        elif self.allow_anonymous:
            request.user = AnonymousUser()
        else:
            raise exc_obj

    ensure_user_obj()
    if self.authentication:
        try:
            self.authentication.authenticate(request)
        except errors.Unauthorized, exc:
            anonymous_access(exc)
    else:
        anonymous_access(errors.Forbidden())
Perform authentication .
56,952
def print_item_callback(item):
    """Print a single item received through the &listen stream."""
    cmd = item.get('cmd', '')
    item_id = item.get('id', '')
    data = item.get('data', '')
    print('&listen [{}, {}={}]'.format(cmd, item_id, data))
Print an item callback used by &listen .
56,953
def main():
    """Quick manual test for the QSUsb class."""
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('--url', help='QSUSB URL [http://127.0.0.1:2020]', default='http://127.0.0.1:2020')
    parser.add_argument('--file', help='a test file from /&devices')
    parser.add_argument('--test_ids', help='List of test IDs', default='@0c2700,@0ac2f0')
    args = parser.parse_args()

    if args.file:
        # Offline mode: replay a captured /&devices JSON dump.
        with open(args.file) as data_file:
            data = json.load(data_file)
        qsusb = pyqwikswitch.QSDevices(print_devices_change_callback, print_devices_change_callback)
        print_bad_data(data)
        qsusb.set_qs_values(data)
        return

    print('Execute a basic test on server: {}\n'.format(args.url))

    def qs_to_value(key, new):
        # Fix: the source contained a truncated string literal here
        # ('print ( " . format ( key , new ) )'); reconstructed as a
        # simple key=value trace — confirm against upstream history.
        print(" {}={}".format(key, new))

    qsusb = QSUsb(args.url, 1, qs_to_value)
    print('Version: ' + qsusb.version())
    qsusb.set_qs_values()
    qsusb.listen(print_item_callback, timeout=5)
    print("Started listening")
    try:
        if args.test_ids and len(args.test_ids) > 0:
            test_devices_set(qsusb.devices, args.test_ids.split(','))
        print("\n\nListening for 60 seconds (test buttons now)\n")
        sleep(60)
    except KeyboardInterrupt:
        pass
    finally:
        qsusb.stop()
        print("Stopped listening")
Quick test for QSUsb class .
56,954
def get(self):
    """Return the currently valid value among the stored futures.

    Each entry is a (value, expiry, not_before) tuple. Expired entries
    are dropped, not-yet-active ones skipped; the newest active value is
    returned and older active entries are discarded.

    Raises:
        ValueError: when no entry is currently valid.
    """
    with self._lock:
        now = datetime.now()
        active = []
        # NOTE(review): popping by index while enumerating (and again
        # below over saved indices) shifts later indices; confirm this
        # is safe for the entry counts actually used here.
        for i, vef in enumerate(self.futures):
            if (vef[1] or datetime.max) <= now:
                self.futures.pop(i)
                continue
            elif (vef[2] or datetime.min) >= now:
                continue
            else:
                active.append(i)
        if active:
            value, _e, _f = self.futures[active[-1]]
            for i in active[:-1]:
                self.futures.pop(i)
            return value
        raise ValueError("dicttime: no current value, however future has (%d) values" % len(self.futures))
Called to get the asset values and if it is valid
56,955
def add_node(node, **kwds):
    """add_node from Sphinx: register a docutils node class and attach
    visit/depart functions to the html and latex translators.

    (Python 2: uses dict.iteritems.)
    """
    nodes._add_node_class_names([node.__name__])
    for key, val in kwds.iteritems():
        try:
            visit, depart = val
        except ValueError:
            raise ValueError('Value for key %r must be a '
                             '(visit, depart) function tuple' % key)
        # Only html and latex writers are supported; others are skipped.
        if key == 'html':
            from docutils.writers.html4css1 import HTMLTranslator as translator
        elif key == 'latex':
            from docutils.writers.latex2e import LaTeXTranslator as translator
        else:
            continue
        setattr(translator, 'visit_' + node.__name__, visit)
        if depart:
            setattr(translator, 'depart_' + node.__name__, depart)
add_node from Sphinx
56,956
def retrieve(self, *args, **kwargs):
    """Retrieve the permission function for the provided things.

    Raises:
        KeyError: when no function is registered for the lookup key.
    """
    lookup, key = self._lookup(*args, **kwargs)
    return lookup[key]
Retrieve the permission function for the provided things.
56,957
def has_rabf_motif(self):
    """Check whether the sequence has enough RabF motifs within the G domain.

    Motifs fully inside a G-domain region are aligned against '12345'
    (the canonical RabF motif numbers); at least motif_number matches
    are required.
    """
    if self.rabf_motifs:
        for gdomain in self.gdomain_regions:
            beg, end = map(int, gdomain.split('-'))
            motifs = [x for x in self.rabf_motifs if x[1] >= beg and x[2] <= end]
            if motifs:
                matches = int(pairwise2.align.globalxx('12345', ''.join(str(x[0]) for x in motifs))[0][2])
                if matches >= self.motif_number:
                    return True
    return False
Checks if the sequence has enough RabF motifs within the G domain
56,958
def summarize(self):
    """Return the G protein annotation summary in a text format."""
    data = [['Sequence ID', self.seqrecord.id],
            ['G domain', ' '.join(self.gdomain_regions) if self.gdomain_regions else None],
            ['E-value vs rab db', self.evalue_bh_rabs],
            ['E-value vs non-rab db', self.evalue_bh_non_rabs],
            ['RabF motifs', ' '.join(map(str, self.rabf_motifs)) if self.rabf_motifs else None],
            ['Is Rab?', self.is_rab()]]
    summary = ''
    for name, value in data:
        summary += '{:25s}{}\n'.format(name, value)
    # Subfamily scores only make sense for predicted Rabs.
    if self.is_rab():
        summary += '{:25s}{}\n'.format('Top 5 subfamilies', ', '.join('{:s} ({:.2g})'.format(name, score) for name, score in self.rab_subfamily_top5))
    return summary
G protein annotation summary in a text format
56,959
def write(self):
    """Write sequences predicted to be Rabs to a '.phase2' fasta file."""
    rabs = [x.seqrecord for x in self.gproteins.values() if x.is_rab()]
    return SeqIO.write(rabs, self.tmpfname + '.phase2', 'fasta')
Write sequences predicted to be Rabs as a fasta file .
56,960
def check(self):
    """Check that the data and third-party tools needed to run the
    classification are available.

    Raises:
        RuntimeError: when the superfamily data dir or a tool is missing.
    """
    pathfinder = Pathfinder(True)
    if pathfinder.add_path(pathfinder['superfamily']) is None:
        raise RuntimeError("'superfamily' data directory is missing")
    for tool in ('hmmscan', 'phmmer', 'mast', 'blastp', 'ass3.pl', 'hmmscan.pl'):
        if not pathfinder.exists(tool):
            raise RuntimeError('Dependency {} is missing'.format(tool))
Check if data and third party tools necessary to run the classification are available
56,961
def filter_(*permissions, **kwargs):
    """Construct a clause to filter all bearers or targets for a given
    bearer or target.

    kwargs: bearer (required), target, and optionally one of
    query/session to seed the SQLAlchemy query.
    """
    bearer = kwargs['bearer']
    target = kwargs.get('target')
    bearer_cls = type_for(bearer)
    if 'query' in kwargs:
        query = kwargs['query']
    elif 'session' in kwargs:
        query = kwargs['session'].query(target)
    else:
        query = object_session(bearer).query(target)
    getter = functools.partial(registry.retrieve, bearer=bearer_cls, target=target)
    try:
        if len(permissions):
            rules = {getter(permission=x): x for x in permissions}
        else:
            rules = {getter(): None}
    except KeyError:
        # An unregistered permission matches nothing.
        return query.filter(sql.false())
    # Fold each rule into the query in turn.
    reducer = lambda q, r: r[0](permission=r[1], query=q, bearer=bearer)
    return reduce(reducer, six.iteritems(rules), query)
Constructs a clause to filter all bearers or targets for a given bearer or target.
56,962
def create_app(application, request_class=Request):
    """Create a WSGI application out of the given Minion app.

    Arguments:
        application: a Minion app.
        request_class: class to build request objects out of WSGI environs.
    """
    def wsgi(environ, start_response):
        response = application.serve(request=request_class(environ), path=environ.get("PATH_INFO", ""), )
        start_response(response.status, [(name, b",".join(values)) for name, values in response.headers.canonicalized()], )
        return [response.content]
    return wsgi
Create a WSGI application out of the given Minion app .
56,963
def get_documentation(self, request, *args, **kw):
    """Generate the resource documentation as a dict."""
    ret = dict()
    ret['resource'] = self.name()
    ret['urls'] = self._get_url_doc()
    ret['description'] = self.__doc__
    ret['representation'] = self._get_representation_doc()
    ret['methods'] = self._get_method_doc()
    return ret
Generate the documentation .
56,964
def _serialize_object(self, response_data, request):
    """Override: documentation responses are returned unserialized."""
    if self._is_doc_request(request):
        return response_data
    else:
        return super(DocumentedResource, self)._serialize_object(response_data, request)
Override to not serialize doc responses .
56,965
def _validate_output_data(self, original_res, serialized_res, formatted_res, request):
    """Override: documentation responses are not validated."""
    if self._is_doc_request(request):
        return
    else:
        return super(DocumentedResource, self)._validate_output_data(original_res, serialized_res, formatted_res, request)
Override to not validate doc output .
56,966
def _get_method(self, request):
    """Override: documentation requests are routed to get_documentation."""
    if self._is_doc_request(request):
        return self.get_documentation
    else:
        return super(DocumentedResource, self)._get_method(request)
Override to check if this is a documentation request .
56,967
def _get_representation_doc ( self ) : if not self . representation : return 'N/A' fields = { } for name , field in self . representation . fields . items ( ) : fields [ name ] = self . _get_field_doc ( field ) return fields
Return documentation for the representation of the resource .
56,968
def _get_field_doc ( self , field ) : fieldspec = dict ( ) fieldspec [ 'type' ] = field . __class__ . __name__ fieldspec [ 'required' ] = field . required fieldspec [ 'validators' ] = [ { validator . __class__ . __name__ : validator . __dict__ } for validator in field . validators ] return fieldspec
Return documentation for a field in the representation .
56,969
def _get_url_doc(self):
    """Return a list of URL patterns that map to this resource."""
    resolver = get_resolver(None)
    possibilities = resolver.reverse_dict.getlist(self)
    urls = [possibility[0] for possibility in possibilities]
    return urls
Return a list of URLs that map to this resource .
56,970
def _get_method_doc ( self ) : ret = { } for method_name in self . methods : method = getattr ( self , method_name , None ) if method : ret [ method_name ] = method . __doc__ return ret
Return method documentations .
56,971
def create_process(self, command, shell=True, stdout=None, stderr=None, env=None):
    """Execute a process via subprocess.Popen, setting the backend's DISPLAY.

    A copy of os.environ is used when *env* is not supplied, so the
    caller's environment is never mutated.
    """
    env = env if env is not None else dict(os.environ)
    env['DISPLAY'] = self.display
    return subprocess.Popen(command, shell=shell, stdout=stdout, stderr=stderr, env=env)
Execute a process using subprocess . Popen setting the backend s DISPLAY
56,972
def pause(self, instance_id, keep_provisioned=True):
    """Shut down the instance without destroying it.

    With *keep_provisioned* the role stays allocated ('Stopped'); otherwise it
    is deallocated ('StoppedDeallocated').  Errors are logged and re-raised.
    """
    try:
        if self._paused:
            log.debug("node %s is already paused", instance_id)
            return
        self._paused = True
        action = 'Stopped' if keep_provisioned else 'StoppedDeallocated'
        outcome = self._subscription._sms.shutdown_role(
            service_name=self._cloud_service._name,
            deployment_name=self._cloud_service._name,
            role_name=self._qualified_name,
            post_shutdown_action=action)
        self._subscription._wait_result(outcome)
    except Exception as exc:
        log.error("error pausing instance %s: %s", instance_id, exc)
        raise
    log.debug('paused instance(instance_id=%s)', instance_id)
shuts down the instance without destroying it .
56,973
def restart(self, instance_id):
    """Restart a previously paused instance.

    No-op when the instance is not paused.  Errors are logged and re-raised.
    """
    try:
        if not self._paused:
            log.debug("node %s is not paused, can't restart", instance_id)
            return
        self._paused = False
        outcome = self._subscription._sms.start_role(
            service_name=self._cloud_service._name,
            deployment_name=self._cloud_service._name,
            role_name=instance_id)
        self._subscription._wait_result(outcome)
    except Exception as exc:
        log.error('error restarting instance %s: %s', instance_id, exc)
        raise
    log.debug('restarted instance(instance_id=%s)', instance_id)
restarts a paused instance .
56,974
def _save_or_update(self):
    """Persist the provider configuration and subscriptions to the storage path.

    Raises Exception when the storage path or base name is not configured.
    """
    with self._resource_lock:
        config = self._config
        if not config or not config._storage_path:
            raise Exception("self._config._storage path is undefined")
        if not config._base_name:
            raise Exception("self._config._base_name is undefined")
        if not os.path.exists(config._storage_path):
            os.makedirs(config._storage_path)
        # pickle both objects, in order, into the same file
        with open(self._get_cloud_provider_storage_path(), 'wb') as storage:
            pickle.dump(config, storage, pickle.HIGHEST_PROTOCOL)
            pickle.dump(self._subscriptions, storage, pickle.HIGHEST_PROTOCOL)
Save or update the private state needed by the cloud provider .
56,975
def get_path(root, path, default=_UNSET):
    """Retrieve a value from a nested object via a tuple (or dotted string)
    representing the lookup path.

    Each path segment indexes the current object; a string segment that fails
    with TypeError is retried as an integer (for sequence indices).  When the
    path cannot be traversed, *default* is returned if given, otherwise a
    PathAccessError is raised.
    """
    # NOTE: was `isinstance(path, basestring)`, which raises NameError on
    # Python 3; `str` preserves behavior on py3 (py2 unicode paths would need six).
    if isinstance(path, str):
        path = path.split('.')
    cur = root
    try:
        for seg in path:
            try:
                cur = cur[seg]
            except (KeyError, IndexError) as exc:
                raise PathAccessError(exc, seg, path)
            except TypeError as exc:
                # non-subscriptable or wrong key type: retry with an int index
                try:
                    seg = int(seg)
                    cur = cur[seg]
                except (ValueError, KeyError, IndexError, TypeError):
                    if not is_iterable(cur):
                        exc = TypeError('%r object is not indexable'
                                        % type(cur).__name__)
                    raise PathAccessError(exc, seg, path)
    except PathAccessError:
        if default is _UNSET:
            raise
        return default
    return cur
Retrieve a value from a nested object via a tuple representing the lookup path .
56,976
def __query(p, k, v, accepted_keys=None, required_values=None, path=None, exact=True):
    # Predicate given to the visit method: return True when the node at path
    # *p* with key *k* and value *v* satisfies the key/value/path constraints.
    # With exact=False, matching falls back to substring containment.
    def as_values_iterable(v):
        # Normalize *v* so its values can be iterated uniformly:
        # dicts yield their values, a bare string counts as one value.
        if isinstance(v, dict):
            return v.values()
        elif isinstance(v, six.string_types):
            return [v]
        else:
            return v
    # Path filter: when a specific path is requested, reject all others.
    if path and path != p:
        return False
    if accepted_keys:
        if isinstance(accepted_keys, six.string_types):
            accepted_keys = [accepted_keys]
        # k must equal an accepted key, or (inexact mode) contain one as a substring.
        if len([akey for akey in accepted_keys
                if akey == k or (not exact and akey in k)]) == 0:
            return False
    if required_values:
        if isinstance(required_values, six.string_types):
            required_values = [required_values]
        # Every required term must match at least one of the node's values
        # (equality, or containment in inexact mode).
        if len(required_values) > len([term for term in required_values
                                       for nv in as_values_iterable(v)
                                       if term == nv or (not exact and term in nv)]):
            return False
    return True
Query function given to visit method
56,977
def _get_image_url(self, image_id):
    """Look up the selfLink URL for *image_id* in this project.

    Only finds images uploaded by the user; images provided by Google will
    not be found.  Raises ImageError when no matching image exists.
    """
    gce = self._connect()
    # (renamed from `filter`, which shadowed the builtin)
    name_filter = "name eq %s" % image_id
    request = gce.images().list(project=self._project_id, filter=name_filter)
    response = self._wait_until_done(self._execute_request(request))
    # Guard against both a missing "items" key and an empty items list; the
    # original indexed items[0] directly and could raise IndexError instead
    # of the intended ImageError.
    items = response.get("items") or []
    image_url = items[0].get("selfLink") if items else None
    if image_url:
        return image_url
    raise ImageError("Could not find given image id `%s`" % image_id)
Gets the URL for the specified image. Unfortunately this only works for images uploaded by the user; the images provided by Google will not be found.
56,978
def execute(self):
    """Load the cluster and build a GC3Pie configuration snippet.

    The snippet is appended to the file given by the `append` parameter when
    set, otherwise printed to stdout.  A missing/misconfigured cluster is
    logged and aborts the command.
    """
    creator = make_creator(self.params.config, storage_path=self.params.storage)
    cluster_name = self.params.cluster
    try:
        cluster = creator.load_cluster(cluster_name)
    except (ClusterNotFound, ConfigurationError) as ex:
        log.error("Listing nodes from cluster %s: %s\n" % (cluster_name, ex))
        return
    from elasticluster.gc3pie_config import create_gc3pie_config_snippet
    if self.params.append:
        path = os.path.expanduser(self.params.append)
        try:
            # context manager guarantees the file is closed even if the
            # write fails (the original leaked the handle in that case)
            with open(path, 'a') as fd:
                fd.write(create_gc3pie_config_snippet(cluster))
        except IOError as ex:
            log.error("Unable to write configuration to file %s: %s",
                      path, ex)
    else:
        print(create_gc3pie_config_snippet(cluster))
Load the cluster and build a GC3Pie configuration snippet .
56,979
def write_xml(self, outfile, encoding="UTF-8"):
    """Write the feed's XML to *outfile*.

    When any item carries a truthy media_* attribute, declare the Media RSS
    namespace and stamp the generator name before delegating to the parent.
    """
    uses_media = any(key
                     for item in self.items
                     for key in vars(item)
                     if key.startswith('media_') and getattr(item, key))
    if uses_media:
        self.rss_attrs["xmlns:media"] = "http://search.yahoo.com/mrss/"
        self.generator = _generator_name
    super(MediaRSS2, self).write_xml(outfile, encoding)
Write the Media RSS Feed s XML representation to the given file .
56,980
def _add_attribute(self, name, value, allowed_values=None):
    """Record attribute *name* on the element.

    Falsy values and the literal 'none' are ignored; ints/bools are
    stringified; raises TypeError when *value* is outside *allowed_values*.
    """
    if not value or value == 'none':
        return
    if isinstance(value, (int, bool)):
        value = str(value)
    if allowed_values and value not in allowed_values:
        raise TypeError("Attribute '" + name + "' must be one of "
                        + str(allowed_values) + " but is '" + str(value) + "'")
    self.element_attrs[name] = value
Add an attribute to the MediaContent element .
56,981
def check_complicance(self):
    """Check compliance with Media RSS Specification, Version 1.5.1.

    (Method name is misspelled but kept for backward compatibility.)

    Raises AttributeError when media_* attributes are used without one of the
    required container elements, or when a media_content without a url is not
    accompanied by a media_player; raises NotImplementedError for media_group.
    """
    uses_media = any(ma for ma in vars(self)
                     if ma.startswith('media_') and getattr(self, ma))
    if (uses_media
            and not self.media_group and not self.media_content
            and not self.media_player and not self.media_peerLink
            and not self.media_location):
        raise AttributeError(
            "Using media elements requires the specification of at least "
            "one of the following elements: 'media_group', "
            "'media_content', 'media_player', 'media_peerLink' or "
            "'media_location'.")
    if not self.media_player:
        if self.media_content:
            # Normalize to a list so single and multiple media_content are
            # checked identically.  The original raised a bare KeyError for a
            # single media_content missing the 'url' key; raise the intended
            # AttributeError instead, and also treat an empty url as missing.
            contents = (self.media_content
                        if isinstance(self.media_content, list)
                        else [self.media_content])
            if any('url' not in mc.element_attrs or not mc.element_attrs['url']
                   for mc in contents):
                raise AttributeError(
                    "MediaRSSItems require a media_player attribute "
                    "if a media_content has no url set.")
        elif self.media_group:
            raise NotImplementedError(
                "MediaRSSItem: media_group check not implemented yet.")
Check compliance with the Media RSS Specification, Version 1.5.1.
56,982
def publish_extensions(self, handler):
    """Emit this item's media:* child elements as XML via *handler*."""
    contents = (self.media_content
                if isinstance(self.media_content, list)
                else [self.media_content])
    for mc_element in contents:
        PyRSS2Gen._opt_element(handler, "media:content", mc_element)
    if hasattr(self, 'media_title'):
        PyRSS2Gen._opt_element(handler, "media:title", self.media_title)
    if hasattr(self, 'media_text'):
        PyRSS2Gen._opt_element(handler, "media:text", self.media_text)
Publish the Media RSS Feed elements as XML .
56,983
def get_conversations(self):
    """Return a Conversation object for each raw entry in self.data['data']."""
    return [Conversation(raw) for raw in self.data["data"]]
Returns list of Conversation objects
56,984
def _accumulate(iterable, func=(lambda a, b: a + b)):
    """Yield running totals of *iterable*, combined pairwise with *func*.

    Yields nothing for an empty iterable (mirrors itertools.accumulate).
    """
    it = iter(iterable)
    for total in it:
        break
    else:
        # empty iterable: nothing to accumulate
        return
    yield total
    for element in it:
        total = func(total, element)
        yield total
Return running totals
56,985
def add_methods(methods_to_add):
    """Bulk-register method specs on Generator, wrapping failures with context."""
    for spec in methods_to_add:
        try:
            Generator.add_method(*spec)
        except Exception as ex:
            raise Exception('issue adding {} - {}'.format(repr(spec), ex))
use this to bulk add new methods to Generator
56,986
def dirsize_get(l_filesWithoutPath, **kwargs):
    """Sample callback that determines a directory's total size.

    :param l_filesWithoutPath: file names relative to kwargs['path']
    :param kwargs: 'path' gives the directory containing the files
    :return: dict with status, raw byte count, and human-readable size
    """
    str_path = kwargs.get('path', "")
    size = 0
    for f in l_filesWithoutPath:
        str_f = '%s/%s' % (str_path, f)
        # symlinks are skipped so a link target is not double-counted
        if not os.path.islink(str_f):
            try:
                size += os.path.getsize(str_f)
            except OSError:
                # best effort: skip files that vanished or are unreadable
                # (narrowed from a bare `except` that hid all errors)
                pass
    return {
        'status': True,
        'diskUsage_raw': size,
        'diskUsage_human': pftree.sizeof_fmt(size)
    }
Sample callback that determines a directory size .
56,987
def inputReadCallback(self, *args, **kwargs):
    """Test callback for the input-read stage: log and count files in a path.

    Accepts (path, file-list) either as keyword args or as a single
    positional tuple; positional data takes precedence.
    """
    b_status = True
    filesRead = 0
    if 'l_file' in kwargs:
        l_file = kwargs['l_file']
    if 'path' in kwargs:
        str_path = kwargs['path']
    if len(args):
        str_path, l_file = args[0][0], args[0][1]
    self.dp.qprint("reading (in path %s):\n%s" %
                   (str_path, self.pp.pformat(l_file)), level=5)
    filesRead = len(l_file)
    if not len(l_file):
        b_status = False
    return {
        'status': b_status,
        'l_file': l_file,
        'str_path': str_path,
        'filesRead': filesRead
    }
Test for inputReadCallback
56,988
def inputAnalyzeCallback(self, *args, **kwargs):
    """Test callback for the analysis stage: optionally sleep, then count files.

    Status is only True when positional (path, read-dict) data was supplied.
    """
    b_status = False
    filesAnalyzed = 0
    if 'filesRead' in kwargs:
        d_DCMRead = kwargs['filesRead']
    if 'path' in kwargs:
        str_path = kwargs['path']
    if len(args):
        str_path, d_read = args[0][0], args[0][1]
        b_status = True
    self.dp.qprint("analyzing:\n%s" % self.pp.pformat(d_read['l_file']),
                   level=5)
    # artificial delay used to exercise timing/concurrency in tests
    if int(self.f_sleepLength):
        self.dp.qprint("sleeping for: %f" % self.f_sleepLength, level=5)
        time.sleep(self.f_sleepLength)
    filesAnalyzed = len(d_read['l_file'])
    return {
        'status': b_status,
        'filesAnalyzed': filesAnalyzed,
        'l_file': d_read['l_file']
    }
Test method for inputAnalzeCallback
56,989
def outputSaveCallback(self, at_data, **kwargs):
    """Test callback for the output-save stage.

    Writes either a formatted file listing (file-ls.txt) or just the analyzed
    file count (file-count.txt), depending on self.testType.
    """
    path, d_outputInfo = at_data
    other.mkdir(self.str_outputDir)
    filesSaved = 0
    other.mkdir(path)
    str_outfile = ('%s/file-count.txt' % path if self.testType
                   else '%s/file-ls.txt' % path)
    with open(str_outfile, 'w') as f:
        self.dp.qprint("saving: %s" % (str_outfile), level=5)
        if not self.testType:
            f.write('%s`' % self.pp.pformat(d_outputInfo['l_file']))
        else:
            f.write('%d\n' % d_outputInfo['filesAnalyzed'])
    filesSaved += 1
    return {
        'status': True,
        'outputFile': str_outfile,
        'filesSaved': filesSaved
    }
Test method for outputSaveCallback
56,990
def run(self, *args, **kwargs):
    """Probe the input tree, construct the internal tree representation, and
    either run the test suite or compute/print usage statistics.

    Keyword arguments:
        timerStart (bool): when True, start the elapsed-time timer.

    Returns a dict with overall status, probe/tree/stats/test results, any
    error string, and the total run time.
    """
    b_status = True
    d_probe = {}
    d_tree = {}
    d_stats = {}
    str_error = ''
    b_timerStart = False
    d_test = {}
    for k, v in kwargs.items():
        if k == 'timerStart':
            b_timerStart = bool(v)
    if b_timerStart:
        other.tic()
    # input directory must exist before any tree operation
    if not os.path.exists(self.str_inputDir):
        b_status = False
        self.dp.qprint("input directory either not specified or does not exist.", comms='error')
        error.warn(self, 'inputDirFail', exitToOS=True, drawBox=True)
        str_error = 'error captured while accessing input directory'
    if b_status:
        str_origDir = os.getcwd()
        # in relative mode, chdir into the input dir and probe '.'
        # NOTE(review): assumes the later chdir back restores state even on
        # test failure — confirm no exception path skips it
        if self.b_relativeDir:
            os.chdir(self.str_inputDir)
            str_rootDir = '.'
        else:
            str_rootDir = self.str_inputDir
        d_probe = self.tree_probe(root=str_rootDir)
        b_status = b_status and d_probe['status']
        # build the tree, computing per-directory sizes via dirsize_get
        d_tree = self.tree_construct(l_files=d_probe['l_files'], constructCallback=self.dirsize_get)
        b_status = b_status and d_tree['status']
        if self.b_test:
            d_test = self.test_run(*args, **kwargs)
            b_status = b_status and d_test['status']
        else:
            if self.b_stats or self.b_statsReverse:
                d_stats = self.stats_compute()
                self.dp.qprint('Total size (raw): %d' % d_stats['totalSize'], level=1)
                self.dp.qprint('Total size (human): %s' % d_stats['totalSize_human'], level=1)
                self.dp.qprint('Total files: %s' % d_stats['files'], level=1)
                self.dp.qprint('Total dirs: %s' % d_stats['dirs'], level=1)
                b_status = b_status and d_stats['status']
            if self.b_jsonStats:
                print(json.dumps(d_stats, indent=4, sort_keys=True))
        # restore the original working directory if we changed it
        if self.b_relativeDir:
            os.chdir(str_origDir)
    d_ret = {
        'status': b_status,
        'd_probe': d_probe,
        'd_tree': d_tree,
        'd_stats': d_stats,
        'd_test': d_test,
        'str_error': str_error,
        'runTime': other.toc()
    }
    if self.b_json:
        print(json.dumps(d_ret, indent=4, sort_keys=True))
    return d_ret
Probe the input tree and print .
56,991
def _set_status(self, status, result=None):
    """Log the status transition, then delegate the update to set_status."""
    cls = self.__class__
    kmsg = Message(self.uuid, entrypoint=cls.path, params=self.params).dump()
    logger.info(
        "{}.SetStatus: {}[{}] status update '{}' -> '{}'".format(
            cls.__name__, cls.path, self.uuid, self.status, status),
        extra=dict(kmsg=kmsg))
    return self.set_status(status, result)
update operation status
56,992
def _prerun(self):
    """Validate required params, mark the message RUNNING, then run prerun()."""
    self.check_required_params()
    self._set_status("RUNNING")
    cls = self.__class__
    kmsg = Message(self.uuid, entrypoint=cls.path, params=self.params).dump()
    logger.debug(
        "{}.PreRun: {}[{}]: running...".format(cls.__name__, cls.path, self.uuid),
        extra=dict(kmsg=kmsg))
    return self.prerun()
To execute before running message
56,993
def next(self, task):
    """Find the task that should run after *task*.

    Walks forward from *task*, skipping tasks already in SUCCESS, and returns
    the first non-successful follower; returns None when none remains.
    """
    target = str(task.uuid)
    for position, candidate in enumerate(self.tasks[:-1]):
        if candidate.uuid == target:
            follower = self.tasks[position + 1]
            if follower.status != 'SUCCESS':
                return follower
            # follower already succeeded: keep scanning from it
            target = follower.uuid
    return None
Find the next task
56,994
def launch_next(self, task=None, result=None):
    """Send the task following *task*, or finish the operation.

    Without *task*, kicks off the first task; an operation with no tasks at
    all yields an error Result.
    """
    if task:
        follower = self.next(task)
        if follower:
            return follower.send(result=result)
        # no follower: the operation ends with the last task's status
        return self.set_status(task.status, result)
    if self.tasks:
        return self.tasks[0].send(result=result)
    return Result(retcode=1, stderr="Nothing to do, empty operation !")
Launch next task or finish operation
56,995
def compute_tasks(self, **kwargs):
    """Run pre-build checks, then build and return the task list.

    When _prebuild returns nothing, the raw keyword arguments are used as the
    build parameters.
    """
    params = self._prebuild(**kwargs) or dict(kwargs)
    return self._build_tasks(**params)
Perform checks and build tasks.
56,996
def build(self, **kwargs):
    """Create the operation: append the computed tasks, then finalize."""
    self.tasks.extend(self.compute_tasks(**kwargs))
    return self.finalize()
create the operation and associate tasks
56,997
def serve_dtool_directory(directory, port):
    """Serve the datasets in *directory* over HTTP on localhost:*port*.

    Changes the working directory to *directory* and blocks forever.
    """
    os.chdir(directory)
    httpd = DtoolHTTPServer(("localhost", port), DtoolHTTPRequestHandler)
    httpd.serve_forever()
Serve the datasets in a directory over HTTP .
56,998
def cli():
    """Command line utility for serving datasets in a directory over HTTP."""
    ap = argparse.ArgumentParser(description=__doc__)
    ap.add_argument("dataset_directory",
                    help="Directory with datasets to be served")
    ap.add_argument("-p", "--port", type=int, default=8081,
                    help="Port to serve datasets on (default 8081)")
    ns = ap.parse_args()
    if not os.path.isdir(ns.dataset_directory):
        ap.error("Not a directory: {}".format(ns.dataset_directory))
    serve_dtool_directory(ns.dataset_directory, ns.port)
Command line utility for serving datasets in a directory over HTTP .
56,999
def generate_url(self, suffix):
    """Build an http URL for *suffix* relative to the directory of self.path."""
    host, port = self.server.server_address
    base = os.path.dirname(self.path)
    return urlunparse(("http", "{}:{}".format(host, port),
                       base + "/" + suffix, "", "", ""))
Return URL by combining server details with a path suffix .