idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
57,300
def clear(self, fill=0x00):
    """Reset the display frame buffer, filling every pixel with ``fill``.

    fill: byte value written to each cell (defaults to 0x00, i.e. blank).
    """
    size = self.width * self.height
    self._buffer = [fill for _ in range(size)]
! Clear the buffer data. Note: the RPiDisplay object currently clears only the buffer data, not other state.
57,301
def connect(self):
    """Open a RabbitMQ SelectConnection, rotating through the configured URLs.

    Retries forever on AMQPConnectionError, sleeping one extra second per
    failed attempt, and returns the connection handle on success.
    """
    attempt = 1
    total = len(self._rabbit_urls)
    while True:
        # Rotate through the configured brokers (the -1 shift keeps the
        # original server-selection order).
        self._url = self._rabbit_urls[(attempt % total) - 1]
        try:
            logger.info('Connecting', attempt=attempt)
            return pika.SelectConnection(
                pika.URLParameters(self._url),
                self.on_connection_open,
                stop_ioloop_on_close=False,
            )
        except pika.exceptions.AMQPConnectionError:
            logger.exception("Connection error")
            attempt += 1
            logger.error("Connection sleep", no_of_seconds=attempt)
            time.sleep(attempt)
This method connects to RabbitMQ using a SelectConnection object returning the connection handle .
57,302
def nack_message(self, delivery_tag, **kwargs):
    """Send a negative acknowledgement for ``delivery_tag``.

    Extra keyword arguments are only forwarded to the log entry.
    """
    logger.info('Nacking message', delivery_tag=delivery_tag, **kwargs)
    channel = self._channel
    channel.basic_nack(delivery_tag)
Negative acknowledge a message
57,303
def tx_id(properties):
    """Extract ``tx_id`` from a rabbit message's properties.

    Raises KeyError when the header is absent (and TypeError when there
    are no headers at all, since ``properties.headers`` would be None).
    """
    found = properties.headers['tx_id']
    logger.info("Retrieved tx_id from message properties: tx_id={}".format(found))
    return found
Gets the tx_id for a message from a rabbit queue using the message properties . Will raise KeyError if tx_id is missing from message headers .
57,304
def on_message(self, unused_channel, basic_deliver, properties, body):
    """Handle one message delivered from the queue.

    Optionally validates the tx_id header, then runs ``self.process`` on
    the decoded body. Outcomes: ack on success; quarantine + reject on
    quarantinable/bad-message errors (requeue if quarantining itself
    fails); nack on retryable or unexpected errors.
    """
    if self.check_tx_id:
        try:
            tx_id = self.tx_id(properties)
            logger.info('Received message',
                        queue=self._queue,
                        delivery_tag=basic_deliver.delivery_tag,
                        app_id=properties.app_id,
                        tx_id=tx_id)
        except KeyError as e:
            # Header present but no tx_id key.
            self.reject_message(basic_deliver.delivery_tag)
            logger.error("Bad message properties - no tx_id",
                         action="rejected", exception=str(e))
            return None
        except TypeError as e:
            # No headers object at all.
            self.reject_message(basic_deliver.delivery_tag)
            logger.error("Bad message properties - no headers",
                         action="rejected", exception=str(e))
            return None
    else:
        logger.debug("check_tx_id is False. Not checking tx_id for message.",
                     delivery_tag=basic_deliver.delivery_tag)
        tx_id = None
    try:
        try:
            self.process(body.decode("utf-8"), tx_id)
        except TypeError:
            # process() was called with the wrong shape of arguments;
            # treat the message as quarantinable rather than retrying.
            logger.error('Incorrect call to process method')
            raise QuarantinableError
        self.acknowledge_message(basic_deliver.delivery_tag, tx_id=tx_id)
    except (QuarantinableError, BadMessageError) as e:
        try:
            self.quarantine_publisher.publish_message(body, headers={'tx_id': tx_id})
            self.reject_message(basic_deliver.delivery_tag, tx_id=tx_id)
            logger.error("Quarantinable error occured", action="quarantined",
                         exception=str(e), tx_id=tx_id)
        except PublishMessageError:
            # Could not quarantine: put the message back on the queue.
            logger.error("Unable to publish message to quarantine queue. Rejecting message and requeuing.")
            self.reject_message(basic_deliver.delivery_tag, requeue=True, tx_id=tx_id)
    except RetryableError as e:
        self.nack_message(basic_deliver.delivery_tag, tx_id=tx_id)
        logger.error("Failed to process", action="nack", exception=str(e), tx_id=tx_id)
    except Exception as e:
        self.nack_message(basic_deliver.delivery_tag, tx_id=tx_id)
        logger.exception("Unexpected exception occurred")
        logger.error("Failed to process", action="nack", exception=str(e), tx_id=tx_id)
Called on receipt of a message from a queue .
57,305
def authenticate(self, username, password):
    """Authenticate against the ObjectRocket API and return the new token.

    Stores the credentials and the received token on the instance. Any
    non-AuthFailure error is logged and re-raised wrapped in AuthFailure.
    """
    self._username = username
    self._password = password
    resp = requests.get(self._url, auth=(username, password),
                        **self._default_request_kwargs)
    try:
        if resp.status_code == 200:
            payload = resp.json()
            token = payload['data']['token']
        elif resp.status_code == 401:
            raise errors.AuthFailure(resp.json().get('message', 'Authentication Failure.'))
        else:
            raise errors.AuthFailure("Unknown exception while authenticating: '{}'".format(resp.text))
    except errors.AuthFailure:
        raise
    except Exception as ex:
        logging.exception(ex)
        raise errors.AuthFailure('{}: {}'.format(ex.__class__.__name__, ex))
    self._token = token
    logger.info('New API token received: "{}".'.format(token))
    return token
Authenticate against the ObjectRocket API .
57,306
def _refresh(self):
    """Re-authenticate with the stored credentials and rebind the token."""
    self._token = self.authenticate(self._username, self._password)
    logger.info('New API token received: "{}".'.format(self._token))
    return self._token
Refresh the API token using the currently bound credentials .
57,307
def _verify(self, token):
    """Ask the API whether ``token`` is valid.

    Returns the token's data payload on success, or None for any
    non-200 response.
    """
    url = '{}{}/'.format(self._url, 'verify')
    resp = requests.post(url, json={'token': token}, **self._default_request_kwargs)
    if resp.status_code != 200:
        return None
    return resp.json().get('data', None)
Verify that the given token is valid .
57,308
def preprocess(net, image):
    """Convert an (H, W, C) image into Caffe's channel-first layout.

    Moves the channel axis to the front, reverses the channel order
    (RGB -> BGR), and subtracts the network's mean for the "data" blob.
    """
    chw = np.rollaxis(image, 2)  # (H, W, C) -> (C, H, W)
    bgr = chw[::-1]              # reverse channel order
    return np.float32(bgr) - net.transformer.mean["data"]
convert to Caffe input image layout
57,309
def make_step(net, step_size=1.5, end="inception_4c/output", jitter=32, clip=True, objective=objective_L2):
    """Perform one gradient-ascent step on the network's input blob.

    The input is randomly jittered before the forward/backward pass and
    un-jittered afterwards; when ``clip`` is set the data is clamped to
    the displayable range around the mean.
    """
    src = net.blobs["data"]
    dst = net.blobs[end]
    # Random shift regularizes the optimization.
    ox, oy = np.random.randint(-jitter, jitter + 1, 2)
    src.data[0] = np.roll(np.roll(src.data[0], ox, -1), oy, -2)
    net.forward(end=end)
    objective(dst)  # writes the objective gradient into dst.diff
    net.backward(start=end)
    g = src.diff[0]
    # Ascent step, normalized by the mean gradient magnitude.
    src.data[:] += step_size / np.abs(g).mean() * g
    # Undo the jitter shift.
    src.data[0] = np.roll(np.roll(src.data[0], -ox, -1), -oy, -2)
    if clip:
        bias = net.transformer.mean["data"]
        src.data[:] = np.clip(src.data, -bias, 255 - bias)
basic gradient ascent step
57,310
def deepdream(net, base_image, iter_n=10, octave_n=4, octave_scale=1.4, end="inception_4c/output", clip=True, **step_params):
    """Run gradient ascent across successively larger scales ("octaves").

    Builds a shrinking image pyramid, then iterates from the smallest
    octave up, carrying the accumulated "detail" between scales.
    (Python 2 code: uses ``xrange``.)
    """
    # Pyramid: index 0 is full size, later entries shrink by octave_scale.
    octaves = [preprocess(net, base_image)]
    for i in xrange(octave_n - 1):
        octaves.append(nd.zoom(octaves[-1],
                               (1, 1.0 / octave_scale, 1.0 / octave_scale),
                               order=1))
    src = net.blobs["data"]
    detail = np.zeros_like(octaves[-1])  # accumulated detail, starts empty
    for octave, octave_base in enumerate(octaves[::-1]):
        h, w = octave_base.shape[-2:]
        if octave > 0:
            # Upscale the detail produced at the previous (smaller) octave.
            h1, w1 = detail.shape[-2:]
            detail = nd.zoom(detail, (1, 1.0 * h / h1, 1.0 * w / w1), order=1)
        src.reshape(1, 3, h, w)
        src.data[0] = octave_base + detail
        for i in xrange(iter_n):
            make_step(net, end=end, clip=clip, **step_params)
            vis = deprocess(net, src.data[0])
            if not clip:
                # Without clipping, rescale for visualization.
                vis = vis * (255.0 / np.percentile(vis, 99.98))
            log.info("octave: {octave}, index: {index}, blob/layer: {end}, dimensions: {shape}".format(
                octave=octave, index=i, end=end, shape=vis.shape,))
        detail = src.data[0] - octave_base
    return deprocess(net, src.data[0])
an ascent through different scales called octaves
57,311
def main(param_path='parameters.txt'):
    """Entry point: run every analysis declared in a parameter file.

    Sets up file logging (including uncaught-exception logging), sanity
    checks the parameter file, then executes each declared run.

    Raises
    ------
    IOError
        If no file exists at ``param_path``.
    """
    if not os.path.isfile(param_path):
        # Exception-call syntax works on both Python 2 and 3; the original
        # ``raise IOError, msg`` form is a SyntaxError on Python 3.
        raise IOError("Parameter file not found at %s" % param_path)

    params, base_options = _get_params_base_options(param_path)

    # Start each invocation with a fresh log file.
    log_path = os.path.join(base_options['results_dir'], '_log.txt')
    if os.path.isfile(log_path):
        os.remove(log_path)

    logging.basicConfig(level=logging.INFO, format='%(message)s')
    fileh = logging.FileHandler(log_path)
    fileh.setLevel(logging.DEBUG)
    filefmt = logging.Formatter(
        time.strftime("%Y/%m/%d %H:%M:%S %p", time.localtime())
        + ' - %(name)s - %(levelname)s - %(message)s')
    fileh.setFormatter(filefmt)
    logging.getLogger('').addHandler(fileh)

    def log_uncaught(type1, value1, traceback1):
        # Route uncaught exceptions into the log as CRITICAL entries.
        tb_list = traceback.format_exception(type1, value1, traceback1)
        tb_str = ''.join(tb_list)
        logging.critical('\n\n' + tb_str)
    sys.excepthook = log_uncaught

    logging.info('Running macroeco')
    logging.info('Parameters file at %s' % os.path.abspath(param_path))

    bad_params = misc.check_parameter_file(param_path)
    if len(bad_params[0]) > 0:
        logging.warning("Possible formatting error(s) in"
                        + " %s: parameters %s on lines %s"
                        % (param_path, bad_params[0], bad_params[1]))

    logging.info('Starting analysis')
    for run_name in base_options['run_names']:
        logging.info('Starting run %s' % run_name)
        options = dict(params[run_name])
        options.update(base_options)
        options['run_dir'] = os.path.join(base_options['results_dir'], run_name)
        if 'format' in options['analysis']:
            _do_format(options)
        else:
            _do_analysis(options)
        logging.info('Finished run %s' % run_name)
    logging.info('Finished analysis successfully')
    logging.info('Results available at %s' % options['param_dir'])
    logging.shutdown()
Entry point function for analysis based on parameter files .
57,312
def _do_analysis(options):
    """Run one analysis described by ``options`` and save its results.

    Model fitting only applies to results coming from the ``emp`` module
    and only when the run requests models.
    """
    module = _function_location(options)
    core_results = _call_analysis_function(options, module)
    fit_results = None
    if module == 'emp' and 'models' in options.keys():
        fit_results = _fit_models(options, core_results)
    _save_results(options, module, core_results, fit_results)
Do analysis for a single run as specified by options .
57,313
def _call_analysis_function(options, module):
    """Call ``<module>.<analysis>`` with arguments derived from ``options``.

    ``module`` is the *name* of a module object available in this file's
    global namespace; ``options['analysis']`` names the function inside it.
    Resolving via globals()/getattr replaces the original string-building
    ``eval``, which executed config-derived text as code.
    """
    args, kwargs = _get_args_kwargs(options, module)
    analysis_func = getattr(globals()[module], options['analysis'])
    return analysis_func(*args, **kwargs)
Call function from module and get result using inputs from options
57,314
def _emp_extra_options(options):
    """Fill in emp-module-specific options: metadata path, patch, cols, splits.

    Raises
    ------
    IOError
        If the metadata file referenced by the options does not exist.
        (The exception-call form replaces the Python 2-only
        ``raise IOError, msg`` syntax, which is a SyntaxError on Python 3.)
    """
    metadata_path = os.path.normpath(
        os.path.join(options['param_dir'], options['metadata']))
    if not os.path.isfile(metadata_path):
        raise IOError("Path to metadata file %s is invalid." % metadata_path)
    options['metadata_path'] = metadata_path

    subset = options.get('subset', '')
    options['patch'] = emp.Patch(metadata_path, subset)

    # Default the optional keys so downstream code can rely on their presence.
    if 'cols' not in options.keys():
        options['cols'] = ''
    if 'splits' not in options.keys():
        options['splits'] = ''
    return options
Get special options patch cols and splits if analysis in emp module
57,315
def _fit_models(options, core_results):
    """Fit every requested model to each empirical core result.

    Returns a list with one dict per core result, mapping model name to
    ``[fits, values, stat_names, stats]``.
    """
    logging.info("Fitting models")
    models = options['models'].replace(' ', '').split(';')

    def _fit_one(core_result, model):
        # Fit a single model and bundle all of its outputs.
        fits = _get_fits(core_result, model, options)
        values = _get_values(core_result, model, fits)
        stat_names, stats = _get_comparison_stat(core_result, values, model, fits)
        return [fits, values, stat_names, stats]

    return [{model: _fit_one(core_result, model) for model in models}
            for core_result in core_results]
Fit models to empirical result from a function in emp module
57,316
def _save_results(options, module, core_results, fit_results):
    """Persist analysis output (tables and figures) under the run directory."""
    logging.info("Saving all results")
    mpl.rcParams.update(misc.rcparams.ggplot_rc)  # consistent plot styling
    os.makedirs(options['run_dir'])
    _write_core_tables(options, module, core_results)
    if module == 'emp':
        _write_subset_index_file(options, core_results)
    if not fit_results:
        return
    models = options['models'].replace(' ', '').split(';')
    for i, core_result in enumerate(core_results):
        _write_fitted_params(i, models, options, fit_results)
        _write_test_statistics(i, models, options, fit_results)
        _write_comparison_plot_table(i, models, options, core_results, fit_results)
Save results of analysis as tables and figures
57,317
def _write_subset_index_file(options, core_results):
    """Write ``_subset_index.csv`` mapping 1-based subset numbers to subset strings.

    ``core_results`` is a sequence of tuples whose first element is the
    subset string.
    """
    f_path = os.path.join(options['run_dir'], '_subset_index.csv')
    # zip() returns an iterator on Python 3, so it must be materialized
    # before indexing — ``zip(*core_results)[0]`` is a TypeError there.
    subset_strs = list(zip(*core_results))[0]
    index = np.arange(len(subset_strs)) + 1  # 1-based subset numbering
    df = pd.DataFrame({'subsets': subset_strs}, index=index)
    df.to_csv(f_path)
Write a table mapping each subset's index number to its subset string.
57,318
def _pad_plot_frame(ax, pad=0.01):
    """Expand both axis limits of ``ax`` outward by ``pad`` of their range.

    Returns the same axes object for chaining.
    """
    xmin, xmax = ax.get_xlim()
    ymin, ymax = ax.get_ylim()
    x_margin = (xmax - xmin) * pad
    y_margin = (ymax - ymin) * pad
    ax.set_xlim(xmin - x_margin, xmax + x_margin)
    ax.set_ylim(ymin - y_margin, ymax + y_margin)
    return ax
Provides padding on sides of frame equal to pad fraction of plot
57,319
def _output_cdf_plot(core_result, spid, models, options, fit_results):
    """Plot the empirical CDF of a core result alongside fitted-model CDFs."""
    x = core_result['y'].values
    df = emp.empirical_cdf(x)
    df.columns = ['x', 'empirical']

    def calc_func(model, df, shapes):
        # Resolve the distribution object by name instead of eval-ing a
        # built code string (``eval("mod.%s.cdf(...)")`` in the original).
        return getattr(mod, model).cdf(df['x'], *shapes)

    plot_exec_str = "ax.step(df['x'], emp, color='k', lw=3);ax.set_ylim(top=1)"
    _save_table_and_plot(spid, models, options, fit_results,
                         'data_pred_cdf', df, calc_func, plot_exec_str)
Function for plotting cdf
57,320
def openOnlyAccel(self, cycleFreq=0x00):
    """Wake the device in accelerometer-only low-power cycle mode.

    cycleFreq: cycle-frequency selector forwarded to ``openWith``.
    """
    self.openWith(
        accel=True,
        gyro=False,
        temp=False,
        cycle=True,
        cycleFreq=cycleFreq,
    )
! Turn the device into Accelerometer-Only Low Power Mode.
57,321
def setMotionInt(self, motDHPF=0x01, motTHR=0x14, motDUR=0x30, motDeteDec=0x15):
    """Configure and enable the motion-detection interrupt.

    motDHPF: digital high-pass-filter bits installed into ACCEL_CONFIG.
    motTHR / motDUR: motion threshold and duration register values.
    motDeteDec: motion-detection control register value.
    """
    # Reset the signal paths and clear the INT pin configuration first.
    self._sendCmd(self.REG_SIGNAL_PATH_RESET, 0x07)
    self._sendCmd(self.REG_INT_PIN_CFG, 0x00)
    # Clear the 0xE7 bits of ACCEL_CONFIG, then install the DHPF bits.
    orgAccelConf = self._readByte(self.REG_ACCEL_CONFIG)
    newAccelConf = ((orgAccelConf | 0xE7) ^ 0xE7) | motDHPF
    self._sendCmd(self.REG_ACCEL_CONFIG, newAccelConf)
    # Program threshold, duration and detection control, then enable.
    self._sendCmd(self.REG_MOTION_DET, motTHR)
    self._sendCmd(self.REG_MOTION_DET_DUR, motDUR)
    self._sendCmd(self.REG_MOTION_DET_CTRL, motDeteDec)
    self._sendCmd(self.REG_INT_ENABLE, self.VAL_INT_ENABLE_MOTION)
! Set to enable Motion Detection Interrupt
57,322
def readAccelRange(self):
    """Return the accelerometer range field from ACCEL_CONFIG.

    ``(raw | 0xE7) ^ 0xE7`` zeroes every bit in the 0xE7 mask, leaving
    only bits 3-4 (the 0x18 field) of the register.
    """
    raw = self._readByte(self.REG_ACCEL_CONFIG)
    return (raw | 0xE7) ^ 0xE7
! Reads the range of accelerometer setup .
57,323
def getAccelData(self, raw=False):
    """Read the accelerometer X/Y/Z values.

    raw=True returns readings scaled only by the range modifier;
    raw=False additionally multiplies by the gravity factor. Returns
    False (after printing an error) when the configured range is not
    one of the four known settings; returns None if ``raw`` is neither
    True nor False, mirroring the original if/elif behavior.
    """
    x = self._readWord(self.REG_ACCEL_XOUT_H)
    y = self._readWord(self.REG_ACCEL_YOUT_H)
    z = self._readWord(self.REG_ACCEL_ZOUT_H)
    modifiers = {
        self.ACCEL_RANGE_2G: self.ACCEL_SCALE_MODIFIER_2G,
        self.ACCEL_RANGE_4G: self.ACCEL_SCALE_MODIFIER_4G,
        self.ACCEL_RANGE_8G: self.ACCEL_SCALE_MODIFIER_8G,
        self.ACCEL_RANGE_16G: self.ACCEL_SCALE_MODIFIER_16G,
    }
    accel_range = self.readAccelRange()
    if accel_range not in modifiers:
        print("ERROR: Unkown accel range!")
        return False
    scale = modifiers[accel_range]
    x, y, z = x / scale, y / scale, z / scale
    if raw == True:
        return {'x': x, 'y': y, 'z': z}
    elif raw == False:
        g = self._gravityFactor
        return {'x': x * g, 'y': y * g, 'z': z * g}
! Gets and returns the X Y and Z values from the accelerometer .
57,324
def readGyroRange(self):
    """Return the gyroscope range field from GYRO_CONFIG.

    ``(raw | 0xE7) ^ 0xE7`` zeroes every bit in the 0xE7 mask, keeping
    only bits 3-4 (the 0x18 field) of the register.
    """
    raw = self._readByte(self.REG_GYRO_CONFIG)
    return (raw | 0xE7) ^ 0xE7
! Read range of gyroscope .
57,325
def getGyroData(self):
    """Read the gyroscope X/Y/Z values scaled by the configured range.

    Returns False (after printing an error) when the configured range is
    not one of the four known settings.
    """
    x = self._readWord(self.REG_GYRO_XOUT_H)
    y = self._readWord(self.REG_GYRO_YOUT_H)
    z = self._readWord(self.REG_GYRO_ZOUT_H)
    modifiers = {
        self.GYRO_RANGE_250DEG: self.GYRO_SCALE_MODIFIER_250DEG,
        self.GYRO_RANGE_500DEG: self.GYRO_SCALE_MODIFIER_500DEG,
        self.GYRO_RANGE_1KDEG: self.GYRO_SCALE_MODIFIER_1KDEG,
        self.GYRO_RANGE_2KDEG: self.GYRO_SCALE_MODIFIER_2KDEG,
    }
    gyro_range = self.readGyroRange()
    if gyro_range not in modifiers:
        print("ERROR: Unkown gyroscope range!")
        return False
    scale = modifiers[gyro_range]
    return {'x': x / scale, 'y': y / scale, 'z': z / scale}
! Gets and returns the X Y and Z values from the gyroscope
57,326
def getAllData(self, temp=True, accel=True, gyro=True):
    """Collect readings from each enabled sensor into a single dict.

    Keys present depend on which flags are set: "temp", "accel", "gyro".
    """
    readings = {}
    if temp:
        readings["temp"] = self.getTemp()
    if accel:
        readings["accel"] = self.getAccelData(raw=False)
    if gyro:
        readings["gyro"] = self.getGyroData()
    return readings
! Get all the available data .
57,327
def repeater(pipe, how_many=2):
    """Yield every item of ``pipe`` ``how_many`` times in a row."""
    for item in pipe:
        for _ in range(how_many):
            yield item
this function repeats each value in the pipeline however many times you need
57,328
def kld(p1, p2):
    """Kullback-Leibler divergence D(p1 || p2).

    Both inputs are assumed to be normalized probability vectors. Terms
    where p1 == 0 contribute 0 by convention. np.where still *evaluates*
    ``p1 * np.log(p1 / p2)`` at those positions even though the results
    are discarded, so the errstate context suppresses the spurious
    divide-by-zero / invalid-value warnings the original emitted.
    """
    with np.errstate(divide='ignore', invalid='ignore'):
        return np.sum(np.where(p1 != 0, p1 * np.log(p1 / p2), 0))
Compute Kullback-Leibler divergence between p1 and p2. It assumes that p1 and p2 are already normalized, so that each of them sums to 1.
57,329
def jsd(p1, p2):
    """Jensen-Shannon divergence between normalized distributions p1 and p2.

    Computed as the mean of the KL divergences of each input against
    their midpoint distribution.
    """
    midpoint = (p1 + p2) / 2
    return 0.5 * (kld(p1, midpoint) + kld(p2, midpoint))
Compute Jensen-Shannon divergence between p1 and p2. It assumes that p1 and p2 are already normalized, so that each of them sums to 1.
57,330
def njsd(network, ref_gene_expression_dict, query_gene_expression_dict, gene_set):
    """Network-based Jensen-Shannon divergence between query and reference profiles.

    For each gene of ``gene_set`` present in the network, compares the
    expression vector of its neighborhood between the two profiles and
    returns the mean per-gene JSD. Genes absent from the network, or whose
    neighborhood is silent in both profiles, are skipped.
    """
    gene_jsd_dict = dict()
    reference_genes = ref_gene_expression_dict.keys()
    # The original assert compared len(...) against the message string
    # itself (always True); the missing comma made it a no-op. Restore
    # the intended emptiness check.
    assert len(reference_genes) != 0, 'Reference gene expression profile should have > 0 genes.'
    for gene in gene_set:
        if gene not in network.nodes:
            continue
        neighbors = find_neighbors(network, gene)
        query_expression_vec = get_neighbor_expression_vector(neighbors, query_gene_expression_dict)
        ref_expression_vec = get_neighbor_expression_vector(neighbors, ref_gene_expression_dict)
        assert len(query_expression_vec) == len(ref_expression_vec), 'Topology of reference network and query network differs. Please check.'
        # A neighborhood with zero total expression in both profiles
        # carries no signal; skip it rather than divide by zero.
        if np.sum(query_expression_vec) == 0 and np.sum(ref_expression_vec) == 0:
            continue
        query_p_vec = exp2prob(query_expression_vec)
        ref_p_vec = exp2prob(ref_expression_vec)
        gene_jsd_dict[gene] = jsd(query_p_vec, ref_p_vec)
    return np.mean(list(gene_jsd_dict.values()))
Calculate Jensen - Shannon divergence between query and reference gene expression profile .
57,331
def lookupProcessor(name):
    """Look up a processor class by name in the module registry.

    Raises
    ------
    LookupError
        If ``name`` is not a registered processor.
    """
    if name in _proc_lookup:
        return _proc_lookup[name]
    # The original literal contained a stray "\d" ("...read the\documentation"),
    # almost certainly a lost line-continuation artifact; restore the
    # intended text with a plain space.
    error_string = ('If you are creating a new processor, please read the '
                    'documentation on creating a new processor')
    raise LookupError("Unknown processor %s\n%s" % (name, error_string))
Lookup processor class object by its name
57,332
def serialize(self, value, entity=None, request=None):
    """Convert ``value`` to its serialized form, validating on the way out.

    Note: field-level validation runs on the *converted* value while the
    validator chain runs on the original input, mirroring the original
    implementation. ``entity`` and ``request`` are accepted but unused here.
    """
    converted = self.from_python(value)
    self.validate(converted)
    self.run_validators(value)
    return converted
Validate and serialize the value .
57,333
def side_task(pipe, *side_jobs):
    """Run ``side_jobs`` on every item flowing through the pipeline while
    yielding the items themselves unchanged.

    The original body called ``map(pipe, *side_jobs)``, which treats the
    pipeline as the mapping *function* and the job callables as iterables
    — a TypeError as soon as iteration starts. This version applies each
    side job to every item (return values discarded) and passes the item
    through untouched, which is the documented intent.
    """
    assert iterable(pipe), 'side_task needs the first argument to be iterable'
    for sj in side_jobs:
        assert callable(sj), 'all side_jobs need to be functions, not {}'.format(sj)
    for item in pipe:
        for sj in side_jobs:
            sj(item)  # side effects only
        yield item
allows you to run a function in a pipeline without affecting the data
57,334
def _connect(self):
    """Connect to EC2 (and, when configured, VPC), caching the EC2 handle.

    Resolves ``self._vpc`` (a name or id) to ``self._vpc_id`` when a VPC
    is configured; raises VpcError when no VPC matches. Any connection
    failure is logged and re-raised.
    """
    if self._ec2_connection:
        return self._ec2_connection
    if not self._vpc:
        vpc_connection = None
    try:
        log.debug("Connecting to ec2 host %s", self._ec2host)
        region = ec2.regioninfo.RegionInfo(name=self._region_name,
                                           endpoint=self._ec2host)
        ec2_connection = boto.connect_ec2(
            aws_access_key_id=self._access_key,
            aws_secret_access_key=self._secret_key,
            is_secure=self._secure,
            host=self._ec2host,
            port=self._ec2port,
            path=self._ec2path,
            region=region)
        log.debug("EC2 connection has been successful.")
        if self._vpc:
            vpc_connection = boto.connect_vpc(
                aws_access_key_id=self._access_key,
                aws_secret_access_key=self._secret_key,
                is_secure=self._secure,
                host=self._ec2host,
                port=self._ec2port,
                path=self._ec2path,
                region=region)
            log.debug("VPC connection has been successful.")
            for vpc in vpc_connection.get_all_vpcs():
                log.debug("Checking whether %s matches %s/%s"
                          % (self._vpc, vpc.tags['Name'], vpc.id))
                if self._vpc in [vpc.tags['Name'], vpc.id]:
                    self._vpc_id = vpc.id
                    if self._vpc != self._vpc_id:
                        log.debug("VPC %s matches %s" % (self._vpc, self._vpc_id))
                    break
            else:
                # for-else: no VPC matched the configured name/id.
                raise VpcError('VPC %s does not exist.' % self._vpc)
    except Exception as e:
        log.error("connection to ec2 could not be "
                  "established: message=`%s`", str(e))
        raise
    self._ec2_connection, self._vpc_connection = (ec2_connection, vpc_connection)
    return self._ec2_connection
Connects to the ec2 cloud provider
57,335
def split(pipe, splitter, skip_empty=False):
    """Split ``pipe`` on occurrences of the ``splitter`` sequence, the way
    ``str.split`` splits strings.

    skip_empty: when True, pieces with no contents are not yielded. The
    original implementation accepted this flag but never honored it; the
    unused ``len_splitter`` local has also been removed.
    """
    splitter = tuple(splitter)
    pipe = iter(pipe)
    current = deque()
    tmp = []
    windowed = window(pipe, len(splitter))
    for chunk in windowed:
        if chunk == splitter:
            # Consume the rest of the matched window so the splitter
            # itself never reaches the output.
            skip(windowed, len(splitter) - 1)
            piece = list(current)
            if piece or not skip_empty:
                yield piece
            current.clear()
            tmp = []
        else:
            current.append(chunk[0])
            tmp = chunk
    if len(current) or len(tmp):
        # Flush the trailing piece (always non-empty here).
        yield list(chain(current, tmp))
This function works a lot like groupby, but splits on a given pattern with the same behavior that str.split provides. If skip_empty is True, split only yields pieces that have contents.
57,336
def query_tracking_code(tracking_code, year=None):
    """Return the list of tracking events for ``tracking_code``.

    Queries the summary endpoint first to discover the destination, then
    the detail endpoint for the actual event list. ``year`` defaults to
    the current year. Returns [] when the summary has no data.
    """
    payload = {
        'Anio': year or datetime.now().year,
        'Tracking': tracking_code,
    }
    summary = _make_request(TRACKING_URL, payload)
    if not summary['d']:
        return []
    destination = summary['d'][0]['RetornoCadena6']
    payload['Destino'] = destination
    detail = _make_request(TRACKING_DETAIL_URL, payload)
    return _process_detail(detail['d'])
Given a tracking_code return a list of events related the tracking code
57,337
def comments_nb_counts():
    """Count comments on the record named by the current request's recid.

    Returns None when no recid is present, 0 for recid == 0, otherwise the
    number of non-deleted (status not in dm/da), unscored comments.
    """
    recid = request.view_args.get('recid')
    if recid is None:
        return None
    if recid == 0:
        return 0
    return CmtRECORDCOMMENT.count(
        CmtRECORDCOMMENT.id_bibrec == recid,
        CmtRECORDCOMMENT.star_score == 0,
        CmtRECORDCOMMENT.status.notin_(['dm', 'da']),
    )
Get number of comments for the record recid .
57,338
def decide_k(airport_code):
    """Strip a leading 'K' when the remainder matches a known airport.

    Returns the stored location identifier when ``airport_code`` minus its
    leading K matches an Airport record; otherwise returns the code as given.
    """
    if airport_code[:1].upper() != 'K':
        return airport_code
    try:
        match = Airport.objects.get(location_identifier__iexact=airport_code[1:])
        return match.location_identifier
    except Airport.DoesNotExist:
        return airport_code
A function to decide if a leading K is throwing off an airport match and return the correct code .
57,339
def parse_date(datestring):
    """Attempt to parse an ISO 8601 formatted date string into a datetime.

    Falls back to ``parse_time`` when no date pattern matches.

    Raises
    ------
    ParseError
        If the string does not start with a digit.
    """
    datestring = str(datestring).strip()
    if not datestring[0].isdigit():
        raise ParseError()
    if 'W' in datestring.upper():
        # Decrement the trailing day digit of week dates — presumably to
        # map the 1-based ISO weekday onto strptime's 0-based %w; confirm
        # against the DATE_FORMATS patterns.
        try:
            datestring = datestring[:-1] + str(int(datestring[-1:]) - 1)
        except ValueError:
            # Last character is not a digit (week without a day part);
            # leave the string as-is. (Was a bare ``except``.)
            pass
    for regex, pattern in DATE_FORMATS:
        if regex.match(datestring):
            found = regex.search(datestring).groupdict()
            dt = datetime.utcnow().strptime(found['matched'], pattern)
            if 'fraction' in found and found['fraction'] is not None:
                dt = dt.replace(microsecond=int(found['fraction'][1:]))
            if 'timezone' in found and found['timezone'] is not None:
                dt = dt.replace(tzinfo=Timezone(found.get('timezone', '')))
            return dt
    return parse_time(datestring)
Attempts to parse an ISO 8601 formatted datestring.
57,340
def parse_time(timestring):
    """Attempt to parse an ISO 8601 formatted time string.

    Returns a datetime combining today's date with the parsed time.

    Raises
    ------
    ParseError
        If no known time format matches.
    """
    timestring = str(timestring).strip()
    for regex, pattern in TIME_FORMATS:
        if not regex.match(timestring):
            continue
        found = regex.search(timestring).groupdict()
        parsed = datetime.utcnow().strptime(found['matched'], pattern)
        parsed = datetime.combine(date.today(), parsed.time())
        if found.get('fraction') is not None:
            parsed = parsed.replace(microsecond=int(found['fraction'][1:]))
        if found.get('timezone') is not None:
            parsed = parsed.replace(tzinfo=Timezone(found.get('timezone', '')))
        return parsed
    raise ParseError()
Attempts to parse an ISO 8601 formatted timestring.
57,341
def connect(self):
    """Open a MySQL connection described by ``self.dbSettings``.

    Sets up an SSH tunnel first when the settings request one (the tunnel
    supplies the local port), and enables autocommit when
    ``self.autocommit`` is set. Returns the live connection.
    """
    self.log.debug('starting the ``get`` method')
    dbSettings = self.dbSettings
    port = False
    if "tunnel" in dbSettings and dbSettings["tunnel"]:
        port = self._setup_tunnel(tunnelParameters=dbSettings["tunnel"])
    dbConn = ms.connect(
        host=dbSettings["host"],
        user=dbSettings["user"],
        passwd=dbSettings["password"],
        db=dbSettings["db"],
        port=port,
        use_unicode=True,
        charset='utf8',
        local_infile=1,
        client_flag=ms.constants.CLIENT.MULTI_STATEMENTS,
        connect_timeout=36000,
        max_allowed_packet=51200000,
    )
    if self.autocommit:
        dbConn.autocommit(True)
    self.log.debug('completed the ``get`` method')
    return dbConn
connect to the database
57,342
def map_(cache: Mapping[Domain, Range]) -> Operator[Map[Domain, Range]]:
    """Decorator factory: consult ``cache`` before calling the function.

    The cache is read-only here — misses fall through to the wrapped
    function and are not written back.
    """
    def wrapper(function: Map[Domain, Range]) -> Map[Domain, Range]:
        @wraps(function)
        def wrapped(argument: Domain) -> Range:
            if argument in cache:
                return cache[argument]
            return function(argument)
        return wrapped
    return wrapper
Returns decorator that calls wrapped function if nothing was found in cache for its argument .
57,343
def updatable_map(cache: MutableMapping[Domain, Range]) -> Operator[Map]:
    """Decorator factory: memoize the function's results into ``cache``.

    Cache hits short-circuit the call; misses invoke the function and
    store the result for subsequent calls.
    """
    def wrapper(function: Map[Domain, Range]) -> Map[Domain, Range]:
        @wraps(function)
        def wrapped(argument: Domain) -> Range:
            try:
                return cache[argument]
            except KeyError:
                cache[argument] = result = function(argument)
                return result
        return wrapped
    return wrapper
Returns decorator that calls wrapped function if nothing was found in cache for its argument and reuses result afterwards .
57,344
def property_(getter: Map[Domain, Range]) -> property:
    """Return a property that runs ``getter`` on first access and reuses the
    cached result afterwards.

    Uses ``updatable_map`` — which writes results back into its cache —
    rather than the original ``map_``, which never stores anything and so
    re-ran the getter on *every* access, contradicting the documented
    "reuses result afterwards" behavior. The WeakKeyDictionary keys on the
    instance, so cache entries die with their instances.
    """
    return property(updatable_map(WeakKeyDictionary())(getter))
Returns property that calls given getter on the first access and reuses result afterwards .
57,345
def get_context_data(self, **kwargs):
    """Probe cookie support via Django's test-cookie machinery, then defer
    to the parent context.

    NOTE(review): set_test_cookie() only takes effect on the *next*
    request, so test_cookie_worked() checked in the same call will report
    failure on a first visit — confirm this view is hit across two requests.
    """
    session = self.request.session
    session.set_test_cookie()
    if not session.test_cookie_worked():
        messages.add_message(self.request, messages.ERROR, "Please enable cookies.")
    session.delete_test_cookie()
    return super().get_context_data(**kwargs)
Tests cookies .
57,346
def print(root):
    """Render a parsed tree as an ASCII string.

    Terminals are prefixed ``(T)``, nonterminals ``(N)`` and rules ``(R)``;
    connector characters sketch the tree structure. Note this deliberately
    shadows the builtin ``print`` within its module.
    """
    def print_before(previous=0, defined=None, is_last=False):
        # Build the indentation/connector prefix for one output line.
        defined = defined or {}
        ret = ''
        if previous != 0:
            for i in range(previous - 1):
                if i in defined:
                    ret += '| '
                else:
                    ret += ' '
            ret += '`--' if is_last else '|--'
        return ret

    def terminal_traverse(term, callback, previous=0, defined=None, is_last=False):
        before = print_before(previous, defined, is_last)
        yield before + '(T)' + str(term.s) + '\n'

    def nonterminal_traverse(nonterm, callback, previous=0, defined=None, is_last=False):
        before = print_before(previous, defined, is_last)
        yield before + '(N)' + nonterm.__class__.__name__ + '\n'
        yield callback(nonterm.to_rule, previous + 1, defined, True)

    def rule_traverse(rule, callback, previous=0, defined=None, is_last=False):
        before = print_before(previous, defined, is_last)
        yield before + '(R)' + rule.__class__.__name__ + '\n'
        # Mark this depth as having further siblings while the non-final
        # children are emitted, then unmark it for the last child.
        defined = defined or set()
        defined.add(previous)
        for i in range(len(rule.to_symbols) - 1):
            yield callback(rule.to_symbols[i], previous + 1, defined, False)
        defined.remove(previous)
        yield callback(rule.to_symbols[-1], previous + 1, defined, True)

    res = Traversing.traverse_separated(
        root, rule_traverse, nonterminal_traverse, terminal_traverse)
    return ''.join(res)
Transform the parsed tree to the string . Expects tree like structure . You can see example output below .
57,347
def get_filter(self):
    """Instantiate the filter form bound to the current request's GET data."""
    form_cls = self.filter_form_cls
    return form_cls(
        self.request.GET,
        runtime_context=self.get_runtime_context(),
        use_filter_chaining=self.use_filter_chaining,
    )
Get FilterForm instance .
57,348
def get_context_data(self, **kwargs):
    """Inject the filter form into the template context under the
    configured context name."""
    ctx = super(FilterFormMixin, self).get_context_data(**kwargs)
    ctx[self.context_filterform_name] = self.get_filter()
    return ctx
Add filter form to the context .
57,349
def compile_to_python(exp, env, done=None):
    """Assemble the compilation pipeline from a dao expression to Python code:
    alpha-conversion, CPS transform, analysis, optimization, pythonization
    and finally code emission."""
    original_exp = exp
    compiler = Compiler()
    if done is None:
        done = il.Done(compiler.new_var(il.ConstLocalVar('v')))
    compiler.exit_block_cont_map = {}
    compiler.continue_block_cont_map = {}
    compiler.protect_cont = done
    if env is None:
        env = Environment()
    # Stage 1: normalize and alpha-convert the expression.
    exp = element(exp)
    exp = exp.alpha(env, compiler)
    # Stage 2: continuation-passing-style transform, then analysis.
    exp = exp.cps(compiler, done)
    exp.analyse(compiler)
    # Stage 3: optimize in a fresh environment.
    env = Environment()
    exp = exp.optimize(env, compiler)
    # Stage 4: wrap in a function and pythonize.
    function = compiler.new_var(il.ConstLocalVar('compiled_dao_function'))
    exp = il.Function(function, (), exp)
    exp = il.begin(*exp.pythonize(env, compiler)[0])
    if isinstance(exp, il.Begin):
        exp = exp.statements[0]
    exp.body = exp.body.replace_return_with_yield()
    # Stage 5: emit code with a fresh compiler.
    compiler = Compiler()
    result = exp.to_code(compiler)
    return prelude + result
assemble steps from dao expression to python code
57,350
def last(pipe, items=1):
    """Return the final item of ``pipe``.

    With items == 1, drains the iterable and returns its last value
    (None when the iterable is empty). With items > 1, returns a tuple
    of the trailing ``items`` values.
    """
    if items == 1:
        final = None
        for final in pipe:
            pass
        return final
    return tuple(deque(pipe, maxlen=items))
this function simply returns the last item in an iterable
57,351
def print_help(filename, table, dest=sys.stdout):
    """Write a one-line usage summary for the command table to ``dest``.

    ``table`` maps command names to signature tuples; the commands are
    listed sorted and pipe-separated. The original used the Python 2-only
    ``print >> dest`` statement (a SyntaxError on Python 3); writing to
    the stream directly is equivalent on both versions.
    """
    cmds = '|'.join(sorted(table.keys()))
    dest.write("Syntax: %s %s [args]\n" % (path.basename(filename), cmds))
Print help to the given destination file object .
57,352
def dispatch(table, args):
    """Dispatch argv-style ``args`` to a handler chosen from ``table``.

    ``table`` maps a command name to ``(handler, type1, type2, ...)``; the
    type callables convert the string arguments before the handler runs.
    Prints usage and exits on a missing command, wrong arity, or a
    signature that is not iterable as expected.
    """
    if len(args) == 1:
        print_help(args[0], table)
        sys.exit(0)
    command = args[1]
    if command not in table or len(args) != len(table[command]) + 1:
        print_help(args[0], table, dest=sys.stderr)
        sys.exit(1)
    sig = table[command]
    try:
        converted = [coerce_(raw) for raw, coerce_ in zip(args[2:], sig[1:])]
    except TypeError:
        print_help(args[0], table, dest=sys.stderr)
        sys.exit(1)
    sig[0](*converted)
Dispatches to a function based on the contents of args .
57,353
def find_all(s, sub, start=0, end=0, limit=-1, reverse=False):
    """Collect the indexes of every non-overlapping occurrence of ``sub``
    in ``s``.

    start / end bound the search (end == 0 means "to the end of s"),
    limit caps the number of results (negative means unlimited), and
    reverse scans right-to-left, yielding indexes in descending order.
    Returns [] for empty inputs, out-of-range start, sub longer than s,
    or limit == 0.
    """
    hits = []
    if not (s and sub):
        return hits
    length = len(s)
    if length <= start:
        return hits
    sub_len = len(sub)
    if length < sub_len:
        return hits
    if limit == 0:
        return hits
    if limit < 0:
        limit = length
    end = min(end, length) or length
    idx = s.rfind(sub, start, end) if reverse else s.find(sub, start, end)
    while idx != -1:
        hits.append(idx)
        if len(hits) >= limit:
            break
        if reverse:
            # Negative end (idx - len(s)) resolves to position ``idx``,
            # so the next search looks strictly left of this hit.
            idx = s.rfind(sub, start, idx - length)
        else:
            idx = s.find(sub, idx + sub_len, end)
    return hits
Find all indexes of sub in s .
57,354
def get_substructure(data, path):
    """Walk ``path`` (a sequence of keys/indexes) into nested ``data``.

    Returns the value reached at the end of the path, or None as soon as
    any step fails (wrong container type, missing key, index out of range).
    An empty path returns ``data`` itself.
    """
    if not len(path):
        return data
    current = data
    for step in path:
        try:
            current = current[step]
        except (TypeError, IndexError, KeyError):
            return None
    return current
Tries to retrieve a sub - structure within some data . If the path does not match any sub - structure returns None .
57,355
def iterable(target):
    """Report whether ``target`` can be iterated.

    Keeps the original duck-typed shortcut — anything advertising
    ``next``, ``__next__`` or ``__iter__`` counts — and falls back to
    actually calling ``iter``. The fallback catches only TypeError (the
    exception ``iter`` raises for non-iterables) instead of the original
    bare ``except``, which would have swallowed unrelated failures.
    """
    if any(attr in ('next', '__next__', '__iter__') for attr in dir(target)):
        return True
    try:
        iter(target)
        return True
    except TypeError:
        return False
returns true if the given argument is iterable
57,356
def _thread_worker(self):
    """Consume callback packets from self._queue until self._running clears.

    Blocks on the queue; dict packets containing QS_CMD are forwarded to
    self._callback_listen, and any exception it raises is logged so that
    a faulty callback cannot kill the worker thread.
    """
    while self._running:
        packet = self._queue.get(True)  # blocking get
        if isinstance(packet, dict) and QS_CMD in packet:
            try:
                self._callback_listen(packet)
            except Exception as err:
                _LOGGER.error("Exception in callback\nType: %s: %s",
                              type(err), err)
        self._queue.task_done()
Process callbacks from the queue populated by &listen .
57,357
def _thread_listen(self):
    """Long-poll the device URL and feed response packets into self._queue.

    A connection timeout on the long poll is expected (no events pending)
    and is translated into a synthetic CMD_UPDATE packet; other failures
    back off with increasing sleeps.  An empty dict is queued on shutdown
    so the consumer thread can wake up and notice _running is False.
    """
    while self._running:
        try:
            rest = requests.get(URL_LISTEN.format(self._url),
                                timeout=self._timeout)
            if rest.status_code == 200:
                self._queue.put(rest.json())
            else:
                _LOGGER.error('QSUSB response code %s', rest.status_code)
                sleep(30)
        except requests.exceptions.ConnectionError as err:
            if str(err).find('timed') > 0:
                # Long-poll timed out: poke the worker with an update cmd.
                self._queue.put({QS_CMD: CMD_UPDATE})
            else:
                _LOGGER.error(str(err))
                sleep(60)
        except Exception as err:
            _LOGGER.error("%s - %s", str(type(err)), str(err))
            sleep(5)
    self._queue.put({})
The main &listen loop .
57,358
def hsla_to_rgba(h, s, l, a):
    """Convert an HSLA colour to an RGBA tuple of 0-255 ints.

    Hue is taken modulo 360; saturation, lightness and alpha are clamped
    to [0, 1] before conversion.
    """
    def clamp01(v):
        return max(0, min(1, v))

    h = h % 360
    s, l, a = clamp01(s), clamp01(l), clamp01(a)
    chroma = (1 - abs(2 * l - 1)) * s
    second = chroma * (1 - abs(h / 60 % 2 - 1))
    offset = l - chroma / 2
    # One 60-degree sector per (r, g, b) channel ordering.
    rgb_by_sector = {
        0: (chroma, second, 0),
        1: (second, chroma, 0),
        2: (0, chroma, second),
        3: (0, second, chroma),
        4: (second, 0, chroma),
        5: (chroma, 0, second),
    }
    r, g, b = rgb_by_sector[int(h // 60)]
    return (int((r + offset) * 255),
            int((g + offset) * 255),
            int((b + offset) * 255),
            int(a * 255))
Convert an HSLA colour to RGBA. Expects 0 <= h < 360 and 0 <= s, l, a <= 1.
57,359
def dir_list(directory):
    """Return the list of all entries in *directory*.

    On failure the error is printed and None is returned (legacy
    behaviour kept for callers that check for None).
    """
    try:
        return listdir(directory)
    except OSError as winErr:
        # WindowsError only exists on Windows (NameError elsewhere);
        # OSError is its base class and covers the same failures portably.
        print("Directory error: " + str((winErr)))
Returns the list of all files in the directory .
57,360
def read_dir(directory):
    """Concatenate the text of every file in *directory*.

    Each file's contents is followed by a single space separator.
    """
    pieces = []
    for name in dir_list(directory):
        pieces.append(read_file(directory + '/' + name))
        pieces.append(' ')
    return ''.join(pieces)
Returns the text of all files in a directory .
57,361
def colorize(occurence, maxoccurence, minoccurence):
    """Map an occurrence count onto an RGB colour.

    The maximum count is pure red, the minimum pure blue; everything in
    between blends a red component (scaled by count/max) with a blue
    component (scaled by min/count).
    """
    if occurence == maxoccurence:
        return (255, 0, 0)
    if occurence == minoccurence:
        return (0, 0, 255)
    red = int(float(occurence) / maxoccurence * 255)
    blue = int(float(minoccurence) / occurence * 255)
    return (red, 0, blue)
A formula for determining colors .
57,362
def fontsize(count, maxsize, minsize, maxcount):
    """Scale *count* linearly into a font size, clamped below at *minsize*.

    A count equal to maxcount yields maxsize; smaller counts shrink
    proportionally, never going below minsize.
    """
    scaled = int(maxsize - maxsize * (float(maxcount - count) / maxcount))
    return scaled if scaled >= minsize else minsize
A formula for determining font sizes .
57,363
def _init_display(self):
    """Send the SSD1306 power-up configuration command sequence.

    Configures multiplex ratio, display offset/start line, COM pin
    layout, contrast, clock divider, charge pump and memory addressing
    mode, then applies the horizontal/vertical mirroring flags.
    """
    self._command([
        self.CMD_SSD1306_DISPLAY_OFF,
        self.CMD_SSD1306_SET_SCROLL_DEACTIVE,
        self.CMD_SSD1306_SET_MULTIPLEX_RATIO, 0x3F,
        self.CMD_SSD1306_SET_DISPLAY_OFFSET, 0x00,
        self.CMD_SSD1306_SET_DISPLAY_START_LINE,
        self.CMD_SSD1306_SET_COM_PINS, (0x02 | 0x10),
        self.CMD_SSD1306_SET_CONTRAST, 0x7F,
        self.CMD_SSD1306_ENTIRE_DISPLAY_ON_0,
        self.CMD_SSD1306_NORMAL_DISPLAY,
        self.CMD_SSD1306_SET_CLOCK_DIVIDE_RATIO, 0x80,
        self.CMD_SSD1306_CHARGE_PUMP, 0x14,  # enable internal charge pump
        self.CMD_SSD1306_SET_MEM_ADDR_MODE, 0x01,
        # Mirroring flags select COM scan direction / segment remap.
        self.CMD_SSD1306_SCAN_DIRECTION_INC if self._mirror_v
        else self.CMD_SSD1306_SCAN_DIRECTION_DEC,
        self.CMD_SSD1306_SET_SEGMENT_REMAP_0 if self._mirror_h
        else self.CMD_SSD1306_SET_SEGMENT_REMAP_1,
    ])
! \ ~english Initialize the SSD1306 display chip
57,364
def display(self, buffer=None):
    """Write a frame buffer to the physical display.

    buffer -- buffer data to show; defaults to the object's own internal
              buffer (self._buffer) when omitted.
    """
    # `is not None` instead of `!= None`: identity check is the idiom and
    # avoids invoking __ne__ on exotic buffer types (e.g. numpy arrays,
    # where elementwise comparison would break the truth test).
    if buffer is not None:
        self._display_buffer(buffer)
    else:
        self._display_buffer(self._buffer)
! \ ~english Write buffer to physical display .
57,365
def scrollWith(self, hStart=0x00, hEnd=0x00, vOffset=0x00, vStart=0x00,
               vEnd=0x00, int=0x00, dire="left"):
    """Start a continuous horizontal/vertical scroll on the display.

    hStart/hEnd -- first/last page (row band) of the horizontal scroll.
    vOffset     -- rows scrolled vertically per step (0 = horizontal only).
    vStart/vEnd -- vertical scroll area, sent only when vOffset != 0.
    int         -- frame-interval code between scroll steps.
                   NOTE(review): parameter name shadows the builtin
                   ``int``; it cannot be renamed without breaking
                   keyword-argument callers.
    dire        -- scroll direction, "left" or "right" (case-insensitive).
    """
    # Scrolling must be off while the scroll registers are reprogrammed.
    self._command([self.CMD_SSD1306_SET_SCROLL_DEACTIVE])
    if vOffset != 0:
        self._command([self.CMD_SSD1306_SET_SCROLL_VERTICAL_AREA,
                       vStart, vEnd, 0x00])
    self._command([
        self.CMD_SSD1306_SET_SCROLL_HORIZONTAL_VERTICAL_LEFT
        if dire.upper() == "LEFT"
        else self.CMD_SSD1306_SET_SCROLL_HORIZONTAL_VERTICAL_RIGHT,
        0x00, hStart, int, hEnd, vOffset, 0x00,
        self.CMD_SSD1306_SET_SCROLL_ACTIVE])
! \ ~english Scroll screen
57,366
def run(self, schedule_type, lookup_id, **kwargs):
    """Queue DeliverTask jobs for every enabled Schedule on a trigger.

    schedule_type -- "crontab" or "interval", selecting which celery
                     schedule definition *lookup_id* refers to.
    lookup_id     -- id of the CrontabSchedule/IntervalSchedule row.

    Skips queuing (returning an "Aborted ..." message) when the previous
    run for this definition means the schedule is not yet due, within
    the configured clock-skew allowance.  Returns a summary string.
    """
    log = self.get_logger(**kwargs)
    log.info("Queuing <%s> <%s>" % (schedule_type, lookup_id))
    task_run = QueueTaskRun()
    task_run.task_id = self.request.id or uuid4()
    task_run.started_at = now()
    tr_qs = QueueTaskRun.objects
    schedules = Schedule.objects.filter(enabled=True)
    # Narrow both the schedules and the run history to this definition.
    if schedule_type == "crontab":
        schedules = schedules.filter(celery_cron_definition=lookup_id)
        tr_qs = tr_qs.filter(celery_cron_definition=lookup_id)
        scheduler_type = CrontabSchedule
        task_run.celery_cron_definition_id = lookup_id
    elif schedule_type == "interval":
        schedules = schedules.filter(celery_interval_definition=lookup_id)
        tr_qs = tr_qs.filter(celery_interval_definition=lookup_id)
        scheduler_type = IntervalSchedule
        task_run.celery_interval_definition_id = lookup_id
    try:
        last_task_run = tr_qs.latest("started_at")
    except QueueTaskRun.DoesNotExist:
        pass  # first ever run for this definition: nothing to debounce
    else:
        # Debounce: abort when the schedule is not due yet relative to
        # the last run, beyond the allowed clock skew.
        sched = scheduler_type.objects.get(id=lookup_id)
        due, due_next = sched.schedule.is_due(last_task_run.started_at)
        if not due and due_next >= settings.DEFAULT_CLOCK_SKEW_SECONDS:
            return ("Aborted Queuing <%s> <%s> due to last task run (%s) "
                    "at %s" % (schedule_type, lookup_id, last_task_run.id,
                               last_task_run.started_at, ))
    task_run.save()
    queued = 0
    schedules = schedules.values("id", "auth_token", "endpoint", "payload")
    for schedule in schedules.iterator():
        # DeliverTask expects schedule_id (string), not the raw pk.
        schedule["schedule_id"] = str(schedule.pop("id"))
        DeliverTask.apply_async(kwargs=schedule)
        queued += 1
    task_run.completed_at = now()
    task_run.save()
    return "Queued <%s> Tasks" % (queued, )
Loads Schedule linked to provided lookup
57,367
def bind(renderer, to):
    """Bind *renderer* to the callable *to*, producing a rendering view.

    Exceptions raised by *to* are delegated to renderer.view_error, and
    rendering failures to renderer.render_error, when those hooks exist;
    otherwise the original exception propagates unchanged.
    """
    @wraps(to)
    def view(request, **kwargs):
        try:
            result = to(request, **kwargs)
        except Exception as error:
            handler = getattr(renderer, "view_error", None)
            if handler is None:
                raise
            return handler(request, error)
        try:
            return renderer.render(request, result)
        except Exception as error:
            handler = getattr(renderer, "render_error", None)
            if handler is None:
                raise
            return handler(request, result, error)
    return view
Bind a renderer to the given callable by constructing a new rendering view .
57,368
def get_perm_model():
    """Return the Perm model class that is active in this project.

    Resolves settings.PERM_MODEL ('app_label.ModelName') through the
    Django app registry; raises ImproperlyConfigured for a malformed
    setting string or for a model that is not installed.
    """
    try:
        return django_apps.get_model(settings.PERM_MODEL, require_ready=False)
    except ValueError:
        raise ImproperlyConfigured(
            "PERM_MODEL must be of the form 'app_label.model_name'")
    except LookupError:
        raise ImproperlyConfigured(
            "PERM_MODEL refers to model '{}' that has not been installed"
            .format(settings.PERM_MODEL))
Returns the Perm model that is active in this project .
57,369
def _load_yaml_config(cls, config_data, filename="(unknown)"):
    """Parse YAML configuration text and return the resulting object.

    filename is used only for error reporting.  YAML syntax errors are
    logged (with one-based line/column when the parser provides a
    problem_mark) and then re-raised.
    """
    try:
        config = yaml.safe_load(config_data)
    except yaml.YAMLError as err:
        if hasattr(err, 'problem_mark'):
            mark = err.problem_mark
            # problem_mark is zero-based; report one-based positions.
            errmsg = ("Invalid YAML syntax in Configuration file "
                      "%(file)s at line: %(line)s, column: %(column)s."
                      % dict(file=filename, line=mark.line + 1,
                             column=mark.column + 1))
        else:
            errmsg = ("YAML error reading Configuration file "
                      "%(file)s" % dict(file=filename))
        logger.error(errmsg)
        raise
    logger.info("Configuration: %s", config)
    return config
Load a yaml config file .
57,370
def sround(x, precision=0):
    """Stochastically round *x* to *precision* decimal places.

    Convenience wrapper that builds a fresh StochasticRound (with its
    default, non-deterministic random source) for a single rounding.
    """
    sr = StochasticRound(precision=precision)
    return sr.round(x)
Round a single number using default non - deterministic generator .
57,371
def _parse_chord_line(line):
    """Parse a chord line into a ChordLineData object.

    Uses Chord.extract_chordpos to pull (chord, column-position) pairs
    out of the raw line and wraps each pair in a TabChord.
    """
    chords = [TabChord(position=position, chord=chord)
              for chord, position in Chord.extract_chordpos(line)]
    return ChordLineData(chords=chords)
Parse a chord line into a ChordLineData object .
57,372
def _get_line_type(line):
    """Classify a tablature line as 'empty', 'chord' or 'lyric'.

    A line counts as a chord line when removing every CHORD_RE match
    strips away more than half of its whitespace-normalised content.
    """
    stripped = line.strip()
    if not stripped:
        return 'empty'
    # What remains of the line once all chord tokens are deleted.
    remainder = re.sub(r"\s+", " ", re.sub(CHORD_RE, "", stripped))
    if len(remainder) * 2 < len(re.sub(r"\s+", " ", stripped)):
        return 'chord'
    return 'lyric'
Determine the line type from its contents.
57,373
def parse_line(line):
    """Parse a single raw line into a TabLine object.

    Trailing whitespace is stripped, the line is classified via
    _get_line_type, and the matching parser from _DATA_PARSERS builds the
    type-specific data payload.  The rstripped original text is retained.
    """
    line = line.rstrip()
    line_type = _get_line_type(line)
    return TabLine(
        type=line_type,
        data=_DATA_PARSERS[line_type](line),
        original=line,
    )
Parse a line into a TabLine object .
57,374
def parse_tablature(lines):
    """Parse an iterable of raw text lines into a Tablature object."""
    lines = [parse_line(l) for l in lines]
    return Tablature(lines=lines)
Parse a list of lines into a Tablature .
57,375
def preview(df, preview_rows=20):
    """Return a preview of *df*: head rows, a random middle sample, and
    tail rows, concatenated in that order.

    preview_rows is clamped to at least 4 and at most len(df).  A quarter
    of the rows come from each end; the remainder is sampled (without a
    fixed seed) from the interior.
    """
    n_rows = min(max(preview_rows, 4), df.shape[0])
    edge = math.floor(n_rows / 4)
    middle = df[edge:-edge].sample(n_rows - 2 * edge)
    return pd.concat([df.head(edge), middle, df.tail(edge)])
Returns a preview of a dataframe which contains both header rows and tail rows .
57,376
def title_line(text):
    """Render *text* centred between two terminal-width '=' rules."""
    width = shutil.get_terminal_size()[0]
    rule = '=' * width
    pad = ' ' * (width // 2 - len(text) // 2)
    return '\n'.join([rule, '', pad + str(text), '', rule]) + '\n'
Returns a string that represents the text as a title blurb
57,377
def RadiusGrid(gridSize):
    """Return a gridSize x gridSize array of distances from the grid centre.

    The centre sits at index (gridSize-1)/2 along each axis, i.e. between
    pixels for even sizes.
    """
    offsets = np.arange(gridSize) - (gridSize - 1.0) / 2.0
    # Broadcast row offsets against column offsets; hypot gives the
    # Euclidean distance, equivalent to abs(x + 1j*y).
    return np.hypot(offsets[:, np.newaxis], offsets[np.newaxis, :])
Return a square grid with values of the distance from the centre of the grid to each gridpoint
57,378
def CircularMaskGrid(gridSize, diameter=None):
    """Return a square boolean grid: True inside a centred circle of
    *diameter* (defaulting to the full grid width), False outside."""
    effective = gridSize if diameter is None else diameter
    return np.less_equal(RadiusGrid(gridSize), effective / 2.0)
Return a square grid with ones inside and zeros outside a given diameter circle
57,379
def AdaptiveOpticsCorrect(pupils, diameter, maxRadial, numRemove=None):
    """Correct wavefronts by subtracting Zernike modes up to maxRadial.

    Operates on a stack of pupil screens in parallel; the piston mode is
    removed as well (index 0 is deliberately revisited at the end of the
    mode loop).

    pupils    -- array whose trailing two axes are (gridSize, gridSize)
                 pupil screens.
    diameter  -- aperture diameter passed to ZernikeGrid.
    maxRadial -- highest radial Zernike order generated.
    numRemove -- number of modes to project out (default: all generated).
    """
    gridSize = pupils.shape[-1]
    # Flatten each screen so that mode removal is a plain inner product.
    pupilsVector = np.reshape(pupils, (-1, gridSize ** 2))
    zernikes = np.reshape(ZernikeGrid(gridSize, maxRadial, diameter),
                          (-1, gridSize ** 2))
    if numRemove is None:
        numRemove = zernikes.shape[0]
    numScreen = pupilsVector.shape[0]  # NOTE(review): unused
    # NOTE(review): normalisation divides by the piston mode's area;
    # assumes all ZernikeGrid modes share this scaling -- confirm.
    normalisation = 1.0 / np.sum(zernikes[0])
    for i in list(range(numRemove)) + [0, ]:
        amplitudes = np.inner(zernikes[i], pupilsVector) * normalisation
        pupilsVector = pupilsVector - zernikes[i] * amplitudes[:, np.newaxis]
    return np.reshape(pupilsVector, pupils.shape)
Correct a wavefront using Zernike rejection up to some maximal order . Can operate on multiple telescopes in parallel . Note that this version removes the piston mode as well
57,380
def FibreCouple(pupils, modeDiameter):
    """Return the complex amplitudes coupled into single-mode fibres.

    Each pupil screen (trailing two axes of *pupils*) is flattened and
    projected onto the fibre mode of the given diameter via an inner
    product.
    """
    gridSize = pupils.shape[-1]
    pupilsVector = np.reshape(pupils, (-1, gridSize ** 2))
    mode = np.reshape(FibreMode(gridSize, modeDiameter), (gridSize ** 2, ))
    return np.inner(pupilsVector, mode)
Return the complex amplitudes coupled into a set of fibers
57,381
def SingleModeCombine(pupils, modeDiameter=None):
    """Combine several pupils through single-mode fibres.

    Returns (fluxes, coherentFluxes): the per-telescope photometric
    fluxes and the pairwise coherent fluxes a_i * conj(a_j) for i > j.
    modeDiameter defaults to 90% of the pupil grid size.
    """
    if modeDiameter is None:
        modeDiameter = 0.9 * pupils.shape[-1]
    amplitudes = FibreCouple(pupils, modeDiameter)
    conjugates = np.conj(amplitudes)
    fluxes = (amplitudes * conjugates).real
    coherentFluxes = [amplitudes[i] * conjugates[j]
                      for i in range(1, len(amplitudes))
                      for j in range(i)]
    return fluxes, coherentFluxes
Return the instantaneous coherent fluxes and photometric fluxes for a multiway single - mode fibre combiner
57,382
def to_unicode(s):
    """Return *s* as a unicode string.

    Unicode input is returned unchanged; utf-8 encoded bytes are
    decoded.  Anything else, or bytes that are not valid utf-8, raises
    TypeError with an instructive message.
    """
    if isinstance(s, TEXT):
        return s
    if not isinstance(s, bytes):
        raise TypeError('You are required to pass either unicode or '
                        'bytes here, not: %r (%s)' % (type(s), s))
    try:
        return s.decode('utf-8')
    except UnicodeDecodeError as le:
        raise TypeError('You are required to pass either a unicode '
                        'object or a utf-8-encoded bytes string here. '
                        'You passed a bytes object which contained '
                        'non-utf-8: %r. The UnicodeDecodeError that '
                        'resulted from attempting to interpret it as '
                        'utf-8 was: %s' % (s, le, ))
Convert to unicode; raise an exception with an instructive error message if s is neither unicode nor ascii/utf-8 encoded bytes.
57,383
def to_postdata(self):
    """Serialize this parameter mapping as urlencoded POST data.

    Items are sorted for a stable ordering and utf-8 encoded; '+' is
    replaced by '%20' so spaces are percent-encoded.  Returns ascii
    bytes suitable for a request body.
    """
    items = []
    for k, v in sorted(self.items()):
        items.append((k.encode('utf-8'), to_utf8_optional_iterator(v)))
    return urlencode(items, True).replace('+', '%20').encode('ascii')
Serialize as post data for a POST request .
57,384
def fetch_request_token(self, oauth_request):
    """Process a request_token request and return the request token.

    If the request already carries a valid request token it is returned
    as-is; otherwise the oauth version and consumer are validated, the
    signature is checked, and the data store issues a new request token.
    """
    try:
        token = self._get_token(oauth_request, 'request')
    except Error:
        version = self._get_version(oauth_request)  # validates oauth_version
        consumer = self._get_consumer(oauth_request)
        try:
            callback = self.get_callback(oauth_request)
        except Error:
            callback = None  # callback may be absent
        self._check_signature(oauth_request, consumer, None)
        token = self.data_store.fetch_request_token(consumer, callback)
    return token
Processes a request_token request and returns the request token on success .
57,385
def fetch_access_token(self, oauth_request):
    """Process an access_token request and return the new access token.

    Validates the oauth version, consumer and signature over the request
    token, then asks the data store to exchange the request token (with
    its verifier, when supplied) for an access token.
    """
    version = self._get_version(oauth_request)  # validates oauth_version
    consumer = self._get_consumer(oauth_request)
    try:
        verifier = self._get_verifier(oauth_request)
    except Error:
        verifier = None  # verifier may be absent
    token = self._get_token(oauth_request, 'request')
    self._check_signature(oauth_request, consumer, token)
    new_token = self.data_store.fetch_access_token(consumer, token, verifier)
    return new_token
Processes an access_token request and returns the access token on success .
57,386
def _get_token(self, oauth_request, token_type='access'):
    """Look up the token named by the request's oauth_token parameter.

    Raises OAuthError when the data store has no *token_type* token
    under that key.
    """
    token_field = oauth_request.get_parameter('oauth_token')
    token = self.data_store.lookup_token(token_type, token_field)
    if not token:
        raise OAuthError('Invalid %s token: %s' % (token_type, token_field))
    return token
Try to find the token for the provided request token key .
57,387
def mete_upscale_iterative_alt(S, N, doublings):
    """Upscale METE species richness from the anchor area.

    Starting from S species and N individuals, the area is doubled
    *doublings* times; at each step the METE constraint is solved for the
    Lagrange multiplier x (via brentq) to obtain the richness at the
    doubled area.  Returns the array of richness values, anchor first.

    NOTE(review): Python 2 only (xrange); relies on scipy's optimize and
    a lerchphi implementation being in scope.
    """
    n_arr = np.empty(doublings + 1)
    s_arr = np.empty(doublings + 1)
    for i in xrange(doublings + 1):
        if i == 0:
            # Anchor scale: use the observed values directly.
            n_arr[i] = N
            s_arr[i] = S
        else:
            SA = s_arr[i - 1]
            n_arr[i] = 2 * n_arr[i - 1]  # individuals double with area
            N2A = n_arr[i]
            def S2A_calc(x, SA, N2A):
                # Richness at the doubled area implied by multiplier x.
                return ((SA + N2A * (1 - x) / (x - x ** (N2A + 1)) *
                         (1 - (x ** N2A) / (N2A + 1))) / x ** -1)
            def x_calc(x, SA, N2A):
                # Zero when x satisfies the METE constraint at 2A; the
                # tiny 1e-23 offset keeps the bracket endpoints finite.
                return (S2A_calc(x, SA, N2A) / N2A * x * (x ** N2A - 1) /
                        (x - 1) - (x ** N2A * (-lerchphi(x, 1, N2A + 1)) -
                                   np.log(1 - x))) - 1e-23
            x = (optimize.brentq(x_calc, 1e-24, 1 - 1e-16, args=(SA, N2A),
                                 xtol=1e-16, maxiter=1000, disp=True) +
                 1e-23)
            s_arr[i] = S2A_calc(x, SA, N2A)
    return s_arr
This function is used to upscale from the anchor area .
57,388
def fit_lsq(self, x, y_obs, params_start=None):
    """Fit this curve to observed data by least squares.

    x            -- independent variable values.
    y_obs        -- observed dependent values (same length as x).
    params_start -- optional initial parameter guesses (defaults to ones).

    Returns a tuple of fitted parameters; raises ValueError on bad input
    or when the optimiser fails to converge.

    NOTE(review): Python 2 raise syntax; this module targets Python 2.
    """
    x = np.atleast_1d(x)
    y_obs = np.atleast_1d(y_obs)
    if not params_start:
        params_start = np.ones(self.n_parameters)
    if len(x) != len(y_obs):
        raise ValueError, "x and y_obs must be the same length"
    if len(params_start) != self.n_parameters:
        raise ValueError, "Incorrect number of values in params_start"
    def residuals(params, x, y_obs):
        # Difference between observation and model prediction.
        y_pred = self.vals(x, *params)
        return y_obs - y_pred
    params_fit, _, _, msg, ier = optimize.leastsq(residuals, params_start,
                                                  args=(x, y_obs),
                                                  full_output=True)
    if ier > 4:
        # leastsq reports ier in 1..4 on success.
        raise ValueError, ("Least squares fit did not converge with "
                           "message %s" % msg)
    return tuple(params_fit)
Fit curve by method of least squares .
57,389
def fit_lsq(self, df):
    """Parameterize the generic SAR curve from an empirical data set.

    df -- DataFrame with a 'div' column; the row with div == '1,1'
          supplies the anchor values.

    Returns (n_spp, n_individs) for the '1,1' division.
    """
    tdf = df.set_index('div')
    # .ix was deprecated and then removed from pandas; .loc is the
    # label-based replacement with identical semantics here.
    anchor = tdf.loc['1,1']
    return anchor['n_spp'], anchor['n_individs']
Parameterize generic SAR curve from empirical data set
57,390
def after_insert(mapper, connection, target):
    """SQLAlchemy after-insert hook for CmtRECORDCOMMENT.

    Emits the record-after-update signal for the commented record and,
    when the new comment is a reply, appends its cache token to the
    parent's reply_order_cached_data inside an explicit transaction.
    """
    record_after_update.send(CmtRECORDCOMMENT, recid=target.id_bibrec)
    from .api import get_reply_order_cache_data
    if target.in_reply_to_id_cmtRECORDCOMMENT > 0:
        parent = CmtRECORDCOMMENT.query.get(
            target.in_reply_to_id_cmtRECORDCOMMENT)
        if parent:
            trans = connection.begin()
            parent_reply_order = (parent.reply_order_cached_data
                                  if parent.reply_order_cached_data else '')
            parent_reply_order += get_reply_order_cache_data(target.id)
            connection.execute(
                db.update(CmtRECORDCOMMENT.__table__)
                .where(CmtRECORDCOMMENT.id == parent.id)
                .values(reply_order_cached_data=parent_reply_order))
            trans.commit()
Update reply order cache and send record - after - update signal .
57,391
def is_collapsed(self, id_user):
    """Return True if this comment is collapsed by the given user."""
    return CmtCOLLAPSED.query.filter(
        db.and_(CmtCOLLAPSED.id_bibrec == self.id_bibrec,
                CmtCOLLAPSED.id_cmtRECORDCOMMENT == self.id,
                CmtCOLLAPSED.id_user == id_user)).count() > 0
Return true if the comment is collapsed by user .
57,392
def collapse(self, id_user):
    """Mark this comment as collapsed for the given user and commit."""
    c = CmtCOLLAPSED(id_bibrec=self.id_bibrec,
                     id_cmtRECORDCOMMENT=self.id,
                     id_user=id_user)
    db.session.add(c)
    db.session.commit()
Collapse comment belonging to user.
57,393
def expand(self, id_user):
    """Remove the given user's collapsed flag for this comment.

    Deletes directly at the query level (no session sync, no commit here).
    """
    CmtCOLLAPSED.query.filter(
        db.and_(CmtCOLLAPSED.id_bibrec == self.id_bibrec,
                CmtCOLLAPSED.id_cmtRECORDCOMMENT == self.id,
                CmtCOLLAPSED.id_user == id_user)).delete(
                    synchronize_session=False)
Expand comment belonging to user.
57,394
def count(cls, *criteria, **filters):
    """Return how many comments match the given SQLAlchemy *criteria*
    (filter expressions) and **filters (column=value equality pairs)."""
    return cls.query.filter(*criteria).filter_by(**filters).count()
Count how many comments .
57,395
def get_version(version=None):
    """Format a 5-element version tuple as a version string.

    (1, 2, 0, '', 0)   -> '1.2'
    (1, 2, 3, '', 0)   -> '1.2.3'
    (1, 2, 3, 'rc', 0) -> '1.2.3-rc'
    (1, 2, 3, 'rc', 4) -> '1.2.3-rc.4'
    """
    major, minor, micro, label, serial = version[:5]
    if serial > 0:
        return "%s.%s.%s-%s.%s" % (major, minor, micro, label, serial)
    if label != '':
        return "%s.%s.%s-%s" % (major, minor, micro, label)
    if micro > 0:
        return "%s.%s.%s" % (major, minor, micro)
    return "%s.%s" % (major, minor)
Returns a string representation of the given version tuple (e.g. '1.2.3-beta.1').
57,396
def _push_packet(self, packet):
    """Queue an incoming packet and wake a pending wait_message() caller.

    Stores both the decoded and the raw packet.  The read-waiter future,
    if one is parked, is cleared before being resolved so it fires
    exactly once.
    """
    self._read_queue.append((decode(packet), packet))
    if self._read_waiter is not None:
        # Swap-and-clear so a later packet cannot resolve the same
        # future twice.
        w, self._read_waiter = self._read_waiter, None
        w.set_result(None)
Appends a packet to the internal read queue or notifies a waiting listener that a packet just came in .
57,397
def _read_data(self):
    """Pump packets from the socket into the read queue via _push_packet.

    Runs until the receive is cancelled or the connection closes, then
    schedules close() on the event loop.
    """
    while True:
        try:
            data = yield from self._socket.recv()
        except asyncio.CancelledError:
            break
        except ConnectionClosed:
            break
        self._push_packet(data)
    self._loop.call_soon(self.close)
Reads data from the connection and adds it to _push_packet until the connection is closed or the task in cancelled .
57,398
def wait_message(self):
    """Coroutine: wait until a message is readable on this connection.

    Returns False once the connection is no longer open, True when a
    message is already queued; otherwise parks on a future that
    _push_packet resolves, then re-evaluates.  Only one concurrent
    waiter per connection is supported.
    """
    if self._state != states['open']:
        return False
    if len(self._read_queue) > 0:
        return True
    assert self._read_waiter is None or self._read_waiter.cancelled(), \
        "You may only use one wait_message() per connection."
    self._read_waiter = asyncio.Future(loop=self._loop)
    yield from self._read_waiter
    # Something happened (packet arrived or state changed): re-check.
    return self.wait_message()
Waits until a message is available on the wire, or until the connection is in a state where it can't accept messages. Returns True if a message is available, False otherwise.
57,399
def get_reservation_ports(session, reservation_id,
                          model_name='Generic Traffic Generator Port'):
    """Collect all resources of *model_name* from a reservation.

    session        -- API session providing GetReservationDetails.
    reservation_id -- id of the reservation to inspect.
    model_name     -- resource model to match (default: generic traffic
                      generator port).
    Returns the matching resources as a list.
    """
    details = session.GetReservationDetails(reservation_id)
    description = details.ReservationDescription
    return [resource for resource in description.Resources
            if resource.ResourceModelName == model_name]
Get all Generic Traffic Generator Port in reservation .