idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
49,800
def job_ids(log_stream):
    """Extract job ids from every log line that mentions ``scancel``."""
    matching = (entry for entry in log_stream if 'scancel' in entry)
    # The job id is the 6 characters just before the last character of the
    # stripped line.
    return [entry.strip()[-7:-1] for entry in matching]
Grep out all lines containing a scancel example.
49,801
def get_or_create_config(path, config):
    """Load a TOML config from ``path``, or write one out from defaults.

    If the file exists its contents are inflated into ``config``; otherwise
    the current (default) config is deflated and dumped to ``path``.
    """
    if not os.path.isfile(path):
        # No file yet: make sure the parent directory exists, then dump defaults.
        try:
            os.makedirs(os.path.dirname(path))
        except OSError:
            pass  # directory already exists (or path has no directory part)
        with open(path, "w") as fh:
            toml.dump(config._deflate(), fh)
    else:
        with open(path) as fh:
            _LOG.debug("loading config from %s", os.path.abspath(path))
            config._inflate(toml.load(fh))
Using TOML format load config from given path or write out example based on defaults
49,802
def _deflate(cls):
    """Prepare for serialisation: return the public attributes as a dict."""
    public = {
        name: value
        for name, value in vars(cls).items()
        if not name.startswith("_")  # private/internal attributes stay out
    }
    return {Constants.CONFIG_KEY: public}
Prepare for serialisation - returns a dictionary
49,803
def _inflate(cls, data):
    """Update config attributes from a deserialised dict; return the result."""
    for name, value in data[Constants.CONFIG_KEY].items():
        setattr(cls, name, value)
    # Echo back the (now updated) serialisable view.
    return cls._deflate()
Update config by deserialising input dictionary
49,804
def with_patched_object(obj, attr_name, patched_value):
    """Decorator: patch ``obj.attr_name`` around the call, restore afterwards."""
    def patcher(method):
        @wraps(method)
        def method_call(*m_args, **m_kw):
            patched = patch_object(obj, attr_name, patched_value)
            try:
                return method(*m_args, **m_kw)
            finally:
                # Always undo the patch, even when the wrapped call raises.
                patched.restore()
        return method_call
    return patcher
Decorator that patches an object before the decorated method is called and restores it afterwards .
49,805
def patch(self, patched_value):
    """Set a new value for the attribute of the object."""
    try:
        target = self.getter_class if self.getter else self.orig_object
        setattr(target, self.attr_name, patched_value)
    except TypeError:
        # Built-in / extension types reject setattr: install a subclass proxy
        # under the original name in the defining module instead.
        proxy_name = 'fudge_proxy_%s_%s_%s' % (
            self.orig_object.__module__,
            self.orig_object.__name__,
            patched_value.__class__.__name__,
        )
        self.proxy_object = type(
            proxy_name, (self.orig_object,), {self.attr_name: patched_value})
        mod = sys.modules[self.orig_object.__module__]
        setattr(mod, self.orig_object.__name__, self.proxy_object)
Set a new value for the attribute of the object .
49,806
def restore(self):
    """Restore the saved value for the attribute of the object."""
    if self.proxy_object is not None:
        # A proxy class was installed; put the original class back in its module.
        setattr(sys.modules[self.orig_object.__module__],
                self.orig_object.__name__,
                self.orig_object)
    elif self.getter:
        setattr(self.getter_class, self.attr_name, self.getter)
    elif self.is_local:
        setattr(self.orig_object, self.attr_name, self.orig_value)
    else:
        # Attribute did not exist before patching: remove it entirely.
        delattr(self.orig_object, self.attr_name)
Restore the saved value for the attribute of the object .
49,807
def parse_config(data: dict) -> dict:
    """Parse a MIP config file.

    Args:
        data: raw config values keyed per MIP's naming scheme.

    Returns:
        dict: normalised config with per-sample analysis types expanded
        into a list of ``{'id', 'type'}`` dicts.

    Raises:
        KeyError: when a required MIP key is missing from ``data``.
    """
    return {
        'email': data.get('email'),  # optional key
        'family': data['family_id'],
        'samples': [
            {'id': sample_id, 'type': analysis_type}
            for sample_id, analysis_type in data['analysis_type'].items()
        ],
        'config_path': data['config_file_analysis'],
        # Presence of the key alone marks a dry run
        # (was: `True if ... else False` anti-idiom).
        'is_dryrun': 'dry_run_all' in data,
        'log_path': data['log_file'],
        'out_dir': data['outdata_dir'],
        'priority': data['slurm_quality_of_service'],
        'sampleinfo_path': data['sample_info_file'],
    }
Parse MIP config file .
49,808
def parse_sampleinfo(data: dict) -> dict:
    """Parse a MIP sample info file.

    Args:
        data: deserialised sample-info content (MIP qc sample info).

    Returns:
        dict: normalised analysis metadata plus one entry per sample.

    Raises:
        KeyError: when a required MIP key is missing from ``data``.
    """
    genome_build = data['human_genome_build']
    genome_build_str = f"{genome_build['source']}{genome_build['version']}"

    if 'svdb' in data['program']:
        svdb_outpath = f"{data['program']['svdb']['path']}"
    else:
        svdb_outpath = ''

    # Hoist the optional sections once instead of re-testing membership
    # for every field (was repeated `if 'peddy' in data['program']` etc.).
    peddy = data['program'].get('peddy')
    sv_files = data.get('sv_vcf_binary_file')

    outdata = {
        'date': data['analysis_date'],
        'family': data['family'],
        'genome_build': genome_build_str,
        'rank_model_version': data['program']['genmod']['rank_model']['version'],
        # was: `True if ... == 'finished' else False` anti-idiom
        'is_finished': data['analysisrunstatus'] == 'finished',
        'pedigree_path': data['pedigree_minimal'],
        'peddy': {
            'ped': peddy['peddy']['path'] if peddy is not None else None,
            'ped_check': peddy['ped_check']['path'] if peddy is not None else None,
            'sex_check': peddy['sex_check']['path'] if peddy is not None else None,
        },
        'qcmetrics_path': data['program']['qccollect']['path'],
        'samples': [],
        'snv': {
            'bcf': data['most_complete_bcf']['path'],
            'clinical_vcf': data['vcf_binary_file']['clinical']['path'],
            'gbcf': data['gbcf_file']['path'],
            'research_vcf': data['vcf_binary_file']['research']['path'],
        },
        'svdb_outpath': svdb_outpath,
        'sv': {
            'bcf': data.get('sv_bcf_file', {}).get('path'),
            'clinical_vcf': (sv_files['clinical']['path']
                             if sv_files is not None else None),
            'merged': svdb_outpath,
            'research_vcf': (sv_files['research']['path']
                             if sv_files is not None else None),
        },
        'version': data['mip_version'],
    }

    for sample_id, sample_data in data['sample'].items():
        programs = sample_data['program']
        if 'samtools_subsample_mt' in programs:
            subsample_mt = list(
                programs['samtools_subsample_mt'].values())[0]['path']
        else:
            subsample_mt = None
        # Each program maps run-id -> info; only the first entry is used.
        chanjo_sexcheck = list(programs['chanjo_sexcheck'].values())[0]
        outdata['samples'].append({
            'id': sample_id,
            'bam': sample_data['most_complete_bam']['path'],
            'chanjo_sexcheck': chanjo_sexcheck['path'],
            'sambamba': list(programs['sambamba_depth'].values())[0]['path'],
            'sex': sample_data['sex'],
            'subsample_mt': subsample_mt,
            'vcf2cytosure': list(programs['vcf2cytosure'].values())[0]['path'],
        })

    return outdata
Parse MIP sample info file .
49,809
def parse_peddy_sexcheck(handle: TextIO):
    """Parse Peddy sexcheck output.

    Args:
        handle: open CSV stream with at least the columns
            ``sample_id``, ``predicted_sex``, ``het_ratio``, ``error``.

    Returns:
        dict: per-sample prediction info keyed by sample id.
    """
    data = {}
    for sample in csv.DictReader(handle):
        data[sample['sample_id']] = {
            'predicted_sex': sample['predicted_sex'],
            'het_ratio': float(sample['het_ratio']),
            # The column holds the literal strings 'True'/'False'
            # (was: `True if ... else False` anti-idiom).
            'error': sample['error'] == 'True',
        }
    return data
Parse Peddy sexcheck output .
49,810
def parse_chanjo_sexcheck(handle: TextIO):
    """Parse Chanjo sex-check output (only the first data row matters)."""
    reader = csv.DictReader(handle, delimiter='\t')
    for row in reader:
        # Return on the first row; implicitly None for an empty file.
        return {
            'predicted_sex': row['sex'],
            'x_coverage': float(row['#X_coverage']),
            'y_coverage': float(row['Y_coverage']),
        }
Parse Chanjo sex - check output .
49,811
def get_cli ( ) : parser = argparse . ArgumentParser ( prog = "auto_version" , description = "auto version v%s: a tool to control version numbers" % __version__ , ) parser . add_argument ( "--target" , action = "append" , default = [ ] , help = "Files containing version info. " "Assumes unique variable names between files. (default: %s)." % ( config . targets , ) , ) parser . add_argument ( "--bump" , choices = SemVerSigFig , help = "Bumps the specified part of SemVer string. " "Use this locally to correctly modify the version file." , ) parser . add_argument ( "--news" , "--file-triggers" , action = "store_true" , dest = "file_triggers" , help = "Detects need to bump based on presence of files (as specified in config)." , ) parser . add_argument ( "--set" , help = "Set the SemVer string. Use this locally to set the project version explicitly." , ) parser . add_argument ( "--set-patch-count" , action = "store_true" , help = "Sets the patch number to the commit count." , ) parser . add_argument ( "--lock" , action = "store_true" , help = "Locks the SemVer string. " "Lock will remain for another call to autoversion before being cleared." , ) parser . add_argument ( "--release" , action = "store_true" , default = False , help = "Marks as a release build, which flags the build as released." , ) parser . add_argument ( "--version" , action = "store_true" , default = False , help = "Prints the version of auto_version itself (self-version)." , ) parser . add_argument ( "--config" , help = "Configuration file path." ) parser . add_argument ( "-v" , "--verbosity" , action = "count" , default = 0 , help = "increase output verbosity. " "can be specified multiple times" , ) return parser . parse_known_args ( )
Load cli options
49,812
def stdio_mgr(in_str=""):
    r"""Substitute temporary text buffers for stdio in a managed context.

    Args:
        in_str: initial content fed to the replacement stdin.

    Yields:
        tuple: the (stdin, stdout, stderr) replacement buffers.
    """
    old_stdin = sys.stdin
    old_stdout = sys.stdout
    old_stderr = sys.stderr

    new_stdout = StringIO()
    new_stderr = StringIO()
    new_stdin = TeeStdin(new_stdout, in_str)

    sys.stdin = new_stdin
    sys.stdout = new_stdout
    sys.stderr = new_stderr

    try:
        yield new_stdin, new_stdout, new_stderr
    finally:
        # Fix: the original never restored the real streams (or closed the
        # buffers) when the managed body raised; finally guarantees cleanup.
        sys.stdin = old_stdin
        sys.stdout = old_stdout
        sys.stderr = old_stderr

        new_stdin.close()
        new_stdout.close()
        new_stderr.close()
Substitute temporary text buffers for stdio in a managed context.
49,813
def draw_segments(image, segments, color=(255, 0, 0), line_width=1):
    """Draw every (x, y, w, h) segment on the image as a rectangle."""
    for x, y, w, h in segments:
        cv2.rectangle(image, (x, y), (x + w, y + h), color, line_width)
draws segments on image
49,814
def draw_lines(image, ys, color=(255, 0, 0), line_width=1):
    """Draw a full-width horizontal line at each given y coordinate."""
    right_edge = image.shape[1]
    for y_pos in ys:
        cv2.line(image, (0, y_pos), (right_edge, y_pos), color, line_width)
draws horizontal lines
49,815
def time_to_sec(time_str: str) -> int:
    """Convert a '[D-]HH:MM:SS' style time string to seconds.

    The trailing seconds field is intentionally dropped (split(':')[:-1]);
    only days, hours and minutes contribute to the total.
    """
    total = 0
    if '-' in time_str:
        days, time_str = time_str.split('-')
        total += int(days) * 86400  # 24 * 60 * 60
    fields = time_str.split(':')[:-1]  # discard the seconds field
    parts = [int(round(float(field))) for field in fields]
    total += parts[-1] * 60            # minutes
    if len(parts) > 1:
        total += parts[-2] * 3600      # hours
    return total
Convert time in string format to seconds .
49,816
def convert_job(row: list) -> dict:
    """Convert a whitespace-split sacct output row to a dict.

    Args:
        row: sacct fields; positions used are id (0), name (1), cpu (-6),
            elapsed (-5), start (-4), end (-3), state (-2).

    Returns:
        dict: normalised job info with parsed datetimes and durations.
    """
    state = row[-2]
    start_raw, end_raw = row[-4], row[-3]

    start_time = end_time = None
    if state not in ('PENDING', 'CANCELLED'):
        start_time = datetime.strptime(start_raw, '%Y-%m-%dT%H:%M:%S')
        if state != 'RUNNING':
            end_time = datetime.strptime(end_raw, '%Y-%m-%dT%H:%M:%S')

    job_name = row[1]
    # Fix: str.rstrip('_BOTH') strips any trailing characters from the set
    # {_, B, O, T, H} rather than removing the literal suffix, which mangles
    # step names ending in those letters. Remove the exact suffixes instead.
    trimmed = job_name
    for suffix in ('_BOTH', '_SV'):
        if trimmed.endswith(suffix):
            trimmed = trimmed[:-len(suffix)]
    step_name, step_context = trimmed.rsplit('_', 1)

    return {
        'id': int(row[0]),
        'name': job_name,
        'step': step_name,
        'context': step_context,
        'state': state,
        'start': start_time,
        'end': end_time,
        'elapsed': time_to_sec(row[-5]),
        'cpu': time_to_sec(row[-6]),
        'is_completed': state == 'COMPLETED',
    }
Convert sacct row to dict .
49,817
def parse_sacct(sacct_stream):
    """Parse job information out of sacct status output."""
    jobs = []
    for line in sacct_stream:
        fields = line.split()
        # Only rows whose first column is a numeric job id are real jobs;
        # header and separator lines are skipped.
        if fields[0].isdigit():
            jobs.append(convert_job(fields))
    return jobs
Parse out information from sacct status output .
49,818
def filter_jobs(sacct_jobs, failed=True):
    """Return the jobs whose state falls in the failed (or normal) categories."""
    if failed:
        categories = FAILED_CATEGORIES
    else:
        categories = NORMAL_CATEGORIES
    return [job for job in sacct_jobs if job['state'] in categories]
Filter jobs that have a FAILED etc . status .
49,819
def guess_segments_lines(segments, lines, nearline_tolerance=5.0):
    """Assign each segment to its closest line index, or -1 for outliers."""
    ys = segments[:, 1]
    # Distance from every segment y to every candidate line position.
    closeness = numpy.abs(numpy.subtract.outer(ys, lines))
    line_of_y = numpy.argmin(closeness, axis=1)
    distance = numpy.min(closeness, axis=1)
    # Outliers sit further from their nearest line than mean + tol * std.
    threshold = numpy.mean(distance) + nearline_tolerance * numpy.std(distance)
    line_of_y[distance > threshold] = -1
    return line_of_y
Given segments, outputs an array of line numbers, or -1 when a segment doesn't belong to any line.
49,820
def _process(self, segments):
    """Sort segments into reading order: left-to-right, top-to-bottom."""
    mlh, mlw = self.max_line_height, self.max_line_width
    as_uint = segments.astype(numpy.uint32)
    # rank = row-band * max width + x, so vertical bands sort first and
    # x position breaks ties within a band.
    rank = mlw * (as_uint[:, 1] // mlh) + as_uint[:, 0]
    return segments[numpy.argsort(rank)]
Sort segments in reading order - left to right, top to bottom.
49,821
def _guess_lines ( ys , max_lines = 50 , confidence_minimum = 0.0 ) : ys = ys . astype ( numpy . float32 ) compactness_list , means_list , diffs , deviations = [ ] , [ ] , [ ] , [ ] start_n = 1 for k in range ( start_n , min ( len ( ys ) , max_lines ) ) : compactness , classified_points , means = cv2 . kmeans ( data = ys , K = k , bestLabels = None , criteria = ( cv2 . TERM_CRITERIA_EPS | cv2 . TERM_CRITERIA_MAX_ITER , 1 , 10 ) , attempts = 2 , flags = cv2 . KMEANS_PP_CENTERS ) means = numpy . sort ( means , axis = 0 ) means_list . append ( means ) compactness_list . append ( compactness ) if k < 3 : tmp1 = [ 1 , 2 , 500 , 550 ] else : tmp1 = numpy . diff ( means , axis = 0 ) tmp2 = numpy . std ( tmp1 ) / numpy . mean ( means ) tmp3 = numpy . sum ( ( tmp1 - numpy . mean ( tmp1 ) ) ** 2 ) diffs . append ( tmp1 ) deviations . append ( tmp3 ) compactness_list = numpy . diff ( numpy . log ( numpy . array ( compactness_list ) + 0.01 ) ) deviations = numpy . array ( deviations [ 1 : ] ) deviations [ 0 ] = numpy . mean ( deviations [ 1 : ] ) compactness_list = ( compactness_list - numpy . mean ( compactness_list ) ) / numpy . std ( compactness_list ) deviations = ( deviations - numpy . mean ( deviations ) ) / numpy . std ( deviations ) aglomerated_metric = 0.1 * compactness_list + 0.9 * deviations i = numpy . argmin ( aglomerated_metric ) + 1 lines = means_list [ i ] betterness = numpy . sort ( aglomerated_metric , axis = 0 ) confidence = ( betterness [ 1 ] - betterness [ 0 ] ) / ( betterness [ 2 ] - betterness [ 1 ] ) if confidence < confidence_minimum : raise Exception ( "low confidence" ) return lines
Guesses and returns the text inter-line distance, the number of lines, and the y-position of the first line.
49,822
def _run ( self ) : self . _is_running = True network_fail = False try : while self . _do_run : try : if network_fail is True : LOGGER . info ( "Network connection re-established!" ) network_fail = False self . _sender ( self . _message ) except IOError as err : if err . errno == errno . ENETUNREACH : LOGGER . error ( "Network unreachable. " "Trying again in %d s." , self . _interval ) network_fail = True else : raise time . sleep ( self . _interval ) finally : self . _is_running = False self . _sender . close ( )
Broadcasts forever .
49,823
def from_json ( self , json ) : res_type = json [ 'sys' ] [ 'type' ] if ResourceType . Array . value == res_type : return self . create_array ( json ) elif ResourceType . Entry . value == res_type : return self . create_entry ( json ) elif ResourceType . Asset . value == res_type : return ResourceFactory . create_asset ( json ) elif ResourceType . ContentType . value == res_type : return ResourceFactory . create_content_type ( json ) elif ResourceType . Space . value == res_type : return ResourceFactory . create_space ( json )
Create resource out of JSON data .
49,824
def process_array_items(self, array, json):
    """Create a resource for every item and register it on the array."""
    for item in json['items']:
        processed = self.from_json(item)
        # Only Assets and Entries get indexed in the mapped lookup.
        if isinstance(processed, Asset):
            key = 'Asset'
        elif isinstance(processed, Entry):
            key = 'Entry'
        else:
            key = None
        if key is not None:
            array.items_mapped[key][processed.sys['id']] = processed
        array.items.append(processed)
Iterate through all items and create a resource for each .
49,825
def process_array_includes(self, array, json):
    """Create a resource for every 'includes' entry and map it on the array."""
    includes = json.get('includes') or {}
    for key in array.items_mapped.keys():
        # Only resource kinds the array already tracks are resolved.
        for resource in includes.get(key, ()):
            processed = self.from_json(resource)
            array.items_mapped[key][processed.sys['id']] = processed
Iterate through all includes and create a resource for every item .
49,826
def clear_calls(self):
    """Clear recorded calls on registered fakes and reset every call stack."""
    self.clear_actual_calls()
    for stack in self.call_stacks:
        stack.reset()
    for _fake, call_order in self.get_expected_call_order().items():
        call_order.reset_calls()
Clears out any calls that were made on previously registered fake objects and resets all call stacks .
49,827
def verify(self):
    """Assert every expected call (and call order) was met; AssertionError otherwise."""
    try:
        for expectation in self.get_expected_calls():
            expectation.assert_called()
            expectation.assert_times_called()
        for _fake, call_order in self.get_expected_call_order().items():
            call_order.assert_order_met(finalize=True)
    finally:
        # Always reset state so one failure doesn't poison the next test.
        self.clear_calls()
Ensure all expected calls were called raise AssertionError otherwise .
49,828
def assert_order_met ( self , finalize = False ) : error = None actual_call_len = len ( self . _actual_calls ) expected_call_len = len ( self . _call_order ) if actual_call_len == 0 : error = "Not enough calls were made" else : for i , call in enumerate ( self . _call_order ) : if actual_call_len < i + 1 : if not finalize : continue calls_made = len ( self . _actual_calls ) if calls_made == 1 : error = "Only 1 call was made" else : error = "Only %s calls were made" % calls_made break ac_call = self . _actual_calls [ i ] if ac_call is not call : error = "Call #%s was %r" % ( i + 1 , ac_call ) break if not error : if actual_call_len > expected_call_len : error = "#%s %s was unexpected" % ( expected_call_len + 1 , self . _actual_calls [ expected_call_len ] ) if error : msg = "%s; Expected: %s" % ( error , self . _repr_call_list ( self . _call_order ) ) raise AssertionError ( msg )
assert that calls have been made in the right order .
49,829
def expects_call ( self ) : self . _callable = ExpectedCall ( self , call_name = self . _name , callable = True ) return self
The fake must be called .
49,830
def is_callable ( self ) : self . _callable = Call ( self , call_name = self . _name , callable = True ) return self
The fake can be called .
49,831
def calls(self, call):
    """Replace the implementation of the current call (fluent)."""
    current = self._get_current_call()
    current.call_replacement = call
    return self
Redefine a call .
49,832
def expects ( self , call_name ) : if call_name in self . _declared_calls : return self . next_call ( for_method = call_name ) self . _last_declared_call_name = call_name c = ExpectedCall ( self , call_name , call_order = self . _expected_call_order ) self . _declare_call ( call_name , c ) return self
Expect a call .
49,833
def next_call ( self , for_method = None ) : last_call_name = self . _last_declared_call_name if for_method : if for_method not in self . _declared_calls : raise FakeDeclarationError ( "next_call(for_method=%r) is not possible; " "declare expects(%r) or provides(%r) first" % ( for_method , for_method , for_method ) ) else : last_call_name = for_method self . _last_declared_call_name = last_call_name if last_call_name : exp = self . _declared_calls [ last_call_name ] elif self . _callable : exp = self . _callable else : raise FakeDeclarationError ( 'next_call() must follow provides(), ' 'expects() or is_callable()' ) if getattr ( exp , 'expected_times_called' , None ) is not None : raise FakeDeclarationError ( "Cannot use next_call() in combination with times_called()" ) if not isinstance ( exp , CallStack ) : stack = CallStack ( self , initial_calls = [ exp ] , expected = isinstance ( exp , ExpectedCall ) , call_name = exp . call_name ) if last_call_name : self . _declare_call ( last_call_name , stack ) elif self . _callable : self . _callable = stack else : stack = exp if stack . expected : next_call = ExpectedCall ( self , call_name = exp . call_name , call_order = self . _expected_call_order ) else : next_call = Call ( self , call_name = exp . call_name ) stack . add_call ( next_call ) return self
Start expecting or providing multiple calls .
49,834
def provides ( self , call_name ) : if call_name in self . _declared_calls : return self . next_call ( for_method = call_name ) self . _last_declared_call_name = call_name c = Call ( self , call_name ) self . _declare_call ( call_name , c ) return self
Provide a call .
49,835
def raises(self, exc):
    """Make the current call raise the given exception class/instance (fluent)."""
    current = self._get_current_call()
    current.exception_to_raise = exc
    return self
Set last call to raise an exception class or instance .
49,836
def returns(self, val):
    """Make the current call return the given value (fluent)."""
    current = self._get_current_call()
    current.return_val = val
    return self
Set the last call to return a value .
49,837
def times_called ( self , n ) : if self . _last_declared_call_name : actual_last_call = self . _declared_calls [ self . _last_declared_call_name ] if isinstance ( actual_last_call , CallStack ) : raise FakeDeclarationError ( "Cannot use times_called() in combination with next_call()" ) exp = self . _get_current_call ( ) exp . expected_times_called = n return self
Set the number of times an object can be called .
49,838
def with_args ( self , * args , ** kwargs ) : exp = self . _get_current_call ( ) if args : exp . expected_args = args if kwargs : exp . expected_kwargs = kwargs return self
Set the last call to expect specific argument values .
49,839
def with_matching_args ( self , * args , ** kwargs ) : exp = self . _get_current_call ( ) if args : exp . expected_matching_args = args if kwargs : exp . expected_matching_kwargs = kwargs return self
Set the last call to expect specific argument values if those arguments exist .
49,840
def without_args ( self , * args , ** kwargs ) : exp = self . _get_current_call ( ) if args : exp . unexpected_args = args if kwargs : exp . unexpected_kwargs = kwargs return self
Set the last call to expect that certain arguments will not exist .
49,841
def with_arg_count ( self , count ) : exp = self . _get_current_call ( ) exp . expected_arg_count = count return self
Set the last call to expect an exact argument count .
49,842
def with_kwarg_count ( self , count ) : exp = self . _get_current_call ( ) exp . expected_kwarg_count = count return self
Set the last call to expect an exact count of keyword arguments .
49,843
def connect ( self ) : self . log . debug ( 'starting the ``get`` method' ) transientSettings = self . settings [ "database settings" ] [ "transients" ] catalogueSettings = self . settings [ "database settings" ] [ "static catalogues" ] if "pessto marshall" in self . settings [ "database settings" ] : marshallSettings = self . settings [ "database settings" ] [ "pessto marshall" ] else : marshallSettings = False dbConns = [ ] for dbSettings in [ transientSettings , catalogueSettings , marshallSettings ] : port = False if dbSettings and dbSettings [ "tunnel" ] : port = self . _setup_tunnel ( tunnelParameters = dbSettings [ "tunnel" ] ) if dbSettings : host = dbSettings [ "host" ] user = dbSettings [ "user" ] passwd = dbSettings [ "password" ] dbName = dbSettings [ "db" ] thisConn = ms . connect ( host = host , user = user , passwd = passwd , db = dbName , port = port , use_unicode = True , charset = 'utf8' , client_flag = ms . constants . CLIENT . MULTI_STATEMENTS , connect_timeout = 3600 ) thisConn . autocommit ( True ) dbConns . append ( thisConn ) else : dbConns . append ( None ) dbConns = { "transients" : dbConns [ 0 ] , "catalogues" : dbConns [ 1 ] , "marshall" : dbConns [ 2 ] } dbVersions = { } for k , v in dbConns . iteritems ( ) : if v : sqlQuery = u % locals ( ) rows = readquery ( log = self . log , sqlQuery = sqlQuery , dbConn = v , quiet = False ) version = rows [ 0 ] [ 'v' ] dbVersions [ k ] = version else : dbVersions [ k ] = None self . log . debug ( 'completed the ``get`` method' ) return dbConns , dbVersions
Connect to the various databases, the credentials and settings of which are found in the sherlock settings file.
49,844
def clean_username ( self ) : try : user = get_user_model ( ) . objects . get ( username = self . cleaned_data [ "username" ] ) except get_user_model ( ) . DoesNotExist : pass else : raise forms . ValidationError ( self . error_messages [ 'duplicate_username' ] ) if self . cleaned_data [ 'username' ] . lower ( ) in defaults . ACCOUNTS_FORBIDDEN_USERNAMES : raise forms . ValidationError ( _ ( u'This username is not allowed.' ) ) return self . cleaned_data [ 'username' ]
Validate that the username is unique and not listed in defaults . ACCOUNTS_FORBIDDEN_USERNAMES list .
49,845
def clean_email(self):
    """Validate that the email address is unique."""
    taken = get_user_model().objects.filter(
        Q(email__iexact=self.cleaned_data['email']) |
        Q(email_unconfirmed__iexact=self.cleaned_data['email']))
    if taken:
        raise forms.ValidationError(_(u'This email address is already '
                                      'in use. Please supply a different email.'))
    return self.cleaned_data['email']
Validate that the email address is unique .
49,846
def clean_email ( self ) : if self . cleaned_data [ 'email' ] . lower ( ) == self . user . email : raise forms . ValidationError ( _ ( u"You're already known under " "this email address." ) ) if get_user_model ( ) . objects . filter ( email__iexact = self . cleaned_data [ 'email' ] ) . exclude ( email__iexact = self . user . email ) : raise forms . ValidationError ( _ ( u'This email address is already ' 'in use. Please supply a different email address.' ) ) return self . cleaned_data [ 'email' ]
Validate that the email is not already registered with another user .
49,847
def clean_picture ( self ) : if self . cleaned_data . get ( 'picture' ) : picture_data = self . cleaned_data [ 'picture' ] if 'error' in picture_data : raise forms . ValidationError ( _ ( u'Upload a valid image. ' 'The file you uploaded was either not an image ' 'or a corrupted image.' ) ) content_type = picture_data . content_type if content_type : main , sub = content_type . split ( '/' ) if not ( main == 'image' and sub in defaults . ACCOUNTS_PICTURE_FORMATS ) : raise forms . ValidationError ( _ ( u'%s only.' % defaults . ACCOUNTS_PICTURE_FORMATS ) ) if picture_data . size > int ( defaults . ACCOUNTS_PICTURE_MAX_FILE ) : raise forms . ValidationError ( _ ( u'Image size is too big.' ) ) return self . cleaned_data [ 'picture' ]
Validates format and file size of uploaded profile picture .
49,848
def log ( self ) : with Subscribe ( services = [ "" ] , addr_listener = True ) as sub : for msg in sub . recv ( 1 ) : if msg : if msg . type in [ "log.debug" , "log.info" , "log.warning" , "log.error" , "log.critical" ] : getattr ( LOGGER , msg . type [ 4 : ] ) ( msg . subject + " " + msg . sender + " " + str ( msg . data ) + " " + str ( msg . time ) ) elif msg . binary : LOGGER . debug ( "%s %s %s [binary] %s" , msg . subject , msg . sender , msg . type , str ( msg . time ) ) else : LOGGER . debug ( "%s %s %s %s %s" , msg . subject , msg . sender , msg . type , str ( msg . data ) , str ( msg . time ) ) if not self . loop : LOGGER . info ( "Stop logging" ) break
Log stuff .
49,849
def cleanup ( ) : to_stop = STARTED_TASKS . copy ( ) if to_stop : print "Cleaning up..." for task in to_stop : try : task . stop ( ) except : etype , value , trace = sys . exc_info ( ) if not ( isinstance ( value , OSError ) and value . errno == 3 ) : print '' . join ( format_exception ( etype , value , trace , None ) ) continue
Stop all started tasks on system exit .
49,850
def wait ( self ) : if self . _status is not TaskStatus . STARTED : raise RuntimeError ( "Cannot wait on %s in state %s" % ( self , self . _status ) ) self . _wait ( ) self . stop ( ) return self . return_values
Wait on a task to finish and stop it when it has finished .
49,851
def stop ( self ) : if self . _status is TaskStatus . STOPPED : return if self . _status is not TaskStatus . STARTED : raise RuntimeError ( "Cannot stop %s in state %s" % ( self , self . _status ) ) self . _stop ( ) STARTED_TASKS . remove ( self ) self . _status = TaskStatus . STOPPED
Stop a task immediately .
49,852
def reset ( self ) : if self . _status is not TaskStatus . STOPPED : raise RuntimeError ( "Cannot reset %s in state %s" % ( self , self . _status ) ) self . _reset ( ) self . return_values = { } self . _status = TaskStatus . IDLE
Reset a task .
49,853
def _aggregate ( self ) : nonremote = [ t for t in self . _tasks if not isinstance ( t , RemoteTask ) ] remote = [ t for t in self . _tasks if isinstance ( t , RemoteTask ) ] host_dict = defaultdict ( list ) for task in remote : host_dict [ task . host ] . append ( task ) aggregated = [ ] for task_group in host_dict . values ( ) : combined_cmd = [ ] for task in task_group : if combined_cmd : combined_cmd . append ( '&' ) combined_cmd . append ( ' ' . join ( task . _remote_command ) ) t0 = task_group [ 0 ] task = RemoteTask ( t0 . host , combined_cmd , t0 . _quiet , t0 . _return_output , t0 . _kill_remote , t0 . _identity_file ) aggregated . append ( task ) self . _tasks = nonremote + aggregated
Helper method to aggregate RemoteTasks into single ssh session .
49,854
def upload_to_picture ( instance , filename ) : extension = filename . split ( '.' ) [ - 1 ] . lower ( ) salt , hash = generate_sha1 ( instance . id ) return '%(path)s/%(hash)s.%(extension)s' % { 'path' : getattr ( defaults , 'ACCOUNTS_PICTURE_PATH' , '%s/%s' % ( str ( instance . _meta . app_label ) , str ( instance . _meta . model_name ) ) ) , 'hash' : hash [ : 10 ] , 'extension' : extension }
Uploads a picture for a user to the ACCOUNTS_PICTURE_PATH and saving it under unique hash for the image . This is for privacy reasons so others can t just browse through the picture directory .
49,855
def activation_key_expired(self):
    """Return True when the activation key is expired or already used."""
    # An already-activated account counts as "expired".
    if self.activation_key == defaults.ACCOUNTS_ACTIVATED:
        return True
    expiration_date = self.date_joined + datetime.timedelta(
        days=defaults.ACCOUNTS_ACTIVATION_DAYS)
    return get_datetime_now() >= expiration_date
Checks if activation key is expired .
49,856
def send_activation_email ( self ) : context = { 'user' : self , 'protocol' : get_protocol ( ) , 'activation_days' : defaults . ACCOUNTS_ACTIVATION_DAYS , 'activation_key' : self . activation_key , 'site' : Site . objects . get_current ( ) } subject = '' . join ( render_to_string ( 'accounts/emails/activation_email_subject.txt' , context ) . splitlines ( ) ) message = render_to_string ( 'accounts/emails/activation_email_message.txt' , context ) send_mail ( subject , message , settings . DEFAULT_FROM_EMAIL , [ self . email , ] )
Sends an activation email to the user.
49,857
def change_email(self, email):
    """Store a new, unconfirmed email and send out a confirmation mail."""
    self.email_unconfirmed = email
    _salt, confirmation_key = generate_sha1(self.username)
    self.email_confirmation_key = confirmation_key
    self.email_confirmation_key_created = get_datetime_now()
    self.save()
    self.send_confirmation_email()
    return self
Changes the email address for a user .
49,858
def send_confirmation_email ( self ) : context = { 'user' : self , 'new_email' : self . email_unconfirmed , 'protocol' : get_protocol ( ) , 'confirmation_key' : self . email_confirmation_key , 'site' : Site . objects . get_current ( ) } subject_old = '' . join ( render_to_string ( 'accounts/emails/confirmation_email_subject_old.txt' , context ) . splitlines ( ) ) message_old = render_to_string ( 'accounts/emails/confirmation_email_message_old.txt' , context ) send_mail ( subject_old , message_old , settings . DEFAULT_FROM_EMAIL , [ self . email ] ) subject_new = '' . join ( render_to_string ( 'accounts/emails/confirmation_email_subject_new.txt' , context ) . splitlines ( ) ) message_new = render_to_string ( 'accounts/emails/confirmation_email_message_new.txt' , context ) send_mail ( subject_new , message_new , settings . DEFAULT_FROM_EMAIL , [ self . email_unconfirmed , ] )
Sends an email to confirm the new email address .
49,859
def get_picture_url ( self ) : if self . picture : return self . picture . url if defaults . ACCOUNTS_GRAVATAR_PICTURE : return get_gravatar ( self . email , defaults . ACCOUNTS_GRAVATAR_SIZE , defaults . ACCOUNTS_GRAVATAR_DEFAULT ) else : if defaults . ACCOUNTS_GRAVATAR_DEFAULT not in [ '404' , 'mm' , 'identicon' , 'monsterid' , 'wavatar' ] : return defaults . ACCOUNTS_GRAVATAR_DEFAULT else : return None
Returns the image containing the picture for the user .
49,860
def get_full_name_or_username ( self ) : if self . first_name or self . last_name : name = _ ( u"%(first_name)s %(last_name)s" ) % { 'first_name' : self . first_name , 'last_name' : self . last_name } else : if not defaults . ACCOUNTS_WITHOUT_USERNAMES : name = "%(username)s" % { 'username' : self . username } else : name = "%(email)s" % { 'email' : self . email } return name . strip ( )
Returns the full name of the user or if none is supplied will return the username .
49,861
def resolve(self, link_resource_type, resource_id, array=None):
    """Resolve a link to a CDA resource.

    Looks the resource up in *array*'s mapped items first (when an
    array is given); falls back to fetching it from the API by
    ``sys.id``.
    """
    resource = None
    if array is not None:
        mapped = array.items_mapped.get(link_resource_type)
        resource = mapped.get(resource_id)
    if resource is None:
        resource_class = utils.class_for_type(link_resource_type)
        resource = (self.fetch(resource_class)
                        .where({'sys.id': resource_id})
                        .first())
    return resource
Resolve a link to a CDA resource .
49,862
def resolve_dict_link(self, dct, array=None):
    """Resolve a link given as a raw dict.

    Returns ``None`` when the dict carries no 'sys' entry.
    """
    link_sys = dct.get('sys')
    if link_sys is None:
        return None
    return self.resolve(link_sys['linkType'], link_sys['id'], array)
Convenience method for resolving links given a dict object .
49,863
def all(self):
    """Retrieve every resource matching this request, resolving links
    in the result when the client is configured to do so."""
    fetched = self.invoke()
    if self.resolve_links:
        fetched.resolve_links()
    return fetched
Attempt to retrieve all available resources matching this request .
49,864
def first(self):
    """Retrieve only the first matching resource, or ``None`` when
    nothing matches.  Sets ``limit=1`` on this request's params."""
    self.params['limit'] = 1
    outcome = self.all()
    if outcome.total > 0:
        return outcome.items[0]
    return None
Attempt to retrieve only the first resource matching this request .
49,865
def where(self, params):
    """Merge *params* into this request's query parameters; values in
    *params* win on key collisions.  Returns ``self`` for chaining."""
    merged = dict(self.params)
    merged.update(params)
    self.params = merged
    return self
Set a dict of parameters to be passed to the API when invoking this request .
49,866
def complex_require_condition():
    """Demonstrate ``require_condition`` with a computed boolean and
    with message-argument interpolation (the second check, on 81,
    fails and raises)."""
    print("Demonstrating complex require_condition example")
    candidate = 64
    Buzz.require_condition(is_even(candidate), 'This condition should pass')
    candidate = 81
    Buzz.require_condition(is_even(candidate), 'Value {val} is not even',
                           val=candidate)
This function demonstrates a more complex usage of the require_condition function . It shows argument interpolation and handling a more complex boolean expression
49,867
def get(self, item_name):
    """Return the value of an option addressed by a separator-joined
    path, honouring the configured prefix.

    Raises ``KeyError`` when any path component is missing.
    """
    if self.prefix:
        item_name = self.prefix + self.seperator + item_name
    node = self._storage
    for part in item_name.split(self.seperator):
        node = node[part]
    return node
Retrieve the value of an option .
49,868
def has_option(self, option_name):
    """Return whether an option exists at the separator-joined path,
    honouring the configured prefix."""
    if self.prefix:
        option_name = self.prefix + self.seperator + option_name
    node = self._storage
    for part in option_name.split(self.seperator):
        if node is None or part not in node:
            return False
        node = node[part]
    return True
Check that an option exists .
49,869
def has_section(self, section_name):
    """Return whether an option exists and holds a dict of sub-options
    (i.e. is a section rather than a leaf value)."""
    return (self.has_option(section_name)
            and isinstance(self.get(section_name), dict))
Checks that an option exists and that it contains sub-options.
49,870
def set(self, item_name, item_value):
    """Set the value of an option, creating intermediate dicts along
    the separator-joined path as needed."""
    if self.prefix:
        item_name = self.prefix + self.seperator + item_name
    *branches, leaf = item_name.split(self.seperator)
    node = self._storage
    for part in branches:
        node = node.setdefault(part, {})
    node[leaf] = item_value
Sets the value of an option in the configuration .
49,871
def get_missing(self, verify_file):
    """Compare the loaded configuration against a verification file.

    The verification file lists required settings and their expected
    type names under a 'settings' section.  Returns a dict with up to
    two keys: 'missing' (list of absent settings) and 'incompatible'
    (list of ``(setting, expected_type)`` tuples whose stored value has
    the wrong type).

    Bug fix: the accumulator previously read ``missing.get('settings')``
    — a key that is never written — so the 'missing' list was reset on
    every iteration and only the last absent setting survived.
    """
    vconf = Configuration(verify_file)
    missing = {}
    for setting, setting_type in vconf.get('settings').items():
        if not self.has_option(setting):
            missing.setdefault('missing', []).append(setting)
        elif type(self.get(setting)).__name__ != setting_type:
            missing.setdefault('incompatible', []).append(
                (setting, setting_type))
    return missing
Use a verification configuration which has a list of required options and their respective types . This information is used to identify missing and incompatible options in the loaded configuration .
49,872
def save(self):
    """Serialize the in-memory configuration with the configured
    serializer and write it to the configuration file."""
    serialized = self._serializer('dumps', self._storage)
    with open(self.configuration_file, 'w') as handle:
        handle.write(serialized)
Save the current configuration to disk .
49,873
def replace_lines(regexer, handler, lines):
    """Run *handler* over each line via ``regexer.sub``.

    The substitution is applied to the stripped content of each line
    and spliced back into the original line, so surrounding whitespace
    and line endings are preserved.
    """
    def _substitute(line):
        stripped = line.strip()
        return line.replace(stripped, regexer.sub(handler, stripped), 1)
    return [_substitute(line) for line in lines]
Uses replacement handler to perform replacements on lines of text
49,874
def write_targets(targets, **params):
    """Write version information into each target file in place.

    Raises when the replacement handler could not apply every expected
    substitution.
    """
    handler = ReplacementHandler(**params)
    for target, regexer in regexer_for_targets(targets):
        with open(target) as handle:
            updated = replace_lines(regexer, handler, handle.readlines())
        with open(target, "w") as handle:
            handle.writelines(updated)
    if handler.missing:
        raise Exception(
            "Failed to complete all expected replacements: %r"
            % handler.missing)
Writes version info into version file
49,875
def regexer_for_targets(targets):
    """Yield ``(target, regexer)`` pairs, picking the regex registered
    in the config for each target's file extension."""
    for target in targets:
        _, extension = os.path.splitext(target)
        yield target, config.regexers[extension]
Pairs up target files with their correct regex
49,876
def extract_keypairs(lines, regexer):
    """Collect ``{key: value}`` pairs from lines whose stripped content
    matches *regexer* (using the named groups defined in Constants);
    non-matching lines are skipped."""
    found = {}
    for line in lines:
        matched = regexer.match(line.strip())
        if matched:
            groups = matched.groupdict()
            found[groups[Constants.KEY_GROUP]] = groups[Constants.VALUE_GROUP]
    return found
Given some lines of text extract key - value pairs from them
49,877
def read_targets(targets):
    """Read key-value pairs from every target file; on duplicate keys,
    later files override earlier ones."""
    collected = {}
    for target, regexer in regexer_for_targets(targets):
        with open(target) as handle:
            collected.update(extract_keypairs(handle.readlines(), regexer))
    return collected
Reads generic key - value pairs from input files
49,878
def detect_file_triggers(trigger_patterns):
    """Return the set of triggers whose glob pattern matches at least
    one existing file."""
    triggered = set()
    for trigger, pattern in trigger_patterns.items():
        matched_paths = glob.glob(pattern)
        if matched_paths:
            _LOG.debug("trigger: %s bump from %r\n\t%s",
                       trigger, pattern, matched_paths)
            triggered.add(trigger)
        else:
            _LOG.debug("trigger: no match on %r", pattern)
    return triggered
The existence of files matching configured globs will trigger a version bump
49,879
def get_all_triggers(bump, file_triggers):
    """Aggregate the set of significant figures to bump, combining
    file-based triggers (when enabled) with an explicit bump request."""
    triggers = set()
    if file_triggers:
        triggers |= detect_file_triggers(config.trigger_patterns)
    if bump:
        _LOG.debug("trigger: %s bump requested", bump)
        triggers.add(bump)
    return triggers
Aggregated set of significant figures to bump
49,880
def get_lock_behaviour(triggers, all_data, lock):
    """Handle the binary version lock.

    Requesting the lock records it; when a bump is triggered while the
    lock is active, the triggers are suppressed and the lock clears
    itself.  Returns the field updates to apply.
    """
    updates = {}
    lock_key = config._forward_aliases.get(Constants.VERSION_LOCK_FIELD)
    if lock:
        updates[Constants.VERSION_LOCK_FIELD] = config.VERSION_LOCK_VALUE
        return updates
    if not triggers:
        return updates
    if lock_key and str(all_data.get(lock_key)) == str(config.VERSION_LOCK_VALUE):
        # lock is set: swallow this bump and release the lock
        triggers.clear()
        updates[Constants.VERSION_LOCK_FIELD] = config.VERSION_UNLOCK_VALUE
    return updates
Binary state lock protects from version increments if set
49,881
def get_final_version_string(release_mode, semver, commit_count=0):
    """Build the version-string update entries.

    In release mode the plain dotted semver is used and the release
    flag is recorded; otherwise a dev-mode version string including
    the commit count is generated.  Returns a dict of field updates.
    """
    strict_version = ".".join(semver)
    updates = {}
    if release_mode:
        updates[Constants.RELEASE_FIELD] = config.RELEASED_VALUE
        version = strict_version
    else:
        version = config.DEVMODE_TEMPLATE.format(
            version=strict_version, count=commit_count)
    updates[Constants.VERSION_FIELD] = version
    updates[Constants.VERSION_STRICT_FIELD] = strict_version
    return updates
Generates update dictionary entries for the version string
49,882
def get_dvcs_info():
    """Query git for the current commit hash and total commit count of
    the repository in the working directory."""
    def _git(cmd):
        # run a git command and return its stripped stdout
        return subprocess.check_output(shlex.split(cmd)).decode("utf8").strip()

    commit_count = str(int(_git("git rev-list --count HEAD")))
    commit = str(_git("git rev-parse HEAD"))
    return {Constants.COMMIT_FIELD: commit,
            Constants.COMMIT_COUNT_FIELD: commit_count}
Gets current repository info from git
49,883
def log_cmd(context, sampleinfo, sacct, quiet, config):
    """Log an analysis run into the status store.

    Skips (with a yellow warning) when the Sacct file is missing or the
    analysis output lacks an expected key; otherwise reports whether
    the analysis was already logged or a new run record was created.

    Consistency fix: the KeyError branch used a bare ``print(`` while
    every sibling branch uses ``click.echo(`` — use click.echo there
    too so output honours click's stream/colour handling.
    """
    log_analysis = LogAnalysis(context.obj['store'])
    try:
        new_run = log_analysis(config, sampleinfo=sampleinfo, sacct=sacct)
    except MissingFileError as error:
        click.echo(click.style(
            f"Skipping, missing Sacct file: {error.message}", fg='yellow'))
        return
    except KeyError as error:
        click.echo(click.style(
            f"unexpected output, missing key: {error.args[0]}", fg='yellow'))
        return
    if new_run is None:
        if not quiet:
            click.echo(click.style('Analysis already logged', fg='yellow'))
    else:
        message = f"New log added: {new_run.family} ({new_run.id}) - {new_run.status}"
        click.echo(click.style(message, fg='green'))
Log an analysis .
49,884
def start(context, mip_config, email, priority, dryrun, command, start_with, family):
    """Start a new MIP analysis for *family*; with ``command`` set,
    only print the MIP command line instead of running it."""
    mip_cli = MipCli(context.obj['script'])
    kwargs = dict(
        config=mip_config or context.obj['mip_config'],
        family=family,
        priority=priority,
        email=email or environ_email(),
        dryrun=dryrun,
        start_with=start_with,
    )
    if command:
        click.echo(' '.join(mip_cli.build_command(**kwargs)))
        return
    try:
        mip_cli(**kwargs)
        if not dryrun:
            context.obj['store'].add_pending(family, email=kwargs['email'])
    except MipStartError as error:
        click.echo(click.style(error.message, fg='red'))
Start a new analysis .
49,885
def scan(context, root_dir):
    """Scan a root directory for MIP analysis configs and quietly log
    each one, then record the scan in the store."""
    root_dir = root_dir or context.obj['root']
    for config_file in Path(root_dir).glob('*/analysis/*_config.yaml'):
        LOG.debug("found analysis config: %s", config_file)
        with config_file.open() as stream:
            context.invoke(log_cmd, config=stream, quiet=True)
    context.obj['store'].track_update()
Scan a directory for analyses .
49,886
def user(context, name, email):
    """Show an existing user by email, or create one when a name is
    supplied; otherwise report that the user was not found."""
    store = context.obj['store']
    existing_user = store.user(email)
    if existing_user:
        click.echo(existing_user.to_dict())
        return
    if name:
        new_user = store.add_user(name, email)
        click.echo(click.style(
            f"New user added: {email} ({new_user.id})", fg='green'))
    else:
        click.echo(click.style('User not found', fg='yellow'))
Add a new or display information about an existing user .
49,887
# Cancel all Slurm jobs of a running analysis (or, with `jobs` truthy,
# only list the job ids without cancelling).
# Flow: look up the analysis in the store (abort unless it exists and is
# 'running'), load its MIP config via ruamel.yaml to find the log file,
# scrape the scancel job ids from the log with job_ids(), then either
# echo them or `scancel` each one and mark the analysis as 'canceled'.
# NOTE(review): the source arrived whitespace-flattened; the statement
# grouping after `process.wait()` (status update presumably outside the
# for loop) should be confirmed against the upstream project.
def cancel ( context , jobs , analysis_id ) : analysis_obj = context . obj [ 'store' ] . analysis ( analysis_id ) if analysis_obj is None : click . echo ( 'analysis not found' ) context . abort ( ) elif analysis_obj . status != 'running' : click . echo ( f"analysis not running: {analysis_obj.status}" ) context . abort ( ) config_path = Path ( analysis_obj . config_path ) with config_path . open ( ) as config_stream : config_raw = ruamel . yaml . safe_load ( config_stream ) config_data = parse_config ( config_raw ) log_path = Path ( f"{config_data['log_path']}" ) if not log_path . exists ( ) : click . echo ( f"missing MIP log file: {log_path}" ) context . abort ( ) with log_path . open ( ) as log_stream : all_jobs = job_ids ( log_stream ) if jobs : for job_id in all_jobs : click . echo ( job_id ) else : for job_id in all_jobs : LOG . debug ( f"cancelling job: {job_id}" ) process = subprocess . Popen ( [ 'scancel' , job_id ] ) process . wait ( ) analysis_obj . status = 'canceled' context . obj [ 'store' ] . commit ( ) click . echo ( 'cancelled analysis successfully!' )
Cancel all jobs in a run .
49,888
# Name-server main loop: binds a ZMQ REP socket on PORT, polls it under
# `nslock`, decodes each incoming Message and replies with the active
# address for the requested service (looked up via the AddressReceiver
# started at the top).  KeyboardInterrupt exits cleanly; the receiver
# and the server are always stopped in the finally block.
# NOTE(review): the source arrived whitespace-flattened; the exact
# nesting of the `while self.loop` / inner `with nslock` / `if socks`
# blocks cannot be recovered from this form — restyling is unsafe, so
# the code is kept byte-identical.  Confirm nesting against upstream.
def run ( self , * args ) : del args arec = AddressReceiver ( max_age = self . _max_age , multicast_enabled = self . _multicast_enabled ) arec . start ( ) port = PORT try : with nslock : self . listener = get_context ( ) . socket ( REP ) self . listener . bind ( "tcp://*:" + str ( port ) ) logger . debug ( 'Listening on port %s' , str ( port ) ) poller = Poller ( ) poller . register ( self . listener , POLLIN ) while self . loop : with nslock : socks = dict ( poller . poll ( 1000 ) ) if socks : if socks . get ( self . listener ) == POLLIN : msg = self . listener . recv_string ( ) else : continue logger . debug ( "Replying to request: " + str ( msg ) ) msg = Message . decode ( msg ) self . listener . send_unicode ( six . text_type ( get_active_address ( msg . data [ "service" ] , arec ) ) ) except KeyboardInterrupt : pass finally : arec . stop ( ) self . stop ( )
Run the listener and answer to requests .
49,889
def stop(self):
    """Stop the name server.

    Flags the serving loop to exit and closes the listener socket
    under the name-server lock, with a short linger so a pending reply
    can still be flushed.
    """
    self.loop = False
    self.listener.setsockopt(LINGER, 1)
    with nslock:
        self.listener.close()
Stop the name server .
49,890
def segments_to_numpy(segments):
    """Convert a list of 4-element tuples into a 2D numpy array using
    the module's segment dtype, transposing when the module stores
    segments in the non-zero direction."""
    as_array = numpy.array(segments, dtype=SEGMENT_DATATYPE, ndmin=2)
    if SEGMENTS_DIRECTION != 0:
        as_array = numpy.transpose(as_array)
    return as_array
Given a list of 4-element tuples, transforms it into a 2D numpy array.
49,891
# Build an in-memory list of row dicts from the marshall database by
# running `marshallQuery` against the marshall connection; drops any
# `dateCreated` column from each row before accumulating.
# The "\x1b[1A\x1b[2K" write moves the terminal cursor up one line and
# clears it, so the "N / total rows added" counter redraws in place.
# NOTE(review): Python 2 code (bare `print` statement) — will not parse
# under Python 3; kept byte-identical.  `marshallTable` is accepted but
# unused here — presumably consumed by the caller; confirm upstream.
def _create_dictionary_of_marshall ( self , marshallQuery , marshallTable ) : self . log . debug ( 'starting the ``_create_dictionary_of_marshall`` method' ) dictList = [ ] tableName = self . dbTableName rows = readquery ( log = self . log , sqlQuery = marshallQuery , dbConn = self . pmDbConn , quiet = False ) totalCount = len ( rows ) count = 0 for row in rows : if "dateCreated" in row : del row [ "dateCreated" ] count += 1 if count > 1 : sys . stdout . write ( "\x1b[1A\x1b[2K" ) print "%(count)s / %(totalCount)s `%(tableName)s` data added to memory" % locals ( ) dictList . append ( dict ( row ) ) self . log . debug ( 'completed the ``_create_dictionary_of_marshall`` method' ) return dictList
create a list of dictionaries containing all the rows in the marshall stream
49,892
# Import the NED-D catalogue into the catalogues database: parse the
# source CSV into row dicts, create/populate the table, then clean up
# columns, fetch galaxy metadata from NED and update SDSS coverage.
# NOTE(review): `createStatement = u % locals ( )` is garbled — the
# original u"""...""" CREATE TABLE template appears to have been lost
# during extraction, so this code is broken as-is; recover the SQL
# template from the upstream project before use.
def ingest ( self ) : self . log . debug ( 'starting the ``get`` method' ) dictList = self . _create_dictionary_of_ned_d ( ) self . primaryIdColumnName = "primaryId" self . raColName = "raDeg" self . declColName = "decDeg" tableName = self . dbTableName createStatement = u % locals ( ) self . add_data_to_database_table ( dictList = dictList , createStatement = createStatement ) self . _clean_up_columns ( ) self . _get_metadata_for_galaxies ( ) self . _update_sdss_coverage ( ) self . log . debug ( 'completed the ``get`` method' ) return None
Import the ned_d catalogue into the catalogues database
49,893
# Parse the NED-D CSV export into a list of row dicts.  First pass
# counts rows for the progress display; second pass locates the header
# row (identified by "Exclusion Code" and "Hubble const." columns),
# maps the CSV column titles onto database field names, then converts
# every data row into a dict keyed by those names.  `ref_date` is
# rebased from "years since 1980" to a calendar year; rows whose
# `dist_index_id` is the "999999" sentinel are skipped.
# NOTE(review): Python 2 code (bare `print`, `open(..., 'rb')` with
# csv.reader) arrived flattened and split mid-statement across two
# lines — kept byte-identical; reconcile against upstream before use.
def _create_dictionary_of_ned_d ( self ) : self . log . debug ( 'starting the ``_create_dictionary_of_ned_d`` method' ) count = 0 with open ( self . pathToDataFile , 'rb' ) as csvFile : csvReader = csv . reader ( csvFile , dialect = 'excel' , delimiter = ',' , quotechar = '"' ) totalRows = sum ( 1 for row in csvReader ) csvFile . close ( ) totalCount = totalRows with open ( self . pathToDataFile , 'rb' ) as csvFile : csvReader = csv . reader ( csvFile , dialect = 'excel' , delimiter = ',' , quotechar = '"' ) theseKeys = [ ] dictList = [ ] for row in csvReader : if len ( theseKeys ) == 0 : totalRows -= 1 if "Exclusion Code" in row and "Hubble const." in row : for i in row : if i == "redshift (z)" : theseKeys . append ( "redshift" ) elif i == "Hubble const." : theseKeys . append ( "hubble_const" ) elif i == "G" : theseKeys . append ( "galaxy_index_id" ) elif i == "err" : theseKeys . append ( "dist_mod_err" ) elif i == "D (Mpc)" : theseKeys . append ( "dist_mpc" ) elif i == "Date (Yr. - 1980)" : theseKeys . append ( "ref_date" ) elif i == "REFCODE" : theseKeys . append ( "ref" ) elif i == "Exclusion Code" : theseKeys . append ( "dist_in_ned_flag" ) elif i == "Adopted LMC modulus" : theseKeys . append ( "lmc_mod" ) elif i == "m-M" : theseKeys . append ( "dist_mod" ) elif i == "Notes" : theseKeys . append ( "notes" ) elif i == "SN ID" : theseKeys . append ( "dist_derived_from_sn" ) elif i == "method" : theseKeys . append ( "dist_method" ) elif i == "Galaxy ID" : theseKeys . append ( "primary_ned_id" ) elif i == "D" : theseKeys . append ( "dist_index_id" ) else : theseKeys . append ( i ) continue if len ( theseKeys ) : count += 1 if count > 1 : sys . stdout . write ( "\x1b[1A\x1b[2K" ) if count > totalCount : count = totalCount percent = ( float ( count ) / float ( totalCount ) ) * 100. 
print "%(count)s / %(totalCount)s (%(percent)1.1f%%) rows added to memory" % locals ( ) rowDict = { } for t , r in zip ( theseKeys , row ) : rowDict [ t ] = r if t == "ref_date" : try : rowDict [ t ] = int ( r ) + 1980 except : rowDict [ t ] = None if rowDict [ "dist_index_id" ] != "999999" : dictList . append ( rowDict ) csvFile . close ( ) self . log . debug ( 'completed the ``_create_dictionary_of_ned_d`` method' ) return dictList
create a list of dictionaries containing all the rows in the ned_d catalogue
49,894
# Tidy columns of the NED-D table by running two clean-up queries
# against the catalogues database.
# NOTE(review): both `sqlQuery = u % locals ( )` assignments are
# garbled — the original u"""...""" SQL templates were lost during
# extraction, so this code is broken as-is; recover the SQL from the
# upstream project before use.  (Python 2 `print` statement as well.)
def _clean_up_columns ( self ) : self . log . debug ( 'starting the ``_clean_up_columns`` method' ) tableName = self . dbTableName print "cleaning up %(tableName)s columns" % locals ( ) sqlQuery = u % locals ( ) writequery ( log = self . log , sqlQuery = sqlQuery , dbConn = self . cataloguesDbConn , ) sqlQuery = u % locals ( ) writequery ( log = self . log , sqlQuery = sqlQuery , dbConn = self . cataloguesDbConn , ) self . log . debug ( 'completed the ``_clean_up_columns`` method' ) return None
clean up columns of the NED table
49,895
# Fetch metadata for all galaxies still lacking it, in batches: count
# how many need metadata, then repeatedly select up to 3000 ids, query
# NED for them and write the results back, until the outstanding count
# (self.total, refreshed by _count_galaxies_requiring_metadata) hits 0.
# NOTE(review): Python 2 `print` statement — kept byte-identical.
# `totalBatches` is assigned but never used; presumably leftover.
def _get_metadata_for_galaxies ( self ) : self . log . debug ( 'starting the ``_get_metadata_for_galaxies`` method' ) total , batches = self . _count_galaxies_requiring_metadata ( ) print "%(total)s galaxies require metadata. Need to send %(batches)s batch requests to NED." % locals ( ) totalBatches = self . batches thisCount = 0 while self . total : thisCount += 1 self . _get_3000_galaxies_needing_metadata ( ) dictList = self . _query_ned_and_add_results_to_database ( thisCount ) self . add_data_to_database_table ( dictList = dictList , createStatement = False ) self . _count_galaxies_requiring_metadata ( ) self . log . debug ( 'completed the ``_get_metadata_for_galaxies`` method' ) return None
get metadata for galaxies
49,896
# Select the next batch of galaxies missing metadata and populate
# self.theseIds as a {primary_ned_id: primaryId} map; returns the
# batch size.
# NOTE(review): `sqlQuery = u % locals ( )` is garbled — the original
# u"""...""" SELECT template (presumably with a LIMIT 3000, per the
# method name) was lost during extraction; recover it from the
# upstream project before use.
def _get_3000_galaxies_needing_metadata ( self ) : self . log . debug ( 'starting the ``_get_3000_galaxies_needing_metadata`` method' ) tableName = self . dbTableName self . theseIds = { } sqlQuery = u % locals ( ) rows = readquery ( log = self . log , sqlQuery = sqlQuery , dbConn = self . cataloguesDbConn , quiet = False ) for row in rows : self . theseIds [ row [ "primary_ned_id" ] ] = row [ "primaryId" ] self . log . debug ( 'completed the ``_get_3000_galaxies_needing_metadata`` method' ) return len ( self . theseIds )
get 3000 galaxies needing metadata
49,897
# For every NED galaxy row returned by the (garbled) selection query,
# check whether its ra/dec falls inside the SDSS footprint via
# check_coverage(), then write the result back: 999 -> SQL null
# (lookup inconclusive), True -> 1, False -> 0; any other value is a
# hard error.  The time.sleep(1.1) throttles the remote SDSS queries;
# the "\x1b[1A\x1b[2K" writes redraw the progress lines in place.
# NOTE(review): Python 2 `print` statements, and both
# `sqlQuery = u % locals ( )` assignments lost their u"""...""" SQL
# templates during extraction — broken as-is; recover from upstream.
def _update_sdss_coverage ( self ) : self . log . debug ( 'starting the ``_update_sdss_coverage`` method' ) tableName = self . dbTableName sqlQuery = u % locals ( ) rows = readquery ( log = self . log , sqlQuery = sqlQuery , dbConn = self . cataloguesDbConn , quiet = False ) totalCount = len ( rows ) count = 0 for row in rows : count += 1 if count > 1 : sys . stdout . write ( "\x1b[1A\x1b[2K" ) sys . stdout . write ( "\x1b[1A\x1b[2K" ) sys . stdout . write ( "\x1b[1A\x1b[2K" ) if count > totalCount : count = totalCount percent = ( float ( count ) / float ( totalCount ) ) * 100. primaryID = row [ "primaryID" ] raDeg = float ( row [ "raDeg" ] ) decDeg = float ( row [ "decDeg" ] ) primary_ned_id = row [ "primary_ned_id" ] time . sleep ( 1.1 ) print "%(count)s / %(totalCount)s (%(percent)1.1f%%) NED galaxies checked for SDSS coverage" % locals ( ) print "NED NAME: " , primary_ned_id sdss_coverage = check_coverage ( log = self . log , ra = raDeg , dec = decDeg ) . get ( ) if sdss_coverage == 999 : sdss_coverage_flag = "null" elif sdss_coverage == True : sdss_coverage_flag = 1 elif sdss_coverage == False : sdss_coverage_flag = 0 else : self . log . error ( 'cound not get sdss coverage' % locals ( ) ) sys . exit ( 0 ) sqlQuery = u % locals ( ) writequery ( log = self . log , sqlQuery = sqlQuery , dbConn = self . cataloguesDbConn , ) self . log . debug ( 'completed the ``_update_sdss_coverage`` method' ) return None
update sdss coverage
49,898
def _rsync_cmd ( self ) : cmd = [ 'rsync' ] if self . _identity_file : cmd += [ '-e' , 'ssh -i ' + os . path . expanduser ( self . _identity_file ) ] return cmd
Helper method to generate base rsync command .
49,899
def send_file(self, file_name, remote_destination=None, **kwargs):
    """Create a task that rsyncs *file_name* to this remote host.

    The remote path defaults to the local file name; extra keyword
    arguments are forwarded to the task.
    """
    destination = remote_destination or file_name
    argv = self._rsync_cmd() + [
        '-ut', file_name,
        '%s:%s' % (self.hostname, destination),
    ]
    return SubprocessTask(argv, **kwargs)
Send a file to a remote host with rsync .