idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
15,800
def upload(self, file_obj):
    """Replace the content of this object with the data read from *file_obj*."""
    return self._client.upload_object(
        self._instance, self._bucket, self.name, file_obj)
Replace the content of this object .
15,801
def moving_average(arr: np.ndarray, n: int = 3) -> np.ndarray:
    """Return the n-point moving average of *arr* via a cumulative sum."""
    cumulative = np.cumsum(arr, dtype=float)
    # Subtract the sum up to n positions earlier to obtain each window sum.
    cumulative[n:] = cumulative[n:] - cumulative[:-n]
    return cumulative[n - 1:] / n
Calculate the moving average over an array.
15,802
def recursive_getattr(obj: Any, attr: str, *args) -> Any:
    """Dotted-path getattr: ``recursive_getattr(o, 'a.b')`` returns ``o.a.b``.

    Extra positional *args* act as the getattr default at every step.
    """
    def step(current, name):
        return getattr(current, name, *args)
    return functools.reduce(step, [obj] + attr.split('.'))
Recursive getattr.
15,803
def recursive_setattr(obj: Any, attr: str, val: Any) -> Any:
    """Dotted-path setattr: ``recursive_setattr(o, 'a.b', v)`` sets ``o.a.b = v``."""
    head, _, leaf = attr.rpartition('.')
    target = recursive_getattr(obj, head) if head else obj
    return setattr(target, leaf, val)
Recursive setattr.
15,804
def recursive_getitem(d: Mapping[str, Any], keys: Union[str, Sequence[str]]) -> Any:
    """Fetch ``d[k0][k1]...`` for a key sequence, or ``d[keys]`` for one string key."""
    if isinstance(keys, str):
        return d[keys]
    return functools.reduce(operator.getitem, keys, d)
Recursively retrieve an item from a nested dict .
15,805
def get_array_for_fit(observables: dict, track_pt_bin: int, jet_pt_bin: int) -> "histogram.Histogram1D":
    """Return the Histogram1D whose observable matches the given jet and track pt bins.

    Args:
        observables: Mapping of name -> observable with ``track_pt_bin``,
            ``jet_pt_bin`` and ``hist`` attributes.
        track_pt_bin: Selected track pt bin.
        jet_pt_bin: Selected jet pt bin.

    Raises:
        ValueError: If no observable matches the requested bins.
    """
    for name, observable in observables.items():
        if observable.track_pt_bin == track_pt_bin and observable.jet_pt_bin == jet_pt_bin:
            return histogram.Histogram1D.from_existing_hist(observable.hist)
    # Bug fix: the original message was a plain string literal, so the
    # '{jet_pt_bin}'/'{track_pt_bin}' placeholders were never interpolated.
    raise ValueError(
        f"Cannot find fit with jet pt bin {jet_pt_bin} and track pt bin {track_pt_bin}"
    )
Get a Histogram1D associated with the selected jet and track pt bins .
15,806
def epcrparsethreads(self):
    """Start ePCR-parsing worker threads and feed parsed ePCR result lines to the queue."""
    from Bio import SeqIO
    # One daemon worker per sample that has an assembly.
    for sample in self.metadata:
        if sample.general.bestassemblyfile != 'NA':
            worker = Thread(target=self.epcrparse, args=())
            worker.setDaemon(True)
            worker.start()
    for sample in self.metadata:
        if sample.general.bestassemblyfile != 'NA':
            if sample[self.analysistype].primers != 'NA':
                contigs = dict()
                sample[self.analysistype].blastresults = dict()
                sample[self.analysistype].rawblastresults = dict()
                # Map contig id -> sequence for slicing ePCR hit regions later.
                for rec in SeqIO.parse(sample.general.bestassemblyfile, 'fasta'):
                    contigs[rec.id] = str(rec.seq)
                for line in sample[self.analysistype].epcrresults:
                    if not line.startswith('#'):
                        self.epcrparsequeue.put((sample, contigs, line))
    self.epcrparsequeue.join()
Parse the ePCR results and run BLAST on the parsed results
15,807
def epcrparse(self):
    """Worker: BLAST each queued ePCR hit sequence and record the results on the sample."""
    from Bio.Blast.Applications import NcbiblastnCommandline
    while True:
        sample, record, line = self.epcrparsequeue.get()
        gene, chromosome, strand, start, end, m_match, gaps, act_len_exp_len = line.split('\t')
        # ePCR coordinates are 1-based inclusive; slice the contig accordingly.
        genesequence = record[chromosome][int(start) - 1:int(end)]
        blastn = NcbiblastnCommandline(
            db=sample[self.analysistype].probes.split('.')[0],
            num_threads=12,
            task='blastn-short',
            num_alignments=1,
            outfmt="'6 qseqid sseqid positive mismatch gaps "
                   "evalue bitscore slen length'")
        out, err = blastn(stdin=genesequence)
        results = out.rstrip().split('\t')
        sample[self.analysistype].rawblastresults[gene] = results
        positives = float(results[2])
        mismatches = float(results[3])
        gaps = float(results[4])
        subjectlength = float(results[7])
        percentidentity = float('{:0.2f}'.format((positives - gaps) / subjectlength * 100))
        resultdict = {
            'matches': positives,
            'mismatches': mismatches,
            'gaps': gaps,
            'subject_length': subjectlength,
            'percent_identity': percentidentity,
            'match_length': results[8].split('\n')[0],
        }
        sample[self.analysistype].blastresults[gene] = resultdict
        self.epcrparsequeue.task_done()
Run BLAST and record results to the object
15,808
def report(self):
    """Write per-strain CSV reports of the BLAST findings plus one combined CSV."""
    data = ''
    for sample in self.metadata:
        if sample[self.analysistype].primers != 'NA':
            sample[self.analysistype].report = os.path.join(
                sample[self.analysistype].reportdir,
                '{}_{}.csv'.format(sample.name, self.analysistype))
            strainspecific = 'Strain,{},\n{},'.format(
                ','.join(sorted(sample[self.analysistype].targets)), sample.name)
            for gene in sorted(sample[self.analysistype].targets):
                try:
                    percentidentity = sample[self.analysistype].blastresults[gene]['percent_identity']
                    # Only identities above 50% are reported; everything else is '-'.
                    if percentidentity > 50:
                        strainspecific += '{},'.format(percentidentity)
                    else:
                        strainspecific += '-,'
                except KeyError:
                    strainspecific += '-,'
            strainspecific += '\n'
            with open(sample[self.analysistype].report, 'w') as specificreport:
                specificreport.write(strainspecific)
            data += strainspecific
    with open(os.path.join(self.reportdir, '{}.csv'.format(self.analysistype)), 'w') as report:
        report.write(data)
Create reports of the findings
15,809
def setup_environment():
    """Build PYTHONPATH from the Maya env path plus every entry of sys.path."""
    osinter = ostool.get_interface()
    maya_pypath = osinter.get_maya_envpath()
    for entry in sys.path:
        maya_pypath = os.pathsep.join((maya_pypath, entry))
    os.environ['PYTHONPATH'] = maya_pypath
Set up necessary environment variables.
15,810
def execute_mayapy(args, wait=True):
    """Run mayapy with the given argument list.

    Args:
        args: Iterable of command-line arguments passed to mayapy.
        wait: When True, block until the process exits.

    Returns:
        The process return code when *wait* is True, otherwise the running
        ``subprocess.Popen`` object.
    """
    osinter = ostool.get_interface()
    mayapy = osinter.get_maya_python()
    allargs = [mayapy]
    allargs.extend(args)
    # Bug fix: the original used Python 2 print statements, which are syntax
    # errors on Python 3; the rest of this codebase is Python 3 style.
    print("Executing mayapy with: %s" % allargs)
    mayapyprocess = subprocess.Popen(allargs)
    if wait:
        rc = mayapyprocess.wait()
        print("Process mayapy finished!")
        return rc
    return mayapyprocess
Execute mayapy with the given arguments; return the process return code when waiting, otherwise the running process object.
15,811
def setDoc(self, doc):
    """Present the given documentation dict in the UI widgets."""
    self.ui.overAtten.setNum(doc['overloaded_attenuation'])
    details = self.ui.componentDetails
    details.clearDoc()
    details.setDoc(doc['components'])
Presents the documentation
15,812
def increase_by_changes(self, changes_amount, ratio):
    """Increase the version by ``round(changes_amount * ratio)`` steps."""
    steps = round(changes_amount * ratio)
    return self.increase(int(steps))
Increase version by amount of changes
15,813
def wrap_maya_ui(mayaname):
    """Return the QWidget/QAction for the named Maya UI element, or None.

    Tries controls, then layouts, then menu items.
    NOTE: Python 2 only (relies on the ``long`` builtin).
    """
    ptr = apiUI.MQtUtil.findControl(mayaname)
    if ptr is None:
        ptr = apiUI.MQtUtil.findLayout(mayaname)
    if ptr is None:
        ptr = apiUI.MQtUtil.findMenuItem(mayaname)
    if ptr is not None:
        return wrap(long(ptr))
Given the name of a Maya UI element of any type return the corresponding QWidget or QAction . If the object does not exist returns None
15,814
def query_args(self, name):
    """Return ``(ret_type, name, args)`` for the named function, or None if absent."""
    sql = 'select type, id from code_items ' 'where kind = 22 and name = ?'
    logging.debug('%s %s', sql, (name,))
    self.cursor.execute(sql, (name,))
    func = self.cursor.fetchone()
    if not func:
        return None
    sql = 'select param_number, type, name ' 'from code_items where parent_id = ?'
    logging.debug('%s %s', sql, (func[1],))
    self.cursor.execute(sql, (func[1],))
    rows = self.cursor.fetchall()
    ret_type = clean_ret_type(func[0])
    args = [(num, sanitize_type(arg_type), arg_name) for num, arg_type, arg_name in rows]
    return ret_type, name, args
Query the return type and argument list of the specified function in the specified database .
15,815
def query_info(self, name, like, kind):
    """Query code_items rows matching *name*, optionally with LIKE and a kind filter."""
    kind = self._make_kind_id(kind)
    sql = ('select name, kind, file_id, type ' 'from code_items ' 'where name {} ?'
           .format('like' if like else '='))
    params = [name]
    if like:
        sql += ' escape ?'
        params.append('\\')
    if kind:
        sql += ' and kind = ?'
        params.append(kind)
    args = tuple(params)
    logging.debug('%s %s', sql, args)
    self.cursor.execute(sql, args)
    return self.cursor.fetchall(), self
Query the information of the name in the database .
15,816
def query_names(self, name, like, kind):
    """Query declarations grouped by file whose leaf name matches *name*.

    Returns a list of ``(header, [(name,), ...])`` tuples, one per matching file.
    """
    kind = self._make_kind_id(kind)
    sql = 'select id, name from files ' 'where leaf_name {} ?'.format('like' if like else '=')
    args = (name,)
    if like:
        sql += ' escape ?'
        args = (name, '\\')
    logging.debug('%s %s', sql, args)
    self.cursor.execute(sql, args)
    ids = self.cursor.fetchall()
    files = []
    for file_id, header in ids:
        sql = 'select name from code_items ' 'where file_id = ?'
        args = (file_id,)
        if kind:
            # Fix: the original appended 'and kind = ?' with no leading space,
            # producing '... file_id = ?and kind = ?'. SQLite happens to
            # tokenize that, but it is unreadable in debug logs and not
            # portable to other SQL engines.
            sql += ' and kind = ?'
            args = (file_id, kind)
        logging.debug('%s %s', sql, args)
        self.cursor.execute(sql, args)
        files.append((header, self.cursor.fetchall()))
    return files
Query function declarations in the files .
15,817
def query_struct(self, name):
    """Print the file, name and members of each struct matching *name*."""
    sql = 'select id, file_id, name from code_items ' 'where name = ?'
    self.cursor.execute(sql, (name,))
    for row in self.cursor.fetchall():
        member_sql = 'select id, type, name from code_items ' 'where parent_id = ?'
        self.cursor.execute(member_sql, (row[0],))
        members = self.cursor.fetchall()
        if members:
            print(self.file_id_to_name(row[1]), row[2])
            print(members)
Query struct .
15,818
def file_id_to_name(self, file_id):
    """Return the file name for *file_id*, or the empty string when unknown."""
    sql = 'select name from files where id = ?'
    logging.debug('%s %s', sql, (file_id,))
    self.cursor.execute(sql, (file_id,))
    row = self.cursor.fetchone()
    return row[0] if row else ''
Convert a file id to the file name .
15,819
def _make_kind_id(self, name_or_id):
    """Normalise a kind name or numeric id string to a kind id (None for falsy input)."""
    if not name_or_id:
        return None
    return name_or_id if name_or_id.isdigit() else self.kind_name_to_id(name_or_id)
Make kind_id from kind_name or kind_id .
15,820
def query_kinds(self, kind):
    """Resolve *kind* (name or id string) to ``[(id, name)]``; all kinds when None."""
    logging.debug(_('querying %s'), kind)
    if kind is None:
        return self._kind_id_to_name.items()
    if kind.isdigit():
        kind_name = self.kind_id_to_name(int(kind))
        if kind_name:
            return [(kind, kind_name)]
        logging.warning(_('id not found: %s'), kind)
        return [None]
    kind_id = self.kind_name_to_id(kind)
    if kind_id:
        return [(kind_id, kind)]
    logging.warning(_('name not found: %s'), kind)
    return [None]
Query kinds .
15,821
def _init_kind_converter(self):
    """Build the kind-id -> kind-name and kind-name -> kind-id lookup dictionaries."""
    from ..utils import invert_dict
    kinds = self.session.query(Kind).all()
    self._kind_id_to_name = {k.id: k.name for k in kinds}
    self._kind_name_to_id = invert_dict(self._kind_id_to_name)
Make a dictionary mapping kind ids to the names .
15,822
def make_export(self, exports):
    """Recreate the export table and populate it with module -> functions data."""
    sql = 'drop table if exists export'
    logging.debug(sql)
    self.cursor.execute(sql)
    sql = 'create table if not exists export ' '(func text unique, module text)'
    logging.debug(sql)
    self.cursor.execute(sql)
    insert_sql = 'insert into export values (?, ?)'
    for module in exports:
        logging.debug(_('insering exports from %s'), module)
        for func in exports[module]:
            if not func:
                continue
            try:
                self.cursor.execute(insert_sql, (func, module))
            except sqlite3.IntegrityError:
                # Duplicate function names are silently skipped (func is unique).
                pass
    self.con.commit()
Populate library exported function data .
15,823
def query_func_module(self, func):
    """Return the Export row for *func*, falling back to the ANSI 'A' variant."""
    row = self.session.query(Export).filter_by(func=func).first()
    if row:
        return row
    logging.debug(_('Function not found: %s'), func)
    ansi_name = func + 'A'
    row = self.session.query(Export).filter_by(func=ansi_name).first()
    if row:
        logging.warning(_('Using ANSI version: %s'), ansi_name)
        return row
    logging.warning(_('Not handled: %s or %s'), func, ansi_name)
    return None
Query the module name of the specified function .
15,824
def query_module_funcs(self, module):
    """Return all Export rows belonging to *module*."""
    return self.session.query(Export).filter_by(module=module).all()
Query the functions in the specified module .
15,825
def _build_named_object_ids(parameters):
    """Coerce a parameter name or an iterable of names into a list of NamedObjectId."""
    if isinstance(parameters, str):
        return [_build_named_object_id(parameters)]
    return [_build_named_object_id(p) for p in parameters]
Builds a list of NamedObjectId .
15,826
def _build_command_ids(issued_commands):
    """Coerce one IssuedCommand or an iterable of them into a list of CommandId."""
    if isinstance(issued_commands, IssuedCommand):
        return [issued_commands._proto.commandQueueEntry.cmdId]
    return [cmd._proto.commandQueueEntry.cmdId for cmd in issued_commands]
Builds a list of CommandId .
15,827
def _cache_key(cmd_id):
    """Build a unique string cache key from the fields of a CommandId tuple."""
    parts = (cmd_id.generationTime, cmd_id.origin,
             cmd_id.sequenceNumber, cmd_id.commandName)
    return '{}__{}__{}__{}'.format(*parts)
commandId is a tuple . Make a unique key for it .
15,828
def get_command_history(self, issued_command):
    """Return the locally cached CommandHistory for *issued_command*, or None."""
    entry = issued_command._proto.commandQueueEntry
    # dict.get returns None on a miss, matching the original explicit check.
    return self._cache.get(self._cache_key(entry.cmdId))
Gets locally cached CommandHistory for the specified command .
15,829
def add(self, parameters, abort_on_invalid=True, send_from_cache=True):
    """Add one or more parameters to this running subscription."""
    # -1 presumably marks a subscription whose initial reply never arrived.
    assert self.subscription_id != -1
    if not parameters:
        return
    options = web_pb2.ParameterSubscriptionRequest()
    options.subscriptionId = self.subscription_id
    options.abortOnInvalid = abort_on_invalid
    options.sendFromCache = send_from_cache
    options.id.extend(_build_named_object_ids(parameters))
    self._manager.send('subscribe', options)
Add one or more parameters to this subscription .
15,830
def remove(self, parameters):
    """Remove one or more parameters from this running subscription."""
    assert self.subscription_id != -1
    if not parameters:
        return
    options = web_pb2.ParameterSubscriptionRequest()
    options.subscriptionId = self.subscription_id
    options.id.extend(_build_named_object_ids(parameters))
    self._manager.send('unsubscribe', options)
Remove one or more parameters from this subscription .
15,831
def set_parameter_value(self, parameter, value):
    """Set the value of the specified parameter on this processor."""
    parameter = adapt_name_for_rest(parameter)
    url = '/processors/{}/{}/parameters{}'.format(
        self._instance, self._processor, parameter)
    req = _build_value_proto(value)
    self._client.put_proto(url, data=req.SerializeToString())
Sets the value of the specified parameter .
15,832
def set_parameter_values(self, values):
    """Set multiple parameter values in a single bulk request."""
    req = rest_pb2.BulkSetParameterValueRequest()
    for name, value in values.items():
        item = req.request.add()
        item.id.MergeFrom(_build_named_object_id(name))
        item.value.MergeFrom(_build_value_proto(value))
    url = '/processors/{}/{}/parameters/mset'.format(self._instance, self._processor)
    self._client.post_proto(url, data=req.SerializeToString())
Sets the value of multiple parameters .
15,833
def issue_command(self, command, args=None, dry_run=False, comment=None):
    """Issue *command* with optional argument assignments; return the IssuedCommand."""
    req = rest_pb2.IssueCommandRequest()
    req.sequenceNumber = SequenceGenerator.next()
    req.origin = socket.gethostname()
    req.dryRun = dry_run
    if comment:
        req.comment = comment
    if args:
        for name, value in args.items():
            assignment = req.assignment.add()
            assignment.name = name
            assignment.value = str(value)
    command = adapt_name_for_rest(command)
    url = '/processors/{}/{}/commands{}'.format(
        self._instance, self._processor, command)
    response = self._client.post_proto(url, data=req.SerializeToString())
    proto = rest_pb2.IssueCommandResponse()
    proto.ParseFromString(response.content)
    return IssuedCommand(proto, self)
Issue the given command
15,834
def list_alarms(self, start=None, stop=None):
    """Return an iterator over the active alarms, optionally bounded in time."""
    params = {'order': 'asc'}
    if start is not None:
        params['start'] = to_isostring(start)
    if stop is not None:
        params['stop'] = to_isostring(stop)
    url = '/processors/{}/{}/alarms'.format(self._instance, self._processor)
    response = self._client.get_proto(path=url, params=params)
    message = rest_pb2.ListAlarmsResponse()
    message.ParseFromString(response.content)
    return iter([Alarm(proto) for proto in getattr(message, 'alarm')])
Lists the active alarms .
15,835
def set_default_calibrator(self, parameter, type, data):
    """Install *data* as the default calibrator of the given *type* on *parameter*.

    Replaces any default calibrator already associated with the parameter.
    """
    req = mdb_pb2.ChangeParameterRequest()
    req.action = mdb_pb2.ChangeParameterRequest.SET_DEFAULT_CALIBRATOR
    if type:
        _add_calib(req.defaultCalibrator, type, data)
    url = '/mdb/{}/{}/parameters/{}'.format(
        self._instance, self._processor, parameter)
    self._client.post_proto(url, data=req.SerializeToString())
Apply a calibrator while processing raw values of the specified parameter . If there is already a default calibrator associated to this parameter that calibrator gets replaced .
15,836
def reset_calibrators(self, parameter):
    """Reset all calibrators for *parameter* to their original MDB values."""
    req = mdb_pb2.ChangeParameterRequest()
    req.action = mdb_pb2.ChangeParameterRequest.RESET_CALIBRATORS
    url = '/mdb/{}/{}/parameters/{}'.format(
        self._instance, self._processor, parameter)
    self._client.post_proto(url, data=req.SerializeToString())
Reset all calibrators for the specified parameter to their original MDB value .
15,837
def set_default_alarm_ranges(self, parameter, watch=None, warning=None,
                             distress=None, critical=None, severe=None,
                             min_violations=1):
    """Set default out-of-limit alarm ranges for *parameter*."""
    req = mdb_pb2.ChangeParameterRequest()
    req.action = mdb_pb2.ChangeParameterRequest.SET_DEFAULT_ALARMS
    if watch or warning or distress or critical or severe:
        _add_alarms(req.defaultAlarm, watch, warning, distress,
                    critical, severe, min_violations)
    url = '/mdb/{}/{}/parameters/{}'.format(
        self._instance, self._processor, parameter)
    self._client.post_proto(url, data=req.SerializeToString())
Generate out - of - limit alarms for a parameter using the specified alarm ranges .
15,838
def reset_alarm_ranges(self, parameter):
    """Reset all alarm limits for *parameter* to their original MDB values."""
    req = mdb_pb2.ChangeParameterRequest()
    req.action = mdb_pb2.ChangeParameterRequest.RESET_ALARMS
    url = '/mdb/{}/{}/parameters/{}'.format(
        self._instance, self._processor, parameter)
    self._client.post_proto(url, data=req.SerializeToString())
Reset all alarm limits for the specified parameter to their original MDB value .
15,839
def acknowledge_alarm(self, alarm, comment=None):
    """Acknowledge the given parameter alarm, with an optional comment."""
    url = '/processors/{}/{}/parameters{}/alarms/{}'.format(
        self._instance, self._processor, alarm.name, alarm.sequence_number)
    req = rest_pb2.EditAlarmRequest()
    req.state = 'acknowledged'
    if comment is not None:
        req.comment = comment
    self._client.put_proto(url, data=req.SerializeToString())
Acknowledges a specific alarm associated with a parameter .
15,840
def create_command_history_subscription(self, issued_command=None, on_data=None, timeout=60):
    """Open a websocket subscription delivering command-history updates."""
    options = web_pb2.CommandHistorySubscriptionRequest()
    options.ignorePastCommands = True
    if issued_command:
        options.commandId.extend(_build_command_ids(issued_command))
    manager = WebSocketSubscriptionManager(
        self._client, resource='cmdhistory', options=options)
    subscription = CommandHistorySubscription(manager)
    wrapped_callback = functools.partial(
        _wrap_callback_parse_cmdhist_data, subscription, on_data)
    manager.open(wrapped_callback, instance=self._instance, processor=self._processor)
    # Block until the server confirms the subscription (or timeout).
    subscription.reply(timeout=timeout)
    return subscription
Create a new command history subscription .
15,841
def create_parameter_subscription(self, parameters, on_data=None, abort_on_invalid=True,
                                  update_on_expiration=False, send_from_cache=True,
                                  timeout=60):
    """Open a websocket subscription delivering parameter updates."""
    options = web_pb2.ParameterSubscriptionRequest()
    options.subscriptionId = -1
    options.abortOnInvalid = abort_on_invalid
    options.updateOnExpiration = update_on_expiration
    options.sendFromCache = send_from_cache
    options.id.extend(_build_named_object_ids(parameters))
    manager = WebSocketSubscriptionManager(
        self._client, resource='parameter', options=options)
    subscription = ParameterSubscription(manager)
    wrapped_callback = functools.partial(
        _wrap_callback_parse_parameter_data, subscription, on_data)
    manager.open(wrapped_callback, instance=self._instance, processor=self._processor)
    # Block until the server confirms the subscription (or timeout).
    subscription.reply(timeout=timeout)
    return subscription
Create a new parameter subscription .
15,842
def create_alarm_subscription(self, on_data=None, timeout=60):
    """Open a websocket subscription delivering alarm updates."""
    manager = WebSocketSubscriptionManager(self._client, resource='alarms')
    subscription = AlarmSubscription(manager)
    wrapped_callback = functools.partial(
        _wrap_callback_parse_alarm_data, subscription, on_data)
    manager.open(wrapped_callback, instance=self._instance, processor=self._processor)
    # Block until the server confirms the subscription (or timeout).
    subscription.reply(timeout=timeout)
    return subscription
Create a new alarm subscription .
15,843
def get_by(self, name):
    """Return the first element whose ``name`` attribute matches, or None."""
    for item in self:
        if item.name == name:
            return item
    return None
get element by name
15,844
def fastqc(self):
    """Worker: run queued FastQC system calls, log output, and rename result files.

    Bug fix: the original created a fresh ``threading.Lock()`` on every loop
    iteration, so the lock never provided any mutual exclusion; it is now
    created once per worker.
    NOTE(review): each worker thread still builds its own lock, so the log
    writes are not actually serialised ACROSS threads — a shared class-level
    lock would be needed for that; confirm intent before changing further.
    """
    threadlock = threading.Lock()
    while True:
        sample, systemcall, outputdir, fastqcreads = self.qcqueue.get()
        try:
            # Skip samples whose FastQC html output already exists.
            _ = glob(os.path.join(outputdir, '*.html'))[0]
        except IndexError:
            make_path(outputdir)
            outstr = str()
            errstr = str()
            out, err = run_subprocess(systemcall)
            outstr += out
            errstr += err
            out, err = run_subprocess(fastqcreads)
            outstr += out
            errstr += err
            threadlock.acquire()
            write_to_logfile(systemcall, systemcall, self.logfile, sample.general.logout,
                             sample.general.logerr, None, None)
            write_to_logfile(fastqcreads, fastqcreads, self.logfile, sample.general.logout,
                             sample.general.logerr, None, None)
            write_to_logfile(outstr, errstr, self.logfile, sample.general.logout,
                             sample.general.logerr, None, None)
            threadlock.release()
            try:
                # FastQC names outputs after stdin; rename to the sample name.
                shutil.move(os.path.join(outputdir, 'stdin_fastqc.html'),
                            os.path.join(outputdir, '{}_fastqc.html'.format(sample.name)))
                shutil.move(os.path.join(outputdir, 'stdin_fastqc.zip'),
                            os.path.join(outputdir, '{}_fastqc.zip'.format(sample.name)))
            except IOError:
                pass
        self.qcqueue.task_done()
Run fastqc system calls
15,845
def trimquality(self):
    """Quality- and adapter-trim each sample's fastq files with bbduk."""
    logging.info("Trimming fastq files")
    with progressbar(self.metadata) as bar:
        for sample in bar:
            if type(sample.general.fastqfiles) is list:
                fastqfiles = sorted(sample.general.fastqfiles)
                outputdir = sample.general.outputdirectory
                cleanforward = os.path.join(
                    outputdir, '{}_R1_trimmed.fastq.gz'.format(sample.name))
                cleanreverse = os.path.join(
                    outputdir, '{}_R2_trimmed.fastq.gz'.format(sample.name))
                try:
                    lesser_length = min(int(sample.run.forwardlength),
                                        int(sample.run.reverselength))
                except ValueError:
                    # Non-numeric reverse length: fall back to the forward length.
                    lesser_length = int(sample.run.forwardlength)
                min_len = 50 if lesser_length >= 50 else lesser_length
                trim_left = 0
                try:
                    if 'R2' in fastqfiles[0]:
                        if not os.path.isfile(cleanreverse):
                            out, err, bbdukcall = bbtools.bbduk_trim(
                                forward_in=fastqfiles[0], reverse_in=None,
                                forward_out=cleanreverse, trimq=10,
                                minlength=min_len, forcetrimleft=trim_left,
                                returncmd=True)
                        else:
                            bbdukcall = str()
                            out = str()
                            err = str()
                    else:
                        if not os.path.isfile(cleanforward):
                            out, err, bbdukcall = bbtools.bbduk_trim(
                                forward_in=fastqfiles[0],
                                forward_out=cleanforward, trimq=10,
                                minlength=min_len, forcetrimleft=trim_left,
                                returncmd=True)
                        else:
                            bbdukcall = str()
                            out = str()
                            err = str()
                except (IndexError, CalledProcessError):
                    bbdukcall = str()
                    out = str()
                    err = str()
                write_to_logfile(bbdukcall, bbdukcall, self.logfile, sample.general.logout,
                                 sample.general.logerr, None, None)
                write_to_logfile(out, err, self.logfile, sample.general.logout,
                                 sample.general.logerr, None, None)
                trimmedfastqfiles = sorted(glob(os.path.join(
                    sample.general.outputdirectory, '*trimmed.fastq.gz')))
                sample.general.trimmedfastqfiles = (
                    trimmedfastqfiles if trimmedfastqfiles else list())
    logging.info('Fastq files trimmed')
Uses bbduk from the bbmap tool suite to quality- and adapter-trim the fastq files.
15,846
def contamination_finder(self, input_path=None, report_path=None):
    """Run ConFindr on the reads, attach the results to each sample, and write a CSV summary."""
    logging.info('Calculating contamination in reads')
    input_dir = input_path if input_path is not None else self.path
    if report_path is not None:
        reportpath = report_path
    else:
        reportpath = os.path.join(input_dir, 'confindr')
    confindr_report = os.path.join(input_dir, 'confindr', 'confindr_report.csv')
    pipeline_report = os.path.join(reportpath, 'confindr_report.csv')
    if not os.path.isfile(confindr_report):
        # No previous run: clear the report folder and run ConFindr.
        try:
            shutil.rmtree(reportpath)
        except IOError:
            pass
        make_path(reportpath)
        systemcall = 'confindr.py -i {input_dir} -o {output_dir} -d {database_dir} -bf 0.05'.format(
            input_dir=input_dir,
            output_dir=os.path.join(input_dir, 'confindr'),
            database_dir=os.path.join(self.reffilepath, 'ConFindr', 'databases'))
        out, err = run_subprocess(systemcall)
        write_to_logfile(systemcall, systemcall, self.logfile, None, None, None, None)
        write_to_logfile(out, err, self.logfile, None, None, None, None)
    logging.info('Contamination detection complete!')
    confindr_results = pandas.read_csv(confindr_report, index_col=0).T.to_dict()
    for sample in self.metadata:
        sample.confindr = GenObject()
        for line in confindr_results:
            if sample.name in line:
                genus = confindr_results[line]['Genus']
                sample.confindr.genus = genus if type(genus) is not float else 'ND'
                sample.confindr.num_contaminated_snvs = confindr_results[line]['NumContamSNVs']
                sample.confindr.contam_status = confindr_results[line]['ContamStatus']
                try:
                    pc = confindr_results[line]['PercentContam']
                    sample.confindr.percent_contam = pc if str(pc) != 'nan' else 0
                except KeyError:
                    sample.confindr.percent_contam = 'ND'
                try:
                    pcs = confindr_results[line]['PercentContamStandardDeviation']
                    sample.confindr.percent_contam_std = pcs if str(pcs) != 'nan' else 0
                except KeyError:
                    sample.confindr.percent_contam_std = 'ND'
                if sample.confindr.contam_status is True:
                    sample.confindr.contam_status = 'Contaminated'
                elif sample.confindr.contam_status is False:
                    sample.confindr.contam_status = 'Clean'
    with open(pipeline_report, 'w') as csv:
        data = 'Strain,Genus,NumContamSNVs,ContamStatus,PercentContam,PercentContamSTD\n'
        for sample in self.metadata:
            data += '{str},{genus},{numcontamsnv},{status},{pc},{pcs}\n'.format(
                str=sample.name,
                genus=sample.confindr.genus,
                numcontamsnv=sample.confindr.num_contaminated_snvs,
                status=sample.confindr.contam_status,
                pc=sample.confindr.percent_contam,
                pcs=sample.confindr.percent_contam_std)
        csv.write(data)
Helper function to get confindr integrated into the assembly pipeline
15,847
def estimate_genome_size(self):
    """Estimate each sample's genome size with bbmap's kmercountexact."""
    logging.info('Estimating genome size using kmercountexact')
    for sample in self.metadata:
        sample[self.analysistype].peaksfile = os.path.join(
            sample[self.analysistype].outputdir, 'peaks.txt')
        out, err, cmd = bbtools.kmercountexact(
            forward_in=sorted(sample.general.fastqfiles)[0],
            peaks=sample[self.analysistype].peaksfile,
            returncmd=True,
            threads=self.cpus)
        sample[self.analysistype].kmercountexactcmd = cmd
        sample[self.analysistype].genomesize = bbtools.genome_size(
            sample[self.analysistype].peaksfile)
        write_to_logfile(out, err, self.logfile, sample.general.logout,
                         sample.general.logerr, None, None)
Use kmercountexact from the bbmap suite of tools to estimate the size of the genome
15,848
def error_correction(self):
    """Error-correct the trimmed reads with tadpole from the bbmap suite."""
    logging.info('Error correcting reads')
    for sample in self.metadata:
        sample.general.trimmedcorrectedfastqfiles = [
            fastq.split('.fastq.gz')[0] + '_trimmed_corrected.fastq.gz'
            for fastq in sorted(sample.general.fastqfiles)]
        try:
            if not os.path.isfile(sample.general.trimmedcorrectedfastqfiles[0]):
                try:
                    out, err, cmd = bbtools.tadpole(
                        forward_in=sorted(sample.general.trimmedfastqfiles)[0],
                        forward_out=sample.general.trimmedcorrectedfastqfiles[0],
                        returncmd=True,
                        mode='correct',
                        threads=self.cpus)
                    sample[self.analysistype].errorcorrectcmd = cmd
                    write_to_logfile(out=out, err=err, logfile=self.logfile,
                                     samplelog=sample.general.logout,
                                     sampleerr=sample.general.logerr,
                                     analysislog=None, analysiserr=None)
                except IndexError:
                    sample.general.trimmedcorrectedfastqfiles = list()
        except CalledProcessError:
            # Correction failed: fall back to the uncorrected trimmed reads.
            sample.general.trimmedcorrectedfastqfiles = sample.general.trimmedfastqfiles
        except AttributeError:
            sample.general.trimmedcorrectedfastqfiles = list()
        except IndexError:
            sample.general.trimmedcorrectedfastqfiles = list()
Use tadpole from the bbmap suite of tools to perform error correction of the reads
15,849
def normalise_reads(self):
    """Normalise read depth with bbnorm from the bbmap suite."""
    logging.info('Normalising reads to a kmer depth of 100')
    for sample in self.metadata:
        sample.general.normalisedreads = [
            fastq.split('.fastq.gz')[0] + '_normalised.fastq.gz'
            for fastq in sorted(sample.general.fastqfiles)]
        try:
            out, err, cmd = bbtools.bbnorm(
                forward_in=sorted(sample.general.trimmedcorrectedfastqfiles)[0],
                forward_out=sample.general.normalisedreads[0],
                returncmd=True,
                threads=self.cpus)
            sample[self.analysistype].normalisecmd = cmd
            write_to_logfile(out, err, self.logfile, sample.general.logout,
                             sample.general.logerr, None, None)
        except CalledProcessError:
            # Normalisation failed: fall back to the trimmed reads.
            sample.general.normalisedreads = sample.general.trimmedfastqfiles
        except IndexError:
            sample.general.normalisedreads = list()
Use bbnorm from the bbmap suite of tools to perform read normalisation
15,850
def merge_pairs(self):
    """Merge paired-end reads with bbmerge; single-ended samples pass through."""
    logging.info('Merging paired reads')
    for sample in self.metadata:
        if len(sample.general.fastqfiles) == 2:
            outputdir = sample.general.outputdirectory
            sample.general.mergedreads = os.path.join(
                outputdir, '{}_paired.fastq.gz'.format(sample.name))
            sample.general.unmergedforward = os.path.join(
                outputdir, '{}_unpaired_R1.fastq.gz'.format(sample.name))
            sample.general.unmergedreverse = os.path.join(
                outputdir, '{}_unpaired_R2.fastq.gz'.format(sample.name))
            try:
                out, err, cmd = bbtools.bbmerge(
                    forward_in=sorted(sample.general.trimmedcorrectedfastqfiles)[0],
                    merged_reads=sample.general.mergedreads,
                    returncmd=True,
                    outu1=sample.general.unmergedforward,
                    outu2=sample.general.unmergedreverse,
                    threads=self.cpus)
                sample[self.analysistype].bbmergecmd = cmd
                write_to_logfile(out, err, self.logfile, sample.general.logout,
                                 sample.general.logerr, None, None)
            except (CalledProcessError, IndexError):
                # Merge failed or no input reads: remove the stale attributes.
                delattr(sample.general, 'mergedreads')
                delattr(sample.general, 'unmergedforward')
                delattr(sample.general, 'unmergedreverse')
        else:
            sample.general.mergedreads = sorted(
                sample.general.trimmedcorrectedfastqfiles)[0]
Use bbmerge from the bbmap suite of tools to merge paired - end reads
15,851
def main(self):
    """Run the full set of assembly-metrics methods in pipeline order."""
    self.fasta_records()
    self.fasta_stats()
    self.find_largest_contig()
    self.find_genome_length()
    self.find_num_contigs()
    self.find_n50()
    self.perform_pilon()
    self.clear_attributes()
Run all the methods required for pipeline outputs
15,852
def fasta_records(self):
    """Load every FASTA record for each sample into a dict keyed on record ID."""
    for sample in self.metadata:
        setattr(sample, self.analysistype, GenObject())
        try:
            records = SeqIO.to_dict(SeqIO.parse(sample.general.bestassemblyfile, "fasta"))
        except FileNotFoundError:
            # Assembly file absent: store an empty mapping rather than failing
            records = {}
        sample[self.analysistype].record_dict = records
Use SeqIO to create dictionaries of all records for each FASTA file
15,853
def fasta_stats(self):
    """
    Parse the lengths of all contigs for each sample, as well as the overall GC%.

    Contig lengths are stored sorted in descending order; GC% is rounded to two
    decimal places, or 'NA' if it cannot be computed.
    """
    for sample in self.metadata:
        contig_lengths = list()
        sequences = list()
        for contig, record in sample[self.analysistype].record_dict.items():
            contig_lengths.append(len(record.seq))
            sequences.append(str(record.seq))
        # ''.join avoids the quadratic cost of repeated += concatenation, and
        # normalising each record to str avoids mixed str/Seq concatenation
        fasta_sequence = ''.join(sequences)
        sample[self.analysistype].contig_lengths = sorted(contig_lengths, reverse=True)
        try:
            sample[self.analysistype].gc = float('{:0.2f}'.format(GC(fasta_sequence)))
        except TypeError:
            sample[self.analysistype].gc = 'NA'
Parse the lengths of all contigs for each sample as well as the total GC%
15,854
def find_largest_contig(self):
    """
    Determine the length of the largest contig for each strain.

    Bug fixed: the original assigned the entire contig_lengths list to
    longest_contig instead of the single largest length.
    """
    for sample in self.metadata:
        # contig_lengths is sorted in descending order, so index 0 is the longest
        contig_lengths = sample[self.analysistype].contig_lengths
        sample[self.analysistype].longest_contig = contig_lengths[0] if contig_lengths else 0
Determine the largest contig for each strain
15,855
def find_genome_length(self):
    """Total the contig lengths to obtain the assembly size for each strain."""
    for sample in self.metadata:
        lengths = sample[self.analysistype].contig_lengths
        sample[self.analysistype].genome_length = sum(lengths)
Determine the total length of all the contigs for each strain
15,856
def find_num_contigs(self):
    """Record how many contigs make up each assembly."""
    for sample in self.metadata:
        data = sample[self.analysistype]
        data.num_contigs = len(data.contig_lengths)
Count the total number of contigs for each strain
15,857
def find_n50(self):
    """
    Calculate the N50 for each strain: the largest contig length such that contigs
    of at least that length together contain at least half of the total genome.
    Left as '-' when no contig reaches the halfway point.
    """
    for sample in self.metadata:
        data = sample[self.analysistype]
        data.n50 = '-'
        half_genome = data.genome_length * 0.5
        running_total = 0
        # contig_lengths is sorted longest-first, so the first contig to push the
        # running total past the halfway mark is the N50
        for length in data.contig_lengths:
            running_total += length
            if running_total >= half_genome:
                data.n50 = length
                break
Calculate the N50 for each strain . N50 is defined as the largest contig such that at least half of the total genome size is contained in contigs equal to or larger than this contig
15,858
def perform_pilon(self):
    """
    Flag whether pilon polishing should be attempted. Polishing is skipped when
    confindr reports the sample as contaminated or the assembly has more than 500
    contigs; missing attributes default to attempting polishing.
    """
    for sample in self.metadata:
        try:
            # Short-circuit order matches the original: contig count first
            sample.general.polish = not (sample[self.analysistype].num_contigs > 500
                                         or sample.confindr.contam_status == 'Contaminated')
        except AttributeError:
            sample.general.polish = True
Determine if pilon polishing should be attempted . Do not perform polishing if confindr determines that the sample is contaminated or if there are > 500 contigs
15,859
def clear_attributes(self):
    """
    Drop attributes that should not be serialised: record_dict holds SeqRecords
    (not JSON-serialisable), while contig_lengths and longest_contig are bulky
    values that clutter the .json output.
    """
    doomed = ('record_dict', 'contig_lengths', 'longest_contig')
    for sample in self.metadata:
        try:
            for attribute in doomed:
                delattr(sample[self.analysistype], attribute)
        except AttributeError:
            pass
Remove the record_dict attribute from the object as SeqRecords are not JSON - serializable . Also remove the contig_lengths and longest_contig attributes as they are large lists that make the . json file ugly
15,860
def run_qaml(self):
    """
    Create and run the GenomeQAML quality assessment system call.

    The report is only generated if it does not already exist; the command and its
    stdout/stderr are written to the logfile under a thread lock.
    """
    logging.info('Running GenomeQAML quality assessment')
    qaml_call = 'classify.py -t {tf} -r {rf}'.format(tf=self.qaml_path, rf=self.qaml_report)
    make_path(self.reportpath)
    if not os.path.isfile(self.qaml_report):
        out, err = run_subprocess(qaml_call)
        # Serialise logfile writes across threads
        self.threadlock.acquire()
        write_to_logfile(qaml_call, qaml_call, self.logfile)
        write_to_logfile(out, err, self.logfile)
        self.threadlock.release()
Create and run the GenomeQAML system call
15,861
def parse_qaml(self):
    """
    Parse the GenomeQAML report and populate each metadata object with the
    predicted class for its sample.
    """
    logging.info('Parsing GenomeQAML outputs')
    nesteddictionary = dict()
    dictionary = pandas.read_csv(self.qaml_report).to_dict()
    # Invert the column-major dict from pandas into {row_index: {header: value}}
    for header in dictionary:
        for sample, value in dictionary[header].items():
            try:
                nesteddictionary[sample].update({header: value})
            except KeyError:
                nesteddictionary[sample] = dict()
                nesteddictionary[sample].update({header: value})
    for sample in self.metadata:
        setattr(sample, self.analysistype, GenObject())
        # Default to an empty prediction in case the sample is absent from the report
        sample[self.analysistype].prediction = str()
        for line in nesteddictionary:
            name = nesteddictionary[line]['Sample']
            if name == sample.name:
                sample[self.analysistype].prediction = nesteddictionary[line]['Predicted_Class']
Parse the GenomeQAML report and populate metadata objects
15,862
def init(self, ):
    """
    Initialize the plugin: look up the Genesis plugin via the MayaPluginManager
    and build a Maya-specific subclass of its GenesisWin.
    """
    self.gw = None
    pm = MayaPluginManager.get()
    genesis = pm.get_plugin("Genesis")
    self.GenesisWin = self.subclass_genesis(genesis.GenesisWin)
Initialize the plugin . Builds a Maya - specific subclass of the Genesis window .
15,863
def save_lastfile(self, tfi):
    """
    Save the given taskfile in the config.

    :param tfi: taskfile-info object; its task/version/releasetype/descriptor/typ
                fields identify the persisted TaskFile model.
    """
    tf = models.TaskFile.objects.get(task=tfi.task, version=tfi.version, releasetype=tfi.releasetype, descriptor=tfi.descriptor, typ=tfi.typ)
    c = self.get_config()
    # Store the primary key so the last-opened file can be restored next session
    c['lastfile'] = tf.pk
    c.write()
Save the taskfile in the config
15,864
def subclass_genesis(self, genesisclass):
    """
    Subclass the given genesis class and implement all abstract methods with
    Maya-specific open/save behaviour.

    :param genesisclass: the GenesisWin class to subclass
    :returns: the new MayaGenesisWin class
    """
    class MayaGenesisWin(genesisclass):

        def open_shot(self, taskfile):
            # Shots and assets share the same open logic
            return self.open_file(taskfile)

        def save_shot(self, jbfile, tf):
            # Record the taskfile on the scene node before writing to disk
            self.update_scene_node(tf)
            self.save_file(jbfile)

        def open_asset(self, taskfile):
            return self.open_file(taskfile)

        def save_asset(self, jbfile, tf):
            self.update_scene_node(tf)
            self.save_file(jbfile)

        def save_file(self, jbfile):
            p = jbfile.get_fullpath()
            p = os.path.expanduser(p)
            # Choose the Maya file type from the file extension
            typ = 'mayaBinary'
            if jbfile.get_ext() == 'ma':
                typ = 'mayaAscii'
            cmds.file(rename=p)
            cmds.file(save=True, defaultExtensions=False, type=typ)

        def open_file(self, taskfile):
            # Give the user a chance to save unsaved changes first
            r = self.check_modified()
            if r is False:
                return False
            cmds.file(taskfile.path, open=True, force=True, ignoreVersion=True)
            return True

        def get_current_file(self, ):
            node = jbscene.get_current_scene_node()
            if not node:
                return
            tfid = cmds.getAttr('%s.taskfile_id' % node)
            try:
                return djadapter.taskfiles.get(id=tfid)
            except djadapter.models.TaskFile.DoesNotExist:
                log.error("No taskfile with id %s was found. Get current scene failed. Check your jb_sceneNode \'%s\'." % (tfid, node))
                return

        def get_scene_node(self, ):
            scenenodes = cmds.ls(':jb_sceneNode*')
            if len(scenenodes) > 1:
                # Multiple scene nodes are invalid; delete them and start fresh
                cmds.delete(scenenodes)
            node = jbscene.get_current_scene_node()
            if node is None:
                cmds.namespace(set=':')
                node = cmds.createNode('jb_sceneNode')
            return node

        def update_scene_node(self, tf):
            node = self.get_scene_node()
            # The attribute is kept locked; unlock only long enough to write it
            cmds.setAttr('%s.taskfile_id' % node, lock=False)
            cmds.setAttr('%s.taskfile_id' % node, tf.id)
            cmds.setAttr('%s.taskfile_id' % node, lock=True)

        def check_modified(self, ):
            # Nothing to save: proceed
            if not cmds.file(q=1, modified=1):
                return True
            curfile = cmds.file(q=1, sceneName=1)
            r = cmds.confirmDialog(title='Save Changes', message='Save changes to %s?' % curfile, button=['Save', 'Don\'t Save', 'Cancel'], defaultButton='Save', cancelButton='Cancel', dismissString='Cancel')
            if r == 'Cancel':
                return False
            if r == 'Save':
                cmds.file(save=True, force=True)
            return True

    MayaGenesisWin.set_filetype(djadapter.FILETYPES['mayamainscene'], )
    return MayaGenesisWin
Subclass the given genesis class and implement all abstract methods
15,865
def stash_calibration(self, attenuations, freqs, frange, calname):
    """Hold on to the supplied calibration data for later use."""
    self.calname = calname
    self.calibration_frange = frange
    self.calibration_freqs = freqs
    self.calibration_vector = attenuations
Save it for later
15,866
def set_stim_by_index(self, index):
    """Replace the stimulus contents with the single component at *index*."""
    component = self.stim_components[index]
    self.stimulus.clearComponents()
    self.stimulus.insertComponent(component)
Sets the stimulus to be generated to the one referenced by index
15,867
def process_calibration(self, save=True):
    """
    Process the calibration control signal: determine the transfer function of the
    speaker to obtain a frequency-vs-attenuation curve, store it in the datafile,
    and measure the reference tone level.

    :returns: (attenuation curve, dataset name, calibration frequency, reference dB)
    """
    # NOTE(review): the *save* parameter is not used in this body — confirm intended
    if not self.save_data:
        raise Exception("Cannot process an unsaved calibration")
    # Average the recorded signal across repetitions before deriving attenuation
    avg_signal = np.mean(self.datafile.get_data(self.current_dataset_name + '/signal'), axis=0)
    diffdB = attenuation_curve(self.stimulus.signal()[0], avg_signal, self.stimulus.samplerate(), self.calf)
    logger = logging.getLogger('main')
    logger.debug('The maximum dB attenuation is {}, caldB {}'.format(max(diffdB), self.caldb))
    # Persist the attenuation curve under a nested calibration_intensities dataset
    self.datafile.init_data(self.current_dataset_name, mode='calibration', dims=diffdB.shape, nested_name='calibration_intensities')
    self.datafile.append(self.current_dataset_name, diffdB, nested_name='calibration_intensities')
    relevant_info = {'frequencies': 'all', 'calibration_dB': self.caldb, 'calibration_voltage': self.calv, 'calibration_frequency': self.calf, }
    self.datafile.set_metadata('/'.join([self.current_dataset_name, 'calibration_intensities']), relevant_info)
    # Measure the reference tone amplitude and convert to dB via microphone sensitivity
    mean_reftone = np.mean(self.datafile.get_data(self.current_dataset_name + '/reference_tone'), axis=0)
    tone_amp = signal_amplitude(mean_reftone, self.player.get_aifs())
    db = calc_db(tone_amp, self.mphonesens, self.mphonedb)
    # Remove the calibration stimulus from the protocol queue
    self.protocol_model.remove(0)
    return diffdB, self.current_dataset_name, self.calf, db
processes calibration control signal . Determines transfer function of speaker to get frequency vs . attenuation curve .
15,868
def setModel(self, model):
    """Sets the StimulusModel for this editor and syncs the samplerate spinbox."""
    self._model = model
    self.ui.aofsSpnbx.setValue(model.samplerate())
Sets the StimulusModel for this editor
15,869
def setStimIndex(self, row, stimIndex):
    """Change out the component type in *row* to the one indexed by *stimIndex*."""
    replacement = self._allComponents[row][stimIndex]
    # Swap the component in column 1 of the given track
    self._model.removeComponent(row, 1)
    self._model.insertComponent(replacement, row, 1)
Change out the component type in row to the one indexed by stimIndex
15,870
def addComponentEditor(self):
    """
    Adds a new component track to the model and an editor stack for it.

    :returns: the newly created ExploreComponentEditor
    """
    row = self._model.rowCount()
    comp_stack_editor = ExploreComponentEditor()
    self.ui.trackStack.addWidget(comp_stack_editor)
    # Button that switches the stack to this track when clicked
    idx_button = IndexButton(row)
    idx_button.pickMe.connect(self.ui.trackStack.setCurrentIndex)
    self.trackBtnGroup.addButton(idx_button)
    self.ui.trackBtnLayout.addWidget(idx_button)
    self.ui.trackStack.setCurrentIndex(row)
    comp_stack_editor.closePlease.connect(self.removeComponentEditor)
    # Column 0 of every track is the leading silence (delay) component
    delay = Silence()
    comp_stack_editor.delaySpnbx.setValue(delay.duration())
    self._model.insertComponent(delay, row, 0)
    # Instantiate one of each explorable stimulus type for this track
    self._allComponents.append([x() for x in self.stimuli_types if x.explore])
    for stim in self._allComponents[row]:
        editor = wrapComponent(stim).showEditor()
        comp_stack_editor.addWidget(editor, stim.name)
    exvocal = comp_stack_editor.widgetForName("Vocalization")
    if exvocal is not None:
        exvocal.filelistView.setSelectionMode(QtGui.QAbstractItemView.SingleSelection)
    # Column 1 starts out as the first stimulus type
    initcomp = self._allComponents[row][0]
    self._model.insertComponent(initcomp, row, 1)
    self.buttons.append(idx_button)
    # row is bound per-call, so these lambdas each capture their own track index
    comp_stack_editor.exploreStimTypeCmbbx.currentIndexChanged.connect(lambda x: self.setStimIndex(row, x))
    comp_stack_editor.delaySpnbx.valueChanged.connect(lambda x: self.setDelay(row, x))
    comp_stack_editor.valueChanged.connect(self.valueChanged.emit)
    return comp_stack_editor
Adds a new component to the model and an editor for this component to this editor
15,871
def list_space_systems(self, page_size=None):
    """
    Lists the space systems visible to this client.

    :param page_size: optional maximum number of results per page
    """
    params = {} if page_size is None else {'limit': page_size}
    return pagination.Iterator(
        client=self._client,
        path='/mdb/{}/space-systems'.format(self._instance),
        params=params,
        response_class=mdb_pb2.ListSpaceSystemsResponse,
        items_key='spaceSystem',
        item_mapper=SpaceSystem,
    )
Lists the space systems visible to this client .
15,872
def get_space_system(self, name):
    """
    Gets a single space system by its unique name.

    :param name: name of the space system
    """
    # Normalise the name the same way as the sibling accessors (get_parameter,
    # get_container, ...). Without this, a name lacking a leading '/' yields a
    # malformed URL such as '/mdb/<instance>/space-systemsfoo'.
    name = adapt_name_for_rest(name)
    url = '/mdb/{}/space-systems{}'.format(self._instance, name)
    response = self._client.get_proto(url)
    message = mdb_pb2.SpaceSystemInfo()
    message.ParseFromString(response.content)
    return SpaceSystem(message)
Gets a single space system by its unique name .
15,873
def list_parameters(self, parameter_type=None, page_size=None):
    """
    Lists the parameters visible to this client.

    :param parameter_type: optional parameter type to filter on
    :param page_size: optional maximum number of results per page
    """
    # Always request detailed parameter info; add optional filters when supplied
    params = {'details': True}
    for key, value in (('type', parameter_type), ('limit', page_size)):
        if value is not None:
            params[key] = value
    return pagination.Iterator(
        client=self._client,
        path='/mdb/{}/parameters'.format(self._instance),
        params=params,
        response_class=mdb_pb2.ListParametersResponse,
        items_key='parameter',
        item_mapper=Parameter,
    )
Lists the parameters visible to this client .
15,874
def get_parameter(self, name):
    """Gets a single parameter by its name."""
    rest_name = adapt_name_for_rest(name)
    url = '/mdb/{}/parameters{}'.format(self._instance, rest_name)
    info = mdb_pb2.ParameterInfo()
    info.ParseFromString(self._client.get_proto(url).content)
    return Parameter(info)
Gets a single parameter by its name .
15,875
def list_containers(self, page_size=None):
    """
    Lists the containers visible to this client.

    :param page_size: optional maximum number of results per page
    """
    params = {} if page_size is None else {'limit': page_size}
    return pagination.Iterator(
        client=self._client,
        path='/mdb/{}/containers'.format(self._instance),
        params=params,
        response_class=mdb_pb2.ListContainersResponse,
        items_key='container',
        item_mapper=Container,
    )
Lists the containers visible to this client .
15,876
def get_container(self, name):
    """Gets a single container by its unique name."""
    rest_name = adapt_name_for_rest(name)
    url = '/mdb/{}/containers{}'.format(self._instance, rest_name)
    info = mdb_pb2.ContainerInfo()
    info.ParseFromString(self._client.get_proto(url).content)
    return Container(info)
Gets a single container by its unique name .
15,877
def list_commands(self, page_size=None):
    """
    Lists the commands visible to this client.

    :param page_size: optional maximum number of results per page
    """
    params = {} if page_size is None else {'limit': page_size}
    return pagination.Iterator(
        client=self._client,
        path='/mdb/{}/commands'.format(self._instance),
        params=params,
        response_class=mdb_pb2.ListCommandsResponse,
        items_key='command',
        item_mapper=Command,
    )
Lists the commands visible to this client .
15,878
def get_command(self, name):
    """Gets a single command by its unique name."""
    rest_name = adapt_name_for_rest(name)
    url = '/mdb/{}/commands{}'.format(self._instance, rest_name)
    info = mdb_pb2.CommandInfo()
    info.ParseFromString(self._client.get_proto(url).content)
    return Command(info)
Gets a single command by its unique name .
15,879
def list_algorithms(self, page_size=None):
    """
    Lists the algorithms visible to this client.

    :param page_size: optional maximum number of results per page
    """
    params = {} if page_size is None else {'limit': page_size}
    return pagination.Iterator(
        client=self._client,
        path='/mdb/{}/algorithms'.format(self._instance),
        params=params,
        response_class=mdb_pb2.ListAlgorithmsResponse,
        items_key='algorithm',
        item_mapper=Algorithm,
    )
Lists the algorithms visible to this client .
15,880
def get_algorithm(self, name):
    """Gets a single algorithm by its unique name."""
    rest_name = adapt_name_for_rest(name)
    url = '/mdb/{}/algorithms{}'.format(self._instance, rest_name)
    info = mdb_pb2.AlgorithmInfo()
    info.ParseFromString(self._client.get_proto(url).content)
    return Algorithm(info)
Gets a single algorithm by its unique name .
15,881
def list_buckets(self, instance):
    """List the buckets for an instance; returns an iterator of Bucket objects."""
    response = self._client.get_proto(path='/buckets/' + instance)
    message = rest_pb2.ListBucketsResponse()
    message.ParseFromString(response.content)
    return iter(Bucket(proto, instance, self) for proto in message.bucket)
List the buckets for an instance .
15,882
def list_objects(self, instance, bucket_name, prefix=None, delimiter=None):
    """
    List the objects for a bucket.

    :param prefix: only return objects whose name starts with this prefix
    :param delimiter: group object names at this delimiter
    """
    url = '/buckets/{}/{}'.format(instance, bucket_name)
    # Only include the filters that were actually supplied
    params = {key: value
              for key, value in (('prefix', prefix), ('delimiter', delimiter))
              if value is not None}
    response = self._client.get_proto(path=url, params=params)
    message = rest_pb2.ListObjectsResponse()
    message.ParseFromString(response.content)
    return ObjectListing(message, instance, bucket_name, self)
List the objects for a bucket .
15,883
def create_bucket(self, instance, bucket_name):
    """Create a new bucket in the specified instance."""
    request = rest_pb2.CreateBucketRequest()
    request.name = bucket_name
    self._client.post_proto('/buckets/{}'.format(instance),
                            data=request.SerializeToString())
Create a new bucket in the specified instance .
15,884
def remove_bucket(self, instance, bucket_name):
    """Remove (delete) a bucket from the specified instance."""
    self._client.delete_proto('/buckets/{}/{}'.format(instance, bucket_name))
Remove a bucket from the specified instance .
15,885
def upload_object(self, instance, bucket_name, object_name, file_obj, content_type=None):
    """
    Upload an object to a bucket.

    :param file_obj: either a path to a local file, or an already-open binary
                     file-like object. The original implementation only accepted
                     a path despite the parameter name (and despite the Bucket
                     wrapper forwarding arbitrary file objects).
    :param content_type: optional MIME type to record with the object
    """
    url = '/buckets/{}/{}/{}'.format(instance, bucket_name, object_name)

    def _post(handle):
        # Build the multipart payload; include the content type only when given
        if content_type:
            files = {object_name: (object_name, handle, content_type)}
        else:
            files = {object_name: (object_name, handle)}
        self._client.request(path=url, method='post', files=files)

    if hasattr(file_obj, 'read'):
        # Already a file-like object: use it directly
        _post(file_obj)
    else:
        # Treat as a filesystem path (backwards-compatible behaviour)
        with open(file_obj, 'rb') as f:
            _post(f)
Upload an object to a bucket .
15,886
def remove_object(self, instance, bucket_name, object_name):
    """Remove (delete) an object from a bucket."""
    self._client.delete_proto('/buckets/{}/{}/{}'.format(instance, bucket_name, object_name))
Remove an object from a bucket .
15,887
def get_now_utc():
    """
    Return the current date/time as a timezone-aware datetime in UTC.

    Uses the stdlib ``datetime.timezone.utc`` (Python 3.2+) instead of hand-rolling
    a tzinfo subclass with zero offset and zero DST, which is exactly what
    ``timezone.utc`` already provides.
    """
    return datetime.datetime.now(datetime.timezone.utc)
date in UTC ISO format
15,888
def get(self, position):
    """Return the value stored at *position*, or None when out of range."""
    index = 0
    node = self.head
    while node is not None and index <= position:
        if index == position:
            return node.val
        node = node.next_node
        index += 1
    return None
Gets value at index
15,889
def insert_first(self, val):
    """Insert *val* at the head of the list; always returns True."""
    new_head = Node(val, next_node=self.head)
    self.head = new_head
    return True
Insert in head
15,890
def insert(self, val, position=0):
    """
    Insert *val* at *position*; positions past the tail append at the end.

    Bug fixed: inserting at position > 0 into an empty list previously
    dereferenced ``last_node`` while it was still None, raising AttributeError.
    An empty list now delegates to insert_first.
    """
    if position <= 0 or self.head is None:
        return self.insert_first(val)
    counter = 0
    last_node = self.head
    current_node = self.head
    while current_node is not None and counter <= position:
        if counter == position:
            # Splice the new node in before current_node
            last_node.next_node = Node(val, current_node)
            return True
        last_node = current_node
        current_node = current_node.next_node
        counter += 1
    if current_node is None:
        # Walked off the tail: append
        last_node.next_node = Node(val, None)
        return True
Insert in position
15,891
def remove(self, position):
    """
    Remove the node at *position*; returns True on success, False otherwise.

    Positions at or past either end are delegated to remove_first/remove_last.
    """
    if position <= 0:
        return self.remove_first()
    if position >= self.length() - 1:
        return self.remove_last()
    counter = 0
    last_node = self.head
    current_node = self.head
    while current_node is not None and counter <= position:
        if counter == position:
            # Unlink current_node by bypassing it
            last_node.next_node = current_node.next_node
            return True
        last_node = current_node
        current_node = current_node.next_node
        counter += 1
    return False
Removes at index
15,892
def to_lst(self):
    """Walk the list from head to tail and collect every value into a list."""
    values = []
    cursor = self.head
    while cursor is not None:
        values.append(cursor.val)
        cursor = cursor.next_node
    return values
Cycle all items and puts them in a list
15,893
def execute(self, func, *args, **kwargs):
    """Apply *func* (with any extra args) to each stored value; return the results."""
    results = []
    for item in self.to_lst():
        results.append(func(item, *args, **kwargs))
    return results
Executes function on each item
15,894
def from_specification(specification, env_prefix=None, separator='.', parent_names=None):
    """
    Create YapconfItems from a specification dictionary.

    :param specification: mapping of item name -> item info dict
    :param env_prefix: prefix applied to environment-variable names
    :param separator: separator used when flattening nested names
    :param parent_names: names of enclosing items, for nested specifications
    :returns: dict of item name -> generated item
    """
    items = {}
    for item_name, item_info in six.iteritems(specification):
        # Copy so each item gets its own ancestry list to mutate
        names = copy.copy(parent_names) if parent_names else []
        items[item_name] = _generate_item(item_name, item_info, env_prefix, separator, names)
    return items
Used to create YapconfItems from a specification dictionary .
15,895
def update_default(self, new_default, respect_none=False):
    """
    Update the current default with *new_default*. A None value only clears the
    existing default when *respect_none* is set.
    """
    if new_default is not None:
        self.default = new_default
    elif respect_none:
        self.default = None
Update our current default with the new_default .
15,896
def migrate_config(self, current_config, config_to_migrate, always_update, update_defaults):
    """
    Migrate this item's value from *current_config* into *config_to_migrate*.

    :param current_config: the config being migrated from
    :param config_to_migrate: the config being built up
    :param always_update: overwrite the value even if one is already present
    :param update_defaults: refresh the stored default while migrating
    """
    # Look up the value under any of this item's possible (previous) names
    value = self._search_config_for_possible_names(current_config)
    self._update_config(config_to_migrate, value, always_update, update_defaults)
Migrate config value in current_config updating config_to_migrate .
15,897
def add_argument(self, parser, bootstrap=False):
    """Add this item as an argument to *parser*, unless it is hidden from the CLI."""
    if not self.cli_expose:
        return
    parser.add_argument(*self._get_argparse_names(parser.prefix_chars),
                        **self._get_argparse_kwargs(bootstrap))
Add this item as an argument to the given parser .
15,898
def get_config_value(self, overrides, skip_environment=False):
    """
    Get the configuration value from all overrides.

    :param overrides: override sources searched in priority order
    :param skip_environment: skip searching environment variables
    :raises YapconfItemNotFound: when the item is required and neither an
        override nor a default supplies a value
    :returns: the converted, validated value (or None if unset)
    """
    label, override, key = self._search_overrides(overrides, skip_environment)
    if override is None and self.default is None and self.required:
        raise YapconfItemNotFound('Could not find config value for {0}'.format(self.fq_name), self)
    if override is None:
        self.logger.debug('Config value not found for {0}, falling back to default.'.format(self.name))
        value = self.default
    else:
        value = override[key]
    # A literal None is returned without conversion or validation
    if value is None:
        return value
    converted_value = self.convert_config_value(value, label)
    self._validate_value(converted_value)
    return converted_value
Get the configuration value from all overrides .
15,899
def add_argument(self, parser, bootstrap=False):
    """
    Add this boolean item as a pair of mutually-exclusive flags on *parser*.

    The default is temporarily toggled to True and then False so that
    _get_argparse_names/_get_argparse_kwargs generate the enable and disable
    variants of the flag; the original default is restored afterwards.
    """
    tmp_default = self.default
    exclusive_grp = parser.add_mutually_exclusive_group()
    self.default = True
    args = self._get_argparse_names(parser.prefix_chars)
    kwargs = self._get_argparse_kwargs(bootstrap)
    exclusive_grp.add_argument(*args, **kwargs)
    self.default = False
    args = self._get_argparse_names(parser.prefix_chars)
    kwargs = self._get_argparse_kwargs(bootstrap)
    exclusive_grp.add_argument(*args, **kwargs)
    # Restore the caller-visible default
    self.default = tmp_default
Add boolean item as an argument to the given parser .