idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
16,100 | def get_just_date ( self ) : return datetime . datetime ( self . date_time . year , self . date_time . month , self . date_time . day ) | Parses just date from date - time |
16,101 | def is_date_in_between ( self , start , end , include_start = True , include_end = True ) : start = Day ( start ) . get_just_date ( ) now = self . get_just_date ( ) end = Day ( end ) . get_just_date ( ) if start < now < end : return True if include_start and now == start : return True if include_end and now == end : return True return False | Checks if date is in between dates |
16,102 | def get_next_weekday ( self , including_today = False ) : weekday = self . date_time . weekday ( ) return Weekday . get_next ( weekday , including_today = including_today ) | Gets next week day |
16,103 | def get_last_weekday ( self , including_today = False ) : weekday = self . date_time . weekday ( ) return Weekday . get_last ( weekday , including_today = including_today ) | Gets last week day |
16,104 | def cli ( context , verbose , quiet , database , sense ) : logger = logging . getLogger ( ) handler = logging . StreamHandler ( sys . stderr ) handler . setFormatter ( LevelFormatter ( ) ) logger . addHandler ( handler ) logger . setLevel ( logging . WARNING + ( quiet - verbose ) * 10 ) logging . debug ( _ ( 'Subcommand: %s' ) , context . invoked_subcommand ) context . obj [ 'database' ] = Database ( database ) try : context . obj [ 'sense' ] = SenseWithExport ( sense ) . __enter__ ( ) except Exception : pass | Position Independent Programming For Humans . |
16,105 | def search ( context , keywords , module , raw , kind ) : logging . info ( _ ( 'Entering search mode' ) ) sense = context . obj [ 'sense' ] func = sense . query_names if module else sense . query_info none = True for keyword in keywords : output = func ( keyword , raw , kind ) if output : none = False print ( output ) else : logging . warning ( _ ( 'No results: %s' ) , keyword ) sys . exit ( 1 if none else 0 ) | Query Windows identifiers and locations . |
16,106 | def winapi ( context , names ) : logging . info ( _ ( 'Entering winapi mode' ) ) sense = context . obj [ 'sense' ] none = True for name in names : code = sense . query_args ( name ) if code : none = False print ( stylify_code ( code ) ) else : logging . warning ( _ ( 'Function not found: %s' ) , name ) sys . exit ( 1 if none else 0 ) | Query Win32 API declarations . |
16,107 | def kinds ( context , show_all , ids_or_names ) : logging . info ( _ ( 'Entering kind mode' ) ) logging . debug ( 'args: %s' , ids_or_names ) sense = context . obj [ 'sense' ] none = True if show_all : none = False print ( sense . query_kinds ( None ) ) else : for id_or_name in ids_or_names : id_name = sense . query_kinds ( id_or_name ) if id_name : none = False print ( id_name ) sys . exit ( 1 if none else 0 ) | Operate on IntelliSense kind ids and names . |
16,108 | def export ( context , keywords , module , update ) : logging . info ( _ ( 'Export Mode' ) ) database = context . obj [ 'sense' ] none = True if update : exports = OrderedDict ( ) from . executables . pe import PE for filename in keywords : module = split_ext ( filename , basename = True ) [ 0 ] with open ( filename , 'rb' ) as stream : exports . update ( { module : PE ( stream ) . get_export_table ( ) } ) database . make_export ( exports ) none = False elif module : for module_name in keywords : funcs = database . query_module_funcs ( module_name ) if funcs : none = False print ( ', ' . join ( map ( str , funcs ) ) ) else : logging . warning ( _ ( 'No function for module: %s' ) , module_name ) else : for func_name in keywords : module_name = database . query_func_module ( func_name ) if module_name : none = False print ( repr ( module_name ) ) else : logging . warning ( _ ( 'No module for function: %s' ) , func_name ) sys . exit ( 1 if none else 0 ) | Operate on libraries and exported functions . |
16,109 | def add ( context , filenames ) : logging . info ( _ ( 'Current Mode: Add Linux data' ) ) context . obj [ 'database' ] . add_data ( filenames ) sys . exit ( 0 ) | Add data on Linux system calls . |
16,110 | def make ( filenames , x64 , cl_args , link_args , output ) : from . msbuild import Builder builder = Builder ( ) builder . build ( list ( filenames ) , x64 = x64 , cl_args = cl_args , link_args = link_args , out_dir = output ) | Make binaries from sources . |
16,111 | def info ( context , keywords , x86 , x64 , x32 , common ) : logging . info ( _ ( 'Current Mode: Find in Linux' ) ) database = context . obj [ 'database' ] for one in keywords : abis = [ 'i386' , 'x64' , 'common' , 'x32' ] if x86 : abis = [ 'i386' ] if x64 : abis = [ 'x64' , 'common' ] if x32 : abis = [ 'x32' , 'common' ] if common : abis = [ 'common' ] items = database . query_item ( one , abis ) if not items : logging . warning ( _ ( 'Item not found: %s %s' ) , one , abis ) continue for item in items : print ( item . name , item . abi , item . number ) decl = database . query_decl ( name = item . name ) if not decl : logging . warning ( _ ( 'Decl not found: %s' ) , item . name ) continue for one in decl : print ( one . decl ( ) , '/* {} */' . format ( one . filename ) ) sys . exit ( 0 ) | Find in the Linux system calls . |
16,112 | def conv ( arg , source , target , filename , section ) : logging . info ( _ ( 'This is Binary Conversion mode.' ) ) section = section . encode ( 'utf-8' ) if source == 'sec' : arg = open ( arg , 'rb' ) if source == 'sec' : kwargs = dict ( section_name = section ) else : kwargs = dict ( ) result = Converter . uni_from ( source , arg , ** kwargs ) . uni_to ( target ) if result : if filename : logging . info ( _ ( 'Writing shellcode to the file: %s' ) , filename ) mode = 'wb' if target == 'bin' else 'w' with open ( filename , mode ) as output : output . write ( result ) else : print ( result ) else : logging . error ( _ ( 'Failed.' ) ) if source == 'sec' : arg . close ( ) return 0 | Convert binary . |
16,113 | def get_source ( label , source_type , ** kwargs ) : if source_type not in yapconf . ALL_SUPPORTED_SOURCES : raise YapconfSourceError ( 'Invalid source type %s. Supported types are %s.' % ( source_type , yapconf . ALL_SUPPORTED_SOURCES ) ) if source_type not in yapconf . SUPPORTED_SOURCES : raise YapconfSourceError ( 'Unsupported source type "%s". If you want to use this type, you ' 'will need to install the correct client for it (try `pip install ' 'yapconf[%s]. Currently supported types are %s. All supported ' 'types are %s' % ( source_type , source_type , yapconf . SUPPORTED_SOURCES , yapconf . ALL_SUPPORTED_SOURCES ) ) if source_type == 'dict' : return DictConfigSource ( label , data = kwargs . get ( 'data' ) ) elif source_type == 'json' : return JsonConfigSource ( label , ** kwargs ) elif source_type == 'yaml' : filename = kwargs . get ( 'filename' ) if 'filename' in kwargs : kwargs . pop ( 'filename' ) return YamlConfigSource ( label , filename , ** kwargs ) elif source_type == 'environment' : return EnvironmentConfigSource ( label ) elif source_type == 'etcd' : return EtcdConfigSource ( label , kwargs . get ( 'client' ) , kwargs . get ( 'key' , '/' ) ) elif source_type == 'kubernetes' : name = kwargs . get ( 'name' ) if 'name' in kwargs : kwargs . pop ( 'name' ) client = kwargs . get ( 'client' ) if 'client' in kwargs : kwargs . pop ( 'client' ) return KubernetesConfigSource ( label , client , name , ** kwargs ) else : raise NotImplementedError ( 'No implementation for source type %s' % source_type ) | Get a config source based on type and keyword args . |
16,114 | def make_simple_merged_vcf_with_no_combinations ( self , ref_seq ) : if len ( self ) <= 1 : return merged_vcf_record = self . vcf_records [ 0 ] for i in range ( 1 , len ( self . vcf_records ) , 1 ) : if self . vcf_records [ i ] . intersects ( merged_vcf_record ) : return else : merged_vcf_record = merged_vcf_record . merge ( self . vcf_records [ i ] , ref_seq ) self . vcf_records = [ merged_vcf_record ] | Does a simple merging of all variants in this cluster . Assumes one ALT in each variant . Uses the ALT for each variant making one new vcf_record that has all the variants put together |
16,115 | def make_simple_gt_aware_merged_vcf_with_no_combinations ( self , ref_seq ) : if len ( self ) <= 1 : return merged_vcf_record = self . vcf_records [ 0 ] for i in range ( 1 , len ( self . vcf_records ) , 1 ) : if self . vcf_records [ i ] . intersects ( merged_vcf_record ) : return else : merged_vcf_record = merged_vcf_record . gt_aware_merge ( self . vcf_records [ i ] , ref_seq ) self . vcf_records = [ merged_vcf_record ] | Does a simple merging of all variants in this cluster . Assumes one ALT in each variant . Uses the called allele for each variant making one new vcf_record that has all the variants put together |
16,116 | def make_separate_indels_and_one_alt_with_all_snps_no_combinations ( self , ref_seq ) : final_start_position = min ( [ x . POS for x in self . vcf_records ] ) final_end_position = max ( [ x . ref_end_pos ( ) for x in self . vcf_records ] ) snps = [ ] new_vcf_records = [ ] for record in self . vcf_records : if record . is_snp ( ) : snps . append ( copy . copy ( record ) ) else : new_record = copy . copy ( record ) new_record . add_flanking_seqs ( ref_seq , final_start_position , final_end_position ) new_vcf_records . append ( new_record ) if len ( snps ) : new_record = copy . copy ( snps [ 0 ] ) for snp in snps [ 1 : ] : merged = new_record . merge ( snp , ref_seq ) if merged is not None : new_record = merged new_record . add_flanking_seqs ( ref_seq , final_start_position , final_end_position ) new_vcf_records . append ( new_record ) alts = ',' . join ( sorted ( list ( set ( [ x . ALT [ 0 ] for x in new_vcf_records ] ) ) ) ) new_record = vcf_record . VcfRecord ( '\t' . join ( [ self . vcf_records [ 0 ] . CHROM , str ( final_start_position + 1 ) , '.' , new_vcf_records [ 0 ] . REF , alts , '.' , 'PASS' , '.' ] ) ) return new_record | Returns a VCF record where each indel from this cluster is in a separate ALT . Then all the remaining SNPs are applied to make one ALT . If > 1 SNP in same place either one might be used |
16,117 | def _get_header ( self ) : out = self . _get_row ( self . labels ) out += "\n" out += self . _get_row ( [ "---" ] * len ( self . labels ) ) return out | Gets header of table |
16,118 | def setModel ( self , model ) : self . paramList . setModel ( model ) model . hintRequested . connect ( self . hintRequested ) model . rowsInserted . connect ( self . updateTitle ) model . rowsRemoved . connect ( self . updateTitle ) self . updateTitle ( ) | sets the model for the auto parameters |
16,119 | def updateTitle ( self ) : title = 'Auto Parameters ({})' . format ( self . paramList . model ( ) . rowCount ( ) ) self . titleChange . emit ( title ) self . setWindowTitle ( title ) | Updates the Title of this widget according to how many parameters are currently in the model |
16,120 | def showEvent ( self , event ) : selected = self . paramList . selectedIndexes ( ) model = self . paramList . model ( ) self . visibilityChanged . emit ( 1 ) if len ( selected ) > 0 : self . paramList . parameterChanged . emit ( model . selection ( selected [ 0 ] ) ) self . hintRequested . emit ( 'Select parameter to edit. \n\nParameter must have selected components in order to edit fields' ) elif model . rowCount ( ) > 0 : self . paramList . selectRow ( 0 ) self . paramList . parameterChanged . emit ( model . selection ( model . index ( 0 , 0 ) ) ) self . hintRequested . emit ( 'Select parameter to edit. \n\nParameter must have selected components in order to edit fields' ) else : model . emptied . emit ( True ) self . hintRequested . emit ( 'To add a parameter, Drag "Add" onto empty auto-parameter table' ) | When this widget is shown it has an effect of putting other widgets in the parent widget into different editing modes emits signal to notify other widgets . Restores the previous selection the last time this widget was visible |
16,121 | def closeEvent ( self , event ) : self . visibilityChanged . emit ( 0 ) model = self . paramList . model ( ) model . hintRequested . disconnect ( ) model . rowsInserted . disconnect ( ) model . rowsRemoved . disconnect ( ) | Emits a signal to update start values on components |
16,122 | def authenticate_with_access_token ( access_token ) : credentials = Credentials ( access_token = access_token ) client = YamcsClient ( 'localhost:8090' , credentials = credentials ) for link in client . list_data_links ( 'simulator' ) : print ( link ) | Authenticate using an existing access token . |
16,123 | def pretty_format_table ( labels , data , num_format = "{:.3f}" , line_separator = "\n" ) : table = SqlTable ( labels , data , num_format , line_separator ) return table . build ( ) | Parses and creates pretty table |
16,124 | def _parse ( self ) : for i in range ( len ( self . data ) ) : self . _parse_row ( i ) | Parses raw data |
16,125 | def _calculate_optimal_column_widths ( self ) : columns = len ( self . data [ 0 ] ) str_labels = [ parse_colorama ( str ( l ) ) for l in self . labels ] str_data = [ [ parse_colorama ( str ( col ) ) for col in row ] for row in self . data ] widths = [ 0 ] * columns for row in str_data : widths = [ max ( w , len ( c ) ) for w , c in zip ( widths , row ) ] for col , label in enumerate ( str_labels ) : if len ( label ) > widths [ col ] : widths [ col ] = len ( label ) self . widths = widths | Calculates widths of columns |
16,126 | def get_blank_row ( self , filler = "-" , splitter = "+" ) : return self . get_pretty_row ( [ "" for _ in self . widths ] , filler , splitter , ) | Gets blank row |
16,127 | def build ( self ) : self . _calculate_optimal_column_widths ( ) pretty_table = self . get_blank_row ( ) + self . new_line pretty_table += self . pretty_format_row ( self . labels ) + self . new_line pretty_table += self . get_blank_row ( ) + self . new_line for row in self . data : pretty_table += self . pretty_format_row ( row ) + self . new_line pretty_table += self . get_blank_row ( ) return pretty_table | Builds pretty - formatted table |
16,128 | def from_df ( data_frame ) : labels = data_frame . keys ( ) . tolist ( ) data = data_frame . values . tolist ( ) return SqlTable ( labels , data , "{:.3f}" , "\n" ) | Parses data and builds an instance of this class |
16,129 | def editheaders ( ) : for line in fileinput . input ( ) : try : columns = line . split ( '\t' ) flag = int ( columns [ 1 ] ) columns [ 1 ] = str ( ( flag - 256 ) if ( flag & 256 ) else flag ) sys . stdout . write ( '\t' . join ( columns ) ) except ( IOError , ValueError ) : sys . stdout . write ( line ) pass try : sys . stdout . flush ( ) sys . stdout . close ( ) except : pass try : sys . stderr . close ( ) except : pass | Edits the headers of SAM files to remove secondary alignments |
16,130 | def ref_string_matches_ref_sequence ( self , ref_sequence ) : if self . POS < 0 : return False end_pos = self . ref_end_pos ( ) if end_pos >= len ( ref_sequence ) : return False return self . REF == ref_sequence [ self . POS : end_pos + 1 ] | Returns true iff the REF string in the record agrees with the given ref_sequence |
16,131 | def ref_string_matches_dict_of_ref_sequences ( self , ref_sequences ) : return self . CHROM in ref_sequences and self . ref_string_matches_ref_sequence ( ref_sequences [ self . CHROM ] ) | Returns true iff there is a sequence called self . CHROM in the dict of ref_sequences and the REF string matches |
16,132 | def is_snp ( self ) : nucleotides = { 'A' , 'C' , 'G' , 'T' } return len ( self . REF ) == 1 and self . REF in nucleotides and set ( self . ALT ) . issubset ( nucleotides ) | Returns true iff this variant is a SNP |
16,133 | def add_flanking_seqs ( self , ref_seq , new_start , new_end ) : if new_start > self . POS or new_end < self . ref_end_pos ( ) : raise Error ( 'new start and end positions must not try to shrink VCF record. new_start=' + str ( new_start ) + ', new_end=' + str ( new_end ) + '. VCF=' + str ( self ) ) new_start_nucleotides = ref_seq [ new_start : self . POS ] new_end_nucleotodes = ref_seq [ self . ref_end_pos ( ) + 1 : new_end + 1 ] self . POS = new_start self . REF = new_start_nucleotides + self . REF + new_end_nucleotodes self . ALT = [ new_start_nucleotides + x + new_end_nucleotodes for x in self . ALT ] | Adds new_start many nucleotides at the start and new_end many nucleotides at the end from the appropriate nucleotides in reference sequence ref_seq . |
16,134 | def remove_useless_start_nucleotides ( self ) : if len ( self . REF ) == 1 or len ( self . ALT ) != 1 : return i = 0 while i < len ( self . REF ) and i < len ( self . ALT [ 0 ] ) and self . REF [ i ] == self . ALT [ 0 ] [ i ] : i += 1 if i > 0 : self . REF = self . REF [ i - 1 : ] self . ALT = [ self . ALT [ 0 ] [ i - 1 : ] ] self . POS += i - 1 | Removes duplicated nucleotides at the start of REF and ALT . But always leaves at least one nucleotide in each of REF and ALT . eg if variant is at position 42 REF = GCTGA ALT = GCA then sets position = 41 REF = CTGA ALT = CA . Assumes only one ALT and does nothing if there is > 1 ALT |
16,135 | def inferred_var_seqs_plus_flanks ( self , ref_seq , flank_length ) : flank_start = max ( 0 , self . POS - flank_length ) flank_end = min ( len ( ref_seq ) - 1 , self . ref_end_pos ( ) + flank_length ) seqs = [ ref_seq [ flank_start : self . POS ] + self . REF + ref_seq [ self . ref_end_pos ( ) + 1 : flank_end + 1 ] ] for alt in self . ALT : seqs . append ( ref_seq [ flank_start : self . POS ] + alt + ref_seq [ self . ref_end_pos ( ) + 1 : flank_end + 1 ] ) return flank_start , seqs | Returns start position of first flank sequence plus a list of sequences - the REF plus one for each ALT . sequence . Order same as in ALT column |
16,136 | def total_coverage ( self ) : if 'COV' in self . FORMAT : return sum ( [ int ( x ) for x in self . FORMAT [ 'COV' ] . split ( ',' ) ] ) else : return None | Returns the sum of COV data if present . Otherwise returns None |
16,137 | def set_parent ( self , child , parent ) : parents = cmds . listConnections ( "%s.parent" % child , plugs = True , source = True ) if parents : cmds . disconnectAttr ( "%s.parent" % child , "%s" % parents [ 0 ] ) if parent : cmds . connectAttr ( "%s.parent" % child , "%s.children" % parent , force = True , nextAvailable = True ) | Set the parent of the child reftrack node |
16,138 | def get_children ( self , refobj ) : children = cmds . listConnections ( "%s.children" % refobj , d = False ) if not children : children = [ ] return children | Get the children reftrack nodes of the given node |
16,139 | def get_typ ( self , refobj ) : enum = cmds . getAttr ( "%s.type" % refobj ) try : return JB_ReftrackNode . types [ enum ] except IndexError : raise ValueError ( "The type on the node %s could not be associated with an available type: %s" % ( refobj , JB_ReftrackNode . types ) ) | Return the entity type of the given reftrack node |
16,140 | def set_typ ( self , refobj , typ ) : try : enum = JB_ReftrackNode . types . index ( typ ) except ValueError : raise ValueError ( "The given type %s could not be found in available types: %" % ( typ , JB_ReftrackNode . types ) ) cmds . setAttr ( "%s.type" % refobj , enum ) | Set the type of the given refobj |
16,141 | def create_refobj ( self , ) : n = cmds . createNode ( "jb_reftrack" ) cmds . lockNode ( n , lock = True ) return n | Create and return a new reftrack node |
16,142 | def referenced_by ( self , refobj ) : try : ref = cmds . referenceQuery ( refobj , referenceNode = True ) return ref except RuntimeError as e : if str ( e ) . endswith ( "' is not from a referenced file.\n" ) : return None else : raise e | Return the reference that holds the given reftrack node . |
16,143 | def delete_refobj ( self , refobj ) : with common . locknode ( refobj , lock = False ) : cmds . delete ( refobj ) | Delete the given reftrack node |
16,144 | def get_current_element ( self , ) : n = jbscene . get_current_scene_node ( ) if not n : return None tfid = cmds . getAttr ( "%s.taskfile_id" % n ) try : tf = djadapter . taskfiles . get ( pk = tfid ) return tf . task . element except djadapter . models . TaskFile . DoesNotExist : raise djadapter . models . TaskFile . DoesNotExist ( "Could not find the taskfile that was set on the scene node. Id was %s" % tfid ) | Return the currently open Shot or Asset |
16,145 | def set_reference ( self , refobj , reference ) : refnodeattr = "%s.referencenode" % refobj if reference : cmds . connectAttr ( "%s.message" % reference , refnodeattr , force = True ) ns = cmds . referenceQuery ( reference , namespace = True ) cmds . setAttr ( "%s.namespace" % refobj , ns , type = "string" ) else : conns = cmds . listConnections ( refnodeattr , plugs = True ) if not conns : return for c in conns : cmds . disconnectAttr ( c , refnodeattr ) | Connect the given reftrack node with the given refernce node |
16,146 | def get_reference ( self , refobj ) : c = cmds . listConnections ( "%s.referencenode" % refobj , d = False ) return c [ 0 ] if c else None | Return the reference node that the reftrack node is connected to or None if it is imported . |
16,147 | def get_status ( self , refobj ) : reference = self . get_reference ( refobj ) return Reftrack . IMPORTED if not reference else Reftrack . LOADED if cmds . referenceQuery ( reference , isLoaded = True ) else Reftrack . UNLOADED | Return the status of the given reftrack node |
16,148 | def get_taskfile ( self , refobj ) : tfid = cmds . getAttr ( "%s.taskfile_id" % refobj ) try : return djadapter . taskfiles . get ( pk = tfid ) except djadapter . models . TaskFile . DoesNotExist : raise djadapter . models . TaskFile . DoesNotExist ( "Could not find the taskfile that was set on the node %s. Id was %s" % ( refobj , tfid ) ) | Return the taskfile that is loaded and represented by the refobj |
16,149 | def connect_reftrack_scenenode ( self , refobj , scenenode ) : conns = [ ( "%s.scenenode" % refobj , "%s.reftrack" % scenenode ) , ( "%s.taskfile_id" % scenenode , "%s.taskfile_id" % refobj ) ] for src , dst in conns : if not cmds . isConnected ( src , dst ) : cmds . connectAttr ( src , dst , force = True ) | Connect the given reftrack node with the given scene node |
16,150 | def get_search_page ( self , query ) : query_web_page = Webpage ( self . url + self . parse_query ( query ) ) query_web_page . get_html_source ( ) return query_web_page . source | Gets HTML source |
16,151 | def set ( constants ) : if not constants : return constants = wrap ( constants ) for k , new_value in constants . leaves ( ) : errors = [ ] try : old_value = mo_dots_set_attr ( sys . modules , k , new_value ) continue except Exception as e : errors . append ( e ) try : caller_globals = sys . _getframe ( 1 ) . f_globals caller_file = caller_globals [ "__file__" ] if not caller_file . endswith ( ".py" ) : raise Exception ( "do not know how to handle non-python caller" ) caller_module = caller_file [ : - 3 ] . replace ( "/" , "." ) path = split_field ( k ) for i , p in enumerate ( path ) : if i == 0 : continue prefix = join_field ( path [ : 1 ] ) name = join_field ( path [ i : ] ) if caller_module . endswith ( prefix ) : old_value = mo_dots_set_attr ( caller_globals , name , new_value ) if DEBUG : from mo_logs import Log Log . note ( "Changed {{module}}[{{attribute}}] from {{old_value}} to {{new_value}}" , module = prefix , attribute = name , old_value = old_value , new_value = new_value ) break except Exception as e : errors . append ( e ) if errors : from mo_logs import Log Log . error ( "Can not set constant {{path}}" , path = k , cause = errors ) | REACH INTO THE MODULES AND OBJECTS TO SET CONSTANTS . THINK OF THIS AS PRIMITIVE DEPENDENCY INJECTION FOR MODULES . USEFUL FOR SETTING DEBUG FLAGS . |
16,152 | def values ( self ) : lower = float ( self . lowerSpnbx . value ( ) ) upper = float ( self . upperSpnbx . value ( ) ) return ( lower , upper ) | Gets the user enter max and min values of where the raster points should appear on the y - axis |
16,153 | def _delete ( self , ) : for k in self . keys ( ) : try : self [ k ] . _delete ( ) except KeyError : pass if self . __parent is not None : del self . __parent [ self . __name ] self . __parent = None cmds . deleteUI ( self . __menustring ) | Delete the menu and remove it from parent |
16,154 | def create_menu ( self , name , parent = None , ** kwargs ) : m = Menu ( name , parent , ** kwargs ) if parent is None : self . menus [ name ] = m return m | Creates a maya menu or menu item |
16,155 | def delete_menu ( self , menu ) : if menu . parent is None : del self . menus [ menu . name ( ) ] menu . _delete ( ) | Delete the specified menu |
16,156 | def delete_all_menus ( self , ) : for m in self . menus . itervalues ( ) : m . _delete ( ) self . menus . clear ( ) | Delete all menues managed by this manager |
16,157 | def add_mismatch ( self , entity , * traits ) : for trait in traits : self . index [ trait ] . add ( entity ) | Add a mismatching entity to the index . |
16,158 | def add_match ( self , entity , * traits ) : for trait in traits : if trait not in self . index : self . index [ trait ] = self . mismatch_unknown . copy ( ) for existing_trait in self . index : if existing_trait not in traits : self . index [ existing_trait ] . add ( entity ) self . mismatch_unknown . add ( entity ) | Add a matching entity to the index . |
16,159 | def hist_axis_func ( axis_type : enum . Enum ) -> Callable [ [ Hist ] , Axis ] : def axis_func ( hist : Hist ) -> Axis : try : hist_axis_type = axis_type . value except AttributeError : hist_axis_type = axis_type if hasattr ( hist , "ProjectionND" ) and hasattr ( hist , "Projection" ) : return hist . GetAxis ( hist_axis_type ) else : axis_function_map = { TH1AxisType . x_axis . value : hist . GetXaxis , TH1AxisType . y_axis . value : hist . GetYaxis , TH1AxisType . z_axis . value : hist . GetZaxis } return_func = axis_function_map [ hist_axis_type ] return return_func ( ) return axis_func | Wrapper to retrieve the axis of a given histogram . |
16,160 | def axis ( self ) -> Callable [ [ Any ] , Any ] : axis_func = hist_axis_func ( axis_type = self . axis_type ) return axis_func | Determine the axis to return based on the hist type . |
16,161 | def apply_range_set ( self , hist : Hist ) -> None : axis = self . axis ( hist ) assert not isinstance ( self . min_val , float ) assert not isinstance ( self . max_val , float ) min_val = self . min_val ( axis ) max_val = self . max_val ( axis ) self . axis ( hist ) . SetRange ( min_val , max_val ) | Apply the associated range set to the axis of a given hist . |
16,162 | def apply_func_to_find_bin ( func : Union [ None , Callable [ ... , Union [ float , int , Any ] ] ] , values : Optional [ float ] = None ) -> Callable [ [ Any ] , Union [ float , int ] ] : def return_func ( axis ) -> Any : if func : if values is not None : return func ( axis , values ) else : return func ( axis ) else : return values return return_func | Closure to determine the bin associated with a value on an axis . |
16,163 | def call_projection_function ( self , hist : Hist ) -> Hist : for axis in self . projection_axes : logger . debug ( f"Apply projection axes hist range: {axis.name}" ) axis . apply_range_set ( hist ) projected_hist = None if hasattr ( hist , "ProjectionND" ) and hasattr ( hist , "Projection" ) : projected_hist = self . _project_THn ( hist = hist ) elif hasattr ( hist , "ProjectionZ" ) and hasattr ( hist , "Project3D" ) : projected_hist = self . _project_TH3 ( hist = hist ) elif hasattr ( hist , "ProjectionX" ) and hasattr ( hist , "ProjectionY" ) : projected_hist = self . _project_TH2 ( hist = hist ) else : raise TypeError ( type ( hist ) , f"Could not recognize hist {hist} of type {type(hist)}" ) self . cleanup_cuts ( hist , cut_axes = self . projection_axes ) return projected_hist | Calls the actual projection function for the hist . |
16,164 | def _project_THn ( self , hist : Hist ) -> Any : projection_axes = [ axis . axis_type . value for axis in self . projection_axes ] if len ( projection_axes ) == 2 : projection_axes . reverse ( ) args = projection_axes + [ "E" ] logger . debug ( f"hist: {hist.GetName()} args: {args}" ) if len ( projection_axes ) > 3 : projected_hist = hist . ProjectionND ( * args ) else : projected_hist = hist . Projection ( * args ) return projected_hist | Perform the actual THn - > THn or TH1 projection . |
16,165 | def _project_TH3 ( self , hist : Hist ) -> Any : if len ( self . projection_axes ) < 1 or len ( self . projection_axes ) > 2 : raise ValueError ( len ( self . projection_axes ) , "Invalid number of axes" ) projection_axis_name = "" for axis in self . projection_axes : proj_axis_name = axis . axis_type . name [ : 1 ] if proj_axis_name not in [ "x" , "y" , "z" ] : raise ValueError ( f"Projection axis name {proj_axis_name} is not 'x', 'y', or 'z'. Please check your configuration." ) projection_axis_name += proj_axis_name if len ( self . projection_axes ) == 2 : projection_axis_name = projection_axis_name [ : : - 1 ] logger . info ( f"Projecting onto axes \"{projection_axis_name}\" from hist {hist.GetName()}" ) projected_hist = hist . Project3D ( projection_axis_name ) return projected_hist | Perform the actual TH3 - > TH1 projection . |
16,166 | def _project_TH2 ( self , hist : Hist ) -> Any : if len ( self . projection_axes ) != 1 : raise ValueError ( len ( self . projection_axes ) , "Invalid number of axes" ) projection_func_map = { TH1AxisType . x_axis . value : hist . ProjectionX , TH1AxisType . y_axis . value : hist . ProjectionY } try : axis_type = self . projection_axes [ 0 ] . axis_type . value except ValueError : axis_type = self . axis_type projection_func = projection_func_map [ axis_type ] logger . info ( f"Projecting onto axis range {self.projection_axes[0].name} from hist {hist.GetName()}" ) projected_hist = projection_func ( ) return projected_hist | Perform the actual TH2 - > TH1 projection . |
16,167 | def _project_observable ( self , input_key : str , input_observable : Any , get_hist_args : Dict [ str , Any ] = None , projection_name_args : Dict [ str , Any ] = None , ** kwargs ) -> Hist : if get_hist_args is None : get_hist_args = copy . deepcopy ( kwargs ) if projection_name_args is None : projection_name_args = copy . deepcopy ( kwargs ) get_hist_args . update ( { "observable" : input_observable } ) hist = self . get_hist ( ** get_hist_args ) projection_name_args . update ( self . projection_information ) projection_name_args . update ( kwargs ) projection_name_args . update ( { "input_key" : input_key , "input_observable" : input_observable , "input_hist" : hist } ) projection_name = self . projection_name ( ** projection_name_args ) logger . debug ( f"hist: {hist}" ) for axis in self . additional_axis_cuts : logger . debug ( f"Apply additional axis hist range: {axis.name}" ) axis . apply_range_set ( hist ) if self . projection_dependent_cut_axes == [ ] : self . projection_dependent_cut_axes . append ( [ ] ) duplicated_axes = [ PDCA for PA in self . projection_axes for PDCA_group in self . projection_dependent_cut_axes for PDCA in PDCA_group if PDCA . axis_type == PA . axis_type ] if duplicated_axes : raise ValueError ( f"Axis {duplicated_axes} is in the projection axes and the projection dependent cut axes." " This configuration is not allowed, as the range in the PDCA will be overwritten by the projection axes!" " Please revise your configuration." ) hists = [ ] for i , axes in enumerate ( self . projection_dependent_cut_axes ) : for axis in axes : logger . debug ( f"Apply projection dependent hist range: {axis.name}" ) axis . apply_range_set ( hist ) projected_hist = self . call_projection_function ( hist ) projected_hist . SetName ( f"{projection_name}_{i}" ) hists . append ( projected_hist ) self . cleanup_cuts ( hist , cut_axes = axes ) self . cleanup_cuts ( hist , cut_axes = self . additional_axis_cuts ) output_hist = hists [ 0 ] for temp_hist in hists [ 1 : ] : output_hist . Add ( temp_hist ) output_hist . SetName ( projection_name ) output_hist . SetDirectory ( 0 ) return output_hist , projection_name , projection_name_args | Perform a projection for a single observable . |
16,168 | def _project_single_observable ( self , ** kwargs : Dict [ str , Any ] ) -> Hist : assert isinstance ( self . output_attribute_name , str ) output_hist , projection_name , projection_name_args , = self . _project_observable ( input_key = "single_observable" , input_observable = self . observable_to_project_from , ** kwargs , ) output_hist_args = projection_name_args output_hist_args . update ( { "output_hist" : output_hist , "projection_name" : projection_name } ) output_hist = self . output_hist ( ** output_hist_args ) if not hasattr ( self . output_observable , self . output_attribute_name ) : raise ValueError ( f"Attempted to assign hist to non-existent attribute {self.output_attribute_name} of object {self.output_observable}. Check the attribute name!" ) setattr ( self . output_observable , self . output_attribute_name , output_hist ) return output_hist | Driver function for projecting and storing a single observable . |
16,169 | def _project_dict ( self , ** kwargs : Dict [ str , Any ] ) -> Dict [ str , Hist ] : get_hist_args = copy . deepcopy ( kwargs ) projection_name_args = copy . deepcopy ( kwargs ) for key , input_observable in self . observable_to_project_from . items ( ) : output_hist , projection_name , projection_name_args , = self . _project_observable ( input_key = key , input_observable = input_observable , get_hist_args = get_hist_args , projection_name_args = projection_name_args , ** kwargs , ) output_hist_args = projection_name_args output_hist_args . update ( { "output_hist" : output_hist , "projection_name" : projection_name } ) output_key_name = self . output_key_name ( ** output_hist_args ) self . output_observable [ output_key_name ] = self . output_hist ( ** output_hist_args ) return self . output_observable | Driver function for projecting and storing a dictionary of observables . |
16,170 | def cleanup_cuts ( self , hist : Hist , cut_axes : Iterable [ HistAxisRange ] ) -> None : for axis in cut_axes : axis . axis ( hist ) . SetRange ( 1 , axis . axis ( hist ) . GetNbins ( ) ) | Cleanup applied cuts by resetting the axis to the full range . |
16,171 | def projection_name ( self , ** kwargs : Dict [ str , Any ] ) -> str : return self . projection_name_format . format ( ** kwargs ) | Define the projection name for this projector . |
16,172 | def get_hist ( self , observable : Any , ** kwargs : Dict [ str , Any ] ) -> Any : return observable | Get the histogram that may be stored in some object . |
16,173 | def output_key_name ( self , input_key : str , output_hist : Hist , projection_name : str , ** kwargs ) -> str : return projection_name | Returns the key under which the output object should be stored . |
16,174 | def output_hist ( self , output_hist : Hist , input_observable : Any , ** kwargs : Dict [ str , Any ] ) -> Union [ Hist , Any ] : return output_hist | Return an output object . It should store the output_hist . |
16,175 | def run ( ) : args = parse_args ( ) codetools . setup_logging ( args . debug ) global g g = pygithub . login_github ( token_path = args . token_path , token = args . token ) org = g . get_organization ( args . org ) try : teams = list ( org . get_teams ( ) ) except github . RateLimitExceededException : raise except github . GithubException as e : msg = 'error getting teams' raise pygithub . CaughtOrganizationError ( org , e , msg ) from None old_team = find_team ( teams , args . oldteam ) new_team = find_team ( teams , args . newteam ) move_me = args . repos debug ( len ( move_me ) , 'repos to be moved' ) added = [ ] removed = [ ] for name in move_me : try : r = org . get_repo ( name ) except github . RateLimitExceededException : raise except github . GithubException as e : msg = "error getting repo by name: {r}" . format ( r = name ) raise pygithub . CaughtOrganizationError ( org , e , msg ) from None debug ( "Adding {repo} to '{team}' ..." . format ( repo = r . full_name , team = args . newteam ) ) if not args . dry_run : try : new_team . add_to_repos ( r ) added += r . full_name debug ( ' ok' ) except github . RateLimitExceededException : raise except github . GithubException : debug ( ' FAILED' ) if old_team . name in 'Owners' : warn ( "Removing repo {repo} from team 'Owners' is not allowed" . format ( repo = r . full_name ) ) debug ( "Removing {repo} from '{team}' ..." . format ( repo = r . full_name , team = args . oldteam ) ) if not args . dry_run : try : old_team . remove_from_repos ( r ) removed += r . full_name debug ( ' ok' ) except github . RateLimitExceededException : raise except github . GithubException : debug ( ' FAILED' ) info ( 'Added:' , added ) info ( 'Removed:' , removed ) | Move the repos |
16,176 | def poll_values ( ) : subscription = processor . create_parameter_subscription ( [ '/YSS/SIMULATOR/BatteryVoltage1' ] ) sleep ( 5 ) print ( 'Latest value:' ) print ( subscription . get_value ( '/YSS/SIMULATOR/BatteryVoltage1' ) ) sleep ( 5 ) print ( 'Latest value:' ) print ( subscription . get_value ( '/YSS/SIMULATOR/BatteryVoltage1' ) ) | Shows how to poll values from the subscription . |
16,177 | def receive_callbacks ( ) : def print_data ( data ) : for parameter in data . parameters : print ( parameter ) processor . create_parameter_subscription ( '/YSS/SIMULATOR/BatteryVoltage1' , on_data = print_data ) sleep ( 5 ) | Shows how to receive callbacks on value updates . |
16,178 | def manage_subscription ( ) : subscription = processor . create_parameter_subscription ( [ '/YSS/SIMULATOR/BatteryVoltage1' ] ) sleep ( 5 ) print ( 'Adding extra items to the existing subscription...' ) subscription . add ( [ '/YSS/SIMULATOR/Alpha' , '/YSS/SIMULATOR/BatteryVoltage2' , 'MDB:OPS Name/SIMULATOR_PrimBusVoltage1' , ] ) sleep ( 5 ) print ( 'Shrinking subscription...' ) subscription . remove ( '/YSS/SIMULATOR/Alpha' ) print ( 'Cancelling the subscription...' ) subscription . cancel ( ) print ( 'Last values from cache:' ) print ( subscription . get_value ( '/YSS/SIMULATOR/BatteryVoltage1' ) ) print ( subscription . get_value ( '/YSS/SIMULATOR/BatteryVoltage2' ) ) print ( subscription . get_value ( '/YSS/SIMULATOR/Alpha' ) ) print ( subscription . get_value ( 'MDB:OPS Name/SIMULATOR_PrimBusVoltage1' ) ) | Shows how to interact with a parameter subscription . |
16,179 | def setPixelScale ( self , pxms ) : pxms = float ( pxms ) / 2 self . pixelsPerms = pxms if pxms * self . gridms < GRID_PIXEL_MIN : self . gridms = self . gridms * 2 elif pxms * self . gridms > GRID_PIXEL_MAX : self . gridms = self . gridms / 2 self . _viewIsDirty = True self . viewport ( ) . update ( ) return self . gridms | Sets the zoom scale |
16,180 | def indexXY ( self , index ) : rect = self . visualRect ( index ) return rect . x ( ) , rect . y ( ) | Returns the top left coordinates of the item for the given index |
16,181 | def mouseDoubleClickEvent ( self , event ) : if self . mode == BuildMode : if event . button ( ) == QtCore . Qt . LeftButton : index = self . indexAt ( event . pos ( ) ) self . edit ( index ) | Launches an editor for the component if the mouse cursor is over an item |
16,182 | def mousePressEvent ( self , event ) : if self . mode == BuildMode : super ( StimulusView , self ) . mousePressEvent ( event ) else : index = self . indexAt ( event . pos ( ) ) if index . isValid ( ) : self . selectionModel ( ) . select ( index , QtGui . QItemSelectionModel . Toggle ) comp = self . model ( ) . data ( index , AbstractDragView . DragRole ) self . componentSelected . emit ( comp ) self . hintRequested . emit ( 'Click components to toggle more members of auto-parameter\n\n-or-\n\nEdit fields of auto-parameter (parameter type should be selected first)' ) | In Auto - parameter selection mode mouse press over an item emits componentSelected |
16,183 | def visualRegionForSelection ( self , selection ) : region = QtGui . QRegion ( ) for index in selection . indexes ( ) : region = region . united ( self . _rects [ index . row ( ) ] [ index . column ( ) ] ) return region | Gets the region of all of the components in selection |
16,184 | def sizeHint ( self , option , index ) : component = index . internalPointer ( ) width = self . component . duration ( ) * self . pixelsPerms * 1000 return QtCore . QSize ( width , 50 ) | Size based on component duration and a fixed height |
16,185 | def get_namespace ( taskfileinfo ) : element = taskfileinfo . task . element name = element . name return name + "_1" | Return a suitable name for a namespace for the taskfileinfo |
16,186 | def get_groupname ( taskfileinfo ) : element = taskfileinfo . task . element name = element . name return name + "_grp" | Return a suitable name for a groupname for the given taskfileinfo . |
16,187 | def group_content ( content , namespace , grpname , grpnodetype ) : with common . preserve_namespace ( namespace ) : grpnode = cmds . createNode ( grpnodetype , name = grpname ) cmds . group ( content , uag = grpnode ) return grpnode | Group the given content in the given namespace under a node of type grpnodetype with the name grpname |
16,188 | def getLabelByName ( self , name ) : name = name . lower ( ) if name in self . stimLabels : return self . stimLabels [ name ] else : return None | Gets a label widget by it component name |
16,189 | def read_byte ( self ) : buf = b'' if len ( self . cookedq ) > 0 : buf = bytes ( [ self . cookedq [ 0 ] ] ) self . cookedq = self . cookedq [ 1 : ] else : yield from self . process_rawq ( ) if not self . eof : yield from self . fill_rawq ( ) yield from self . process_rawq ( ) buf = yield from self . read_byte ( ) return buf | Read one byte of cooked data |
16,190 | def read_line ( self ) : buf = b'' while not self . eof and buf . endswith ( b'\n' ) is False : buf += yield from self . read_byte ( ) if self . eof : buf = b'' buf = buf . replace ( b'\n' , b'' ) return buf | Read data until \ n is found |
16,191 | def getTzid ( tzid , smart = True ) : tz = __tzidMap . get ( toUnicode ( tzid ) , None ) if smart and tzid and not tz : try : from pytz import timezone , UnknownTimeZoneError try : tz = timezone ( tzid ) registerTzid ( toUnicode ( tzid ) , tz ) except UnknownTimeZoneError : pass except ImportError : pass return tz | Return the tzid if it exists or None . |
16,192 | def dateTimeToString ( dateTime , convertToUTC = False ) : if dateTime . tzinfo and convertToUTC : dateTime = dateTime . astimezone ( utc ) datestr = "{}{}{}T{}{}{}" . format ( numToDigits ( dateTime . year , 4 ) , numToDigits ( dateTime . month , 2 ) , numToDigits ( dateTime . day , 2 ) , numToDigits ( dateTime . hour , 2 ) , numToDigits ( dateTime . minute , 2 ) , numToDigits ( dateTime . second , 2 ) , ) if tzinfo_eq ( dateTime . tzinfo , utc ) : datestr += "Z" return datestr | Ignore tzinfo unless convertToUTC . Output string . |
16,193 | def getrruleset ( self , addRDate = False ) : rruleset = None for name in DATESANDRULES : addfunc = None for line in self . contents . get ( name , ( ) ) : if rruleset is None : rruleset = rrule . rruleset ( ) if addfunc is None : addfunc = getattr ( rruleset , name ) if name in DATENAMES : if type ( line . value [ 0 ] ) == datetime . datetime : map ( addfunc , line . value ) elif type ( line . value [ 0 ] ) == datetime . date : for dt in line . value : addfunc ( datetime . datetime ( dt . year , dt . month , dt . day ) ) else : pass elif name in RULENAMES : try : dtstart = self . dtstart . value except ( AttributeError , KeyError ) : try : if self . name == "VTODO" : dtstart = self . due . value else : print ( 'failed to get dtstart with VTODO' ) return None except ( AttributeError , KeyError ) : print ( 'failed to find DUE at all.' ) return None value = str_ ( line . value ) . replace ( '\\' , '' ) rule = rrule . rrulestr ( value , dtstart = dtstart ) until = rule . _until if until is not None and isinstance ( dtstart , datetime . datetime ) and ( until . tzinfo != dtstart . tzinfo ) : vals = dict ( pair . split ( '=' ) for pair in line . value . upper ( ) . split ( ';' ) ) if len ( vals . get ( 'UNTIL' , '' ) ) == 8 : until = datetime . datetime . combine ( until . date ( ) , dtstart . time ( ) ) if until . tzinfo is None : until = until . replace ( tzinfo = dtstart . tzinfo ) if dtstart . tzinfo is not None : until = until . astimezone ( dtstart . tzinfo ) if dtstart . tzinfo is None : until = until . replace ( tzinfo = None ) rule . _until = until addfunc ( rule ) if name == 'rrule' and addRDate : try : if not isinstance ( dtstart , datetime . datetime ) : adddtstart = datetime . datetime . fromordinal ( dtstart . toordinal ( ) ) else : adddtstart = dtstart if rruleset . _rrule [ - 1 ] [ 0 ] != adddtstart : rruleset . rdate ( adddtstart ) added = True else : added = False except IndexError : added = False if added and rruleset . _rrule [ - 1 ] . _count is not None : rruleset . _rrule [ - 1 ] . _count -= 1 return rruleset | Get an rruleset created from self . |
16,194 | def transformToNative ( obj ) : if obj . isNative : return obj obj . isNative = True if obj . value == '' : return obj obj . value = obj . value obj . value = parseDtstart ( obj ) if obj . value . tzinfo is None : obj . params [ 'X-VOBJ-FLOATINGTIME-ALLOWED' ] = [ 'TRUE' ] if obj . params . get ( 'TZID' ) : obj . params [ 'X-VOBJ-ORIGINAL-TZID' ] = [ obj . params [ 'TZID' ] ] del obj . params [ 'TZID' ] return obj | Turn obj . value into a datetime . |
16,195 | def transformFromNative ( cls , obj ) : if obj . isNative : obj . isNative = False tzid = TimezoneComponent . registerTzinfo ( obj . value . tzinfo ) obj . value = dateTimeToString ( obj . value , cls . forceUTC ) if not cls . forceUTC and tzid is not None : obj . tzid_param = tzid if obj . params . get ( 'X-VOBJ-ORIGINAL-TZID' ) : if not hasattr ( obj , 'tzid_param' ) : obj . tzid_param = obj . x_vobj_original_tzid_param del obj . params [ 'X-VOBJ-ORIGINAL-TZID' ] return obj | Replace the datetime in obj . value with an ISO 8601 string . |
16,196 | def get_currencies_info ( ) -> Element : response = requests . get ( const . CBRF_API_URLS [ 'info' ] ) return XML ( response . text ) | Get META information about currencies |
16,197 | def get_daily_rates ( date_req : datetime . datetime = None , lang : str = 'rus' ) -> Element : if lang not in [ 'rus' , 'eng' ] : raise ValueError ( '"lang" must be string. "rus" or "eng"' ) base_url = const . CBRF_API_URLS [ 'daily_rus' ] if lang == 'rus' else const . CBRF_API_URLS [ 'daily_eng' ] url = base_url + 'date_req=' + utils . date_to_str ( date_req ) if date_req else base_url response = requests . get ( url = url ) return XML ( response . text ) | Getting currency for current day . |
16,198 | def mangleIR ( data , ignore_errors = False ) : try : if isinstance ( data , bytes ) : data = data . decode ( 'ascii' ) data = data . strip ( ) times = [ int ( x , 16 ) for x in data . split ( ) [ 2 : ] ] minTime = min ( times [ 2 : - 1 ] ) maxTime = max ( times [ 2 : - 1 ] ) margin = ( maxTime - minTime ) / 2 + minTime return '' . join ( [ ( x < margin and 'S' or 'L' ) for x in times ] ) except : if not ignore_errors : raise | Mangle a raw Kira data packet into shorthand |
16,199 | def mangleNec ( code , freq = 40 ) : timings = [ ] for octet in binascii . unhexlify ( code . replace ( " " , "" ) ) : burst = lambda x : x and "0226 06AD" or "0226 0258" for bit in reversed ( "%08d" % int ( bin ( ord ( octet ) ) [ 2 : ] ) ) : bit = int ( bit ) timings . append ( burst ( bit ) ) return mangleIR ( "K %0X22 214d 10b3 " % freq + " " . join ( timings ) + " 0226 2000" ) | Convert NEC code to shorthand notation |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.