idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
16,000
def utf82unicode(value):
    """Convert bytes *value* to unicode, raising a detailed error on failure.

    On failure this never returns normally: it reports, via ``_Log.error``,
    which byte offset could not be decoded, or whether the data appears to
    be latin-1 instead of utf-8.
    """
    try:
        return value.decode("utf8")
    except Exception as e:
        if not _Log:
            _late_import()

        if not is_binary(value):
            _Log.error(
                "Can not convert {{type}} to unicode because it's not bytes",
                type=type(value).__name__,
            )

        e = _Except.wrap(e)
        # Locate the exact offending byte for the error report.
        for i, c in enumerate(value):
            try:
                c.decode("utf8")
            except Exception as f:
                _Log.error(
                    "Can not convert charcode {{c}} in string index {{i}}",
                    i=i,
                    c=ord(c),
                    cause=[e, _Except.wrap(f)],
                )

        # BUG FIX: this latin-1 probe was duplicated verbatim in the
        # original; a single attempt is sufficient.
        try:
            latin1 = text_type(value.decode("latin1"))
            _Log.error("Can not explain conversion failure, but seems to be latin1", e)
        except Exception:
            pass

        _Log.error("Can not explain conversion failure of " + type(value).__name__ + "!", e)
Convert bytes to unicode, with an explanation for any failure.
16,001
def setModel(self, model):
    """Set the QStimulusModel for this editor.

    Pulls the tone component out of the model, uses its auto_details()
    limits to bound the editor's spin boxes, and maps the auto-parameter
    model rows onto the frequency/intensity start/stop/step widgets.
    """
    self.stimModel = model
    self.parameterModel = model.autoParams()
    # The first cell of the stimulus model holds the tone component
    # (stored under Qt.UserRole + 1).
    tone = self.stimModel.data(self.stimModel.index(0, 0), QtCore.Qt.UserRole + 1)
    info = tone.auto_details()
    # Bound the editable ranges by the component's allowed limits.
    fmax = info['frequency']['max']
    self.ui.freqStartSpnbx.setMaximum(fmax)
    self.ui.freqStopSpnbx.setMaximum(fmax)
    self.ui.freqStepSpnbx.setMaximum(500000)
    dbmax = info['intensity']['max']
    self.ui.dbStartSpnbx.setMaximum(dbmax)
    self.ui.dbStopSpnbx.setMaximum(dbmax)
    self.ui.dbStepSpnbx.setMaximum(500000)
    self.ui.durSpnbx.setMaximum(info['duration']['max'])
    self.ui.risefallSpnbx.setMaximum(info['risefall']['max'])
    # Map auto-parameter model columns 1-4 (start, stop, step, nsteps)
    # onto the frequency and intensity widget groups.
    self.fmapper.setModel(self.parameterModel)
    self.dbmapper.setModel(self.parameterModel)
    self.fmapper.addMapping(self.ui.freqStartSpnbx, 1)
    self.fmapper.addMapping(self.ui.freqStopSpnbx, 2)
    self.fmapper.addMapping(self.ui.freqStepSpnbx, 3)
    self.fmapper.addMapping(self.ui.freqNstepsLbl, 4, 'text')
    self.dbmapper.addMapping(self.ui.dbStartSpnbx, 1)
    self.dbmapper.addMapping(self.ui.dbStopSpnbx, 2)
    self.dbmapper.addMapping(self.ui.dbStepSpnbx, 3)
    self.dbmapper.addMapping(self.ui.dbNstepsLbl, 4, 'text')
    # Presumably row 0 drives frequency and row 1 drives intensity --
    # TODO confirm against the auto-parameter model layout.
    self.fmapper.toFirst()
    self.dbmapper.setCurrentIndex(1)
    self.ui.durSpnbx.setValue(tone.duration())
    self.ui.nrepsSpnbx.setValue(self.stimModel.repCount())
    self.ui.risefallSpnbx.setValue(tone.risefall())
    self.tone = tone
Sets the QStimulusModel for this editor
16,002
def setStimDuration(self):
    """Push the duration spin-box value into the stimulus tone."""
    self.tone.setDuration(self.ui.durSpnbx.value())
Sets the duration of the StimulusModel from values pulled from this widget
16,003
def setStimReps(self):
    """Push the repetition-count spin-box value into the stimulus model."""
    self.stimModel.setRepCount(self.ui.nrepsSpnbx.value())
Sets the reps of the StimulusModel from values pulled from this widget
16,004
def setStimRisefall(self):
    """Push the rise/fall spin-box value into the stimulus tone."""
    self.tone.setRisefall(self.ui.risefallSpnbx.value())
Sets the risefall of the StimulusModel's tone from values pulled from this widget
16,005
def add_arguments(self, parser, bootstrap=False):
    """Add all items to the parser passed in.

    Args:
        parser: argparse-style parser each item registers itself on.
        bootstrap: Bootstrap flag, forwarded both to item lookup and to
            each item's add_argument call.
    """
    # BUG FIX: the original always called _get_items(bootstrap=False),
    # silently ignoring the caller's flag; it also used a list
    # comprehension purely for side effects.
    for item in self._get_items(bootstrap=bootstrap):
        item.add_argument(parser, bootstrap)
Adds all items to the parser passed in .
16,006
def add_source(self, label, source_type, **kwargs):
    """Add a source to the spec, keyed by its label."""
    source = get_source(label, source_type, **kwargs)
    self._sources[label] = source
Add a source to the spec .
16,007
def find_item(self, fq_name):
    """Find an item in the specification by fully qualified name.

    Returns the item, or None when any path segment is missing.
    """
    current = self._yapconf_items
    for part in fq_name.split(self._separator):
        # Container items keep their nested items under .children.
        if isinstance(current, (YapconfDictItem, YapconfListItem)):
            current = current.children
        if part not in current:
            return None
        current = current[part]
    return current
Find an item in the specification by fully qualified name .
16,008
def get_item(self, name, bootstrap=False):
    """Get a particular item in the specification by name, or None."""
    matches = (item for item in self._get_items(bootstrap) if item.name == name)
    return next(matches, None)
Get a particular item in the specification .
16,009
def update_defaults(self, new_defaults, respect_none=False):
    """Update items' defaults to the values in the new_defaults dict.

    Args:
        new_defaults: Mapping of item name -> new default value.
        respect_none: Forwarded to each item's update_default.

    Raises:
        YapconfItemNotFound: If a key has no corresponding config item.
    """
    # dict.items() works on both Python 2 and 3; the six.iteritems
    # dependency is unnecessary here.
    for key, value in new_defaults.items():
        item = self.get_item(key)
        if item is None:
            raise YapconfItemNotFound(
                "Cannot update default for {0}, "
                "there is no config item by the "
                "name of {1}".format(key, key), None)
        item.update_default(value, respect_none)
Update items' defaults to the values in the new_defaults dict .
16,010
def generate_documentation(self, app_name, **kwargs):
    """Generate markdown documentation for this specification.

    If 'output_file_name' is given in kwargs, the document is also written
    to that file using the requested 'encoding' (default utf-8).
    """
    doc_string = generate_markdown_doc(app_name, self)
    output_file = kwargs.get('output_file_name')
    if output_file:
        with open(output_file, 'w', encoding=kwargs.get('encoding', 'utf-8')) as doc_file:
            doc_file.write(doc_string)
    return doc_string
Generate documentation for this specification .
16,011
def load_config(self, *args, **kwargs):
    """Load a config from the given sources; returns a Box."""
    bootstrap = kwargs.get('bootstrap', False)
    overrides = self._generate_overrides(*args)
    return Box(self._generate_config_from_overrides(overrides, bootstrap))
Load a config based on the arguments passed in .
16,012
def spawn_watcher(self, label, target=None, eternal=False):
    """Spawn a config watcher (daemon thread) for the source *label*.

    Raises YapconfSourceError when no such source exists.
    """
    if label not in self._sources:
        raise YapconfSourceError(
            'Cannot watch %s no source named %s' % (label, label))
    source = self._sources[label]
    handler = ConfigChangeHandler(source.get_data(), self, target)
    return source.watch(handler, eternal)
Spawns a config watcher in a separate daemon thread .
16,013
def migrate_config_file(self, config_file_path, always_update=False,
                        current_file_type=None, output_file_name=None,
                        output_file_type=None, create=True,
                        update_defaults=True, dump_kwargs=None,
                        include_bootstrap=True,
                        ):
    """Migrate a configuration file to match the current specification.

    Args:
        config_file_path: Path of the config file to migrate.
        always_update: Forwarded to each item's migrate_config.
        current_file_type: Format of the existing file; defaults to the
            spec's file type.
        output_file_name: Destination path; defaults to the input path.
        output_file_type: Output format; defaults to the spec's file type.
        create: Whether to write the migrated config back to disk.
        update_defaults: Forwarded to each item's migrate_config.
        dump_kwargs: Extra keyword args for yapconf.dump_data.
        include_bootstrap: Whether bootstrap items are migrated too.

    Returns:
        Box containing the migrated configuration.
    """
    current_file_type = current_file_type or self._file_type
    output_file_type = output_file_type or self._file_type
    output_file_name = output_file_name or config_file_path
    current_config = self._get_config_if_exists(config_file_path, create,
                                                current_file_type)
    migrated_config = {}
    if include_bootstrap:
        items = self._yapconf_items.values()
    else:
        items = [item for item in self._yapconf_items.values()
                 if not item.bootstrap]
    # Each item copies/updates its own slice of the config.
    for item in items:
        item.migrate_config(current_config, migrated_config,
                            always_update, update_defaults)
    if create:
        yapconf.dump_data(migrated_config, filename=output_file_name,
                          file_type=output_file_type,
                          klazz=YapconfLoadError,
                          dump_kwargs=dump_kwargs)
    return Box(migrated_config)
Migrates a configuration file .
16,014
def refractory(times, refract=0.002):
    """Remove spikes from *times* that violate the refractory period.

    Args:
        times: Sorted spike times (seconds).
        refract: Minimum allowed separation from the previously accepted
            spike (seconds).

    Returns:
        List of spike times with violating spikes removed.
    """
    # BUG FIX: the original indexed times[0] unconditionally, which
    # raised IndexError for an empty input.
    if not len(times):
        return []
    times_refract = [times[0]]
    for t in times[1:]:
        if times_refract[-1] + refract <= t:
            times_refract.append(t)
    return times_refract
Removes spikes in times list that do not satisfy the refractory period
16,015
def spike_times(signal, threshold, fs, absval=True):
    """Detect spike times from a recorded signal.

    Args:
        signal: 1D numpy array of samples.
        threshold: Amplitude above which a sample counts as part of a spike.
        fs: Sampling frequency, used to convert sample indices to seconds.
        absval: When True, rectify the signal before thresholding.

    Returns:
        List of spike times (seconds), filtered through refractory().
    """
    times = []
    if absval:
        signal = np.abs(signal)
    # Indices of all samples above threshold.
    over, = np.where(signal > threshold)
    # Boundaries between separate supra-threshold segments
    # (gaps of more than one sample).
    segments, = np.where(np.diff(over) > 1)
    if len(over) > 1:
        if len(segments) == 0:
            # Single contiguous segment spanning all supra-threshold samples.
            segments = [0, len(over) - 1]
        else:
            if segments[0] != 0:
                segments = np.insert(segments, [0], [0])
            else:
                # First segment is a lone sample; record it directly.
                times.append(float(over[0]) / fs)
                if 1 not in segments:
                    segments[0] = 1
            if segments[-1] != len(over) - 1:
                segments = np.insert(segments, [len(segments)], [len(over) - 1])
            else:
                # Last segment is a lone sample; record it directly.
                times.append(float(over[-1]) / fs)
        for iseg in range(1, len(segments)):
            if segments[iseg] - segments[iseg - 1] == 1:
                # Two adjacent boundaries: single-sample spike.
                idx = over[segments[iseg]]
            else:
                # NOTE(review): this mutates segments[0] on every
                # multi-sample segment, which looks suspicious -- confirm
                # the intent before relying on it.
                segments[0] = segments[0] - 1
                # Spike time is at the peak of the segment.
                idx = over[segments[iseg - 1] + 1] + np.argmax(
                    signal[over[segments[iseg - 1] + 1]:over[segments[iseg]]])
            times.append(float(idx) / fs)
    elif len(over) == 1:
        times.append(float(over[0]) / fs)
    if len(times) > 0:
        return refractory(times)
    else:
        return times
Detect spikes from a given signal
16,016
def bin_spikes(spike_times, binsz):
    """Sort spike times into fixed-width bins.

    Args:
        spike_times: Sequence of spike times.
        binsz: Bin width (same units as spike_times).

    Returns:
        Integer ndarray of bin indices, one per spike. Quotients are
        rounded to 5 decimal places first to absorb float error at
        bin edges.
    """
    # Vectorized equivalent of the original per-element loop.
    return np.floor(np.around(np.asarray(spike_times) / binsz, 5)).astype(int)
Sort spike times into bins
16,017
def spike_latency(signal, threshold, fs):
    """Find the latency of the first spike over threshold.

    Args:
        signal: 1D numpy array of samples.
        threshold: Amplitude a sample must exceed.
        fs: Sampling frequency, used to convert indices to seconds.

    Returns:
        Latency in seconds, or np.nan when nothing crosses threshold.
    """
    over, = np.where(signal > threshold)
    segments, = np.where(np.diff(over) > 1)
    if len(over) > 1:
        if len(segments) == 0:
            # One contiguous supra-threshold region: latency at its peak.
            idx = over[0] + np.argmax(signal[over[0]:over[-1]])
            latency = float(idx) / fs
        elif segments[0] == 0:
            # The first region is a single sample.
            latency = float(over[0]) / fs
        else:
            # Peak of the first supra-threshold region.
            idx = over[0] + np.argmax(signal[over[0]:over[segments[0]]])
            latency = float(idx) / fs
    elif len(over) > 0:
        latency = float(over[0]) / fs
    else:
        latency = np.nan
    return latency
Find the latency of the first spike over threshold
16,018
def firing_rate(spike_times, window_size=None):
    """Calculate the firing rate (spikes per unit time).

    Args:
        spike_times: Sequence of spike times.
        window_size: Duration over which to compute the rate; when None it
            defaults to the span of the spike times (or 1 for one spike).

    Returns:
        Number of spikes divided by the window; 0 for no spikes or a
        zero-length window.
    """
    if len(spike_times) == 0:
        return 0
    if window_size is None:
        if len(spike_times) > 1:
            window_size = spike_times[-1] - spike_times[0]
        else:
            window_size = 1
    # Guard against identical first/last spike times.
    if window_size == 0:
        return 0
    # BUG FIX: the original returned window_size / count, i.e. the mean
    # inter-spike interval, not a rate.
    return len(spike_times) / window_size
Calculate the firing rate of spikes
16,019
def evaluate_rules(self):
    """Perform Sugeno inference over all rules.

    Each rule evaluates to a term, a weight, and an output; per-term
    results are combined as a weighted average of the outputs.

    Returns:
        Dict mapping each output term to its weighted-average value
        (0 when the total weight for that term is 0).
    """
    outputs = defaultdict(list)
    for rule in self._rules:
        res = rule.evaluate(self._variables)
        outputs[res['term']].append([res['weight'], res['output']])
    return_values = {}
    for term, pairs in outputs.items():
        # BUG FIX: the original used a tuple-parameter lambda
        # (`lambda (x, y): ...`), which is a SyntaxError on Python 3;
        # unpack explicitly instead. The unused `total_rules` local was
        # also removed.
        num = sum(weight * output for weight, output in pairs)
        den = sum(weight for weight, _ in pairs)
        return_values[term] = num / den if den else 0
    return return_values
Perform Sugeno inference .
16,020
def is_complete(self):
    """Check qstat and the log file to determine whether the job met
    its completion criteria."""
    qstat_done = self._grep_qstat('complete')
    log_done = self._grep_status('complete')
    return bool(qstat_done and log_done)
Checks the job's output or log file to determine if the completion criteria were met .
16,021
def initiate(mw_uri, consumer_token, callback='oob',
             user_agent=defaults.USER_AGENT):
    """Initiate an OAuth handshake with MediaWiki.

    Args:
        mw_uri: MediaWiki endpoint URI to post the initiate request to.
        consumer_token: Consumer token object with .key and .secret.
        callback: OAuth callback URI; 'oob' means out-of-band.
        user_agent: User-Agent header value to send.

    Returns:
        (redirect_url, request_token) tuple; direct the user to
        redirect_url to authorize the request token.

    Raises:
        OAuthException: When the response is not the expected
            x-www-form-urlencoded credential set.
    """
    auth = OAuth1(consumer_token.key,
                  client_secret=consumer_token.secret,
                  callback_uri=callback)
    r = requests.post(url=mw_uri,
                      params={'title': "Special:OAuth/initiate"},
                      auth=auth,
                      headers={'User-Agent': user_agent})
    # The token credentials come back form-encoded in the body.
    credentials = parse_qs(r.content)
    if credentials is None or credentials == {}:
        raise OAuthException(
            "Expected x-www-form-urlencoded response from " +
            "MediaWiki, but got something else: " +
            "{0}".format(repr(r.content)))
    elif b('oauth_token') not in credentials or b('oauth_token_secret') not in credentials:
        raise OAuthException(
            "MediaWiki response lacks token information: "
            "{0}".format(repr(credentials)))
    else:
        request_token = RequestToken(credentials.get(b('oauth_token'))[0],
                                     credentials.get(b('oauth_token_secret'))[0])
        params = {'title': "Special:OAuth/authenticate",
                  'oauth_token': request_token.key,
                  'oauth_consumer_key': consumer_token.key}
        return (mw_uri + "?" + urlencode(params), request_token)
Initiate an oauth handshake with MediaWiki .
16,022
def configure(*args, **kwargs):
    """Configure the module-level statsd client used by all operations.

    Keyword arguments are merged into the module config and a fresh
    client is created from the merged result.
    """
    global _stats_client
    log.debug('statsd.configure(%s)' % kwargs)
    _config.update(kwargs)
    _stats_client = _create_client(**_config)
Configure the module level statsd client that will be used in all library operations .
16,023
def incr(name, value=1, rate=1, tags=None):
    """Increment a metric by value."""
    stats = client()
    stats.incr(name, value, rate, tags)
Increment a metric by value .
16,024
def decr(name, value=1, rate=1, tags=None):
    """Decrement a metric by value."""
    stats = client()
    stats.decr(name, value, rate, tags)
Decrement a metric by value .
16,025
def gauge(name, value, rate=1, tags=None):
    """Set the value for a gauge."""
    stats = client()
    stats.gauge(name, value, rate, tags)
Set the value for a gauge .
16,026
def timing(name, delta, rate=1, tags=None):
    """Send new timing information; *delta* is in milliseconds."""
    stats = client()
    return stats.timing(name, delta, rate=rate, tags=tags)
Sends new timing information . delta is in milliseconds .
16,027
def list_pages(self, request, template_name=None, extra_context=None):
    """List root pages, or pages whose content matches a search query.

    Requires admin-site permission; otherwise the login view is rendered.
    """
    if not self.admin_site.has_permission(request):
        return self.admin_site.login(request)
    language = get_language_from_request(request)
    query = request.POST.get('q', '').strip()
    if query:
        # Search content bodies and collect their distinct pages.
        page_ids = list(set([c.page.pk for c in
                             Content.objects.filter(body__icontains=query)]))
        pages = Page.objects.filter(pk__in=page_ids)
    else:
        pages = Page.objects.root()
    if settings.PAGE_HIDE_SITES:
        # Restrict to pages attached to the current site.
        pages = pages.filter(sites=settings.SITE_ID)
    context = {
        'can_publish': request.user.has_perm('pages.can_publish'),
        'language': language,
        'name': _("page"),
        'pages': pages,
        'opts': self.model._meta,
        'q': query}
    context.update(extra_context or {})
    change_list = self.changelist_view(request, context)
    return change_list
List root pages
16,028
def codingthreads(self):
    """Find CDS features in .gff files to filter out non-coding sequences.

    Spawns one daemon worker per CPU, queues every sample, waits for the
    queue to drain, then moves on to core-gene discovery.
    """
    printtime('Extracting CDS features', self.start)
    for _ in range(self.cpus):
        worker = Thread(target=self.codingsequences, args=())
        # Daemon threads do not block interpreter exit; the `daemon`
        # attribute replaces the deprecated setDaemon() call.
        worker.daemon = True
        worker.start()
    for sample in self.runmetadata.samples:
        self.codingqueue.put(sample)
    self.codingqueue.join()
    self.corethreads()
Find CDS features in . gff files to filter out non - coding sequences from the analysis
16,029
def corethreads(self):
    """Create a .cds file of CDS fasta records for each strain.

    Spawns one daemon worker per CPU, assigns each sample its .cds output
    path, queues the samples, waits for completion, then writes the core
    allele files.
    """
    printtime('Creating CDS files and finding core genes', self.start)
    for _ in range(self.cpus):
        worker = Thread(target=self.coregroups, args=())
        # Daemon threads do not block interpreter exit; the `daemon`
        # attribute replaces the deprecated setDaemon() call.
        worker.daemon = True
        worker.start()
    for sample in self.runmetadata.samples:
        sample.prokka.cds = os.path.join(sample.prokka.outputdir,
                                         '{}.cds'.format(sample.name))
        self.corequeue.put(sample)
    self.corequeue.join()
    self.corewriter()
Create a . cds file consisting of fasta records of CDS features for each strain
16,030
def corewriter(self):
    """Create per-gene .fasta files containing all observed alleles.

    Also records, for every strain, which allele number it carries for
    each gene (self.corealleles), combines the per-gene files, and then
    computes the profiles.
    """
    printtime('Creating core allele files', self.start)
    for gene in sorted(self.genesequence):
        self.geneset.add(gene)
        genefile = os.path.join(self.coregenelocation, '{}.fasta'.format(gene))
        if not os.path.isfile(genefile):
            with open(genefile, 'w') as core:
                # One record per distinct allele sequence; allele numbers
                # are 1-based enumeration order.
                for count, sequence in enumerate(self.genesequence[gene]):
                    definitionline = '{}-{}'.format(gene, count + 1)
                    fasta = SeqRecord(Seq(sequence),
                                      description='',
                                      id=definitionline)
                    SeqIO.write(fasta, core, 'fasta')
                    for strain in self.coresequence[sequence]:
                        # strain[:-6] strips a fixed-length suffix --
                        # presumably '.fasta'; TODO confirm.
                        try:
                            self.corealleles[strain[:-6]].update({gene: count + 1})
                        except KeyError:
                            self.corealleles[strain[:-6]] = {gene: count + 1}
        else:
            # File already exists: only record the allele assignments.
            for count, sequence in enumerate(self.genesequence[gene]):
                for strain in self.coresequence[sequence]:
                    try:
                        self.corealleles[strain[:-6]].update({gene: count + 1})
                    except KeyError:
                        self.corealleles[strain[:-6]] = {gene: count + 1}
    if not os.path.isfile(os.path.join(self.coregenelocation, 'core_combined.fasta')):
        fastafiles = glob(os.path.join(self.coregenelocation, '*.fasta'))
        self.combinealleles(fastafiles)
    self.profiler()
Creates . fasta files containing all alleles for each gene
16,031
def profiler(self):
    """Calculate the core sequence-type profile for each strain.

    Each distinct combination of gene alleles becomes a numbered sequence
    type; profile.txt maps sequence types to allele numbers per gene.
    """
    printtime('Calculating core profiles', self.start)
    # Collect the distinct allele combinations as sorted tuples so they
    # are hashable and comparable.
    for strain in self.corealleles:
        self.coreset.add(tuple(sorted(self.corealleles[strain].items())))
    header = 'ST,{}\n'.format(','.join(sorted(self.geneset)))
    data = ''
    # NOTE(review): iterating a set via enumerate is not deterministic
    # across runs, so sequence-type numbering may vary -- confirm this
    # is acceptable.
    for count, core in sorted(enumerate(self.coreset)):
        count += 1
        data += '{}'.format(count)
        # Map every strain carrying this combination to the sequence type.
        for strain in self.corealleles:
            if tuple(sorted(self.corealleles[strain].items())) == core:
                self.profiles[strain] = count
        for gene in sorted(core):
            data += ',{}'.format(gene[1])
        data += '\n'
    with open(os.path.join(self.profilelocation, 'profile.txt'), 'w') as profile:
        profile.write(header)
        profile.write(data)
    self.linker()
Calculates the core profile for each strain
16,032
def linker(self):
    """Link sequence types to strains and write strainprofiles.txt (CSV)."""
    strainprofile = os.path.join(self.profilelocation, 'strainprofiles.txt')
    if os.path.isfile(strainprofile):
        return
    rows = ''
    # Sort by sequence type so the report is ordered by ST number.
    for strain, seqtype in sorted(self.profiles.items(),
                                  key=operator.itemgetter(1)):
        for sample in self.runmetadata.samples:
            if sample.name == strain:
                sample.general.coretype = seqtype
        rows += '{},{}\n'.format(strain, seqtype)
    with open(strainprofile, 'w') as profile:
        profile.write('Strain,SequenceType\n')
        profile.write(rows)
Link the sequence types to the strains . Create a . csv file of the linkages
16,033
def get(self, index):
    """Get the chunk stored at *index*."""
    assert index <= self.count
    assert index < self.size
    start = index * self.chunk_size
    end = start + self.chunk_size
    return self.data[start:end]
Get a chunk by index
16,034
def new(self, init=None):
    """Return the last currently-unused chunk, doubling capacity if full.

    When *init* is given it must be exactly one chunk long and is copied
    into the fresh chunk.
    """
    if not self.count < self.size:
        self.resize(self.count * 2)
    chunk = self.get(self.count)
    self.count += 1
    if init is None:
        return chunk
    assert len(init) == self.chunk_size
    chunk[0:self.chunk_size] = init
    return chunk
Return the last currently unused chunk resizing if needed .
16,035
def resize(self, new_size):
    """Grow the backing array to *new_size* chunks, copying existing data."""
    assert new_size > self.size
    grown = self._allocate(new_size)
    used = self.size * self.chunk_size
    grown[0:used] = self.data
    self.size = new_size
    self.data = grown
Create a new larger array and copy data over
16,036
def remove(self, index):
    """Remove chunk at *index* by swapping the last chunk into its slot.

    Returns the index of the chunk that was moved, or None when the
    removed chunk was already last. The vacated slot is zero-filled.
    """
    assert index < self.count
    tail = self.count - 1
    target = self.get(index)
    if index != tail:
        source = self.get(tail)
        target[0:self.chunk_size] = source
        moved = tail
    else:
        # Removing the tail: nothing needs to move.
        source = target
        moved = None
    source[0:self.chunk_size] = [0] * self.chunk_size
    self.count -= 1
    return moved
Remove chunk at index .
16,037
def create_turtle(self, id, shape, model_init, color_init):
    """Allocate a slice of memory for turtle data and register its shape."""
    assert id not in self.id_to_shape
    storage = self._create_turtle(id, shape, model_init, color_init)
    self.id_to_shape[id] = shape
    return storage
Create a slice of memory for turtle data storage
16,038
def set_shape(self, id, new_shape):
    """Move a turtle's data from its old shape buffer into *new_shape*'s."""
    old_shape = self.id_to_shape[id]
    old_buffer = self.get_buffer(old_shape)
    model, color = old_buffer.get(id)
    # Copy into the new buffer before releasing the old slot.
    new_data = self._create_turtle(id, new_shape, model, color)
    old_buffer.remove(id)
    self.id_to_shape[id] = new_shape
    return new_data
Copies the turtle data from the old shape buffer to the new
16,039
def get_checksum_by_target(self, target):
    """Return the checksum matching *target*, or None."""
    return next((c for c in self.checksums if c.target == target), None)
returns a checksum of a specific kind
16,040
def add_checksum(self, csum):
    """Add a checksum to a release object, replacing any existing
    checksum with the same target."""
    duplicate = next((c for c in self.checksums if c.target == csum.target), None)
    if duplicate is not None:
        self.checksums.remove(duplicate)
    self.checksums.append(csum)
Add a checksum to a release object
16,041
def get_image_by_kind(self, kind):
    """Return the image of the given kind, or None."""
    return next((im for im in self.images if im.kind == kind), None)
returns an image of a specific kind
16,042
def add_image(self, im):
    """Add an image to a screenshot object, replacing any existing image
    of the same kind."""
    duplicate = next((x for x in self.images if x.kind == im.kind), None)
    if duplicate is not None:
        self.images.remove(duplicate)
    self.images.append(im)
Add an image to a screenshot object
16,043
def add_screenshot(self, screenshot):
    """Add a screenshot object if it does not already exist."""
    if screenshot not in self.screenshots:
        self.screenshots.append(screenshot)
Add a screenshot object if it does not already exist
16,044
def add_provide(self, provide):
    """Add a provide object unless one with the same value exists."""
    if all(p.value != provide.value for p in self.provides):
        self.provides.append(provide)
Add a provide object if it does not already exist
16,045
def get_provides_by_kind(self, kind):
    """Return a list of all provides of the given kind."""
    return [p for p in self.provides if p.kind == kind]
Returns an array of provides of a certain kind
16,046
def add_require(self, require):
    """Add a require object unless one with the same value exists."""
    if any(r.value == require.value for r in self.requires):
        return
    self.requires.append(require)
Add a require object if it does not already exist
16,047
def get_require_by_kind(self, kind, value):
    """Return the require matching both kind and value, or None."""
    matches = (r for r in self.requires if r.kind == kind and r.value == value)
    return next(matches, None)
Returns a requires object of a specific value
16,048
def to_file(self, filename):
    """Save the store to disk as gzip-compressed UTF-8 XML."""
    xml = self.to_xml()
    # A context manager replaces the original manual try/finally close.
    with gzip.open(filename, 'wb') as f:
        f.write(xml.encode('utf-8'))
Save the store to disk
16,049
def from_file(self, filename):
    """Load and parse the gzip-compressed store from disk."""
    with gzip.open(filename, 'rb') as f:
        raw = f.read()
    self.parse(raw)
Open the store from disk
16,050
def get_components(self):
    """Return all components (applications) held by the store."""
    return list(self.components.values())
Returns all the applications from the store
16,051
def add(self, component):
    """Add a component to the store, merging releases on an ID collision."""
    existing = self.get_component(component.id)
    if existing:
        # Same ID already present: merge release lists instead of replacing.
        existing.releases.extend(component.releases)
        return
    self.components[component.id] = component
Add component to the store
16,052
def row2dict(row, depth=None, exclude=None, exclude_pk=None,
             exclude_underscore=None, only=None, fk_suffix=None):
    """Recursively walk row attributes to serialize them into a dict.

    Args:
        row: Mapped row object (get_mapper must accept it).
        depth: Recursion budget; None reads the row's ATTR_DEPTH default.
        exclude: Column/relationship names to skip.
        exclude_pk: Whether to skip foreign-key columns (by fk_suffix).
        exclude_underscore: Whether to skip underscore-prefixed columns.
        only: When set, include only these names.
        fk_suffix: Suffix identifying foreign-key columns.

    Returns:
        Dict of column values plus recursively serialized relationships,
        or None once the depth budget is exhausted.
    """
    if depth == 0:
        return None
    d, mapper = {}, get_mapper(row)
    # Unspecified options fall back to per-model attributes, then defaults.
    if depth is None:
        depth = getattr(row, ATTR_DEPTH, DEFAULT_DEPTH) - 1
    else:
        depth -= 1
    if exclude is None:
        exclude = getattr(row, ATTR_EXCLUDE, DEFAULT_EXCLUDE)
    if exclude_pk is None:
        exclude_pk = getattr(row, ATTR_EXCLUDE_PK, DEFAULT_EXCLUDE_PK)
    if exclude_underscore is None:
        exclude_underscore = getattr(row, ATTR_EXCLUDE_UNDERSCORE,
                                     DEFAULT_EXCLUDE_UNDERSCORE)
    if only is None:
        only = getattr(row, ATTR_ONLY, DEFAULT_ONLY)
    if fk_suffix is None:
        fk_suffix = getattr(row, ATTR_FK_SUFFIX, DEFAULT_FK_SUFFIX)
    # NOTE: concatenating dict.keys() with + is Python 2 behaviour.
    for c in mapper.columns.keys() + mapper.synonyms.keys():
        if c in exclude or \
                check_exclude_pk(c, exclude_pk, fk_suffix=fk_suffix) or \
                check_exclude_underscore(c, exclude_underscore) or \
                check_only(c, only):
            continue
        d[c] = getattr(row, c)
    for r in mapper.relationships.keys():
        if r in exclude or check_only(r, only):
            continue
        attr = getattr(row, r)
        backref = get_backref(mapper.relationships[r])
        if backref:
            # Exclude the reverse relationship to avoid infinite recursion.
            exclude.add(backref)
        kwargs = dict(depth=depth, exclude=exclude, exclude_pk=exclude_pk,
                      exclude_underscore=exclude_underscore, only=only,
                      fk_suffix=fk_suffix)
        if isinstance(attr, collections.InstrumentedList):
            d[r] = [row2dict(i, **kwargs) for i in attr if depth]
        else:
            d[r] = row2dict(attr, **kwargs)
    return d
Recursively walk row attributes to serialize ones into a dict .
16,053
def dict2row(d, model, rel=None, exclude=None, exclude_pk=None,
             exclude_underscore=None, only=None, fk_suffix=None):
    """Recursively walk dict attributes to deserialize them into a row.

    Args:
        d: Source dict.
        model: Mapped model class to instantiate.
        rel: Mapping of relationship name -> nested model class.
        exclude / exclude_pk / exclude_underscore / only / fk_suffix:
            Same filtering semantics as row2dict; None falls back to
            per-model attributes, then module defaults.

    Returns:
        A new instance of *model* populated from *d*.

    Raises:
        TypeError: When *d* is not a dict.
    """
    if not isinstance(d, dict):
        raise TypeError('Source must be instance of dict, got %s instead' %
                        type(d).__name__)
    row = model()
    mapper = get_mapper(row)
    if rel is None:
        rel = getattr(row, ATTR_REL, DEFAULT_REL)
    if exclude is None:
        exclude = getattr(row, ATTR_EXCLUDE, DEFAULT_EXCLUDE)
    if exclude_pk is None:
        exclude_pk = getattr(row, ATTR_EXCLUDE_PK, DEFAULT_EXCLUDE_PK)
    if exclude_underscore is None:
        exclude_underscore = getattr(row, ATTR_EXCLUDE_UNDERSCORE,
                                     DEFAULT_EXCLUDE_UNDERSCORE)
    if only is None:
        only = getattr(row, ATTR_ONLY, DEFAULT_ONLY)
    if fk_suffix is None:
        fk_suffix = getattr(row, ATTR_FK_SUFFIX, DEFAULT_FK_SUFFIX)
    # NOTE: concatenating dict.keys() with + is Python 2 behaviour.
    for c in mapper.columns.keys() + mapper.synonyms.keys():
        if c not in d or c in exclude or \
                check_exclude_pk(c, exclude_pk, fk_suffix=fk_suffix) or \
                check_exclude_underscore(c, exclude_underscore) or \
                check_only(c, only):
            continue
        setattr(row, c, d[c])
    for r in mapper.relationships.keys():
        if r not in d or r not in rel or check_only(r, only):
            continue
        kwargs = dict(rel=rel, exclude=exclude, exclude_pk=exclude_pk,
                      exclude_underscore=exclude_underscore, only=only,
                      fk_suffix=fk_suffix)
        if isinstance(d[r], list):
            # One-to-many: rebuild the instrumented list element by element.
            setattr(row, r, collections.InstrumentedList())
            for i in d[r]:
                getattr(row, r).append(dict2row(i, rel[r], **kwargs))
        else:
            if not exclude_pk:
                # Also populate the FK column from the nested dict's id.
                rpk = d[r].get('id') if isinstance(d[r], dict) else None
                setattr(row, r + fk_suffix, rpk)
            setattr(row, r, dict2row(d[r], rel[r], **kwargs))
    return row
Recursively walk dict attributes to serialize ones into a row .
16,054
def copy_file(source, destination, follow_symlinks=True,
              template: arg(type=bool_or(str), choices=('format', 'string')) = False,
              context=None):
    """Copy *source* file to *destination*, optionally rendering a template.

    template=False copies verbatim; True or 'format' renders the contents
    with str.format_map(context); 'string' renders with string.Template.

    Returns:
        The path of the copied file.

    Raises:
        ValueError: For an unrecognized template type.
    """
    if not template:
        # Plain copy, no rendering.
        return shutil.copy(source, destination, follow_symlinks=follow_symlinks)
    if os.path.isdir(destination):
        destination = os.path.join(destination, os.path.basename(source))
    with open(source) as source:
        contents = source.read()
    if template is True or template == 'format':
        contents = contents.format_map(context)
    elif template == 'string':
        string_template = string.Template(contents)
        contents = string_template.substitute(context)
    else:
        raise ValueError('Unknown template type: %s' % template)
    # Render to a temp file first, then copy it into place so the
    # destination is produced by the same shutil.copy path as above.
    with tempfile.NamedTemporaryFile('w', delete=False) as temp_file:
        temp_file.write(contents)
    path = shutil.copy(temp_file.name, destination)
    os.remove(temp_file.name)
    return path
Copy source file to destination .
16,055
def git_version(short: 'Get short hash' = True,
                show: 'Print version to stdout' = False):
    """Get the tag associated with HEAD; fall back to the commit SHA1.

    Returns None when not inside a git work tree (or when git fails).
    """
    result = local(['git', 'rev-parse', '--is-inside-work-tree'],
                   stdout='hide', stderr='hide', echo=False,
                   raise_on_error=False)
    if not result:
        # Not a git repository.
        return None
    # Prefer an exact tag match for HEAD.
    result = local(['git', 'describe', '--exact-match'],
                   stdout='capture', stderr='hide', echo=False,
                   raise_on_error=False)
    if result:
        return result.stdout
    # Otherwise fall back to the (optionally short) commit hash.
    # NOTE(review): a None element is passed when short is False --
    # presumably local() filters out None args; confirm.
    result = local(['git', 'rev-parse', '--short' if short else None, 'HEAD'],
                   stdout='capture', stderr='hide', echo=False,
                   raise_on_error=False)
    if result:
        version = result.stdout.strip()
        if show:
            print(version)
        return version
    return None
Get tag associated with HEAD ; fall back to SHA1 .
16,056
def remote(cmd: arg(container=list),
           host,
           user=None,
           port=None,
           sudo=False,
           run_as=None,
           shell='/bin/sh',
           cd=None,
           environ: arg(container=dict) = None,
           paths=(),
           stdout: arg(type=StreamOptions) = None,
           stderr: arg(type=StreamOptions) = None,
           echo=False,
           raise_on_error=True,
           dry_run=False,
           ) -> Result:
    """Run a remote command via SSH.

    Builds `ssh [-q -t] [-p port] [user@]host '<shell> -c <quoted cmd>'`
    where the inner command chains optional cd/export statements with the
    command itself, then executes it through local().
    """
    if not isinstance(cmd, str):
        cmd = flatten_args(cmd, join=True)
    ssh_options = ['-q']
    if isatty(sys.stdin):
        # Request a TTY only when stdin is interactive.
        ssh_options.append('-t')
    if port is not None:
        ssh_options.extend(('-p', port))
    ssh_connection_str = '{user}@{host}'.format_map(locals()) if user else host
    remote_cmd = []
    if sudo:
        remote_cmd.extend(('sudo', '-H'))
    elif run_as:
        remote_cmd.extend(('sudo', '-H', '-u', run_as))
    remote_cmd.extend((shell, '-c'))
    inner_cmd = []
    if cd:
        inner_cmd.append('cd {cd}'.format_map(locals()))
    if environ:
        # locals() inside the genexp frame exposes k and v to format_map.
        inner_cmd.extend('export {k}="{v}"'.format_map(locals())
                         for k, v in environ.items())
    if paths:
        inner_cmd.append('export PATH="{path}:$PATH"'.format(path=':'.join(paths)))
    inner_cmd.append(cmd)
    # Chain with && so any failing step aborts the remote command.
    inner_cmd = ' &&\n '.join(inner_cmd)
    inner_cmd = '\n {inner_cmd}\n'.format_map(locals())
    inner_cmd = shlex.quote(inner_cmd)
    remote_cmd.append(inner_cmd)
    remote_cmd = ' '.join(remote_cmd)
    args = ('ssh', ssh_options, ssh_connection_str, remote_cmd)
    return local(args, stdout=stdout, stderr=stderr, echo=echo,
                 raise_on_error=raise_on_error, dry_run=dry_run)
Run a remote command via SSH .
16,057
def sync(source, destination, host, user=None, sudo=False, run_as=None,
         options=('-rltvz', '--no-perms', '--no-group'), excludes=(),
         exclude_from=None, delete=False, dry_run=False,
         mode='u=rwX,g=rwX,o=', quiet=True, pull=False,
         stdout: arg(type=StreamOptions) = None,
         stderr: arg(type=StreamOptions) = None,
         echo=False, raise_on_error=True,
         ) -> Result:
    """Sync files between the local machine and *host* using rsync.

    By default pushes *source* to the remote *destination*; set pull=True
    to copy from the remote instead. sudo/run_as adjust the remote rsync
    invocation via --rsync-path.
    """
    source = abs_path(source, keep_slash=True)
    destination = abs_path(destination, keep_slash=True)
    connection_str = '{user}@{host}'.format_map(locals()) if user else host
    push = not pull
    if sudo:
        rsync_path = ('--rsync-path', 'sudo rsync')
    elif run_as:
        rsync_path = ('--rsync-path', 'sudo -u {run_as} rsync'.format_map(locals()))
    else:
        rsync_path = None
    # Attach the remote prefix to whichever side is remote.
    if push:
        destination = '{connection_str}:{destination}'.format_map(locals())
    else:
        source = '{connection_str}:{source}'.format_map(locals())
    # None/empty entries are presumably filtered out by local() -- confirm.
    args = (
        'rsync',
        rsync_path,
        options,
        ('--chmod', mode) if mode else None,
        tuple(('--exclude', exclude) for exclude in excludes),
        ('--exclude-from', exclude_from) if exclude_from else None,
        '--delete' if delete else None,
        '--dry-run' if dry_run else None,
        '--quiet' if quiet else None,
        source,
        destination,
    )
    return local(args, stdout=stdout, stderr=stderr, echo=echo,
                 raise_on_error=raise_on_error)
Sync files using rsync .
16,058
def _get_mean_and_median(hist: Hist) -> Tuple[float, float]:
    """Retrieve the mean and median from a ROOT histogram.

    Returns:
        (mean, median) tuple; the median is extracted as the 50% quantile.
    """
    # Output slot and the 0.5 probability for GetQuantiles.
    x = ctypes.c_double(0)
    q = ctypes.c_double(0.5)
    # NOTE(review): presumably ComputeIntegral() must precede
    # GetQuantiles() for meaningful results -- confirm against the ROOT
    # TH1 documentation.
    hist.ComputeIntegral()
    hist.GetQuantiles(1, x, q)
    mean = hist.GetMean()
    return (mean, x.value)
Retrieve the mean and median from a ROOT histogram .
16,059
def _project_to_part_level(hist: Hist, outliers_removal_axis: OutliersRemovalAxis) -> Hist:
    """Project a 2D/3D input histogram onto the particle-level axis.

    1D histograms are returned unchanged.
    """
    import ROOT
    if isinstance(hist, (ROOT.TH2, ROOT.TH3)):
        projection_information: Dict[str, Any] = {}
        output_object = _OutputObject(None)
        projector = projectors.HistProjector(
            observable_to_project_from=hist,
            output_observable=output_object,
            output_attribute_name="output",
            projection_name_format="outliers_removal_hist",
            projection_information=projection_information,
        )
        # Project over the full range of the outliers-removal axis
        # (bin 1 through the last bin).
        projector.projection_axes.append(
            projectors.HistAxisRange(
                axis_type=outliers_removal_axis,
                axis_range_name="outliers_removal_axis",
                min_val=projectors.HistAxisRange.apply_func_to_find_bin(None, 1),
                max_val=projectors.HistAxisRange.apply_func_to_find_bin(ROOT.TAxis.GetNbins),
            )
        )
        projector.project()
        return output_object.output
    return hist
Project the input histogram to the particle level axis .
16,060
def _determine_outliers_index ( hist : Hist , moving_average_threshold : float = 1.0 , number_of_values_to_search_ahead : int = 5 , limit_of_number_of_values_below_threshold : int = None ) -> int : import ROOT if isinstance ( hist , ( ROOT . TH2 , ROOT . TH3 , ROOT . THnBase ) ) : raise ValueError ( f"Given histogram '{hist.GetName()}' of type {type(hist)}, but can only" " determine the outlier location of a 1D histogram. Please project to" " the particle level axis first." ) if limit_of_number_of_values_below_threshold is None : limit_of_number_of_values_below_threshold = number_of_values_to_search_ahead - 1 hist_to_check = histogram . Histogram1D . from_existing_hist ( hist ) number_of_values_to_search_ahead = 5 moving_average = utils . moving_average ( hist_to_check . y , n = number_of_values_to_search_ahead ) cut_index = _determine_outliers_for_moving_average ( moving_average = moving_average , moving_average_threshold = moving_average_threshold , number_of_values_to_search_ahead = number_of_values_to_search_ahead , limit_of_number_of_values_below_threshold = limit_of_number_of_values_below_threshold , ) if cut_index != - 1 : cut_index += 1 return cut_index
Determine the location of where outliers begin in a 1D histogram .
16,061
def _determine_outliers_for_moving_average ( moving_average : np . ndarray , moving_average_threshold : float , number_of_values_to_search_ahead : int , limit_of_number_of_values_below_threshold : int ) -> int : below_threshold = moving_average < moving_average_threshold values_to_check = [ ] for i in range ( limit_of_number_of_values_below_threshold ) : values_to_check . append ( below_threshold [ i : - ( limit_of_number_of_values_below_threshold - 1 - i ) or None ] ) found_at_least_one_bin_above_threshold = False cut_index = - 1 for i , values in enumerate ( zip ( * values_to_check ) ) : if i == 0 : continue above_threshold = [ not value for value in values ] if any ( above_threshold ) : found_at_least_one_bin_above_threshold = True if found_at_least_one_bin_above_threshold and all ( np . invert ( above_threshold ) ) : logger . debug ( f"i at found cut_index: {i} with moving_average: {moving_average[i]}" ) cut_index = i + limit_of_number_of_values_below_threshold // 2 break return cut_index
Determine outliers to remove from a given moving average .
16,062
def _remove_outliers_from_hist ( hist : Hist , outliers_start_index : int , outliers_removal_axis : OutliersRemovalAxis ) -> None : if outliers_start_index > 0 : x = ctypes . c_int ( 0 ) y = ctypes . c_int ( 0 ) z = ctypes . c_int ( 0 ) outliers_removal_axis_values : Dict [ OutliersRemovalAxis , ctypes . c_int ] = { projectors . TH1AxisType . x_axis : x , projectors . TH1AxisType . y_axis : y , projectors . TH1AxisType . z_axis : z , } for index in range ( 0 , hist . GetNcells ( ) ) : hist . GetBinXYZ ( index , x , y , z ) if hist . GetBinContent ( index ) < hist . GetBinError ( index ) : logger . warning ( f"Bin content < error. Name: {hist.GetName()}, Bin content: {hist.GetBinContent(index)}, Bin error: {hist.GetBinError(index)}, index: {index}, ({x.value}, {y.value})" ) if outliers_removal_axis_values [ outliers_removal_axis ] . value >= outliers_start_index : hist . SetBinContent ( index , 0 ) hist . SetBinError ( index , 0 ) else : logger . info ( f"Hist {hist.GetName()} did not have any outliers to cut" )
Remove outliers from a given histogram .
16,063
def shapefile ( self , file ) : driver = ogr . GetDriverByName ( 'ESRI Shapefile' ) dataset = driver . Open ( file ) if dataset is not None : layer = dataset . GetLayer ( ) spatialRef = layer . GetSpatialRef ( ) feature = layer . GetNextFeature ( ) geom = feature . GetGeometryRef ( ) spatialRef = geom . GetSpatialReference ( ) outSpatialRef = osr . SpatialReference ( ) outSpatialRef . ImportFromEPSG ( 4326 ) coordTrans = osr . CoordinateTransformation ( spatialRef , outSpatialRef ) env = geom . GetEnvelope ( ) xmin = env [ 0 ] ymin = env [ 2 ] xmax = env [ 1 ] ymax = env [ 3 ] pointMAX = ogr . Geometry ( ogr . wkbPoint ) pointMAX . AddPoint ( env [ 1 ] , env [ 3 ] ) pointMAX . Transform ( coordTrans ) pointMIN = ogr . Geometry ( ogr . wkbPoint ) pointMIN . AddPoint ( env [ 0 ] , env [ 2 ] ) pointMIN . Transform ( coordTrans ) self . bbox = str ( pointMIN . GetPoint ( ) [ 0 ] ) + ',' + str ( pointMIN . GetPoint ( ) [ 1 ] ) + ',' + str ( pointMAX . GetPoint ( ) [ 0 ] ) + ',' + str ( pointMAX . GetPoint ( ) [ 1 ] ) self . query = None else : exit ( " shapefile not found. Please verify your path to the shapefile" )
reprojette en WGS84 et recupere l extend
16,064
def set_comment ( self , cellid , comment ) : info = { 'cellid' : cellid , 'comment' : comment } self . datafile . set_metadata ( self . current_dataset_name , info )
Saves the provided comment to the current dataset .
16,065
def main ( argument , sets , big_endian , optimal , output , clipboard , quiet , verbose ) : logger = logging . getLogger ( ) handler = logging . StreamHandler ( sys . stderr ) handler . setFormatter ( LevelFormatter ( ) ) logger . addHandler ( handler ) logger . setLevel ( logging . WARNING + ( quiet - verbose ) * 10 ) if sets and optimal : pat = Pat . from_chars ( '' . join ( sets ) , optimal ) elif optimal : pat = Pat . from_chars ( optimal = optimal ) elif sets : pat = Pat ( sets ) else : pat = Pat ( ) if argument . isdigit ( ) : count = int ( argument ) try : pattern = pat . create ( count ) except IndexError : logging . exception ( _ ( 'Failed to create the pattern.' ) ) sys . exit ( 1 ) else : if output : output . write ( pattern ) elif clipboard : copy ( pattern ) else : print ( pattern ) else : target = argument try : index = pat . locate ( target , big_endian ) except KeyError : logging . exception ( _ ( 'Failed to locate the pattern.' ) ) sys . exit ( 1 ) else : print ( index ) sys . exit ( 0 )
Customizable Lazy Exploit Pattern Utility .
16,066
def mousePressEvent ( self , event ) : if event . x ( ) < 50 : super ( PlotMenuBar , self ) . mousePressEvent ( event ) else : event . ignore ( )
Marshalls behaviour depending on location of the mouse click
16,067
def add ( self , command_template , job_class ) : job = JobTemplate ( command_template . alias , command_template = command_template , depends_on = command_template . depends_on , queue = self . queue , job_class = job_class ) self . queue . push ( job )
Given a command template add it as a job to the queue .
16,068
def run ( self ) : iterations = 0 queue = self . queue . tick ( ) while True : try : next ( queue ) except StopIteration : break iterations += 1 sleep ( self . sleep_time ) return iterations
Begins the runtime execution .
16,069
def make_dynamic_class ( typename , field_names ) : if isinstance ( field_names , basestring ) : field_names = field_names . replace ( "," , " " ) . split ( ) field_names = map ( str , field_names ) safe_fields_names = map ( _encode_property_name , field_names ) attr = dict ( ( safe_name , _property ( name ) ) for name , safe_name in zip ( field_names , safe_fields_names ) ) attr [ '__doc__' ] = typename attr [ '__identifier__' ] = "dolphin" attr [ '__init__' ] = _dynamic__init attr [ '__getitem__' ] = lambda self , key : self . __dict__ . get ( key ) attr [ '__setitem__' ] = _dynamic__setitem attr [ '__iter__' ] = lambda self : iter ( self . __dict__ ) attr [ '__repr__' ] = lambda self : "{%s}" % ( ', ' . join ( [ "%s=%r" % ( key , self [ key ] ) for key in sorted ( self . __dict__ . keys ( ) ) ] ) ) return type ( typename , ( object , ) , attr )
a factory function to create type dynamically
16,070
def get_memory_usage ( ) : process = psutil . Process ( os . getpid ( ) ) mem = process . memory_info ( ) . rss return mem / ( 1024 * 1024 )
Gets RAM memory usage
16,071
def create_db ( file_pth ) : conn = sqlite3 . connect ( file_pth ) c = conn . cursor ( ) c . execute ( 'DROP TABLE IF EXISTS library_spectra_source' ) c . execute ( ) c . execute ( 'DROP TABLE IF EXISTS metab_compound' ) c . execute ( ) c . execute ( 'DROP TABLE IF EXISTS library_spectra_meta' ) c . execute ( ) c . execute ( 'DROP TABLE IF EXISTS library_spectra' ) c . execute ( ) c . execute ( 'DROP TABLE IF EXISTS library_spectra_annotation' ) c . execute ( )
Create an empty SQLite database for library spectra .
16,072
def get_connection ( db_type , db_pth , user = None , password = None , name = None ) : if db_type == 'sqlite' : print ( db_pth ) conn = sqlite3 . connect ( db_pth ) elif db_type == 'mysql' : import mysql . connector conn = mysql . connector . connect ( user = user , password = password , database = name ) elif db_type == 'django_mysql' : from django . db import connection as conn else : print ( 'unsupported database type: {}, choices are "sqlite", "mysql" or "django_mysql"' . format ( db_type ) ) return conn
Get a connection to a SQL database . Can be used for SQLite MySQL or Django MySQL database
16,073
def db_dict ( c ) : db_d = { } c . execute ( 'SELECT * FROM library_spectra' ) db_d [ 'library_spectra' ] = [ list ( row ) for row in c ] c . execute ( 'SELECT * FROM library_spectra_meta' ) db_d [ 'library_spectra_meta' ] = [ list ( row ) for row in c ] c . execute ( 'SELECT * FROM library_spectra_annotation' ) db_d [ 'library_spectra_annotations' ] = [ list ( row ) for row in c ] c . execute ( 'SELECT * FROM library_spectra_source' ) db_d [ 'library_spectra_source' ] = [ list ( row ) for row in c ] c . execute ( 'SELECT * FROM metab_compound' ) db_d [ 'metab_compound' ] = [ list ( row ) for row in c ] return db_d
Get a dictionary of the library spectra from a database
16,074
def insert_query_m ( data , table , conn , columns = None , db_type = 'mysql' ) : if len ( data ) > 10000 : _chunk_query ( data , 10000 , columns , conn , table , db_type ) else : if db_type == 'sqlite' : type_sign = '?' else : type_sign = '%s' type_com = type_sign + ", " type = type_com * ( len ( data [ 0 ] ) - 1 ) type = type + type_sign if columns : stmt = "INSERT INTO " + table + "( " + columns + ") VALUES (" + type + ")" else : stmt = "INSERT INTO " + table + " VALUES (" + type + ")" cursor = conn . cursor ( ) cursor . executemany ( stmt , data ) conn . commit ( )
Insert python list of tuples into SQL table
16,075
def _chunk_query ( l , n , cn , conn , table , db_type ) : [ insert_query_m ( l [ i : i + n ] , table , conn , cn , db_type ) for i in range ( 0 , len ( l ) , n ) ]
Call for inserting SQL query in chunks based on n rows
16,076
async def send ( from_addr , to_addrs , subject = "Ellis" , msg = "" , ** kwargs ) : async with SMTP ( ) as client : msg = "Subject: {0}\n\n{1}" . format ( subject , msg ) if kwargs : values = "\n" . join ( [ "{0}: {1}" . format ( k , v ) for k , v in kwargs . items ( ) ] ) msg = ( "{0}\n\nThe following variables have been caught:" "\n{1}" . format ( msg , values ) ) try : await client . sendmail ( from_addr , to_addrs , msg ) except : raise
Sends an e - mail to the provided address .
16,077
def show_correlation_matrix ( self , correlation_matrix ) : cr_plot . create_correlation_matrix_plot ( correlation_matrix , self . title , self . headers_to_test ) pyplot . show ( )
Shows the given correlation matrix as image
16,078
def save_to_file ( self , out_file ) : correlation_matrix = self . get_correlation_matrix_from_columns ( ) cr_plot . create_correlation_matrix_plot ( correlation_matrix , self . title , self . headers_to_test ) fig = pyplot . gcf ( ) fig . set_size_inches ( 23.4 , 23.4 ) pyplot . savefig ( out_file , dpi = 120 )
Saves correlation matrix of selected headers
16,079
def save_correlation_matrix_from_folder ( folder_path ) : file_name = "output-" + str ( int ( time . time ( ) ) ) output_folder = os . path . join ( folder_path , file_name ) os . makedirs ( output_folder ) for file in list_content ( folder_path , False , False ) : if is_file ( file ) and str ( file ) . endswith ( "csv" ) : print ( "Analysing file " , str ( file ) ) file_name = Document ( file ) . name . strip ( ) output_file_name = file_name + ".png" output_file_path = os . path . join ( output_folder , output_file_name ) headers , data = CSVParser . get_headers_data ( file ) matrix = CorrelationMatrix ( "Correlation of logs data for file " + file_name , headers , headers , data ) matrix . save_to_file ( output_file_path )
Saves each file s correlation matrix of common headers
16,080
def run ( self , * args , ** kwargs ) : pm = MayaPluginManager . get ( ) guerilla = pm . get_plugin ( "GuerillaMGMT" ) mayawin = maya_main_window ( ) guerilla . run ( parent = mayawin )
Start the tool
16,081
def login_github ( token_path = None , token = None ) : token = codetools . github_token ( token_path = token_path , token = token ) g = Github ( token ) debug_ratelimit ( g ) return g
Log into GitHub using an existing token .
16,082
def find_tag_by_name ( repo , tag_name , safe = True ) : tagfmt = 'tags/{ref}' . format ( ref = tag_name ) try : ref = repo . get_git_ref ( tagfmt ) if ref and ref . ref : return ref except github . UnknownObjectException : if not safe : raise return None
Find tag by name in a github Repository
16,083
def debug_ratelimit ( g ) : assert isinstance ( g , github . MainClass . Github ) , type ( g ) debug ( "github ratelimit: {rl}" . format ( rl = g . rate_limiting ) )
Log debug of github ratelimit information from last API call
16,084
def get_default_ref ( repo ) : assert isinstance ( repo , github . Repository . Repository ) , type ( repo ) default_branch = repo . default_branch default_branch_ref = "heads/{ref}" . format ( ref = default_branch ) try : head = repo . get_git_ref ( default_branch_ref ) except github . RateLimitExceededException : raise except github . GithubException as e : msg = "error getting ref: {ref}" . format ( ref = default_branch_ref ) raise CaughtRepositoryError ( repo , e , msg ) from None return head
Return a github . GitRef object for the HEAD of the default branch .
16,085
def main ( argv = None ) : t = CrfTokenizer ( ) print t . tokenize ( "This is a sentence." ) print t . tokenize ( "Buy???This...Now!!!" ) print t . tokenize ( "The <bold>only</bold> source." ) print t . tokenize ( "The<bold>only</bold>source." ) print t . tokenize ( "Big&gt;little." ) print t . tokenize ( "Big & little." ) print t . tokenize ( "blond&curly." ) print t . tokenize ( "&brokenHtml" ) t . setGroupPunctuation ( True ) t . setRecognizeHtmlTags ( True ) t . setRecognizeHtmlEntities ( True ) print t . tokenize ( "Buy???This...Now!!!" ) print t . tokenize ( "The <bold>only</bold> source." ) print t . tokenize ( "The<bold>only</bold>source." ) print t . tokenize ( "Big&gt;little." ) print t . tokenize ( "Big & little." ) print t . tokenize ( "blond&curly." ) print t . tokenize ( "&brokenHtml" ) t . setSkipHtmlTags ( True ) t . setSkipHtmlEntities ( True ) print t . tokenize ( "Buy???This...Now!!!" ) print t . tokenize ( "The <bold>only</bold> source." ) print t . tokenize ( "The<bold>only</bold>source." ) print t . tokenize ( "Big&gt;little." ) print t . tokenize ( "Big & little." ) print t . tokenize ( "blond&curly." ) print t . tokenize ( "&brokenHtml" ) t . setTokenPrefix ( "X:" ) print t . tokenize ( "Tokenize with prefixes." ) t . setTokenPrefix ( None ) print t . tokenize ( "No more prefixes." ) t . setRecognizePunctuation ( False ) print t . tokenize ( "This is a sentence." ) print t . tokenize ( "Buy???This...Now!!!" ) print t . tokenize ( "The <bold>only</bold> source." ) print t . tokenize ( "The<bold>only</bold>source." ) print t . tokenize ( "Big&gt;little." ) print t . tokenize ( "Big & little." ) print t . tokenize ( "blond&curly." ) print t . tokenize ( "&brokenHtml" ) print t . tokenize ( "A line break goes here\n\t \rand a new line starts" ) t . setRecognizeLinebreaks ( True ) print t . tokenize ( "A line break goes here\n\r \rand a new line starts" )
this is called if run from command line
16,086
def verifyInputs ( self , mode ) : if len ( self . _aichans ) < 1 : failmsg = "Must have at least one input channel selected" QtGui . QMessageBox . warning ( self , "Invalid Setting" , failmsg ) return False if mode == 'chart' : if self . ui . aifsSpnbx . value ( ) * self . fscale > 100000 : QtGui . QMessageBox . warning ( self , "Invalid Input" , "Recording samplerate cannot exceed 100kHz for chart acquisition" ) return False elif mode is not None : if self . ui . tabGroup . currentWidget ( ) . objectName ( ) == 'tabExplore' : self . ui . exploreStimEditor . saveToObject ( ) failmsg = self . ui . exploreStimEditor . verify ( self . ui . windowszSpnbx . value ( ) ) if failmsg : QtGui . QMessageBox . warning ( self , "Invalid Input" , failmsg ) return False elif self . ui . tabGroup . currentWidget ( ) . objectName ( ) == 'tabProtocol' : protocol_model = self . acqmodel . protocol_model ( ) failure = protocol_model . verify ( float ( self . ui . windowszSpnbx . value ( ) ) ) if failure : QtGui . QMessageBox . warning ( self , "Invalid Input" , failure ) return False elif self . ui . tabGroup . currentWidget ( ) . objectName ( ) == 'tabCalibrate' : if len ( self . _aichans ) > 1 : failmsg = "Speaker calibration only supported for single channel, currently {} channels selected; select 1 input channel." . format ( len ( self . _aichans ) ) QtGui . QMessageBox . warning ( self , "Invalid Setting" , failmsg ) return False if self . ui . calibrationWidget . ui . savecalCkbx . isChecked ( ) or not self . ui . calibrationWidget . currentSelection ( ) == 'Tone Curve' : calibration_stimulus = self . acqmodel . calibration_stimulus ( 'noise' ) self . ui . calibrationWidget . saveToObject ( ) else : calibration_stimulus = self . acqmodel . calibration_stimulus ( 'tone' ) failmsg = calibration_stimulus . verify ( float ( self . ui . windowszSpnbx . value ( ) ) ) if failmsg : QtGui . QMessageBox . 
warning ( self , "Invalid Input" , failmsg ) return False failmsg = calibration_stimulus . verifyExpanded ( samplerate = self . ui . aifsSpnbx . value ( ) ) if failmsg : failmsg = failmsg . replace ( 'Generation' , 'Recording' ) QtGui . QMessageBox . warning ( self , "Invalid Input" , failmsg ) return False if self . advanced_options [ 'use_attenuator' ] and not self . acqmodel . attenuator_connection ( ) : failmsg = "Error Connection to attenuator, make sure it it turned on and connected, and try again" QtGui . QMessageBox . warning ( self , "Connection Error" , failmsg ) return False return True
Goes through and checks all stimuli and input settings are valid and consistent . Prompts user with a message if there is a condition that would prevent acquisition .
16,087
def updateUnitLabels ( self , tscale , fscale ) : AbstractEditorWidget . updateScales ( tscale , fscale ) SmartDelegate . updateScales ( tscale , fscale ) AbstractEditorWidget . purgeDeletedWidgets ( ) self . tscale = tscale time_inputs = self . timeInputs + AbstractEditorWidget . tunit_fields for field in time_inputs : field . setScale ( tscale ) self . fscale = fscale frequency_inputs = self . frequencyInputs + AbstractEditorWidget . funit_fields for field in frequency_inputs : field . setScale ( fscale )
When the GUI unit scale changes it is neccessary to update the unit labels on all fields throughout the GUI . This handles The main window and also notifys other windows to update
16,088
def reset_device_channels ( self ) : self . ui . aochanBox . clear ( ) devname = self . advanced_options [ 'device_name' ] device_list = get_devices ( ) if devname in device_list : cnames = get_ao_chans ( devname ) self . ui . aochanBox . addItems ( cnames ) cnames = get_ai_chans ( devname ) self . _aichans = [ chan for chan in self . _aichans if chan in cnames ] self . _aichan_details = { chan : deets for chan , deets in self . _aichan_details . items ( ) if chan in cnames } elif devname == '' and len ( device_list ) > 0 : devname = device_list [ 0 ] cnames = get_ao_chans ( devname ) self . ui . aochanBox . addItems ( cnames ) self . advanced_options [ 'device_name' ] = devname self . _aichans = [ ] self . _aichan_details = { } else : self . _aichans = [ ] self . _aichan_details = { } self . ui . chanNumLbl . setText ( str ( len ( self . _aichans ) ) ) self . display . removeResponsePlot ( * self . display . responseNameList ( ) ) self . display . addResponsePlot ( * self . _aichans ) for name , deets in self . _aichan_details . items ( ) : self . display . setThreshold ( deets [ 'threshold' ] , name ) self . display . setRasterBounds ( deets [ 'raster_bounds' ] , name ) self . display . setAbs ( deets [ 'abs' ] , name ) self . ui . trigchanBox . addItems ( [ '/' + devname + '/PFI0' , '/' + devname + '/PFI1' ] )
Updates the input channel selection boxes based on the current device name stored in this object
16,089
def saveInputs ( self , fname ) : if not fname : return appdir = systools . get_appdir ( ) if not os . path . isdir ( appdir ) : os . makedirs ( appdir ) fname = os . path . join ( appdir , fname ) savedict = { } savedict [ 'binsz' ] = self . ui . binszSpnbx . value ( ) savedict [ 'aifs' ] = self . ui . aifsSpnbx . value ( ) savedict [ 'tscale' ] = self . tscale savedict [ 'fscale' ] = self . fscale savedict [ 'saveformat' ] = self . saveformat savedict [ 'ex_nreps' ] = self . ui . exploreStimEditor . repCount ( ) savedict [ 'reprate' ] = self . ui . reprateSpnbx . value ( ) savedict [ 'windowsz' ] = self . ui . windowszSpnbx . value ( ) savedict [ 'specargs' ] = self . specArgs savedict [ 'viewSettings' ] = self . viewSettings savedict [ 'calvals' ] = self . calvals savedict [ 'calparams' ] = self . acqmodel . calibration_template ( ) savedict [ 'calreps' ] = self . ui . calibrationWidget . ui . nrepsSpnbx . value ( ) savedict [ 'mphonesens' ] = self . ui . mphoneSensSpnbx . value ( ) savedict [ 'mphonedb' ] = self . ui . mphoneDBSpnbx . value ( ) savedict [ 'vocalpaths' ] = Vocalization . paths savedict [ 'aichans' ] = self . _aichans savedict [ 'aichan_details' ] = self . _aichan_details savedict [ 'explorestims' ] = self . ui . exploreStimEditor . saveTemplate ( ) savedict [ 'advanced_options' ] = self . advanced_options savedict [ 'stim_view_defaults' ] = StimulusView . getDefaults ( ) savedict [ 'tuning_curve' ] = TCFactory . defaultInputs savedict = convert2native ( savedict ) try : with open ( fname , 'w' ) as jf : json . dump ( savedict , jf ) except : logger = logging . getLogger ( 'main' ) logger . exception ( "Unable to save app data to file: {}" . format ( fname ) )
Save the values in the input fields so they can be loaded next time the GUI is run
16,090
def closeEvent ( self , event ) : self . acqmodel . stop_listening ( ) self . saveInputs ( self . inputsFilename ) settings = QtCore . QSettings ( "audiolab" ) settings . setValue ( "geometry" , self . saveGeometry ( ) ) settings . setValue ( "windowState" , self . saveState ( ) ) logger = logging . getLogger ( 'main' ) logger . info ( 'All user settings saved' ) self . garbage_timer . stop ( ) gc . enable ( )
Closes listening threads and saves GUI data for later use .
16,091
def ordered_async_call ( func_list ) : def worker ( function , f_args , f_kwargs , queue , index ) : response = { 'index' : index , 'data' : None , 'error' : None } try : response [ 'data' ] = function ( * f_args , ** f_kwargs ) except Exception as e : response [ 'error' ] = e queue . put ( response ) queue = Queue ( ) processes = [ Process ( target = worker , args = ( func , args , kwargs , queue , i ) ) for i , ( func , args , kwargs ) in enumerate ( func_list ) ] for process in processes : process . start ( ) response_list = [ ] for process in processes : process . join ( ) response = queue . get ( ) if response [ 'error' ] : raise response [ 'error' ] response_list . append ( response ) return [ content [ 'data' ] for content in sorted ( response_list , key = lambda x : x [ 'index' ] ) ]
Runs the list of function asynchronously returns the response maintaining the order
16,092
def add_params_to_url ( url , params ) : url_parts = list ( urlparse . urlparse ( url ) ) query = dict ( urlparse . parse_qsl ( url_parts [ 4 ] ) ) query . update ( params ) url_parts [ 4 ] = urlencode ( query ) return urlparse . urlunparse ( url_parts )
Adds params to url
16,093
def is_internet_on ( host = "8.8.8.8" , port = 53 , timeout = 3 ) : socket . setdefaulttimeout ( timeout ) socket . socket ( socket . AF_INET , socket . SOCK_STREAM ) . connect ( ( host , port ) )
Checks if machine has internet connection
16,094
def wait_until_internet ( time_between_attempts = 3 , max_attempts = 10 ) : counter = 0 while not is_internet_on ( ) : time . sleep ( time_between_attempts ) counter += 1 if counter > max_attempts : return False return True
Waits until machine has internet
16,095
def transform_key ( startkey , seed_key , seed_rand , rounds ) : masterkey = startkey aes = AES . new ( seed_key , AES . MODE_ECB ) for _i in range ( rounds ) : masterkey = aes . encrypt ( masterkey ) masterkey = hashlib . sha256 ( masterkey ) . digest ( ) return hashlib . sha256 ( seed_rand + masterkey ) . digest ( )
This method creates the key to decrypt the database .
16,096
def complete ( command_line , current_token , position , shell : arg ( choices = ( 'bash' , 'fish' ) ) ) : position = int ( position ) tokens = shlex . split ( command_line [ : position ] ) all_argv , run_argv , command_argv = run . partition_argv ( tokens [ 1 : ] ) run_args = run . parse_args ( run_argv ) module = run_args . get ( 'commands_module' ) module = module or DEFAULT_COMMANDS_MODULE module = normalize_path ( module ) try : collection = Collection . load_from_module ( module ) except Exception : collection = { } found_command = find_command ( collection , tokens ) or run if current_token : if current_token . startswith ( '-' ) : if current_token not in found_command . option_map : print_command_options ( found_command , current_token ) else : print_commands ( collection , shell ) path = os . path . expanduser ( current_token ) path = os . path . expandvars ( path ) paths = glob . glob ( '%s*' % path ) if paths : for entry in paths : if os . path . isdir ( entry ) : print ( '%s/' % entry ) else : print ( entry ) else : option = found_command . option_map . get ( tokens [ - 1 ] ) if option and option . takes_value : if option . choices : for choice in option . choices : print ( choice ) else : for entry in os . listdir ( ) : if os . path . isdir ( entry ) : print ( '%s/' % entry ) else : print ( entry ) else : print_command_options ( found_command ) print_commands ( collection , shell )
Find completions for current command .
16,097
def install_package ( self , name , index = None , force = False , update = False ) : cmd = 'install' if force : cmd = '{0} {1}' . format ( cmd , '--force-reinstall' ) if update : cmd = '{0} {1}' . format ( cmd , '--update' ) if index : cmd = '{0} {1}' . format ( cmd , '--index-url {0}' . format ( index ) ) self . pip ( '{0} {1}' . format ( cmd , name ) )
Install a given package .
16,098
def install_requirements ( self , path , index = None ) : cmd = 'install -r {0}' . format ( path ) if index : cmd = 'install --index-url {0} -r {1}' . format ( index , path ) self . pip ( cmd )
Install packages from a requirements . txt file .
16,099
def get_next ( weekday , including_today = False ) : now = datetime . datetime . now ( ) if now . weekday ( ) == weekday . value and including_today : delta = datetime . timedelta ( days = 0 ) elif now . weekday ( ) == weekday . value and not including_today : delta = datetime . timedelta ( days = 7 ) else : delta = datetime . timedelta ( ( 7 + weekday . value - now . weekday ( ) ) % 7 ) return Day ( now + delta ) . get_just_date ( )
Gets next day of week