idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
54,300
def decode(self, binSequence):
    """Decode a binary sequence back into a string.

    Each element is matched against the bit masks in ``self.forma``
    (indexed through ``self.charToBin``); every matching character is
    appended, '/'-separated.

    Raises:
        KeyError: if an element matches no known character mask.
    """
    try:
        # Legacy behaviour: if the first element is itself iterable,
        # decode only that nested sequence.
        binSeq = iter(binSequence[0])
    except TypeError:  # fixed: original used Python-2 `except TypeError, te`
        binSeq = binSequence
    ret = ''
    for b in binSeq:
        ch = ''
        for c in self.charToBin:
            # Parenthesised: the original `b & mask > 0` parsed as
            # `b & (mask > 0)` due to comparison precedence, testing
            # only the low bit of b.
            if (b & self.forma[self.charToBin[c]]) > 0:
                ch += c + '/'
        if ch == '':
            raise KeyError('Key %d unknown, bad format' % b)
        ret += ch[:-1]
    return ret
decodes a binary sequence to return a string
54,301
def parse(self, filename, gziped=False, stream=False):
    """Open a VCF-like file and parse its header (legend) section.

    Reads '##key=value' meta lines (including structured
    '##key=<ID=...,field=value,...>' entries) into self.meta, then the
    '#CHROM' column-header line into self.legend (name -> index) and
    self.dnegel (index -> name).  When *stream* is False, the remaining
    data lines are slurped into self.lines and the file is closed;
    otherwise the open handle stays on self.f for streaming.
    """
    self.stream = stream
    if gziped:
        self.f = gzip.open(filename)
    else:
        self.f = open(filename)
    self.filename = filename
    self.gziped = gziped
    lineId = 0
    inLegend = True
    # inLegend is only ever left via the '#CHROM' break below.
    while inLegend:
        ll = self.f.readline()
        # Normalise CR/CRLF endings, then strip the newline.
        l = ll.replace('\r', '\n').replace('\n', '')
        if l[:2] == '##':
            eqPos = l.find('=')
            key = l[2:eqPos]
            values = l[eqPos + 1:].strip()
            if l[eqPos + 1] != '<':
                # Simple '##key=value' meta line.
                self.meta[key] = values
            else:
                # Structured '##key=<ID=...,...>' entry, keyed by its ID.
                if key not in self.meta:
                    self.meta[key] = {}
                svalues = l[eqPos + 2:-1].split(',')
                idKey = svalues[0].split('=')[1]
                self.meta[key][idKey] = {}
                i = 1
                for v in svalues[1:]:
                    sv = v.split("=")
                    field = sv[0]
                    value = sv[1]
                    if field.lower() == 'description':
                        # The description may itself contain commas:
                        # re-join the remaining pieces and strip quotes.
                        self.meta[key][idKey][field] = ','.join(svalues[i:])[len(field) + 2:-1]
                        break
                    self.meta[key][idKey][field] = value
                    i += 1
        elif l[:6] == '#CHROM':
            # Column-header line terminates the legend section.
            sl = l.split('\t')
            for i in range(len(sl)):
                self.legend[sl[i]] = i
                self.dnegel[i] = sl[i]
            break
        lineId += 1
    if not stream:
        self.lines = self.f.readlines()
        self.f.close()
opens a file
54,302
def _decode8 ( self , offset ) : str_len , skip = self . _decode_length ( offset , 1 ) offset += skip encoded_bytes , skip = self . _decode_length ( offset , 1 ) offset += skip data = self . m_charbuff [ offset : offset + encoded_bytes ] assert self . m_charbuff [ offset + encoded_bytes ] == 0 , "UTF-8 String is not null terminated! At offset={}" . format ( offset ) return self . _decode_bytes ( data , 'utf-8' , str_len )
Decode an UTF - 8 String at the given offset
54,303
def _decode16 ( self , offset ) : str_len , skip = self . _decode_length ( offset , 2 ) offset += skip encoded_bytes = str_len * 2 data = self . m_charbuff [ offset : offset + encoded_bytes ] assert self . m_charbuff [ offset + encoded_bytes : offset + encoded_bytes + 2 ] == b"\x00\x00" , "UTF-16 String is not null terminated! At offset={}" . format ( offset ) return self . _decode_bytes ( data , 'utf-16' , str_len )
Decode an UTF - 16 String at the given offset
54,304
def _decode_length ( self , offset , sizeof_char ) : sizeof_2chars = sizeof_char << 1 fmt = "<2{}" . format ( 'B' if sizeof_char == 1 else 'H' ) highbit = 0x80 << ( 8 * ( sizeof_char - 1 ) ) length1 , length2 = unpack ( fmt , self . m_charbuff [ offset : ( offset + sizeof_2chars ) ] ) if ( length1 & highbit ) != 0 : length = ( ( length1 & ~ highbit ) << ( 8 * sizeof_char ) ) | length2 size = sizeof_2chars else : length = length1 size = sizeof_char if sizeof_char == 1 : assert length <= 0x7FFF , "length of UTF-8 string is too large! At offset={}" . format ( offset ) else : assert length <= 0x7FFFFFFF , "length of UTF-16 string is too large! At offset={}" . format ( offset ) return length , size
Generic Length Decoding at offset of string
54,305
def _apk_analysis(self):
    """Run analysis on the APK file.

    Parses AndroidManifest.xml (when present and valid) and populates
    the package name, version info, requested (<uses-permission>) and
    declared (<permission>) permissions.  Marks self.valid_apk = True
    once a usable manifest tree was parsed.
    """
    i = "AndroidManifest.xml"
    try:
        manifest_data = self.zip.read(i)
    except KeyError:
        log.warning("Missing AndroidManifest.xml. Is this an APK file?")
    else:
        ap = AXMLPrinter(manifest_data)
        if not ap.is_valid():
            log.error("Error while parsing AndroidManifest.xml - is the file valid?")
            return
        self.axml[i] = ap
        self.xml[i] = self.axml[i].get_xml_obj()
        if self.axml[i].is_packed():
            log.warning("XML Seems to be packed, operations on the AndroidManifest.xml might fail.")
        if self.xml[i] is not None:
            if self.xml[i].tag != "manifest":
                log.error("AndroidManifest.xml does not start with a <manifest> tag! Is this a valid APK?")
                return
            self.package = self.get_attribute_value("manifest", "package")
            self.androidversion["Code"] = self.get_attribute_value("manifest", "versionCode")
            self.androidversion["Name"] = self.get_attribute_value("manifest", "versionName")
            # <uses-permission> entries: de-duplicated names plus
            # (name, maxSdkVersion) pairs.
            permission = list(self.get_all_attribute_value("uses-permission", "name"))
            self.permissions = list(set(self.permissions + permission))
            for uses_permission in self.find_tags("uses-permission"):
                self.uses_permissions.append([
                    self.get_value_from_tag(uses_permission, "name"),
                    self._get_permission_maxsdk(uses_permission)])
            # Permissions the app itself declares.
            for d_perm_item in self.find_tags('permission'):
                d_perm_name = self._get_res_string_value(str(self.get_value_from_tag(d_perm_item, "name")))
                d_perm_label = self._get_res_string_value(str(self.get_value_from_tag(d_perm_item, "label")))
                d_perm_description = self._get_res_string_value(str(self.get_value_from_tag(d_perm_item, "description")))
                d_perm_permissionGroup = self._get_res_string_value(str(self.get_value_from_tag(d_perm_item, "permissionGroup")))
                d_perm_protectionLevel = self._get_res_string_value(str(self.get_value_from_tag(d_perm_item, "protectionLevel")))
                d_perm_details = {
                    "label": d_perm_label,
                    "description": d_perm_description,
                    "permissionGroup": d_perm_permissionGroup,
                    "protectionLevel": d_perm_protectionLevel,
                }
                self.declared_permissions[d_perm_name] = d_perm_details
            # NOTE(review): placement inferred from minified source —
            # the APK is considered valid once the manifest was usable.
            self.valid_apk = True
Run analysis on the APK file .
54,306
def is_multidex(self):
    """Return True if the APK contains more than one classes*.dex file."""
    # Raw string with an escaped dot: the original "^classes(\d+)?.dex$"
    # let the unescaped '.' match any character (e.g. "classesXdex").
    dexre = re.compile(r"^classes(\d+)?\.dex$")
    return len([instance for instance in self.get_files() if dexre.search(instance)]) > 1
Test if the APK has multiple DEX files
54,307
def _format_value ( self , value ) : if len ( value ) > 0 : if value [ 0 ] == "." : value = self . package + value else : v_dot = value . find ( "." ) if v_dot == 0 : value = self . package + "." + value elif v_dot == - 1 : value = self . package + "." + value return value
Format a value with packagename if not already set
54,308
def getAttributeName(self, index):
    """Return the string name of the attribute at *index*.

    Falls back to the system resource table when the string pool has no
    entry, and to a placeholder name for unknown system attributes.
    """
    offset = self._get_attribute_offset(index)
    name_idx = self.m_attributes[offset + const.ATTRIBUTE_IX_NAME]
    res = self.sb[name_idx]
    if res:
        return res
    attr = self.m_resourceIDs[name_idx]
    inverse = public.SYSTEM_RESOURCES['attributes']['inverse']
    if attr in inverse:
        return 'android:' + inverse[attr]
    return 'android:UNKNOWN_SYSTEM_ATTRIBUTE_{:08x}'.format(attr)
Returns the String which represents the attribute name
54,309
def getAttributeValueType(self, index):
    """Return the type code of the attribute value at *index*."""
    return self.m_attributes[self._get_attribute_offset(index) + const.ATTRIBUTE_IX_VALUE_TYPE]
Return the type of the attribute at the given index
54,310
def getAttributeValueData(self, index):
    """Return the raw data word of the attribute value at *index*."""
    return self.m_attributes[self._get_attribute_offset(index) + const.ATTRIBUTE_IX_VALUE_DATA]
Return the data of the attribute at the given index
54,311
def make_cache_key(request):
    """Build a hashable cache key from a request's url and parameters."""
    frozen_headers = frozenset(request._p['header'].items())
    frozen_path = frozenset(request._p['path'].items())
    frozen_query = frozenset(request._p['query'])
    return (request.url, frozen_headers, frozen_path, frozen_query)
Generate a cache key from request object data
54,312
def check_cache(cache):
    """Validate *cache* for esipy use.

    False maps to a DictCache, None to a DummyCache; anything else must
    implement BaseCache.
    """
    if isinstance(cache, BaseCache):
        return cache
    if cache is False:
        return DictCache()
    if cache is None:
        return DummyCache()
    raise ValueError('Provided cache must implement BaseCache')
check if a cache fits esipy needs or not
54,313
def get_cache_time_left(expires_header):
    """Return seconds remaining before *expires_header* (negative if past)."""
    epoch = datetime(1970, 1, 1)
    # Both timestamps computed as naive-UTC seconds since the epoch.
    expire_ts = (datetime(*parsedate(expires_header)[:6]) - epoch).total_seconds()
    now_ts = (datetime.utcnow() - epoch).total_seconds()
    return int(expire_ts) - int(now_ts)
return the time left in second for an expires header
54,314
def __get_or_create_app(self, url, cache_key):
    """Get the swagger app from cache, revalidate it, or build a new one.

    Cache entries are (app, response_headers, expiry) triples.  A cached
    app is returned directly while still fresh (per its 'expires' header
    when self.expire is None, otherwise per the stored expiry); stale
    entries are revalidated with a conditional HEAD (ETag) before
    falling back to a full App.create().
    """
    headers = {"Accept": "application/json"}
    app_url = '%s?datasource=%s' % (url, self.datasource)
    cached = self.cache.get(cache_key, (None, None, 0))
    if cached is None or len(cached) != 3:
        # Unknown / legacy cache shape: drop it and start clean.
        self.cache.invalidate(cache_key)
        cached_app, cached_headers, cached_expiry = (cached, None, 0)
    else:
        cached_app, cached_headers, cached_expiry = cached
    if cached_app is not None and cached_headers is not None:
        expires = cached_headers.get('expires', None)
        cache_timeout = -1
        if self.expire is None and expires is not None:
            # Honour the server-provided Expires header.
            cache_timeout = get_cache_time_left(cached_headers['expires'])
            if cache_timeout >= 0:
                return cached_app
        else:
            # self.expire == 0 means "cache forever".
            if self.expire == 0 or cached_expiry >= time.time():
                return cached_app
        etag = cached_headers.get('etag', None)
        if etag is not None:
            headers['If-None-Match'] = etag
        if ((expires is None or cache_timeout < 0 or cached_expiry < time.time()) and etag is None):
            # Stale with no way to revalidate: evict it.
            self.cache.invalidate(cache_key)
    timeout = 0
    if self.expire is not None and self.expire > 0:
        timeout = time.time() + self.expire
    res = requests.head(app_url, headers=headers)
    if res.status_code == 304 and cached_app is not None:
        # Not modified: refresh stored headers/expiry and reuse the app.
        self.cache.set(cache_key, (cached_app, res.headers, timeout))
        return cached_app
    app = App.create(app_url)
    if self.caching:
        self.cache.set(cache_key, (app, res.headers, timeout))
    return app
Get the app from cache or generate a new one if required
54,315
def clear_cached_endpoints(self, prefix=None):
    """Invalidate all cached endpoints, meta swagger url included."""
    effective_prefix = self.cache_prefix if prefix is None else prefix
    for endpoint in self.app.op.values():
        self.cache.invalidate('%s:app:%s' % (effective_prefix, endpoint.url))
    # NOTE: the meta key deliberately uses self.cache_prefix rather than
    # the prefix argument, matching the original behaviour.
    self.cache.invalidate('%s:app:meta_swagger_url' % self.cache_prefix)
    self.app = None
Invalidate all cached endpoints meta included
54,316
def _hash ( data ) : hash_algo = hashlib . new ( 'md5' ) hash_algo . update ( pickle . dumps ( data ) ) return 'esi_' + hash_algo . hexdigest ( )
generate a hash from data object to be used as cache key
54,317
def fit_from_image(self, data, voxelsize, seeds, unique_cls):
    """Compute feature vectors from an image and train the model on them."""
    feature_vectors, selected_cls = self.features_from_image(data, voxelsize, seeds, unique_cls)
    self.fit(feature_vectors, selected_cls)
Computes the feature vector and trains the model .
54,318
def save(self, filename):
    """Save the model to a pickle (dill) file.

    The external feature function ("fv_extern") is NOT stored; it must
    be re-attached after loading.
    """
    import dill
    tmpmodelparams = self.modelparams.copy()
    fv_extern_name = None  # NOTE(review): assigned but never used in the original.
    if "fv_extern" in tmpmodelparams:
        tmpmodelparams.pop("fv_extern")
    sv = {
        "modelparams": tmpmodelparams,
        "mdl": self.mdl,
    }
    # Log the pickled (unstripped) parameters for debugging.
    sss = dill.dumps(self.modelparams)
    logger.debug("pickled " + str(sss))
    dill.dump(sv, open(filename, "wb"))
Save model to pickle file . External feature function is not stored
54,319
def load(self, mdl_file):
    """Load a model from file.

    fv_type is not set by this function; it is expected to be set
    beforehand.
    """
    import dill as pickle
    mdl_file_e = op.expanduser(mdl_file)
    sv = pickle.load(open(mdl_file_e, "rb"))
    self.mdl = sv["mdl"]
    # Merge stored parameters over the current ones.
    self.modelparams.update(sv["modelparams"])
    logger.debug("loaded model from path: " + mdl_file_e)
load model from file . fv_type is not set with this function . It is expected to set it before .
54,320
def features_from_image(self, data, voxelsize, seeds=None, unique_cls=None):
    """Compute feature vectors from a 3D image.

    Returns either the feature vectors alone or, when *seeds* is given
    (intensity path), a (features, seed_classes) pair restricted to
    voxels whose seed value is in *unique_cls*.
    """
    fv_type = self.modelparams["fv_type"]
    logger.debug("fv_type " + fv_type)
    fv = []
    if fv_type == "intensity":
        # One scalar feature per voxel.
        fv = data.reshape(-1, 1)
        if seeds is not None:
            logger.debug("seeds: %s", scipy.stats.describe(seeds, axis=None))
            sd = seeds.reshape(-1, 1)
            # Keep only voxels seeded with one of the requested classes.
            selection = np.in1d(sd, unique_cls)
            fv = fv[selection]
            sd = sd[selection]
            return fv, sd
        return fv
    elif fv_type in ("fv001", "FV001", "intensity_and_blur"):
        return features.fv_function_intensity_and_smoothing(data, voxelsize, seeds, unique_cls)
    elif fv_type == "fv_extern":
        # User-supplied feature function.
        fv_function = self.modelparams["fv_extern"]
        return fv_function(data, voxelsize, seeds, unique_cls)
    else:
        logger.error("Unknown feature vector type: " + self.modelparams["fv_type"])
        return fv
Input data is 3d image
54,321
def _fit_one_class(self, clx, cl):
    """Train class number *cl* with data *clx*.

    The estimator type is chosen by modelparams["type"]; the trained
    model is stored in self.mdl[cl] (or, for "stored", self.mdl is
    replaced wholesale from a pickle file).
    """
    logger.debug("clx " + str(clx[:10, :]))
    logger.debug("clx type" + str(clx.dtype))
    logger.debug("_fit()")
    if self.modelparams["adaptation"] == "original_data":
        # Adaptation mode: keep an already-trained class model.
        if cl in self.mdl.keys():
            return
    logger.debug("training continues")
    if self.modelparams["type"] == "gmmsame":
        if len(clx.shape) == 1:
            # Deprecated path: 1-D input reshaped to a column vector.
            logger.warning("reshaping in train will be removed. Use \ \ntrainFromImageAndSeeds() function")
            print("Warning deprecated feature in train() function")
            clx = clx.reshape(-1, 1)
        gmmparams = self.modelparams["params"]
        self.mdl[cl] = sklearn.mixture.GaussianMixture(**gmmparams)
        self.mdl[cl].fit(clx)
    elif self.modelparams["type"] == "kernel":
        from sklearn.neighbors import KernelDensity
        kernelmodelparams = self.modelparams["params"]
        self.mdl[cl] = KernelDensity(**kernelmodelparams).fit(clx)
    elif self.modelparams["type"] == "gaussian_kde":
        import scipy.stats
        self.mdl[cl] = scipy.stats.gaussian_kde(clx.astype(np.float))
    elif self.modelparams["type"] == "dpgmm":
        gmmparams = self.modelparams["params"]
        self.mdl[cl] = sklearn.mixture.DPGMM(**gmmparams)
        # NOTE(review): data scaled by 0.001 before fitting — presumably a
        # numerical-stability workaround; confirm before changing.
        self.mdl[cl].fit(clx * 0.001)
    elif self.modelparams["type"] == "stored":
        import pickle
        print("stored")
        logger.warning("deprecated use of stored parameters")
        mdl_file = self.modelparams["params"]["mdl_file"]
        self.mdl = pickle.load(open(mdl_file, "rb"))
    else:
        raise NameError("Unknown model type")
Train class number cl with data clx .
54,322
def relabel_squeeze(data):
    """Relabel *data* to consecutive integers 0..k-1, dropping unused values."""
    _, inverse = np.unique(data, return_inverse=True)
    return inverse.reshape(data.shape)
Makes relabeling of data if there are unused values .
54,323
def load(self, filename, fv_extern=None):
    """Read a model stored in *filename*; optionally attach an external
    feature function before rebuilding the model."""
    self.modelparams["mdl_stored_file"] = filename
    if fv_extern is not None:
        self.modelparams["fv_extern"] = fv_extern
    # Rebuild the underlying model from the updated parameters.
    self.mdl = Model(modelparams=self.modelparams)
Read model stored in the file .
54,324
def show_slug_with_level(context, page, lang=None, fallback=True):
    """Template helper: render a page's slug-with-level for *lang*."""
    language = lang or context.get('lang', pages_settings.PAGE_DEFAULT_LANGUAGE)
    resolved = get_page_from_string_or_id(page, language)
    if not resolved:
        return ''
    return {'content': resolved.slug_with_level(language)}
Display slug with level by language .
54,325
def do_get_pages_with_tag(parser, token):
    """Template-tag parser for ``{% ... <tag> as <varname> %}``.

    Returns a GetPagesWithTagNode; raises TemplateSyntaxError on a
    malformed tag.
    """
    bits = token.split_contents()
    if len(bits) != 4:
        raise TemplateSyntaxError('%r expects 2 arguments' % bits[0])
    if bits[-2] != 'as':
        raise TemplateSyntaxError('%r expects "as" as the second last argument' % bits[0])
    # The original assigned varname twice; once is enough.
    varname = bits[-1]
    tag = parser.compile_filter(bits[1])
    return GetPagesWithTagNode(tag, varname)
Return Pages with given tag
54,326
def remove_year(name):
    """Strip every 4-digit run (e.g. a year) from *name*."""
    i = 0
    while i <= len(name) - 4:
        if name[i:i + 4].isdigit():
            # Drop the run and rescan from the start, mirroring the
            # original's recursive restart.
            name = name[:i] + name[i + 4:]
            i = 0
        else:
            i += 1
    return name
Removes year from input
54,327
def remove_brackets(name):
    """Remove bracketed content like '(...)' / '[...]' and stray bracket chars."""
    # Empty out (...)/[...] pairs (non-greedy; mismatched pairs allowed).
    name = re.sub(r"([(\[]).*?([)\]])", r"\g<1>\g<2>", name)
    # Then strip every remaining bracket character in a single pass.
    return name.translate(str.maketrans('', '', '()[]{}'))
Removes brackets from input
54,328
def extract_name_max_chars(name, max_chars=64, blank=" "):
    """Truncate *name* to at most *max_chars*, cutting back to the last *blank*."""
    trimmed = name.strip()
    if len(trimmed) <= max_chars:
        return trimmed
    trimmed = trimmed[:max_chars]
    cut = trimmed.rfind(blank)
    if cut > 0:
        # Back off to the nearest word boundary.
        trimmed = trimmed[:cut]
    return trimmed
Extracts max chars in name truncated to nearest word
54,329
def get_parent_folder_name(file_path):
    """Return the name of the folder directly containing *file_path*."""
    parent_dir = os.path.dirname(os.path.abspath(file_path))
    return os.path.basename(parent_dir)
Finds parent folder of file
54,330
def ls_dir(path, include_hidden=False):
    """List folder contents as full paths, optionally keeping hidden files."""
    entries = []
    for entry in os.listdir(path):
        is_hidden = FileSystem(entry).is_hidden()
        if include_hidden or not is_hidden:
            entries.append(os.path.join(path, entry))
    return list(set(entries))
Finds content of folder
54,331
def ls_recurse(path, include_hidden=False):
    """Recursively list folder contents as full paths."""
    found = []
    for entry in os.listdir(path):
        is_hidden = FileSystem(entry).is_hidden()
        if include_hidden or not is_hidden:
            full = os.path.join(path, entry)
            found.append(full)
            if is_folder(full):
                # Descend into sub-folders.
                found += ls_recurse(full, include_hidden=include_hidden)
    return list(set(found))
Finds content of folder recursively
54,332
def is_russian(self):
    """Heuristic: True when more than half of RUSSIAN_CHARS occur in the name."""
    hits = sum(1 for char in RUSSIAN_CHARS if char in self.name)
    return hits > len(RUSSIAN_CHARS) / 2.0
Checks if file path is russian
54,333
def rename(self, new_path):
    """Rename this file/folder to *new_path* (after path fix-up)."""
    target = fix_raw_path(new_path)
    if is_folder(self.path):
        os.rename(self.path, target)
    else:
        # os.renames also creates any missing intermediate directories.
        os.renames(self.path, target)
Renames to new path
54,334
def setClass(self, factoryclass):
    """Store the component constructor and show its name as the label text."""
    label = str(factoryclass.name)
    self.factoryclass = factoryclass
    self.setText(label)
Sets the constructor for the component type this label is to represent
54,335
def getLabel(self, key):
    """Return the plain-text label of the axis named *key*."""
    axis = self.getPlotItem().axes[key]['item']
    return axis.label.toPlainText()
Gets the label assigned to an axes
54,336
def updateData(self, axeskey, x, y):
    """Replace the currently displayed data on the given axes.

    'stim': update the stimulus curve and re-run the range adjustment.
    'response': clear old traces, optionally rescale/zero the signal,
    and plot it with the configured polarity.
    """
    if axeskey == 'stim':
        self.stimPlot.setData(x, y)
        # Manually trigger the stimulus re-positioning for the new data.
        ranges = self.viewRange()
        self.rangeChange(self, ranges)
    if axeskey == 'response':
        self.clearTraces()
        if self._traceUnit == 'A':
            # Convert using the amplifier scaling factor.
            y = y * self._ampScalar
        if self.zeroAction.isChecked():
            # Baseline-subtract using samples 5..24 as the reference window.
            start_avg = np.mean(y[5:25])
            y = y - start_avg
        self.tracePlot.setData(x, y * self._polarity)
Replaces the currently displayed data
54,337
def appendData(self, axeskey, bins, ypoints):
    """Append raster spike points to the existing raster scatter data."""
    if axeskey != 'raster' or len(bins) == 0:
        return
    xdata, ydata = self.rasterPlot.getData()
    new_bins = np.unique(bins)
    # All new points sit on the y-slot assigned to this repetition.
    slot = self.rasterYslots[ypoints[0]]
    new_y = np.ones_like(new_bins) * slot
    self.rasterPlot.setData(np.append(xdata, new_bins), np.append(ydata, new_y))
Appends data to existing plotted data
54,338
def setThreshold(self, threshold):
    """Sync both the threshold line and the threshold field to *threshold*."""
    for widget in (self.threshLine, self.threshold_field):
        widget.setValue(threshold)
Sets the current threshold
54,339
def setRasterBounds(self, lims):
    """Set raster y-bounds (as fractions of the view) and refresh slots."""
    self.rasterBottom, self.rasterTop = lims[0], lims[1]
    self.updateRasterBounds()
Sets the raster plot y - axis bounds where in the plot the raster will appear between
54,340
def updateRasterBounds(self):
    """Recompute raster y-slot positions from the current y-axis range."""
    ymin, ymax = self.viewRange()[1]
    span = ymax - ymin
    # Map the fractional bounds into data coordinates.
    top = self.rasterTop * span + ymin
    bottom = self.rasterBottom * span + ymin
    self.rasterYslots = np.linspace(bottom, top, self.nreps)
    self.rasterBoundsUpdated.emit((self.rasterBottom, self.rasterTop), self.getTitle())
Updates the y - coordinate slots where the raster points are plotted according to the current limits of the y - axis
54,341
def askRasterBounds(self):
    """Prompt for raster bounds via dialog; apply them when accepted."""
    dlg = RasterBoundsDialog(bounds=(self.rasterBottom, self.rasterTop))
    if dlg.exec_():
        self.setRasterBounds(dlg.values())
Prompts the user to provide the raster bounds with a dialog . Saves the bounds to be applied to the plot
54,342
def rangeChange(self, pw, ranges):
    """Keep the stimulus signal pinned to the top of the plot after a
    range change.

    *ranges* is ((xmin, xmax), (ymin, ymax)) when emitted by the view;
    non-iterable values (single-axis updates) are ignored.
    """
    if hasattr(ranges, '__iter__'):
        yrange_size = ranges[1][1] - ranges[1][0]
        stim_x, stim_y = self.stimPlot.getData()
        if stim_y is not None:
            stim_height = yrange_size * STIM_HEIGHT
            # Normalise the stimulus to 0..stim_height ...
            stim_y = stim_y - np.amin(stim_y)
            if np.amax(stim_y) != 0:
                stim_y = stim_y / np.amax(stim_y)
            stim_y = stim_y * stim_height
            # ... then shift it to sit just below the top edge.
            stim_y = stim_y + (ranges[1][1] - (stim_height * 1.1 + (stim_height * 0.2)))
            self.stimPlot.setData(stim_x, stim_y)
        self.updateRasterBounds()
Adjusts the stimulus signal to keep it at the top of a plot after any adjustment to the axes ranges takes place .
54,343
def update_thresh(self):
    """Propagate the threshold line's value to the field and emit
    thresholdUpdated."""
    value = self.threshLine.value()
    self.threshold_field.setValue(value)
    self.thresholdUpdated.emit(value, self.getTitle())
Emits a Qt signal thresholdUpdated with the current threshold value
54,344
def updateImage(self, imgdata, xaxis=None, yaxis=None):
    """Set the widget image directly from an array.

    When both axis vectors are provided, the image is rescaled so pixel
    coordinates map onto data coordinates.
    """
    imgdata = imgdata.T  # image item expects (x, y) ordering
    self.img.setImage(imgdata)
    if xaxis is not None and yaxis is not None:
        # Pixels-per-unit derived from the last axis value.
        xscale = 1.0 / (imgdata.shape[0] / xaxis[-1])
        yscale = 1.0 / (imgdata.shape[1] / yaxis[-1])
        self.resetScale()
        self.img.scale(xscale, yscale)
        self.imgScale = (xscale, yscale)
    self.imageArray = np.fliplr(imgdata)
    self.updateColormap()
Updates the Widget image directly .
54,345
def resetScale(self):
    """Undo any previous image scaling and record unit scale."""
    sx, sy = self.imgScale
    self.img.scale(1.0 / sx, 1.0 / sy)
    self.imgScale = (1.0, 1.0)
Resets the scale on this image . Correctly aligns the time scale and undoes manual scaling
54,346
def updateData(self, signal, fs):
    """Compute and display a spectrogram of *signal* on a worker thread."""
    worker = threading.Thread(
        target=_doSpectrogram,
        args=(self.spec_done, (fs, signal),),
        kwargs=self.specgramArgs,
    )
    worker.start()
Displays a spectrogram of the provided signal
54,347
def setSpecArgs(**kwargs):
    """Store spectrogram appearance options.

    A 'colormap' option updates the shared image args and refreshes
    every live SpecWidget; everything else is passed to specgram.
    """
    for key, value in kwargs.items():
        if key == 'colormap':
            SpecWidget.imgArgs['lut'] = value['lut']
            SpecWidget.imgArgs['levels'] = value['levels']
            SpecWidget.imgArgs['state'] = value['state']
            for widget in SpecWidget.instances:
                widget.updateColormap()
        else:
            SpecWidget.specgramArgs[key] = value
Sets optional arguments for the spectrogram appearance .
54,348
def clearImg(self):
    """Blank the image item and drop its backing array."""
    self.img.setImage(np.array([[0]]))
    self.img.image = None
Clears the current image
54,349
def editColormap(self):
    """Open a modal histogram/gradient editor to change the colormap."""
    self.editor = pg.ImageView()
    # Hide ROI/menu buttons; only the histogram editor is relevant here.
    self.editor.ui.roiBtn.setVisible(False)
    self.editor.ui.menuBtn.setVisible(False)
    self.editor.setImage(self.imageArray)
    if self.imgArgs['state'] is not None:
        # Restore the previously saved gradient and level settings.
        self.editor.getHistogramWidget().item.gradient.restoreState(self.imgArgs['state'])
        self.editor.getHistogramWidget().item.setLevels(*self.imgArgs['levels'])
    # Capture the chosen settings when the editor window closes.
    self.editor.closeEvent = self._editor_close
    self.editor.setWindowModality(QtCore.Qt.ApplicationModal)
    self.editor.show()
Prompts the user with a dialog to change colormap
54,350
def updateColormap(self):
    """Apply the stored LUT and levels to the image item, if any."""
    lut = self.imgArgs['lut']
    if lut is not None:
        self.img.setLookupTable(lut)
        self.img.setLevels(self.imgArgs['levels'])
Updates the current colormap according to stored settings
54,351
def appendData(self, xdata, ydata, color='b', legendstr=None):
    """Plot a new curve; register it with the legend when *legendstr* given."""
    curve = self.plot(xdata, ydata, pen=color)
    if legendstr is not None:
        self.legend.addItem(curve, legendstr)
    return curve
Adds the data to the plot
54,352
def setLabels(self, xlabel=None, ylabel=None, title=None, xunits=None, yunits=None):
    """Set any combination of x label, y label and title on the plot."""
    for side, text, units in (('bottom', xlabel, xunits), ('left', ylabel, yunits)):
        if text is not None:
            self.setLabel(side, text, units=units)
    if title is not None:
        self.setTitle(title)
Sets the plot labels
54,353
def setPoint(self, x, group, y):
    """Add (x, y) to *group*'s line; x == -1 plots a lone symbol instead."""
    if x == -1:
        # Sentinel: draw a single unconnected point at x=0.
        self.plot([0], [y], symbol='o')
        return
    line = self.lines[self.groups.index(group)]
    xdata, ydata = line.getData()
    if ydata is None:
        line.setData([x], [y])
    else:
        line.setData(np.append(xdata, x), np.append(ydata, y))
Sets the given point connects line to previous point in group
54,354
def setLabels(self, name):
    """Apply one of the predefined label sets: calibration, tuning, or default."""
    if name == "calibration":
        title = "Calibration Curve"
        self.setWindowTitle(title)
        self.setTitle(title)
        self.setLabel('bottom', "Frequency", units='Hz')
        self.setLabel('left', 'Recorded Intensity (dB SPL)')
    elif name == "tuning":
        title = "Tuning Curve"
        self.setWindowTitle(title)
        self.setTitle(title)
        self.setLabel('bottom', "Frequency", units="Hz")
        self.setLabel('left', "Spike Count (mean)")
    else:
        title = "Spike Counts"
        self.setWindowTitle(title)
        self.setTitle(title)
        self.setLabel('bottom', "Test Number", units='')
        self.setLabel('left', "Spike Count (mean)", units='')
Sets plot labels according to predefined options
54,355
def loadCurve(data, groups, thresholds, absvals, fs, xlabels):
    """Re-create a progress plot from a whole saved test.

    Spike counts are summed over reps and channels per trace, then
    averaged by (nreps * nchans).  thresholds/absvals must have one
    entry per channel (data axis 2).
    """
    xlims = (xlabels[0], xlabels[-1])
    pw = ProgressWidget(groups, xlims)
    spike_counts = []
    for itrace in range(data.shape[0]):
        count = 0
        for ichan in range(data.shape[2]):
            # Collapse all reps of this channel into one long trace.
            flat_reps = data[itrace, :, ichan, :].flatten()
            count += len(spikestats.spike_times(flat_reps, thresholds[ichan], fs, absvals[ichan]))
        # Mean count per rep/channel for this trace.
        spike_counts.append(count / (data.shape[1] * data.shape[2]))
    i = 0
    for g in groups:
        for x in xlabels:
            pw.setPoint(x, g, spike_counts[i])
            i += 1
    return pw
Accepts a data set from a whole test averages reps and re - creates the progress plot as the same as it was during live plotting . Number of thresholds must match the size of the channel dimension
54,356
def processData(self, times, response, test_num, trace_num, rep_num):
    """Calculate spike times from raw response data and accumulate
    per-trace spike statistics."""
    response = response * self._polarity
    if rep_num == 0:
        # First rep of a trace: reset the per-trace accumulators.
        self.spike_counts = []
        self.spike_latencies = []
        self.spike_rates = []
    # Sample rate recovered from the time vector spacing.
    fs = 1. / (times[1] - times[0])
    spike_times = spikestats.spike_times(response, self._threshold, fs)
    self.spike_counts.append(len(spike_times))
    if len(spike_times) > 0:
        self.spike_latencies.append(spike_times[0])
    else:
        self.spike_latencies.append(np.nan)
    self.spike_rates.append(spikestats.firing_rate(spike_times, times))
    binsz = self._bins[1] - self._bins[0]
    response_bins = spikestats.bin_spikes(spike_times, binsz)
    self.appendData(response_bins, rep_num)
Calculate spike times from raw response data
54,357
def setSr(self, fs):
    """Set the sample rate on both the trace and stimulus plots."""
    for plot in (self.tracePlot, self.stimPlot):
        plot.setSr(fs)
Sets the samplerate of the input operation being plotted
54,358
def setWindowSize(self, winsz):
    """Set the scroll-window size on both the trace and stimulus plots."""
    for plot in (self.tracePlot, self.stimPlot):
        plot.setWindowSize(winsz)
Sets the size of scroll window
54,359
def addSpectrogram(self, ydata, fs, title=None):
    """Create a SpecWidget for *ydata* and add it to the stacked layout."""
    widget = SpecWidget()
    widget.updateData(ydata, fs)
    if title is not None:
        widget.setTitle(title)
    self.stacker.addWidget(widget)
Adds a new spectrogram plot for the given image . Generates a SpecWidget
54,360
def nextPlot(self):
    """Advance the stacked display to the next plot, if any.

    The original compared against count() — one past the last valid
    index — issuing an out-of-range setCurrentIndex on the last page;
    compare against count() - 1 instead.
    """
    current = self.stacker.currentIndex()
    if current < self.stacker.count() - 1:
        self.stacker.setCurrentIndex(current + 1)
Moves the displayed plot to the next one
54,361
def prevPlot(self):
    """Step the stacked display back one plot, stopping at the first."""
    current = self.stacker.currentIndex()
    if current > 0:
        self.stacker.setCurrentIndex(current - 1)
Moves the displayed plot to the previous one
54,362
def most_even_chunk(string, group):
    """Split *string* into *group* chunks whose lengths differ by at most one."""
    # Cumulative boundaries: 0, len1, len1+len2, ...
    boundaries = accumulate([0] + most_even(len(string), group))
    return [string[start:stop] for start, stop in window(boundaries, 2)]
Divide a string into a list of strings as even as possible .
54,363
def most_even(number, group):
    """Divide *number* into *group* integers that differ by at most one."""
    base, remainder = divmod(number, group)
    # The first *remainder* chunks each get one extra unit.
    chunks = [base + 1] * remainder + [base] * (group - remainder)
    logging.debug('chunks: %s', chunks)
    return chunks
Divide a number into a list of numbers as even as possible .
54,364
def window(seq, count=2):
    """Yield successive overlapping tuples of length *count* from *seq*."""
    it = iter(seq)
    current = tuple(islice(it, count))
    if len(current) != count:
        # Sequence shorter than the window: nothing to yield.
        return
    yield current
    for item in it:
        current = current[1:] + (item,)
        yield current
Slide window .
54,365
def _get_modules(path):
    """Recursively collect file paths inside Python packages under *path*."""
    found = []
    contents = os.listdir(path)
    # Only folders containing __init__.py count as packages.
    if "__init__.py" in contents:
        for entry in contents:
            full_path = os.path.join(path, entry)
            if is_file(full_path):
                found.append(full_path)
            if is_folder(full_path):
                found += _get_modules(full_path)
    return list(set(found))
Finds modules in folder recursively
54,366
def _parse ( self ) : with open ( self . path , "rt" ) as reader : return ast . parse ( reader . read ( ) , filename = self . path )
Parses file contents
54,367
def _find_package(self, root_package):
    """Compute the dotted package name of this file relative to *root_package*."""
    relative = self.path.replace(root_package, "")
    if relative.endswith(".py"):
        relative = relative[:-3]
    # Convert the filesystem path into module-separator form.
    relative = relative.replace(os.path.sep, MODULE_SEP)
    return get_folder_name(root_package) + relative
Finds package name of file
54,368
def _get_instances ( self , instance ) : return [ x for x in self . tree . body if isinstance ( x , instance ) ]
Finds all instances of instance in tree
54,369
def get_classes(self):
    """Wrap every top-level class definition of this file in a PyClass."""
    return [PyClass(node, self.package) for node in self._get_instances(ast.ClassDef)]
Finds classes in file
54,370
def best_assemblyfile(self):
    """Record the best assembly file per sample.

    If the assembler's output exists and is non-empty it becomes
    sample.general.bestassemblyfile and is copied into
    <path>/raw_assemblies/<name>.fasta; otherwise 'NA' is recorded.
    """
    for sample in self.metadata:
        try:
            filtered_outputfile = os.path.join(self.path, 'raw_assemblies', '{}.fasta'.format(sample.name))
            if os.path.isfile(sample.general.assemblyfile):
                size = os.path.getsize(sample.general.assemblyfile)
                # Zero-byte assemblies count as missing.
                if size == 0:
                    sample.general.bestassemblyfile = 'NA'
                else:
                    sample.general.bestassemblyfile = sample.general.assemblyfile
                    shutil.copyfile(sample.general.bestassemblyfile, filtered_outputfile)
            else:
                sample.general.bestassemblyfile = 'NA'
            sample.general.filteredfile = filtered_outputfile
        except AttributeError:
            # Sample had no assemblyfile attribute at all.
            sample.general.assemblyfile = 'NA'
            sample.general.bestassemblyfile = 'NA'
Determine whether the contigs . fasta output file from the assembler is present . If not set the . bestassembly attribute to NA
54,371
def get(self, group_id=None, **kwargs):
    """Fetch component groups, or one group when *group_id* is given."""
    path = 'components/groups'
    if group_id is not None:
        path = '%s/%s' % (path, group_id)
    return self.paginate_get(path, data=kwargs)
Get component groups
54,372
def create(self, name, order=None, collapsed=None):
    """Create a new component group."""
    params = ApiParams()
    params['name'] = name
    params['order'] = order
    params['collapsed'] = collapsed
    return self._post('components/groups', data=params)['data']
Create a new Component Group
54,373
def update(self, group_id, name=None, order=None, collapsed=None):
    """Update an existing component group."""
    params = ApiParams()
    params['group'] = group_id
    params['name'] = name
    params['order'] = order
    params['collapsed'] = collapsed
    return self._put('components/groups/%s' % group_id, data=params)['data']
Update a Component Group
54,374
def create(self, name, message, status, visible, component_id=None,
           component_status=None, notify=None, created_at=None,
           template=None, tplvars=None):
    """Create a new Incident and return the created resource data."""
    data = ApiParams()
    for key, value in (('name', name), ('message', message), ('status', status),
                       ('visible', visible), ('component_id', component_id),
                       ('component_status', component_status), ('notify', notify),
                       ('created_at', created_at), ('template', template),
                       ('vars', tplvars)):
        data[key] = value
    response = self._post('incidents', data=data)
    return response['data']
Create a new Incident
54,375
def update(self, incident_id, name=None, message=None, status=None, visible=None,
           component_id=None, component_status=None, notify=None,
           created_at=None, template=None, tpl_vars=None):
    """Update an existing Incident and return the updated resource data."""
    data = ApiParams()
    for key, value in (('name', name), ('message', message), ('status', status),
                       ('visible', visible), ('component_id', component_id),
                       ('component_status', component_status), ('notify', notify),
                       ('created_at', created_at), ('template', template),
                       ('vars', tpl_vars)):
        data[key] = value
    response = self._put('incidents/%s' % incident_id, data=data)
    return response['data']
Update an Incident
54,376
def create(self, name, suffix, description, default_value, display=None):
    """Create a new Metric and return the created resource data."""
    data = ApiParams()
    for key, value in (('name', name), ('suffix', suffix),
                       ('description', description),
                       ('default_value', default_value), ('display', display)):
        data[key] = value
    response = self._post('metrics', data=data)
    return response['data']
Create a new Metric
54,377
def create(self, metric_id, value, timestamp=None):
    """Add a Metric Point to the Metric identified by *metric_id*."""
    data = ApiParams()
    for key, val in (('value', value), ('timestamp', timestamp)):
        data[key] = val
    response = self._post('metrics/%s/points' % metric_id, data=data)
    return response['data']
Add a Metric Point to a Metric
54,378
def create(self, email, verify=None, components=None):
    """Create a new subscriber and return the created resource data."""
    data = ApiParams()
    for key, value in (('email', email), ('verify', verify),
                       ('components', components)):
        data[key] = value
    response = self._post('subscribers', data=data)
    return response['data']
Create a new subscriber
54,379
def annotatedcore(self):
    """
    Calculates the core genome of organisms using custom databases.
    """
    logging.info('Calculating annotated core')
    # Load the full set of core gene names
    self.total_core()
    for sample in self.metadata:
        if sample.general.bestassemblyfile != 'NA':
            # Set of core genes detected in this strain
            sample[self.analysistype].coreset = set()
            # Only Escherichia samples are processed further
            if sample.general.referencegenus == 'Escherichia':
                self.runmetadata.samples.append(sample)
                try:
                    # Parse the BLAST report for this sample
                    report = sample[self.analysistype].report
                    self.blastparser(report=report, sample=sample, fieldnames=self.fieldnames)
                except KeyError:
                    # No report attribute: fall back to an empty collection
                    sample[self.analysistype].coreset = list()
    # Write the summary report
    self.reporter()
Calculates the core genome of organisms using custom databases
54,380
def total_core(self):
    """Populate self.coregenomes with the unique gene names in the core FASTA file."""
    core_path = os.path.join(self.reffilepath, self.analysistype,
                             'Escherichia', 'core_combined.fasta')
    for record in SeqIO.parse(core_path, 'fasta'):
        # Record IDs look like "<gene>-<allele>"; keep the gene part only
        gene, _, _ = record.id.partition('-')
        if gene not in self.coregenomes:
            self.coregenomes.append(gene)
Determine the total number of core genes present
54,381
def get_simple_output(self, stderr=STDOUT):
    """Execute a simple (pipe-free) external command and return its decoded output.

    Error messages are redirected to standard output by default.
    """
    proc = Popen(shlex.split(self.cmd), stdout=PIPE, stderr=stderr)
    out, _ = proc.communicate()
    return out.decode("utf8")
Executes a simple external command and gets its output. The command contains no pipes. Error messages are redirected to the standard output by default.
54,382
def get_complex_output(self, stderr=STDOUT):
    """Execute a (possibly piped) command through the shell; return stdout as raw lines."""
    pipe = Popen(self.cmd, shell=True, stdout=PIPE, stderr=stderr)
    return pipe.stdout.readlines()
Executes a piped command and gets the lines of the output in a list.
54,383
def keep_alive(self):
    """
    Keeps a process alive. If the process terminates, it will be restarted.
    The terminated processes become zombies; they die when their parent terminates.
    """
    while True:
        # Launch (or relaunch) the child and watch it
        pid = self.execute_in_background()
        p = psutil.Process(pid)
        # Poll every 5 seconds until the child stops running or becomes a zombie.
        # NOTE(review): in psutil >= 2.0 `status` is a method, so str(p.status)
        # is its repr and never equals 'zombie' — confirm the pinned psutil
        # version, or the zombie check is a no-op and only is_running() exits.
        while p.is_running() and str(p.status) != 'zombie':
            os.system('sleep 5')
Keeps a process alive. If the process terminates, it will restart it. The terminated processes become zombies; they die when their parent terminates.
54,384
def increment_title(title):
    """Increment a string that ends in a number, e.g. 'page 1' -> 'page 2'.

    The title must end with at least one digit; otherwise re.search returns
    None and an AttributeError is raised (unchanged from the original contract).
    """
    # Raw string avoids the invalid '\d' escape (DeprecationWarning in py3)
    count = re.search(r'\d+$', title).group(0)
    return title[:-len(count)] + str(int(count) + 1)
Increments a string that ends in a number
54,385
def check_limit(self, limit):
    """Validate *limit* (must be strictly positive) and store it on the rule.

    Returns self for chaining; raises ValueError for non-positive limits.
    """
    if limit <= 0:
        raise ValueError("Rule limit must be strictly > 0 ({0} given)".format(limit))
    self.limit = limit
    return self
Checks if the given limit is valid .
54,386
def get_args():
    """Parse the command-line arguments for a movie-frame rendering run."""
    parser = argparse.ArgumentParser(
        description="Create frames for a movie that can be compiled using ffmpeg")
    parser.add_argument("start", help="date string as start time")
    parser.add_argument("end", help="date string as end time")
    parser.add_argument("step", type=float, help="fraction of a day to step by")
    parser.add_argument("--config", help="path to a config file", default="config.json")
    return parser.parse_args()
request the arguments for running
54,387
def main():
    """Process the main task: render one frame per timestep from start to end."""
    args = get_args()
    args.start = date_parser.parse(args.start)
    args.end = date_parser.parse(args.end)
    args.step = timedelta(args.step)
    config = Config(args.config)
    # One frame per step between start (inclusive) and end (exclusive)
    frame_count = int((args.end - args.start) / args.step)
    for frame in range(frame_count):
        make_plot(args.start + frame * args.step, config, args.step)
process the main task
54,388
def overall():
    """The overall grammar for pulling apart the main input files."""
    leading_comments = ZeroOrMore(Grammar.comment)
    section = Group(Grammar._section + ZeroOrMore(Group(Grammar.line)))
    return leading_comments + Dict(ZeroOrMore(section))
The overall grammar for pulling apart the main input files.
54,389
def file():
    """Grammar for file references found in the overall input files."""
    alias = Optional(Word(alphanums).setResultsName('alias') + Suppress(Literal('.')))
    filename = Word(approved_printables).setResultsName('filename')
    return alias + Suppress(White()) + filename
Grammar for files found in the overall input files .
54,390
def listen_to_event_updates():
    """Subscribe to events on the 'simulator' instance and print them for 5 seconds."""
    def on_event(event):
        print('Event:', event)

    client.create_event_subscription(instance='simulator', on_data=on_event)
    sleep(5)
Subscribe to events .
54,391
def get_current_scene_node():
    """Return the name of the jb_sceneNode that describes the current scene, or None."""
    dep_nodes = cmds.namespaceInfo(':', listOnlyDependencyNodes=True,
                                   absoluteName=True, dagPath=True)
    scene_nodes = cmds.ls(dep_nodes, type='jb_sceneNode', absoluteName=True)
    if not scene_nodes:
        return
    for node in sorted(scene_nodes):
        # The scene node is one whose reftrack attribute has no incoming connection
        if not cmds.listConnections("%s.reftrack" % node, d=False):
            return node
Return the name of the jb_sceneNode that describes the current scene or None if there is no scene node .
54,392
def updateSpec(self, *args, **kwargs):
    """Update the spectrogram.

    The first argument can be a filename or a data array; if it is None,
    the spectrogram is cleared instead.
    """
    source = args[0]
    if source is None:
        self.specPlot.clearImg()
    elif isinstance(source, basestring):  # py2: str or unicode filename
        self.specPlot.fromFile(*args, **kwargs)
    else:
        self.specPlot.updateData(*args, **kwargs)
Updates the spectrogram . First argument can be a filename or a data array . If no arguments are given clears the spectrograms .
54,393
def showSpec(self, fname):
    """Draw the spectrogram from *fname* if none is currently displayed."""
    if self.specPlot.hasImg():
        return
    if fname is not None:
        self.specPlot.fromFile(fname)
Draws the spectrogram if it is currently None
54,394
def updateSpiketrace(self, xdata, ydata, plotname=None):
    """Update the spike trace on the named response plot (first plot when None)."""
    if plotname is None:
        plotname = self.responsePlots.keys()[0]  # py2: keys() is a list
    plot = self.responsePlots[plotname]
    # 1-D data updates the single response trace; 2-D data adds multiple traces
    if len(ydata.shape) == 1:
        plot.updateData(axeskey='response', x=xdata, y=ydata)
    else:
        plot.addTraces(xdata, ydata)
Updates the spike trace
54,395
def updateSignal(self, xdata, ydata, plotname=None):
    """Update the trace of the outgoing stimulus signal."""
    key = plotname if plotname is not None else self.responsePlots.keys()[0]
    self.responsePlots[key].updateData(axeskey='stim', x=xdata, y=ydata)
Updates the trace of the outgoing signal
54,396
def setXlimits(self, lims):
    """Sets the X axis limits of the spectrogram and all trace plots."""
    self.specPlot.setXlim(lims)
    for plot in self.responsePlots.values():
        plot.setXlim(lims)
    # Nudge the splitter sizes by one pixel, alternating direction each call
    # via the badbadbad flag. NOTE(review): presumably a workaround to force
    # the splitter/plots to refresh — confirm before removing.
    sizes = self.splittersw.sizes()
    if len(sizes) > 1:
        if self.badbadbad:
            sizes[0] += 1
            sizes[1] -= 1
        else:
            sizes[0] -= 1
            sizes[1] += 1
        self.badbadbad = not self.badbadbad
        self.splittersw.setSizes(sizes)
    self._ignore_range_signal = False
Sets the X axis limits of the trace plot
54,397
def setNreps(self, nreps):
    """Set, on every response plot, the number of reps before the raster resets."""
    for response_plot in self.responsePlots.values():
        response_plot.setNreps(nreps)
Sets the number of reps before the raster plot resets
54,398
def specAutoRange(self):
    """Auto-adjust the visible range of the spectrogram to match the trace plot."""
    visible_x = self.responsePlots.values()[0].viewRange()[0]  # py2: values() is a list
    view_box = self.specPlot.getViewBox()
    view_box.autoRange(padding=0)
    self.specPlot.setXlim(visible_x)
Auto adjusts the visible range of the spectrogram
54,399
def save(self):
    """Save as a FITS file, then attempt an upload if designated in the configuration file."""
    exporter = Outgest(self.output, self.selection_array.astype('uint8'),
                       self.headers, self.config_path)
    exporter.save()
    exporter.upload()
Save as a FITS file and attempt an upload if designated in the configuration file