idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
51,800
def _iter_vars ( mod ) : vars = sorted ( var for var in dir ( mod ) if _is_public ( var ) ) for var in vars : yield getattr ( mod , var )
Iterate through a list of variables define in a module s public namespace .
51,801
def _function_header ( subpackage , func ) : args = inspect . formatargspec ( * inspect . getfullargspec ( func ) ) return "{name}{args}" . format ( name = _full_name ( subpackage , func ) , args = args , )
Generate the docstring of a function .
51,802
def _doc_method ( klass , func ) : argspec = inspect . getfullargspec ( func ) if argspec . args and argspec . args [ 0 ] == 'self' : del argspec . args [ 0 ] args = inspect . formatargspec ( * argspec ) header = "{klass}.{name}{args}" . format ( klass = klass . __name__ , name = _name ( func ) , args = args , ) docstring = _doc ( func ) return _concat ( header , docstring )
Generate the docstring of a method .
51,803
def _doc_property ( klass , prop ) : header = "{klass}.{name}" . format ( klass = klass . __name__ , name = _name ( prop ) , ) docstring = _doc ( prop ) return _concat ( header , docstring )
Generate the docstring of a property .
51,804
def _generate_paragraphs ( package , subpackages ) : for subpackage in _iter_subpackages ( package , subpackages ) : subpackage_name = subpackage . __name__ yield "## {}" . format ( subpackage_name ) yield _doc ( _import_module ( subpackage_name ) ) for func in _iter_functions ( subpackage ) : yield '##### ' + _doc_function ( subpackage , func ) for klass in _iter_classes ( subpackage ) : yield "### {}" . format ( _full_name ( subpackage , klass ) ) yield _doc ( klass ) yield "#### Methods" for method in _iter_methods ( klass , package ) : yield '##### ' + _doc_method ( klass , method ) yield "#### Properties" for prop in _iter_properties ( klass , package ) : yield '##### ' + _doc_property ( klass , prop )
Generate the paragraphs of the API documentation .
51,805
def _add_field_column ( self , field ) : @ self . add_column ( name = field ) def get_my_label ( cluster_id ) : return self . cluster_meta . get ( field , cluster_id )
Add a column for a given label field .
51,806
def _emit_select ( self , cluster_ids , ** kwargs ) : cluster_ids = self . _keep_existing_clusters ( cluster_ids ) logger . debug ( "Select cluster(s): %s." , ', ' . join ( map ( str , cluster_ids ) ) ) self . emit ( 'select' , cluster_ids , ** kwargs )
Choose spikes from the specified clusters and emit the select event on the GUI .
51,807
def _update_cluster_view ( self ) : logger . log ( 5 , "Update the cluster view." ) cluster_ids = [ int ( c ) for c in self . clustering . cluster_ids ] self . cluster_view . set_rows ( cluster_ids )
Initialize the cluster view with cluster data .
51,808
def _update_similarity_view ( self ) : if not self . similarity : return selection = self . cluster_view . selected if not len ( selection ) : return cluster_id = selection [ 0 ] cluster_ids = self . clustering . cluster_ids self . _best = cluster_id logger . log ( 5 , "Update the similarity view." ) similarities = self . similarity ( cluster_id ) clusters_sim = OrderedDict ( [ ( int ( cl ) , s ) for ( cl , s ) in similarities ] ) clusters = [ c for c in clusters_sim . keys ( ) if c in cluster_ids ] self . _current_similarity_values = clusters_sim self . similarity_view . set_rows ( [ c for c in clusters if c not in selection ] )
Update the similarity view with matches for the specified clusters .
51,809
def on_cluster ( self , up ) : similar = self . similarity_view . selected if up . added : self . _update_cluster_view ( ) if up . history == 'undo' : clusters_0 , clusters_1 = up . undo_state [ 0 ] [ 'selection' ] self . cluster_view . select ( clusters_0 , up = up ) self . similarity_view . select ( clusters_1 , up = up ) elif up . added : if up . description == 'assign' : added = list ( up . added [ 1 : ] ) + [ up . added [ 0 ] ] else : added = up . added self . cluster_view . select ( added , up = up ) if similar : self . similarity_view . next ( ) elif up . metadata_changed : if set ( up . metadata_changed ) <= set ( similar ) : next_cluster = self . similarity_view . get_next_id ( ) self . _update_similarity_view ( ) if next_cluster is not None : self . similarity_view . select ( [ next_cluster ] ) else : self . _update_cluster_view ( ) cluster = up . metadata_changed [ 0 ] next_cluster = self . cluster_meta . get ( 'next_cluster' , cluster ) logger . debug ( "Get next_cluster for %d: %s." , cluster , next_cluster ) if next_cluster is None : self . cluster_view . select ( [ cluster ] , do_emit = False ) self . cluster_view . next ( ) else : self . cluster_view . select ( [ next_cluster ] )
Update the cluster views after clustering actions .
51,810
def select ( self , * cluster_ids ) : if cluster_ids and isinstance ( cluster_ids [ 0 ] , ( tuple , list ) ) : cluster_ids = list ( cluster_ids [ 0 ] ) + list ( cluster_ids [ 1 : ] ) cluster_ids = self . _keep_existing_clusters ( cluster_ids ) self . cluster_view . select ( cluster_ids )
Select a list of clusters .
51,811
def merge ( self , cluster_ids = None , to = None ) : if cluster_ids is None : cluster_ids = self . selected if len ( cluster_ids or [ ] ) <= 1 : return self . clustering . merge ( cluster_ids , to = to ) self . _global_history . action ( self . clustering )
Merge the selected clusters .
51,812
def split ( self , spike_ids = None , spike_clusters_rel = 0 ) : if spike_ids is None : spike_ids = self . emit ( 'request_split' , single = True ) spike_ids = np . asarray ( spike_ids , dtype = np . int64 ) assert spike_ids . dtype == np . int64 assert spike_ids . ndim == 1 if len ( spike_ids ) == 0 : msg = ( "You first need to select spikes in the feature " "view with a few Ctrl+Click around the spikes " "that you want to split." ) self . emit ( 'error' , msg ) return self . clustering . split ( spike_ids , spike_clusters_rel = spike_clusters_rel ) self . _global_history . action ( self . clustering )
Split the selected spikes .
51,813
def get_labels ( self , field ) : return { c : self . cluster_meta . get ( field , c ) for c in self . clustering . cluster_ids }
Return the labels of all clusters for a given field .
51,814
def label ( self , name , value , cluster_ids = None ) : if cluster_ids is None : cluster_ids = self . cluster_view . selected if not hasattr ( cluster_ids , '__len__' ) : cluster_ids = [ cluster_ids ] if len ( cluster_ids ) == 0 : return self . cluster_meta . set ( name , cluster_ids , value ) self . _global_history . action ( self . cluster_meta )
Assign a label to clusters .
51,815
def move ( self , group , cluster_ids = None ) : if isinstance ( cluster_ids , string_types ) : logger . warn ( "The list of clusters should be a list of integers, " "not a string." ) return self . label ( 'group' , group , cluster_ids = cluster_ids )
Assign a group to some clusters .
51,816
def next ( self ) : if not self . selected : self . cluster_view . next ( ) else : self . similarity_view . next ( )
Select the next cluster .
51,817
def save ( self ) : spike_clusters = self . clustering . spike_clusters groups = { c : self . cluster_meta . get ( 'group' , c ) or 'unsorted' for c in self . clustering . cluster_ids } labels = [ ( field , self . get_labels ( field ) ) for field in self . cluster_meta . fields if field not in ( 'next_cluster' ) ] self . emit ( 'request_save' , spike_clusters , groups , * labels ) self . _save_spikes_per_cluster ( )
Save the manual clustering back to disk .
51,818
def create_cluster_meta ( cluster_groups ) : meta = ClusterMeta ( ) meta . add_field ( 'group' ) cluster_groups = cluster_groups or { } data = { c : { 'group' : v } for c , v in cluster_groups . items ( ) } meta . from_dict ( data ) return meta
Return a ClusterMeta instance with cluster group support .
51,819
def add_field ( self , name , default_value = None ) : self . _fields [ name ] = default_value def func ( cluster ) : return self . get ( name , cluster ) setattr ( self , name , func )
Add a field with an optional default value .
51,820
def set ( self , field , clusters , value , add_to_stack = True ) : if field not in self . _fields : self . add_field ( field ) assert field in self . _fields clusters = _as_list ( clusters ) for cluster in clusters : if cluster not in self . _data : self . _data [ cluster ] = { } self . _data [ cluster ] [ field ] = value up = UpdateInfo ( description = 'metadata_' + field , metadata_changed = clusters , metadata_value = value , ) undo_state = self . emit ( 'request_undo_state' , up ) if add_to_stack : self . _undo_stack . add ( ( clusters , field , value , up , undo_state ) ) self . emit ( 'cluster' , up ) return up
Set the value of one of several clusters .
51,821
def get ( self , field , cluster ) : if _is_list ( cluster ) : return [ self . get ( field , c ) for c in cluster ] assert field in self . _fields default = self . _fields [ field ] return self . _data . get ( cluster , { } ) . get ( field , default )
Retrieve the value of one cluster .
51,822
def set_from_descendants ( self , descendants ) : for field in self . fields : candidates = defaultdict ( set ) for old , new in descendants : candidates [ new ] . add ( self . get ( field , old ) ) for new , vals in candidates . items ( ) : vals = list ( vals ) default = self . _fields [ field ] if len ( vals ) == 1 and vals [ 0 ] != default : self . set ( field , new , vals [ 0 ] )
Update metadata of some clusters given the metadata of their ascendants .
51,823
def undo ( self ) : args = self . _undo_stack . back ( ) if args is None : return self . _data = deepcopy ( self . _data_base ) for clusters , field , value , up , undo_state in self . _undo_stack : if clusters is not None : self . set ( field , clusters , value , add_to_stack = False ) up , undo_state = args [ - 2 : ] up . history = 'undo' up . undo_state = undo_state self . emit ( 'cluster' , up ) return up
Undo the last metadata change .
51,824
def redo ( self ) : args = self . _undo_stack . forward ( ) if args is None : return clusters , field , value , up , undo_state = args self . set ( field , clusters , value , add_to_stack = False ) up . history = 'redo' self . emit ( 'cluster' , up ) return up
Redo the next metadata change .
51,825
def _get_boxes ( pos , size = None , margin = 0 , keep_aspect_ratio = True ) : pos = np . asarray ( pos , dtype = np . float64 ) x , y = pos . T x = x [ : , np . newaxis ] y = y [ : , np . newaxis ] w , h = size if size is not None else _get_box_size ( x , y , margin = margin ) x0 , y0 = x - w , y - h x1 , y1 = x + w , y + h x0min , y0min , x1max , y1max = x0 . min ( ) , y0 . min ( ) , x1 . max ( ) , y1 . max ( ) if not keep_aspect_ratio : b = ( x0min , y0min , x1max , y1max ) else : dx = x1max - x0min dy = y1max - y0min if dx > dy : b = ( x0min , ( y1max + y0min ) / 2. - dx / 2. , x1max , ( y1max + y0min ) / 2. + dx / 2. ) else : b = ( ( x1max + x0min ) / 2. - dy / 2. , y0min , ( x1max + x0min ) / 2. + dy / 2. , y1max ) r = Range ( from_bounds = b , to_bounds = ( - 1 , - 1 , 1 , 1 ) ) return np . c_ [ r . apply ( np . c_ [ x0 , y0 ] ) , r . apply ( np . c_ [ x1 , y1 ] ) ]
Generate non - overlapping boxes in NDC from a set of positions .
51,826
def _get_texture ( arr , default , n_items , from_bounds ) : if not hasattr ( default , '__len__' ) : default = [ default ] n_cols = len ( default ) if arr is None : arr = np . tile ( default , ( n_items , 1 ) ) assert arr . shape == ( n_items , n_cols ) arr = arr [ np . newaxis , ... ] . astype ( np . float64 ) assert arr . shape == ( 1 , n_items , n_cols ) assert len ( from_bounds ) == 2 m , M = map ( float , from_bounds ) assert np . all ( arr >= m ) assert np . all ( arr <= M ) arr = ( arr - m ) / ( M - m ) assert np . all ( arr >= 0 ) assert np . all ( arr <= 1. ) return arr
Prepare data to be uploaded as a texture .
51,827
def _get_array ( val , shape , default = None , dtype = np . float64 ) : assert val is not None or default is not None if hasattr ( val , '__len__' ) and len ( val ) == 0 : val = None if ( isinstance ( val , np . ndarray ) and val . shape == shape and val . dtype == dtype ) : return val out = np . zeros ( shape , dtype = dtype ) if val is not None and isinstance ( val , np . ndarray ) : if val . size == out . size : val = val . reshape ( out . shape ) out . flat [ : ] = val if val is not None else default assert out . shape == shape return out
Ensure an object is an array with the specified shape .
51,828
def _get_index ( n_items , item_size , n ) : index = np . arange ( n_items ) index = np . repeat ( index , item_size ) index = index . astype ( np . float64 ) assert index . shape == ( n , ) return index
Prepare an index attribute for GPU uploading .
51,829
def _load_shader ( filename ) : curdir = op . dirname ( op . realpath ( __file__ ) ) glsl_path = op . join ( curdir , 'glsl' ) path = op . join ( glsl_path , filename ) with open ( path , 'r' ) as f : return f . read ( )
Load a shader file .
51,830
def _random_color ( h_range = ( 0. , 1. ) , s_range = ( .5 , 1. ) , v_range = ( .5 , 1. ) , ) : h , s , v = uniform ( * h_range ) , uniform ( * s_range ) , uniform ( * v_range ) r , g , b = hsv_to_rgb ( np . array ( [ [ [ h , s , v ] ] ] ) ) . flat return r , g , b
Generate a random RGB color .
51,831
def _is_bright ( rgb ) : r , g , b = rgb gray = 0.299 * r + 0.587 * g + 0.114 * b return gray >= .5
Return whether a RGB color is bright or not .
51,832
def _bunchify ( b ) : assert isinstance ( b , dict ) b = Bunch ( b ) for k in b : if isinstance ( b [ k ] , dict ) : b [ k ] = Bunch ( b [ k ] ) return b
Ensure all dict elements are Bunch .
51,833
def _as_list ( obj ) : if obj is None : return None elif isinstance ( obj , string_types ) : return [ obj ] elif isinstance ( obj , tuple ) : return list ( obj ) elif not hasattr ( obj , '__len__' ) : return [ obj ] else : return obj
Ensure an object is a list .
51,834
def _as_array ( arr , dtype = None ) : if arr is None : return None if isinstance ( arr , np . ndarray ) and dtype is None : return arr if isinstance ( arr , integer_types + ( float , ) ) : arr = [ arr ] out = np . asarray ( arr ) if dtype is not None : if out . dtype != dtype : out = out . astype ( dtype ) if out . dtype not in _ACCEPTED_ARRAY_DTYPES : raise ValueError ( "'arr' seems to have an invalid dtype: " "{0:s}" . format ( str ( out . dtype ) ) ) return out
Convert an object to a numerical NumPy array .
51,835
def _glslify ( r ) : if isinstance ( r , string_types ) : return r else : assert 2 <= len ( r ) <= 4 return 'vec{}({})' . format ( len ( r ) , ', ' . join ( map ( str , r ) ) )
Transform a string or a n - tuple to a valid GLSL expression .
51,836
def get ( self , class_name ) : for transform in self . cpu_transforms + self . gpu_transforms : if transform . __class__ . __name__ == class_name : return transform
Get a transform in the chain from its name .
51,837
def remove ( self , name ) : cpu_transforms = self . _remove_transform ( self . cpu_transforms , name ) gpu_transforms = self . _remove_transform ( self . gpu_transforms , name ) return ( TransformChain ( ) . add_on_cpu ( cpu_transforms ) . add_on_gpu ( gpu_transforms ) )
Remove a transform in the chain .
51,838
def apply ( self , arr ) : for t in self . cpu_transforms : arr = t . apply ( arr ) return arr
Apply all CPU transforms on an array .
51,839
def inverse ( self ) : transforms = self . cpu_transforms + self . gpu_transforms inv_transforms = [ transform . inverse ( ) for transform in transforms [ : : - 1 ] ] return TransformChain ( ) . add_on_cpu ( inv_transforms )
Return the inverse chain of transforms .
51,840
def _create_emitter ( self , event ) : if not hasattr ( self , event ) : setattr ( self , event , lambda * args , ** kwargs : self . emit ( event , * args , ** kwargs ) )
Create a method that emits an event of the same name .
51,841
def connect ( self , func = None , event = None , set_method = False ) : if func is None : return partial ( self . connect , set_method = set_method ) if event is None : event = self . _get_on_name ( func ) self . _callbacks [ event ] . append ( func ) if set_method : self . _create_emitter ( event ) return func
Register a callback function to a given event .
51,842
def unconnect ( self , * funcs ) : for func in funcs : for callbacks in self . _callbacks . values ( ) : if func in callbacks : callbacks . remove ( func )
Unconnect specified callback functions .
51,843
def emit ( self , event , * args , ** kwargs ) : callbacks = self . _callbacks . get ( event , [ ] ) single = kwargs . pop ( 'single' , None ) if single and callbacks : return callbacks [ - 1 ] ( * args , ** kwargs ) res = [ ] for callback in callbacks : res . append ( callback ( * args , ** kwargs ) ) return res
Call all callback functions registered with an event .
51,844
def set_progress_message ( self , message , line_break = False ) : end = '\r' if not line_break else None @ self . connect def on_progress ( value , value_max , ** kwargs ) : kwargs [ 'end' ] = None if value == value_max else end _default_on_progress ( message , value , value_max , ** kwargs )
Set a progress message .
51,845
def set_complete_message ( self , message ) : @ self . connect def on_complete ( ** kwargs ) : _default_on_complete ( message , ** kwargs )
Set a complete message .
51,846
def get_plugin ( name ) : for plugin in IPluginRegistry . plugins : if name in plugin . __name__ : return plugin raise ValueError ( "The plugin %s cannot be found." % name )
Get a plugin class from its name .
51,847
def discover_plugins ( dirs ) : for path in _iter_plugin_files ( dirs ) : filename = op . basename ( path ) subdir = op . dirname ( path ) modname , ext = op . splitext ( filename ) file , path , descr = imp . find_module ( modname , [ subdir ] ) if file : try : mod = imp . load_module ( modname , file , path , descr ) except Exception as e : logger . exception ( e ) finally : file . close ( ) return IPluginRegistry . plugins
Discover the plugin classes contained in Python files .
51,848
def add_view ( self , view , name = None , position = None , closable = False , floatable = True , floating = None ) : view . view_index = self . _get_view_index ( view ) view . name = name or view . __class__ . __name__ + str ( view . view_index ) widget = _try_get_vispy_canvas ( view ) widget = _try_get_matplotlib_canvas ( widget ) dock_widget = _create_dock_widget ( widget , view . name , closable = closable , floatable = floatable , ) self . addDockWidget ( _get_dock_position ( position ) , dock_widget ) if floating is not None : dock_widget . setFloating ( floating ) dock_widget . view = view @ dock_widget . connect_ def on_close_widget ( ) : self . emit ( 'close_view' , view ) dock_widget . show ( ) self . emit ( 'add_view' , view ) logger . log ( 5 , "Add %s to GUI." , view . name ) return dock_widget
Add a widget to the main window .
51,849
def list_views ( self , name = '' , is_visible = True ) : children = self . findChildren ( QWidget ) return [ child . view for child in children if isinstance ( child , QDockWidget ) and child . view . name . startswith ( name ) and ( child . isVisible ( ) if is_visible else True ) and child . width ( ) >= 10 and child . height ( ) >= 10 ]
List all views which name start with a given string .
51,850
def get_view ( self , name , is_visible = True ) : views = self . list_views ( name , is_visible = is_visible ) return views [ 0 ] if views else None
Return a view from its name .
51,851
def view_count ( self ) : views = self . list_views ( ) counts = defaultdict ( lambda : 0 ) for view in views : counts [ view . name ] += 1 return dict ( counts )
Return the number of opened views .
51,852
def get_menu ( self , name ) : if name not in self . _menus : self . _menus [ name ] = self . menuBar ( ) . addMenu ( name ) return self . _menus [ name ]
Return or create a menu .
51,853
def restore_geometry_state ( self , gs ) : if not gs : return if gs . get ( 'geometry' , None ) : self . restoreGeometry ( ( gs [ 'geometry' ] ) ) if gs . get ( 'state' , None ) : self . restoreState ( ( gs [ 'state' ] ) )
Restore the position of the main window and the docks .
51,854
def update_view_state ( self , view , state ) : if view . name not in self : self [ view . name ] = Bunch ( ) self [ view . name ] . update ( state )
Update the state of a view .
51,855
def load ( self ) : if not op . exists ( self . path ) : logger . debug ( "The GUI state file `%s` doesn't exist." , self . path ) return assert op . exists ( self . path ) logger . debug ( "Load the GUI state from `%s`." , self . path ) self . update ( _bunchify ( _load_json ( self . path ) ) )
Load the state from the JSON file in the config dir .
51,856
def save ( self ) : logger . debug ( "Save the GUI state to `%s`." , self . path ) _save_json ( self . path , { k : v for k , v in self . items ( ) if k not in ( 'config_dir' , 'name' ) } )
Save the state to the JSON file in the config dir .
51,857
def cache ( self , f ) : if self . _memory is None : logger . debug ( "Joblib is not installed: skipping cacheing." ) return f assert f if 'self' in inspect . getargspec ( f ) . args : ignore = [ 'self' ] else : ignore = None disk_cached = self . _memory . cache ( f , ignore = ignore ) return disk_cached
Cache a function using the context s cache directory .
51,858
def memcache ( self , f ) : name = _fullname ( f ) cache = self . load_memcache ( name ) @ wraps ( f ) def memcached ( * args ) : h = args out = cache . get ( h , None ) if out is None : out = f ( * args ) cache [ h ] = out return out return memcached
Cache a function in memory using an internal dictionary .
51,859
def save ( self , name , data , location = 'local' , kind = 'json' ) : file_ext = '.json' if kind == 'json' else '.pkl' path = self . _get_path ( name , location , file_ext = file_ext ) _ensure_dir_exists ( op . dirname ( path ) ) logger . debug ( "Save data to `%s`." , path ) if kind == 'json' : _save_json ( path , data ) else : _save_pickle ( path , data )
Save a dictionary in a JSON file within the cache directory .
51,860
def load ( self , name , location = 'local' ) : path = self . _get_path ( name , location , file_ext = '.json' ) if op . exists ( path ) : return _load_json ( path ) path = self . _get_path ( name , location , file_ext = '.pkl' ) if op . exists ( path ) : return _load_pickle ( path ) logger . debug ( "The file `%s` doesn't exist." , path ) return { }
Load saved data from the cache directory .
51,861
def _ensure_dir_exists ( path ) : if not op . exists ( path ) : os . makedirs ( path ) assert op . exists ( path ) and op . isdir ( path )
Ensure a directory exists .
51,862
def load_config ( path = None ) : if not path or not op . exists ( path ) : return Config ( ) path = op . realpath ( path ) dirpath , filename = op . split ( path ) file_ext = op . splitext ( path ) [ 1 ] logger . debug ( "Load config file `%s`." , path ) if file_ext == '.py' : config = PyFileConfigLoader ( filename , dirpath , log = logger ) . load_config ( ) elif file_ext == '.json' : config = JSONFileConfigLoader ( filename , dirpath , log = logger ) . load_config ( ) return config
Load a Python or JSON config file .
51,863
def save_config ( path , config ) : import json config [ 'version' ] = 1 with open ( path , 'w' ) as f : json . dump ( config , f )
Save a config object to a JSON file .
51,864
def _edges_to_adjacency_list ( edges ) : adj = { } for i , j in edges : if i in adj : ni = adj [ i ] else : ni = adj [ i ] = set ( ) if j in adj : nj = adj [ j ] else : nj = adj [ j ] = set ( ) ni . add ( j ) nj . add ( i ) return adj
Convert a list of edges into an adjacency list .
51,865
def _probe_positions ( probe , group ) : positions = probe [ 'channel_groups' ] [ group ] [ 'geometry' ] channels = _probe_channels ( probe , group ) return np . array ( [ positions [ channel ] for channel in channels ] )
Return the positions of a probe channel group .
51,866
def _probe_adjacency_list ( probe ) : cgs = probe [ 'channel_groups' ] . values ( ) graphs = [ cg [ 'graph' ] for cg in cgs ] edges = list ( itertools . chain ( * graphs ) ) adjacency_list = _edges_to_adjacency_list ( edges ) return adjacency_list
Return an adjacency list of a whole probe .
51,867
def load_probe ( name ) : if op . exists ( name ) : path = name else : curdir = op . realpath ( op . dirname ( __file__ ) ) path = op . join ( curdir , 'probes/{}.prb' . format ( name ) ) if not op . exists ( path ) : raise IOError ( "The probe `{}` cannot be found." . format ( name ) ) return MEA ( probe = _read_python ( path ) )
Load one of the built - in probes .
51,868
def list_probes ( ) : curdir = op . realpath ( op . dirname ( __file__ ) ) return [ op . splitext ( fn ) [ 0 ] for fn in os . listdir ( op . join ( curdir , 'probes' ) ) if fn . endswith ( '.prb' ) ]
Return the list of built - in probes .
51,869
def linear_positions ( n_channels ) : return np . c_ [ np . zeros ( n_channels ) , np . linspace ( 0. , 1. , n_channels ) ]
Linear channel positions along the vertical axis .
51,870
def staggered_positions ( n_channels ) : i = np . arange ( n_channels - 1 ) x , y = ( - 1 ) ** i * ( 5 + i ) , 10 * ( i + 1 ) pos = np . flipud ( np . r_ [ np . zeros ( ( 1 , 2 ) ) , np . c_ [ x , y ] ] ) return pos
Generate channel positions for a staggered probe .
51,871
def change_channel_group ( self , group ) : assert self . _probe is not None self . _channels = _probe_channels ( self . _probe , group ) self . _positions = _probe_positions ( self . _probe , group )
Change the current channel group .
51,872
def build ( self ) : if self . is_built ( ) : return with _wait_signal ( self . loadFinished , 20 ) : self . rebuild ( ) self . _built = True
Build the full HTML source .
51,873
def add_to_js ( self , name , var ) : frame = self . page ( ) . mainFrame ( ) frame . addToJavaScriptWindowObject ( name , var )
Add an object to Javascript .
51,874
def eval_js ( self , expr ) : if not self . is_built ( ) : self . _pending_js_eval . append ( expr ) return logger . log ( 5 , "Evaluate Javascript: `%s`." , expr ) out = self . page ( ) . mainFrame ( ) . evaluateJavaScript ( expr ) return _to_py ( out )
Evaluate a Javascript expression .
51,875
def add_column ( self , func , name = None , show = True ) : assert func name = name or func . __name__ if name == '<lambda>' : raise ValueError ( "Please provide a valid name for " + name ) d = { 'func' : func , 'show' : show , } self . _columns [ name ] = d data = _create_json_dict ( cols = self . column_names , ) self . eval_js ( 'table.setHeaders({});' . format ( data ) ) return func
Add a column function which takes an id as argument and returns a value .
51,876
def column_names ( self ) : return [ name for ( name , d ) in self . _columns . items ( ) if d . get ( 'show' , True ) ]
List of column names .
51,877
def _get_row ( self , id ) : return { name : d [ 'func' ] ( id ) for ( name , d ) in self . _columns . items ( ) }
Create a row dictionary for a given object id .
51,878
def set_rows ( self , ids ) : assert all ( isinstance ( i , int ) for i in ids ) sort_col , sort_dir = self . current_sort default_sort_col , default_sort_dir = self . default_sort sort_col = sort_col or default_sort_col sort_dir = sort_dir or default_sort_dir or 'desc' logger . log ( 5 , "Set %d rows in the table." , len ( ids ) ) items = [ self . _get_row ( id ) for id in ids ] data = _create_json_dict ( items = items , cols = self . column_names , ) self . eval_js ( 'table.setData({});' . format ( data ) ) if sort_col : self . sort_by ( sort_col , sort_dir )
Set the rows of the table .
51,879
def sort_by ( self , name , sort_dir = 'asc' ) : logger . log ( 5 , "Sort by `%s` %s." , name , sort_dir ) self . eval_js ( 'table.sortBy("{}", "{}");' . format ( name , sort_dir ) )
Sort by a given variable .
51,880
def select ( self , ids , do_emit = True , ** kwargs ) : self . eval_js ( 'table.select({}, false);' . format ( dumps ( ids ) ) ) if do_emit : self . emit ( 'select' , ids , ** kwargs )
Select some rows in the table .
51,881
def set_interval ( self , interval = None , change_status = True , force_update = False ) : if interval is None : interval = self . _interval interval = self . _restrict_interval ( interval ) if not force_update and interval == self . _interval : return self . _interval = interval start , end = interval self . clear ( ) if change_status : self . set_status ( 'Interval: {:.3f} s - {:.3f} s' . format ( start , end ) ) traces = self . traces ( interval ) ymin , ymax = traces . data . min ( ) , traces . data . max ( ) data_bounds = ( start , ymin , end , ymax ) self . _data_bounds = data_bounds self . _waveform_times = [ ] self . _plot_traces ( traces . data , color = traces . get ( 'color' , None ) , data_bounds = data_bounds , ) waveforms = traces . waveforms assert isinstance ( waveforms , list ) for w in waveforms : self . _plot_waveforms ( waveforms = w . data , color = w . color , channel_ids = w . get ( 'channel_ids' , None ) , start_time = w . start_time , data_bounds = data_bounds , ) self . _waveform_times . append ( ( w . start_time , w . spike_id , w . spike_cluster , w . get ( 'channel_ids' , None ) , ) ) if self . do_show_labels : self . _plot_labels ( traces . data , data_bounds = data_bounds ) self . build ( ) self . update ( )
Display the traces and spikes in a given interval .
51,882
def half_duration ( self ) : if self . _interval is not None : a , b = self . _interval return ( b - a ) * .5 else : return self . interval_duration * .5
Half of the duration of the current interval .
51,883
def go_right ( self ) : start , end = self . _interval delay = ( end - start ) * .2 self . shift ( delay )
Go to right .
51,884
def go_left ( self ) : start , end = self . _interval delay = ( end - start ) * .2 self . shift ( - delay )
Go to left .
51,885
def widen ( self ) : t , h = self . time , self . half_duration h *= self . scaling_coeff_x self . set_interval ( ( t - h , t + h ) )
Increase the interval size .
51,886
def narrow ( self ) : t , h = self . time , self . half_duration h /= self . scaling_coeff_x self . set_interval ( ( t - h , t + h ) )
Decrease the interval size .
51,887
def auth_from_hass_config ( path = None , ** kwargs ) : if path is None : path = config . find_hass_config ( ) return Auth ( os . path . join ( path , ".storage/auth" ) , ** kwargs )
Initialize auth from HASS config .
51,888
def user_name ( self , user_id ) : user = self . users . get ( user_id ) if user is None : return "Unknown user ({})" . format ( user_id ) return user [ "name" ]
Return name for user .
51,889
def default_hass_config_dir ( ) : data_dir = os . getenv ( "APPDATA" ) if os . name == "nt" else os . path . expanduser ( "~" ) return os . path . join ( data_dir , ".homeassistant" )
Put together the default configuration directory based on the OS .
51,890
def find_hass_config ( ) : if "HASSIO_TOKEN" in os . environ : return "/config" config_dir = default_hass_config_dir ( ) if os . path . isdir ( config_dir ) : return config_dir raise ValueError ( "Unable to automatically find the location of Home Assistant " "config. Please pass it in." )
Try to find HASS config .
51,891
def _secret_yaml ( loader , node ) : fname = os . path . join ( os . path . dirname ( loader . name ) , "secrets.yaml" ) try : with open ( fname , encoding = "utf-8" ) as secret_file : secrets = YAML ( typ = "safe" ) . load ( secret_file ) except FileNotFoundError : raise ValueError ( "Secrets file {} not found" . format ( fname ) ) from None try : return secrets [ node . value ] except KeyError : raise ValueError ( "Secret {} not found" . format ( node . value ) ) from None
Load secrets and embed it into the configuration YAML .
51,892
def _include_yaml ( loader , node ) : return load_yaml ( os . path . join ( os . path . dirname ( loader . name ) , node . value ) )
Load another YAML file and embeds it using the !include tag .
51,893
def _stub_tag ( constructor , node ) : seen = getattr ( constructor , "_stub_seen" , None ) if seen is None : seen = constructor . _stub_seen = set ( ) if node . tag not in seen : print ( "YAML tag {} is not supported" . format ( node . tag ) ) seen . add ( node . tag ) return { }
Stub a constructor with a dictionary .
51,894
def load_yaml ( fname ) : yaml = YAML ( typ = "safe" ) yaml . allow_duplicate_keys = True HassSafeConstructor . name = fname yaml . Constructor = HassSafeConstructor with open ( fname , encoding = "utf-8" ) as conf_file : return yaml . load ( conf_file ) or { }
Load a YAML file .
51,895
def db_url_from_hass_config ( path ) : config = load_hass_config ( path ) default_path = os . path . join ( path , "home-assistant_v2.db" ) default_url = "sqlite:///{}" . format ( default_path ) recorder = config . get ( "recorder" ) if recorder : db_url = recorder . get ( "db_url" ) if db_url is not None : return db_url if not os . path . isfile ( default_path ) : raise ValueError ( "Unable to determine DB url from hass config at {}" . format ( path ) ) return default_url
Find the recorder database url from a HASS config dir .
51,896
def localize ( dt ) : if dt . tzinfo is UTC : return ( dt + LOCAL_UTC_OFFSET ) . replace ( tzinfo = None ) return dt
Localize a datetime object to local time .
51,897
def sqlalch_datetime ( dt ) : if isinstance ( dt , str ) : return datetime . strptime ( dt , "%Y-%m-%d %H:%M:%S.%f" ) . replace ( tzinfo = UTC ) if dt . tzinfo is not None and dt . tzinfo . utcoffset ( dt ) is not None : return dt . astimezone ( UTC ) return dt . replace ( tzinfo = UTC )
Convert a SQLAlchemy datetime string to a datetime object .
51,898
def db_from_hass_config ( path = None , ** kwargs ) : if path is None : path = config . find_hass_config ( ) url = config . db_url_from_hass_config ( path ) return HassDatabase ( url , ** kwargs )
Initialize a database from HASS config .
51,899
def stripped_db_url ( url ) : parsed = urlparse ( url ) if parsed . password is None : return url return parsed . _replace ( netloc = "{}:***@{}" . format ( parsed . username , parsed . hostname ) ) . geturl ( )
Return a version of the DB url with the password stripped out .