Dataset columns: idx (int64, 0 to 63k), question (string, 61 to 4.03k characters), target (string, 6 to 1.23k characters).
3,100
def write(self, text='', wrap=True):
    if not isinstance(text, string_types):
        raise TypeError('text must be a string')
    # Ensure we only keep ASCII characters (replace anything else)
    text = text.encode('utf-8').decode('ascii', errors='replace')
    self._pending_writes.append((text, wrap))
    self.update()
Write text and scroll
3,101
def _do_pending_writes ( self ) : for text , wrap in self . _pending_writes : text = text [ - self . _n_cols * self . _n_rows : ] text = text . split ( '\n' ) text = [ t if len ( t ) > 0 else '' for t in text ] nr , nc = self . _n_rows , self . _n_cols for para in text : para = para [ : nc ] if not wrap else para lines = [ para [ ii : ( ii + nc ) ] for ii in range ( 0 , len ( para ) , nc ) ] lines = [ '' ] if len ( lines ) == 0 else lines for line in lines : self . _text_lines . insert ( 0 , line ) self . _text_lines = self . _text_lines [ : nr ] self . _bytes_012 [ 1 : ] = self . _bytes_012 [ : - 1 ] self . _bytes_345 [ 1 : ] = self . _bytes_345 [ : - 1 ] self . _insert_text_buf ( line , 0 ) self . _pending_writes = [ ]
Do any pending text writes
3,102
def _insert_text_buf ( self , line , idx ) : self . _bytes_012 [ idx ] = 0 self . _bytes_345 [ idx ] = 0 I = np . array ( [ ord ( c ) - 32 for c in line [ : self . _n_cols ] ] ) I = np . clip ( I , 0 , len ( __font_6x8__ ) - 1 ) if len ( I ) > 0 : b = __font_6x8__ [ I ] self . _bytes_012 [ idx , : len ( I ) ] = b [ : , : 3 ] self . _bytes_345 [ idx , : len ( I ) ] = b [ : , 3 : ]
Insert text into bytes buffers
3,103
def _parse_template_vars(self):
    template_vars = set()
    for var in parsing.find_template_variables(self._code):
        var = var.lstrip('$')
        if var == self.name:
            continue
        if var in ('pre', 'post'):
            raise ValueError('GLSL uses reserved template variable $%s' % var)
        template_vars.add(var)
    return template_vars
find all template variables in self . _code excluding the function name .
3,104
def _get_replaced_code(self, names):
    code = self._code

    # Replace the function name
    fname = names[self]
    code = code.replace(" " + self.name + "(", " " + fname + "(")

    # Apply plain string replacements
    for key, val in self._replacements.items():
        code = code.replace(key, val)

    # Collect assignments to append at the end of the function
    post_lines = []
    for key, val in self._assignments.items():
        if isinstance(key, Variable):
            key = names[key]
        if isinstance(val, ShaderObject):
            val = val.expression(names)
        line = ' %s = %s;' % (key, val)
        post_lines.append(line)
    if 'post' in self._expressions:
        post_lines.append(' $post')
    post_text = '\n'.join(post_lines)
    if post_text:
        post_text = '\n' + post_text + '\n'
    code = code.rpartition('}')
    code = code[0] + post_text + code[1] + code[2]

    # Add a hook at the start of the function body
    if 'pre' in self._expressions:
        m = re.search(fname + r'\s*\([^{]*\)\s*{', code)
        if m is None:
            raise RuntimeError("Could not find beginning of function '%s'"
                               % fname)
        ind = m.span()[1]
        code = code[:ind] + "\n $pre\n" + code[ind:]

    # Apply template variables
    for key, val in self._expressions.items():
        val = val.expression(names)
        search = r'\$' + key + r'($|[^a-zA-Z0-9_])'
        code = re.sub(search, val + r'\1', code)

    # Warn about any placeholders left unsubstituted
    if '$' in code:
        v = parsing.find_template_variables(code)
        logger.warning('Unsubstituted placeholders in code: %s\n'
                       ' replacements made: %s',
                       v, list(self._expressions.keys()))

    return code + '\n'
Return code with new name expressions and replacements applied .
3,105
def append(self, function, update=True):
    self._funcs.append(function)
    self._add_dep(function)
    if update:
        self._update()
Append a new function to the end of this chain .
3,106
def remove(self, function, update=True):
    self._funcs.remove(function)
    self._remove_dep(function)
    if update:
        self._update()
Remove a function from the chain .
3,107
def add ( self , item , position = 5 ) : if item in self . items : return self . items [ item ] = position self . _add_dep ( item ) self . order = None self . changed ( code_changed = True )
Add an item to the list unless it is already present . If the item is an expression then a semicolon will be appended to it in the final compiled code .
3,108
def remove ( self , item ) : self . items . pop ( item ) self . _remove_dep ( item ) self . order = None self . changed ( code_changed = True )
Remove an item from the list .
3,109
def convex_hull(self):
    if self._faces is None:
        if self._vertices is None:
            return None
        self.triangulate()
    return self._convex_hull
Return an array of vertex indexes representing the convex hull .
3,110
def triangulate ( self ) : npts = self . _vertices . shape [ 0 ] if np . any ( self . _vertices [ 0 ] != self . _vertices [ 1 ] ) : edges = np . empty ( ( npts , 2 ) , dtype = np . uint32 ) edges [ : , 0 ] = np . arange ( npts ) edges [ : , 1 ] = edges [ : , 0 ] + 1 edges [ - 1 , 1 ] = 0 else : edges = np . empty ( ( npts - 1 , 2 ) , dtype = np . uint32 ) edges [ : , 0 ] = np . arange ( npts ) edges [ : , 1 ] = edges [ : , 0 ] + 1 tri = Triangulation ( self . _vertices , edges ) tri . triangulate ( ) return tri . pts , tri . tris
Triangulates the set of vertices and stores the triangles in faces and the convex hull in convex_hull .
3,111
def find(name):
    if op.exists(name):
        return name
    path = op.dirname(__file__) or '.'
    paths = [path] + config['include_path']
    for path in paths:
        filename = op.abspath(op.join(path, name))
        if op.exists(filename):
            return filename
        for d in os.listdir(path):
            fullpath = op.abspath(op.join(path, d))
            if op.isdir(fullpath):
                filename = op.abspath(op.join(fullpath, name))
                if op.exists(filename):
                    return filename
    return None
Locate a filename into the shader library .
3,112
def get(name):
    filename = find(name)
    if filename is None:
        raise RuntimeError('Could not find %s' % name)
    with open(filename) as fid:
        return fid.read()
Retrieve code from the given filename .
3,113
def expect(func, args, times=7, sleep_t=0.5):
    while times > 0:
        try:
            return func(*args)
        except Exception as e:
            times -= 1
            logger.debug("expect failed - attempts left: %d" % times)
            time.sleep(sleep_t)
            if times == 0:
                raise exceptions.BaseExc(e)
Call func with args, retrying up to times attempts with sleep_t seconds between failures
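A minimal usage sketch for the retry helper above; the flaky_fetch callable and URL are hypothetical, used only for illustration:

import random

def flaky_fetch(url):
    # Fails roughly half the time to exercise the retry loop.
    if random.random() < 0.5:
        raise ConnectionError('transient failure')
    return 'payload from %s' % url

result = expect(flaky_fetch, ('https://example.com/data',), times=3, sleep_t=1.0)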
3,114
def num(string):
    if not isinstance(string, type('')):
        raise ValueError(type(''))
    try:
        # Strip everything except letters, digits, '.' and '-'
        string = re.sub(r'[^a-zA-Z0-9\.\-]', '', string)
        number = re.findall(r"[-+]?\d*\.\d+|[-+]?\d+", string)
        return float(number[0])
    except Exception as e:
        logger = logging.getLogger('tradingAPI.utils.num')
        logger.debug("number not found in %s" % string)
        logger.debug(e)
        return None
Extract the first number found in a string and convert it to float; returns None if no number is found
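A few worked examples of the extraction behavior, assuming num is in scope (the values are illustrative):

assert num('Price: $1,234.56') == 1234.56  # punctuation is stripped, first number parsed
assert num('-3.5%') == -3.5
assert num('no digits here') is None       # logs a debug message and returns None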
3,115
def get_number_unit(number):
    n = str(float(number))
    mult, submult = n.split('.')
    if float(submult) != 0:
        # e.g. two decimal places -> '0.01'
        unit = '0.' + (len(submult) - 1) * '0' + '1'
        return float(unit)
    else:
        return float(1)
Get the smallest decimal unit of a number, e.g. 0.01 for a value with two decimal places and 1.0 for an integer value
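Worked examples, assuming get_number_unit is in scope:

assert get_number_unit(23.45) == 0.01  # two decimal places -> unit is 0.01
assert get_number_unit(0.5) == 0.1
assert get_number_unit(7) == 1.0       # no fractional part -> unit is 1.0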
3,116
def get_pip ( mov = None , api = None , name = None ) : if mov is None and api is None : logger . error ( "need at least one of those" ) raise ValueError ( ) elif mov is not None and api is not None : logger . error ( "mov and api are exclusive" ) raise ValueError ( ) if api is not None : if name is None : logger . error ( "need a name" ) raise ValueError ( ) mov = api . new_mov ( name ) mov . open ( ) if mov is not None : mov . _check_open ( ) try : logger . debug ( len ( Glob ( ) . theCollector . collection ) ) pip = Glob ( ) . theCollector . collection [ 'pip' ] if name is not None : pip_res = pip [ name ] elif mov is not None : pip_res = pip [ mov . product ] logger . debug ( "pip found in the collection" ) return pip_res except KeyError : logger . debug ( "pip not found in the collection" ) records = [ ] intervals = [ 10 , 20 , 30 ] def _check_price ( interval = 10 ) : timeout = time . time ( ) + interval while time . time ( ) < timeout : records . append ( mov . get_price ( ) ) time . sleep ( 0.5 ) for interval in intervals : _check_price ( interval ) if min ( records ) == max ( records ) : logger . debug ( "no variation in %d seconds" % interval ) if interval == intervals [ - 1 ] : raise TimeoutError ( "no variation" ) else : break for price in records : if 'best_price' not in locals ( ) : best_price = price if len ( str ( price ) ) > len ( str ( best_price ) ) : logger . debug ( "found new best_price %f" % price ) best_price = price pip = get_number_unit ( best_price ) Glob ( ) . pipHandler . add_val ( { mov . product : pip } ) return pip
Get the pip (smallest observed price increment) of a product, sampling live prices if it is not already cached
3,117
def itemsize(self):
    return self._items[:self._count, 1] - self._items[:self._count, 0]
Individual item sizes
3,118
def reserve ( self , capacity ) : if capacity >= self . _data . size : capacity = int ( 2 ** np . ceil ( np . log2 ( capacity ) ) ) self . _data = np . resize ( self . _data , capacity )
Set current capacity of the underlying array
3,119
def append(self, data, itemsize=None):
    self.insert(len(self), data, itemsize)
Append data to the end .
3,120
def build_if_needed(self):
    if self._need_build:
        self._build()
        self._need_build = False
    self.update_variables()
Reset shader source if necessary.
3,121
def link_view ( self , view ) : if view is self . _linked_view : return if self . _linked_view is not None : self . _linked_view . scene . transform . changed . disconnect ( self . _view_changed ) self . _linked_view = view view . scene . transform . changed . connect ( self . _view_changed ) self . _view_changed ( )
Link this axis to a ViewBox
3,122
def _view_changed ( self , event = None ) : tr = self . node_transform ( self . _linked_view . scene ) p1 , p2 = tr . map ( self . _axis_ends ( ) ) if self . orientation in ( 'left' , 'right' ) : self . axis . domain = ( p1 [ 1 ] , p2 [ 1 ] ) else : self . axis . domain = ( p1 [ 0 ] , p2 [ 0 ] )
Linked view transform has changed ; update ticks .
3,123
def next_power_of_2(n):
    n -= 1
    shift = 1
    while (n + 1) & n:  # while n + 1 is not yet a power of 2
        n |= n >> shift
        shift *= 2
    return max(4, n + 1)
Return next power of 2 greater than or equal to n
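A few spot checks of the bit-twiddling above; note the result is clamped to a minimum of 4:

assert next_power_of_2(1) == 4    # clamped by max(4, ...)
assert next_power_of_2(5) == 8
assert next_power_of_2(16) == 16  # exact powers of two are unchanged
assert next_power_of_2(17) == 32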
3,124
def _compute_texture_shape ( self , size = 1 ) : linesize = 1024 count = self . _uniforms_float_count cols = 4 * linesize // int ( count ) rows = max ( 1 , int ( math . ceil ( size / float ( cols ) ) ) ) shape = rows , cols * ( count // 4 ) , count self . _ushape = shape return shape
Compute uniform texture shape
3,125
def _update ( self ) : if self . _vertices_buffer is not None : self . _vertices_buffer . delete ( ) self . _vertices_buffer = VertexBuffer ( self . _vertices_list . data ) if self . itype is not None : if self . _indices_buffer is not None : self . _indices_buffer . delete ( ) self . _indices_buffer = IndexBuffer ( self . _indices_list . data ) if self . utype is not None : if self . _uniforms_texture is not None : self . _uniforms_texture . delete ( ) texture = self . _uniforms_list . _data . view ( np . float32 ) size = len ( texture ) / self . _uniforms_float_count shape = self . _compute_texture_shape ( size ) texture = texture . reshape ( shape [ 0 ] , shape [ 1 ] , 4 ) self . _uniforms_texture = Texture2D ( texture ) self . _uniforms_texture . data = texture self . _uniforms_texture . interpolation = 'nearest' if len ( self . _programs ) : for program in self . _programs : program . bind ( self . _vertices_buffer ) if self . _uniforms_list is not None : program [ "uniforms" ] = self . _uniforms_texture program [ "uniforms_shape" ] = self . _ushape
Update vertex buffers & texture
3,126
def get_layout(name, *args, **kwargs):
    if name not in _layout_map:
        raise KeyError("Graph layout '%s' not found. Should be one of %s"
                       % (name, AVAILABLE_LAYOUTS))
    layout = _layout_map[name]
    if inspect.isclass(layout):
        layout = layout(*args, **kwargs)
    return layout
Retrieve a graph layout
3,127
def update_viewer_state ( rec , context ) : if '_protocol' not in rec : rec . pop ( 'properties' ) rec [ 'state' ] = { } rec [ 'state' ] [ 'values' ] = rec . pop ( 'options' ) layer_states = [ ] for layer in rec [ 'layers' ] : state_id = str ( uuid . uuid4 ( ) ) state_cls = STATE_CLASS [ layer [ '_type' ] . split ( '.' ) [ - 1 ] ] state = state_cls ( layer = context . object ( layer . pop ( 'layer' ) ) ) properties = set ( layer . keys ( ) ) - set ( [ '_type' ] ) for prop in sorted ( properties , key = state . update_priority , reverse = True ) : value = layer . pop ( prop ) value = context . object ( value ) if isinstance ( value , six . string_types ) and value == 'fixed' : value = 'Fixed' if isinstance ( value , six . string_types ) and value == 'linear' : value = 'Linear' setattr ( state , prop , value ) context . register_object ( state_id , state ) layer [ 'state' ] = state_id layer_states . append ( state ) list_id = str ( uuid . uuid4 ( ) ) context . register_object ( list_id , layer_states ) rec [ 'state' ] [ 'values' ] [ 'layers' ] = list_id rec [ 'state' ] [ 'values' ] [ 'visible_axes' ] = rec [ 'state' ] [ 'values' ] . pop ( 'visible_box' )
Given viewer session information, make sure the session information is compatible with the current version of the viewers, and if not, update the session information in-place.
3,128
def remove_comments(code):
    pattern = r"(\".*?\"|\'.*?\')|(/\*.*?\*/|//[^\r\n]*\n)"
    regex = re.compile(pattern, re.MULTILINE | re.DOTALL)

    def do_replace(match):
        # Keep string literals (group 1), drop comments (group 2)
        if match.group(2) is not None:
            return ""
        else:
            return match.group(1)

    return regex.sub(do_replace, code)
Remove C-style comments from a GLSL code string.
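A small illustration, assuming remove_comments is in scope; note that a line comment also consumes its trailing newline, while string literals are preserved:

src = 'float a; // note\nchar *s = "//not a comment"; /* block */ float b;\n'
print(remove_comments(src))
# -> 'float a; char *s = "//not a comment";  float b;\n'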
3,129
def merge_includes ( code ) : pattern = '\#\s*include\s*"(?P<filename>[a-zA-Z0-9\_\-\.\/]+)"' regex = re . compile ( pattern ) includes = [ ] def replace ( match ) : filename = match . group ( "filename" ) if filename not in includes : includes . append ( filename ) path = glsl . find ( filename ) if not path : logger . critical ( '"%s" not found' % filename ) raise RuntimeError ( "File not found" , filename ) text = '\n// --- start of "%s" ---\n' % filename with open ( path ) as fh : text += fh . read ( ) text += '// --- end of "%s" ---\n' % filename return text return '' for i in range ( 10 ) : if re . search ( regex , code ) : code = re . sub ( regex , replace , code ) else : break return code
Merge all includes recursively .
3,130
def add_widget ( self , widget = None , row = None , col = None , row_span = 1 , col_span = 1 , ** kwargs ) : if row is None : row = self . _next_cell [ 0 ] if col is None : col = self . _next_cell [ 1 ] if widget is None : widget = Widget ( ** kwargs ) else : if kwargs : raise ValueError ( "cannot send kwargs if widget is given" ) _row = self . _cells . setdefault ( row , { } ) _row [ col ] = widget self . _grid_widgets [ self . _n_added ] = ( row , col , row_span , col_span , widget ) self . _n_added += 1 widget . parent = self self . _next_cell = [ row , col + col_span ] widget . _var_w = Variable ( "w-(row: %s | col: %s)" % ( row , col ) ) widget . _var_h = Variable ( "h-(row: %s | col: %s)" % ( row , col ) ) stretch = list ( widget . stretch ) stretch [ 0 ] = col_span if stretch [ 0 ] is None else stretch [ 0 ] stretch [ 1 ] = row_span if stretch [ 1 ] is None else stretch [ 1 ] widget . stretch = stretch self . _need_solver_recreate = True return widget
Add a new widget to this grid . This will cause other widgets in the grid to be resized to make room for the new widget . Can be used to replace a widget as well
3,131
def remove_widget ( self , widget ) : self . _grid_widgets = dict ( ( key , val ) for ( key , val ) in self . _grid_widgets . items ( ) if val [ - 1 ] != widget ) self . _need_solver_recreate = True
Remove a widget from this grid
3,132
def resize_widget ( self , widget , row_span , col_span ) : row = None col = None for ( r , c , rspan , cspan , w ) in self . _grid_widgets . values ( ) : if w == widget : row = r col = c break if row is None or col is None : raise ValueError ( "%s not found in grid" % widget ) self . remove_widget ( widget ) self . add_widget ( widget , row , col , row_span , col_span ) self . _need_solver_recreate = True
Resize a widget in the grid to new dimensions .
3,133
def _get_vispy_caller ( ) : records = inspect . stack ( ) for record in records [ 5 : ] : module = record [ 0 ] . f_globals [ '__name__' ] if module . startswith ( 'vispy' ) : line = str ( record [ 0 ] . f_lineno ) func = record [ 3 ] cls = record [ 0 ] . f_locals . get ( 'self' , None ) clsname = "" if cls is None else cls . __class__ . __name__ + '.' caller = "{0}:{1}{2}({3}): " . format ( module , clsname , func , line ) return caller return 'unknown'
Helper to get vispy calling function from the stack
3,134
def set_log_level ( verbose , match = None , return_old = False ) : if isinstance ( verbose , bool ) : verbose = 'info' if verbose else 'warning' if isinstance ( verbose , string_types ) : verbose = verbose . lower ( ) if verbose not in logging_types : raise ValueError ( 'Invalid argument "%s"' % verbose ) verbose = logging_types [ verbose ] else : raise TypeError ( 'verbose must be a bool or string' ) logger = logging . getLogger ( 'vispy' ) old_verbose = logger . level old_match = _lh . _vispy_set_match ( match ) logger . setLevel ( verbose ) if verbose <= logging . DEBUG : _lf . _vispy_set_prepend ( True ) else : _lf . _vispy_set_prepend ( False ) out = None if return_old : out = ( old_verbose , old_match ) return out
Convenience function for setting the logging level
3,135
def _handle_exception ( ignore_callback_errors , print_callback_errors , obj , cb_event = None , node = None ) : if not hasattr ( obj , '_vispy_err_registry' ) : obj . _vispy_err_registry = { } registry = obj . _vispy_err_registry if cb_event is not None : cb , event = cb_event exp_type = 'callback' else : exp_type = 'node' type_ , value , tb = sys . exc_info ( ) tb = tb . tb_next sys . last_type = type_ sys . last_value = value sys . last_traceback = tb del tb if not ignore_callback_errors : raise if print_callback_errors != "never" : this_print = 'full' if print_callback_errors in ( 'first' , 'reminders' ) : if exp_type == 'callback' : key = repr ( cb ) + repr ( event ) else : key = repr ( node ) if key in registry : registry [ key ] += 1 if print_callback_errors == 'first' : this_print = None else : ii = registry [ key ] if ii == ( 2 ** int ( np . log2 ( ii ) ) ) : this_print = ii else : this_print = None else : registry [ key ] = 1 if this_print == 'full' : logger . log_exception ( ) if exp_type == 'callback' : logger . error ( "Invoking %s for %s" % ( cb , event ) ) else : logger . error ( "Drawing node %s" % node ) elif this_print is not None : if exp_type == 'callback' : logger . error ( "Invoking %s repeat %s" % ( cb , this_print ) ) else : logger . error ( "Drawing node %s repeat %s" % ( node , this_print ) )
Helper for printing errors in callbacks
3,136
def _serialize_buffer(buffer, array_serialization=None):
    if array_serialization == 'binary':
        return buffer.ravel().tostring()
    elif array_serialization == 'base64':
        return {'storage_type': 'base64',
                'buffer': base64.b64encode(buffer).decode('ascii')}
    raise ValueError("The array serialization method should be 'binary' or "
                     "'base64'.")
Serialize a NumPy array .
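A quick sketch of both serialization modes, assuming _serialize_buffer is importable; the array values are arbitrary:

import numpy as np

arr = np.arange(4, dtype=np.float32)
raw = _serialize_buffer(arr, array_serialization='binary')  # raw bytes of the flattened array
b64 = _serialize_buffer(arr, array_serialization='base64')  # dict with 'storage_type' and 'buffer' keys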
3,137
def _dep_changed(self, dep, code_changed=False, value_changed=False):
    self.changed(code_changed, value_changed)
Called when a dependency's expression has changed.
3,138
def changed ( self , code_changed = False , value_changed = False ) : for d in self . _dependents : d . _dep_changed ( self , code_changed = code_changed , value_changed = value_changed )
Inform dependents that this shaderobject has changed .
3,139
def pan(self, *pan):
    if len(pan) == 1:
        pan = pan[0]
    self.rect = self.rect + pan
Pan the view .
3,140
def viewbox_mouse_event ( self , event ) : if event . handled or not self . interactive : return BaseCamera . viewbox_mouse_event ( self , event ) if event . type == 'mouse_wheel' : center = self . _scene_transform . imap ( event . pos ) self . zoom ( ( 1 + self . zoom_factor ) ** ( - event . delta [ 1 ] * 30 ) , center ) event . handled = True elif event . type == 'mouse_move' : if event . press_event is None : return modifiers = event . mouse_event . modifiers p1 = event . mouse_event . press_event . pos p2 = event . mouse_event . pos if 1 in event . buttons and not modifiers : p1 = np . array ( event . last_event . pos ) [ : 2 ] p2 = np . array ( event . pos ) [ : 2 ] p1s = self . _transform . imap ( p1 ) p2s = self . _transform . imap ( p2 ) self . pan ( p1s - p2s ) event . handled = True elif 2 in event . buttons and not modifiers : p1c = np . array ( event . last_event . pos ) [ : 2 ] p2c = np . array ( event . pos ) [ : 2 ] scale = ( ( 1 + self . zoom_factor ) ** ( ( p1c - p2c ) * np . array ( [ 1 , - 1 ] ) ) ) center = self . _transform . imap ( event . press_event . pos [ : 2 ] ) self . zoom ( scale , center ) event . handled = True else : event . handled = False elif event . type == 'mouse_press' : event . handled = event . button in [ 1 , 2 ] else : event . handled = False
The SubScene received a mouse event ; update transform accordingly .
3,141
def set_data ( self , vol , clim = None ) : if not isinstance ( vol , np . ndarray ) : raise ValueError ( 'Volume visual needs a numpy array.' ) if not ( ( vol . ndim == 3 ) or ( vol . ndim == 4 and vol . shape [ - 1 ] <= 4 ) ) : raise ValueError ( 'Volume visual needs a 3D image.' ) if clim is not None : clim = np . array ( clim , float ) if not ( clim . ndim == 1 and clim . size == 2 ) : raise ValueError ( 'clim must be a 2-element array-like' ) self . _clim = tuple ( clim ) if self . _clim is None : self . _clim = vol . min ( ) , vol . max ( ) vol = np . array ( vol , dtype = 'float32' , copy = False ) if self . _clim [ 1 ] == self . _clim [ 0 ] : if self . _clim [ 0 ] != 0. : vol *= 1.0 / self . _clim [ 0 ] else : vol -= self . _clim [ 0 ] vol /= self . _clim [ 1 ] - self . _clim [ 0 ] self . _tex . set_data ( vol ) self . shared_program [ 'u_shape' ] = ( vol . shape [ 2 ] , vol . shape [ 1 ] , vol . shape [ 0 ] ) shape = vol . shape [ : 3 ] if self . _vol_shape != shape : self . _vol_shape = shape self . _need_vertex_update = True self . _vol_shape = shape self . _kb_for_texture = np . prod ( self . _vol_shape ) / 1024
Set the volume data .
3,142
def _prepare_draw ( self , view = None ) : if self . _changed [ 'pos' ] : self . pos_buf . set_data ( self . _pos ) self . _changed [ 'pos' ] = False if self . _changed [ 'color' ] : self . color_buf . set_data ( self . _color ) self . _program . vert [ 'color' ] = self . color_buf self . _changed [ 'color' ] = False return True
This method is called immediately before each draw .
3,143
def _merge_intervals ( self , min_depth ) : def add_interval ( ret , start , stop ) : if min_depth is not None : shift = 2 * ( 29 - min_depth ) mask = ( int ( 1 ) << shift ) - 1 if stop - start < mask : ret . append ( ( start , stop ) ) else : ofs = start & mask st = start if ofs > 0 : st = ( start - ofs ) + ( mask + 1 ) ret . append ( ( start , st ) ) while st + mask + 1 < stop : ret . append ( ( st , st + mask + 1 ) ) st = st + mask + 1 ret . append ( ( st , stop ) ) else : ret . append ( ( start , stop ) ) ret = [ ] start = stop = None self . _intervals . sort ( axis = 0 ) for itv in self . _intervals : if start is None : start , stop = itv continue if itv [ 0 ] > stop : add_interval ( ret , start , stop ) start , stop = itv else : if itv [ 1 ] > stop : stop = itv [ 1 ] if start is not None and stop is not None : add_interval ( ret , start , stop ) self . _intervals = np . asarray ( ret )
Merge overlapping intervals .
3,144
def union ( self , another_is ) : result = IntervalSet ( ) if another_is . empty ( ) : result . _intervals = self . _intervals elif self . empty ( ) : result . _intervals = another_is . _intervals else : result . _intervals = IntervalSet . merge ( self . _intervals , another_is . _intervals , lambda in_a , in_b : in_a or in_b ) return result
Return the union between self and another_is .
3,145
def to_nuniq_interval_set ( cls , nested_is ) : r2 = nested_is . copy ( ) res = [ ] if r2 . empty ( ) : return IntervalSet ( ) order = 0 while not r2 . empty ( ) : shift = int ( 2 * ( IntervalSet . HPY_MAX_ORDER - order ) ) ofs = ( int ( 1 ) << shift ) - 1 ofs2 = int ( 1 ) << ( 2 * order + 2 ) r4 = [ ] for iv in r2 . _intervals : a = ( int ( iv [ 0 ] ) + ofs ) >> shift b = int ( iv [ 1 ] ) >> shift c = a << shift d = b << shift if d > c : r4 . append ( ( c , d ) ) res . append ( ( a + ofs2 , b + ofs2 ) ) if len ( r4 ) > 0 : r4_is = IntervalSet ( np . asarray ( r4 ) ) r2 = r2 . difference ( r4_is ) order += 1 return IntervalSet ( np . asarray ( res ) )
Convert an IntervalSet using the NESTED numbering scheme to an IntervalSet containing UNIQ numbers for HEALPix cells .
3,146
def from_nuniq_interval_set ( cls , nuniq_is ) : nested_is = IntervalSet ( ) rtmp = [ ] last_order = 0 intervals = nuniq_is . _intervals diff_order = IntervalSet . HPY_MAX_ORDER shift_order = 2 * diff_order for interval in intervals : for j in range ( interval [ 0 ] , interval [ 1 ] ) : order , i_pix = uniq2orderipix ( j ) if order != last_order : nested_is = nested_is . union ( IntervalSet ( np . asarray ( rtmp ) ) ) rtmp = [ ] last_order = order diff_order = IntervalSet . HPY_MAX_ORDER - order shift_order = 2 * diff_order rtmp . append ( ( i_pix << shift_order , ( i_pix + 1 ) << shift_order ) ) nested_is = nested_is . union ( IntervalSet ( np . asarray ( rtmp ) ) ) return nested_is
Convert an IntervalSet containing NUNIQ intervals to an IntervalSet representing HEALPix cells following the NESTED numbering scheme .
3,147
def merge(a_intervals, b_intervals, op):
    a_endpoints = a_intervals.flatten().tolist()
    b_endpoints = b_intervals.flatten().tolist()

    # Sentinel value greater than every endpoint, used to terminate the sweep
    sentinel = max(a_endpoints[-1], b_endpoints[-1]) + 1
    a_endpoints += [sentinel]
    b_endpoints += [sentinel]

    a_index = 0
    b_index = 0
    res = []

    scan = min(a_endpoints[0], b_endpoints[0])
    while scan < sentinel:
        in_a = not ((scan < a_endpoints[a_index]) ^ (a_index % 2))
        in_b = not ((scan < b_endpoints[b_index]) ^ (b_index % 2))
        in_res = op(in_a, in_b)

        if in_res ^ (len(res) % 2):
            res += [scan]
        if scan == a_endpoints[a_index]:
            a_index += 1
        if scan == b_endpoints[b_index]:
            b_index += 1

        scan = min(a_endpoints[a_index], b_endpoints[b_index])

    return np.asarray(res).reshape((-1, 2))
Merge two lists of intervals according to the boolean function op
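A worked union example for the sweep above, assuming merge is accessible (e.g. as a static method on IntervalSet); intervals are half-open [start, stop):

import numpy as np

a = np.array([[0, 2], [5, 8]])
b = np.array([[1, 3], [10, 12]])
union = merge(a, b, lambda in_a, in_b: in_a or in_b)
# union -> [[0, 3], [5, 8], [10, 12]]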
3,148
def delete(self):
    if hasattr(self, '_glir'):
        # Queue the delete command, mark the queue as deletable, then detach it
        self._glir.command('DELETE', self._id)
        self._glir._deletable = True
        del self._glir
Delete the object from GPU memory .
3,149
def _build_interpolation ( self ) : interpolation = self . _interpolation self . _data_lookup_fn = self . _interpolation_fun [ interpolation ] self . shared_program . frag [ 'get_data' ] = self . _data_lookup_fn if interpolation == 'bilinear' : texture_interpolation = 'linear' else : texture_interpolation = 'nearest' if interpolation != 'nearest' : self . shared_program [ 'u_kernel' ] = self . _kerneltex self . _data_lookup_fn [ 'shape' ] = self . _data . shape [ : 2 ] [ : : - 1 ] if self . _texture . interpolation != texture_interpolation : self . _texture . interpolation = texture_interpolation self . _data_lookup_fn [ 'texture' ] = self . _texture self . _need_interpolation_update = False
Rebuild the _data_lookup_fn using different interpolations within the shader
3,150
def _build_vertex_data ( self ) : grid = self . _grid w = 1.0 / grid [ 1 ] h = 1.0 / grid [ 0 ] quad = np . array ( [ [ 0 , 0 , 0 ] , [ w , 0 , 0 ] , [ w , h , 0 ] , [ 0 , 0 , 0 ] , [ w , h , 0 ] , [ 0 , h , 0 ] ] , dtype = np . float32 ) quads = np . empty ( ( grid [ 1 ] , grid [ 0 ] , 6 , 3 ) , dtype = np . float32 ) quads [ : ] = quad mgrid = np . mgrid [ 0. : grid [ 1 ] , 0. : grid [ 0 ] ] . transpose ( 1 , 2 , 0 ) mgrid = mgrid [ : , : , np . newaxis , : ] mgrid [ ... , 0 ] *= w mgrid [ ... , 1 ] *= h quads [ ... , : 2 ] += mgrid tex_coords = quads . reshape ( grid [ 1 ] * grid [ 0 ] * 6 , 3 ) tex_coords = np . ascontiguousarray ( tex_coords [ : , : 2 ] ) vertices = tex_coords * self . size self . _subdiv_position . set_data ( vertices . astype ( 'float32' ) ) self . _subdiv_texcoord . set_data ( tex_coords . astype ( 'float32' ) )
Rebuild the vertex buffers used for rendering the image when using the subdivide method .
3,151
def bake ( self , P , key = 'curr' , closed = False , itemsize = None ) : itemsize = itemsize or len ( P ) itemcount = len ( P ) / itemsize n = itemsize if closed : I = np . arange ( n + 3 ) if key == 'prev' : I -= 2 I [ 0 ] , I [ 1 ] , I [ - 1 ] = n - 1 , n - 1 , n - 1 elif key == 'next' : I [ 0 ] , I [ - 3 ] , I [ - 2 ] , I [ - 1 ] = 1 , 0 , 1 , 1 else : I -= 1 I [ 0 ] , I [ - 1 ] , I [ n + 1 ] = 0 , 0 , 0 else : I = np . arange ( n + 2 ) if key == 'prev' : I -= 2 I [ 0 ] , I [ 1 ] , I [ - 1 ] = 0 , 0 , n - 2 elif key == 'next' : I [ 0 ] , I [ - 1 ] , I [ - 2 ] = 1 , n - 1 , n - 1 else : I -= 1 I [ 0 ] , I [ - 1 ] = 0 , n - 1 I = np . repeat ( I , 2 ) return P [ I ]
Given a path P, return the baked vertices as they should be copied into the collection once the path has been appended.
3,152
def _stop_timers ( canvas ) : for attr in dir ( canvas ) : try : attr_obj = getattr ( canvas , attr ) except NotImplementedError : attr_obj = None if isinstance ( attr_obj , Timer ) : attr_obj . stop ( )
Stop all timers in a canvas .
3,153
def _last_stack_str():
    stack = extract_stack()
    for s in stack[::-1]:
        if op.join('vispy', 'gloo', 'buffer.py') not in __file__:
            break
    return format_list([s])[0]
Return a stack trace string from the last call that didn't originate from here
3,154
def glsl_type(self):
    if self.dtype is None:
        return None
    dtshape = self.dtype[0].shape
    n = dtshape[0] if dtshape else 1
    if n > 1:
        dtype = 'vec%d' % n
    else:
        dtype = 'float' if 'f' in self.dtype[0].base.kind else 'int'
    return 'attribute', dtype
GLSL declaration strings required for a variable to hold this data .
3,155
def _rename_objects_pretty ( self ) : self . _global_ns = dict ( [ ( kwd , None ) for kwd in gloo . util . KEYWORDS ] ) self . _shader_ns = dict ( [ ( shader , { } ) for shader in self . shaders ] ) obj_shaders = { } for shader_name , deps in self . _shader_deps . items ( ) : for dep in deps : for name in dep . static_names ( ) : self . _global_ns [ name ] = None obj_shaders . setdefault ( dep , [ ] ) . append ( shader_name ) name_index = { } for obj , shaders in obj_shaders . items ( ) : name = obj . name if self . _name_available ( obj , name , shaders ) : self . _assign_name ( obj , name , shaders ) else : while True : index = name_index . get ( name , 0 ) + 1 name_index [ name ] = index ext = '_%d' % index new_name = name [ : 32 - len ( ext ) ] + ext if self . _name_available ( obj , new_name , shaders ) : self . _assign_name ( obj , new_name , shaders ) break
Rename all objects like name_1 to avoid conflicts . Objects are only renamed if necessary .
3,156
def _update_positions ( self ) : self . _colorbar . pos = self . _pos self . _border . pos = self . _pos if self . _orientation == "right" or self . _orientation == "left" : self . _label . rotation = - 90 x , y = self . _pos halfw , halfh = self . _halfdim label_anchors = ColorBarVisual . _get_label_anchors ( center = self . _pos , halfdim = self . _halfdim , orientation = self . _orientation , transforms = self . label . transforms ) self . _label . anchors = label_anchors ticks_anchors = ColorBarVisual . _get_ticks_anchors ( center = self . _pos , halfdim = self . _halfdim , orientation = self . _orientation , transforms = self . label . transforms ) self . _ticks [ 0 ] . anchors = ticks_anchors self . _ticks [ 1 ] . anchors = ticks_anchors ( label_pos , ticks_pos ) = ColorBarVisual . _calc_positions ( center = self . _pos , halfdim = self . _halfdim , border_width = self . border_width , orientation = self . _orientation , transforms = self . transforms ) self . _label . pos = label_pos self . _ticks [ 0 ] . pos = ticks_pos [ 0 ] self . _ticks [ 1 ] . pos = ticks_pos [ 1 ]
updates the positions of the colorbars and labels
3,157
def _calc_positions ( center , halfdim , border_width , orientation , transforms ) : ( x , y ) = center ( halfw , halfh ) = halfdim visual_to_doc = transforms . get_transform ( 'visual' , 'document' ) doc_to_visual = transforms . get_transform ( 'document' , 'visual' ) doc_x = visual_to_doc . map ( np . array ( [ halfw , 0 , 0 , 0 ] , dtype = np . float32 ) ) doc_y = visual_to_doc . map ( np . array ( [ 0 , halfh , 0 , 0 ] , dtype = np . float32 ) ) if doc_x [ 0 ] < 0 : doc_x *= - 1 if doc_y [ 1 ] < 0 : doc_y *= - 1 if orientation == "top" : doc_perp_vector = - doc_y elif orientation == "bottom" : doc_perp_vector = doc_y elif orientation == "left" : doc_perp_vector = - doc_x if orientation == "right" : doc_perp_vector = doc_x perp_len = np . linalg . norm ( doc_perp_vector ) doc_perp_vector /= perp_len perp_len += border_width perp_len += 5 perp_len *= ColorBarVisual . text_padding_factor doc_perp_vector *= perp_len doc_center = visual_to_doc . map ( np . array ( [ x , y , 0 , 0 ] , dtype = np . float32 ) ) doc_label_pos = doc_center + doc_perp_vector visual_label_pos = doc_to_visual . map ( doc_label_pos ) [ : 3 ] if orientation in [ "top" , "bottom" ] : doc_ticks_pos = [ doc_label_pos - doc_x , doc_label_pos + doc_x ] else : doc_ticks_pos = [ doc_label_pos + doc_y , doc_label_pos - doc_y ] visual_ticks_pos = [ ] visual_ticks_pos . append ( doc_to_visual . map ( doc_ticks_pos [ 0 ] ) [ : 3 ] ) visual_ticks_pos . append ( doc_to_visual . map ( doc_ticks_pos [ 1 ] ) [ : 3 ] ) return ( visual_label_pos , visual_ticks_pos )
Calculate the text positions given the ColorBar parameters.
3,158
def size ( self ) : ( halfw , halfh ) = self . _halfdim if self . orientation in [ "top" , "bottom" ] : return ( halfw * 2. , halfh * 2. ) else : return ( halfh * 2. , halfw * 2. )
The size of the ColorBar
3,159
def normalized(self):
    return Rect(pos=(min(self.left, self.right),
                     min(self.top, self.bottom)),
                size=(abs(self.width), abs(self.height)))
Return a Rect covering the same area but with height and width guaranteed to be positive .
3,160
def flipped(self, x=False, y=True):
    pos = list(self.pos)
    size = list(self.size)
    for i, flip in enumerate((x, y)):
        if flip:
            pos[i] += size[i]
            size[i] *= -1
    return Rect(pos, size)
Return a Rect with the same bounds but with axes inverted
3,161
def _transform_in(self):
    return np.array([[self.left, self.bottom, 0, 1],
                     [self.right, self.top, 0, 1]])
Return array of coordinates that can be mapped by Transform classes .
3,162
def _calculate_delta_pos ( adjacency_arr , pos , t , optimal ) : delta = pos [ : , np . newaxis , : ] - pos distance2 = ( delta * delta ) . sum ( axis = - 1 ) distance2 = np . where ( distance2 < 0.0001 , 0.0001 , distance2 ) distance = np . sqrt ( distance2 ) displacement = np . zeros ( ( len ( delta ) , 2 ) ) for ii in range ( 2 ) : displacement [ : , ii ] = ( delta [ : , : , ii ] * ( ( optimal * optimal ) / ( distance * distance ) - ( adjacency_arr * distance ) / optimal ) ) . sum ( axis = 1 ) length = np . sqrt ( ( displacement ** 2 ) . sum ( axis = 1 ) ) length = np . where ( length < 0.01 , 0.1 , length ) delta_pos = displacement * t / length [ : , np . newaxis ] return delta_pos
Helper to calculate the delta position
3,163
def get_recipe_intent_handler ( request ) : ingredient = request . slots [ "Ingredient" ] if ingredient == None : return alexa . create_response ( "Could not find an ingredient!" ) request . session [ 'last_ingredient' ] = ingredient card = alexa . create_card ( title = "GetRecipeIntent activated" , subtitle = None , content = "asked alexa to find a recipe using {}" . format ( ingredient ) ) return alexa . create_response ( "Finding a recipe with the ingredient {}" . format ( ingredient ) , end_session = False , card_obj = card )
You can insert arbitrary business logic code here
3,164
def use ( app = None , gl = None ) : if app is None and gl is None : raise TypeError ( 'Must specify at least one of "app" or "gl".' ) if app == 'ipynb_webgl' : app = 'headless' gl = 'webgl' if app == 'osmesa' : from . . util . osmesa_gl import fix_osmesa_gl_lib fix_osmesa_gl_lib ( ) if gl is not None : raise ValueError ( "Do not specify gl when using osmesa" ) if gl : from . . import gloo , config config [ 'gl_backend' ] = gl gloo . gl . use_gl ( gl ) if app : from . . app import use_app use_app ( app )
Set the usage options for vispy
3,165
def run_subprocess ( command , return_code = False , ** kwargs ) : use_kwargs = dict ( stderr = subprocess . PIPE , stdout = subprocess . PIPE ) use_kwargs . update ( kwargs ) p = subprocess . Popen ( command , ** use_kwargs ) output = p . communicate ( ) output = [ '' if s is None else s for s in output ] output = [ s . decode ( 'utf-8' ) if isinstance ( s , bytes ) else s for s in output ] output = tuple ( output ) if not return_code and p . returncode : print ( output [ 0 ] ) print ( output [ 1 ] ) err_fun = subprocess . CalledProcessError . __init__ if 'output' in inspect . getargspec ( err_fun ) . args : raise subprocess . CalledProcessError ( p . returncode , command , output ) else : raise subprocess . CalledProcessError ( p . returncode , command ) if return_code : output = output + ( p . returncode , ) return output
Run command using subprocess . Popen
3,166
def start ( self , interval = None , iterations = None ) : if self . running : return self . iter_count = 0 if interval is not None : self . interval = interval if iterations is not None : self . max_iterations = iterations self . _backend . _vispy_start ( self . interval ) self . _running = True self . _first_emit_time = precision_time ( ) self . _last_emit_time = precision_time ( ) self . events . start ( type = 'timer_start' )
Start the timer .
3,167
def _best_res_pixels ( self ) : factor = 2 * ( AbstractMOC . HPY_MAX_NORDER - self . max_order ) pix_l = [ ] for iv in self . _interval_set . _intervals : for val in range ( iv [ 0 ] >> factor , iv [ 1 ] >> factor ) : pix_l . append ( val ) return np . asarray ( pix_l )
Returns a numpy array of all the HEALPix indexes contained in the MOC at its max order .
3,168
def add_neighbours ( self ) : ipix = self . _best_res_pixels ( ) hp = HEALPix ( nside = ( 1 << self . max_order ) , order = 'nested' ) extend_ipix = AbstractMOC . _neighbour_pixels ( hp , ipix ) neigh_ipix = np . setdiff1d ( extend_ipix , ipix ) shift = 2 * ( AbstractMOC . HPY_MAX_NORDER - self . max_order ) neigh_itv = np . vstack ( ( neigh_ipix << shift , ( neigh_ipix + 1 ) << shift ) ) . T self . _interval_set = self . _interval_set . union ( IntervalSet ( neigh_itv ) ) return self
Extends the MOC instance so that it includes the HEALPix cells touching its border .
3,169
def remove_neighbours ( self ) : ipix = self . _best_res_pixels ( ) hp = HEALPix ( nside = ( 1 << self . max_order ) , order = 'nested' ) extend_ipix = AbstractMOC . _neighbour_pixels ( hp , ipix ) neigh_ipix = np . setxor1d ( extend_ipix , ipix ) border_ipix = AbstractMOC . _neighbour_pixels ( hp , neigh_ipix ) reduced_ipix = np . setdiff1d ( ipix , border_ipix ) shift = 2 * ( AbstractMOC . HPY_MAX_NORDER - self . max_order ) reduced_itv = np . vstack ( ( reduced_ipix << shift , ( reduced_ipix + 1 ) << shift ) ) . T self . _interval_set = IntervalSet ( reduced_itv ) return self
Removes from the MOC instance the HEALPix cells located at its border .
3,170
def fill ( self , ax , wcs , ** kw_mpl_pathpatch ) : fill . fill ( self , ax , wcs , ** kw_mpl_pathpatch )
Draws the MOC on a matplotlib axis .
3,171
def from_image ( cls , header , max_norder , mask = None ) : height = header [ 'NAXIS2' ] width = header [ 'NAXIS1' ] w = wcs . WCS ( header ) if mask is not None : y , x = np . where ( mask ) pix_crd = np . dstack ( ( x , y ) ) [ 0 ] else : step_pix = 1 x , y = np . mgrid [ 0.5 : ( width + 0.5 + step_pix ) : step_pix , 0.5 : ( height + 0.5 + step_pix ) : step_pix ] pix_crd = np . dstack ( ( x . ravel ( ) , y . ravel ( ) ) ) [ 0 ] frame = wcs . utils . wcs_to_celestial_frame ( w ) world_pix_crd = SkyCoord ( w . wcs_pix2world ( pix_crd , 1 ) , unit = 'deg' , frame = frame ) hp = HEALPix ( nside = ( 1 << max_norder ) , order = 'nested' , frame = ICRS ( ) ) ipix = hp . skycoord_to_healpix ( world_pix_crd ) ipix = np . unique ( ipix ) shift = 2 * ( AbstractMOC . HPY_MAX_NORDER - max_norder ) intervals_arr = np . vstack ( ( ipix << shift , ( ipix + 1 ) << shift ) ) . T interval_set = IntervalSet ( intervals_arr ) return cls ( interval_set = interval_set )
Creates a ~mocpy . moc . MOC from an image stored as a FITS file .
3,172
def from_fits_images ( cls , path_l , max_norder ) : moc = MOC ( ) for path in path_l : header = fits . getheader ( path ) current_moc = MOC . from_image ( header = header , max_norder = max_norder ) moc = moc . union ( current_moc ) return moc
Loads a MOC from a set of FITS file images .
3,173
def from_vizier_table ( cls , table_id , nside = 256 ) : nside_possible_values = ( 8 , 16 , 32 , 64 , 128 , 256 , 512 ) if nside not in nside_possible_values : raise ValueError ( 'Bad value for nside. Must be in {0}' . format ( nside_possible_values ) ) result = cls . from_ivorn ( 'ivo://CDS/' + table_id , nside ) return result
Creates a ~mocpy . moc . MOC object from a VizieR table .
3,174
def from_ivorn ( cls , ivorn , nside = 256 ) : return cls . from_url ( '%s?%s' % ( MOC . MOC_SERVER_ROOT_URL , urlencode ( { 'ivorn' : ivorn , 'get' : 'moc' , 'order' : int ( np . log2 ( nside ) ) } ) ) )
Creates a ~mocpy . moc . MOC object from a given ivorn .
3,175
def from_url ( cls , url ) : path = download_file ( url , show_progress = False , timeout = 60 ) return cls . from_fits ( path )
Creates a ~mocpy . moc . MOC object from a given url .
3,176
def from_skycoords ( cls , skycoords , max_norder ) : hp = HEALPix ( nside = ( 1 << max_norder ) , order = 'nested' ) ipix = hp . lonlat_to_healpix ( skycoords . icrs . ra , skycoords . icrs . dec ) shift = 2 * ( AbstractMOC . HPY_MAX_NORDER - max_norder ) intervals = np . vstack ( ( ipix << shift , ( ipix + 1 ) << shift ) ) . T interval_set = IntervalSet ( intervals ) return cls ( interval_set )
Creates a MOC from an astropy . coordinates . SkyCoord .
3,177
def from_polygon_skycoord ( cls , skycoord , inside = None , max_depth = 10 ) : return MOC . from_polygon ( lon = skycoord . icrs . ra , lat = skycoord . icrs . dec , inside = inside , max_depth = max_depth )
Creates a MOC from a polygon .
3,178
def from_polygon ( cls , lon , lat , inside = None , max_depth = 10 ) : from . polygon import PolygonComputer polygon_computer = PolygonComputer ( lon , lat , inside , max_depth ) moc = MOC . from_json ( polygon_computer . ipix ) if polygon_computer . degrade_to_max_depth : moc = moc . degrade_to_order ( max_depth ) return moc
Creates a MOC from a polygon
3,179
def sky_fraction(self):
    pix_id = self._best_res_pixels()
    nb_pix_filled = pix_id.size
    return nb_pix_filled / float(3 << (2 * (self.max_order + 1)))
Sky fraction covered by the MOC
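A small numerical check of the denominator used above: 3 << (2 * (max_order + 1)) equals 12 * 4**max_order, the total number of HEALPix cells at that order, so for example 48 filled cells at max_order=2 cover 48 / 192 = 0.25 of the sky.

order = 2
assert (3 << (2 * (order + 1))) == 12 * 4 ** order == 192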
3,180
def _query ( self , resource_id , max_rows ) : from astropy . io . votable import parse_single_table if max_rows is not None and max_rows >= 0 : max_rows_str = str ( max_rows ) else : max_rows_str = str ( 9999999999 ) tmp_moc = tempfile . NamedTemporaryFile ( delete = False ) self . write ( tmp_moc . name ) r = requests . post ( 'http://cdsxmatch.u-strasbg.fr/QueryCat/QueryCat' , data = { 'mode' : 'mocfile' , 'catName' : resource_id , 'format' : 'votable' , 'limit' : max_rows_str } , files = { 'moc' : open ( tmp_moc . name , 'rb' ) } , headers = { 'User-Agent' : 'MOCPy' } , stream = True ) tmp_vot = BytesIO ( ) tmp_vot . write ( r . content ) table = parse_single_table ( tmp_vot ) . to_table ( ) os . unlink ( tmp_moc . name ) return table
Internal method to query Simbad or a VizieR table for sources in the coverage of the MOC instance
3,181
def inverse(self):
    if self._inverse is None:
        self._inverse = InverseTransform(self)
    return self._inverse
The inverse of this transform .
3,182
def _tile_ticks ( self , frac , tickvec ) : origins = np . tile ( self . axis . _vec , ( len ( frac ) , 1 ) ) origins = self . axis . pos [ 0 ] . T + ( origins . T * frac ) . T endpoints = tickvec + origins return origins , endpoints
Tiles tick marks along the axis .
3,183
def _get_tick_frac_labels ( self ) : minor_num = 4 if ( self . axis . scale_type == 'linear' ) : domain = self . axis . domain if domain [ 1 ] < domain [ 0 ] : flip = True domain = domain [ : : - 1 ] else : flip = False offset = domain [ 0 ] scale = domain [ 1 ] - domain [ 0 ] transforms = self . axis . transforms length = self . axis . pos [ 1 ] - self . axis . pos [ 0 ] n_inches = np . sqrt ( np . sum ( length ** 2 ) ) / transforms . dpi major = _get_ticks_talbot ( domain [ 0 ] , domain [ 1 ] , n_inches , 2 ) labels = [ '%g' % x for x in major ] majstep = major [ 1 ] - major [ 0 ] minor = [ ] minstep = majstep / ( minor_num + 1 ) minstart = 0 if self . axis . _stop_at_major [ 0 ] else - 1 minstop = - 1 if self . axis . _stop_at_major [ 1 ] else 0 for i in range ( minstart , len ( major ) + minstop ) : maj = major [ 0 ] + i * majstep minor . extend ( np . linspace ( maj + minstep , maj + majstep - minstep , minor_num ) ) major_frac = ( major - offset ) / scale minor_frac = ( np . array ( minor ) - offset ) / scale major_frac = major_frac [ : : - 1 ] if flip else major_frac use_mask = ( major_frac > - 0.0001 ) & ( major_frac < 1.0001 ) major_frac = major_frac [ use_mask ] labels = [ l for li , l in enumerate ( labels ) if use_mask [ li ] ] minor_frac = minor_frac [ ( minor_frac > - 0.0001 ) & ( minor_frac < 1.0001 ) ] elif self . axis . scale_type == 'logarithmic' : return NotImplementedError elif self . axis . scale_type == 'power' : return NotImplementedError return major_frac , minor_frac , labels
Get the major ticks, minor ticks, and major labels
3,184
def write_packed ( self , outfile , rows ) : if self . rescale : raise Error ( "write_packed method not suitable for bit depth %d" % self . rescale [ 0 ] ) return self . write_passes ( outfile , rows , packed = True )
Write PNG file to outfile . The pixel data comes from rows which should be in boxed row packed format . Each row should be a sequence of packed bytes .
3,185
def convert_ppm_and_pgm ( self , ppmfile , pgmfile , outfile ) : pixels = array ( 'B' ) pixels . fromfile ( ppmfile , ( self . bitdepth / 8 ) * self . color_planes * self . width * self . height ) apixels = array ( 'B' ) apixels . fromfile ( pgmfile , ( self . bitdepth / 8 ) * self . width * self . height ) pixels = interleave_planes ( pixels , apixels , ( self . bitdepth / 8 ) * self . color_planes , ( self . bitdepth / 8 ) ) if self . interlace : self . write_passes ( outfile , self . array_scanlines_interlace ( pixels ) ) else : self . write_passes ( outfile , self . array_scanlines ( pixels ) )
Convert a PPM and PGM file containing raw pixel data into a PNG outfile with the parameters set in the writer object .
3,186
def array_scanlines_interlace ( self , pixels ) : fmt = 'BH' [ self . bitdepth > 8 ] vpr = self . width * self . planes for xstart , ystart , xstep , ystep in _adam7 : if xstart >= self . width : continue ppr = int ( math . ceil ( ( self . width - xstart ) / float ( xstep ) ) ) row_len = ppr * self . planes for y in range ( ystart , self . height , ystep ) : if xstep == 1 : offset = y * vpr yield pixels [ offset : offset + vpr ] else : row = array ( fmt ) row . extend ( pixels [ 0 : row_len ] ) offset = y * vpr + xstart * self . planes end_offset = ( y + 1 ) * vpr skip = self . planes * xstep for i in range ( self . planes ) : row [ i : : self . planes ] = pixels [ offset + i : end_offset : skip ] yield row
Generator for interlaced scanlines from an array . pixels is the full source image in flat row flat pixel format . The generator yields each scanline of the reduced passes in turn in boxed row flat pixel format .
3,187
def deinterlace ( self , raw ) : vpr = self . width * self . planes fmt = 'BH' [ self . bitdepth > 8 ] a = array ( fmt , [ 0 ] * vpr * self . height ) source_offset = 0 for xstart , ystart , xstep , ystep in _adam7 : if xstart >= self . width : continue recon = None ppr = int ( math . ceil ( ( self . width - xstart ) / float ( xstep ) ) ) row_size = int ( math . ceil ( self . psize * ppr ) ) for y in range ( ystart , self . height , ystep ) : filter_type = raw [ source_offset ] source_offset += 1 scanline = raw [ source_offset : source_offset + row_size ] source_offset += row_size recon = self . undo_filter ( filter_type , scanline , recon ) flat = self . serialtoflat ( recon , ppr ) if xstep == 1 : assert xstart == 0 offset = y * vpr a [ offset : offset + vpr ] = flat else : offset = y * vpr + xstart * self . planes end_offset = ( y + 1 ) * vpr skip = self . planes * xstep for i in range ( self . planes ) : a [ offset + i : end_offset : skip ] = flat [ i : : self . planes ] return a
Read raw pixel data, undo filters, deinterlace, and flatten. Return in flat row flat pixel format.
3,188
def iterboxed ( self , rows ) : def asvalues ( raw ) : if self . bitdepth == 8 : return array ( 'B' , raw ) if self . bitdepth == 16 : raw = tostring ( raw ) return array ( 'H' , struct . unpack ( '!%dH' % ( len ( raw ) // 2 ) , raw ) ) assert self . bitdepth < 8 width = self . width spb = 8 // self . bitdepth out = array ( 'B' ) mask = 2 ** self . bitdepth - 1 shifts = map ( self . bitdepth . __mul__ , reversed ( range ( spb ) ) ) for o in raw : out . extend ( map ( lambda i : mask & ( o >> i ) , shifts ) ) return out [ : width ] return imap ( asvalues , rows )
Iterator that yields each scanline in boxed row flat pixel format . rows should be an iterator that yields the bytes of each row in turn .
3,189
def load_data_file ( fname , directory = None , force_download = False ) : _url_root = 'http://github.com/vispy/demo-data/raw/master/' url = _url_root + fname if directory is None : directory = config [ 'data_path' ] if directory is None : raise ValueError ( 'config["data_path"] is not defined, ' 'so directory must be supplied' ) fname = op . join ( directory , op . normcase ( fname ) ) if op . isfile ( fname ) : if not force_download : return fname if isinstance ( force_download , string_types ) : ntime = time . strptime ( force_download , '%Y-%m-%d' ) ftime = time . gmtime ( op . getctime ( fname ) ) if ftime >= ntime : return fname else : print ( 'File older than %s, updating...' % force_download ) if not op . isdir ( op . dirname ( fname ) ) : os . makedirs ( op . abspath ( op . dirname ( fname ) ) ) _fetch_file ( url , fname ) return fname
Get a standard vispy demo data file
3,190
def _chunk_write ( chunk , local_file , progress ) : local_file . write ( chunk ) progress . update_with_increment_value ( len ( chunk ) )
Write a chunk to file and update the progress bar
3,191
def _fetch_file ( url , file_name , print_destination = True ) : temp_file_name = file_name + ".part" local_file = None initial_size = 0 n_try = 3 for ii in range ( n_try ) : try : data = urllib . request . urlopen ( url , timeout = 15. ) except Exception as e : if ii == n_try - 1 : raise RuntimeError ( 'Error while fetching file %s.\n' 'Dataset fetching aborted (%s)' % ( url , e ) ) try : file_size = int ( data . headers [ 'Content-Length' ] . strip ( ) ) print ( 'Downloading data from %s (%s)' % ( url , sizeof_fmt ( file_size ) ) ) local_file = open ( temp_file_name , "wb" ) _chunk_read ( data , local_file , initial_size = initial_size ) if not local_file . closed : local_file . close ( ) shutil . move ( temp_file_name , file_name ) if print_destination is True : sys . stdout . write ( 'File saved as %s.\n' % file_name ) except Exception as e : raise RuntimeError ( 'Error while fetching file %s.\n' 'Dataset fetching aborted (%s)' % ( url , e ) ) finally : if local_file is not None : if not local_file . closed : local_file . close ( )
Load requested file, downloading it if needed or requested
3,192
def update ( self , cur_value , mesg = None ) : self . cur_value = cur_value progress = float ( self . cur_value ) / self . max_value num_chars = int ( progress * self . max_chars ) num_left = self . max_chars - num_chars if mesg is not None : self . mesg = mesg bar = self . template . format ( self . progress_character * num_chars , ' ' * num_left , progress * 100 , self . spinner_symbols [ self . spinner_index ] , self . mesg ) sys . stdout . write ( bar ) if self . spinner : self . spinner_index = ( self . spinner_index + 1 ) % self . n_spinner sys . stdout . flush ( )
Update progressbar with current value of process
3,193
def central_widget ( self ) : if self . _central_widget is None : self . _central_widget = Widget ( size = self . size , parent = self . scene ) return self . _central_widget
Returns the default widget that occupies the entire area of the canvas .
3,194
def visual_at ( self , pos ) : tr = self . transforms . get_transform ( 'canvas' , 'framebuffer' ) fbpos = tr . map ( pos ) [ : 2 ] try : id_ = self . _render_picking ( region = ( fbpos [ 0 ] , fbpos [ 1 ] , 1 , 1 ) ) vis = VisualNode . _visual_ids . get ( id_ [ 0 , 0 ] , None ) except RuntimeError : return self . _visual_bounds_at ( pos ) return vis
Return the visual at a given position
3,195
def _render_picking ( self , ** kwargs ) : try : self . _scene . picking = True img = self . render ( bgcolor = ( 0 , 0 , 0 , 0 ) , ** kwargs ) finally : self . _scene . picking = False img = img . astype ( 'int32' ) * [ 2 ** 0 , 2 ** 8 , 2 ** 16 , 2 ** 24 ] id_ = img . sum ( axis = 2 ) . astype ( 'int32' ) return id_
Render the scene in picking mode returning a 2D array of visual IDs .
3,196
def on_close ( self , event ) : self . events . mouse_press . disconnect ( self . _process_mouse_event ) self . events . mouse_move . disconnect ( self . _process_mouse_event ) self . events . mouse_release . disconnect ( self . _process_mouse_event ) self . events . mouse_wheel . disconnect ( self . _process_mouse_event )
Close event handler
3,197
def pop_viewport ( self ) : vp = self . _vp_stack . pop ( ) if len ( self . _vp_stack ) > 0 : self . context . set_viewport ( * self . _vp_stack [ - 1 ] ) else : self . context . set_viewport ( 0 , 0 , * self . physical_size ) self . _update_transforms ( ) return vp
Pop a viewport from the stack .
3,198
def push_fbo ( self , fbo , offset , csize ) : self . _fb_stack . append ( ( fbo , offset , csize ) ) try : fbo . activate ( ) h , w = fbo . color_buffer . shape [ : 2 ] self . push_viewport ( ( 0 , 0 , w , h ) ) except Exception : self . _fb_stack . pop ( ) raise self . _update_transforms ( )
Push an FBO on the stack. This activates the framebuffer and causes subsequent rendering to be written to the framebuffer rather than the canvas's back buffer. This will also set the canvas viewport to cover the boundaries of the framebuffer.
3,199
def pop_fbo ( self ) : fbo = self . _fb_stack . pop ( ) fbo [ 0 ] . deactivate ( ) self . pop_viewport ( ) if len ( self . _fb_stack ) > 0 : old_fbo = self . _fb_stack [ - 1 ] old_fbo [ 0 ] . activate ( ) self . _update_transforms ( ) return fbo
Pop an FBO from the stack .