idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
54,400
def autodetect_style(self, data):
    """Determine the docstring style of `data` and set it as the input style.

    :param data: docstring text to inspect
    :returns: the detected style name, or 'unknown'
    """
    # First pass: count occurrences of each tag style's option keywords.
    found_keys = defaultdict(int)
    for style in self.tagstyles:
        for key in self.opt:
            found_keys[style] += data.count(self.opt[key][style]['name'])
    best = max(found_keys, key=found_keys.get)
    detected_style = best if found_keys[best] else 'unknown'
    # Second pass: look for groups/google/numpydoc section headers.
    if detected_style == 'unknown':
        found_groups = 0
        found_googledoc = 0
        found_numpydoc = 0
        found_numpydocsep = 0
        for line in data.strip().splitlines():
            for key in self.groups:
                if isin_start(self.groups[key], line):
                    found_groups += 1
            for key in self.googledoc:
                if isin_start(self.googledoc[key], line):
                    found_googledoc += 1
            for key in self.numpydoc:
                if isin_start(self.numpydoc[key], line):
                    found_numpydoc += 1
            stripped = line.strip()
            if stripped and isin_alone(['-' * len(stripped)], line):
                # a full dashed underline is the numpydoc section separator
                found_numpydocsep += 1
            elif isin(self.numpydoc.keywords, line):
                found_numpydoc += 1
        if found_numpydoc and found_numpydocsep:
            detected_style = 'numpydoc'
        elif found_googledoc >= found_groups:
            detected_style = 'google'
        elif found_groups:
            detected_style = 'groups'
    self.style['in'] = detected_style
    return detected_style
Determine the style of a docstring and sets it as the default input one for the instance .
54,401
def _get_options ( self , style ) : return [ self . opt [ o ] [ style ] [ 'name' ] for o in self . opt ]
Get the list of keywords for a particular style
54,402
def get_group_key_line(self, data, key):
    """Get the line number where group-style `key` appears (-1 if absent).

    NOTE(review): if the key appears several times, the LAST match wins —
    confirm this is intended.
    """
    line_num = -1
    for num, line in enumerate(data.splitlines()):
        if isin_start(self.groups[key], line):
            line_num = num
    return line_num
Get the next group-style key's line number.
54,403
def get_group_key_index(self, data, key):
    """Get the character index in `data` where group-style `key` starts (-1 if absent)."""
    line_num = self.get_group_key_line(data, key)
    if line_num == -1:
        return -1
    index = 0
    for line in data.splitlines()[:line_num]:
        index += len(line) + 1  # +1 for the stripped newline
    return index
Get the next group-style section's starting index for a key.
54,404
def get_group_line(self, data):
    """Get the smallest line number among all group keys found (-1 if none)."""
    best = -1
    for key in self.groups:
        line_num = self.get_group_key_line(data, key)
        if line_num != -1 and (best == -1 or line_num < best):
            best = line_num
    return best
Get the next group-style key's line.
54,405
def get_group_index(self, data):
    """Get the character index where the first group-style section starts (-1 if none)."""
    line_num = self.get_group_line(data)
    if line_num == -1:
        return -1
    index = 0
    for line in data.splitlines()[:line_num]:
        index += len(line) + 1  # +1 for the stripped newline
    return index
Get the next group-style section's starting line index.
54,406
def get_key_index(self, data, key, starting=True):
    """Get from a docstring the next option with a given key.

    :param data: docstring text to search
    :param key: the logical option key (looked up in self.opt for the input style)
    :param starting: if True, only accept a match that starts its line
    :returns: index of the key in `data`, or -1 if not found
    """
    key = self.opt[key][self.style['in']]['name']
    if key.startswith(':returns'):
        # normalize the reST singular form before searching
        data = data.replace(':return:', ':returns:')
    idx = len(data)
    ini = 0
    loop = True
    if key in data:
        while loop:
            i = data.find(key)
            if i != -1:
                if starting:
                    # reject a match that is preceded by text on the same line
                    if not data[:i].rstrip(' \t').endswith('\n') and len(data[:i].strip()) > 0:
                        # NOTE(review): `ini` is overwritten rather than
                        # accumulated, so the returned index looks off after
                        # more than one rejected match — confirm.
                        ini = i + 1
                        data = data[ini:]
                    else:
                        idx = ini + i
                        loop = False
                else:
                    idx = ini + i
                    loop = False
            else:
                loop = False
    if idx == len(data):
        # sentinel untouched (or coincidental end-of-data): not found
        idx = -1
    return idx
Get from a docstring the next option with a given key .
54,407
def _extract_docs_description(self):
    """Extract the main description from the input docstring into docs['in']['desc']."""
    data = '\n'.join(
        line.rstrip().replace(self.docs['out']['spaces'], '', 1)
        for line in self.docs['in']['raw'].splitlines()
    )
    style = self.dst.style['in']
    if style == 'groups':
        idx = self.dst.get_group_index(data)
    elif style in ('google', 'numpydoc'):
        extension = self.dst.googledoc if style == 'google' else self.dst.numpydoc
        lines = data.splitlines()
        line_num = extension.get_next_section_start_line(lines)
        idx = -1 if line_num == -1 else len('\n'.join(lines[:line_num]))
    elif style == 'unknown':
        idx = -1
    else:
        idx = self.dst.get_elem_index(data)
    if idx == 0:
        self.docs['in']['desc'] = ''
    elif idx == -1:
        self.docs['in']['desc'] = data
    else:
        self.docs['in']['desc'] = data[:idx]
Extract main description from docstring
54,408
def _extract_groupstyle_docs_params(self):
    """Extract group-style parameters into self.docs['in']['params']."""
    data = '\n'.join(
        line.rstrip().replace(self.docs['out']['spaces'], '', 1)
        for line in self.docs['in']['raw'].splitlines()
    )
    start = self.dst.get_group_key_line(data, 'param')
    if start < 0:
        return
    lines = data.splitlines()[start + 1:]
    # the params section ends where the next group section begins
    end = self.dst.get_group_line('\n'.join(lines))
    if end == -1:
        end = len(lines)
    for line in lines[:end]:
        param = None
        desc = ''
        ptype = ''
        m = re.match(r'^\W*(\w+)[\W\s]+(\w[\s\w]+)', line.strip())
        if m:
            param = m.group(1).strip()
            desc = m.group(2).strip()
        else:
            m = re.match(r'^\W*(\w+)\W*', line.strip())
            if m:
                param = m.group(1).strip()
        if param:
            self.docs['in']['params'].append((param, desc, ptype))
Extract group style parameters
54,409
def _extract_docs_return(self):
    """Extract return description and type from the input docstring."""
    style = self.dst.style['in']
    if style in ('numpydoc', 'google'):
        data = '\n'.join(
            line.rstrip().replace(self.docs['out']['spaces'], '', 1)
            for line in self.docs['in']['raw'].splitlines()
        )
        extension = self.dst.numpydoc if style == 'numpydoc' else self.dst.googledoc
        self.docs['in']['return'] = extension.get_return_list(data)
        self.docs['in']['rtype'] = None
    elif style == 'groups':
        self._extract_groupstyle_docs_return()
    elif style in ('javadoc', 'reST'):
        self._extract_tagstyle_docs_return()
Extract return description and type
54,410
def _extract_docs_other(self):
    """Extract other specific sections (numpydoc only).

    NOTE(review): every `lst` assignment below is immediately overwritten and
    the final value is discarded — the extracted sections are never stored.
    This looks like unfinished code; confirm intent before relying on it.
    """
    if self.dst.style['in'] == 'numpydoc':
        data = '\n'.join([d.rstrip().replace(self.docs['out']['spaces'], '', 1) for d in self.docs['in']['raw'].splitlines()])
        lst = self.dst.numpydoc.get_list_key(data, 'also')
        lst = self.dst.numpydoc.get_list_key(data, 'ref')
        lst = self.dst.numpydoc.get_list_key(data, 'note')
        lst = self.dst.numpydoc.get_list_key(data, 'other')
        lst = self.dst.numpydoc.get_list_key(data, 'example')
        lst = self.dst.numpydoc.get_list_key(data, 'attr')
Extract other specific sections
54,411
def _set_desc ( self ) : if self . docs [ 'in' ] [ 'desc' ] : self . docs [ 'out' ] [ 'desc' ] = self . docs [ 'in' ] [ 'desc' ] else : self . docs [ 'out' ] [ 'desc' ] = ''
Sets the global description if any
54,412
def _set_params ( self ) : if self . docs [ 'in' ] [ 'params' ] : self . docs [ 'out' ] [ 'params' ] = list ( self . docs [ 'in' ] [ 'params' ] ) for e in self . element [ 'params' ] : if type ( e ) is tuple : param = e [ 0 ] else : param = e found = False for i , p in enumerate ( self . docs [ 'out' ] [ 'params' ] ) : if param == p [ 0 ] : found = True if type ( e ) is tuple : self . docs [ 'out' ] [ 'params' ] [ i ] = ( p [ 0 ] , p [ 1 ] , p [ 2 ] , e [ 1 ] ) if not found : if type ( e ) is tuple : p = ( param , '' , None , e [ 1 ] ) else : p = ( param , '' , None , None ) self . docs [ 'out' ] [ 'params' ] . append ( p )
Sets the parameters with types, descriptions, and default value if any.
54,413
def _set_raises(self):
    """Copy the raises section from input to output unless excluded by numpydoc."""
    if not self.docs['in']['raises']:
        return
    if (self.dst.style['out'] != 'numpydoc'
            or self.dst.style['in'] == 'numpydoc'
            or 'raise' not in self.dst.numpydoc.get_excluded_sections()):
        self.docs['out']['raises'] = list(self.docs['in']['raises'])
Sets the raises and descriptions
54,414
def _set_return ( self ) : if type ( self . docs [ 'in' ] [ 'return' ] ) is list and self . dst . style [ 'out' ] not in [ 'groups' , 'numpydoc' , 'google' ] : lst = self . docs [ 'in' ] [ 'return' ] if lst : if lst [ 0 ] [ 0 ] is not None : self . docs [ 'out' ] [ 'return' ] = "%s-> %s" % ( lst [ 0 ] [ 0 ] , lst [ 0 ] [ 1 ] ) else : self . docs [ 'out' ] [ 'return' ] = lst [ 0 ] [ 1 ] self . docs [ 'out' ] [ 'rtype' ] = lst [ 0 ] [ 2 ] else : self . docs [ 'out' ] [ 'return' ] = self . docs [ 'in' ] [ 'return' ] self . docs [ 'out' ] [ 'rtype' ] = self . docs [ 'in' ] [ 'rtype' ]
Sets the return parameter with description and rtype if any
54,415
def _set_other(self):
    """Set the not-managed ('post') sections of the output docstring."""
    if self.dst.style['in'] == 'numpydoc':
        if self.docs['in']['raw'] is not None:
            # keep the numpydoc sections this tool does not manage
            self.docs['out']['post'] = self.dst.numpydoc.get_raw_not_managed(self.docs['in']['raw'])
    elif 'post' not in self.docs['out'] or self.docs['out']['post'] is None:
        self.docs['out']['post'] = ''
Sets other specific sections
54,416
def _set_raw(self):
    """Compose and store the output raw docstring in docs['out']['raw']."""
    sep = self.dst.get_sep(target='out')
    if sep != ' ':
        sep = sep + ' '

    def with_space(text):
        # indent every line but the first with the output spacing
        return '\n'.join(
            self.docs['out']['spaces'] + line if i > 0 else line
            for i, line in enumerate(text.splitlines())
        )

    raw = self.docs['out']['spaces'] + self.quotes
    desc = self.docs['out']['desc'].strip()
    if not desc or not desc.count('\n'):
        if (not self.docs['out']['params'] and not self.docs['out']['return']
                and not self.docs['out']['rtype'] and not self.docs['out']['raises']):
            # nothing but a short description: emit a one-liner docstring
            raw += desc if desc else self.trailing_space
            raw += self.quotes
            self.docs['out']['raw'] = raw.rstrip()
            return
    if not self.first_line:
        raw += '\n' + self.docs['out']['spaces']
    raw += with_space(self.docs['out']['desc']).strip() + '\n'
    raw += self._set_raw_params(sep)
    raw += self._set_raw_return(sep)
    raw += self._set_raw_raise(sep)
    if 'post' in self.docs['out']:
        raw += self.docs['out']['spaces'] + with_space(self.docs['out']['post']).strip() + '\n'
    if 'doctests' in self.docs['out']:
        raw += self.docs['out']['spaces'] + with_space(self.docs['out']['doctests']).strip() + '\n'
    if raw.count(self.quotes) == 1:
        # close the docstring if no closing quotes were appended yet
        raw += self.docs['out']['spaces'] + self.quotes
    self.docs['out']['raw'] = raw.rstrip()
Sets the output raw docstring
54,417
def generate_docs(self):
    """Run the full pipeline producing the output docstring."""
    if self.dst.style['out'] == 'numpydoc' and self.dst.numpydoc.first_line is not None:
        # the numpydoc extension may override the first-line preference
        self.first_line = self.dst.numpydoc.first_line
    self._set_desc()
    self._set_params()
    self._set_return()
    self._set_raises()
    self._set_other()
    self._set_raw()
    self.generated_docs = True
Generates the output docstring
54,418
def get_files_from_dir(path, recursive=True, depth=0, file_ext='.py'):
    """Retrieve the list of files from a folder.

    :param path: a file or folder path ('-' is passed through for stdin use)
    :param recursive: whether to descend into sub-folders
    :param depth: current recursion depth (bounded by MAX_DEPTH_RECUR)
    :param file_ext: extension of the files to keep
    :returns: list of matching file paths
    """
    if os.path.isfile(path) or path == '-':
        return [path]
    if not path.endswith(os.sep):
        path += os.sep
    file_list = []
    for entry in glob.glob(path + "*"):
        if os.path.isdir(entry):
            # BUGFIX: honor the `recursive` flag (it was previously ignored)
            # and propagate `file_ext` to sub-folders (it previously reset
            # to the default '.py' on every recursive call).
            if recursive and depth < MAX_DEPTH_RECUR:
                file_list.extend(get_files_from_dir(entry, recursive, depth + 1, file_ext))
        elif entry.endswith(file_ext):
            file_list.append(entry)
    return file_list
Retrieve the list of files from a folder .
54,419
def get_config(config_file):
    """Get the configuration from a file.

    Lines have the form ``key = value``; blank lines are skipped. Boolean
    options ('init2class', 'first_line', 'convert_only') are converted to bool.

    :param config_file: configuration file path (falsy → empty config)
    :returns: dict of configuration options
    """
    config = {}
    if not config_file:
        return config
    try:
        # BUGFIX: use a context manager so the handle is always closed,
        # and catch only I/O errors instead of a bare `except`.
        with open(config_file, 'r') as f:
            lines = f.readlines()
    except IOError:
        print("Unable to open configuration file '{0}'".format(config_file))
        return config
    for line in lines:
        if line.strip():
            key, value = line.split("=", 1)
            key, value = key.strip(), value.strip()
            if key in ('init2class', 'first_line', 'convert_only'):
                value = value.lower() == 'true'
            config[key] = value
    return config
Get the configuration from a file .
54,420
def get_output_docs(self):
    """Return the formatted output docstrings of all parsed elements."""
    if not self.parsed:
        self._parse()
    return [entry['docs'].get_raw_docs() for entry in self.docs_list]
Return the output docstrings once formatted
54,421
def compute_before_after(self):
    """Compute the file lines before and after the proposed docstring changes.

    :returns: tuple (original lines, modified lines)
    """
    if not self.parsed:
        self._parse()
    list_from = self.input_lines
    list_to = []
    last = 0
    for entry in self.docs_list:
        start, end = entry['location']
        if start <= 0:
            # negative location — presumably flags an element without an
            # existing docstring, so its own line is kept; confirm upstream
            start, end = -start, -end
            list_to.extend(list_from[last:start + 1])
        else:
            list_to.extend(list_from[last:start])
        docs = entry['docs'].get_raw_docs()
        list_to.extend(line + '\n' for line in docs.splitlines())
        last = end + 1
    if last < len(list_from):
        list_to.extend(list_from[last:])
    return list_from, list_to
Compute the list of lines before and after the proposed docstring changes .
54,422
def diff(self, source_path='', target_path='', which=-1):
    """Build the unified diff between original and proposed file content."""
    list_from, list_to = self.compute_before_after()

    def as_prefix(path):
        # strip a leading separator and ensure a trailing one
        if path.startswith(os.sep):
            path = path[1:]
        if path and not path.endswith(os.sep):
            path += os.sep
        return path

    basename = os.path.basename(self.input_file)
    fromfile = 'a/' + as_prefix(source_path) + basename
    tofile = 'b/' + as_prefix(target_path) + basename
    return list(difflib.unified_diff(list_from, list_to, fromfile, tofile))
Build the diff between original docstring and proposed docstring .
54,423
def get_patch_lines(self, source_path, target_path):
    """Return the patch lines (header followed by the unified diff)."""
    header = "# Patch generated by Pyment v{0}\n\n".format(__version__)
    return [header] + self.diff(source_path, target_path)
Return the diff between source_path and target_path
54,424
def write_patch_file(self, patch_file, lines_to_write):
    """Write the given lines to the patch file."""
    with open(patch_file, 'w') as patch:
        patch.writelines(lines_to_write)
Write lines_to_write to the file called patch_file.
54,425
def overwrite_source_file(self, lines_to_write):
    """Overwrite the input file with the given lines via a temporary file."""
    tmp_filename = '{0}.writing'.format(self.input_file)
    ok = False
    try:
        with open(tmp_filename, 'w') as tmp:
            tmp.writelines(lines_to_write)
        ok = True
    finally:
        if not ok:
            # the write failed: drop the partial temporary file
            os.unlink(tmp_filename)
        elif platform.system() == 'Windows':
            # on Windows, renaming onto an existing file needs special handling
            self._windows_rename(tmp_filename)
        else:
            os.rename(tmp_filename, self.input_file)
Overwrite the file with lines_to_write.
54,426
def by_own_time_per_call(stat):
    """Sort key: exclusive elapsed time per call, descending; ties break on deep time per call."""
    primary = -stat.own_time_per_call if stat.own_hits else -stat.own_time
    return (primary, by_deep_time_per_call(stat))
Sorting by exclusive elapsed time per call in descending order .
54,427
def result(self):
    """Get the frozen statistics to serialize by pickle.

    :returns: tuple (stats, cpu_time, wall_time); times are 0.0 when the
        profiler was never started.
    """
    try:
        # NOTE(review): time.clock was removed in Python 3.8; there the
        # AttributeError branch always wins — confirm intended support range.
        cpu_time = max(0, time.clock() - self._cpu_time_started)
        wall_time = max(0, time.time() - self._wall_time_started)
    except AttributeError:
        cpu_time = wall_time = 0.0
    return self.stats, cpu_time, wall_time
Gets the frozen statistics to serialize by Pickle .
54,428
def dump(self, dump_filename, pickle_protocol=pickle.HIGHEST_PROTOCOL):
    """Save the profiling result (profiler class + result tuple) to a file."""
    result = self.result()
    with open(dump_filename, 'wb') as dump_file:
        pickle.dump((self.__class__, result), dump_file, pickle_protocol)
Saves the profiling result to a file
54,429
def make_viewer(self, title=None, at=None):
    """Make an activated StatisticsViewer populated from the profiling result."""
    stats, cpu_time, wall_time = self.result()
    viewer = StatisticsViewer()
    viewer.set_profiler_class(self.__class__)
    viewer.set_result(stats, cpu_time, wall_time, title=title, at=at)
    viewer.activate()
    return viewer
Makes a statistics viewer from the profiling result .
54,430
def pack_msg(method, msg, pickle_protocol=PICKLE_PROTOCOL):
    """Pack a method id and a pickled message into a framed byte string."""
    payload = io.BytesIO()
    pickle.dump(msg, payload, pickle_protocol)
    header = struct.pack(METHOD_STRUCT_FORMAT, method)
    size = struct.pack(SIZE_STRUCT_FORMAT, payload.tell())
    return header + size + payload.getvalue()
Packs a method and message .
54,431
def recv(sock, size):
    """Receive exactly `size` bytes; blocks the thread.

    :raises socket.error: with ECONNRESET when the peer closes early
    """
    data = sock.recv(size, socket.MSG_WAITALL)
    if len(data) < size:
        # a short read despite MSG_WAITALL means the connection was closed
        raise socket.error(ECONNRESET, 'Connection closed')
    return data
Receives exactly size bytes . This function blocks the thread .
54,432
def recv_msg(sock):
    """Receive a framed (method, message) pair from the socket; blocks.

    NOTE(review): pickle.loads on network data executes arbitrary code —
    only use with trusted peers.
    """
    method_size = struct.calcsize(METHOD_STRUCT_FORMAT)
    method, = struct.unpack(METHOD_STRUCT_FORMAT, recv(sock, method_size))
    size_size = struct.calcsize(SIZE_STRUCT_FORMAT)
    size, = struct.unpack(SIZE_STRUCT_FORMAT, recv(sock, size_size))
    msg = pickle.loads(recv(sock, size))
    return method, msg
Receives a method and message from the socket . This function blocks the current thread .
54,433
def connected(self, client):
    """Register a newly connected client and send it the handshake messages."""
    self.clients.add(client)
    self._log_connected(client)
    self._start_watching(client)
    # WELCOME is pickled with protocol 0 so any client version can read it
    self.send_msg(client, WELCOME, (self.pickle_protocol, __version__), pickle_protocol=0)
    # unwrap nested profilers to report the innermost profiler type
    profiler = self.profiler
    while True:
        try:
            profiler = profiler.profiler
        except AttributeError:
            break
    self.send_msg(client, PROFILER, type(profiler))
    if self._latest_result_data is not None:
        try:
            self._send(client, self._latest_result_data)
        except socket.error as exc:
            if exc.errno in (EBADF, EPIPE):
                # the client went away before the result was delivered
                self.disconnected(client)
                return
            raise
    if len(self.clients) == 1:
        # the first client starts the profiling session
        self._start_profiling()
Call this method when a client connected .
54,434
def disconnected(self, client):
    """Unregister a disconnected client; a no-op if already removed."""
    if client in self.clients:
        self.clients.remove(client)
        self._log_disconnected(client)
        self._close(client)
Call this method when a client disconnected .
54,435
def get_mark(self):
    """Get the icon: leaf icon for leaves, expanded/collapsed icon otherwise."""
    if self.is_leaf:
        char = self.icon_chars[2]
    else:
        # index 0 = collapsed, 1 = expanded
        char = self.icon_chars[int(self.expanded)]
    return urwid.SelectableIcon(('mark', char), 0)
Gets an expanded, collapsed, or leaf icon.
54,436
def get_path(self):
    """Get the path to the focused statistics; each step is a statistics hash."""
    path = deque()
    __, node = self.get_focus()
    while not node.is_root():
        path.appendleft(hash(node.get_value()))
        node = node.get_parent()
    return path
Gets the path to the focused statistics. Each step is a hash of a statistics object.
54,437
def find_node(self, node, path):
    """Follow `path` (statistics hashes) down from `node`.

    Stops early at a leaf or when no child matches the next hash.
    """
    for hash_value in path:
        if isinstance(node, LeafStatisticsNode):
            break
        for stats in node.get_child_keys():
            if hash(stats) == hash_value:
                node = node.get_child_node(stats)
                break
        else:
            # no child matched this hash — stop descending
            break
    return node
Finds a node by the given path from the given node .
54,438
def update_result(self):
    """Push the current (paused or final) result to the table."""
    try:
        result = self._paused_result if self.paused else self._final_result
    except AttributeError:
        # no result captured yet — just refresh the frame
        self.table.update_frame()
        return
    stats, cpu_time, wall_time, title, at = result
    self.table.set_result(stats, cpu_time, wall_time, title, at)
Updates the result on the table .
54,439
def option_getter(type):
    """Get an unbound ConfigParser method reading an option as `type`.

    Falls back to the plain string getter for unknown types.
    """
    getters = {
        None: ConfigParser.get,
        int: ConfigParser.getint,
        float: ConfigParser.getfloat,
        bool: ConfigParser.getboolean,
    }
    return getters.get(type, getters[None])
Gets an unbound method to get a configuration option as the given type .
54,440
def config_default(option, default=None, type=None, section=cli.name):
    """Make a callable that reads a CLI option's default from the config file."""
    def default_from_config(option=option, default=default, type=type, section=section):
        # parameters re-bound as defaults so the values are fixed at build time
        config = read_config()
        if type is None and default is not None:
            type = builtins.type(default)
        get_option = option_getter(type)
        try:
            return get_option(config, section, option)
        except (NoOptionError, NoSectionError):
            return default
    return default_from_config
Guesses a default value of a CLI option from the configuration .
54,441
def config_flag(option, value, default=False, section=cli.name):
    """Make a lazy boolean that is True when the configured option equals `value`."""
    class _Flag(object):
        def __bool__(self, option=option, value=value, default=default, section=section):
            config = read_config()
            get_option = option_getter(builtins.type(value))
            try:
                return get_option(config, section, option) == value
            except (NoOptionError, NoSectionError):
                return default
        __nonzero__ = __bool__  # Python 2 compatibility
    return _Flag()
Guesses whether a CLI flag should be turned on or off from the configuration . If the configuration option value is same with the given value it returns True .
54,442
def get_title(src_name, src_type=None):
    """Normalize a source name into a short title for the viewer."""
    if src_type == 'tcp':
        # for tcp sources, src_name is a (host, port) pair
        return '{0}:{1}'.format(*src_name)
    return os.path.basename(src_name)
Normalizes a source name as a string to be used for the viewer's title.
54,443
def spawn_thread(func, *args, **kwargs):
    """Spawn and start a daemon thread running `func`."""
    thread = threading.Thread(target=func, args=args, kwargs=kwargs)
    thread.daemon = True
    thread.start()
    return thread
Spawns a daemon thread .
54,444
def spawn(mode, func, *args, **kwargs):
    """Spawn a thread-like object running `func` concurrently.

    :param mode: None (→ 'threading'), 'threading', 'gevent' or 'eventlet'
    :raises ValueError: for an unknown mode
    """
    if mode is None:
        mode = 'threading'
    elif mode not in spawn.modes:
        raise ValueError('Invalid spawn mode: %s' % mode)
    if mode == 'threading':
        return spawn_thread(func, *args, **kwargs)
    if mode == 'gevent':
        import gevent
        import gevent.monkey
        # patch so select/socket cooperate with the gevent loop
        gevent.monkey.patch_select()
        gevent.monkey.patch_socket()
        return gevent.spawn(func, *args, **kwargs)
    if mode == 'eventlet':
        import eventlet
        eventlet.patcher.monkey_patch(select=True, socket=True)
        return eventlet.spawn(func, *args, **kwargs)
    assert False
Spawns a thread - like object which runs the given function concurrently .
54,445
def profile(script, argv, profiler_factory, pickle_protocol, dump_filename, mono):
    """Profile a Python script once, then dump or view the result."""
    filename, code, globals_ = script
    # make the profiled script see its own argv
    sys.argv[:] = [filename] + list(argv)
    __profile__(filename, code, globals_, profiler_factory,
                pickle_protocol=pickle_protocol,
                dump_filename=dump_filename, mono=mono)
Profile a Python script .
54,446
def live_profile(script, argv, profiler_factory, interval, spawn, signum, pickle_protocol, mono):
    """Profile a Python script continuously.

    Forks: the parent runs the TUI viewer connected over a socketpair; the
    child executes the script under a background profiler and serves results.
    The child's stderr is forwarded to the parent through a pipe.
    """
    filename, code, globals_ = script
    sys.argv[:] = [filename] + list(argv)
    parent_sock, child_sock = socket.socketpair()
    stderr_r_fd, stderr_w_fd = os.pipe()
    pid = os.fork()
    if pid:
        # parent: drive the viewer attached to the child via parent_sock
        os.close(stderr_w_fd)
        viewer, loop = make_viewer(mono)
        title = get_title(filename)
        client = ProfilingClient(viewer, loop.event_loop, parent_sock, title)
        client.start()
        try:
            loop.run()
        except KeyboardInterrupt:
            os.kill(pid, signal.SIGINT)
        except BaseException:
            os.kill(pid, signal.SIGTERM)
            raise
        finally:
            parent_sock.close()
        # reap the child; terminate it if still running
        w_pid, status = os.waitpid(pid, os.WNOHANG)
        if w_pid == 0:
            os.kill(pid, signal.SIGTERM)
        exit_code = os.WEXITSTATUS(status)
        with os.fdopen(stderr_r_fd, 'r') as f:
            child_stderr = f.read()
        if child_stderr:
            sys.stdout.flush()
            sys.stderr.write(child_stderr)
        sys.exit(exit_code)
    else:
        # child: silence stdin/stdout, forward stderr to the parent's pipe
        os.close(stderr_r_fd)
        devnull = os.open(os.devnull, os.O_RDWR)
        for f in [sys.stdin, sys.stdout]:
            os.dup2(devnull, f.fileno())
        os.dup2(stderr_w_fd, sys.stderr.fileno())
        frame = sys._getframe()
        profiler = profiler_factory(base_frame=frame, base_code=code)
        profiler_trigger = BackgroundProfiler(profiler, signum)
        profiler_trigger.prepare()
        server_args = (interval, noop, pickle_protocol)
        server = SelectProfilingServer(None, profiler_trigger, *server_args)
        server.clients.add(child_sock)
        # serve profiling results to the parent concurrently
        spawn(server.connected, child_sock)
        try:
            exec_(code, globals_)
        finally:
            os.close(stderr_w_fd)
            child_sock.shutdown(socket.SHUT_WR)
Profile a Python script continuously .
54,447
def view(src, mono):
    """Inspect statistics with the TUI viewer, from a dump file or a socket."""
    src_type, src_name = src
    title = get_title(src_name, src_type)
    viewer, loop = make_viewer(mono)
    if src_type == 'dump':
        time = datetime.fromtimestamp(os.path.getmtime(src_name))
        # NOTE(review): pickle.load on an arbitrary dump file can execute
        # arbitrary code — only open trusted dumps.
        with open(src_name, 'rb') as f:
            profiler_class, (stats, cpu_time, wall_time) = pickle.load(f)
        viewer.set_profiler_class(profiler_class)
        viewer.set_result(stats, cpu_time, wall_time, title=title, at=time)
        viewer.activate()
    elif src_type in ('tcp', 'sock'):
        family = {'tcp': socket.AF_INET, 'sock': socket.AF_UNIX}[src_type]
        client = FailoverProfilingClient(viewer, loop.event_loop, src_name, family, title=title)
        client.start()
    try:
        loop.run()
    except KeyboardInterrupt:
        pass
Inspect statistics by TUI view .
54,448
def timeit_profile(stmt, number, repeat, setup, profiler_factory, pickle_protocol, dump_filename, mono, **_ignored):
    """Profile a Python statement like timeit, auto-calibrating `number` if unset."""
    del _ignored
    globals_ = {}
    exec_(setup, globals_)
    if number is None:
        # determine iterations the way timeit does: grow by 10x until one
        # batch of the statement takes at least 0.2 seconds
        dummy_profiler = profiler_factory()
        dummy_profiler.start()
        for exponent in range(1, 10):
            number = 10 ** exponent
            t = time.time()
            for _ in range(number):
                exec_(stmt, globals_)
            if time.time() - t >= 0.2:
                break
        dummy_profiler.stop()
        del dummy_profiler
    code = compile('for _ in range(%d): %s' % (number, stmt), 'STATEMENT', 'exec')
    __profile__(stmt, code, globals_, profiler_factory,
                pickle_protocol=pickle_protocol,
                dump_filename=dump_filename, mono=mono)
Profile a Python statement like timeit .
54,449
def spread_stats(stats, spreader=False):
    """Iterate all descendant statistics under the given root statistics.

    With spreader=True, yields (stats, spread) pairs where `spread` is
    cleared before each yield and controls whether children are visited.
    """
    spread = spread_t() if spreader else True
    descendants = deque(stats)
    while descendants:
        current = descendants.popleft()
        if spreader:
            spread.clear()
            yield current, spread
        else:
            yield current
        if spread:
            descendants.extend(current)
Iterates all descendant statistics under the given root statistics .
54,450
def own_time(self):
    """The exclusive execution time: deep time minus the children's deep time,
    clamped at zero."""
    children_time = sum(child.deep_time for child in self)
    return max(0., self.deep_time - children_time)
The exclusive execution time .
54,451
def flatten(cls, stats):
    """Make a flat statistics tree from the given statistics.

    Descendants are merged into one child per unique
    (name, filename, lineno, module), with hits and times summed.
    """
    flat_children = {}
    for descendant in spread_stats(stats):
        key = (descendant.name, descendant.filename, descendant.lineno, descendant.module)
        try:
            flat_stats = flat_children[key]
        except KeyError:
            flat_stats = flat_children[key] = cls(*key)
        flat_stats.own_hits += descendant.own_hits
        flat_stats.deep_hits += descendant.deep_hits
        flat_stats.own_time += descendant.own_time
        flat_stats.deep_time += descendant.deep_time
    children = list(itervalues(flat_children))
    return cls(stats.name, stats.filename, stats.lineno, stats.module,
               stats.own_hits, stats.deep_hits, stats.own_time, stats.deep_time,
               children)
Makes a flat statistics from the given statistics .
54,452
def requirements(filename):
    """Read the non-empty, stripped requirement lines from a file."""
    with open(filename) as f:
        return [line.strip() for line in f.readlines() if line.strip()]
Reads requirements from a file .
54,453
def sample(self, frame):
    """Record one sample: an own hit on `frame` under its call path."""
    frames = self.frame_stack(frame)
    if frames:
        # drop the sampled frame itself; it is recorded separately below
        frames.pop()
    parent_stats = self.stats
    for parent_frame in frames:
        parent_stats = parent_stats.ensure_child(parent_frame.f_code, void)
    stats = parent_stats.ensure_child(frame.f_code, RecordingStatistics)
    stats.own_hits += 1
Samples the given frame .
54,454
def deferral():
    """Defer function calls like Go's `defer`; deferred calls run LIFO on exit."""
    deferred = []

    def defer(func, *args, **kwargs):
        deferred.append((func, args, kwargs))

    try:
        yield defer
    finally:
        # run deferred calls in reverse registration order
        while deferred:
            func, args, kwargs = deferred.pop()
            func(*args, **kwargs)
Defers a function call when it is being required like Go .
54,455
def start(self, *args, **kwargs):
    """Start the instance by priming its run() generator.

    :raises RuntimeError: if already started
    :raises TypeError: if run() yields zero times or yields a value
    """
    if self.is_running():
        raise RuntimeError('Already started')
    self._running = self.run(*args, **kwargs)
    try:
        first_yield = next(self._running)
    except StopIteration:
        raise TypeError('run() must yield just one time')
    if first_yield is not None:
        raise TypeError('run() must yield without value')
Starts the instance .
54,456
def stop(self):
    """Stop the instance by exhausting its run() generator.

    :raises RuntimeError: if not started
    :raises TypeError: if run() yields more than one time
    """
    if not self.is_running():
        raise RuntimeError('Not started')
    running, self._running = self._running, None
    try:
        next(running)
    except StopIteration:
        # the generator finished as expected
        pass
    else:
        raise TypeError('run() must yield just one time')
Stops the instance .
54,457
def sockets(self):
    """Return all sockets: the clients plus the listener when present."""
    if self.listener is None:
        return self.clients
    return self.clients.union([self.listener])
Returns the set of the sockets .
54,458
def select_sockets(self, timeout=None):
    """EINTR-safe select over the incoming sockets.

    :returns: the ready sockets, or [] once the timeout is exhausted
    """
    if timeout is not None:
        t = time.time()
    while True:
        try:
            ready, __, __ = select.select(self.sockets(), (), (), timeout)
        except ValueError:
            # a socket was closed concurrently; retry
            pass
        except select.error as exc:
            if exc.args[0] != EINTR:
                raise
        else:
            return ready
        if timeout is None:
            continue
        # shrink the remaining timeout by the elapsed time
        now = time.time()
        timeout -= now - t
        t = now
        if timeout <= 0:
            return []
EINTR safe version of select . It focuses on just incoming sockets .
54,459
def dispatch_sockets(self, timeout=None):
    """Dispatch ready sockets: accept on the listener, detect client hangups."""
    for sock in self.select_sockets(timeout=timeout):
        if sock is self.listener:
            # a new client is knocking
            accepted, addr = sock.accept()
            self.connected(accepted)
        else:
            # clients never send payload; readability means hangup/reset
            try:
                sock.recv(1)
            except socket.error as exc:
                if exc.errno != ECONNRESET:
                    raise
            self.disconnected(sock)
Dispatches incoming sockets .
54,460
def record_entering(self, time, code, frame_key, parent_stats):
    """Record entering a function call at `time`."""
    stats = parent_stats.ensure_child(code, RecordingStatistics)
    # remember when this (code, frame) was entered, for record_leaving
    self._times_entered[(code, frame_key)] = time
    stats.own_hits += 1
Entered to a function call .
54,461
def record_leaving(self, time, code, frame_key, parent_stats):
    """Record leaving a function call; accumulates the elapsed deep time.

    Silently ignores a leave that was never matched by an enter.
    """
    try:
        stats = parent_stats.get_child(code)
        time_entered = self._times_entered.pop((code, frame_key))
    except KeyError:
        return
    stats.deep_time += max(0, time - time_entered)
Left from a function call .
54,462
def build_sink(function: Callable[..., None] = None, *, unpack: bool = False):
    """Decorator wrapping a function so calling it returns a Sink subscriber.

    Usable bare (`@build_sink`) or with arguments (`@build_sink(unpack=True)`).
    """
    def _build_sink(function: Callable[..., None]):
        @wraps(function)
        def _wrapper(*args, **kwargs) -> Sink:
            if 'unpack' in kwargs:
                raise TypeError('"unpack" has to be defined by decorator')
            return Sink(function, *args, unpack=unpack, **kwargs)
        return _wrapper
    return _build_sink(function) if function else _build_sink
Decorator to wrap a function to return a Sink subscriber .
54,463
def build_map(function: Callable[[Any], Any] = None, unpack: bool = False):
    """Decorator wrapping a function so calling it returns a Map operator.

    Usable bare (`@build_map`) or with arguments (`@build_map(unpack=True)`).
    """
    def _build_map(function: Callable[[Any], Any]):
        @wraps(function)
        def _wrapper(*args, **kwargs) -> Map:
            if 'unpack' in kwargs:
                raise TypeError('"unpack" has to be defined by decorator')
            return Map(function, *args, unpack=unpack, **kwargs)
        return _wrapper
    return _build_map(function) if function else _build_map
Decorator to wrap a function to return a Map operator .
54,464
def _trace_handler(publisher, value, label=None):
    """Default trace handler: print elapsed time, publisher (or label) and value."""
    elapsed = time() - Trace._timestamp_start
    line = '--- %8.3f: ' % elapsed
    line += repr(publisher) if label is None else label
    line += ' %r' % (value,)
    print(line)
Default trace handler is printing the timestamp the publisher name and the emitted value
54,465
def build_sink_async(coro=None, *, mode=None, unpack: bool = False):
    """Decorator wrapping a coroutine so calling it returns a SinkAsync subscriber.

    `mode` given at decoration time is the default; it can be overridden per
    call and falls back to MODE.CONCURRENT.
    """
    _mode = mode

    def _build_sink_async(coro):
        @wraps(coro)
        def _wrapper(*args, mode=None, **kwargs) -> SinkAsync:
            if 'unpack' in kwargs:
                raise TypeError('"unpack" has to be defined by decorator')
            if mode is None:
                mode = MODE.CONCURRENT if _mode is None else _mode
            return SinkAsync(coro, *args, mode=mode, unpack=unpack, **kwargs)
        return _wrapper

    return _build_sink_async(coro) if coro else _build_sink_async
Decorator to wrap a coroutine to return a SinkAsync subscriber .
54,466
def build_accumulate(function: Callable[[Any, Any], Tuple[Any, Any]] = None,
                     *, init: Any = NONE):
    """Decorator to wrap a function so calling it returns an Accumulate.

    :param function: accumulation function (state, value) -> (state, result)
    :param init: initialization state (may also be given at call time)
    """
    _init = init  # decorator-level default

    def _decorator(function: Callable[[Any, Any], Tuple[Any, Any]]):
        @wraps(function)
        def _wrapper(init=NONE) -> Accumulate:
            init = _init if init is NONE else init
            if init is NONE:
                raise TypeError('"init" argument has to be defined')
            return Accumulate(function, init=init)
        return _wrapper

    return _decorator(function) if function else _decorator
Decorator to wrap a function to return an Accumulate operator .
54,467
def resolve_meta_key(hub, key, meta):
    """Resolve a meta value; a string starting with '>' is a topic reference.

    :param hub: hub used to look up referenced topics
    :param key: key to look up in the meta dictionary
    :param meta: meta dictionary
    :returns: None when the key is missing, the referenced topic's current
        value when the value is a '>topic' string, otherwise the value itself
    :raises KeyError: when a referenced topic is not found in the hub
    """
    if key not in meta:
        return None
    value = meta[key]
    # use startswith instead of value[0] so an empty string does not raise
    # an IndexError and is simply returned as-is
    if isinstance(value, str) and value.startswith('>'):
        topic = value[1:]
        if topic not in hub:
            raise KeyError('topic %s not found in hub' % topic)
        return hub[topic].get()
    return value
Resolve a value when it is a string and starts with '>'
54,468
def checked_emit(self, value: Any) -> asyncio.Future:
    """Cast, check and emit the value in one call.

    :param value: value to be cast, checked and emitted
    :raises TypeError: when the assigned subject is not a subscriber
    """
    subject = self._subject
    if not isinstance(subject, Subscriber):
        raise TypeError('Topic %r has to be a subscriber' % self._path)
    checked_value = self.cast(value)
    self.check(checked_value)
    return subject.emit(checked_value, who=self)
Casting and checking in one call
54,469
def add_datatype(self, name: str, datatype: DT):
    """Register the given datatype under its name."""
    self._datatypes[name] = datatype
Register the datatype with its name
54,470
def cast(self, topic, value):
    """Cast a value based on the topic's datatype.

    The topic's 'datatype' meta entry selects the primary datatype
    (falling back to 'none'); an optional 'validate' entry applies a
    second cast on the result.
    """
    meta = topic.meta
    result = self._datatypes[meta.get('datatype', 'none')].cast(topic, value)
    validate_key = meta.get('validate', None)
    if validate_key:
        result = self._datatypes[validate_key].cast(topic, result)
    return result
Cast a string to the value based on the datatype
54,471
def check(self, topic, value):
    """Check that the value fits the topic's datatype specification.

    The 'datatype' meta entry (default 'none') performs the primary
    check; an optional 'validate' entry performs a second check.
    The underlying datatype check raises when the value does not fit.
    """
    meta = topic.meta
    self._datatypes[meta.get('datatype', 'none')].check(topic, value)
    validate_key = meta.get('validate', None)
    if validate_key:
        self._datatypes[validate_key].check(topic, value)
Checking the value if it fits into the given specification
54,472
def flush(self):
    """Emit the current queue content as one tuple and clear the queue."""
    snapshot = tuple(self._queue)
    self.notify(snapshot)
    self._queue.clear()
Emits the current queue and clears the queue
54,473
def _periodic_callback ( self ) : try : self . notify ( self . _state ) except Exception : self . _error_callback ( * sys . exc_info ( ) ) if self . _subscriptions : self . _call_later_handle = self . _loop . call_later ( self . _interval , self . _periodic_callback ) else : self . _state = NONE self . _call_later_handle = None
Will be started on first emit
54,474
def build_reduce(function: Callable[[Any, Any], Any] = None, *,
                 init: Any = NONE):
    """Decorator to wrap a function so calling it returns a Reduce operator.

    :param function: reduce function (state, value) -> new state
    :param init: initialization state (may also be given at call time)
    :raises TypeError: when no init value is defined at all
    """
    _init = init  # decorator-level default

    def _build_reduce(function: Callable[[Any, Any], Any]):
        @wraps(function)
        def _wrapper(init=NONE) -> Reduce:
            init = _init if init is NONE else init
            if init is NONE:
                # quote "init" to match the sibling build_accumulate message
                raise TypeError('"init" argument has to be defined')
            return Reduce(function, init=init)
        return _wrapper

    if function:
        return _build_reduce(function)
    return _build_reduce
Decorator to wrap a function to return a Reduce operator .
54,475
def flush(self):
    """Flush the sliding window: emit it if it was not emitted yet, then
    clear it."""
    window = self._state
    # with _emit_partial disabled a not-yet-full window was never
    # notified, so emit it now before clearing
    if not self._emit_partial and len(window) != window.maxlen:
        self.notify(tuple(window))
    window.clear()
Flush the queue - this will emit the current queue
54,476
def build_map_async(coro=None, *, mode=None, unpack: bool = False):
    """Decorator to wrap a coroutine so calling it returns a MapAsync.

    :param coro: coroutine to be wrapped
    :param mode: behavior when a value is emitted while a coroutine is
        running (default: MODE.CONCURRENT)
    :param unpack: value from emits will be unpacked (*value)
    """
    _mode = mode  # decorator-level default, may be overridden per call

    def _decorator(coro):
        @wraps(coro)
        def _wrapper(*args, mode=None, **kwargs) -> MapAsync:
            if 'unpack' in kwargs:
                raise TypeError('"unpack" has to be defined by decorator')
            if mode is None:
                mode = MODE.CONCURRENT if _mode is None else _mode
            return MapAsync(coro, *args, mode=mode, unpack=unpack, **kwargs)
        return _wrapper

    return _decorator(coro) if coro else _decorator
Decorator to wrap a coroutine to return a MapAsync operator .
54,477
def _future_done(self, future):
    """Called when the running coroutine finished: publish its result and
    start the next queued value (if any)."""
    try:
        result = future.result()
        if result is not NONE:
            self.notify(result)
    except asyncio.CancelledError:
        # a cancelled coroutine neither publishes nor continues the queue
        return
    except Exception:  # broad except: errors are routed to the callback
        self._options.error_callback(*sys.exc_info())

    if self._queue:
        self._run_coro(self._queue.popleft())
    else:
        self._future = None
Will be called when the coroutine is done
54,478
def _run_coro(self, value):
    """Start the coroutine as a task for the given value."""
    # in LAST_DISTINCT mode a value equal to the last emit is skipped
    if self._options.mode is MODE.LAST_DISTINCT and value == self._last_emit:
        self._future = None
        return

    self._last_emit = value
    self.scheduled.notify(value)
    call_args = value if self._options.unpack else (value,)
    coro = self._options.coro(*call_args, *self._options.args,
                              **self._options.kwargs)
    self._future = asyncio.ensure_future(coro)
    self._future.add_done_callback(self._future_done)
Start the coroutine as task
54,479
def build_filter(predicate: Callable[[Any], bool] = None, *,
                 unpack: bool = False):
    """Decorator to wrap a predicate so calling it returns a Filter.

    :param predicate: predicate function to be wrapped
    :param unpack: value from emits will be unpacked (*value)
    """
    def _decorator(predicate: Callable[[Any], bool]):
        @wraps(predicate)
        def _wrapper(*args, **kwargs) -> Filter:
            if 'unpack' in kwargs:
                raise TypeError('"unpack" has to be defined by decorator')
            return Filter(predicate, *args, unpack=unpack, **kwargs)
        return _wrapper

    # support usage with and without decorator arguments
    return _decorator(predicate) if predicate else _decorator
Decorator to wrap a function to return a Filter operator .
54,480
def apply_operator_overloading():
    """Install operator overloading on the Publisher class.

    Binary operators combine with another Publisher via CombineLatest or
    with a constant via _MapConstant; reflected operators use
    _MapConstantReverse; unary operators use _MapUnary; attribute access
    is forwarded via _GetAttr when an inherited type is set.
    """
    # binary operators (left operand is the publisher)
    for method in ('__lt__', '__le__', '__eq__', '__ne__', '__ge__',
                   '__gt__', '__add__', '__and__', '__lshift__', '__mod__',
                   '__mul__', '__pow__', '__rshift__', '__sub__', '__xor__',
                   '__concat__', '__getitem__', '__floordiv__',
                   '__truediv__'):
        # bind the current method name as a default argument to avoid the
        # late-binding closure pitfall
        def _op(operand_left, operand_right, operation=method):
            if isinstance(operand_right, Publisher):
                return CombineLatest(operand_left, operand_right,
                                     map_=getattr(operator, operation))
            return _MapConstant(operand_left, operand_right,
                                getattr(operator, operation))

        setattr(Publisher, method, _op)

    # reflected binary operators (right operand is the publisher)
    for method, _method in (
            ('__radd__', '__add__'), ('__rand__', '__and__'),
            ('__rlshift__', '__lshift__'), ('__rmod__', '__mod__'),
            ('__rmul__', '__mul__'), ('__rpow__', '__pow__'),
            ('__rrshift__', '__rshift__'), ('__rsub__', '__sub__'),
            ('__rxor__', '__xor__'), ('__rfloordiv__', '__floordiv__'),
            ('__rtruediv__', '__truediv__')):
        def _op(operand_left, operand_right, operation=_method):
            return _MapConstantReverse(operand_left, operand_right,
                                       getattr(operator, operation))

        setattr(Publisher, method, _op)

    # unary operators
    for method, _method in (
            ('__neg__', operator.neg), ('__pos__', operator.pos),
            ('__abs__', operator.abs), ('__invert__', operator.invert),
            ('__round__', round), ('__trunc__', math.trunc),
            ('__floor__', math.floor), ('__ceil__', math.ceil)):
        def _op_unary(operand, operation=_method):
            return _MapUnary(operand, operation)

        setattr(Publisher, method, _op_unary)

    def _getattr(publisher, attribute_name):
        # only forward attributes known on the inherited type
        if not publisher.inherited_type or \
                not hasattr(publisher.inherited_type, attribute_name):
            raise AttributeError('Attribute %r not found' % attribute_name)
        return _GetAttr(publisher, attribute_name)

    setattr(Publisher, '__getattr__', _getattr)
Function to apply operator overloading to Publisher class
54,481
def assign(self, subject):
    """Assign a Publisher or Subscriber to this topic.

    :param subject: the Publisher or Subscriber to assign
    :returns: the assigned subject
    :raises TypeError: when subject is neither Publisher nor Subscriber
    :raises SubscriptionError: when the topic is already assigned
    """
    if not isinstance(subject, (Publisher, Subscriber)):
        raise TypeError('Assignee has to be Publisher or Subscriber')
    if self._subject is not None:
        raise SubscriptionError('Topic %r already assigned' % self._path)

    self._subject = subject

    # connect existing subscriptions to the new subject
    if self._subscriptions:
        self._subject.subscribe(self)

    # replay values emitted before the assignment
    if self._pre_assign_emit is not None:
        for value in self._pre_assign_emit:
            self._subject.emit(value, who=self)
        self._pre_assign_emit = None

    return subject
Assigns the given subject to the topic
54,482
def freeze(self, freeze: bool = True):
    """Freeze the hub: every topic has to be assigned and no new topics
    can be created afterwards.

    Each topic's .freeze() is called without the flag — presumably it
    only validates that the topic is assigned (TODO confirm); note that
    this validation also runs when unfreezing.
    """
    for topic in self._topics.values():
        topic.freeze()
    self._frozen = freeze
Freezing the hub means that each topic has to be assigned and no new topics can be created after this point .
54,483
def reset(self):
    """Reset the throttling duration: cancel the pending timer and run
    the wait-done callback."""
    handler = self._call_later_handler
    if handler is not None:
        handler.cancel()
        self._call_later_handler = None
        self._wait_done_cb()
Resetting the duration for throttling
54,484
def build_map_threaded(function: Callable[[Any], Any] = None,
                       mode=MODE.CONCURRENT, unpack: bool = False):
    """Decorator to wrap a function so calling it returns a MapThreaded.

    :param function: function to be wrapped (run in a thread)
    :param mode: behavior when a value is emitted while the function is
        running (default: MODE.CONCURRENT)
    :param unpack: value from emits will be unpacked (*value)
    """
    _mode = mode  # decorator-level default, may be overridden per call

    def _decorator(function: Callable[[Any], Any]):
        @wraps(function)
        def _wrapper(*args, mode=None, **kwargs) -> MapThreaded:
            if 'unpack' in kwargs:
                raise TypeError('"unpack" has to be defined by decorator')
            if mode is None:
                mode = MODE.CONCURRENT if _mode is None else _mode
            return MapThreaded(function, *args, mode=mode, unpack=unpack,
                               **kwargs)
        return _wrapper

    return _decorator(function) if function else _decorator
Decorator to wrap a function to return a MapThreaded operator .
54,485
async def _thread_coro ( self , * args ) : return await self . _loop . run_in_executor ( self . _executor , self . _function , * args )
Coroutine called by MapAsync. It wraps the call of run_in_executor to run the synchronous function as a thread
54,486
def reset(self):
    """Reset the debounce: re-emit the retrigger value (when defined) and
    cancel a pending delayed emit."""
    if self._retrigger_value is not NONE:
        # fall back to the retrigger value immediately
        self.notify(self._retrigger_value)
        self._state = self._retrigger_value
        self._next_state = self._retrigger_value

    if self._call_later_handler:
        self._call_later_handler.cancel()
        self._call_later_handler = None
Reset the debounce time
54,487
def subscribe(self, subscriber: 'Subscriber',
              prepend: bool = False) -> SubscriptionDisposable:
    """Subscribe the given subscriber.

    :param subscriber: subscriber to add
    :param prepend: insert at the front of the subscription list
    :returns: a disposable to undo the subscription
    :raises SubscriptionError: when the subscriber is already registered
    """
    if any(subscriber is s for s in self._subscriptions):
        raise SubscriptionError('Subscriber already registered')

    if prepend:
        self._subscriptions.insert(0, subscriber)
    else:
        self._subscriptions.append(subscriber)

    return SubscriptionDisposable(self, subscriber)
Subscribing the given subscriber .
54,488
def unsubscribe(self, subscriber: 'Subscriber') -> None:
    """Unsubscribe the given subscriber (matched by identity).

    :raises SubscriptionError: when the subscriber is not registered
    """
    for index, registered in enumerate(self._subscriptions):
        if registered is subscriber:
            self._subscriptions.pop(index)
            return
    raise SubscriptionError('Subscriber is not registered')
Unsubscribe the given subscriber
54,489
def inherit_type(self, type_cls: Type[TInherit]) -> Union[TInherit, 'Publisher']:
    """Enable method and attribute overloading for this publisher based on
    the given type.

    :param type_cls: type to inherit attribute lookup from
    :returns: self (for chaining)
    """
    self._inherited_type = type_cls
    return self
enables the usage of method and attribute overloading for this publisher .
54,490
def _move_tuple_axes_first ( array , axis ) : naxis = len ( axis ) axis += tuple ( i for i in range ( array . ndim ) if i not in axis ) destination = tuple ( range ( array . ndim ) ) array_new = np . moveaxis ( array , axis , destination ) first = np . prod ( array_new . shape [ : naxis ] ) array_new = array_new . reshape ( ( first , ) + array_new . shape [ naxis : ] ) return array_new
Bottleneck can only take an integer axis, not a tuple, so this function takes all the axes to be operated on and combines them into the first dimension of the array so that we can then use axis = 0
54,491
def _nanmean(array, axis=None):
    """Bottleneck nanmean that also handles a tuple ``axis``."""
    if not isinstance(axis, tuple):
        return bottleneck.nanmean(array, axis=axis)
    # collapse the requested axes into the first dimension, then reduce it
    return bottleneck.nanmean(_move_tuple_axes_first(array, axis=axis),
                              axis=0)
Bottleneck nanmean function that handles tuple axis .
54,492
def _nanmedian(array, axis=None):
    """Bottleneck nanmedian that also handles a tuple ``axis``."""
    if not isinstance(axis, tuple):
        return bottleneck.nanmedian(array, axis=axis)
    # collapse the requested axes into the first dimension, then reduce it
    return bottleneck.nanmedian(_move_tuple_axes_first(array, axis=axis),
                                axis=0)
Bottleneck nanmedian function that handles tuple axis .
54,493
def _nanstd(array, axis=None, ddof=0):
    """Bottleneck nanstd that also handles a tuple ``axis``."""
    if not isinstance(axis, tuple):
        return bottleneck.nanstd(array, axis=axis, ddof=ddof)
    # collapse the requested axes into the first dimension, then reduce it
    return bottleneck.nanstd(_move_tuple_axes_first(array, axis=axis),
                             axis=0, ddof=ddof)
Bottleneck nanstd function that handles tuple axis .
54,494
def sigma_clip(data, sigma=3, sigma_lower=None, sigma_upper=None, maxiters=5,
               cenfunc='median', stdfunc='std', axis=None, masked=True,
               return_bounds=False, copy=True):
    """Perform sigma-clipping on the provided data.

    Convenience wrapper: builds a `SigmaClip` instance from the clipping
    parameters and immediately applies it to ``data``.
    """
    clipper = SigmaClip(sigma=sigma, sigma_lower=sigma_lower,
                        sigma_upper=sigma_upper, maxiters=maxiters,
                        cenfunc=cenfunc, stdfunc=stdfunc)
    return clipper(data, axis=axis, masked=masked,
                   return_bounds=return_bounds, copy=copy)
Perform sigma - clipping on the provided data .
54,495
def sigma_clipped_stats(data, mask=None, mask_value=None, sigma=3.0,
                        sigma_lower=None, sigma_upper=None, maxiters=5,
                        cenfunc='median', stdfunc='std', std_ddof=0,
                        axis=None):
    """Calculate sigma-clipped mean, median and standard deviation.

    :returns: tuple (mean, median, std) computed over the clipped data
    """
    if mask is not None:
        data = np.ma.MaskedArray(data, mask)
    if mask_value is not None:
        data = np.ma.masked_values(data, mask_value)

    clipper = SigmaClip(sigma=sigma, sigma_lower=sigma_lower,
                        sigma_upper=sigma_upper, maxiters=maxiters,
                        cenfunc=cenfunc, stdfunc=stdfunc)
    # clipped values are returned as NaN (masked=False)
    data_clipped = clipper(data, axis=axis, masked=False,
                           return_bounds=False, copy=False)

    # prefer the bottleneck-backed reducers when available
    if HAS_BOTTLENECK:
        mean = _nanmean(data_clipped, axis=axis)
        median = _nanmedian(data_clipped, axis=axis)
        std = _nanstd(data_clipped, ddof=std_ddof, axis=axis)
    else:
        mean = np.nanmean(data_clipped, axis=axis)
        median = np.nanmedian(data_clipped, axis=axis)
        std = np.nanstd(data_clipped, ddof=std_ddof, axis=axis)

    return mean, median, std
Calculate sigma - clipped statistics on the provided data .
54,496
def _sigmaclip_noaxis(self, data, masked=True, return_bounds=False,
                      copy=True):
    """Iteratively sigma-clip the data when ``axis`` is None."""
    filtered_data = data.ravel()

    # remove masked values and convert to a plain ndarray
    if isinstance(filtered_data, np.ma.MaskedArray):
        filtered_data = filtered_data.data[~filtered_data.mask]

    # remove invalid values (NaN/inf)
    finite_mask = np.isfinite(filtered_data)
    if np.any(~finite_mask):
        filtered_data = filtered_data[finite_mask]
        warnings.warn('Input data contains invalid values (NaNs or '
                      'infs), which were automatically clipped.',
                      AstropyUserWarning)

    nchanged = 1
    iteration = 0
    while nchanged != 0 and (iteration < self.maxiters):
        iteration += 1
        prev_size = filtered_data.size
        self._compute_bounds(filtered_data, axis=None)
        filtered_data = filtered_data[(filtered_data >= self._min_value) &
                                      (filtered_data <= self._max_value)]
        nchanged = prev_size - filtered_data.size

    self._niterations = iteration

    if masked:
        # return a masked array built from the *original* data, with the
        # out-of-bounds values masked in addition to invalid ones
        filtered_data = np.ma.masked_invalid(data, copy=copy)
        # ignore RuntimeWarnings for comparisons with NaN data values
        with np.errstate(invalid='ignore'):
            filtered_data.mask |= np.logical_or(data < self._min_value,
                                                data > self._max_value)

    if return_bounds:
        return filtered_data, self._min_value, self._max_value
    else:
        return filtered_data
Sigma clip the data when axis is None .
54,497
def _sigmaclip_withaxis(self, data, axis=None, masked=True,
                        return_bounds=False, copy=True):
    """Iteratively sigma-clip the data when ``axis`` is specified."""
    # float type is needed so clipped values can be replaced by NaN
    filtered_data = data.astype(float)

    # replace invalid values (NaN/inf) and warn about them
    bad_mask = ~np.isfinite(filtered_data)
    if np.any(bad_mask):
        filtered_data[bad_mask] = np.nan
        warnings.warn('Input data contains invalid values (NaNs or '
                      'infs), which were automatically clipped.',
                      AstropyUserWarning)

    # convert a masked array into a plain ndarray with NaNs for masked
    if isinstance(filtered_data, np.ma.MaskedArray):
        filtered_data = np.ma.masked_invalid(filtered_data).astype(float)
        filtered_data = filtered_data.filled(np.nan)

    # normalize axis to a tuple of non-negative indices
    if not isiterable(axis):
        axis = (axis,)
    axis = tuple(filtered_data.ndim + n if n < 0 else n for n in axis)

    # shape with size-1 entries on the reduced axes so the bounds
    # broadcast against the data
    mshape = tuple(1 if dim in axis else size
                   for dim, size in enumerate(filtered_data.shape))

    nchanged = 1
    iteration = 0
    while nchanged != 0 and (iteration < self.maxiters):
        iteration += 1
        n_nan = np.count_nonzero(np.isnan(filtered_data))
        self._compute_bounds(filtered_data, axis=axis)
        if not np.isscalar(self._min_value):
            self._min_value = self._min_value.reshape(mshape)
            self._max_value = self._max_value.reshape(mshape)
        # ignore RuntimeWarnings for comparisons with NaN data values
        with np.errstate(invalid='ignore'):
            filtered_data[(filtered_data < self._min_value) |
                          (filtered_data > self._max_value)] = np.nan
        nchanged = n_nan - np.count_nonzero(np.isnan(filtered_data))

    self._niterations = iteration

    if masked:
        if copy:
            filtered_data = np.ma.masked_invalid(filtered_data)
        else:
            # no copy: mask the *original* data in place instead
            with np.errstate(invalid='ignore'):
                out = np.ma.masked_invalid(data, copy=False)
                filtered_data = np.ma.masked_where(
                    np.logical_or(out < self._min_value,
                                  out > self._max_value),
                    out, copy=False)

    if return_bounds:
        return filtered_data, self._min_value, self._max_value
    else:
        return filtered_data
Sigma clip the data when axis is specified .
54,498
def do_photometry(self, data, error=None, mask=None, method='exact',
                  subpixels=5, unit=None):
    """Perform aperture photometry on the input data.

    :returns: tuple (aperture_sums, aperture_sum_errs)
    """
    data = np.asanyarray(data)

    if mask is not None:
        mask = np.asanyarray(mask)

        # work on copies so the caller's arrays are not modified;
        # masked pixels do not contribute to the sums
        data = copy.deepcopy(data)
        data[mask] = 0

        if error is not None:
            error = copy.deepcopy(np.asanyarray(error))
            error[mask] = 0.

    aperture_sums = []
    aperture_sum_errs = []
    for aperture_mask in self.to_mask(method=method, subpixels=subpixels):
        data_cutout = aperture_mask.cutout(data)
        if data_cutout is None:
            # aperture does not overlap the data
            aperture_sums.append(np.nan)
        else:
            aperture_sums.append(np.sum(data_cutout * aperture_mask.data))

        if error is not None:
            error_cutout = aperture_mask.cutout(error)
            if error_cutout is None:
                aperture_sum_errs.append(np.nan)
            else:
                # propagate errors in quadrature
                aperture_var = np.sum(error_cutout ** 2 *
                                      aperture_mask.data)
                aperture_sum_errs.append(np.sqrt(aperture_var))

    aperture_sums = self._prepare_photometry_output(aperture_sums,
                                                    unit=unit)
    aperture_sum_errs = self._prepare_photometry_output(aperture_sum_errs,
                                                        unit=unit)
    return aperture_sums, aperture_sum_errs
Perform aperture photometry on the input data .
54,499
def _to_sky_params(self, wcs, mode='all'):
    """Convert the pixel aperture parameters to those for a sky aperture.

    :returns: dict of sky-aperture parameters (positions, angles in
        angular units, sizes in arcsec)
    """
    sky_params = {}
    x, y = np.transpose(self.positions)
    sky_params['positions'] = pixel_to_skycoord(x, y, wcs, mode=mode)

    # pixel scale and rotation angle at the WCS reference point
    crval = SkyCoord([wcs.wcs.crval], frame=wcs_to_celestial_frame(wcs),
                     unit=wcs.wcs.cunit)
    scale, angle = pixel_scale_angle_at_skycoord(crval, wcs)

    params = self._params[:]
    theta_key = 'theta'
    if theta_key in self._params:
        # theta is an angle: subtract the WCS rotation instead of scaling
        sky_params[theta_key] = (self.theta * u.rad) - angle.to(u.rad)
        params.remove(theta_key)

    # all remaining parameters are lengths in pixels
    param_vals = [getattr(self, param) for param in params]
    for param, param_val in zip(params, param_vals):
        sky_params[param] = (param_val * u.pix * scale).to(u.arcsec)

    return sky_params
Convert the pixel aperture parameters to those for a sky aperture .