idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
10,400
async def _query_chunked_post(self, path, method="POST", *, params=None, data=None, headers=None, timeout=None):
    """Shorthand for uploading data by chunks.

    Defaults the content-type to ``application/octet-stream`` when the
    caller did not specify one, then issues a chunked request.
    """
    if headers is None:
        headers = {}
    # BUG FIX: the original guard was `if headers and "content-type" not in
    # headers`, so a freshly created empty dict (headers=None case) never
    # received the default content-type.
    if "content-type" not in headers:
        headers["content-type"] = "application/octet-stream"
    response = await self._query(
        path,
        method,
        params=params,
        data=data,
        headers=headers,
        timeout=timeout,
        chunked=True,
    )
    return response
A shorthand for uploading data by chunks
10,401
async def init(self, *, advertise_addr: str = None, listen_addr: str = "0.0.0.0:2377", force_new_cluster: bool = False, swarm_spec: Mapping = None) -> str:
    """Initialize a new swarm and return the daemon's response."""
    payload = {
        "AdvertiseAddr": advertise_addr,
        "ListenAddr": listen_addr,
        "ForceNewCluster": force_new_cluster,
        "Spec": swarm_spec,
    }
    return await self.docker._query_json("swarm/init", method="POST", data=payload)
Initialize a new swarm .
10,402
async def join(self, *, remote_addrs: Iterable[str], listen_addr: str = "0.0.0.0:2377", join_token: str, advertise_addr: str = None, data_path_addr: str = None) -> bool:
    """Join an existing swarm; returns True on success."""
    payload = {
        "RemoteAddrs": list(remote_addrs),
        "JoinToken": join_token,
        "ListenAddr": listen_addr,
        "AdvertiseAddr": advertise_addr,
        "DataPathAddr": data_path_addr,
    }
    # None-valued keys are stripped before sending.
    await self.docker._query("swarm/join", method="POST", data=clean_map(payload))
    return True
Join a swarm .
10,403
async def list(self, *, filters: Mapping = None) -> List[Mapping]:
    """Return the list of services, optionally narrowed by filters."""
    query = {"filters": clean_filters(filters)}
    return await self.docker._query_json("services", method="GET", params=query)
Return a list of services
10,404
async def update(self, service_id: str, version: str, *, image: str = None, rollback: bool = False) -> bool:
    """Update a service. If rollback is True, `image` is ignored.

    Args:
        service_id: id (or name) of the service to update.
        version: current spec version, required by the Docker API for
            optimistic concurrency.
        image: new image for the service's container spec.
        rollback: when True, ask the daemon to roll back to the previous spec.

    Returns:
        True on success.

    Raises:
        ValueError: when neither an image nor a rollback is requested.
    """
    if image is None and rollback is False:
        raise ValueError("You need to specify an image.")
    # Start from the service's current spec and patch only the image.
    inspect_service = await self.inspect(service_id)
    spec = inspect_service["Spec"]
    if image is not None:
        spec["TaskTemplate"]["ContainerSpec"]["Image"] = image
    params = {"version": version}
    if rollback is True:
        params["rollback"] = "previous"
    # NOTE(review): the spec is pre-serialized here while sibling methods
    # pass plain dicts to _query_json — confirm this does not JSON-encode
    # the payload twice.
    data = json.dumps(clean_map(spec))
    await self.docker._query_json(
        "services/{service_id}/update".format(service_id=service_id),
        method="POST",
        data=data,
        params=params,
    )
    return True
Update a service . If rollback is True image will be ignored .
10,405
async def delete(self, service_id: str) -> bool:
    """Remove the service with the given id; returns True on success."""
    url = "services/{service_id}".format(service_id=service_id)
    await self.docker._query(url, method="DELETE")
    return True
Remove a service
10,406
async def inspect(self, service_id: str) -> Mapping[str, Any]:
    """Return the inspection data of the given service."""
    url = "services/{service_id}".format(service_id=service_id)
    return await self.docker._query_json(url, method="GET")
Inspect a service
10,407
async def logs(self, service_id: str, *, details: bool = False, follow: bool = False, stdout: bool = False, stderr: bool = False, since: int = 0, timestamps: bool = False, is_tty: bool = False, tail: str = "all") -> Union[str, AsyncIterator[str]]:
    """Retrieve logs of the given service.

    At least one of stdout/stderr must be requested. Returns the whole
    log, or an async iterator of lines when following.
    """
    if stdout is False and stderr is False:
        raise TypeError("Need one of stdout or stderr")
    query = {
        "details": details,
        "follow": follow,
        "stdout": stdout,
        "stderr": stderr,
        "since": since,
        "timestamps": timestamps,
        "tail": tail,
    }
    url = "services/{service_id}/logs".format(service_id=service_id)
    response = await self.docker._query(url, method="GET", params=query)
    return await multiplexed_result(response, follow, is_tty=is_tty)
Retrieve logs of the given service
10,408
def store_many_vectors(self, hash_name, bucket_keys, vs, data):
    """Store a batch of vectors (with optional JSON-serializable data) in
    the buckets named by bucket_keys, one store_vector call per item."""
    # When no data is supplied, pair every vector with None.
    payloads = itertools.repeat(None) if data is None else data
    for vector, key, payload in zip(vs, bucket_keys, payloads):
        self.store_vector(hash_name, key, vector, payload)
Store a batch of vectors . Stores vector and JSON - serializable data in bucket with specified key .
10,409
def collect_all_bucket_keys(self):
    """Collect the bucket keys of every leaf in this subtree."""
    if not self.childs:
        # Leaf node: its own bucket key is the only one.
        return [self.bucket_key]
    keys = []
    for subtree in self.childs.values():
        keys.extend(subtree.collect_all_bucket_keys())
    return keys
Just collects all buckets keys from subtree
10,410
def bucket_keys_to_guarantee_result_set_size(self, bucket_key, N, tree_depth):
    """Return bucket keys for `bucket_key` guaranteeing at least N results.

    Walks this binary prefix tree along the bits of bucket_key. When the
    child on the query's path holds fewer than N vectors, all buckets of
    both that child and its sibling subtree are returned to widen the
    candidate set.
    """
    if tree_depth == len(bucket_key):
        # Consumed the whole key: this node's bucket is the answer.
        return [self.bucket_key]
    hash_char = bucket_key[tree_depth]
    # The sibling branch ('0' <-> '1') is the fallback candidate source.
    if hash_char == '0':
        other_hash_char = '1'
    else:
        other_hash_char = '0'
    if hash_char in self.childs:
        if self.childs[hash_char].vector_count < N:
            # Not enough vectors on the exact path: take every bucket from
            # this child AND its sibling subtree.
            # NOTE(review): assumes the sibling child exists — a missing
            # sibling raises KeyError here; confirm the tree's invariants.
            listA = self.childs[hash_char].collect_all_bucket_keys()
            listB = self.childs[other_hash_char].collect_all_bucket_keys()
            return listA + listB
        else:
            return self.childs[hash_char].bucket_keys_to_guarantee_result_set_size(bucket_key, N, tree_depth + 1)
    else:
        # Exact-path child missing: descend through the sibling instead.
        # NOTE(review): also assumes the sibling exists — confirm.
        return self.childs[other_hash_char].bucket_keys_to_guarantee_result_set_size(bucket_key, N, tree_depth + 1)
Returns list of bucket keys based on the specified bucket key and minimum result size N .
10,411
def store_vector(self, hash_name, bucket_key, v, data):
    """Store vector v and optional JSON-serializable data in MongoDB.

    Sparse vectors are stored as a list of [row, value] nonzeros plus
    their dimension; dense vectors as raw bytes plus dtype name.

    Fix: ndarray.tostring() (deprecated since NumPy 1.19, removed in
    NumPy 2.0) replaced by its exact alias tobytes().
    """
    mongo_key = self._format_mongo_key(hash_name, bucket_key)
    val_dict = {}
    val_dict['lsh'] = mongo_key
    if scipy.sparse.issparse(v):
        # Normalize to COO so nonzeros can be enumerated directly.
        if not scipy.sparse.isspmatrix_coo(v):
            v = scipy.sparse.coo_matrix(v)
        encoded_values = []
        for k in range(v.data.size):
            row_index = v.row[k]
            value = v.data[k]
            encoded_values.append([int(row_index), value])
        val_dict['sparse'] = 1
        val_dict['nonzeros'] = encoded_values
        val_dict['dim'] = v.shape[0]
    else:
        # Flatten to 1-D and store raw bytes + dtype for round-tripping.
        v = numpy.reshape(v, v.shape[0])
        val_dict['vector'] = v.tobytes()
        val_dict['dtype'] = v.dtype.name
    if data is not None:
        val_dict['data'] = data
    self.mongo_object.insert_one(val_dict)
Stores vector and JSON - serializable data in MongoDB with specified key .
10,412
def numpy_array_from_list_or_numpy_array(vectors):
    """Return a numpy array representation of `vectors`.

    A numpy array is passed through unchanged; a list of 1-D vectors is
    packed into a (dim, count) array, one vector per column.
    """
    if isinstance(vectors, numpy.ndarray):
        return vectors
    packed = numpy.zeros((vectors[0].shape[0], len(vectors)))
    for column, vector in enumerate(vectors):
        packed[:, column] = vector
    return packed
Returns numpy array representation of argument .
10,413
def unitvec(vec):
    """Scale a vector to unit length; the zero vector is returned unchanged.

    Accepts scipy sparse matrices and numpy arrays. Generalized: any
    sequence convertible to a float array is now accepted too (the
    original fell off the end and returned None for non-ndarray input).
    """
    if scipy.sparse.issparse(vec):
        vec = vec.tocsr()
        veclen = numpy.sqrt(numpy.sum(vec.data ** 2))
        return vec / veclen if veclen > 0.0 else vec
    # Convert instead of silently returning None for plain sequences.
    vec = numpy.asarray(vec, dtype=float)
    veclen = numpy.linalg.norm(vec)
    return vec / veclen if veclen > 0.0 else vec
Scale a vector to unit length . The only exception is the zero vector which is returned back unchanged .
10,414
def perform_pca(A):
    """Compute eigenvalues and eigenvectors of the covariance matrix of A.

    Rows of A correspond to observations, columns to variables.
    """
    # Center each variable (column) and transpose so numpy.cov sees one
    # variable per row.
    centered = (A - numpy.mean(A.T, axis=1)).T
    return numpy.linalg.eig(numpy.cov(centered))
Computes eigenvalues and eigenvectors of covariance matrix of A . The rows of a correspond to observations the columns to variables .
10,415
def permute(self, ba):
    """Permute the bitarray ba in place according to self.mapping.

    ba[i] receives the original value at position self.mapping[i];
    the (mutated) ba is also returned for convenience.
    """
    snapshot = ba.copy()
    # Fix: range() instead of xrange() — the original was Python-2-only.
    for i in range(len(self.mapping)):
        ba[i] = snapshot[self.mapping[i]]
    return ba
Permute the bitarray ba inplace .
10,416
def store_vector(self, v, data=None):
    """Hash vector v and store it in all matching buckets of every hash.

    The data argument must be JSON-serializable; it is stored with the
    vector and will be returned in search results.
    """
    # The stored vector is unit-normalized...
    nv = unitvec(v)
    for lshash in self.lshashes:
        # ...but hashing uses the raw input vector.
        # NOTE(review): hashing v rather than nv — for scale-sensitive
        # hashes, storage and query normalization must match; confirm.
        for bucket_key in lshash.hash_vector(v):
            self.storage.store_vector(lshash.hash_name, bucket_key, nv, data)
Hashes vector v and stores it in all matching buckets in the storage . The data argument must be JSON - serializable . It is stored with the vector and will be returned in search results .
10,417
def store_many_vectors(self, vs, data=None):
    """Hash and store a batch of vectors.

    data must be either None or a list of JSON-serializable objects, one
    per vector; each is stored with its vector and returned in search
    results.
    """
    # Vectors are stored unit-normalized, but hashed in raw form.
    nvs = [unitvec(i) for i in vs]
    for lshash in self.lshashes:
        # NOTE(review): only the FIRST bucket key per vector is used here,
        # while store_vector stores under every key — confirm intended.
        bucket_keys = [lshash.hash_vector(i)[0] for i in vs]
        self.storage.store_many_vectors(lshash.hash_name, bucket_keys, nvs, data)
Store a batch of vectors. Hashes the vectors vs and stores them in all matching buckets in the storage. The data argument must be either None or a list of JSON-serializable objects; each is stored with its vector and will be returned in search results.
10,418
def _get_candidates(self, v):
    """Collect candidates for v from every bucket of every hash."""
    found = []
    for lshash in self.lshashes:
        for key in lshash.hash_vector(v, querying=True):
            found.extend(self.storage.get_bucket(lshash.hash_name, key))
    return found
Collect candidates from all buckets from all hashes
10,419
def _apply_filter ( self , filters , candidates ) : if filters : filter_input = candidates for fetch_vector_filter in filters : filter_input = fetch_vector_filter . filter_vectors ( filter_input ) return filter_input else : return candidates
Apply vector filters if specified and return filtered list
10,420
def _append_distances ( self , v , distance , candidates ) : if distance : nv = unitvec ( v ) candidates = [ ( x [ 0 ] , x [ 1 ] , self . distance . distance ( x [ 0 ] , nv ) ) for x in candidates ] return candidates
Apply distance implementation if specified
10,421
def perform_experiment(self, engine_list):
    """Run recall/precision experiments for every engine in engine_list.

    For each engine: store all self.vectors, query the configured
    self.query_indices, and average recall, precision and normalized
    search time over the queries.

    Returns:
        List of (avg_recall, avg_precision, avg_search_time) tuples, one
        per engine; search time is relative to the exact-search baseline
        self.exact_search_time_per_vector.
    """
    result = []
    for endine_idx, engine in enumerate(engine_list):
        print('Engine %d / %d' % (endine_idx, len(engine_list)))
        # Start every engine from a clean index.
        engine.clean_all_buckets()
        avg_recall = 0.0
        avg_precision = 0.0
        avg_search_time = 0.0
        for index, v in enumerate(self.vectors):
            engine.store_vector(v, 'data_%d' % index)
        for index in self.query_indices:
            real_nearest = set(self.closest[index])
            search_time_start = time.time()
            nearest = engine.neighbours(self.vectors[index])
            search_time = time.time() - search_time_start
            # Map returned vectors back to their dataset indices.
            nearest = set([self.__index_of_vector(x[0]) for x in nearest])
            # Exclude the query vector itself.
            # NOTE(review): raises KeyError if the engine did not return
            # the query vector — set.discard(index) would be safer.
            nearest.remove(index)
            if len(nearest) == 0:
                recall = 0.0
                precision = 0.0
            else:
                inter_count = float(len(real_nearest & nearest))
                recall = inter_count / float(len(real_nearest))
                precision = inter_count / float(len(nearest))
            avg_recall += recall
            avg_precision += precision
            avg_search_time += search_time
        avg_recall /= float(len(self.query_indices))
        avg_precision /= float(len(self.query_indices))
        avg_search_time = avg_search_time / float(len(self.query_indices))
        # Normalize by the exact (linear-scan) per-vector search time.
        avg_search_time /= self.exact_search_time_per_vector
        print(' recall=%f, precision=%f, time=%f' % (avg_recall, avg_precision, avg_search_time))
        result.append((avg_recall, avg_precision, avg_search_time))
    return result
Performs nearest neighbour recall experiments with custom vector data for all engines in the specified list .
10,422
def __vector_to_string(self, vector):
    """Return a string form of vector: unit-normalized, 3-decimal rounded."""
    rounded = numpy.round(unitvec(vector), decimals=3)
    return numpy.array_str(rounded)
Returns string representation of vector .
10,423
def distance(self, x, y):
    """Return the Manhattan (L1) distance between x and y as a float.

    Supports scipy sparse inputs by densifying the difference first.
    """
    if scipy.sparse.issparse(x):
        diff = (x - y).toarray().ravel()
        return numpy.sum(numpy.absolute(diff))
    return numpy.sum(numpy.absolute(x - y))
Computes the Manhattan distance between vectors x and y . Returns float .
10,424
def store_many_vectors(self, hash_name, bucket_keys, vs, data):
    """Store a batch of vectors in Redis through a single pipeline.

    Stores each vector (and its JSON-serializable data, when given) in
    the bucket named by the corresponding key.

    Fix: the original rebound the ``data`` parameter as its own loop
    variable while zipping over it — shadowing that invites bugs; each
    item now gets a distinct name.
    """
    with self.redis_object.pipeline() as pipeline:
        payloads = [None] * len(vs) if data is None else data
        for bucket_key, payload, v in zip(bucket_keys, payloads, vs):
            self._add_vector(hash_name, bucket_key, v, payload, pipeline)
        pipeline.execute()
Store a batch of vectors in Redis . Stores vector and JSON - serializable data in bucket with specified key .
10,425
def _add_vector(self, hash_name, bucket_key, v, data, redis_object):
    """Serialize vector v (plus optional JSON-serializable data) and push
    it onto the Redis list for the given bucket.

    Fixes: (1) pushes through the *passed* redis_object — usually a
    pipeline — instead of always hitting self.redis_object, which
    silently defeated pipelining in store_many_vectors; (2) uses
    tobytes() since tostring() was removed in NumPy 2.0.
    """
    redis_key = self._format_redis_key(hash_name, bucket_key)
    val_dict = {}
    if scipy.sparse.issparse(v):
        # Normalize to COO so nonzeros can be enumerated directly.
        if not scipy.sparse.isspmatrix_coo(v):
            v = scipy.sparse.coo_matrix(v)
        encoded_values = []
        for k in range(v.data.size):
            row_index = v.row[k]
            value = v.data[k]
            encoded_values.append([int(row_index), value])
        val_dict['sparse'] = 1
        val_dict['nonzeros'] = encoded_values
        val_dict['dim'] = v.shape[0]
    else:
        # Flatten to 1-D and store raw bytes + dtype for round-tripping.
        v = numpy.reshape(v, v.shape[0])
        val_dict['vector'] = v.tobytes()
        val_dict['dtype'] = v.dtype.name
    if data is not None:
        val_dict['data'] = data
    # Use the supplied connection/pipeline, not self.redis_object.
    redis_object.rpush(redis_key, pickle.dumps(val_dict, protocol=2))
Store vector and JSON - serializable data in bucket with specified key .
10,426
def clean_buckets(self, hash_name):
    """Remove every bucket (and its contents) belonging to hash_name."""
    keys = self._iter_bucket_keys(hash_name)
    self.redis_object.delete(*keys)
Removes all buckets and their content for specified hash .
10,427
def clean_all_buckets(self):
    """Remove all nearpy buckets across all hashes."""
    keys = self.redis_object.keys(pattern='nearpy_*')
    # redis DELETE with zero keys is an error, so guard the empty case.
    if len(keys) > 0:
        self.redis_object.delete(*keys)
Removes all buckets from all hashes and their content .
10,428
def add_child_hash(self, child_hash):
    """Append child_hash; only binary-key hash types are accepted."""
    allowed = (PCABinaryProjections, RandomBinaryProjections, RandomBinaryProjectionTree)
    if not isinstance(child_hash, allowed):
        raise ValueError('Child hashes must generate binary keys')
    self.child_hashes.append(child_hash)
Adds specified child hash .
10,429
def perform_experiment(self, engine_list):
    """Run distance-ratio experiments for every engine in engine_list.

    Vectors are the columns of self.vectors. For each engine, stores all
    vectors, queries self.query_indices, and averages the distance ratio
    (relative overshoot past the exact nearest radius), result-set size,
    and normalized search time.

    Returns:
        List of (avg_distance_ratio, avg_result_size, avg_search_time)
        tuples, one per engine.
    """
    result = []
    for engine in engine_list:
        # list.index() is an O(n) lookup per iteration; kept as-is.
        print('Engine %d / %d' % (engine_list.index(engine), len(engine_list)))
        engine.clean_all_buckets()
        avg_distance_ratio = 0.0
        avg_result_size = 0.0
        avg_search_time = 0.0
        for index in range(self.vectors.shape[1]):
            engine.store_vector(self.vectors[:, index], 'data_%d' % index)
        for index in self.query_indices:
            search_time_start = time.time()
            nearest = engine.neighbours(self.vectors[:, index])
            search_time = time.time() - search_time_start
            distance_ratio = 0.0
            for n in nearest:
                # n[2] is the candidate's distance; only count overshoot
                # beyond the exact nearest-neighbour radius.
                if n[2] > self.nearest_radius[index]:
                    d = (n[2] - self.nearest_radius[index])
                    d /= self.nearest_radius[index]
                    distance_ratio += d
            # NOTE(review): ZeroDivisionError when the engine returns no
            # neighbours — guard with `if nearest:` if that can happen.
            distance_ratio /= len(nearest)
            avg_distance_ratio += distance_ratio
            avg_result_size += len(nearest)
            avg_search_time += search_time
        avg_distance_ratio /= float(len(self.query_indices))
        avg_result_size /= float(len(self.query_indices))
        avg_search_time = avg_search_time / float(len(self.query_indices))
        # Normalize by the exact (linear-scan) per-vector search time.
        avg_search_time /= self.exact_search_time_per_vector
        print(' distance_ratio=%f, result_size=%f, time=%f' % (avg_distance_ratio, avg_result_size, avg_search_time))
        result.append((avg_distance_ratio, avg_result_size, avg_search_time))
    return result
Performs nearest neighbour experiments with custom vector data for all engines in the specified list .
10,430
def get_neighbour_keys(self, hash_name, bucket_key):
    """Return the neighbour bucket keys for bucket_key under hash_name."""
    index = self.permutedIndexs[hash_name]
    return index.get_neighbour_keys(bucket_key, index.num_neighbour)
Return the neighbour buckets given hash_name and query bucket key .
10,431
def run_rc_file(editor, rc_file):
    """Execute an rc configuration file and call its configure(editor) hook."""
    assert isinstance(editor, Editor)
    assert isinstance(rc_file, six.string_types)
    rc_file = os.path.expanduser(rc_file)
    if not os.path.exists(rc_file):
        print('Impossible to read %r' % rc_file)
        _press_enter_to_continue()
        return
    try:
        namespace = {}
        with open(rc_file, 'r') as f:
            code = compile(f.read(), rc_file, 'exec')
            six.exec_(code, namespace, namespace)
        # The rc file may define a configure(editor) entry point.
        if 'configure' in namespace:
            namespace['configure'](editor)
    except Exception:
        # Show the traceback instead of crashing the editor.
        traceback.print_exc()
        _press_enter_to_continue()
Run rc file .
10,432
def _try_char ( character , backup , encoding = sys . stdout . encoding ) : if character . encode ( encoding , 'replace' ) == b'?' : return backup else : return character
Return character if it can be encoded using sys.stdout's encoding; otherwise return the backup character.
10,433
def update(self):
    """Rebuild the prompt-toolkit layout to match the WindowArrangement.

    Frames for windows that already existed are reused (keyed by
    (window, editor_buffer)) so their state survives the rebuild.
    """
    existing_frames = self._frames
    self._frames = {}

    def create_layout_from_node(node):
        # Recursively translate the window-arrangement tree into
        # prompt-toolkit containers.
        if isinstance(node, window_arrangement.Window):
            key = (node, node.editor_buffer)
            frame = existing_frames.get(key)  # reuse when already built
            if frame is None:
                frame, pt_window = self._create_window_frame(node.editor_buffer)
                node.pt_window = pt_window
            self._frames[key] = frame
            return frame
        elif isinstance(node, window_arrangement.VSplit):
            return VSplit(
                [create_layout_from_node(n) for n in node],
                padding=1,
                padding_char=self.get_vertical_border_char(),
                padding_style='class:frameborder')
        # Falls through (returns None) for unknown node types.
        if isinstance(node, window_arrangement.HSplit):
            return HSplit([create_layout_from_node(n) for n in node])

    layout = create_layout_from_node(self.window_arrangement.active_tab.root)
    self._fc.content = layout
Update layout to match the layout as described in the WindowArrangement .
10,434
def _create_window_frame(self, editor_buffer):
    """Create a Window for the buffer with a status bar underneath.

    Returns:
        (container, window): the HSplit holding the editor window plus
        its status-bar row, and the inner Window itself.
    """
    @Condition
    def wrap_lines():
        return self.editor.wrap_lines

    window = Window(
        self._create_buffer_control(editor_buffer),
        allow_scroll_beyond_bottom=True,
        scroll_offsets=ScrollOffsets(
            left=0, right=0,
            # Evaluated lazily so live ':set scrolloff' changes apply.
            top=(lambda: self.editor.scroll_offset),
            bottom=(lambda: self.editor.scroll_offset)),
        wrap_lines=wrap_lines,
        left_margins=[ConditionalMargin(
            margin=NumberedMargin(
                display_tildes=True,
                relative=Condition(lambda: self.editor.relative_number)),
            filter=Condition(lambda: self.editor.show_line_numbers))],
        cursorline=Condition(lambda: self.editor.cursorline),
        cursorcolumn=Condition(lambda: self.editor.cursorcolumn),
        colorcolumns=(
            lambda: [ColorColumn(pos) for pos in self.editor.colorcolumn]),
        ignore_content_width=True,
        ignore_content_height=True,
        get_line_prefix=partial(self._get_line_prefix, editor_buffer.buffer))

    return HSplit([
        window,
        # Status bar plus the line/column ruler, side by side.
        VSplit([
            WindowStatusBar(self.editor, editor_buffer),
            WindowStatusBarRuler(self.editor, window, editor_buffer.buffer),
        ], width=Dimension()),
    ]), window
Create a Window for the buffer with a status bar underneath.
10,435
def _create_buffer_control(self, editor_buffer):
    """Create a new BufferControl for the given editor buffer."""
    @Condition
    def preview_search():
        # Live-highlight matches while typing an incremental search.
        return self.editor.incsearch

    # Processors decorate the displayed text; they are applied in order.
    input_processors = [
        ConditionalProcessor(
            ShowTrailingWhiteSpaceProcessor(),
            Condition(lambda: self.editor.display_unprintable_characters)),
        TabsProcessor(
            tabstop=(lambda: self.editor.tabstop),
            char1=(lambda: '|' if self.editor.display_unprintable_characters else ' '),
            # Fall back to '.' when the output encoding cannot show U+2508.
            char2=(lambda: _try_char('\u2508', '.', get_app().output.encoding())
                   if self.editor.display_unprintable_characters else ' '),
        ),
        ReportingProcessor(editor_buffer),
        HighlightSelectionProcessor(),
        ConditionalProcessor(
            HighlightSearchProcessor(),
            Condition(lambda: self.editor.highlight_search)),
        ConditionalProcessor(
            HighlightIncrementalSearchProcessor(),
            Condition(lambda: self.editor.highlight_search) & preview_search),
        HighlightMatchingBracketProcessor(),
        DisplayMultipleCursors(),
    ]

    return BufferControl(
        lexer=DocumentLexer(editor_buffer),
        include_default_input_processors=False,
        input_processors=input_processors,
        buffer=editor_buffer.buffer,
        preview_search=preview_search,
        search_buffer_control=self.search_control,
        focus_on_click=True)
Create a new BufferControl for a given location .
10,436
def handle_command(editor, input_string):
    """Dispatch a Vi command-line string to the matching handler."""
    grammar_match = COMMAND_GRAMMAR.match(input_string)
    if grammar_match is None:
        return
    variables = grammar_match.variables()
    command = variables.get('command')
    go_to_line = variables.get('go_to_line')
    shell_command = variables.get('shell_command')
    if go_to_line is not None:
        _go_to_line(editor, go_to_line)
    elif shell_command is not None:
        editor.application.run_system_command(shell_command)
    elif has_command_handler(command):
        call_command_handler(command, editor, variables)
    else:
        editor.show_message('Not an editor command: %s' % input_string)
        return
    # Any handled command may have changed buffers/windows; resync.
    editor.sync_with_prompt_toolkit()
Handle commands entered on the Vi command line .
10,437
def _go_to_line(editor, line):
    """Move the cursor of the current buffer to the given 1-based line."""
    buf = editor.application.current_buffer
    target_row = max(0, int(line) - 1)
    buf.cursor_position = buf.document.translate_row_col_to_index(target_row, 0)
Move cursor to this line in the current buffer .
10,438
def report_pyflakes(document):
    """Run pyflakes on document and return a list of ReporterError instances."""
    # Collect pyflakes messages through the custom reporter.
    reporter = _FlakesReporter()
    pyflakes.api.check(document.text, '', reporter=reporter)

    def format_flake_message(message):
        # Styled text fragments for displaying one message.
        return [
            ('class:flakemessage.prefix', 'pyflakes:'),
            ('', ' '),
            ('class:flakemessage', message.message % message.message_args)
        ]

    def message_to_reporter_error(message):
        # Highlight from the reported column to the end of the word.
        start_index = document.translate_row_col_to_index(message.lineno - 1, message.col)
        end_index = start_index
        while end_index < len(document.text) and document.text[end_index] in WORD_CHARACTERS:
            end_index += 1
        return ReporterError(
            lineno=message.lineno - 1,
            start_column=message.col,
            end_column=message.col + end_index - start_index,
            formatted_text=format_flake_message(message))

    return [message_to_reporter_error(m) for m in reporter.messages]
Run pyflakes on document and return list of ReporterError instances .
10,439
def load_initial_files(self, locations, in_tab_pages=False, hsplit=False, vsplit=False):
    """Open a list of file locations, optionally each in its own tab,
    horizontal split, or vertical split (at most one mode)."""
    assert in_tab_pages + hsplit + vsplit <= 1
    wa = self.window_arrangement
    # With no locations, open a single unnamed buffer.
    locations2 = locations or [None]
    wa.open_buffer(locations2[0])
    for location in locations2[1:]:
        if in_tab_pages:
            wa.create_tab(location)
        elif hsplit:
            wa.hsplit(location=location)
        elif vsplit:
            wa.vsplit(location=location)
        else:
            wa.open_buffer(location)
    wa.active_tab_index = 0
    if locations and len(locations) > 1:
        self.show_message('%i files loaded.' % len(locations))
Load a list of files .
10,440
def _create_application(self):
    """Create and wire up the prompt-toolkit Application instance."""
    application = Application(
        input=self.input,
        output=self.output,
        editing_mode=EditingMode.VI,
        layout=self.editor_layout.layout,
        key_bindings=self.key_bindings,
        # DynamicStyle/Condition wrappers re-evaluate on every redraw,
        # so runtime setting changes take effect immediately.
        style=DynamicStyle(lambda: self.current_style),
        paste_mode=Condition(lambda: self.paste_mode),
        include_default_pygments_style=False,
        mouse_support=Condition(lambda: self.enable_mouse_support),
        full_screen=True,
        enable_page_navigation_bindings=True)

    def preview(_):
        # Live-preview ':' commands (e.g. colorscheme) while typing.
        if self.application.layout.has_focus(self.command_buffer):
            self.previewer.preview(self.command_buffer.text)

    self.command_buffer.on_text_changed += preview
    return application
Create CommandLineInterface instance .
10,441
def current_editor_buffer(self):
    """Return the EditorBuffer wrapping the focused buffer, or None."""
    active = self.application.current_buffer
    for editor_buffer in self.window_arrangement.editor_buffers:
        if editor_buffer.buffer == active:
            return editor_buffer
    return None
Return the EditorBuffer that is currently active .
10,442
def sync_with_prompt_toolkit(self):
    """Sync the prompt-toolkit layout and focus with the window arrangement."""
    self.editor_layout.update()
    active_window = self.window_arrangement.active_pt_window
    if active_window:
        self.application.layout.focus(active_window)
Update the prompt - toolkit Layout and FocusStack .
10,443
def run(self):
    """Run the event loop for the interface; starts the interaction."""
    self.sync_with_prompt_toolkit()

    def pre_run():
        # Start in Vi navigation (normal) mode.
        self.application.vi_state.input_mode = InputMode.NAVIGATION

    self.application.run(pre_run=pre_run)
Run the event loop for the interface . This starts the interaction .
10,444
def enter_command_mode(self):
    """Go into ':' command mode: focus the command line."""
    self.application.layout.focus(self.command_buffer)
    self.application.vi_state.input_mode = InputMode.INSERT
    # Snapshot editor state so command previews can be undone.
    self.previewer.save()
Go into command mode .
10,445
def leave_command_mode(self, append_to_history=False):
    """Leave command mode and focus the document window again."""
    # Undo any live-preview changes made while typing the command.
    self.previewer.restore()
    self.application.layout.focus_last()
    self.application.vi_state.input_mode = InputMode.NAVIGATION
    self.command_buffer.reset(append_to_history=append_to_history)
Leave command mode . Focus document window again .
10,446
def get_editor_style_by_name(name):
    """Build the editor Style for the given scheme name.

    Raises pygments.util.ClassNotFound when no pygments style exists
    with this name.
    """
    if name == 'vim':
        base = Style.from_dict(default_vim_style)
    else:
        base = style_from_pygments_cls(get_style_by_name(name))
    return merge_styles([base, Style.from_dict(style_extensions)])
Get Style class . This raises pygments . util . ClassNotFound when there is no style with this name .
10,447
def lex_document(self, document):
    """Return a get_tokens_for_line function suited to the buffer's file."""
    location = self.editor_buffer.location
    if not location:
        # No file on disk: no highlighting.
        return SimpleLexer().lex_document(document)
    if self.editor_buffer.in_file_explorer_mode:
        lexer = PygmentsLexer(DirectoryListingLexer, sync_from_start=False)
    else:
        lexer = PygmentsLexer.from_filename(location, sync_from_start=False)
    return lexer.lex_document(document)
Call the lexer and return a get_tokens_for_line function .
10,448
def read(self, location):
    """Read a file from disk, trying each known encoding in order.

    Returns (text, encoding); raises Exception when no encoding decodes
    the file.
    """
    location = os.path.expanduser(location)
    for encoding in ENCODINGS:
        try:
            with codecs.open(location, 'r', encoding) as f:
                return f.read(), encoding
        except UnicodeDecodeError:
            # Wrong encoding: try the next one.
            pass
    raise Exception('Unable to open file: %r' % location)
Read file from disk .
10,449
def reload(self):
    """Reload the file from storage into the buffer."""
    text = self._read(self.location)
    # Clamp the cursor in case the file shrank.
    cursor_position = min(self.buffer.cursor_position, len(text))
    self.buffer.document = Document(text, cursor_position)
    self._file_content = text
Reload file again from storage .
10,450
def get_display_name(self, short=False):
    """Return the buffer's display name; '[New file]' when it has no
    location, and just the basename when short is True."""
    if self.location is None:
        return '[New file]'
    return os.path.basename(self.location) if short else self.location
Return name as displayed .
10,451
def run_reporter(self):
    """Re-run the error reporter in an executor after the buffer text changed.

    Guards against concurrent runs; if the text changed while the
    reporter ran, the stale results are dropped and it reschedules itself.
    """
    if not self._reporter_is_running:
        self._reporter_is_running = True
        # Snapshot the text to detect changes made during the run.
        text = self.buffer.text
        self.report_errors = []
        if self.location is None:
            # No file on disk: nothing to report on.
            return
        document = self.buffer.document

        def in_executor():
            # Heavy work happens off the UI thread.
            report_errors = report(self.location, document)

            def ready():
                self._reporter_is_running = False
                if text == self.buffer.text:
                    # Text unchanged: publish results and redraw.
                    self.report_errors = report_errors
                    get_app().invalidate()
                else:
                    # Text changed meanwhile: results are stale, rerun.
                    self.run_reporter()

            call_from_executor(ready)

        run_in_executor(in_executor)
Buffer text changed .
10,452
def save(self):
    """Back up the current editor display settings for later restore()."""
    e = self.editor
    self._style = e.current_style
    self._show_line_numbers = e.show_line_numbers
    self._highlight_search = e.highlight_search
    self._show_ruler = e.show_ruler
    self._relative_number = e.relative_number
    self._cursorcolumn = e.cursorcolumn
    self._cursorline = e.cursorline
    self._colorcolumn = e.colorcolumn
Back up current editor state .
10,453
def _apply(self, input_string):
    """Parse and apply a ':' command for live preview.

    Only handles commands that are safe to preview: 'colorscheme' and
    the display-related 'set' options below.
    """
    e = self.editor
    m = COMMAND_GRAMMAR.match(input_string)
    if m is None:
        return
    variables = m.variables()
    command = variables.get('command')
    set_option = variables.get('set_option')
    if command == 'colorscheme':
        colorscheme = variables.get('colorscheme')
        if colorscheme:
            e.use_colorscheme(colorscheme)
    if command == 'set':
        # Vim option names with their short aliases.
        if set_option in ('hlsearch', 'hls'):
            e.highlight_search = True
        elif set_option in ('nohlsearch', 'nohls'):
            e.highlight_search = False
        elif set_option in ('nu', 'number'):
            e.show_line_numbers = True
        elif set_option in ('nonu', 'nonumber'):
            e.show_line_numbers = False
        elif set_option in ('ruler', 'ru'):
            e.show_ruler = True
        elif set_option in ('noruler', 'noru'):
            e.show_ruler = False
        elif set_option in ('relativenumber', 'rnu'):
            e.relative_number = True
        elif set_option in ('norelativenumber', 'nornu'):
            e.relative_number = False
        elif set_option in ('cursorline', 'cul'):
            e.cursorline = True
        elif set_option in ('cursorcolumn', 'cuc'):
            e.cursorcolumn = True
        elif set_option in ('nocursorline', 'nocul'):
            e.cursorline = False
        elif set_option in ('nocursorcolumn', 'nocuc'):
            e.cursorcolumn = False
        elif set_option in ('colorcolumn', 'cc'):
            value = variables.get('set_value', '')
            if value:
                # Comma-separated column numbers; non-digit items dropped.
                e.colorcolumn = [int(v) for v in value.split(',') if v.isdigit()]
Apply command .
10,454
def cmd(name, accepts_force=False):
    """Decorator that registers a command that takes no parameters.

    When accepts_force is True, the handler is called with a ``force``
    keyword (set by a trailing '!'); otherwise a '!' is rejected with a
    message.
    """
    def decorator(func):
        @_cmd(name)
        def command_wrapper(editor, variables):
            force = bool(variables['force'])
            if force and not accepts_force:
                editor.show_message('No ! allowed')
            elif accepts_force:
                func(editor, force=force)
            else:
                func(editor)
        # Return the undecorated function so it can be reused directly.
        return func
    return decorator
Decorator that registers a command that doesn't take any parameters.
10,455
def _bn(editor, force=False):
    """Go to the next buffer, refusing on unsaved changes unless forced."""
    eb = editor.window_arrangement.active_editor_buffer
    if force or not eb.has_unsaved_changes:
        editor.window_arrangement.go_to_next_buffer()
    else:
        editor.show_message(_NO_WRITE_SINCE_LAST_CHANGE_TEXT)
Go to next buffer .
10,456
def _bp(editor, force=False):
    """Go to the previous buffer, refusing on unsaved changes unless forced."""
    eb = editor.window_arrangement.active_editor_buffer
    if force or not eb.has_unsaved_changes:
        editor.window_arrangement.go_to_previous_buffer()
    else:
        editor.show_message(_NO_WRITE_SINCE_LAST_CHANGE_TEXT)
Go to previous buffer .
10,457
def buffer_list(editor):
    """List all open buffers in the terminal, marking the active one."""
    def handler():
        wa = editor.window_arrangement
        for info in wa.list_open_buffers():
            # '%' marks the buffer shown in the active window (Vim style).
            char = '%' if info.is_active else ''
            eb = info.editor_buffer
            print(' %3i %-2s %-20s line %i' % (
                info.index, char, eb.location,
                (eb.buffer.document.cursor_position_row + 1)))
        six.moves.input('\nPress ENTER to continue...')
    # Suspend the full-screen UI while printing.
    run_in_terminal(handler)
List all buffers .
10,458
def buffer_wipe(editor, force=False):
    """Wipe (close) the active buffer, refusing on unsaved changes unless
    forced."""
    eb = editor.window_arrangement.active_editor_buffer
    if force or not eb.has_unsaved_changes:
        editor.window_arrangement.close_buffer()
    else:
        editor.show_message(_NO_WRITE_SINCE_LAST_CHANGE_TEXT)
Wipe buffer .
10,459
def buffer_edit(editor, location, force=False):
    """Edit a new buffer at location, or reload the current one when no
    location is given."""
    if location is not None:
        editor.file_explorer = ''
        editor.window_arrangement.open_buffer(location, show_in_current_window=True)
        return
    # No location: reload the active buffer (like plain :e).
    eb = editor.window_arrangement.active_editor_buffer
    if eb.location is None:
        editor.show_message(_NO_FILE_NAME)
    elif not force and eb.has_unsaved_changes:
        editor.show_message(_NO_WRITE_SINCE_LAST_CHANGE_TEXT)
    else:
        eb.reload()
Edit new buffer .
10,460
def quit_all(editor, force=False):
    """Quit all windows; with force, discard unsaved changes."""
    quit(editor, all_=True, force=force)
Quit all .
10,461
def write(editor, location, force=False):
    """Write the active buffer to `location` (or its own location).

    Refuses to clobber an existing file unless forced, and complains
    when no file name is known.
    """
    if location and not force and os.path.exists(location):
        # Fixed user-facing typo: message previously read "overriwe".
        editor.show_message('File exists (add ! to overwrite)')
    else:
        eb = editor.window_arrangement.active_editor_buffer
        if location is None and eb.location is None:
            editor.show_message(_NO_FILE_NAME)
        else:
            eb.write(location)
Write file .
10,462
def write_and_quit(editor, location, force=False):
    """Write the file, then exit the application.

    NOTE(review): exits even when write() only showed an error message
    instead of writing — confirm that is intended.
    """
    write(editor, location, force=force)
    editor.application.exit()
Write file and quit .
10,463
def write_and_quit_all(editor):
    """Write the current buffer and quit all windows."""
    eb = editor.window_arrangement.active_editor_buffer
    if eb.location is None:
        # No file name: report and abort (do not quit), like Vim's :wqa.
        editor.show_message(_NO_FILE_NAME)
    else:
        eb.write()
        quit(editor, all_=True, force=False)
Write current buffer and quit all .
10,464
def tab_stop(editor, value):
    """Set the tabstop, or display the current one when value is None."""
    if value is None:
        editor.show_message('tabstop=%i' % editor.tabstop)
        return
    try:
        number = int(value)
    except ValueError:
        editor.show_message('Number required after =')
        return
    if number > 0:
        editor.tabstop = number
    else:
        editor.show_message('Argument must be positive')
Set tabstop .
10,465
def set_scroll_offset(editor, value):
    """Handle ':set scrolloff=<n>'; without a value, report the current setting."""
    if value is None:
        editor.show_message('scrolloff=%i' % editor.scroll_offset)
        return
    try:
        offset = int(value)
    except ValueError:
        editor.show_message('Number required after =')
        return
    if offset >= 0:
        editor.scroll_offset = offset
    else:
        editor.show_message('Argument must be positive')
Set scroll offset .
10,466
def whitespace_before_cursor_on_line():
    """True when the cursor is at the start of the line, or everything
    before it on the line ends in whitespace."""
    before = get_app().current_buffer.document.current_line_before_cursor
    return not before or before[-1].isspace()
Filter which evaluates to True when the characters before the cursor are whitespace or we are at the start of the line .
10,467
def show_editor_buffer(self, editor_buffer):
    """Display `editor_buffer` in the active window of this tab."""
    assert isinstance(editor_buffer, EditorBuffer)
    self.active_window.editor_buffer = editor_buffer
Open this EditorBuffer in the active window .
10,468
def close_editor_buffer(self, editor_buffer):
    """Close all the windows that have this editor buffer open.

    The matching windows are collected into a list first: `_close_window`
    mutates the split tree, so closing while `_walk_through_windows` is
    still yielding could skip windows or break the traversal.
    """
    matching = [window for _split, window in self._walk_through_windows()
                if window.editor_buffer == editor_buffer]
    for window in matching:
        self._close_window(window)
Close all the windows that have this editor buffer open .
10,469
def _close_window(self, window):
    """Close `window`, restoring the previously active window afterwards.

    Bug fix: `close_active_window` always closes `self.active_window`, so
    the target window must be made active first — the original else-branch
    closed the currently-active window instead of `window`.
    """
    if window == self.active_window:
        self.close_active_window()
    else:
        original_active_window = self.active_window
        self.active_window = window
        self.close_active_window()
        self.active_window = original_active_window
Close this window .
10,470
def close_active_window(self):
    """Close the active window and focus a sensible neighbouring window."""
    active_split = self._get_active_split()
    # Remove the active window from its containing split.
    index = active_split.index(self.active_window)
    del active_split[index]
    if len(active_split):
        # Prefer the neighbour to the left; descend into nested splits
        # until an actual leaf window is reached.
        new_active_window = active_split[max(0, index - 1)]
        while isinstance(new_active_window, (HSplit, VSplit)):
            new_active_window = new_active_window[0]
        self.active_window = new_active_window
    else:
        # No windows left at all in this split.
        self.active_window = None
    # Collapse a split that now contains a single child into its parent
    # (the root split is left alone).
    if len(active_split) == 1 and active_split != self.root:
        parent = self._get_split_parent(active_split)
        index = parent.index(active_split)
        parent[index] = active_split[0]
Close active window .
10,471
def cycle_focus(self):
    """Move focus to the next window, wrapping around at the end."""
    all_windows = self.windows()
    current = all_windows.index(self.active_window)
    self.active_window = all_windows[(current + 1) % len(all_windows)]
Cycle through all windows .
10,472
def has_unsaved_changes(self):
    """True when any visible buffer in this tab has unsaved changes."""
    return any(w.editor_buffer.has_unsaved_changes for w in self.windows())
True when any of the visible buffers in this tab has unsaved changes .
10,473
def active_editor_buffer(self):
    """The EditorBuffer shown in the active window, or None when there is none."""
    tab = self.active_tab
    if tab and tab.active_window:
        return tab.active_window.editor_buffer
    return None
The active EditorBuffer or None .
10,474
def active_pt_window(self):
    """The active prompt_toolkit layout Window, or None."""
    if not self.active_tab:
        return None
    window = self.active_tab.active_window
    return window.pt_window if window else None
The active prompt_toolkit layout Window .
10,475
def get_editor_buffer_for_location(self, location):
    """Return the open EditorBuffer for `location`, or None if that file
    was not yet loaded."""
    return next((eb for eb in self.editor_buffers if eb.location == location),
                None)
Return the EditorBuffer for this location . When this file was not yet loaded return None
10,476
def get_editor_buffer_for_buffer_name(self, buffer_name):
    """Return the open EditorBuffer named `buffer_name`, or None if not found."""
    for candidate in self.editor_buffers:
        if candidate.buffer_name == buffer_name:
            return candidate
    return None
Return the EditorBuffer for this buffer_name . When not found return None
10,477
def close_tab(self):
    """Close the active tab page (a no-op when it is the only tab)."""
    if len(self.tab_pages) <= 1:
        return
    del self.tab_pages[self.active_tab_index]
    self.active_tab_index = max(0, self.active_tab_index - 1)
    self._auto_close_new_empty_buffers()
Close active tab .
10,478
def hsplit(self, location=None, new=False, text=None):
    """Split the active window horizontally.

    Optionally open a `location`, literal `text`, or a `new` empty buffer
    in the new window; otherwise the current buffer is shown twice.
    """
    assert location is None or text is None or new is False
    if location or text or new:
        buf = self._get_or_create_editor_buffer(location=location, text=text)
    else:
        buf = None
    self.active_tab.hsplit(buf)
Split horizontally .
10,479
def keep_only_current_window(self):
    """Discard every tab and window except the currently active window."""
    only_window = self.active_tab.active_window
    self.tab_pages = [TabPage(only_window)]
    self.active_tab_index = 0
Close all other windows except the current one .
10,480
def go_to_next_buffer(self, _previous=False):
    """Show the next (or, with `_previous`, the preceding) buffer in the
    active window, wrapping around the buffer list."""
    current = self.active_editor_buffer
    if not current:
        return
    count = len(self.editor_buffers)
    index = self.editor_buffers.index(current)
    step = -1 if _previous else 1
    self.active_tab.show_editor_buffer(self.editor_buffers[(index + step) % count])
    self._auto_close_new_empty_buffers()
Open next buffer in active window .
10,481
def go_to_previous_tab(self):
    """Focus the previous tab page, wrapping around to the last one."""
    count = len(self.tab_pages)
    self.active_tab_index = (self.active_tab_index - 1 + count) % count
Focus the previous tab .
10,482
def _add_editor_buffer(self, editor_buffer, show_in_current_window=False):
    """Insert this new buffer into the buffer list, near the active one.

    Also bootstraps the first tab page when none exists yet, and
    optionally shows the buffer in the current window.
    """
    assert isinstance(editor_buffer, EditorBuffer) and editor_buffer not in self.editor_buffers
    eb = self.active_editor_buffer
    if eb is None:
        # No active buffer yet: just append at the end.
        self.editor_buffers.append(editor_buffer)
    else:
        try:
            index = self.editor_buffers.index(self.active_editor_buffer)
        except ValueError:
            index = 0
        # NOTE(review): this inserts *before* the active buffer, while the
        # stated intent is "right after" (which would be index + 1) —
        # confirm which ordering is actually wanted.
        self.editor_buffers.insert(index, editor_buffer)
    # First buffer ever: create the initial tab page around it.
    if self.tab_pages == []:
        self.tab_pages.append(TabPage(Window(editor_buffer)))
        self.active_tab_index = 0
    if show_in_current_window and self.active_tab:
        self.active_tab.show_editor_buffer(editor_buffer)
    # Kick off the buffer's reporter (semantics defined on EditorBuffer).
    editor_buffer.run_reporter()
Insert this new buffer in the list of buffers right after the active one .
10,483
def _get_or_create_editor_buffer(self, location=None, text=None):
    """Return the EditorBuffer for `location`, creating it if not yet open.

    Without a location, a new scratch buffer holding `text` is created.
    """
    assert location is None or text is None
    assert location is None or isinstance(location, string_types)
    if location is not None:
        existing = self.get_editor_buffer_for_location(location)
        if existing is not None:
            return existing
        buf = EditorBuffer(self.editor, location)
    else:
        buf = EditorBuffer(self.editor, text=text)
    self._add_editor_buffer(buf)
    return buf
Given a location return the EditorBuffer instance that we have if the file is already open or create a new one .
10,484
def close_buffer(self):
    """Close the current buffer.

    Windows showing the same buffer are closed as well; when no windows
    are left, the previous buffer (or an empty buffer) is shown.
    """
    eb = self.active_editor_buffer
    # Remember its position so we can pick the "previous" buffer later.
    index = self.editor_buffers.index(eb)
    self.editor_buffers.remove(eb)
    self.active_tab.close_active_window()
    # Close this buffer in every other tab; iterate over a copy because
    # empty tabs are removed from `tab_pages` during the loop.
    for i, t in enumerate(self.tab_pages[:]):
        t.close_editor_buffer(eb)
        if t.window_count() == 0:
            self.tab_pages.remove(t)
            if i >= self.active_tab_index:
                self.active_tab_index = max(0, self.active_tab_index - 1)
    if len(self.tab_pages) == 0:
        self.active_tab_index = None
        if len(self.editor_buffers) > 0:
            # Re-open the previous buffer in a fresh tab.
            new_index = (len(self.editor_buffers) + index - 1) % len(self.editor_buffers)
            eb = self.editor_buffers[new_index]
            self.tab_pages.append(TabPage(Window(eb)))
            self.active_tab_index = 0
        else:
            # No buffers left: create an empty one (this also creates the
            # initial tab page via _add_editor_buffer).
            eb = self._get_or_create_editor_buffer()
Close current buffer . When there are other windows showing the same buffer they are closed as well . When no windows are left the previous buffer or an empty buffer is shown .
10,485
def create_tab(self, location=None):
    """Create a new tab page right after the active one and focus it."""
    buf = self._get_or_create_editor_buffer(location)
    self.active_tab_index += 1
    self.tab_pages.insert(self.active_tab_index, TabPage(Window(buf)))
Create a new tab page .
10,486
def list_open_buffers(self):
    """Return a list of OpenBufferInfo records describing the open buffers."""
    active = self.active_editor_buffer
    visible = self.active_tab.visible_editor_buffers()
    return [
        OpenBufferInfo(
            index=i,
            editor_buffer=eb,
            is_active=(eb == active),
            is_visible=(eb in visible),
        )
        for i, eb in enumerate(self.editor_buffers)
    ]
Return a OpenBufferInfo list that gives information about the open buffers .
10,487
def register_code_directive():
    """Register docutils code directives (skipped when Sphinx is installed,
    since Sphinx provides its own)."""
    if SPHINX_INSTALLED:
        return
    for name in ('code', 'code-block', 'sourcecode'):
        docutils.parsers.rst.directives.register_directive(name, CodeBlockDirective)
Register code directive .
10,488
def find_ignored_languages(source):
    """Yield languages ignored via "ignore-language=..." rstcheck comments.

    Raises Error on a comment that is not in "key=value" form.
    """
    for index, line in enumerate(source.splitlines()):
        match = RSTCHECK_COMMENT_RE.match(line)
        if not match:
            continue
        key_and_value = line[match.end():].strip().split('=')
        if len(key_and_value) != 2:
            raise Error('Expected "key=value" syntax', line_number=index + 1)
        key, value = key_and_value
        if key == 'ignore-language':
            for language in value.split(','):
                yield language.strip()
Yield ignored languages .
10,489
def _check_file(parameters):
    """Check one file and return (filename, list_of_errors).

    `parameters` is a (filename, args) tuple; the filename '-' reads the
    contents from standard input instead.
    """
    (filename, args) = parameters
    if filename == '-':
        contents = sys.stdin.read()
    else:
        with contextlib.closing(docutils.io.FileInput(source_path=filename)) as input_file:
            contents = input_file.read()
    # Per-directory configuration files may override the command-line args.
    args = load_configuration_from_file(
        os.path.dirname(os.path.realpath(filename)), args)
    ignore_directives_and_roles(args.ignore_directives, args.ignore_roles)
    # Neutralize ignored substitutions so they do not produce errors.
    for substitution in args.ignore_substitutions:
        contents = contents.replace('|{}|'.format(substitution), 'None')
    ignore = {
        'languages': args.ignore_language,
        'messages': args.ignore_messages,
    }
    all_errors = []
    for error in check(contents, filename=filename, report_level=args.report,
                       ignore=ignore, debug=args.debug):
        all_errors.append(error)
    return (filename, all_errors)
Return list of errors .
10,490
def check_rst(code, ignore):
    """Yield errors found in nested RST code."""
    for result in check(code, filename='<string>', ignore=ignore):
        yield result
Yield errors in nested RST code .
10,491
def _get_directives_and_roles_from_sphinx():
    """Return a (directives, roles) tuple of Sphinx names to recognize.

    With Sphinx installed the names are harvested from its domains;
    otherwise a hard-coded fallback list is used.
    """
    if SPHINX_INSTALLED:
        sphinx_directives = list(sphinx.domains.std.StandardDomain.directives)
        sphinx_roles = list(sphinx.domains.std.StandardDomain.roles)
        for domain in [sphinx.domains.c.CDomain,
                       sphinx.domains.cpp.CPPDomain,
                       sphinx.domains.javascript.JavaScriptDomain,
                       sphinx.domains.python.PythonDomain]:
            # Register both the bare name and the 'domain:name' form.
            sphinx_directives += list(domain.directives) + [
                '{}:{}'.format(domain.name, item)
                for item in list(domain.directives)]
            sphinx_roles += list(domain.roles) + [
                '{}:{}'.format(domain.name, item)
                for item in list(domain.roles)]
    else:
        # Fallback lists used when Sphinx is not importable.
        sphinx_roles = [
            'abbr', 'command', 'dfn', 'doc', 'download', 'envvar', 'file',
            'guilabel', 'kbd', 'keyword', 'mailheader', 'makevar', 'manpage',
            'menuselection', 'mimetype', 'newsgroup', 'option', 'program',
            'py:func', 'ref', 'regexp', 'samp', 'term', 'token']
        sphinx_directives = [
            'autosummary', 'currentmodule', 'centered', 'c:function',
            'c:type', 'include', 'deprecated', 'envvar', 'glossary', 'index',
            'no-code-block', 'literalinclude', 'hlist', 'option',
            'productionlist', 'py:function', 'seealso', 'toctree', 'todo',
            'versionadded', 'versionchanged']
    return (sphinx_directives, sphinx_roles)
Return a tuple of Sphinx directive and roles .
10,492
def ignore_sphinx():
    """Register Sphinx directives and roles so they are not reported as unknown."""
    directives, roles = _get_directives_and_roles_from_sphinx()
    extra_directives = ['centered', 'include', 'deprecated', 'index',
                        'no-code-block', 'literalinclude', 'hlist', 'seealso',
                        'toctree', 'todo', 'versionadded', 'versionchanged']
    ext_autosummary = ['autosummary', 'currentmodule']
    ignore_directives_and_roles(directives + extra_directives + ext_autosummary,
                                roles + ['ctype'])
Register Sphinx directives and roles to ignore .
10,493
def find_config(directory_or_file, debug=False):
    """Return the path of the nearest configuration file, or None.

    A file argument is returned directly; for a directory, walk upward
    through its parents looking for any of CONFIG_FILES.
    """
    directory_or_file = os.path.realpath(directory_or_file)
    if os.path.isfile(directory_or_file):
        if debug:
            print('using config file {}'.format(directory_or_file),
                  file=sys.stderr)
        return directory_or_file
    directory = directory_or_file
    while True:
        for filename in CONFIG_FILES:
            candidate = os.path.join(directory, filename)
            if os.path.exists(candidate):
                if debug:
                    print('using config file {}'.format(candidate),
                          file=sys.stderr)
                return candidate
        parent = os.path.dirname(directory)
        if parent == directory:
            # Reached the filesystem root without finding anything.
            return None
        directory = parent
Return configuration filename .
10,494
def load_configuration_from_file(directory, args):
    """Return a copy of `args` with values overridden by a configuration file."""
    args = copy.copy(args)
    directory_or_file = args.config if args.config is not None else directory
    options = _get_options(directory_or_file, debug=args.debug)
    args.report = options.get('report', args.report)
    # Map symbolic report levels ('info', ...) to numeric thresholds.
    thresholds = docutils.frontend.OptionParser.thresholds
    args.report = int(thresholds.get(args.report, args.report))
    args.ignore_language = get_and_split(
        options, 'ignore_language', args.ignore_language)
    args.ignore_messages = options.get('ignore_messages', args.ignore_messages)
    args.ignore_directives = get_and_split(
        options, 'ignore_directives', args.ignore_directives)
    args.ignore_substitutions = get_and_split(
        options, 'ignore_substitutions', args.ignore_substitutions)
    args.ignore_roles = get_and_split(options, 'ignore_roles', args.ignore_roles)
    return args
Return new args with configuration loaded from file .
10,495
def parse_gcc_style_error_message(message, filename, has_column=True):
    """Parse a GCC-style "<file>:<line>[:<col>]: <text>" message.

    Returns (line_number, text); raises ValueError when `message` does
    not start with the expected "<filename>:" prefix.
    """
    colons = 2 if has_column else 1
    prefix = filename + ':'
    if not message.startswith(prefix):
        raise ValueError()
    parts = message[len(prefix):].split(':', colons)
    return (int(parts[0]), parts[colons].strip())
Parse GCC - style error message .
10,496
def run_in_subprocess(code, filename_suffix, arguments, working_directory):
    """Start `arguments` on a temporary file containing `code`.

    Returns a callable that waits for the process and yields None on
    success, or (stderr_text, temp_filename) on a non-zero exit.
    """
    temporary_file = tempfile.NamedTemporaryFile(mode='wb',
                                                 suffix=filename_suffix)
    temporary_file.write(code.encode('utf-8'))
    temporary_file.flush()
    process = subprocess.Popen(arguments + [temporary_file.name],
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               cwd=working_directory)

    def run():
        # The closure keeps `temporary_file` alive until the process is done.
        raw_result = process.communicate()
        if process.returncode != 0:
            return (raw_result[1].decode(get_encoding()), temporary_file.name)
        return None

    return run
Return None on success .
10,497
def beginning_of_code_block(node, line_number, full_contents, is_code_node):
    """Return the line number of the beginning of the code block."""
    if SPHINX_INSTALLED and not is_code_node:
        # Sphinx path: offset for directive options and any blank lines
        # between the directive and its body.
        delta = len(node.non_default_attributes())
        current_line_contents = full_contents.splitlines()[line_number:]
        blank_lines = next(
            (i for (i, x) in enumerate(current_line_contents) if x), 0)
        return (line_number + delta - 1 + blank_lines - 1 +
                SPHINX_CODE_BLOCK_DELTA)
    else:
        lines = full_contents.splitlines()
        code_block_length = len(node.rawsource.splitlines())
        try:
            # If the reported line itself is non-blank, count straight back
            # from it.
            if lines[line_number - 1].strip():
                return line_number - code_block_length + 1
        except IndexError:
            pass
        # Otherwise skip backwards over trailing blank lines first.
        for line_number in range(line_number, 1, -1):
            if lines[line_number - 2].strip():
                break
        return line_number - code_block_length
Return line number of beginning of code block .
10,498
def parse_args():
    """Return parsed command-line arguments."""
    threshold_choices = docutils.frontend.OptionParser.threshold_choices
    parser = argparse.ArgumentParser(
        description=__doc__ + (' Sphinx is enabled.' if SPHINX_INSTALLED else ''),
        prog='rstcheck')
    parser.add_argument('files', nargs='+', type=decode_filename,
                        help='files to check')
    parser.add_argument('--config', metavar='CONFIG', default=None,
                        help='location of config file')
    parser.add_argument('-r', '--recursive', action='store_true',
                        help='run recursively over directories')
    parser.add_argument('--report', metavar='level',
                        choices=threshold_choices, default='info',
                        help='report system messages at or higher than '
                        'level; ' +
                        ', '.join(choice for choice in threshold_choices
                                  if not choice.isdigit()) +
                        ' (default: %(default)s)')
    parser.add_argument('--ignore-language', '--ignore', metavar='language',
                        default='',
                        help='comma-separated list of languages to ignore')
    parser.add_argument('--ignore-messages', metavar='messages', default='',
                        help='python regex that match the messages to ignore')
    parser.add_argument('--ignore-directives', metavar='directives',
                        default='',
                        help='comma-separated list of directives to ignore')
    parser.add_argument('--ignore-substitutions', metavar='substitutions',
                        default='',
                        help='comma-separated list of substitutions to ignore')
    parser.add_argument('--ignore-roles', metavar='roles', default='',
                        help='comma-separated list of roles to ignore')
    parser.add_argument('--debug', action='store_true',
                        help='show messages helpful for debugging')
    parser.add_argument('--version', action='version',
                        version='%(prog)s ' + __version__)
    args = parser.parse_args()
    # '-' (standard input) cannot be combined with other files; otherwise
    # expand directories when --recursive is given.
    if '-' in args.files:
        if len(args.files) > 1:
            parser.error("'-' for standard in can only be checked alone")
    else:
        args.files = list(find_files(filenames=args.files,
                                     recursive=args.recursive))
    return args
Return parsed command - line arguments .
10,499
def output_message(text, file=sys.stderr):
    """Print `text` to `file` (standard error by default).

    NOTE(review): when the stream has no encoding, the text is pre-encoded
    to UTF-8 bytes — looks like a Python 2 pipe workaround; on Python 3
    this path would print the bytes repr. Confirm intent.
    """
    if file.encoding is None:
        text = text.encode('utf-8')
    print(text, file=file)
Output message to terminal .