idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
6,800 | def set ( self ) : self . _is_set = True scheduler . state . awoken_from_events . update ( self . _waiters ) del self . _waiters [ : ] | set the event to triggered |
6,801 | def wait ( self , timeout = None ) : if self . _is_set : return False current = compat . getcurrent ( ) waketime = None if timeout is None else time . time ( ) + timeout if timeout is not None : scheduler . schedule_at ( waketime , current ) self . _waiters . append ( current ) scheduler . state . mainloop . switch ( ) if timeout is not None : if not scheduler . _remove_timer ( waketime , current ) : scheduler . state . awoken_from_events . discard ( current ) if current in self . _waiters : self . _waiters . remove ( current ) return True return False | pause the current coroutine until this event is set |
6,802 | def acquire ( self , blocking = True ) : current = compat . getcurrent ( ) if self . _owner is current : self . _count += 1 return True if self . _locked and not blocking : return False if self . _locked : self . _waiters . append ( compat . getcurrent ( ) ) scheduler . state . mainloop . switch ( ) else : self . _locked = True self . _owner = current self . _count = 1 return True | acquire ownership of the lock |
6,803 | def release ( self ) : if not self . _locked or self . _owner is not compat . getcurrent ( ) : raise RuntimeError ( "cannot release un-acquired lock" ) self . _count -= 1 if self . _count == 0 : self . _owner = None if self . _waiters : waiter = self . _waiters . popleft ( ) self . _locked = True self . _owner = waiter scheduler . state . awoken_from_events . add ( waiter ) else : self . _locked = False self . _owner = None | release one ownership of the lock |
6,804 | def wait ( self , timeout = None ) : if not self . _is_owned ( ) : raise RuntimeError ( "cannot wait on un-acquired lock" ) current = compat . getcurrent ( ) waketime = None if timeout is None else time . time ( ) + timeout if timeout is not None : scheduler . schedule_at ( waketime , current ) self . _waiters . append ( ( current , waketime ) ) self . _lock . release ( ) scheduler . state . mainloop . switch ( ) self . _lock . acquire ( ) if timeout is not None : timedout = not scheduler . _remove_timer ( waketime , current ) if timedout : self . _waiters . remove ( ( current , waketime ) ) return timedout return False | wait to be woken up by the condition |
6,805 | def notify ( self , num = 1 ) : if not self . _is_owned ( ) : raise RuntimeError ( "cannot wait on un-acquired lock" ) for i in xrange ( min ( num , len ( self . _waiters ) ) ) : scheduler . state . awoken_from_events . add ( self . _waiters . popleft ( ) [ 0 ] ) | wake one or more waiting greenlets |
6,806 | def notify_all ( self ) : if not self . _is_owned ( ) : raise RuntimeError ( "cannot wait on un-acquired lock" ) scheduler . state . awoken_from_events . update ( x [ 0 ] for x in self . _waiters ) self . _waiters . clear ( ) | wake all waiting greenlets |
6,807 | def acquire ( self , blocking = True ) : if self . _value : self . _value -= 1 return True if not blocking : return False self . _waiters . append ( compat . getcurrent ( ) ) scheduler . state . mainloop . switch ( ) return True | decrement the counter waiting if it is already at 0 |
6,808 | def release ( self ) : "increment the counter, waking up a waiter if there was any" if self . _waiters : scheduler . state . awoken_from_events . add ( self . _waiters . popleft ( ) ) else : self . _value += 1 | increment the counter waking up a waiter if there was any |
6,809 | def start ( self ) : if self . _started : raise RuntimeError ( "thread already started" ) def run ( ) : try : self . run ( * self . _args , ** self . _kwargs ) except SystemExit : pass finally : self . _deactivate ( ) self . _glet = scheduler . greenlet ( run ) self . _ident = id ( self . _glet ) scheduler . schedule ( self . _glet ) self . _activate ( ) | schedule to start the greenlet that runs this thread s function |
6,810 | def join ( self , timeout = None ) : if not self . _started : raise RuntimeError ( "cannot join thread before it is started" ) if compat . getcurrent ( ) is self . _glet : raise RuntimeError ( "cannot join current thread" ) self . _finished . wait ( timeout ) | block until this thread terminates |
6,811 | def cancel ( self ) : done = self . finished . is_set ( ) self . finished . set ( ) return not done | attempt to prevent the timer from ever running its function |
6,812 | def wrap ( cls , secs , args = ( ) , kwargs = None ) : def decorator ( func ) : return cls ( secs , func , args , kwargs ) return decorator | a classmethod decorator to immediately turn a function into a timer |
6,813 | def get ( self , block = True , timeout = None ) : if not self . _data : if not block : raise Empty ( ) current = compat . getcurrent ( ) waketime = None if timeout is None else time . time ( ) + timeout if timeout is not None : scheduler . schedule_at ( waketime , current ) self . _waiters . append ( ( current , waketime ) ) scheduler . state . mainloop . switch ( ) if timeout is not None : if not scheduler . _remove_timer ( waketime , current ) : self . _waiters . remove ( ( current , waketime ) ) raise Empty ( ) if self . full ( ) and self . _waiters : scheduler . schedule ( self . _waiters . popleft ( ) [ 0 ] ) return self . _get ( ) | get an item out of the queue |
6,814 | def put ( self , item , block = True , timeout = None ) : if self . full ( ) : if not block : raise Full ( ) current = compat . getcurrent ( ) waketime = None if timeout is None else time . time ( ) + timeout if timeout is not None : scheduler . schedule_at ( waketime , current ) self . _waiters . append ( ( current , waketime ) ) scheduler . state . mainloop . switch ( ) if timeout is not None : if not scheduler . _remove_timer ( waketime , current ) : self . _waiters . remove ( ( current , waketime ) ) raise Full ( ) if self . _waiters and not self . full ( ) : scheduler . schedule ( self . _waiters . popleft ( ) [ 0 ] ) if not self . _open_tasks : self . _jobs_done . clear ( ) self . _open_tasks += 1 self . _put ( item ) | put an item into the queue |
6,815 | def increment ( self ) : "increment the counter, and wake anyone waiting for the new value" self . _count += 1 waiters = self . _waiters . pop ( self . _count , [ ] ) if waiters : scheduler . state . awoken_from_events . update ( waiters ) | increment the counter and wake anyone waiting for the new value |
6,816 | def wait ( self , until = 0 ) : if self . _count != until : self . _waiters . setdefault ( until , [ ] ) . append ( compat . getcurrent ( ) ) scheduler . state . mainloop . switch ( ) | wait until the count has reached a particular number |
6,817 | def wait_callback ( connection ) : while 1 : state = connection . poll ( ) if state == extensions . POLL_OK : break elif state == extensions . POLL_READ : descriptor . wait_fds ( [ ( connection . fileno ( ) , 1 ) ] ) elif state == extensions . POLL_WRITE : descriptor . wait_fds ( [ ( connection . fileno ( ) , 2 ) ] ) else : raise psycopg2 . OperationalError ( "Bad poll result: %r" % state ) | callback function suitable for psycopg2 . set_wait_callback |
6,818 | def get_path_to_root ( self , termid ) : root = None sentence = self . sentence_for_termid . get ( termid ) if sentence is None : top_node = self . top_relation_for_term . get ( termid ) if top_node is not None : root = top_node [ 1 ] else : return None else : if sentence in self . root_for_sentence : root = self . root_for_sentence [ sentence ] else : return None path = self . get_shortest_path ( termid , root ) return path | Returns the dependency path from the term to the root |
6,819 | def get_full_dependents ( self , term_id , relations , counter = 0 ) : counter += 1 deps = self . relations_for_term if term_id in deps and len ( deps . get ( term_id ) ) > 0 : for dep in deps . get ( term_id ) : if not dep [ 1 ] in relations : relations . append ( dep [ 1 ] ) if dep [ 1 ] in deps : deprelations = self . get_full_dependents ( dep [ 1 ] , relations , counter ) for deprel in deprelations : if not deprel in relations : relations . append ( deprel ) return relations | Returns the complete list of dependents and embedded dependents of a certain term . |
6,820 | def getDirsToInclude ( string ) : dirs = [ ] a = string . strip ( ) obj = a . split ( '-' ) if len ( obj ) == 1 and obj [ 0 ] : for module in obj : try : exec ( 'import {}' . format ( module ) ) except ImportError : raise FileNotFoundError ( "The module '{}' does not" "exist" . format ( module ) ) try : dirs . append ( '-I{}' . format ( eval ( module ) . get_include ( ) ) ) except AttributeError : print ( NOT_NEEDED_MESSAGE . format ( module ) ) return dirs | Given a string of module names it will return the include directories essential to their compilation as long as the module has the conventional get_include function . |
6,821 | def purge_configs ( ) : user_config = path ( CONFIG_FILE_NAME , root = USER ) inplace_config = path ( CONFIG_FILE_NAME ) if os . path . isfile ( user_config ) : os . remove ( user_config ) if os . path . isfile ( inplace_config ) : os . remove ( inplace_config ) | These will delete any configs found in either the current directory or the user s home directory |
6,822 | def find_config_file ( ) : local_config_name = path ( CONFIG_FILE_NAME ) if os . path . isfile ( local_config_name ) : return local_config_name else : user_config_name = path ( CONFIG_FILE_NAME , root = USER ) if os . path . isfile ( user_config_name ) : return user_config_name else : return None | Returns the path to the config file if found in either the current working directory or the user s home directory . If a config file is not found the function will return None . |
6,823 | def make_config_data ( * , guided ) : config_data = { } config_data [ INCLUDE_DIRS_KEY ] = _make_include_dirs ( guided = guided ) config_data [ RUNTIME_DIRS_KEY ] = _make_runtime_dirs ( guided = guided ) config_data [ RUNTIME_KEY ] = _make_runtime ( ) return config_data | Makes the data necessary to construct a functional config file |
6,824 | def generate_configurations ( * , guided = False , fresh_start = False , save = False ) : if fresh_start : purge_configs ( ) loaded_status , loaded_data = get_config ( ) if loaded_status != CONFIG_VALID : if save : make_config_file ( guided = guided ) status , config_data = get_config ( ) else : config_data = make_config_data ( guided = guided ) else : config_data = loaded_data return config_data | If a config file is found in the standard locations it will be loaded and the config data would be retuned . If not found then generate the data on the fly and return it |
6,825 | def info ( self ) : url = "queues/%s" % ( self . name , ) result = self . client . get ( url ) return result [ 'body' ] [ 'queue' ] | Execute an HTTP request to get details on a queue and return it . |
6,826 | def clear ( self ) : url = "queues/%s/messages" % self . name result = self . client . delete ( url = url , body = json . dumps ( { } ) , headers = { 'Content-Type' : 'application/json' } ) return result [ 'body' ] | Executes an HTTP request to clear all contents of a queue . |
6,827 | def delete ( self , message_id , reservation_id = None , subscriber_name = None ) : url = "queues/%s/messages/%s" % ( self . name , message_id ) qitems = { } if reservation_id is not None : qitems [ 'reservation_id' ] = reservation_id if subscriber_name is not None : qitems [ 'subscriber_name' ] = subscriber_name body = json . dumps ( qitems ) result = self . client . delete ( url = url , body = body , headers = { 'Content-Type' : 'application/json' } ) return result [ 'body' ] | Execute an HTTP request to delete a message from queue . |
6,828 | def delete_multiple ( self , ids = None , messages = None ) : url = "queues/%s/messages" % self . name items = None if ids is None and messages is None : raise Exception ( 'Please, specify at least one parameter.' ) if ids is not None : items = [ { 'id' : item } for item in ids ] if messages is not None : items = [ { 'id' : item [ 'id' ] , 'reservation_id' : item [ 'reservation_id' ] } for item in messages [ 'messages' ] ] data = json . dumps ( { 'ids' : items } ) result = self . client . delete ( url = url , body = data , headers = { 'Content-Type' : 'application/json' } ) return result [ 'body' ] | Execute an HTTP request to delete messages from queue . |
6,829 | def post ( self , * messages ) : url = "queues/%s/messages" % self . name msgs = [ { 'body' : msg } if isinstance ( msg , basestring ) else msg for msg in messages ] data = json . dumps ( { 'messages' : msgs } ) result = self . client . post ( url = url , body = data , headers = { 'Content-Type' : 'application/json' } ) return result [ 'body' ] | Executes an HTTP request to create message on the queue . Creates queue if not existed . |
6,830 | def reserve ( self , max = None , timeout = None , wait = None , delete = None ) : url = "queues/%s/reservations" % self . name qitems = { } if max is not None : qitems [ 'n' ] = max if timeout is not None : qitems [ 'timeout' ] = timeout if wait is not None : qitems [ 'wait' ] = wait if delete is not None : qitems [ 'delete' ] = delete body = json . dumps ( qitems ) response = self . client . post ( url , body = body , headers = { 'Content-Type' : 'application/json' } ) return response [ 'body' ] | Retrieves Messages from the queue and reserves it . |
6,831 | def touch ( self , message_id , reservation_id , timeout = None ) : url = "queues/%s/messages/%s/touch" % ( self . name , message_id ) qitems = { 'reservation_id' : reservation_id } if timeout is not None : qitems [ 'timeout' ] = timeout body = json . dumps ( qitems ) response = self . client . post ( url , body = body , headers = { 'Content-Type' : 'application/json' } ) return response [ 'body' ] | Touching a reserved message extends its timeout to the duration specified when the message was created . |
6,832 | def release ( self , message_id , reservation_id , delay = 0 ) : url = "queues/%s/messages/%s/release" % ( self . name , message_id ) body = { 'reservation_id' : reservation_id } if delay > 0 : body [ 'delay' ] = delay body = json . dumps ( body ) response = self . client . post ( url , body = body , headers = { 'Content-Type' : 'application/json' } ) return response [ 'body' ] | Release locked message after specified time . If there is no message with such id on the queue . |
6,833 | def queues ( self , page = None , per_page = None , previous = None , prefix = None ) : options = { } if page is not None : raise Exception ( 'page param is deprecated!' ) if per_page is not None : options [ 'per_page' ] = per_page if previous is not None : options [ 'previous' ] = previous if prefix is not None : options [ 'prefix' ] = prefix query = urlencode ( options ) url = 'queues' if query != '' : url = "%s?%s" % ( url , query ) result = self . client . get ( url ) return [ queue [ 'name' ] for queue in result [ 'body' ] [ 'queues' ] ] | Execute an HTTP request to get a list of queues and return it . |
6,834 | def get_timex ( self , timex_id ) : if timex_id in self . idx : return Ctime ( self . idx [ timex_id ] ) else : return None | Returns the timex object for the supplied identifier |
6,835 | def add_timex ( self , timex_obj ) : timex_id = timex_obj . get_id ( ) if not timex_id in self . idx : timex_node = timex_obj . get_node ( ) self . node . append ( timex_node ) self . idx [ timex_id ] = timex_node else : print ( 'Error: trying to add new element with existing identifier' ) | Adds a timex object to the layer . |
6,836 | def ensure_storage_format ( root_dir ) : if not os . path . isdir ( os . path . join ( root_dir , 'blobs' ) ) : print ( '"blobs/" directory not found' ) sys . exit ( 1 ) if not os . path . isdir ( os . path . join ( root_dir , 'links' ) ) : print ( '"links/" directory not found' ) sys . exit ( 1 ) if not os . path . isdir ( os . path . join ( root_dir , 'db' ) ) : print ( '"db/" directory not found' ) sys . exit ( 1 ) | Checks if the directory looks like a filetracker storage . Exits with error if it doesn t . |
6,837 | def get_deepest_phrase_for_termid ( self , termid ) : terminal_id = self . terminal_for_term . get ( termid ) label = None subsumed = [ ] if terminal_id is not None : first_path = self . paths_for_terminal [ terminal_id ] [ 0 ] first_phrase_id = first_path [ 1 ] label = self . label_for_nonter . get ( first_phrase_id ) subsumed = self . terms_subsumed_by_nonter . get ( first_phrase_id , [ ] ) return label , sorted ( list ( subsumed ) ) | Returns the deepest phrase type for the term identifier and the list of subsumed by the same element |
6,838 | def get_least_common_subsumer ( self , from_tid , to_tid ) : termid_from = self . terminal_for_term . get ( from_tid ) termid_to = self . terminal_for_term . get ( to_tid ) path_from = self . paths_for_terminal [ termid_from ] [ 0 ] path_to = self . paths_for_terminal [ termid_to ] [ 0 ] common_nodes = set ( path_from ) & set ( path_to ) if len ( common_nodes ) == 0 : return None else : indexes = [ ] for common_node in common_nodes : index1 = path_from . index ( common_node ) index2 = path_to . index ( common_node ) indexes . append ( ( common_node , index1 + index2 ) ) indexes . sort ( key = itemgetter ( 1 ) ) shortest_common = indexes [ 0 ] [ 0 ] return shortest_common | Returns the deepest common subsumer among two terms |
6,839 | def get_deepest_subsumer ( self , list_terms ) : count_per_no_terminal = defaultdict ( int ) total_deep_per_no_terminal = defaultdict ( int ) for term_id in list_terms : terminal_id = self . terminal_for_term . get ( term_id ) path = self . paths_for_terminal [ terminal_id ] [ 0 ] print ( term_id , path ) for c , noter in enumerate ( path ) : count_per_no_terminal [ noter ] += 1 total_deep_per_no_terminal [ noter ] += c deepest_and_common = None deepest = 10000 for noterid , this_total in total_deep_per_no_terminal . items ( ) : if count_per_no_terminal . get ( noterid , - 1 ) == len ( list_terms ) : if this_total < deepest : deepest = this_total deepest_and_common = noterid label = None if deepest_and_common is not None : label = self . label_for_nonter [ deepest_and_common ] return deepest_and_common , label | Returns the labels of the deepest node that subsumes all the terms in the list of terms id s provided |
6,840 | def get_chunks ( self , chunk_type ) : for nonter , this_type in self . label_for_nonter . items ( ) : if this_type == chunk_type : subsumed = self . terms_subsumed_by_nonter . get ( nonter ) if subsumed is not None : yield sorted ( list ( subsumed ) ) | Returns the chunks for a certain type |
6,841 | def get_all_chunks_for_term ( self , termid ) : terminal_id = self . terminal_for_term . get ( termid ) paths = self . paths_for_terminal [ terminal_id ] for path in paths : for node in path : this_type = self . label_for_nonter [ node ] subsumed = self . terms_subsumed_by_nonter . get ( node ) if subsumed is not None : yield this_type , sorted ( list ( subsumed ) ) | Returns all the chunks in which the term is contained |
6,842 | def _lookup_enum_in_ns ( namespace , value ) : for attribute in dir ( namespace ) : if getattr ( namespace , attribute ) == value : return attribute | Return the attribute of namespace corresponding to value . |
6,843 | def _is_word_type ( token_type ) : return token_type in [ TokenType . Word , TokenType . QuotedLiteral , TokenType . UnquotedLiteral , TokenType . Number , TokenType . Deref ] | Return true if this is a word - type token . |
6,844 | def _is_in_comment_type ( token_type ) : return token_type in [ TokenType . Comment , TokenType . Newline , TokenType . Whitespace , TokenType . RST , TokenType . BeginRSTComment , TokenType . BeginInlineRST , TokenType . EndInlineRST ] | Return true if this kind of token can be inside a comment . |
6,845 | def _get_string_type_from_token ( token_type ) : return_value = None if token_type in [ TokenType . BeginSingleQuotedLiteral , TokenType . EndSingleQuotedLiteral ] : return_value = "Single" elif token_type in [ TokenType . BeginDoubleQuotedLiteral , TokenType . EndDoubleQuotedLiteral ] : return_value = "Double" assert return_value is not None return return_value | Return Single or Double depending on what kind of string this is . |
6,846 | def _make_header_body_handler ( end_body_regex , node_factory , has_footer = True ) : def handler ( tokens , tokens_len , body_index , function_call ) : def _end_header_body_definition ( token_index , tokens ) : if end_body_regex . match ( tokens [ token_index ] . content ) : try : if tokens [ token_index + 1 ] . type == TokenType . LeftParen : return True except IndexError : raise RuntimeError ( "Syntax Error" ) return False token_index , body = _ast_worker ( tokens , tokens_len , body_index , _end_header_body_definition ) extra_kwargs = { } if has_footer : token_index , footer = _handle_function_call ( tokens , tokens_len , token_index ) extra_kwargs = { "footer" : footer } return ( token_index , node_factory ( header = function_call , body = body . statements , line = tokens [ body_index ] . line , col = tokens [ body_index ] . col , index = body_index , ** extra_kwargs ) ) return handler | Utility function to make a handler for header - body node . |
6,847 | def _handle_if_block ( tokens , tokens_len , body_index , function_call ) : next_index , if_statement = _IF_BLOCK_IF_HANDLER ( tokens , tokens_len , body_index , function_call ) elseif_statements = [ ] else_statement = None footer = None while True : assert _RE_END_IF_BODY . match ( tokens [ next_index ] . content ) terminator = tokens [ next_index ] . content . lower ( ) if terminator == "endif" : next_index , footer = _handle_function_call ( tokens , tokens_len , next_index ) break next_index , header = _handle_function_call ( tokens , tokens_len , next_index ) if terminator == "elseif" : next_index , elseif_stmnt = _ELSEIF_BLOCK_HANDLER ( tokens , tokens_len , next_index + 1 , header ) elseif_statements . append ( elseif_stmnt ) elif terminator == "else" : next_index , else_statement = _ELSE_BLOCK_HANDLER ( tokens , tokens_len , next_index + 1 , header ) assert footer is not None return next_index , IfBlock ( if_statement = if_statement , elseif_statements = elseif_statements , else_statement = else_statement , footer = footer , line = if_statement . line , col = if_statement . col , index = body_index ) | Special handler for if - blocks . |
6,848 | def _handle_function_call ( tokens , tokens_len , index ) : def _end_function_call ( token_index , tokens ) : return tokens [ token_index ] . type == TokenType . RightParen next_index , call_body = _ast_worker ( tokens , tokens_len , index + 2 , _end_function_call ) function_call = FunctionCall ( name = tokens [ index ] . content , arguments = call_body . arguments , line = tokens [ index ] . line , col = tokens [ index ] . col , index = index ) try : handler = _FUNCTION_CALL_DISAMBIGUATE [ tokens [ index ] . content . lower ( ) ] except KeyError : handler = None if handler : return handler ( tokens , tokens_len , next_index , function_call ) else : return ( next_index , function_call ) | Handle function calls which could include a control statement . |
6,849 | def _ast_worker ( tokens , tokens_len , index , term ) : statements = [ ] arguments = [ ] while index < tokens_len : if term : if term ( index , tokens ) : break if tokens [ index ] . type == TokenType . Word and index + 1 < tokens_len and tokens [ index + 1 ] . type == TokenType . LeftParen : index , statement = _handle_function_call ( tokens , tokens_len , index ) statements . append ( statement ) elif _is_word_type ( tokens [ index ] . type ) : arguments . append ( Word ( type = _word_type ( tokens [ index ] . type ) , contents = tokens [ index ] . content , line = tokens [ index ] . line , col = tokens [ index ] . col , index = index ) ) index = index + 1 return ( index , GenericBody ( statements = statements , arguments = arguments ) ) | The main collector for all AST functions . |
6,850 | def _scan_for_tokens ( contents ) : scanner = re . Scanner ( [ ( r"(?<![^\s\(])([\"\'])(?:(?=(\\?))\2.)*?\1(?![^\s\)])" , lambda s , t : ( TokenType . QuotedLiteral , t ) ) , ( r"(?<![^\s\(])-?[0-9]+(?![^\s\)\(])" , lambda s , t : ( TokenType . Number , t ) ) , ( r"\(" , lambda s , t : ( TokenType . LeftParen , t ) ) , ( r"\)" , lambda s , t : ( TokenType . RightParen , t ) ) , ( r"(?<![^\s\(])[a-zA-z_][a-zA-Z0-9_]*(?![^\s\)\(])" , lambda s , t : ( TokenType . Word , t ) ) , ( r"(?<![^\s\(])\${[a-zA-z_][a-zA-Z0-9_]*}(?![^\s\)])" , lambda s , t : ( TokenType . Deref , t ) ) , ( r"\n" , lambda s , t : ( TokenType . Newline , t ) ) , ( r"\s+" , lambda s , t : ( TokenType . Whitespace , t ) ) , ( r"(?<![^\s\(\\])[\"]([^\"]|\\[\"])*$" , lambda s , t : ( TokenType . BeginDoubleQuotedLiteral , t ) ) , ( r"[^\s]*(?<!\\)[\"](?![^\s\)])" , lambda s , t : ( TokenType . EndDoubleQuotedLiteral , t ) ) , ( r"(?<![^\s\(\\])[\']([^\']|\\[\'])*$" , lambda s , t : ( TokenType . BeginSingleQuotedLiteral , t ) ) , ( r"[^\s]*(?<!\\)[\'](?![^\s\)])" , lambda s , t : ( TokenType . EndSingleQuotedLiteral , t ) ) , ( r"#.rst:$" , lambda s , t : ( TokenType . BeginRSTComment , t ) ) , ( r"#\[=*\[.rst:$" , lambda s , t : ( TokenType . BeginInlineRST , t ) ) , ( r"#\]=*\]$" , lambda s , t : ( TokenType . EndInlineRST , t ) ) , ( r"#" , lambda s , t : ( TokenType . Comment , t ) ) , ( r"([^\s\(\)]+|[^\s\(]*[^\)]|[^\(][^\s\)]*)" , lambda s , t : ( TokenType . UnquotedLiteral , t ) ) ] ) tokens_return = [ ] lines = contents . splitlines ( True ) lineno = 0 for line in lines : lineno += 1 col = 1 tokens , remaining = scanner . scan ( line ) if remaining != "" : msg = "Unknown tokens found on line {0}: {1}" . format ( lineno , remaining ) raise RuntimeError ( msg ) for token_type , token_contents in tokens : tokens_return . append ( Token ( type = token_type , content = token_contents , line = lineno , col = col ) ) col += len ( token_contents ) return tokens_return | Scan a string for tokens and return immediate form tokens . |
6,851 | def _replace_token_range ( tokens , start , end , replacement ) : tokens = tokens [ : start ] + replacement + tokens [ end : ] return tokens | For a range indicated from start to end replace with replacement . |
6,852 | def _is_really_comment ( tokens , index ) : if tokens [ index ] . type == TokenType . Comment : return True try : if tokens [ index ] . content . lstrip ( ) [ 0 ] == "#" : return True except IndexError : return False | Return true if the token at index is really a comment . |
6,853 | def _paste_tokens_line_by_line ( tokens , token_type , begin , end ) : block_index = begin while block_index < end : rst_line = tokens [ block_index ] . line line_traversal_index = block_index pasted = "" try : while tokens [ line_traversal_index ] . line == rst_line : pasted += tokens [ line_traversal_index ] . content line_traversal_index += 1 except IndexError : assert line_traversal_index == end last_tokens_len = len ( tokens ) tokens = _replace_token_range ( tokens , block_index , line_traversal_index , [ Token ( type = token_type , content = pasted , line = tokens [ block_index ] . line , col = tokens [ block_index ] . col ) ] ) end -= last_tokens_len - len ( tokens ) block_index += 1 return ( block_index , len ( tokens ) , tokens ) | Return lines of tokens pasted together line by line . |
6,854 | def _find_recorder ( recorder , tokens , index ) : if recorder is None : for recorder_factory in _RECORDERS : recorder = recorder_factory . maybe_start_recording ( tokens , index ) if recorder is not None : return recorder return recorder | Given a current recorder and a token index try to find a recorder . |
6,855 | def _compress_tokens ( tokens ) : recorder = None def _edge_case_stray_end_quoted ( tokens , index ) : tokens [ index ] = Token ( type = TokenType . UnquotedLiteral , content = tokens [ index ] . content , line = tokens [ index ] . line , col = tokens [ index ] . col ) tokens_len = len ( tokens ) index = 0 with _EdgeCaseStrayParens ( ) as edge_case_stray_parens : edge_cases = [ ( _is_paren_type , edge_case_stray_parens ) , ( _is_end_quoted_type , _edge_case_stray_end_quoted ) , ] while index < tokens_len : recorder = _find_recorder ( recorder , tokens , index ) if recorder is not None : result = recorder . consume_token ( tokens , index , tokens_len ) if result is not None : ( index , tokens_len , tokens ) = result recorder = None else : for matcher , handler in edge_cases : if matcher ( tokens [ index ] . type ) : handler ( tokens , index ) index += 1 return tokens | Paste multi - line strings comments RST etc together . |
6,856 | def tokenize ( contents ) : tokens = _scan_for_tokens ( contents ) tokens = _compress_tokens ( tokens ) tokens = [ token for token in tokens if token . type != TokenType . Whitespace ] return tokens | Parse a string called contents for CMake tokens . |
6,857 | def parse ( contents , tokens = None ) : if tokens is None : tokens = [ t for t in tokenize ( contents ) ] token_index , body = _ast_worker ( tokens , len ( tokens ) , 0 , None ) assert token_index == len ( tokens ) assert body . arguments == [ ] return ToplevelBody ( statements = body . statements ) | Parse a string called contents for an AST and return it . |
6,858 | def maybe_start_recording ( tokens , index ) : if _is_really_comment ( tokens , index ) : return _CommentedLineRecorder ( index , tokens [ index ] . line ) return None | Return a new _CommentedLineRecorder when it is time to record . |
6,859 | def maybe_start_recording ( tokens , index ) : if tokens [ index ] . type == TokenType . BeginRSTComment : return _RSTCommentBlockRecorder ( index , tokens [ index ] . line ) return None | Return a new _RSTCommentBlockRecorder when its time to record . |
6,860 | def maybe_start_recording ( tokens , index ) : if tokens [ index ] . type == TokenType . BeginInlineRST : return _InlineRSTRecorder ( index ) | Return a new _InlineRSTRecorder when its time to record . |
6,861 | def maybe_start_recording ( tokens , index ) : if _is_begin_quoted_type ( tokens [ index ] . type ) : string_type = _get_string_type_from_token ( tokens [ index ] . type ) return _MultilineStringRecorder ( index , string_type ) return None | Return a new _MultilineStringRecorder when its time to record . |
6,862 | def stratified_kfold ( df , n_folds ) : sessions = pd . DataFrame . from_records ( list ( df . index . unique ( ) ) ) . groupby ( 0 ) . apply ( lambda x : x [ 1 ] . unique ( ) ) sessions . apply ( lambda x : np . random . shuffle ( x ) ) folds = [ ] for i in range ( n_folds ) : idx = sessions . apply ( lambda x : pd . Series ( x [ i * ( len ( x ) / n_folds ) : ( i + 1 ) * ( len ( x ) / n_folds ) ] ) ) idx = pd . DataFrame ( idx . stack ( ) . reset_index ( level = 1 , drop = True ) ) . set_index ( 0 , append = True ) . index . values folds . append ( df . loc [ idx ] ) return folds | Create stratified k - folds from an indexed dataframe |
6,863 | def keystroke_model ( ) : model = Pohmm ( n_hidden_states = 2 , init_spread = 2 , emissions = [ 'lognormal' , 'lognormal' ] , smoothing = 'freq' , init_method = 'obs' , thresh = 1 ) return model | Generates a 2 - state model with lognormal emissions and frequency smoothing |
6,864 | def run_backdoor ( address , namespace = None ) : log . info ( "starting on %r" % ( address , ) ) serversock = io . Socket ( ) serversock . setsockopt ( socket . SOL_SOCKET , socket . SO_REUSEADDR , 1 ) serversock . bind ( address ) serversock . listen ( socket . SOMAXCONN ) while 1 : clientsock , address = serversock . accept ( ) log . info ( "connection received from %r" % ( address , ) ) scheduler . schedule ( backdoor_handler , args = ( clientsock , namespace ) ) | start a server that runs python interpreters on connections made to it |
6,865 | def backdoor_handler ( clientsock , namespace = None ) : namespace = { } if namespace is None else namespace . copy ( ) console = code . InteractiveConsole ( namespace ) multiline_statement = [ ] stdout , stderr = StringIO ( ) , StringIO ( ) clientsock . sendall ( PREAMBLE + "\n" + PS1 ) for input_line in _produce_lines ( clientsock ) : input_line = input_line . rstrip ( ) if input_line : input_line = '\n' + input_line source = '\n' . join ( multiline_statement ) + input_line response = '' with _wrap_stdio ( stdout , stderr ) : result = console . runsource ( source ) response += stdout . getvalue ( ) err = stderr . getvalue ( ) if err : response += err if err or not result : multiline_statement = [ ] response += PS1 else : multiline_statement . append ( input_line ) response += PS2 clientsock . sendall ( response ) | start an interactive python interpreter on an existing connection |
def prepare_params(self):
    """Prepare the parameters passed to the templatetag.

    Resolves the fragment name (or strips quotes from a literal one), the
    expire time, the optional version, and the resolved ``vary_on`` values.
    """
    if self.options.resolve_fragment:
        # Fragment name is a template variable: resolve it in context.
        self.fragment_name = self.node.fragment_name.resolve(self.context)
    else:
        # Fragment name is a literal: strip one matching pair of quotes.
        self.fragment_name = str(self.node.fragment_name)
        for char in '\'\"':
            if self.fragment_name.startswith(char) or self.fragment_name.endswith(char):
                if self.fragment_name.startswith(char) and self.fragment_name.endswith(char):
                    self.fragment_name = self.fragment_name[1:-1]
                    break
                else:
                    # Only one side quoted: malformed tag argument.
                    raise ValueError('Number of quotes around the fragment name is incoherent')
    self.expire_time = self.get_expire_time()
    if self.options.versioning:
        self.version = force_bytes(self.get_version())
    # Resolve every vary-on argument against the current template context.
    self.vary_on = [template.Variable(var).resolve(self.context) for var in self.node.vary_on]
def get_expire_time(self):
    """Return the expire time passed to the templatetag.

    Must resolve to ``None`` or something whose string form is all digits;
    raises ``TemplateSyntaxError`` otherwise.
    """
    try:
        expire_time = self.node.expire_time.resolve(self.context)
    except template.VariableDoesNotExist:
        raise template.TemplateSyntaxError(
            '"%s" tag got an unknown variable: %r'
            % (self.node.nodename, self.node.expire_time.var))
    if expire_time is not None:
        try:
            expire_time = str(expire_time)
            if not expire_time.isdigit():
                raise TypeError
            expire_time = int(expire_time)
        except (ValueError, TypeError):
            raise template.TemplateSyntaxError(
                '"%s" tag got a non-integer (or None) timeout value: %r'
                % (self.node.nodename, expire_time))
    return expire_time
def get_version(self):
    """Return the stringified version passed to the templatetag, or ``None``
    when no version expression was given."""
    if not self.node.version:
        return None
    try:
        resolved = self.node.version.resolve(self.context)
    except template.VariableDoesNotExist:
        raise template.TemplateSyntaxError(
            '"%s" tag got an unknown variable: %r'
            % (self.node.nodename, self.node.version.var))
    return '%s' % smart_str('%s' % resolved)
def hash_args(self):
    """Take all the arguments passed after the fragment name and return a
    hashed version, used as part of the cache key."""
    quoted = [urlquote(force_bytes(var)) for var in self.vary_on]
    joined = ':'.join(quoted)
    return hashlib.md5(force_bytes(joined)).hexdigest()
def get_cache_key_args(self):
    """Return the arguments to be passed to the base cache key returned by
    ``get_base_cache_key``; includes the pk only when the option asks for it."""
    key_args = {
        'nodename': self.node.nodename,
        'name': self.fragment_name,
        'hash': self.hash_args(),
    }
    if self.options.include_pk:
        key_args['pk'] = self.get_pk()
    return key_args
def cache_set(self, to_cache):
    """Set content into the cache under this fragment's key, with the
    configured expiry."""
    key, ttl = self.cache_key, self.expire_time
    self.cache.set(key, to_cache, ttl)
def render_node(self):
    """Render the template nodelist and save the generated content on self."""
    rendered = self.node.nodelist.render(self.context)
    self.content = rendered
def create_content(self):
    """Render the template, apply the configured options, and save the result
    to the cache (cache errors are logged unless template debugging is on)."""
    self.render_node()
    if self.options.compress_spaces:
        # Collapse whitespace runs into single spaces.
        self.content = self.RE_SPACELESS.sub(' ', self.content)
    to_cache = self.encode_content() if self.options.compress else self.content
    to_cache = self.join_content_version(to_cache)
    try:
        self.cache_set(to_cache)
    except Exception:
        if is_template_debug_activated():
            raise
        logger.exception('Error when saving the cached template fragment')
def get_templatetag_module(cls):
    """Return the templatetags module name for which the current class is
    used; memoized per class on ``CacheTag._templatetags_modules``.

    Used to render the nocache blocks by loading the correct module.
    """
    memo = CacheTag._templatetags_modules
    if cls not in memo:
        tags = cls.get_all_tags_and_filters_by_function()['tags']
        cache_tag_func = CacheTag._templatetags[cls]['cache']
        memo[cls] = tags[cache_tag_func][0]
    return memo[cls]
def render_nocache(self):
    """Render the nocache blocks of the cached content and return the whole
    html, by re-running it through a minimal template that loads this tag's
    module."""
    parts = [
        template.BLOCK_TAG_START,
        'load %s' % self.get_templatetag_module(),
        template.BLOCK_TAG_END,
        self.RAW_TOKEN_START,
        self.content,
        self.RAW_TOKEN_END,
    ]
    return template.Template(''.join(parts)).render(self.context)
def user_active_directory_deactivate(user, attributes, created, updated):
    """Deactivate user accounts based on Active Directory's userAccountControl flags.

    Requires userAccountControl to be included in LDAP_SYNC_USER_EXTRA_ATTRIBUTES.
    Best-effort: a missing, empty, or non-numeric attribute leaves the user
    untouched (the original only guarded against KeyError and crashed with
    IndexError/ValueError on empty or malformed values).
    """
    try:
        user_account_control = int(attributes['userAccountControl'][0])
    except (KeyError, IndexError, ValueError, TypeError):
        return
    # Bit 0x2 is ACCOUNTDISABLE in the userAccountControl bitmask.
    if user_account_control & 2:
        user.is_active = False
6,877 | def _get_contents_between ( string , opener , closer ) : opener_location = string . index ( opener ) closer_location = string . index ( closer ) content = string [ opener_location + 1 : closer_location ] return content | Get the contents of a string between two characters |
6,878 | def _check_whitespace ( string ) : if string . count ( ' ' ) + string . count ( '\t' ) + string . count ( '\n' ) > 0 : raise ValueError ( INSTRUCTION_HAS_WHITESPACE ) | Make sure thre is no whitespace in the given string . Will raise a ValueError if whitespace is detected |
6,879 | def _check_parameters ( parameters , symbols ) : for param in parameters : if not param : raise ValueError ( EMPTY_PARAMETER ) elif ( param [ 0 ] in symbols ) and ( not param [ 1 : ] ) : print ( param ) raise ValueError ( EMPTY_KEYWORD_PARAMETER ) | Checks that the parameters given are not empty . Ones with prefix symbols can be denoted by including the prefix in symbols |
def _check_dependencies(string):
    """Check the dependencies constructor: dependencies, when present, must be
    the first thing defined. Returns True iff a dependency list exists."""
    opener, closer = '(', ')'
    _check_enclosing_characters(string, opener, closer)
    if opener not in string:
        return False
    if not string.startswith(opener):
        raise ValueError(DEPENDENCIES_NOT_FIRST)
    return True
def _check_building_options(string):
    """Check that the building options block, when present, is defined last,
    after the task name and the dependencies. Returns True iff it exists."""
    opener, closer = '{', '}'
    _check_enclosing_characters(string, opener, closer)
    if opener not in string:
        return False
    if not string.endswith(closer):
        raise ValueError(OPTIONS_NOT_LAST)
    return True
def _parse_dependencies(string):
    """Parse the dependency list and sort the entries into buildable and
    given dependencies; return both lists plus the unparsed remainder."""
    contents = _get_contents_between(string, '(', ')')
    entries = contents.split(',')
    _check_parameters(entries, ('?',))
    buildable_dependencies = []
    given_dependencies = []
    for entry in entries:
        # A '?' prefix marks a dependency supplied by the caller.
        if entry[0] == '?':
            given_dependencies.append(entry[1:])
        else:
            buildable_dependencies.append(entry)
    remainder = string[string.index(')') + 1:]
    return buildable_dependencies, given_dependencies, remainder
def parseString(string):
    """Take an entire instruction as a string, parse it, and return an object
    whose attributes hold the fields gathered from the parsing.

    Attributes on the result: input_name, output_name, buildable_dependencies,
    given_dependencies, output_format, building_directory, output_directory.
    """
    from types import SimpleNamespace

    buildable_dependencies = []
    given_dependencies = []
    output_directory = None
    output_format = None
    building_directory = None
    output_name = None
    _check_whitespace(string)
    if _check_dependencies(string):
        buildable_dependencies, given_dependencies, string = _parse_dependencies(string)
    if _check_building_options(string):
        output_directory, output_format, building_directory, string = _parse_building_options(string)
    # Strip a leading/trailing flow operator left over after the other
    # sections were consumed.
    if string[0] == '>':
        string = string[1:]
    if string[-1] == '>':
        string = string[:-1]
    if _check_flow_operator(string):
        greater_than_location = string.index('>')
        output_name = string[greater_than_location + 1:]
        string = string[:greater_than_location]
    # BUG FIX: the original built ``ret = object()`` and assigned attributes
    # to it, which always raises AttributeError (bare ``object`` instances
    # have no __dict__). Use a namespace object instead.
    return SimpleNamespace(
        input_name=string,
        output_name=output_name,
        buildable_dependencies=buildable_dependencies,
        given_dependencies=given_dependencies,
        output_format=output_format,
        building_directory=building_directory,
        output_directory=output_directory,
    )
def GetAlias(session=None):
    """Return the specified alias or, if none, the alias associated with the
    provided credentials (logging in on demand)."""
    if session is not None:
        return session['alias']
    if not clc.ALIAS:
        # Lazily authenticate to populate the module-level alias.
        clc.v2.API._Login()
    return clc.ALIAS
def GetLocation(session=None):
    """Return the specified location or, if none, the default location
    associated with the provided credentials and alias (logging in on
    demand)."""
    if session is not None:
        return session['location']
    if not clc.LOCATION:
        # Lazily authenticate to populate the module-level location.
        clc.v2.API._Login()
    return clc.LOCATION
def PrimaryDatacenter(self):
    """Return the primary datacenter object associated with the account."""
    return clc.v2.Datacenter(
        alias=self.alias,
        location=self.data['primaryDataCenter'],
        session=self.session,
    )
def add_file(self, name, filename, compress_hint=True):
    """Save the actual file in the store by streaming it to ``add_stream``.

    NOTE(review): ``compress_hint`` is accepted but never forwarded to
    ``add_stream`` -- confirm whether it should be passed through.
    NOTE(review): the file handle opened here is never explicitly closed;
    presumably ``add_stream`` consumes and closes the stream -- verify.
    """
    return self.add_stream(name, open(filename, 'rb'))
def get_file(self, name, filename):
    """Save the content of the file named ``name`` to ``filename``, creating
    parent directories as needed, and return the versioned name."""
    stream, vname = self.get_stream(name)
    path, version = split_name(vname)
    parent = os.path.dirname(filename)
    if parent:
        mkdir(parent)
    with open(filename, 'wb') as destination:
        shutil.copyfileobj(stream, destination)
    return vname
def remove_this_tlink(self, tlink_id):
    """Remove the tlink for the given tlink identifier (first match only)."""
    for tlink in self.get_tlinks():
        if tlink.get_id() != tlink_id:
            continue
        self.node.remove(tlink.get_node())
        break
def remove_this_predicateAnchor(self, predAnch_id):
    """Remove the predicate anchor for the given identifier (first match only)."""
    for anchor in self.get_predicateAnchors():
        if anchor.get_id() != predAnch_id:
            continue
        self.node.remove(anchor.get_node())
        break
def wait_fds(fd_events, inmask=1, outmask=2, timeout=None):
    """Wait for the first of a number of file descriptors to have activity.

    ``fd_events`` is an iterable of ``(fd, eventmask)`` pairs; the return
    value is a list of ``(fd, triggered_events)`` pairs. ``timeout=None``
    blocks indefinitely, ``timeout=0`` polls for one scheduler pass.
    Raises IOError(EINTR) if the scheduler was interrupted.
    """
    current = compat.getcurrent()
    activated = {}
    poll_regs = {}
    # Keep strong references to the callbacks so they can be unregistered.
    callback_refs = {}

    def activate(fd, event):
        # First activation re-schedules the waiting coroutine and cancels
        # the wakeup timer (when one was armed).
        if not activated and timeout != 0:
            scheduler.schedule(current)
            if timeout:
                scheduler._remove_timer(waketime, current)
        # Accumulate every event seen per fd as a bitmask.
        activated.setdefault(fd, 0)
        activated[fd] |= event

    for fd, events in fd_events:
        readable = None
        writable = None
        if events & inmask:
            readable = functools.partial(activate, fd, inmask)
        if events & outmask:
            writable = functools.partial(activate, fd, outmask)
        callback_refs[fd] = (readable, writable)
        poll_regs[fd] = scheduler._register_fd(fd, readable, writable)

    if timeout:
        # Positive timeout: sleep until either an fd fires or time runs out.
        waketime = time.time() + timeout
        scheduler.pause_until(waketime)
    elif timeout == 0:
        # Zero timeout: yield once and collect whatever fired.
        scheduler.pause()
    else:
        # No timeout: block until activate() re-schedules us.
        scheduler.state.mainloop.switch()

    # Tear down every registration made above (py2 dict iteration).
    for fd, reg in poll_regs.iteritems():
        readable, writable = callback_refs[fd]
        scheduler._unregister_fd(fd, readable, writable, reg)
    if scheduler.state.interrupted:
        raise IOError(errno.EINTR, "interrupted system call")
    return activated.items()
def hack_find_packages(include_str):
    """Patch a setuptools.find_packages issue: return the package itself plus
    each discovered sub-package with the parent prefix applied."""
    subpackages = ['%s.%s' % (include_str, pkg) for pkg in find_packages(include_str)]
    return [include_str] + subpackages
def WaitUntilComplete(self, poll_freq=2, timeout=None):
    """Poll until all request objects have completed.

    :param poll_freq: seconds to sleep between polls
    :param timeout: optional overall deadline in seconds
    :return: the number of requests that ended in error
    :raises clc.RequestTimeoutException: if ``timeout`` elapses first
    """
    start_time = time.time()
    while len(self.requests):
        cur_requests = []
        for request in self.requests:
            status = request.Status()
            if status in ('notStarted', 'executing', 'resumed', 'queued', 'running'):
                cur_requests.append(request)
            elif status == 'succeeded':
                self.success_requests.append(request)
            elif status in ('failed', 'unknown'):
                self.error_requests.append(request)
        self.requests = cur_requests
        if not self.requests:
            # All requests settled -- don't sleep one extra poll interval.
            break
        # BUG FIX: the original tested ``self.requests > 0``, comparing a
        # list to an int (always True on py2, TypeError on py3); test
        # emptiness instead before checking the deadline.
        if clc.v2.time_utils.TimeoutExpired(start_time, timeout):
            raise clc.RequestTimeoutException(
                'Timeout waiting for Requests: {0}'.format(self.requests[0].id),
                self.requests[0].Status())
        time.sleep(poll_freq)
    return len(self.error_requests)
def WaitUntilComplete(self, poll_freq=2, timeout=None):
    """Poll until this request's status is completed.

    :param poll_freq: seconds to sleep between polls
    :param timeout: optional deadline (applies while status is 'executing')
    :raises clc.RequestTimeoutException: if ``timeout`` elapses
    :raises clc.CLCException: if execution fails
    """
    start_time = time.time()
    while not self.time_completed:
        status = self.Status()
        if status == 'executing':
            if not self.time_executed:
                self.time_executed = time.time()
            if clc.v2.time_utils.TimeoutExpired(start_time, timeout):
                raise clc.RequestTimeoutException(
                    'Timeout waiting for Request: {0}'.format(self.id), status)
        elif status == 'succeeded':
            self.time_completed = time.time()
            # Done -- skip the final poll sleep.
            break
        # BUG FIX: the original wrote ("failed", "resumed" or "unknown"),
        # which evaluates to the 2-tuple ("failed", "resumed") so "unknown"
        # never matched. NOTE(review): the list-level WaitUntilComplete
        # treats 'resumed' as still-in-progress; confirm whether it really
        # belongs in this failure set.
        elif status in ('failed', 'resumed', 'unknown'):
            self.time_completed = time.time()
            raise clc.CLCException(
                '%s %s execution %s' % (self.context_key, self.context_val, status))
        time.sleep(poll_freq)
def Server(self):
    """Return the server associated with this request; raises CLCException
    when the request context is not server-related."""
    if self.context_key == 'newserver':
        # New-server requests store a URL to query for the resulting id.
        server_id = clc.v2.API.Call('GET', self.context_val, session=self.session)['id']
    elif self.context_key == 'server':
        server_id = self.context_val
    else:
        raise clc.CLCException("%s object not server" % self.context_key)
    return clc.v2.Server(id=server_id, alias=self.alias, session=self.session)
def login(self):
    """Perform the login on TheTVDB given the api key, user name and account
    identifier; stores the token and marks the client authenticated."""
    payload = {
        'apikey': self.api_key,
        'username': self.username,
        'userkey': self.account_identifier,
    }
    auth_resp = requests_util.run_request(
        'post',
        self.API_BASE_URL + '/login',
        data=json.dumps(payload),
        headers=self.__get_header())
    if auth_resp.status_code != 200:
        raise AuthenticationFailedException('Authentication failed!')
    auth_resp_data = self.parse_raw_response(auth_resp)
    self.__token = auth_resp_data['token']
    self.__auth_time = datetime.now()
    self.is_authenticated = True
def search_series(self, name=None, imdb_id=None, zap2it_id=None):
    """Search for a series in TheTVDB by either its name, imdb_id or zap2it_id.

    Only the arguments actually supplied end up in the query string.
    """
    # NOTE: locals() must be captured before any other local is created so it
    # maps exactly this call's arguments; the helper below looks the values
    # up by the keys of ``optional_parameters``.
    arguments = locals()
    # Maps this method's parameter names to TheTVDB query-string parameter names.
    optional_parameters = {'name': 'name', 'imdb_id': 'imdbId', 'zap2it_id': 'zap2itId'}
    query_string = utils.query_param_string_from_option_args(optional_parameters, arguments)
    raw_response = requests_util.run_request(
        'get',
        '%s%s?%s' % (self.API_BASE_URL, '/search/series', query_string),
        headers=self.__get_header_with_auth())
    return self.parse_raw_response(raw_response)
def get_series_actors(self, series_id):
    """Retrieve the information on the actors of a particular series, given
    its TheTVDB id."""
    url = self.API_BASE_URL + '/series/%d/actors' % series_id
    raw_response = requests_util.run_request('get', url,
                                             headers=self.__get_header_with_auth())
    return self.parse_raw_response(raw_response)
def get_series_episodes(self, series_id, page=1):
    """Retrieve all episodes for a particular series given its TheTVDB id;
    the API returns at most 100 results per page."""
    url = self.API_BASE_URL + '/series/%d/episodes?page=%d' % (series_id, page)
    raw_response = requests_util.run_request('get', url,
                                             headers=self.__get_header_with_auth())
    return self.parse_raw_response(raw_response)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.