idx
int64 0
63k
| question
stringlengths 61
4.03k
| target
stringlengths 6
1.23k
|
|---|---|---|
10,300
|
def data_filler_detailed_registration(self, number_of_rows, db):
    """Create and fill the detailed-registration table with fake rows.

    :param number_of_rows: number of fake records to insert
    :param db: collection/table object exposing a ``save(dict)`` method
    """
    try:
        detailed_registration = db
        # Fix: removed unused `data_list` local; range(0, n) -> range(n).
        for i in range(number_of_rows):
            post_det_reg = {
                "id": rnd_id_generator(self),
                "email": self.faker.safe_email(),
                "password": self.faker.md5(raw_output=False),
                "lastname": self.faker.last_name(),
                "name": self.faker.first_name(),
                # NOTE(review): "adress" is misspelled but kept intact --
                # consumers may depend on this exact key.
                "adress": self.faker.address(),
                "phone": self.faker.phone_number(),
            }
            detailed_registration.save(post_det_reg)
        # NOTE(review): success is logged at WARNING to match the sibling
        # fillers; INFO would arguably be the right level.
        logger.warning(
            'detailed_registration Commits are successful after write job!',
            extra=d)
    except Exception as e:
        logger.error(e, extra=d)
|
Creates and fills the table with detailed registration information.
|
10,301
|
def data_filler_customer(self, number_of_rows, db):
    """Create and fill the customer table with fake rows.

    :param number_of_rows: number of fake records to insert
    :param db: collection/table object exposing a ``save(dict)`` method
    """
    try:
        customer = db
        # Fix: removed unused `data_list` local; range(0, n) -> range(n).
        for i in range(number_of_rows):
            post_cus_reg = {
                "id": rnd_id_generator(self),
                "name": self.faker.first_name(),
                "lastname": self.faker.last_name(),
                "address": self.faker.address(),
                "country": self.faker.country(),
                "city": self.faker.city(),
                "registry_date": self.faker.date(pattern="%d-%m-%Y"),
                "birthdate": self.faker.date(pattern="%d-%m-%Y"),
                "email": self.faker.safe_email(),
                "phone_number": self.faker.phone_number(),
                "locale": self.faker.locale(),
            }
            customer.save(post_cus_reg)
        # NOTE(review): success logged at WARNING to match sibling fillers.
        logger.warning(
            'customer Commits are successful after write job!',
            extra=d)
    except Exception as e:
        logger.error(e, extra=d)
|
creates and fills the table with customer data
|
10,302
|
def visit(self, node):
    """Walk a parse tree, transforming it into another representation.

    Dispatches to ``visit_<expr_name>`` if defined, falling back to
    ``generic_visit``, passing the node plus its already-visited children.
    """
    method = getattr(self, 'visit_' + node.expr_name, self.generic_visit)
    try:
        # Visit children bottom-up, then hand the results to the handler.
        return method(node, [self.visit(n) for n in node])
    except (VisitationError, UndefinedLabel):
        # Already carries visitation context; don't re-wrap.
        raise
    except self.unwrapped_exceptions:
        # Caller opted these types out of wrapping.
        raise
    except Exception:
        # Wrap any other failure so the error points at the parse node.
        exc_class, exc, tb = exc_info()
        reraise(VisitationError, VisitationError(exc, exc_class, node), tb)
|
Walk a parse tree transforming it into another representation .
|
10,303
|
def _parse_or_match ( self , text , pos , method_name ) : if not self . grammar : raise RuntimeError ( "The {cls}.{method}() shortcut won't work because {cls} was " "never associated with a specific " "grammar. Fill out its " "`grammar` attribute, and try again." . format ( cls = self . __class__ . __name__ , method = method_name ) ) return self . visit ( getattr ( self . grammar , method_name ) ( text , pos = pos ) )
|
Execute a parse or match on the default grammar followed by a visitation .
|
10,304
|
def _expressions_from_rules(self, rule_syntax, custom_rules):
    """Return the rules for parsing the grammar definition syntax.

    Bootstraps the rule grammar by hand-building the expressions that can
    parse ``rule_syntax``, then visiting the resulting tree.
    """
    comment = Regex(r'#[^\r\n]*', name='comment')
    meaninglessness = OneOf(Regex(r'\s+'), comment, name='meaninglessness')
    _ = ZeroOrMore(meaninglessness, name='_')
    equals = Sequence(Literal('='), _, name='equals')
    label = Sequence(Regex(r'[a-zA-Z_][a-zA-Z_0-9]*'), _, name='label')
    reference = Sequence(label, Not(equals), name='reference')
    quantifier = Sequence(Regex(r'[*+?]'), _, name='quantifier')
    spaceless_literal = Regex(r'u?r?"[^"\\]*(?:\\.[^"\\]*)*"',
                              ignore_case=True, dot_all=True,
                              name='spaceless_literal')
    literal = Sequence(spaceless_literal, _, name='literal')
    regex = Sequence(Literal('~'), literal,
                     Regex('[ilmsuxa]*', ignore_case=True), _,
                     name='regex')
    atom = OneOf(reference, literal, regex, name='atom')
    quantified = Sequence(atom, quantifier, name='quantified')
    term = OneOf(quantified, atom, name='term')
    not_term = Sequence(Literal('!'), term, _, name='not_term')
    # Prepend so negation is tried before the other term alternatives.
    term.members = (not_term,) + term.members
    sequence = Sequence(term, OneOrMore(term), name='sequence')
    or_term = Sequence(Literal('/'), _, term, name='or_term')
    ored = Sequence(term, OneOrMore(or_term), name='ored')
    expression = OneOf(ored, sequence, term, name='expression')
    rule = Sequence(label, equals, expression, name='rule')
    rules = Sequence(_, OneOrMore(rule), name='rules')
    rule_tree = rules.parse(rule_syntax)
    # NOTE(review): custom_rules is accepted but unused here -- presumably
    # consumed by the caller; confirm before removing the parameter.
    return RuleVisitor().visit(rule_tree)
|
Return the rules for parsing the grammar definition syntax .
|
10,305
|
def visit_parenthesized ( self , node , parenthesized ) : left_paren , _ , expression , right_paren , _ = parenthesized return expression
|
Treat a parenthesized subexpression as just its contents .
|
10,306
|
def visit_rule ( self , node , rule ) : label , equals , expression = rule expression . name = label return expression
|
Assign a name to the Expression and return it .
|
10,307
|
def visit_regex(self, node, regex):
    """Return a Regex expression built from a ``~"..."`` node plus flags."""
    _tilde, literal, flag_node, _ws = regex
    flag_letters = flag_node.text.upper()
    return Regex(
        literal.literal,
        ignore_case='I' in flag_letters,
        locale='L' in flag_letters,
        multiline='M' in flag_letters,
        dot_all='S' in flag_letters,
        unicode='U' in flag_letters,
        verbose='X' in flag_letters,
        ascii='A' in flag_letters,
    )
|
Return a Regex expression .
|
10,308
|
def _resolve_refs(self, rule_map, expr, done):
    """Return an expression with all its lazy references recursively resolved.

    :param rule_map: mapping of rule label -> expression
    :param expr: the expression (or LazyReference) to resolve
    :param done: set of already-visited expressions, to stop cycles
    :raises UndefinedLabel: if a reference points at no known rule
    """
    if isinstance(expr, LazyReference):
        label = text_type(expr)
        try:
            reffed_expr = rule_map[label]
        except KeyError:
            raise UndefinedLabel(expr)
        # The referenced expression may itself contain lazy references.
        return self._resolve_refs(rule_map, reffed_expr, done)
    else:
        if getattr(expr, 'members', ()) and expr not in done:
            # Mark before recursing so cyclic grammars terminate.
            done.add(expr)
            expr.members = tuple(self._resolve_refs(rule_map, member, done)
                                 for member in expr.members)
        return expr
|
Return an expression with all its lazy references recursively resolved .
|
10,309
|
def expression(callable, rule_name, grammar):
    """Turn a plain callable into an Expression usable as a grammar rule.

    The callable must take either 2 args ``(text, pos)`` or 5 args
    ``(text, pos, cache, error, grammar)``. It may return an end offset,
    an ``(end, children)`` tuple, or a ready-made Node (or None).
    """
    num_args = len(getargspec(callable).args)
    if num_args == 2:
        is_simple = True
    elif num_args == 5:
        is_simple = False
    else:
        raise RuntimeError("Custom rule functions must take either 2 or 5 "
                           "arguments, not %s." % num_args)

    class AdHocExpression(Expression):
        def _uncached_match(self, text, pos, cache, error):
            result = (callable(text, pos) if is_simple else
                      callable(text, pos, cache, error, grammar))

            if isinstance(result, integer_types):
                # A bare int is the match's end offset; no children.
                end, children = result, None
            elif isinstance(result, tuple):
                end, children = result
            else:
                # Assume a full Node (or None for "no match") was returned.
                return result
            return Node(self, text, pos, end, children=children)

        def _as_rhs(self):
            return '{custom function "%s"}' % callable.__name__

    return AdHocExpression(name=rule_name)
|
Turn a plain callable into an Expression .
|
10,310
|
def _uncached_match(self, text, pos, cache, error):
    """Return a RegexNode for a match starting at *pos*, or None if no match."""
    match = self.re.match(text, pos)
    if match is None:
        return None
    start, end = match.span()
    node = RegexNode(self, text, pos, pos + (end - start))
    # Keep the match object around for visitors that need group access.
    node.match = match
    return node
|
Return length of match; None if no match.
|
10,311
|
def _regex_flags_from_bits ( self , bits ) : flags = 'ilmsuxa' return '' . join ( flags [ i - 1 ] if ( 1 << i ) & bits else '' for i in range ( 1 , len ( flags ) + 1 ) )
|
Return the textual equivalent of numerically encoded regex flags .
|
10,312
|
def _gegetate_args ( self , options ) : for optkey , optval in self . _normalize_options ( options ) : yield optkey if isinstance ( optval , ( list , tuple ) ) : assert len ( optval ) == 2 and optval [ 0 ] and optval [ 1 ] , 'Option value can only be either a string or a (tuple, list) of 2 items' yield optval [ 0 ] yield optval [ 1 ] else : yield optval
|
Generator of args parts based on options specification .
|
10,313
|
def _find_options_in_meta ( self , content ) : if ( isinstance ( content , io . IOBase ) or content . __class__ . __name__ == 'StreamReaderWriter' ) : content = content . read ( ) found = { } for x in re . findall ( '<meta [^>]*>' , content ) : if re . search ( 'name=["\']%s' % self . config . meta_tag_prefix , x ) : name = re . findall ( 'name=["\']%s([^"\']*)' % self . config . meta_tag_prefix , x ) [ 0 ] found [ name ] = re . findall ( 'content=["\']([^"\']*)' , x ) [ 0 ] return found
|
Reads content and extracts options encoded in HTML meta tags
|
10,314
|
def authenticate_credentials(self, token):
    """Authenticate a raw token against stored AuthToken digests.

    Due to the random nature of hashing a salted value, this must inspect
    each candidate token individually to find the correct one.

    :raises exceptions.AuthenticationFailed: if no stored token matches
    """
    msg = _('Invalid token.')
    token = token.decode("utf-8")
    # Pre-filter candidates by the unsalted key prefix.
    for auth_token in AuthToken.objects.filter(
            token_key=token[:CONSTANTS.TOKEN_KEY_LENGTH]):
        if self._cleanup_token(auth_token):
            # Token was expired and cleaned up; skip it.
            continue
        try:
            digest = hash_token(token, auth_token.salt)
        except (TypeError, binascii.Error):
            # Malformed (non-hex) token.
            raise exceptions.AuthenticationFailed(msg)
        # Constant-time comparison to avoid timing attacks.
        if compare_digest(digest, auth_token.digest):
            if knox_settings.AUTO_REFRESH and auth_token.expiry:
                self.renew_token(auth_token)
            return self.validate_user(auth_token)
    raise exceptions.AuthenticationFailed(msg)
|
Due to the random nature of hashing a salted value this must inspect each auth_token individually to find the correct one .
|
10,315
|
def hash_token(token, salt):
    """Calculate the hex digest of *token* + *salt*; both inputs are
    hex-encoded and unhexlified before hashing."""
    hasher = hashes.Hash(sha(), backend=default_backend())
    for hex_value in (token, salt):
        hasher.update(binascii.unhexlify(hex_value))
    return binascii.hexlify(hasher.finalize()).decode()
|
Calculates the hash of a token and salt . input is unhexlified
|
10,316
|
async def Check(self, stream):
    """Handle a health-check request for a single service.

    Replies NOT_FOUND for unknown services, SERVING when no checks are
    registered, otherwise re-runs every check and reports the combined
    status.
    """
    request = await stream.recv_message()
    checks = self._checks.get(request.service)
    if checks is None:
        # Unknown service name.
        await stream.send_trailing_metadata(status=Status.NOT_FOUND)
    elif len(checks) == 0:
        # No checks registered: report SERVING unconditionally.
        await stream.send_message(HealthCheckResponse(
            status=HealthCheckResponse.SERVING,
        ))
    else:
        for check in checks:
            await check.__check__()
        await stream.send_message(HealthCheckResponse(
            status=_status(checks),
        ))
|
Runs the registered health checks and reports the service's status.
|
10,317
|
def set(self, value: Optional[bool]):
    """Set the current status of the check, waking waiters on change."""
    changed = value != self._value
    self._value = value
    if changed:
        # Notify everyone waiting on this check's status.
        for event in self._events:
            event.set()
|
Sets current status of a check
|
10,318
|
def graceful_exit(servers, *, loop,
                  signals=frozenset({signal.SIGINT, signal.SIGTERM})):
    """Utility context-manager to help properly shut down servers in
    response to OS signals.

    Installs ``_exit_handler`` for each signal on entry and removes the
    handlers again on exit.

    :param servers: servers to shut down when a signal arrives
    :param loop: event loop to register the signal handlers on
    :param signals: signals to react to (defaults to SIGINT and SIGTERM)
    """
    signals = set(signals)
    # Shared with the handler so repeated signals can change behaviour.
    flag = []
    for sig_num in signals:
        loop.add_signal_handler(sig_num, _exit_handler, sig_num, servers, flag)
    try:
        yield
    finally:
        # Always unhook, even if the body raised.
        for sig_num in signals:
            loop.remove_signal_handler(sig_num)
|
Utility context - manager to help properly shutdown server in response to the OS signals
|
10,319
|
async def recv_message(self):
    """Coroutine to receive an incoming message from the client.

    Decodes the next frame with the stream codec, then passes the message
    through the dispatch chain before returning it.
    """
    message = await recv_message(self._stream, self._codec, self._recv_type)
    message, = await self._dispatch.recv_message(message)
    return message
|
Coroutine to receive incoming message from the client .
|
10,320
|
async def send_initial_metadata(self, *, metadata=None):
    """Coroutine to send headers with initial metadata to the client.

    May be sent at most once per stream.

    :param metadata: optional initial metadata to attach
    :raises ProtocolError: if initial metadata was already sent
    """
    if self._send_initial_metadata_done:
        raise ProtocolError('Initial metadata was already sent')

    headers = [
        (':status', '200'),
        ('content-type', self._content_type),
    ]
    metadata = MultiDict(metadata or ())
    metadata, = await self._dispatch.send_initial_metadata(metadata)
    headers.extend(encode_metadata(metadata))

    await self._stream.send_headers(headers)
    self._send_initial_metadata_done = True
|
Coroutine to send headers with initial metadata to the client .
|
10,321
|
async def send_message(self, message, **kwargs):
    """Coroutine to send a message to the client.

    Sends initial metadata implicitly if not already sent.

    :param message: the response message to encode and send
    :raises ProtocolError: on a second message for a unary response
    """
    if 'end' in kwargs:
        # Legacy calling convention, kept for backwards compatibility.
        warnings.warn('"end" argument is deprecated, use '
                      '"stream.send_trailing_metadata" explicitly',
                      stacklevel=2)
    end = kwargs.pop('end', False)
    assert not kwargs, kwargs

    if not self._send_initial_metadata_done:
        # Headers must precede the first message on the wire.
        await self.send_initial_metadata()

    if not self._cardinality.server_streaming:
        if self._send_message_count:
            raise ProtocolError('Server should send exactly one message '
                                'in response')

    message, = await self._dispatch.send_message(message)
    await send_message(self._stream, self._codec, message, self._send_type)
    self._send_message_count += 1

    if end:
        await self.send_trailing_metadata()
|
Coroutine to send message to the client .
|
10,322
|
async def send_trailing_metadata(self, *, status=Status.OK,
                                 status_message=None, metadata=None):
    """Coroutine to send trailers with trailing metadata to the client.

    This ends the HTTP/2 stream; it must be the last frame sent.

    :param status: gRPC status to report (default OK)
    :param status_message: optional human-readable status text
    :param metadata: optional trailing metadata
    :raises ProtocolError: if already sent, or OK status on a unary
        response that sent no message
    """
    if self._send_trailing_metadata_done:
        raise ProtocolError('Trailing metadata was already sent')

    if (not self._cardinality.server_streaming
            and not self._send_message_count
            and status is Status.OK):
        raise ProtocolError('Unary response with OK status requires '
                            'a single message to be sent')

    if self._send_initial_metadata_done:
        headers = []
    else:
        # No headers went out yet: this is a trailers-only response.
        headers = [(':status', '200')]

    headers.append(('grpc-status', str(status.value)))
    if status_message is not None:
        headers.append(('grpc-message',
                        encode_grpc_message(status_message)))

    metadata = MultiDict(metadata or ())
    metadata, = await self._dispatch.send_trailing_metadata(metadata)
    headers.extend(encode_metadata(metadata))

    await self._stream.send_headers(headers, end_stream=True)
    self._send_trailing_metadata_done = True

    if status != Status.OK and self._stream.closable:
        # Reset so the peer stops sending promptly on error.
        self._stream.reset_nowait()
|
Coroutine to send trailers with trailing metadata to the client .
|
10,323
|
async def start(self, host=None, port=None, *,
                path=None,
                family=socket.AF_UNSPEC,
                flags=socket.AI_PASSIVE,
                sock=None,
                backlog=100,
                ssl=None,
                reuse_address=None,
                reuse_port=None):
    """Coroutine to start the server.

    Binds either a TCP socket (*host*/*port*) or a UNIX socket (*path*);
    the two modes are mutually exclusive.

    :raises ValueError: if *path* is combined with *host*/*port*
    :raises RuntimeError: if the server is already started
    """
    if path is not None and (host is not None or port is not None):
        raise ValueError("The 'path' parameter can not be used with the "
                         "'host' or 'port' parameters.")

    if self._server is not None:
        raise RuntimeError('Server is already started')

    if path is not None:
        self._server = await self._loop.create_unix_server(
            self._protocol_factory, path, sock=sock, backlog=backlog,
            ssl=ssl)
    else:
        self._server = await self._loop.create_server(
            self._protocol_factory, host, port,
            family=family, flags=flags, sock=sock, backlog=backlog,
            ssl=ssl, reuse_address=reuse_address, reuse_port=reuse_port)
|
Coroutine to start the server .
|
10,324
|
def close(self):
    """Stop accepting new connections and cancel running request handlers.

    :raises RuntimeError: if the server was never started
    """
    server = self._server
    if server is None:
        raise RuntimeError('Server is not started')
    server.close()
    for active_handler in self._handlers:
        active_handler.close()
|
Stops accepting new connections cancels all currently running requests . Request handlers are able to handle CancelledError and exit properly .
|
10,325
|
async def wait_closed(self):
    """Coroutine to wait until all existing request handlers exit properly.

    Call after :py:meth:`close`.

    :raises RuntimeError: if the server was never started
    """
    if self._server is None:
        raise RuntimeError('Server is not started')
    await self._server.wait_closed()
    if self._handlers:
        # NOTE(review): the `loop` argument to asyncio.wait is deprecated
        # since Python 3.8 and removed in 3.10 -- confirm target version.
        await asyncio.wait({h.wait_closed() for h in self._handlers},
                           loop=self._loop)
|
Coroutine to wait until all existing request handlers will exit properly .
|
10,326
|
async def send_request(self):
    """Coroutine to send request headers with metadata to the server.

    Establishes the HTTP/2 stream, builds gRPC pseudo-headers plus
    metadata, and records the stream and its release callback.

    :raises ProtocolError: if the request was already sent
    """
    if self._send_request_done:
        raise ProtocolError('Request is already sent')

    with self._wrapper:
        protocol = await self._channel.__connect__()
        stream = protocol.processor.connection.create_stream(
            wrapper=self._wrapper)

        headers = [
            (':method', 'POST'),
            (':scheme', self._channel._scheme),
            (':path', self._method_name),
            (':authority', self._channel._authority),
        ]
        if self._deadline is not None:
            # Propagate the remaining time budget to the server.
            timeout = self._deadline.time_remaining()
            headers.append(('grpc-timeout', encode_timeout(timeout)))
        content_type = (GRPC_CONTENT_TYPE
                        + '+' + self._codec.__content_subtype__)
        headers.extend((
            ('te', 'trailers'),
            ('content-type', content_type),
            ('user-agent', USER_AGENT),
        ))
        metadata, = await self._dispatch.send_request(
            self._metadata,
            method_name=self._method_name,
            deadline=self._deadline,
            content_type=content_type,
        )
        headers.extend(encode_metadata(metadata))

        release_stream = await stream.send_request(
            headers, _processor=protocol.processor,
        )
        self._stream = stream
        self._release_stream = release_stream
        self._send_request_done = True
|
Coroutine to send request headers with metadata to the server .
|
10,327
|
async def send_message(self, message, *, end=False):
    """Coroutine to send a message to the server.

    Sends the request headers implicitly first if needed.

    :param message: request message to encode and send
    :param end: close the outgoing stream after this message
    :raises ProtocolError: when ending an already-ended stream
    """
    if not self._send_request_done:
        await self.send_request()

    if end and self._end_done:
        raise ProtocolError('Stream was already ended')

    with self._wrapper:
        message, = await self._dispatch.send_message(message)
        await send_message(self._stream, self._codec, message,
                           self._send_type, end=end)
        self._send_message_count += 1
        if end:
            self._end_done = True
|
Coroutine to send message to the server .
|
10,328
|
async def end(self):
    """Coroutine to end the stream from the client side.

    :raises ProtocolError: if already ended, or if a unary request has
        not sent its single message yet
    """
    if self._end_done:
        raise ProtocolError('Stream was already ended')
    if (not self._cardinality.client_streaming
            and not self._send_message_count):
        raise ProtocolError('Unary request requires a single message '
                            'to be sent')
    await self._stream.end()
    self._end_done = True
|
Coroutine to end stream from the client - side .
|
10,329
|
async def recv_initial_metadata(self):
    """Coroutine to wait for headers with initial metadata from the server.

    Validates the HTTP status, optional gRPC status, and content type.
    On premature stream termination it still tries to surface a buffered
    trailers-only error before re-raising.

    :raises ProtocolError: on out-of-order use
    :raises GRPCError: on invalid/missing content type or error status
    """
    if not self._send_request_done:
        raise ProtocolError('Request was not sent yet')

    if self._recv_initial_metadata_done:
        raise ProtocolError('Initial metadata was already received')

    try:
        with self._wrapper:
            headers = await self._stream.recv_headers()
            self._recv_initial_metadata_done = True

            metadata = decode_metadata(headers)
            metadata, = await self._dispatch.recv_initial_metadata(metadata)
            self.initial_metadata = metadata

            headers_map = dict(headers)
            self._raise_for_status(headers_map)
            # Trailers-only responses may carry grpc-status up front.
            self._raise_for_grpc_status(headers_map, optional=True)

            content_type = headers_map.get('content-type')
            if content_type is None:
                raise GRPCError(Status.UNKNOWN,
                                'Missing content-type header')

            base_content_type, _, sub_type = content_type.partition('+')
            sub_type = sub_type or ProtoCodec.__content_subtype__
            if (base_content_type != GRPC_CONTENT_TYPE
                    or sub_type != self._codec.__content_subtype__):
                raise GRPCError(Status.UNKNOWN,
                                'Invalid content-type: {!r}'
                                .format(content_type))
    except StreamTerminatedError:
        # The stream died while reading headers; if trailers were already
        # buffered, prefer reporting their status over the raw error.
        headers = self._stream.recv_headers_nowait()
        if headers is None:
            raise
        else:
            headers_map = dict(headers)
            self._raise_for_status(headers_map)
            self._raise_for_grpc_status(headers_map, optional=True)
            raise
|
Coroutine to wait for headers with initial metadata from the server .
|
10,330
|
async def recv_message(self):
    """Coroutine to receive an incoming message from the server.

    Waits for initial metadata implicitly first if needed.
    """
    if not self._recv_initial_metadata_done:
        await self.recv_initial_metadata()

    with self._wrapper:
        message = await recv_message(self._stream, self._codec,
                                     self._recv_type)
        self._recv_message_count += 1
        message, = await self._dispatch.recv_message(message)
        return message
|
Coroutine to receive incoming message from the server .
|
10,331
|
async def recv_trailing_metadata(self):
    """Coroutine to wait for trailers with trailing metadata from the server.

    :raises ProtocolError: on out-of-order use
    :raises GRPCError: if the trailers carry a non-OK gRPC status
    """
    if not self._end_done:
        raise ProtocolError('Outgoing stream was not ended')

    if (not self._cardinality.server_streaming
            and not self._recv_message_count):
        raise ProtocolError('No messages were received before waiting '
                            'for trailing metadata')

    if self._recv_trailing_metadata_done:
        raise ProtocolError('Trailing metadata was already received')

    with self._wrapper:
        headers = await self._stream.recv_headers()
        self._recv_trailing_metadata_done = True

        metadata = decode_metadata(headers)
        metadata, = await self._dispatch.recv_trailing_metadata(metadata)
        self.trailing_metadata = metadata

        self._raise_for_grpc_status(dict(headers))
|
Coroutine to wait for trailers with trailing metadata from the server .
|
10,332
|
def close(self):
    """Close the connection to the server, if one is open."""
    protocol = self._protocol
    if protocol is not None:
        protocol.processor.close()
        del self._protocol
|
Closes connection to the server .
|
10,333
|
def read_config():
    """Read and preprocess the pydoc-markdown configuration file.

    :returns: the parsed YAML mapping merged with defaults via
        ``default_config``
    """
    with open(PYDOCMD_CONFIG) as fp:
        # NOTE(review): yaml.load without an explicit Loader is deprecated
        # and unsafe on untrusted input; consider yaml.safe_load here.
        config = yaml.load(fp)
    return default_config(config)
|
Reads and preprocesses the pydoc - markdown configuration file .
|
10,334
|
def write_temp_mkdocs_config(inconf):
    """Generate an MkDocs configuration on-the-fly from the pydoc-markdown
    configuration and make sure it gets removed when this program exits.

    :param inconf: the pydoc-markdown configuration mapping
    """
    # Keys that are pydoc-markdown specific and meaningless to MkDocs.
    ignored_keys = ('gens_dir', 'pages', 'headers', 'generate', 'loader',
                    'preprocessor', 'additional_search_paths')
    config = {key: value for key, value in inconf.items()
              if key not in ignored_keys}
    config['docs_dir'] = inconf['gens_dir']
    if 'pages' in inconf:
        # MkDocs >= 1.0 renamed 'pages' to 'nav'.
        config['nav'] = inconf['pages']

    with open('mkdocs.yml', 'w') as fp:
        yaml.dump(config, fp)

    # Clean the generated file up on interpreter exit.
    atexit.register(lambda: os.remove('mkdocs.yml'))
|
Generates a configuration for MkDocs on-the-fly from the pydoc-markdown configuration and makes sure it gets removed when this program exits.
|
10,335
|
def import_object_with_scope(name):
    """Import a Python object by an absolute dotted identifier.

    :returns: ``(obj, scope)`` where *scope* is the object that contains
        *obj* (its module or parent object), or None for a bare module
    :raises ImportError: if a path segment can be neither looked up nor
        imported as a module
    """
    parts = name.split('.')
    current_name = parts[0]
    obj = import_module(current_name)
    scope = None
    for part in parts[1:]:
        current_name += '.' + part
        try:
            # Prefer __dict__ lookup so descriptors are not triggered.
            if hasattr(obj, '__dict__'):
                sub_obj = obj.__dict__[part]
            else:
                sub_obj = getattr(obj, part)
            scope, obj = obj, sub_obj
        except (AttributeError, KeyError):
            # Not an attribute: the path may continue into a submodule.
            try:
                obj = scope = import_module(current_name)
            except ImportError as exc:
                if 'named {}'.format(part) in str(exc):
                    # This segment is the one that failed; report the
                    # full dotted path up to it.
                    raise ImportError(current_name)
                raise
    return obj, scope
|
Imports a Python object by an absolute identifier .
|
10,336
|
def force_lazy_import(name):
    """Import every module-like attribute of *name*.

    Iterates a snapshot list rather than a live view so this also works
    with lazy-import schemes that mutate the namespace during import.
    """
    module = import_object(name)
    namespace = getattr(module, '__dict__', {})
    for attr_name, attr_value in list(namespace.items()):
        if getattr(attr_value, '__module__', None):
            import_object('.'.join((name, attr_name)))
|
Import any modules off of name by iterating a new list rather than a generator so that this library works with lazy imports .
|
10,337
|
def preprocess_section(self, section):
    """Preprocess a section's docstring, splitting Sphinx-style fields.

    ``:param:``, ``:return(s):`` and ``:raise(s):`` lines are collected
    into 'Arguments', 'Returns' and 'Raises' components, which are then
    appended to the section content via ``self._append_section``.
    Fenced code blocks are passed through untouched.
    """
    lines = []
    in_codeblock = False
    keyword = None
    components = {}

    for line in section.content.split('\n'):
        line = line.strip()

        if line.startswith("```"):
            in_codeblock = not in_codeblock

        if not in_codeblock:
            match = re.match(r':(?:param|parameter)\s+(\w+)\s*:(.*)?$', line)
            if match:
                keyword = 'Arguments'
                param = match.group(1)
                text = match.group(2).strip()
                components.setdefault(keyword, []).append(
                    '- `{}`: {}'.format(param, text))
                continue

            match = re.match(r':(?:return|returns)\s*:(.*)?$', line)
            if match:
                keyword = 'Returns'
                components.setdefault(keyword, []).append(
                    match.group(1).strip())
                continue

            # Fix: raw-string prefix was missing here, leaving \s and \w
            # as invalid escape sequences (DeprecationWarning); the
            # pattern itself is unchanged.
            match = re.match(r':(?:raises|raise)\s+(\w+)\s*:(.*)?$', line)
            if match:
                keyword = 'Raises'
                exception = match.group(1)
                text = match.group(2).strip()
                components.setdefault(keyword, []).append(
                    '- `{}`: {}'.format(exception, text))
                continue

        # Continuation lines attach to the most recent field; everything
        # else stays in the main body.
        if keyword is not None:
            components[keyword].append(line)
        else:
            lines.append(line)

    for key in components:
        self._append_section(lines, key, components)

    section.content = '\n'.join(lines)
|
Preprocesses a given section into its components.
|
10,338
|
def create_class(self, data, options=None, **kwargs):
    """Yield instances of documentation classes built from Go data.

    Entries with multiple ``names`` are expanded into one object per
    name; child consts/types/vars/funcs are attached recursively.
    """
    _type = kwargs.get("_type")
    obj_map = dict((cls.type, cls) for cls in ALL_CLASSES)
    try:
        if _type:
            # An explicit type from the parent overrides data["type"].
            LOGGER.debug("Forcing Go Type %s" % _type)
            cls = obj_map[_type]
        else:
            cls = obj_map[data["type"]]
    except KeyError:
        LOGGER.warning("Unknown Type: %s" % data)
    else:
        if cls.inverted_names and "names" in data:
            # Grouped declarations ("var a, b, c ..."): clone the data
            # once per name and recurse.
            for name in data["names"]:
                data_inv = {}
                data_inv.update(data)
                data_inv["name"] = name
                if "names" in data_inv:
                    del data_inv["names"]
                for obj in self.create_class(data_inv):
                    yield obj
        else:
            obj = cls(data, jinja_env=self.jinja_env)
            for child_type in ["consts", "types", "vars", "funcs"]:
                for child_data in data.get(child_type, []):
                    obj.children += list(
                        self.create_class(
                            child_data,
                            # Map the plural container key to the
                            # singular type name.
                            _type=child_type.replace("consts", "const")
                            .replace("types", "type")
                            .replace("vars", "variable")
                            .replace("funcs", "func"),
                        )
                    )
            yield obj
|
Return instance of class based on Go data
|
10,339
|
def pathname(self):
    """Sluggified relative path for filenames, derived from the dotted name.

    Non-ASCII characters are transliterated and anything outside word
    characters and dots collapses to a hyphen.
    """
    ascii_name = unidecode.unidecode(self.name).replace("-", "")
    slug = re.sub(r"[^\w\.]+", "-", ascii_name).strip("-")
    return os.path.join(*slug.split("."))
|
Sluggified path for filenames
|
10,340
|
def include_dir(self, root):
    """Return the directory of this object's file rooted at *root*, joined
    with forward slashes regardless of the OS path separator."""
    segments = [root] + self.pathname.split(os.path.sep)
    return "/".join(segments)
|
Return directory of file
|
10,341
|
def include_path(self):
    """Return the absolute "index" path, always using forward slashes
    rather than the OS path separator."""
    return "/".join([self.include_dir(root=self.url_root), "index"])
|
Return absolute path without regarding OS path separator
|
10,342
|
def create_class(self, data, options=None, **kwargs):
    """Yield an instance of a documentation class based on Javascript data.

    The class is selected by ``data["kind"]``; children are attached
    recursively. Unknown kinds are logged and yield nothing.
    """
    obj_map = dict((cls.type, cls) for cls in ALL_CLASSES)
    try:
        cls = obj_map[data["kind"]]
    except (KeyError, TypeError):
        # TypeError covers non-mapping data (e.g. a bare string).
        LOGGER.warning("Unknown Type: %s" % data)
    else:
        obj = cls(data, jinja_env=self.jinja_env)
        if "children" in data:
            for child_data in data["children"]:
                for child_obj in self.create_class(child_data,
                                                   options=options):
                    obj.children.append(child_obj)
        yield obj
|
Return instance of class based on Javascript data
|
10,343
|
def create_class(self, data, options=None, path=None, **kwargs):
    """Yield an instance of a documentation class based on the Roslyn
    ``type`` property.

    Unknown types are logged and yield nothing.
    """
    obj_map = dict((cls.type, cls) for cls in ALL_CLASSES)
    try:
        cls = obj_map[data["type"].lower()]
    except KeyError:
        LOGGER.warning("Unknown type: %s" % data)
    else:
        obj = cls(data, jinja_env=self.jinja_env, options=options,
                  url_root=self.url_root, **kwargs)
        yield obj
|
Return instance of class based on Roslyn type property
|
10,344
|
def organize_objects(self):
    """Organize objects into the namespace tree.

    Attaches each object's resolved children, registers top-level
    namespace objects, then prunes namespaces that ended up empty.
    """
    def _render_children(obj):
        # Resolve child id strings into real objects and group them.
        for child in obj.children_strings:
            child_object = self.objects.get(child)
            if child_object:
                obj.item_map[child_object.plural].append(child_object)
                obj.children.append(child_object)
        for key in obj.item_map:
            obj.item_map[key].sort()

    def _recurse_ns(obj):
        # Ensure a namespace object exists for obj's top namespace and
        # register obj as one of its children.
        if not obj:
            return
        namespace = obj.top_namespace
        if namespace is not None:
            ns_obj = self.top_namespaces.get(namespace)
            if ns_obj is None or not isinstance(ns_obj, DotNetNamespace):
                for ns_obj in self.create_class({"uid": namespace,
                                                 "type": "namespace"}):
                    self.top_namespaces[ns_obj.id] = ns_obj
            if obj not in ns_obj.children and namespace != obj.id:
                ns_obj.children.append(obj)

    for obj in self.objects.values():
        _render_children(obj)
        _recurse_ns(obj)

    # Clean out dead namespaces. Both loops iterate over a copy: fix --
    # the second loop previously iterated self.namespaces.items() while
    # deleting from it, which raises RuntimeError on Python 3.
    for key, ns in self.top_namespaces.copy().items():
        if not ns.children:
            del self.top_namespaces[key]

    for key, ns in self.namespaces.copy().items():
        if not ns.children:
            del self.namespaces[key]
|
Organize objects and namespaces
|
10,345
|
def transform_doc_comments(text):
    """Parse XML doc-comment content for references and other syntax.

    Rewrites ``<see cref="..."/>`` matches into Sphinx roles and
    ``paramref``-style matches into literal markup.

    :param text: the documentation text to transform
    :returns: the transformed text
    """
    try:
        while True:
            found = DOC_COMMENT_SEE_PATTERN.search(text)
            if found is None:
                break
            ref = (found.group("attr_value")
                   .replace("<", "\<").replace("`", "\`"))

            reftype = "any"
            replacement = ""
            # A two-char identity prefix like "T:" selects the role type.
            if ref[1] == ":" and ref[0] in DOC_COMMENT_IDENTITIES:
                reftype = DOC_COMMENT_IDENTITIES[ref[:1]]
                ref = ref[2:]
                replacement = ":{reftype}:`{ref}`".format(
                    reftype=reftype, ref=ref)
            elif ref[:2] == "!:":
                # "!:" marks an unresolvable reference; emit it verbatim.
                replacement = ref[2:]
            else:
                replacement = ":any:`{ref}`".format(ref=ref)

            # Escape adjacent non-space text so the role stays valid rST.
            text_end = text[found.end():]
            text_start = text[:found.start()]
            text_end = re.sub(r"^(\S)", r"\\\1", text_end)
            text_start = re.sub(r"(\S)$", r"\1 ", text_start)
            text = "".join([text_start, replacement, text_end])
        while True:
            found = DOC_COMMENT_PARAM_PATTERN.search(text)
            if found is None:
                break
            text_end = text[found.end():]
            text_start = text[:found.start()]
            text_end = re.sub(r"^(\S)", r"\\\1", text_end)
            text_start = re.sub(r"(\S)$", r"\1 ", text_start)
            text = "".join([text_start, "``",
                            found.group("attr_value"), "``", text_end])
    except TypeError:
        # Non-string input (e.g. None) is returned unchanged.
        pass
    return text
|
Parse XML content for references and other syntax .
|
10,346
|
def resolve_spec_identifier(self, obj_name):
    """Find the reference name for a spec identifier.

    Falls back to *obj_name* when no reference is known, and to the
    reference's ``fullName`` when there is no ``spec.csharp`` data.
    """
    ref = self.references.get(obj_name)
    if ref is None:
        return obj_name
    pieces = []
    for part in ref.get("spec.csharp", []):
        part_name = part.get("name")
        if part_name == "<":
            pieces.append("{")
        elif part_name == ">":
            pieces.append("}")
        elif "fullName" in part and "uid" in part:
            pieces.append("{fullName}<{uid}>".format(**part))
        elif "uid" in part:
            pieces.append(part["uid"])
        elif "fullName" in part:
            pieces.append(part["fullName"])
    if pieces:
        return "".join(pieces)
    return ref.get("fullName", obj_name)
|
Find reference name based on spec identifier
|
10,347
|
def display(self):
    """Whether this object should be displayed in documentation.

    Undocumented, private, and special members are hidden unless the
    corresponding option is enabled.
    """
    hidden_unless = (
        (self.is_undoc_member, "undoc-members"),
        (self.is_private_member, "private-members"),
        (self.is_special_member, "special-members"),
    )
    for is_hidden_kind, option in hidden_unless:
        if is_hidden_kind and option not in self.options:
            return False
    return True
|
Whether this object should be displayed in documentation .
|
10,348
|
def summary(self):
    """The summary line of the docstring: the first non-empty line after
    stripping, or the empty string if there is none."""
    stripped = (line.strip() for line in self.docstring.splitlines())
    return next((line for line in stripped if line), "")
|
The summary line of the docstring .
|
10,349
|
def resolve_import_alias(name, import_names):
    """Resolve *name* from an aliased import back to its original name.

    :param name: the local name to resolve
    :param import_names: iterable of ``(original, alias)`` pairs
    :returns: the original imported name, or *name* when unaliased
    """
    for original, alias in import_names:
        if original == name:
            # Imported under its own name; nothing to resolve.
            return name
        if alias == name:
            return original
    return name
|
Resolve a name from an aliased import to its original name .
|
10,350
|
def get_full_import_name(import_from, name):
    """Get the full dotted path of *name* from a ``from x import y`` node.

    Relative imports are resolved against the enclosing module.
    """
    base = resolve_import_alias(name, import_from.names)
    module_name = import_from.modname
    if import_from.level:
        # Relative import: resolve "from ..pkg import y" via the module.
        module = import_from.root()
        assert isinstance(module, astroid.nodes.Module)
        module_name = module.relative_to_absolute_name(
            import_from.modname, level=import_from.level)
    return "{}.{}".format(module_name, base)
|
Get the full path of a name from a from x import y statement .
|
10,351
|
def get_full_basename(node, basename):
    """Resolve a partial base name to the full dotted path.

    Looks up the first segment of *basename* in the scope of *node* to
    find what it was imported as (or where the class was defined), and
    substitutes the resolved prefix into *basename*.
    """
    full_basename = basename

    # Strip call arguments before taking the leading dotted segment.
    top_level_name = re.sub(r"\(.*\)", "", basename).split(".", 1)[0]
    # Walk up to the nearest node that supports scope lookup.
    lookup_node = node
    while not hasattr(lookup_node, "lookup"):
        lookup_node = lookup_node.parent
    assigns = lookup_node.lookup(top_level_name)[1]
    for assignment in assigns:
        if isinstance(assignment, astroid.nodes.ImportFrom):
            import_name = get_full_import_name(assignment, top_level_name)
            full_basename = basename.replace(top_level_name, import_name, 1)
            break
        elif isinstance(assignment, astroid.nodes.Import):
            import_name = resolve_import_alias(top_level_name,
                                               assignment.names)
            full_basename = basename.replace(top_level_name, import_name, 1)
            break
        elif isinstance(assignment, astroid.nodes.ClassDef):
            full_basename = "{}.{}".format(assignment.root().name,
                                           assignment.name)
            break

    if isinstance(node, astroid.nodes.Call):
        # Normalise call expressions to "Name()" without arguments.
        full_basename = re.sub(r"\(.*\)", "()", full_basename)

    # Builtins read more naturally without their module prefix.
    if full_basename.startswith("builtins."):
        return full_basename[len("builtins."):]

    if full_basename.startswith("__builtin__."):
        return full_basename[len("__builtin__."):]

    return full_basename
|
Resolve a partial base name to the full path .
|
10,352
|
def get_full_basenames(bases, basenames):
    """Resolve the base nodes and partial names of a class to full names.

    Yields one resolved name per ``(base, basename)`` pair.
    """
    for base_node, partial_name in zip(bases, basenames):
        yield get_full_basename(base_node, partial_name)
|
Resolve the base nodes and partial names of a class to full names .
|
10,353
|
def get_assign_value(node):
    """Get the name and value of the assignment represented by *node*.

    :returns: a ``(name, value)`` tuple when the node assigns to exactly one
        name or attribute target, otherwise ``None``.
    """
    try:
        targets = node.targets
    except AttributeError:
        # AnnAssign nodes have a single `target` instead of `targets`.
        targets = [node.target]
    if len(targets) == 1:
        target = targets[0]
        if isinstance(target, astroid.nodes.AssignName):
            name = target.name
        elif isinstance(target, astroid.nodes.AssignAttr):
            name = target.attrname
        else:
            # Tuple unpacking / subscripts are not supported.
            return None
        return (name, _get_const_values(node.value))
    return None
|
Get the name and value of the assignment of the given node .
|
10,354
|
def get_assign_annotation(node):
    """Get the type annotation of the given assignment node.

    Handles both ``AnnAssign`` (``annotation`` attribute) and classic
    ``Assign`` nodes carrying a type comment (``type_annotation``).

    :returns: the annotation, or ``None`` when there is none
    """
    annotation = None
    annotation_node = None
    try:
        annotation_node = node.annotation
    except AttributeError:
        annotation_node = getattr(node, "type_annotation", None)
    if annotation_node:
        if isinstance(annotation_node, astroid.nodes.Const):
            # NOTE(review): this reads `node.value` (the assigned value),
            # not `annotation_node.value` — looks suspicious for string
            # annotations; confirm intent before changing.
            annotation = node.value
        else:
            annotation = annotation_node.as_string()
    return annotation
|
Get the type annotation of the assignment of the given node .
|
10,355
|
def is_decorated_with_property(node):
    """Check whether the function node carries a ``@property``-style decorator."""
    decorators = node.decorators.nodes if node.decorators else ()
    for decorator in decorators:
        # Only bare names (e.g. `@property`) are considered here.
        if not isinstance(decorator, astroid.Name):
            continue
        try:
            if _is_property_decorator(decorator):
                return True
        except astroid.InferenceError:
            # Undecidable decorators are skipped, not treated as properties.
            pass
    return False
|
Check if the function is decorated as a property .
|
10,356
|
def is_decorated_with_property_setter(node):
    """Check whether the function node is decorated with ``@<name>.setter``."""
    if not node.decorators:
        return False
    return any(
        isinstance(decorator, astroid.nodes.Attribute)
        and decorator.attrname == "setter"
        for decorator in node.decorators.nodes
    )
|
Check if the function is decorated as a property setter .
|
10,357
|
def is_constructor(node):
    """Check whether the function node is an ``__init__`` defined in a class body."""
    parent = node.parent
    # Keep short-circuit order: parent may be None, scope() needs a parent.
    return (
        parent
        and isinstance(parent.scope(), astroid.nodes.ClassDef)
        and node.name == "__init__"
    )
|
Check if the function is a constructor .
|
10,358
|
def is_exception(node):
    """Check whether a class node is (or derives from) a built-in exception."""
    is_builtin_exception = (
        node.name in ("Exception", "BaseException")
        and node.root().name == _EXCEPTIONS_MODULE
    )
    if is_builtin_exception:
        return True
    if not hasattr(node, "ancestors"):
        return False
    # Recurse over the full ancestor chain.
    return any(is_exception(ancestor) for ancestor in node.ancestors(recurs=True))
|
Check if a class is an exception .
|
10,359
|
def is_local_import_from(node, package_name):
    """Check whether *node* is an import from the local package *package_name*."""
    if not isinstance(node, astroid.ImportFrom):
        return False
    is_relative = node.level
    is_same_package = node.modname == package_name
    is_subpackage = node.modname.startswith(package_name + ".")
    # Truthy `level` is returned as-is, matching the original expression.
    return is_relative or is_same_package or is_subpackage
|
Check if a node is an import from the local package .
|
10,360
|
def run_autoapi(app):
    """Load AutoAPI data from the filesystem and render it (Sphinx builder-inited hook).

    :param app: the Sphinx application
    :raises ExtensionError: when ``autoapi_dirs`` is unset or a directory
        does not exist
    """
    if not app.config.autoapi_dirs:
        raise ExtensionError("You must configure an autoapi_dirs setting")
    # Normalise the configured directories; a bare string is allowed.
    normalized_dirs = []
    autoapi_dirs = app.config.autoapi_dirs
    if isinstance(autoapi_dirs, str):
        autoapi_dirs = [autoapi_dirs]
    for path in autoapi_dirs:
        if os.path.isabs(path):
            normalized_dirs.append(path)
        else:
            # Relative paths are taken relative to conf.py's directory.
            normalized_dirs.append(os.path.normpath(os.path.join(app.confdir, path)))
    for _dir in normalized_dirs:
        if not os.path.exists(_dir):
            raise ExtensionError(
                "AutoAPI Directory `{dir}` not found. "
                "Please check your `autoapi_dirs` setting.".format(dir=_dir)
            )
    normalized_root = os.path.normpath(
        os.path.join(app.confdir, app.config.autoapi_root)
    )
    url_root = os.path.join("/", app.config.autoapi_root)
    # Pick the mapper class for the configured language backend.
    sphinx_mapper = default_backend_mapping[app.config.autoapi_type]
    sphinx_mapper_obj = sphinx_mapper(
        app, template_dir=app.config.autoapi_template_dir, url_root=url_root
    )
    app.env.autoapi_mapper = sphinx_mapper_obj
    # File/ignore patterns: explicit config wins, else per-backend defaults.
    if app.config.autoapi_file_patterns:
        file_patterns = app.config.autoapi_file_patterns
    else:
        file_patterns = default_file_mapping.get(app.config.autoapi_type, [])
    if app.config.autoapi_ignore:
        ignore_patterns = app.config.autoapi_ignore
    else:
        ignore_patterns = default_ignore_patterns.get(app.config.autoapi_type, [])
    # Choose the output suffix from the project's configured source suffixes.
    if ".rst" in app.config.source_suffix:
        out_suffix = ".rst"
    elif ".txt" in app.config.source_suffix:
        out_suffix = ".txt"
    else:
        out_suffix = app.config.source_suffix[0]
    LOGGER.info(bold("[AutoAPI] ") + darkgreen("Loading Data"))
    sphinx_mapper_obj.load(
        patterns=file_patterns, dirs=normalized_dirs, ignore=ignore_patterns
    )
    LOGGER.info(bold("[AutoAPI] ") + darkgreen("Mapping Data"))
    sphinx_mapper_obj.map(options=app.config.autoapi_options)
    if app.config.autoapi_generate_api_docs:
        LOGGER.info(bold("[AutoAPI] ") + darkgreen("Rendering Data"))
        sphinx_mapper_obj.output_rst(root=normalized_root, source_suffix=out_suffix)
|
Load AutoAPI data from the filesystem .
|
10,361
|
def doctree_read(app, doctree):
    """Inject the AutoAPI index into the root TOC tree dynamically.

    Only acts on the ``index`` document, and only when toctrees exist,
    API docs are generated, and the toctree entry option is enabled.
    """
    if app.env.docname == "index":
        all_docs = set()
        insert = True
        nodes = doctree.traverse(toctree)
        toc_entry = "%s/index" % app.config.autoapi_root
        add_entry = (
            nodes
            and app.config.autoapi_generate_api_docs
            and app.config.autoapi_add_toctree_entry
        )
        if not add_entry:
            return
        # Collect every document already referenced by any toctree.
        for node in nodes:
            for entry in node["entries"]:
                all_docs.add(entry[1])
        # Skip insertion if an autoapi entry is already present.
        for doc in all_docs:
            if doc.find(app.config.autoapi_root) != -1:
                insert = False
        if insert and app.config.autoapi_add_toctree_entry:
            # Append to the last toctree on the index page.
            nodes[-1]["entries"].append((None, u"%s/index" % app.config.autoapi_root))
            nodes[-1]["includefiles"].append(u"%s/index" % app.config.autoapi_root)
            message_prefix = bold("[AutoAPI] ")
            message = darkgreen(
                "Adding AutoAPI TOCTree [{0}] to index.rst".format(toc_entry)
            )
            LOGGER.info(message_prefix + message)
|
Inject AutoAPI into the TOC Tree dynamically .
|
10,362
|
def _expand_wildcard_placeholder(original_module, originals_map, placeholder):
    """Expand a ``from x import *`` placeholder into named placeholders.

    :param original_module: the module data the wildcard imports from
    :param originals_map: mapping of child name -> child data in that module
    :param placeholder: the wildcard placeholder to expand
    :returns: a list of new placeholder dicts, one per exported name
    """
    originals = originals_map.values()
    if original_module["all"] is not None:
        # __all__ restricts (and orders) what the wildcard exports.
        originals = []
        for name in original_module["all"]:
            if name == "__all__":
                continue
            if name not in originals_map:
                msg = "Invalid __all__ entry {0} in {1}".format(
                    name, original_module["name"]
                )
                LOGGER.warning(msg)
                continue
            originals.append(originals_map[name])
    placeholders = []
    for original in originals:
        # Substitute the concrete name into the wildcard paths.
        new_full_name = placeholder["full_name"].replace("*", original["name"])
        new_original_path = placeholder["original_path"].replace("*", original["name"])
        # Re-exported objects keep their true original path.
        if "original_path" in original:
            new_original_path = original["original_path"]
        new_placeholder = dict(
            placeholder,
            name=original["name"],
            full_name=new_full_name,
            original_path=new_original_path,
        )
        placeholders.append(new_placeholder)
    return placeholders
|
Expand a wildcard placeholder to a sequence of named placeholders .
|
10,363
|
def _resolve_module_placeholders(modules, module_name, visit_path, resolved):
    """Resolve all placeholder children under a module.

    :param modules: mapping of module name -> (module data, children-by-name)
    :param module_name: the module to resolve
    :param visit_path: ordered mapping used as the current traversal path,
        for cyclic-import detection
    :param resolved: set of module names that are already fully resolved
    """
    if module_name in resolved:
        return
    visit_path[module_name] = True
    module, children = modules[module_name]
    # Iterate over a copy: children are removed/replaced during resolution.
    for child in list(children.values()):
        if child["type"] != "placeholder":
            continue
        # Imports of whole modules are dropped; modules are documented
        # at their own location.
        if child["original_path"] in modules:
            module["children"].remove(child)
            children.pop(child["name"])
            continue
        imported_from, original_name = child["original_path"].rsplit(".", 1)
        if imported_from in visit_path:
            msg = "Cannot resolve cyclic import: {0}, {1}".format(
                ", ".join(visit_path), imported_from
            )
            LOGGER.warning(msg)
            module["children"].remove(child)
            children.pop(child["name"])
            continue
        if imported_from not in modules:
            msg = "Cannot resolve import of unknown module {0} in {1}".format(
                imported_from, module_name
            )
            LOGGER.warning(msg)
            module["children"].remove(child)
            children.pop(child["name"])
            continue
        # Resolve the source module first so its placeholders are concrete.
        _resolve_module_placeholders(modules, imported_from, visit_path, resolved)
        if original_name == "*":
            original_module, originals_map = modules[imported_from]
            # Expand the wildcard in place, preserving the child's position.
            new_placeholders = _expand_wildcard_placeholder(
                original_module, originals_map, child
            )
            child_index = module["children"].index(child)
            module["children"][child_index:child_index + 1] = new_placeholders
            children.pop(child["name"])
            for new_placeholder in new_placeholders:
                if new_placeholder["name"] not in children:
                    children[new_placeholder["name"]] = new_placeholder
                original = originals_map[new_placeholder["name"]]
                _resolve_placeholder(new_placeholder, original)
        elif original_name not in modules[imported_from][1]:
            msg = "Cannot resolve import of {0} in {1}".format(
                child["original_path"], module_name
            )
            LOGGER.warning(msg)
            module["children"].remove(child)
            children.pop(child["name"])
            continue
        else:
            original = modules[imported_from][1][original_name]
            _resolve_placeholder(child, original)
    del visit_path[module_name]
    resolved.add(module_name)
|
Resolve all placeholder children under a module .
|
10,364
|
def _resolve_placeholder ( placeholder , original ) : new = copy . deepcopy ( original ) new [ "name" ] = placeholder [ "name" ] new [ "full_name" ] = placeholder [ "full_name" ] new [ "original_path" ] = original [ "full_name" ] new . pop ( "from_line_no" , None ) new . pop ( "to_line_no" , None ) stack = list ( new . get ( "children" , ( ) ) ) while stack : child = stack . pop ( ) assert child [ "full_name" ] . startswith ( original [ "full_name" ] ) suffix = child [ "full_name" ] [ len ( original [ "full_name" ] ) : ] child [ "full_name" ] = new [ "full_name" ] + suffix child . pop ( "from_line_no" , None ) child . pop ( "to_line_no" , None ) stack . extend ( child . get ( "children" , ( ) ) ) placeholder . clear ( ) placeholder . update ( new )
|
Resolve a placeholder to the given original object .
|
10,365
|
def load(self, patterns, dirs, ignore=None):
    """Load parsed objects from the filesystem into ``self.paths``.

    For package directories (those containing ``__init__.py``) paths are
    recorded relative to the parent so the package name is preserved.
    """
    for directory in dirs:
        package_root = directory
        if os.path.exists(os.path.join(directory, "__init__.py")):
            package_root = os.path.abspath(os.path.join(directory, os.pardir))
        for found_path in self.find_files(patterns=patterns, dirs=[directory], ignore=ignore):
            parsed = self.read_file(path=found_path)
            if not parsed:
                continue
            parsed["relative_path"] = os.path.relpath(found_path, package_root)
            self.paths[found_path] = parsed
|
Load objects from the filesystem into the paths dictionary
|
10,366
|
def _resolve_placeholders(self):
    """Resolve objects that have been imported from elsewhere.

    Builds a name -> (module, children-by-name) index over all parsed
    modules, then resolves each module's placeholder children.
    """
    modules = {}
    for module in self.paths.values():
        children = {child["name"]: child for child in module["children"]}
        modules[module["name"]] = (module, children)
    resolved = set()
    for module_name in modules:
        # An ordered mapping doubles as the traversal path for cycle detection.
        visit_path = collections.OrderedDict()
        _resolve_module_placeholders(modules, module_name, visit_path, resolved)
|
Resolve objects that have been imported from elsewhere .
|
10,367
|
def create_class(self, data, options=None, **kwargs):
    """Create mapper object(s) from the parsed *data* dict.

    Yields an instance of the Python* class matching ``data["type"]``,
    with children created recursively.  Unknown types are logged and
    skipped (nothing is yielded for them).
    """
    obj_map = dict(
        (cls.type, cls)
        for cls in [
            PythonClass,
            PythonFunction,
            PythonModule,
            PythonMethod,
            PythonPackage,
            PythonAttribute,
            PythonData,
            PythonException,
        ]
    )
    try:
        cls = obj_map[data["type"]]
    except KeyError:
        LOGGER.warning("Unknown type: %s" % data["type"])
    else:
        obj = cls(
            data,
            class_content=self.app.config.autoapi_python_class_content,
            options=self.app.config.autoapi_options,
            jinja_env=self.jinja_env,
            url_root=self.url_root,
            **kwargs
        )
        # Let autodoc-style extensions post-process the docstring lines.
        lines = sphinx.util.docstrings.prepare_docstring(obj.docstring)
        if lines and "autodoc-process-docstring" in self.app.events.events:
            self.app.emit(
                "autodoc-process-docstring",
                cls.type,
                obj.name,
                None,
                None,
                lines,
            )
        obj.docstring = "\n".join(lines)
        for child_data in data.get("children", []):
            for child_obj in self.create_class(child_data, options=options, **kwargs):
                obj.children.append(child_obj)
        yield obj
|
Create a class from the passed in data
|
10,368
|
def _build_toc_node(docname, anchor="anchor", text="test text", bullet=False):
    """Create the node structure that Sphinx expects for a TOC tree entry.

    :param bullet: when True, wrap the entry in its own bullet list
    """
    label = nodes.Text(text, text)
    reference = nodes.reference(
        "",
        "",
        label,
        internal=True,
        refuri=docname,
        anchorname="#" + anchor,
    )
    paragraph = addnodes.compact_paragraph("", "", reference)
    item = nodes.list_item("", paragraph)
    if bullet:
        return nodes.bullet_list("", item)
    return item
|
Create the node structure that Sphinx expects for TOC Tree entries .
|
10,369
|
def _traverse_parent ( node , objtypes ) : curr_node = node . parent while curr_node is not None : if isinstance ( curr_node , objtypes ) : return curr_node curr_node = curr_node . parent return None
|
Traverse up the node's parents until a node of one of the referenced objtypes is found.
|
10,370
|
def _find_toc_node(toc, ref_id, objtype):
    """Find the TOC reference node for *ref_id*, or None.

    Sections are matched by refuri or anchor; desc (domain) entries are
    matched by anchor only.
    """
    anchor = "#" + ref_id
    for candidate in toc.traverse(nodes.reference):
        attrs = candidate.attributes
        if objtype == nodes.section and (
            attrs["refuri"] == ref_id or attrs["anchorname"] == anchor
        ):
            return candidate
        if objtype == addnodes.desc and attrs["anchorname"] == anchor:
            return candidate
    return None
|
Find the actual TOC node for a ref_id .
|
10,371
|
def _get_toc_reference(app, node, toc, docname):
    """Map a doctree node to its reference node in the TOC, or None."""
    if isinstance(node, nodes.section) and isinstance(node.parent, nodes.document):
        # Top-level section: the TOC references it by document name.
        ref_id = docname
        toc_reference = _find_toc_node(toc, ref_id, nodes.section)
    elif isinstance(node, nodes.section):
        # Nested section: referenced by its first anchor id.
        ref_id = node.attributes["ids"][0]
        toc_reference = _find_toc_node(toc, ref_id, nodes.section)
    else:
        try:
            # Domain object: the anchor lives on its first child (signature).
            ref_id = node.children[0].attributes["ids"][0]
            toc_reference = _find_toc_node(toc, ref_id, addnodes.desc)
        except (KeyError, IndexError) as e:
            LOGGER.warning("Invalid desc node: %s" % e)
            toc_reference = None
    return toc_reference
|
Logic that maps a specific node to its part of the toctree.
|
10,372
|
def add_domain_to_toctree(app, doctree, docname):
    """Add domain objects (classes, functions, ...) to the toctree dynamically."""
    toc = app.env.tocs[docname]
    for desc_node in doctree.traverse(addnodes.desc):
        try:
            ref_id = desc_node.children[0].attributes["ids"][0]
        except (KeyError, IndexError) as e:
            LOGGER.warning("Invalid desc node: %s" % e)
            continue
        try:
            # Use the short name, e.g. "method" out of "Class.method(args)".
            ref_text = desc_node[0].attributes["fullname"].split(".")[-1].split("(")[0]
        except (KeyError, IndexError):
            # Not all domains carry `fullname`; fall back to the node text.
            ref_text = desc_node[0].astext().split(".")[-1].split("(")[0]
        parent_node = _traverse_parent(
            node=desc_node, objtypes=(addnodes.desc, nodes.section)
        )
        if parent_node:
            toc_reference = _get_toc_reference(app, parent_node, toc, docname)
            if toc_reference:
                # Insert under the last bullet list containing the reference.
                toc_insertion_point = _traverse_parent(
                    toc_reference, nodes.bullet_list
                )[-1]
                if toc_insertion_point and isinstance(
                    toc_insertion_point[0], nodes.bullet_list
                ):
                    # A nested bullet list already exists: append to it.
                    new_insert = toc_insertion_point[0]
                    to_add = _build_toc_node(docname, anchor=ref_id, text=ref_text)
                    new_insert.append(to_add)
                else:
                    # Create a fresh nested bullet list for this entry.
                    to_add = _build_toc_node(
                        docname, anchor=ref_id, text=ref_text, bullet=True
                    )
                    toc_insertion_point.append(to_add)
|
Add domain objects to the toctree dynamically .
|
10,373
|
def warn(self, msg):
    """Record a Sphinx warning for the directive's current line."""
    reporter = self.state.document.reporter
    self.warnings.append(reporter.warning(msg, line=self.lineno))
|
Add a warning message .
|
10,374
|
def _get_names ( self ) : for line in self . content : line = line . strip ( ) if line and re . search ( "^[a-zA-Z0-9]" , line ) : yield line
|
Get the names of the objects to include in the table .
|
10,375
|
def humanize_duration(duration):
    """Return a humanized string for a timedelta, e.g. "1 day, 2 hours".

    Falls back to "< 1 second" for durations below one second.
    """
    days = duration.days
    hours = int(duration.seconds / 3600)
    minutes = int(duration.seconds % 3600 / 60)
    seconds = int(duration.seconds % 3600 % 60)
    components = (
        (days, 'day,days'),
        (hours, 'hour,hours'),
        (minutes, 'minute,minutes'),
        (seconds, 'second,seconds'),
    )
    parts = []
    for amount, names in components:
        if amount > 0:
            parts.append(u'%s %s' % (amount, pluralize(amount, _(names))))
    if not parts:
        return _('< 1 second')
    return ', '.join(parts)
|
Returns a humanized string representing time difference
|
10,376
|
def run_cron_with_cache_check(cron_class, force=False, silent=False):
    """Run *cron_class* under a CronJobManager, which performs the cache check.

    :param cron_class: the cron job class to run (passed to CronJobManager)
    :param force: when True, run regardless of the schedule
    :param silent: suppress output (forwarded to CronJobManager)
    """
    with CronJobManager(cron_class, silent) as manager:
        manager.run(force)
|
Checks the cache and runs the cron or not .
|
10,377
|
def clear_old_log_entries():
    """Delete CronJobLog rows older than DJANGO_CRON_DELETE_LOGS_OLDER_THAN days.

    No-op when the setting is absent.
    """
    if hasattr(settings, 'DJANGO_CRON_DELETE_LOGS_OLDER_THAN'):
        delta = timedelta(days=settings.DJANGO_CRON_DELETE_LOGS_OLDER_THAN)
        CronJobLog.objects.filter(end_time__lt=get_current_time() - delta).delete()
|
Removes older log entries if the appropriate setting has been set
|
10,378
|
def should_run_now(self, force=False):
    """Return whether this cron job should run now.

    Two scheduling modes are supported (based on ``cron_job.schedule``):

    - ``run_every_mins``: run when enough time has passed since the last
      successful run, with optional faster retry after a failure.
    - ``run_at_times``: run once per day for each configured "HH:MM" time,
      once that time has passed.

    Side effects: sets ``self.user_time`` (the matched "HH:MM" slot, if
    any) and ``self.previously_ran_successful_cron``.
    """
    from django_cron.models import CronJobLog
    cron_job = self.cron_job
    self.user_time = None
    self.previously_ran_successful_cron = None
    if force:
        return True
    if cron_job.schedule.run_every_mins is not None:
        # Interval mode.
        last_job = None
        try:
            last_job = CronJobLog.objects.filter(
                code=cron_job.code
            ).latest('start_time')
        except CronJobLog.DoesNotExist:
            pass
        if last_job:
            # After a failure, retry on the (usually shorter) retry interval.
            if not last_job.is_success and cron_job.schedule.retry_after_failure_mins:
                if get_current_time() > last_job.start_time + timedelta(
                    minutes=cron_job.schedule.retry_after_failure_mins
                ):
                    return True
                else:
                    return False
        try:
            self.previously_ran_successful_cron = CronJobLog.objects.filter(
                code=cron_job.code, is_success=True, ran_at_time__isnull=True
            ).latest('start_time')
        except CronJobLog.DoesNotExist:
            pass
        if self.previously_ran_successful_cron:
            if get_current_time() > self.previously_ran_successful_cron.start_time + timedelta(
                minutes=cron_job.schedule.run_every_mins
            ):
                return True
        else:
            # Never ran successfully before: run now.
            return True
    if cron_job.schedule.run_at_times:
        # Fixed-times mode: run once per configured time per day.
        for time_data in cron_job.schedule.run_at_times:
            user_time = time.strptime(time_data, "%H:%M")
            now = get_current_time()
            actual_time = time.strptime("%s:%s" % (now.hour, now.minute), "%H:%M")
            if actual_time >= user_time:
                # Skip the slot if it already ran successfully today.
                qset = CronJobLog.objects.filter(
                    code=cron_job.code, ran_at_time=time_data, is_success=True
                ).filter(
                    Q(start_time__gt=now)
                    | Q(end_time__gte=now.replace(hour=0, minute=0, second=0, microsecond=0))
                )
                if not qset:
                    self.user_time = time_data
                    return True
    return False
|
Returns a boolean determining whether this cron should run now or not!
|
10,379
|
def lock(self):
    """Try to mark this job as running via a cache flag.

    Returns False when the flag is already set (another run in progress),
    otherwise sets it (with ``self.timeout``) and returns True.
    """
    already_running = self.cache.get(self.lock_name)
    if already_running:
        return False
    self.cache.set(self.lock_name, timezone.now(), self.timeout)
    return True
|
This method sets a cache variable to mark current job as already running .
|
10,380
|
def get_invitation_model():
    """Return the Invitation model that is active in this project.

    Resolves ``app_settings.INVITATION_MODEL`` ("app_label.model_name")
    through the Django app registry.

    :raises ImproperlyConfigured: when the setting is malformed or refers
        to a model that is not installed
    """
    path = app_settings.INVITATION_MODEL
    try:
        return django_apps.get_model(path)
    except ValueError:
        raise ImproperlyConfigured(
            "path must be of the form 'app_label.model_name'"
        )
    except LookupError:
        raise ImproperlyConfigured(
            "path refers to model '%s' that\
 has not been installed" % app_settings.INVITATION_MODEL
        )
|
Returns the Invitation model that is active in this project .
|
10,381
|
async def list(self, **params) -> Mapping:
    """Return the list of images known to the daemon (``GET /images/json``)."""
    return await self.docker._query_json("images/json", "GET", params=params)
|
List of images
|
10,382
|
async def inspect(self, name: str) -> Mapping:
    """Return low-level information about the image *name*."""
    endpoint = "images/{name}/json".format(name=name)
    return await self.docker._query_json(endpoint)
|
Return low - level information about an image
|
10,383
|
async def pull(self, from_image: str, *, auth: Optional[Union[MutableMapping, str, bytes]] = None, tag: str = None, repo: str = None, stream: bool = False) -> Mapping:
    """Similar to ``docker pull``: pull an image locally.

    :param from_image: image reference; must include a registry host when
        *auth* is given
    :param auth: registry credentials (dict, or an encoded auth string)
    :param tag: specific tag to pull
    :param repo: repository to pull into
    :param stream: when True, return a streaming result
    :raises ValueError: when *auth* is given but the image reference has
        no registry host
    """
    image = from_image
    params = {"fromImage": image}
    headers = {}
    if repo:
        params["repo"] = repo
    if tag:
        params["tag"] = tag
    if auth is not None:
        # Credentials are scoped to a registry host, so one must be present
        # in the image reference.
        registry, has_registry_host, _ = image.partition("/")
        if not has_registry_host:
            raise ValueError(
                "Image should have registry host "
                "when auth information is provided"
            )
        # NOTE(review): the host is taken as everything before the first
        # "/", which also matches plain namespaces like "user/image".
        headers["X-Registry-Auth"] = compose_auth_header(auth, registry)
    response = await self.docker._query(
        "images/create", "POST", params=params, headers=headers
    )
    return await json_stream_result(response, stream=stream)
|
Similar to docker pull pull an image locally
|
10,384
|
async def tag(self, name: str, repo: str, *, tag: str = None) -> bool:
    """Tag image *name* into *repo* (optionally with *tag*); returns True."""
    query = {"repo": repo}
    if tag:
        query["tag"] = tag
    endpoint = "images/{name}/tag".format(name=name)
    await self.docker._query(
        endpoint,
        "POST",
        params=query,
        headers={"content-type": "application/json"},
    )
    return True
|
Tag the given image so that it becomes part of a repository .
|
10,385
|
async def delete(self, name: str, *, force: bool = False, noprune: bool = False) -> List:
    """Remove image *name*; returns the daemon's deletion report list.

    :param force: remove even if the image is in use
    :param noprune: keep untagged parent images
    """
    endpoint = "images/{name}".format(name=name)
    return await self.docker._query_json(
        endpoint, "DELETE", params={"force": force, "noprune": noprune}
    )
|
Remove an image along with any untagged parent images that were referenced by that image
|
10,386
|
async def build(self, *, remote: str = None, fileobj: BinaryIO = None, path_dockerfile: str = None, tag: str = None, quiet: bool = False, nocache: bool = False, buildargs: Mapping = None, pull: bool = False, rm: bool = True, forcerm: bool = False, labels: Mapping = None, stream: bool = False, encoding: str = None) -> Mapping:
    """Build an image from a remote context or a tar file object.

    :param remote: URL of the build context; mutually exclusive with *fileobj*
    :param fileobj: binary file object with a tar build context; requires
        *encoding* (e.g. "gzip" or "identity")
    :param path_dockerfile: path to the Dockerfile within the context
    :param tag: tag for the built image
    :param buildargs: build-time variables (JSON-encoded for the daemon)
    :param labels: labels to set on the image
    :param stream: when True, return a streaming result
    :raises ValueError: when neither/both of remote and fileobj are given,
        or fileobj is given without an encoding
    """
    local_context = None
    headers = {}
    params = {
        "t": tag,
        "rm": rm,
        "q": quiet,
        "pull": pull,
        "remote": remote,
        "nocache": nocache,
        "forcerm": forcerm,
        "dockerfile": path_dockerfile,
    }
    # Exactly one context source must be provided.  (Fix: the original
    # repeated this None/None check a second time after the other
    # validations, making the second raise unreachable dead code.)
    if remote is None and fileobj is None:
        raise ValueError("You need to specify either remote or fileobj")
    if fileobj and remote:
        raise ValueError("You cannot specify both fileobj and remote")
    if fileobj and not encoding:
        raise ValueError("You need to specify an encoding")
    if fileobj:
        # A tar context is uploaded in the request body.
        local_context = fileobj.read()
        headers["content-type"] = "application/x-tar"
        # `encoding` is guaranteed non-empty here by the validation above.
        headers["Content-Encoding"] = encoding
    if buildargs:
        params.update({"buildargs": json.dumps(buildargs)})
    if labels:
        params.update({"labels": json.dumps(labels)})
    response = await self.docker._query(
        "build",
        "POST",
        params=clean_map(params),
        headers=headers,
        data=local_context,
    )
    return await json_stream_result(response, stream=stream)
|
Build an image given a remote Dockerfile or a file object with a Dockerfile inside
|
10,387
|
async def export_image(self, name: str):
    """Get a tarball of the image *name* (``GET /images/{name}/get``).

    Returns the raw response content.
    """
    resp = await self.docker._query("images/{name}/get".format(name=name), "GET")
    return resp.content
|
Get a tarball of an image by name or id .
|
10,388
|
async def import_image(self, data, stream: bool = False):
    """Import a tarball of an image into docker (``POST /images/load``)."""
    tar_headers = {"Content-Type": "application/x-tar"}
    response = await self.docker._query_chunked_post(
        "images/load", "POST", data=data, headers=tar_headers
    )
    return await json_stream_result(response, stream=stream)
|
Import tarball of image to docker .
|
10,389
|
async def parse_result(response, response_type=None, *, encoding="utf-8"):
    """Convert *response* to native objects.

    Uses the given *response_type* ("json", "tar", "text", anything else
    meaning raw bytes), or auto-detects it from the Content-Type header.
    An empty body with no Content-Type yields "".
    """
    if response_type is None:
        ct = response.headers.get("content-type")
        if ct is None:
            # No Content-Type: acceptable only for an empty body.
            cl = response.headers.get("content-length")
            if cl is None or cl == "0":
                return ""
            raise TypeError(
                "Cannot auto-detect response type due to missing Content-Type header."
            )
        main_type, sub_type, extras = parse_content_type(ct)
        if sub_type == "json":
            response_type = "json"
        elif sub_type == "x-tar":
            response_type = "tar"
        elif (main_type, sub_type) == ("text", "plain"):
            response_type = "text"
            # Honour the charset from the header when present.
            encoding = extras.get("charset", encoding)
        else:
            raise TypeError("Unrecognized response type: {ct}".format(ct=ct))
    if response_type == "tar":
        raw = await response.read()
        return tarfile.open(mode="r", fileobj=BytesIO(raw))
    if response_type == "json":
        return await response.json(encoding=encoding)
    if response_type == "text":
        return await response.text(encoding=encoding)
    return await response.read()
|
Convert the response to native objects by the given response type or the auto - detected HTTP content - type . It also ensures release of the response object .
|
10,390
|
def clean_map(obj: Mapping[Any, Any]) -> Mapping[Any, Any]:
    """Return a new dict copied from *obj* without the None-valued entries."""
    cleaned = {}
    for key, value in obj.items():
        if value is not None:
            cleaned[key] = value
    return cleaned
|
Return a new copied dictionary without the keys with None values from the given Mapping object .
|
10,391
|
def clean_networks(networks: Iterable[str] = None) -> Optional[Iterable[str]]:
    """Normalize network names into ``{"Target": name}`` mappings.

    Falsy input is returned unchanged; non-list input raises TypeError.
    Returns a new list.
    """
    if not networks:
        return networks
    if not isinstance(networks, list):
        raise TypeError("networks parameter must be a list.")
    return [
        {"Target": entry} if isinstance(entry, str) else entry
        for entry in networks
    ]
|
Cleans the values inside networks Returns a new list
|
10,392
|
async def inspect(self, task_id: str) -> Mapping[str, Any]:
    """Return information about the task *task_id* (``GET /tasks/{id}``)."""
    endpoint = "tasks/{task_id}".format(task_id=task_id)
    return await self.docker._query_json(endpoint, method="GET")
|
Return info about a task
|
10,393
|
async def run(self, config, *, name=None):
    """Create and start a container.

    If creation fails with 404 and *config* names an image, the image is
    pulled and the create retried once.  A start failure is re-raised as
    DockerContainerError so callers get the container id for debugging.
    """
    try:
        container = await self.create(config, name=name)
    except DockerError as err:
        if err.status == 404 and "Image" in config:
            # Image not present locally: pull it and retry once.
            await self.docker.pull(config["Image"])
            container = await self.create(config, name=name)
        else:
            raise err
    try:
        await container.start()
    except DockerError as err:
        raise DockerContainerError(
            err.status, {"message": err.message}, container["id"]
        )
    return container
|
Create and start a container .
|
10,394
|
def subscribe(self, *, create_task=True, **params):
    """Subscribe to the Docker events channel.

    Pass ``create_task=False`` to prevent automatically spawning the
    background task that listens for events.
    """
    needs_task = create_task and not self.task
    if needs_task:
        self.task = asyncio.ensure_future(self.run(**params))
    return self.channel.subscribe()
|
Subscribes to the Docker events channel . Use the keyword argument create_task = False to prevent automatically spawning the background tasks that listen to the events .
|
10,395
|
async def run(self, **params):
    """Query the events endpoint of the Docker daemon and publish each
    received event to the channel.

    Only one listener may run at a time; a second call warns and returns.
    A final ``None`` is always published so subscribers can detect the
    end of the stream.
    """
    if self.json_stream:
        # Fix: was `stackelevel=2`, which is not a valid keyword for
        # warnings.warn and raised TypeError instead of warning.
        warnings.warn("already running", RuntimeWarning, stacklevel=2)
        return
    # Force streaming regardless of caller-supplied params.
    forced_params = {"stream": True}
    params = ChainMap(forced_params, params)
    try:
        response = await self.docker._query(
            "events", method="GET", params=params, timeout=0
        )
        self.json_stream = await json_stream_result(
            response, self._transform_event, human_bool(params["stream"])
        )
        try:
            async for data in self.json_stream:
                await self.channel.publish(data)
        finally:
            if self.json_stream is not None:
                await self.json_stream._close()
                self.json_stream = None
    finally:
        # Signal end-of-stream to subscribers.
        await self.channel.publish(None)
|
Query the events endpoint of the Docker daemon .
|
10,396
|
async def inspect(self, *, node_id: str) -> Mapping[str, Any]:
    """Inspect the swarm node *node_id* (``GET /nodes/{id}``)."""
    endpoint = "nodes/{node_id}".format(node_id=node_id)
    return await self.docker._query_json(endpoint, method="GET")
|
Inspect a node
|
10,397
|
async def update(self, *, node_id: str, version: int, spec: Mapping[str, Any]) -> Mapping[str, Any]:
    """Update the spec of a swarm node (``POST /nodes/{id}/update``).

    :param node_id: the node to update
    :param version: the node's current version, for optimistic locking
    :param spec: the new node spec; ``Role`` must be worker/manager and
        ``Availability`` must be active/pause/drain when present
    """
    params = {"version": version}
    # NOTE(review): `assert` validation is stripped under `python -O`;
    # consider ValueError instead — confirm callers before changing.
    if "Role" in spec:
        assert spec["Role"] in {"worker", "manager"}
    if "Availability" in spec:
        assert spec["Availability"] in {"active", "pause", "drain"}
    response = await self.docker._query_json(
        "nodes/{node_id}/update".format(node_id=node_id),
        method="POST",
        params=params,
        data=spec,
    )
    return response
|
Update the spec of a node .
|
10,398
|
async def remove(self, *, node_id: str, force: bool = False) -> Mapping[str, Any]:
    """Remove the node *node_id* from the swarm (``DELETE /nodes/{id}``)."""
    endpoint = "nodes/{node_id}".format(node_id=node_id)
    return await self.docker._query_json(
        endpoint, method="DELETE", params={"force": force}
    )
|
Remove a node from a swarm .
|
10,399
|
async def _query(self, path, method="GET", *, params=None, data=None, headers=None, timeout=None, chunked=None):
    """Perform an HTTP request against the daemon and return the response.

    The caller is responsible for releasing the response object.

    :raises DockerError: for any 4xx/5xx response (with the decoded body)
    """
    url = self._canonicalize_url(path)
    # Default to JSON unless the caller set an explicit content type.
    if headers and "content-type" not in headers:
        headers["content-type"] = "application/json"
    try:
        response = await self.session.request(
            method,
            url,
            params=httpize(params),
            headers=headers,
            data=data,
            timeout=timeout,
            chunked=chunked,
        )
    except asyncio.TimeoutError:
        raise
    if (response.status // 100) in [4, 5]:
        # Error: consume and close the response, then raise with its body.
        what = await response.read()
        content_type = response.headers.get("content-type", "")
        response.close()
        if content_type == "application/json":
            raise DockerError(response.status, json.loads(what.decode("utf8")))
        else:
            raise DockerError(response.status, {"message": what.decode("utf8")})
    return response
|
Get the response object by performing the HTTP request . The caller is responsible to finalize the response object .
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.