idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
8,000 | def param ( self , key , default = None ) : if key in self . parameters : return self . parameters [ key ] return default | for accessing global parameters |
8,001 | def generator ( self , gen , * args , ** kwargs ) : with self ( * args , ** kwargs ) : for i in gen : yield i | Use this function to enter and exit the context at the beginning and end of a generator . |
8,002 | def validate_url ( self , original_string ) : pieces = urlparse . urlparse ( original_string ) try : if self . path_only : assert not any ( [ pieces . scheme , pieces . netloc ] ) assert pieces . path else : assert all ( [ pieces . scheme , pieces . netloc ] ) valid_chars = set ( string . letters + string . digits + ":-_." ) assert set ( pieces . netloc ) <= valid_chars assert pieces . scheme in [ 'http' , 'https' ] except AssertionError as e : raise ArgumentError ( self . item_name , "The input you've provided is not a valid URL." ) return pieces | Returns the original string if it was valid raises an argument error if it s not . |
8,003 | def get_chapter ( self , book_name , book_chapter , cache_chapter = True ) : try : logging . debug ( "Attempting to read chapter from disk" ) verses_list = self . _get_ondisk_chapter ( book_name , book_chapter ) except Exception as e : logging . debug ( "Could not read file from disk. Attempting the internet.." ) logging . debug ( e . message ) verses_list = self . _get_online_chapter ( book_name , book_chapter , cache_chapter = cache_chapter ) return verses_list | Returns a chapter of the bible first checking to see if that chapter is on disk . If not hen it attempts to fetch it from the internet . |
8,004 | def verse_lookup ( self , book_name , book_chapter , verse , cache_chapter = True ) : verses_list = self . get_chapter ( book_name , str ( book_chapter ) , cache_chapter = cache_chapter ) return verses_list [ int ( verse ) - 1 ] | Looks up a verse from online . recoveryversion . bible then returns it . |
8,005 | def validate_on_submit ( self ) : valid = FlaskWtf . validate_on_submit ( self ) if not self . _schema or not self . is_submitted ( ) : return valid data = dict ( ) for field in self . _fields : data [ field ] = self . _fields [ field ] . data result = self . schema . process ( data , context = self . _force_context ) self . set_errors ( result ) for field in data : self . _fields [ field ] . data = data [ field ] return valid and not bool ( self . errors ) | Extend validate on submit to allow validation with schema |
8,006 | def set_errors ( self , result ) : errors = result . get_messages ( ) for property_name in errors : if not hasattr ( self , property_name ) : continue prop_errors = errors [ property_name ] if type ( prop_errors ) is not list : prop_errors = [ '<Nested schema result following...>' ] if property_name in self . errors : self . errors [ property_name ] . extend ( prop_errors ) else : self . errors [ property_name ] = prop_errors | Populate field errors with errors from schema validation |
8,007 | def diffs2persistence ( rev_docs , window_size = 50 , revert_radius = 15 , sunset = None , verbose = False ) : rev_docs = mwxml . utilities . normalize ( rev_docs ) window_size = int ( window_size ) revert_radius = int ( revert_radius ) sunset = Timestamp ( sunset ) if sunset is not None else Timestamp ( time . time ( ) ) page_docs = groupby ( rev_docs , key = lambda d : d [ 'page' ] [ 'title' ] ) for page_title , rev_docs in page_docs : if verbose : sys . stderr . write ( page_title + ": " ) rev_docs = peekable ( rev_docs ) window = deque ( maxlen = window_size ) state = DiffState ( revert_radius = revert_radius ) while rev_docs : rev_doc = next ( rev_docs ) next_doc = rev_docs . peek ( None ) if next_doc is not None : seconds_visible = Timestamp ( next_doc [ 'timestamp' ] ) - Timestamp ( rev_doc [ 'timestamp' ] ) else : seconds_visible = sunset - Timestamp ( rev_doc [ 'timestamp' ] ) if seconds_visible < 0 : logger . warn ( "Seconds visible {0} is less than zero." . format ( seconds_visible ) ) seconds_visible = 0 _ , tokens_added , _ = state . update_opdocs ( rev_doc [ 'sha1' ] , rev_doc [ 'diff' ] [ 'ops' ] , ( rev_doc [ 'user' ] , seconds_visible ) ) if len ( window ) == window_size : old_doc , old_added = window [ 0 ] window . append ( ( rev_doc , tokens_added ) ) persistence = token_persistence ( old_doc , old_added , window , None ) old_doc [ 'persistence' ] = persistence yield old_doc if verbose : sys . stderr . write ( "." ) sys . stderr . flush ( ) else : window . append ( ( rev_doc , tokens_added ) ) while len ( window ) > 0 : old_doc , old_added = window . popleft ( ) persistence = token_persistence ( old_doc , old_added , window , sunset ) old_doc [ 'persistence' ] = persistence yield old_doc if verbose : sys . stderr . write ( "_" ) sys . stderr . flush ( ) if verbose : sys . stderr . write ( "\n" ) | Processes a sorted and page - partitioned sequence of revision documents into and adds a persistence field to them containing statistics about how each token added in the revision persisted through future revisions . |
8,008 | def generator ( name ) : name = name . upper ( ) if name not in WHash . __hash_map__ . keys ( ) : raise ValueError ( 'Hash generator "%s" not available' % name ) return WHash . __hash_map__ [ name ] | Return generator by its name |
8,009 | def generator_by_digest ( family , digest_size ) : for generator_name in WHash . available_generators ( family = family ) : generator = WHash . generator ( generator_name ) if generator . generator_digest_size ( ) == digest_size : return generator raise ValueError ( 'Hash generator is not available' ) | Return generator by hash generator family name and digest size |
8,010 | def sequence ( cls , * info ) : if len ( info ) == 0 : return info = list ( info ) info . reverse ( ) result = WMessengerOnionSessionFlowProto . Iterator ( info [ 0 ] . layer_name ( ) , ** info [ 0 ] . layer_args ( ) ) for i in range ( 1 , len ( info ) ) : result = WMessengerOnionSessionFlowProto . Iterator ( info [ i ] . layer_name ( ) , next_iterator = result , ** info [ i ] . layer_args ( ) ) return result | Useful method to generate iterator . It is generated by chaining the given info . If no info is specified then None is returned |
8,011 | def from_string ( address ) : str_address = None if WMACAddress . re_dash_format . match ( address ) : str_address = "" . join ( address . split ( "-" ) ) elif WMACAddress . re_colon_format . match ( address ) : str_address = "" . join ( address . split ( ":" ) ) elif WMACAddress . re_cisco_format . match ( address ) : str_address = "" . join ( address . split ( "." ) ) elif WMACAddress . re_spaceless_format . match ( address ) : str_address = address if str_address is None : raise ValueError ( "Invalid MAC address format: " + address ) result = WMACAddress ( ) for octet_index in range ( WMACAddress . octet_count ) : octet = str_address [ : 2 ] result . __address [ octet_index ] = int ( octet , 16 ) str_address = str_address [ 2 : ] return result | Return new object by the given MAC - address |
8,012 | def from_string ( address ) : address = address . split ( '.' ) if len ( address ) != WIPV4Address . octet_count : raise ValueError ( 'Invalid ip address: %s' % address ) result = WIPV4Address ( ) for i in range ( WIPV4Address . octet_count ) : result . __address [ i ] = WBinArray ( int ( address [ i ] ) , WFixedSizeByteArray . byte_size ) return result | Parse string for IPv4 address |
8,013 | def to_string ( address , dns_format = False ) : if isinstance ( address , WIPV4Address ) is False : raise TypeError ( 'Invalid address type' ) address = [ str ( int ( x ) ) for x in address . __address ] if dns_format is False : return '.' . join ( address ) address . reverse ( ) return ( '.' . join ( address ) + '.in-addr.arpa' ) | Convert address to string |
8,014 | def first_address ( self , skip_network_address = True ) : bin_address = self . __address . bin_address ( ) bin_address_length = len ( bin_address ) if self . __mask > ( bin_address_length - 2 ) : skip_network_address = False for i in range ( bin_address_length - self . __mask ) : bin_address [ self . __mask + i ] = 0 if skip_network_address : bin_address [ bin_address_length - 1 ] = 1 return WIPV4Address ( bin_address ) | Return the first IP address of this network |
8,015 | def last_address ( self , skip_broadcast_address = True ) : bin_address = self . __address . bin_address ( ) bin_address_length = len ( bin_address ) if self . __mask > ( bin_address_length - 2 ) : skip_broadcast_address = False for i in range ( bin_address_length - self . __mask ) : bin_address [ self . __mask + i ] = 1 if skip_broadcast_address : bin_address [ bin_address_length - 1 ] = 0 return WIPV4Address ( bin_address ) | Return the last IP address of this network |
8,016 | def iterator ( self , skip_network_address = True , skip_broadcast_address = True ) : return WNetworkIPV4Iterator ( self , skip_network_address , skip_broadcast_address ) | Return iterator that can iterate over network addresses |
8,017 | def from_string ( address ) : if len ( address ) == 0 : return WFQDN ( ) if address [ - 1 ] == '.' : address = address [ : - 1 ] if len ( address ) > WFQDN . maximum_fqdn_length : raise ValueError ( 'Invalid address' ) result = WFQDN ( ) for label in address . split ( '.' ) : if isinstance ( label , str ) and WFQDN . re_label . match ( label ) : result . _labels . append ( label ) else : raise ValueError ( 'Invalid address' ) return result | Convert doted - written FQDN address to WFQDN object |
8,018 | def to_string ( address , leading_dot = False ) : if isinstance ( address , WFQDN ) is False : raise TypeError ( 'Invalid type for FQDN address' ) result = '.' . join ( address . _labels ) return result if leading_dot is False else ( result + '.' ) | Return doted - written address by the given WFQDN object |
8,019 | def qteReparent ( self , parent ) : self . setParent ( parent ) try : self . _qteAdmin . parentWindow = parent . qteParentWindow ( ) except AttributeError : self . _qteAdmin . parentWindow = None if parent : msg = 'Parent is neither None, nor does it have a' msg += 'qteParentWindow field print ( msg ) | Re - parent the applet . |
8,020 | def qteAddWidget ( self , widgetObj : QtGui . QWidget , isFocusable : bool = True , widgetSignature : str = None , autoBind : bool = True ) : widgetObj . _qteAdmin = QtmacsAdminStructure ( self , isFocusable = isFocusable ) widgetObj . _qteAdmin . appletID = self . _qteAdmin . appletID widgetObj . _qteAdmin . isQtmacsApplet = False widgetObj . _qteAdmin . appletSignature = self . qteAppletSignature ( ) if widgetSignature is None : widgetObj . _qteAdmin . widgetSignature = widgetObj . __class__ . __name__ else : widgetObj . _qteAdmin . widgetSignature = widgetSignature wo = widgetObj wo . qteSignature = wo . _qteAdmin . widgetSignature wo . qteSetKeyFilterPolicy = wo . _qteAdmin . qteSetKeyFilterPolicy del wo self . _qteAdmin . widgetList . append ( widgetObj ) default_bind = qte_global . default_widget_keybindings if autoBind and ( widgetObj . qteSignature in default_bind ) : module_name = default_bind [ widgetObj . qteSignature ] try : mod = importlib . import_module ( module_name ) except ImportError : msg = ( 'Module <b>{}</b> could not be imported.' . format ( module_name ) ) self . qteLogger . exception ( msg , stack_info = True ) return if hasattr ( mod , 'install_macros_and_bindings' ) : try : mod . install_macros_and_bindings ( widgetObj ) except Exception : msg = ( '<b>install_macros_and_bindings</b> function' ' in <b>{}</b> did not execute properly.' ) msg = msg . format ( module_name ) self . qteLogger . error ( msg , stack_info = True ) else : msg = ( 'Module <b>{}</b> has no ' '<b>install_macros_and_bindings</b>' ' method' . format ( module_name ) ) self . qteLogger . error ( msg ) return widgetObj | Augment the standard Qt widgetObj with Qtmacs specific fields . |
8,021 | def qteSetAppletSignature ( self , signature : str ) : if '*' in signature : raise QtmacsOtherError ( 'The applet signature must not contain "*"' ) if signature == '' : raise QtmacsOtherError ( 'The applet signature must be non-empty' ) self . _qteAdmin . appletSignature = signature | Specify the applet signature . |
8,022 | def qteAutoremoveDeletedWidgets ( self ) : widget_list = self . _qteAdmin . widgetList deleted_widgets = [ _ for _ in widget_list if sip . isdeleted ( _ ) ] for widgetObj in deleted_widgets : self . _qteAdmin . widgetList . remove ( widgetObj ) | Remove all widgets from the internal widget list that do not exist anymore according to SIP . |
8,023 | def qteSetWidgetFocusOrder ( self , widList : tuple ) : if len ( widList ) < 2 : return self . qteAutoremoveDeletedWidgets ( ) widList = [ _ for _ in widList if _ is not None ] for wid in widList : if wid not in self . _qteAdmin . widgetList : msg = 'Cannot change focus order because some ' msg += 'widgets do not exist.' self . qteLogger . warning ( msg ) return newList = [ widList [ 0 ] ] for wid in widList [ 1 : ] : if wid not in newList : newList . append ( wid ) if len ( newList ) < 2 : return for wid in newList [ 1 : ] : self . _qteAdmin . widgetList . remove ( wid ) startIdx = self . _qteAdmin . widgetList . index ( newList [ 0 ] ) + 1 for idx , wid in enumerate ( newList [ 1 : ] ) : self . _qteAdmin . widgetList . insert ( startIdx + idx , wid ) | Change the focus order of the widgets in this applet . |
8,024 | def qteNextWidget ( self , numSkip : int = 1 , ofsWidget : QtGui . QWidget = None , skipVisible : bool = False , skipInvisible : bool = True , skipFocusable : bool = False , skipUnfocusable : bool = True ) : if not hasattr ( ofsWidget , '_qteAdmin' ) and ( ofsWidget is not None ) : msg = '<ofsWidget> was probably not added with <qteAddWidget>' msg += ' method because it lacks the <_qteAdmin> attribute.' raise QtmacsOtherError ( msg ) self . qteAutoremoveDeletedWidgets ( ) widList = list ( self . _qteAdmin . widgetList ) if not len ( widList ) : if qteGetAppletFromWidget ( self . _qteActiveWidget ) is self : return self . _qteActiveWidget else : return None if skipInvisible : widList = [ wid for wid in widList if wid . isVisible ( ) ] if skipVisible : widList = [ wid for wid in widList if not wid . isVisible ( ) ] if skipFocusable : widList = [ wid for wid in widList if not wid . _qteAdmin . isFocusable ] if skipUnfocusable : widList = [ wid for wid in widList if wid . _qteAdmin . isFocusable ] if not len ( widList ) : return None if ofsWidget is None : ofsWidget = self . _qteActiveWidget if ( ofsWidget is not None ) and ( numSkip == 0 ) : if qteIsQtmacsWidget ( ofsWidget ) : return ofsWidget try : ofsIdx = widList . index ( ofsWidget ) except ValueError : ofsIdx = 0 ofsIdx = ( ofsIdx + numSkip ) % len ( widList ) return widList [ ofsIdx ] | Return the next widget in cyclic order . |
8,025 | def qteMakeWidgetActive ( self , widgetObj : QtGui . QWidget ) : if widgetObj is None : self . _qteActiveWidget = None return if qteGetAppletFromWidget ( widgetObj ) is not self : msg = 'The specified widget is not inside the current applet.' raise QtmacsOtherError ( msg ) if not hasattr ( widgetObj , '_qteAdmin' ) : self . _qteActiveWidget = widgetObj return if widgetObj . _qteAdmin . isQtmacsApplet : self . _qteActiveWidget = None return self . qteAutoremoveDeletedWidgets ( ) if widgetObj not in self . _qteAdmin . widgetList : msg = 'Widget is not registered for this applet.' self . qteLogger . error ( msg , stack_info = True ) self . _qteActiveWidget = None return self . qteSetWidgetFocusOrder ( ( self . _qteActiveWidget , widgetObj ) ) self . _qteActiveWidget = widgetObj | Give keyboard focus to widgetObj . |
8,026 | def split_key ( key ) : if key == KEY_SEP : return ( ) key_chunks = tuple ( key . strip ( KEY_SEP ) . split ( KEY_SEP ) ) if key_chunks [ 0 ] . startswith ( KEY_SEP ) : return ( key_chunks [ 0 ] [ len ( KEY_SEP ) : ] , ) + key_chunks [ 1 : ] else : return key_chunks | Splits a node key . |
8,027 | def set ( self , index , value = None , dir = False , ttl = None , expiration = None ) : if bool ( dir ) is ( value is not None ) : raise TypeError ( 'Choose one of value or directory' ) if ( ttl is not None ) is ( expiration is None ) : raise TypeError ( 'Both of ttl and expiration required' ) self . value = value if self . dir != dir : self . dir = dir self . nodes = { } if dir else None self . ttl = ttl self . expiration = expiration self . modified_index = index | Updates the node data . |
8,028 | def make_result ( self , result_class , node = None , prev_node = None , remember = True , key_chunks = None , notify = True , ** kwargs ) : def canonicalize ( node , ** kwargs ) : return None if node is None else node . canonicalize ( ** kwargs ) index = self . index result = result_class ( canonicalize ( node , ** kwargs ) , canonicalize ( prev_node , ** kwargs ) , index ) if not remember : return result self . history [ index ] = result_class ( canonicalize ( node , include_nodes = False ) , canonicalize ( prev_node , include_nodes = False ) , index ) key_chunks = key_chunks or split_key ( node . key ) asymptotic_key_chunks = ( key_chunks [ : x + 1 ] for x in xrange ( len ( key_chunks ) ) ) event_keys = [ ( False , key_chunks ) ] for _key_chunks in asymptotic_key_chunks : exact = _key_chunks == key_chunks self . indices . setdefault ( _key_chunks , [ ] ) . append ( ( index , exact ) ) event_keys . append ( ( True , _key_chunks ) ) if notify : for event_key in event_keys : try : event = self . events . pop ( event_key ) except KeyError : pass else : event . set ( ) return result | Makes an etcd result . |
8,029 | def connect ( self , host = 'localhost' ) : get_logger ( ) . info ( "Connecting to RabbitMQ server..." ) self . _conn = pika . BlockingConnection ( pika . ConnectionParameters ( host = host ) ) self . _channel = self . _conn . channel ( ) get_logger ( ) . info ( "Declaring topic exchanger {}..." . format ( self . exchange ) ) self . _channel . exchange_declare ( exchange = self . exchange , type = 'topic' ) get_logger ( ) . info ( "Creating RabbitMQ queue..." ) result = self . _channel . queue_declare ( exclusive = True ) self . _queue_name = result . method . queue if self . listen_all : get_logger ( ) . info ( "Binding queue to exchanger {} (listen all)..." . format ( self . exchange ) ) self . _channel . queue_bind ( exchange = self . exchange , queue = self . _queue_name , routing_key = '*' ) else : for routing_key in self . topics : get_logger ( ) . info ( "Binding queue to exchanger {} " "with routing key {}..." . format ( self . exchange , routing_key ) ) self . _channel . queue_bind ( exchange = self . exchange , queue = self . _queue_name , routing_key = routing_key ) get_logger ( ) . info ( "Binding callback..." ) self . _channel . basic_consume ( self . _callback , queue = self . _queue_name , no_ack = True ) | Connect to the server and set everything up . |
8,030 | def publish ( self , topic , dct ) : get_logger ( ) . info ( "Publishing message {} on routing key " "{}..." . format ( dct , topic ) ) self . _channel . basic_publish ( exchange = self . exchange , routing_key = topic , body = json . dumps ( dct ) ) | Send a dict with internal routing key to the exchange . |
8,031 | def _callback ( self , ch , method , properties , body ) : get_logger ( ) . info ( "Message received! Calling listeners..." ) topic = method . routing_key dct = json . loads ( body . decode ( 'utf-8' ) ) for listener in self . listeners : listener ( self , topic , dct ) | Internal method that will be called when receiving message . |
8,032 | def _handle_ping ( client , topic , dct ) : if dct [ 'type' ] == 'request' : resp = { 'type' : 'answer' , 'name' : client . name , 'source' : dct } client . publish ( 'ping' , resp ) | Internal method that will be called when receiving ping message . |
8,033 | def _create_argument_value_pairs ( func , * args , ** kwargs ) : try : arg_dict = signature ( func ) . bind_partial ( * args , ** kwargs ) . arguments except TypeError : return dict ( ) arguments = signature ( func ) . parameters for arg_name in arguments : if ( arguments [ arg_name ] . default != Parameter . empty ) and ( arguments [ arg_name ] . name not in arg_dict ) : arg_dict [ arguments [ arg_name ] . name ] = arguments [ arg_name ] . default return arg_dict | Create dictionary with argument names as keys and their passed values as values . |
8,034 | def _get_contract_exception_dict ( contract_msg ) : start_token = "[START CONTRACT MSG: " stop_token = "[STOP CONTRACT MSG]" if contract_msg . find ( start_token ) == - 1 : return { "num" : 0 , "msg" : "Argument `*[argument_name]*` is not valid" , "type" : RuntimeError , "field" : "argument_name" , } msg_start = contract_msg . find ( start_token ) + len ( start_token ) contract_msg = contract_msg [ msg_start : ] contract_name = contract_msg [ : contract_msg . find ( "]" ) ] contract_msg = contract_msg [ contract_msg . find ( "]" ) + 1 : contract_msg . find ( stop_token ) ] exdict = _CUSTOM_CONTRACTS [ contract_name ] for exvalue in exdict . values ( ) : if exvalue [ "msg" ] == contract_msg : return exvalue | Generate message for exception . |
8,035 | def _get_custom_contract ( param_contract ) : if not isinstance ( param_contract , str ) : return None for custom_contract in _CUSTOM_CONTRACTS : if re . search ( r"\b{0}\b" . format ( custom_contract ) , param_contract ) : return custom_contract return None | Return True if parameter contract is a custom contract False otherwise . |
8,036 | def _get_replacement_token ( msg ) : return ( None if not re . search ( r"\*\[[\w|\W]+\]\*" , msg ) else re . search ( r"\*\[[\w|\W]+\]\*" , msg ) . group ( ) [ 2 : - 2 ] ) | Extract replacement token from exception message . |
8,037 | def _get_type_name ( type_ ) : name = repr ( type_ ) if name . startswith ( "<" ) : name = getattr ( type_ , "__qualname__" , getattr ( type_ , "__name__" , "" ) ) return name . rsplit ( "." , 1 ) [ - 1 ] or repr ( type_ ) | Return a displayable name for the type . |
8,038 | def _get_class_frame_source ( class_name ) : for frame_info in inspect . stack ( ) : try : with open ( frame_info [ 1 ] ) as fp : src = "" . join ( fp . readlines ( ) [ frame_info [ 2 ] - 1 : ] ) except IOError : continue if re . search ( r"\bclass\b\s+\b{}\b" . format ( class_name ) , src ) : reader = six . StringIO ( src ) . readline tokens = tokenize . generate_tokens ( reader ) source_tokens = [ ] indent_level = 0 base_indent_level = 0 has_base_level = False for token , value , _ , _ , _ in tokens : source_tokens . append ( ( token , value ) ) if token == tokenize . INDENT : indent_level += 1 elif token == tokenize . DEDENT : indent_level -= 1 if has_base_level and indent_level <= base_indent_level : return ( tokenize . untokenize ( source_tokens ) , frame_info [ 0 ] . f_globals , frame_info [ 0 ] . f_locals , ) elif not has_base_level : has_base_level = True base_indent_level = indent_level raise TypeError ( 'Unable to retrieve source for class "{}"' . format ( class_name ) ) | Return the source code for a class by checking the frame stack . |
8,039 | def _is_propertyable ( names , attrs , annotations , attr , ) : return ( attr in annotations and not attr . startswith ( "_" ) and not attr . isupper ( ) and "__{}" . format ( attr ) not in names and not isinstance ( getattr ( attrs , attr , None ) , types . MethodType ) ) | Determine if an attribute can be replaced with a property . |
8,040 | def _create_typed_object_meta ( get_fset ) : def _get_fget ( attr , private_attr , type_ ) : def _fget ( self ) : try : return getattr ( self , private_attr ) except AttributeError : raise AttributeError ( "'{}' object has no attribute '{}'" . format ( _get_type_name ( type_ ) , attr ) ) return _fget class _AnnotatedObjectMeta ( type ) : def __new__ ( mcs , name , bases , attrs , ** kwargs ) : annotations = attrs . get ( "__annotations__" , { } ) use_comment_type_hints = ( not annotations and attrs . get ( "__module__" ) != __name__ ) if use_comment_type_hints : frame_source = _get_class_frame_source ( name ) annotations = get_type_hints ( * frame_source ) names = list ( attrs ) + list ( annotations ) typed_attrs = { } for attr in names : typed_attrs [ attr ] = attrs . get ( attr ) if _is_propertyable ( names , attrs , annotations , attr ) : private_attr = "__{}" . format ( attr ) if attr in attrs : typed_attrs [ private_attr ] = attrs [ attr ] type_ = ( Optional [ annotations [ attr ] ] if not use_comment_type_hints and attr in attrs and attrs [ attr ] is None else annotations [ attr ] ) typed_attrs [ attr ] = property ( _get_fget ( attr , private_attr , type_ ) , get_fset ( attr , private_attr , type_ ) , ) properties = [ attr for attr in annotations if _is_propertyable ( names , attrs , annotations , attr ) ] typed_attrs [ "_tp__typed_properties" ] = properties typed_attrs [ "_tp__required_typed_properties" ] = [ attr for attr in properties if ( attr not in attrs or attrs [ attr ] is None and use_comment_type_hints ) and NoneType not in getattr ( annotations [ attr ] , "__args__" , ( ) ) ] return super ( _AnnotatedObjectMeta , mcs ) . __new__ ( mcs , name , bases , typed_attrs , ** kwargs ) return _AnnotatedObjectMeta | Create a metaclass for typed objects . |
8,041 | def _tp__get_typed_properties ( self ) : try : return tuple ( getattr ( self , p ) for p in self . _tp__typed_properties ) except AttributeError : raise NotImplementedError | Return a tuple of typed attrs that can be used for comparisons . |
8,042 | def run ( cls , routes , * args , ** kwargs ) : app = init ( cls , routes , * args , ** kwargs ) HOST = os . getenv ( 'HOST' , '0.0.0.0' ) PORT = int ( os . getenv ( 'PORT' , 8000 ) ) aiohttp . web . run_app ( app , port = PORT , host = HOST ) | Run a web application . |
8,043 | def add ( self , vector , InterventionAnophelesParams = None ) : assert isinstance ( vector , six . string_types ) et = ElementTree . fromstring ( vector ) mosquito = Vector ( et ) assert isinstance ( mosquito . mosquito , str ) assert isinstance ( mosquito . propInfected , float ) assert len ( mosquito . seasonality . monthlyValues ) == 12 index = len ( self . et . findall ( "anopheles" ) ) self . et . insert ( index , et ) | Add a vector to entomology section . vector is either ElementTree or xml snippet |
8,044 | def _format_msg ( text , width , indent = 0 , prefix = "" ) : r text = repr ( text ) . replace ( "`" , "\\`" ) . replace ( "\\n" , " ``\\n`` " ) sindent = " " * indent if not prefix else prefix wrapped_text = textwrap . wrap ( text , width , subsequent_indent = sindent ) return ( "\n" . join ( wrapped_text ) ) [ 1 : - 1 ] . rstrip ( ) | r Format exception message . |
8,045 | def _validate_fname ( fname , arg_name ) : if fname is not None : msg = "Argument `{0}` is not valid" . format ( arg_name ) if ( not isinstance ( fname , str ) ) or ( isinstance ( fname , str ) and ( "\0" in fname ) ) : raise RuntimeError ( msg ) try : if not os . path . exists ( fname ) : os . access ( fname , os . W_OK ) except ( TypeError , ValueError ) : raise RuntimeError ( msg ) | Validate that a string is a valid file name . |
8,046 | def _build_ex_tree ( self ) : sep = self . _exh_obj . callables_separator data = self . _exh_obj . exceptions_db if not data : raise RuntimeError ( "Exceptions database is empty" ) for item in data : item [ "name" ] = "root{sep}{name}" . format ( sep = sep , name = item [ "name" ] ) self . _tobj = ptrie . Trie ( sep ) try : self . _tobj . add_nodes ( data ) except ValueError as eobj : if str ( eobj ) . startswith ( "Illegal node name" ) : raise RuntimeError ( "Exceptions do not have a common callable" ) raise node = self . _tobj . root_name while ( len ( self . _tobj . get_children ( node ) ) == 1 ) and ( not self . _tobj . get_data ( node ) ) : node = self . _tobj . get_children ( node ) [ 0 ] if not self . _tobj . is_root ( node ) : self . _tobj . make_root ( node ) nsep = self . _tobj . node_separator prefix = nsep . join ( node . split ( self . _tobj . node_separator ) [ : - 1 ] ) self . _tobj . delete_prefix ( prefix ) self . _print_ex_tree ( ) | Construct exception tree from trace . |
8,047 | def _build_module_db ( self ) : tdict = collections . defaultdict ( lambda : [ ] ) for callable_name , callable_dict in self . _exh_obj . callables_db . items ( ) : fname , line_no = callable_dict [ "code_id" ] cname = ( "{cls_name}.__init__" . format ( cls_name = callable_name ) if callable_dict [ "type" ] == "class" else callable_name ) tdict [ fname ] . append ( { "name" : cname , "line" : line_no } ) for fname in tdict . keys ( ) : self . _module_obj_db [ fname ] = sorted ( tdict [ fname ] , key = lambda idict : idict [ "line" ] ) | Build database of module callables sorted by line number . |
8,048 | def _process_exlist ( self , exc , raised ) : if ( not raised ) or ( raised and exc . endswith ( "*" ) ) : return exc [ : - 1 ] if exc . endswith ( "*" ) else exc return None | Remove raised info from exception message and create separate list for it . |
8,049 | def _set_depth ( self , depth ) : if depth and ( ( not isinstance ( depth , int ) ) or ( isinstance ( depth , int ) and ( depth < 0 ) ) ) : raise RuntimeError ( "Argument `depth` is not valid" ) self . _depth = depth | Depth setter . |
8,050 | def _set_exclude ( self , exclude ) : if exclude and ( ( not isinstance ( exclude , list ) ) or ( isinstance ( exclude , list ) and any ( [ not isinstance ( item , str ) for item in exclude ] ) ) ) : raise RuntimeError ( "Argument `exclude` is not valid" ) self . _exclude = exclude | Exclude setter . |
8,051 | def get_sphinx_autodoc ( self , depth = None , exclude = None , width = 72 , error = False , raised = False , no_comment = False , ) : r frame = sys . _getframe ( 1 ) index = frame . f_code . co_filename . rfind ( "+" ) fname = os . path . abspath ( frame . f_code . co_filename [ : index ] ) line_num = int ( frame . f_code . co_filename [ index + 1 : ] ) module_db = self . _module_obj_db [ fname ] names = [ callable_dict [ "name" ] for callable_dict in module_db ] line_nums = [ callable_dict [ "line" ] for callable_dict in module_db ] name = names [ bisect . bisect ( line_nums , line_num ) - 1 ] return self . get_sphinx_doc ( name = name , depth = depth , exclude = exclude , width = width , error = error , raised = raised , no_comment = no_comment , ) | r Return exception list in reStructuredText _ auto - determining callable name . |
8,052 | def resize ( self , size ) : if size < len ( self ) : raise ValueError ( "Value is out of bound. Array can't be shrinked" ) current_size = self . __size for i in range ( size - current_size ) : self . __array . append ( WBinArray ( 0 , self . __class__ . byte_size ) ) self . __size = size | Grow this array to specified length . This array can t be shrinked |
8,053 | def swipe ( self ) : result = WFixedSizeByteArray ( len ( self ) ) for i in range ( len ( self ) ) : result [ len ( self ) - i - 1 ] = self [ i ] return result | Mirror current array value in reverse . Bytes that had greater index will have lesser index and vice - versa . This method doesn t change this array . It creates a new one and return it as a result . |
8,054 | def mime_type ( filename ) : try : __mime_lock . acquire ( ) extension = filename . split ( "." ) extension = extension [ len ( extension ) - 1 ] if extension == "woff2" : return "application/font-woff2" if extension == "css" : return "text/css" m = magic . from_file ( filename , mime = True ) m = m . decode ( ) if isinstance ( m , bytes ) else m if m == "text/plain" : guessed_type = mimetypes . guess_type ( filename ) [ 0 ] if guessed_type : return guessed_type return m finally : __mime_lock . release ( ) | Guess mime type for the given file name |
8,055 | def _validate_type ( self , item , name ) : if item is None : return if not isinstance ( item , self . allowed_types ) : item_class_name = item . __class__ . __name__ raise ArgumentError ( name , "Expected one of %s, but got `%s`" % ( self . allowed_types , item_class_name ) ) | Validate the item against allowed_types . |
8,056 | def _validate_required ( self , item , name ) : if self . required is True and item is None : raise ArgumentError ( name , "This argument is required." ) | Validate that the item is present if it s required . |
8,057 | def doc_dict ( self ) : doc = { 'type' : self . __class__ . __name__ , 'description' : self . description , 'default' : self . default , 'required' : self . required } if hasattr ( self , 'details' ) : doc [ 'detailed_description' ] = self . details return doc | Returns the documentation dictionary for this argument . |
8,058 | def validate_items ( self , input_list ) : output_list = [ ] for item in input_list : valid = self . list_item_type . validate ( item , self . item_name ) output_list . append ( valid ) return output_list | Validates that items in the list are of the type specified . |
8,059 | def startserver ( self , hostname = "localhost" , port = 8080 , daemon = False , handle_sigint = True ) : if daemon : print ( "Sorry daemon server not supported just yet." ) else : print ( "Starting %s json-rpc service at http://%s:%s" % ( self . __class__ . __name__ , hostname , port ) ) self . _http_server = HTTPServer ( server_address = ( hostname , int ( port ) ) , RequestHandlerClass = self . get_http_request_handler ( ) ) if handle_sigint : def sigint_handler ( signum , frame ) : self . _post_shutdown ( ) sys . exit ( 0 ) signal . signal ( signal . SIGINT , sigint_handler ) self . _http_server . serve_forever ( ) | Start json - rpc service . |
8,060 | def _get_asym_hel ( self , d ) : d0 = d [ 0 ] d1 = d [ 2 ] d2 = d [ 1 ] d3 = d [ 3 ] denom1 = d0 + d1 denom2 = d2 + d3 denom1 [ denom1 == 0 ] = np . nan denom2 [ denom2 == 0 ] = np . nan asym_hel = [ ( d0 - d1 ) / denom1 , ( d2 - d3 ) / denom2 ] asym_hel_err = [ 2 * np . sqrt ( d0 * d1 / np . power ( denom1 , 3 ) ) , 2 * np . sqrt ( d2 * d3 / np . power ( denom2 , 3 ) ) ] for i in range ( 2 ) : asym_hel [ i ] [ np . isnan ( asym_hel [ i ] ) ] = 0. asym_hel_err [ i ] [ np . isnan ( asym_hel_err [ i ] ) ] = 0. return [ [ asym_hel [ 1 ] , asym_hel_err [ 1 ] ] , [ asym_hel [ 0 ] , asym_hel_err [ 0 ] ] ] | Find the asymmetry of each helicity . |
8,061 | def _get_asym_comb ( self , d ) : d0 = d [ 0 ] d1 = d [ 2 ] d2 = d [ 1 ] d3 = d [ 3 ] r_denom = d0 * d3 r_denom [ r_denom == 0 ] = np . nan r = np . sqrt ( ( d1 * d2 / r_denom ) ) r [ r == - 1 ] = np . nan asym_comb = ( r - 1 ) / ( r + 1 ) d0 [ d0 == 0 ] = np . nan d1 [ d1 == 0 ] = np . nan d2 [ d2 == 0 ] = np . nan d3 [ d3 == 0 ] = np . nan asym_comb_err = r * np . sqrt ( 1 / d1 + 1 / d0 + 1 / d3 + 1 / d2 ) / np . square ( r + 1 ) asym_comb [ np . isnan ( asym_comb ) ] = 0. asym_comb_err [ np . isnan ( asym_comb_err ) ] = 0. return [ asym_comb , asym_comb_err ] | Find the combined asymmetry for slr runs . Elegant 4 - counter method . |
8,062 | def _get_1f_sum_scans ( self , d , freq ) : unique_freq = np . unique ( freq ) sum_scans = [ [ ] for i in range ( len ( d ) ) ] for f in unique_freq : tag = freq == f for i in range ( len ( d ) ) : sum_scans [ i ] . append ( np . sum ( d [ i ] [ tag ] ) ) return ( np . array ( unique_freq ) , np . array ( sum_scans ) ) | Sum counts in each frequency bin over 1f scans . |
8,063 | def get_pulse_s ( self ) : try : dwelltime = self . ppg . dwelltime . mean beam_on = self . ppg . beam_on . mean except AttributeError : raise AttributeError ( "Missing logged ppg parameter: dwelltime " + "or beam_on" ) return dwelltime * beam_on / 1000. | Get pulse duration in seconds for pulsed measurements . |
8,064 | def extract_endpoints ( api_module ) : if not hasattr ( api_module , 'endpoints' ) : raise ValueError ( ( "pale.extract_endpoints expected the passed in " "api_module to have an `endpoints` attribute, but it didn't!" ) ) endpoints = api_module . endpoints if isinstance ( endpoints , types . ModuleType ) : classes = [ v for ( k , v ) in inspect . getmembers ( endpoints , inspect . isclass ) ] elif isinstance ( endpoints , ( list , tuple ) ) : classes = endpoints else : raise ValueError ( "Endpoints is not a module or list type!" ) instances = [ ] for cls in classes : if cls not in ( Endpoint , PatchEndpoint , PutResourceEndpoint ) and Endpoint in inspect . getmro ( cls ) : source_code = inspect . getsource ( cls ) if "@requires_permission" in source_code : permission_match = re . search ( r"@requires_permission\(\[?[\'\"]+(\w+)[\'\"]+" , source_code ) if permission_match != None : cls . _requires_permission = permission_match . group ( 1 ) instances . append ( cls ( ) ) return instances | Return the endpoints from an API implementation module . |
8,065 | def extract_resources ( api_module ) : endpoints = extract_endpoints ( api_module ) resource_classes = [ e . _returns . __class__ for e in endpoints ] return list ( set ( resource_classes ) ) | Return the resources from an API implementation module . |
8,066 | def load_template_source ( self , template_name , template_dirs = None ) : log . error ( "Calling zip loader" ) for folder in app_template_dirs : if ".zip/" in folder . replace ( "\\" , "/" ) : lib_file , relative_folder = get_zip_file_and_relative_path ( folder ) log . error ( lib_file , relative_folder ) try : z = zipfile . ZipFile ( lib_file ) log . error ( relative_folder + template_name ) template_path_in_zip = os . path . join ( relative_folder , template_name ) . replace ( "\\" , "/" ) source = z . read ( template_path_in_zip ) except ( IOError , KeyError ) as e : import traceback log . error ( traceback . format_exc ( ) ) try : z . close ( ) except : pass continue z . close ( ) template_path = "%s:%s" % ( lib_file , template_path_in_zip ) return ( source , template_path ) raise TemplateDoesNotExist ( template_name ) | Template loader that loads templates from zipped modules . |
8,067 | def fetch ( self , start = None , stop = None ) : if not start : start = 0 if not stop : stop = len ( self . log ) if start < 0 : start = 0 if stop > len ( self . log ) : stop = len ( self . log ) self . waitForFetch = False return self . log [ start : stop ] | Fetch log records and return them as a list . |
8,068 | def bind_blueprint ( pale_api_module , flask_blueprint ) : if not isinstance ( flask_blueprint , Blueprint ) : raise TypeError ( ( "pale.flask_adapter.bind_blueprint expected the " "passed in flask_blueprint to be an instance of " "Blueprint, but it was an instance of %s instead." ) % ( type ( flask_blueprint ) , ) ) if not pale . is_pale_module ( pale_api_module ) : raise TypeError ( ( "pale.flask_adapter.bind_blueprint expected the " "passed in pale_api_module to be a module, and to " "have a _module_type defined to equal " "pale.ImplementationModule, but it was an instance of " "%s instead." ) % ( type ( pale_api_module ) , ) ) endpoints = pale . extract_endpoints ( pale_api_module ) for endpoint in endpoints : endpoint . _set_response_class ( RESPONSE_CLASS ) method = [ endpoint . _http_method ] name = endpoint . _route_name handler = endpoint . _execute flask_blueprint . add_url_rule ( endpoint . _uri , name , view_func = ContextualizedHandler ( handler ) , methods = method ) | Binds an implemented pale API module to a Flask Blueprint . |
8,069 | def cookie_name_check ( cookie_name ) : cookie_match = WHTTPCookie . cookie_name_non_compliance_re . match ( cookie_name . encode ( 'us-ascii' ) ) return len ( cookie_name ) > 0 and cookie_match is None | Check cookie name for validity . Return True if name is valid |
8,070 | def cookie_attr_value_check ( attr_name , attr_value ) : attr_value . encode ( 'us-ascii' ) return WHTTPCookie . cookie_attr_value_compliance [ attr_name ] . match ( attr_value ) is not None | Check cookie attribute value for validity . Return True if value is valid |
8,071 | def __attr_name ( self , name ) : if name not in self . cookie_attr_value_compliance . keys ( ) : suggested_name = name . replace ( '_' , '-' ) . lower ( ) if suggested_name not in self . cookie_attr_value_compliance . keys ( ) : raise ValueError ( 'Invalid attribute name is specified' ) name = suggested_name return name | Return suitable and valid attribute name . This method replaces dash char to underscore . If name is invalid ValueError exception is raised |
8,072 | def remove_cookie ( self , cookie_name ) : if self . __ro_flag : raise RuntimeError ( 'Read-only cookie-jar changing attempt' ) if cookie_name in self . __cookies . keys ( ) : self . __cookies . pop ( cookie_name ) | Remove cookie by its name |
8,073 | def ro ( self ) : ro_jar = WHTTPCookieJar ( ) for cookie in self . __cookies . values ( ) : ro_jar . add_cookie ( cookie . ro ( ) ) ro_jar . __ro_flag = True return ro_jar | Return read - only copy |
8,074 | def import_simple_cookie ( cls , simple_cookie ) : cookie_jar = WHTTPCookieJar ( ) for cookie_name in simple_cookie . keys ( ) : cookie_attrs = { } for attr_name in WHTTPCookie . cookie_attr_value_compliance . keys ( ) : attr_value = simple_cookie [ cookie_name ] [ attr_name ] if attr_value != '' : cookie_attrs [ attr_name ] = attr_value cookie_jar . add_cookie ( WHTTPCookie ( cookie_name , simple_cookie [ cookie_name ] . value , ** cookie_attrs ) ) return cookie_jar | Create cookie jar from SimpleCookie object |
8,075 | def is_prime ( n ) : if n % 2 == 0 and n > 2 : return False return all ( n % i for i in range ( 3 , int ( math . sqrt ( n ) ) + 1 , 2 ) ) | Check if n is a prime number |
8,076 | def loadFile ( self , fileName ) : self . file = QtCore . QFile ( fileName ) if self . file . exists ( ) : self . qteText . append ( open ( fileName ) . read ( ) ) else : msg = "File <b>{}</b> does not exist" . format ( self . qteAppletID ( ) ) self . qteLogger . info ( msg ) | Display the file associated with the appletID . |
8,077 | def _encode ( self ) : obj = { k : v for k , v in self . __dict__ . items ( ) if not k . startswith ( '_' ) and type ( v ) in SAFE_TYPES } obj . update ( { k : v . _encode ( ) for k , v in self . __dict__ . items ( ) if isinstance ( v , Ent ) } ) return obj | Generate a recursive JSON representation of the ent . |
8,078 | def merge ( cls , * args , ** kwargs ) : newkeys = bool ( kwargs . get ( 'newkeys' , False ) ) ignore = kwargs . get ( 'ignore' , list ( ) ) if len ( args ) < 1 : raise ValueError ( 'no ents given to Ent.merge()' ) elif not all ( isinstance ( s , Ent ) for s in args ) : raise ValueError ( 'all positional arguments to Ent.merge() must ' 'be instances of Ent' ) ent = args [ 0 ] data = cls . load ( ent ) for ent in args [ 1 : ] : for key , value in ent . __dict__ . items ( ) : if key in ignore : continue if key in data . __dict__ : v1 = data . __dict__ [ key ] if type ( value ) == type ( v1 ) : if isinstance ( v1 , Ent ) : data . __dict__ [ key ] = cls . merge ( v1 , value , ** kwargs ) else : data . __dict__ [ key ] = cls . load ( value ) elif newkeys : data . __dict__ [ key ] = value return data | Create a new Ent from one or more existing Ents . Keys in the later Ent objects will overwrite the keys of the previous Ents . Later keys of different type than in earlier Ents will be bravely ignored . |
8,079 | def diff ( cls , * args , ** kwargs ) : newkeys = bool ( kwargs . get ( 'newkeys' , False ) ) ignore = kwargs . get ( 'ignore' , list ( ) ) if len ( args ) < 2 : raise ValueError ( 'less than two ents given to Ent.diff()' ) elif not all ( isinstance ( s , Ent ) for s in args ) : raise ValueError ( 'all positional arguments to Ent.diff() must ' 'be instances of Ent' ) s1 = args [ 0 ] differences = Ent ( ) for s2 in args [ 1 : ] : for key , value in s2 . __dict__ . items ( ) : if key in ignore : continue if key in s1 . __dict__ : v1 = s1 . __dict__ [ key ] if type ( value ) == type ( v1 ) : if isinstance ( v1 , Ent ) : delta = cls . diff ( v1 , value , ** kwargs ) if len ( delta . __dict__ ) : differences . __dict__ [ key ] = delta elif v1 != value : differences . __dict__ [ key ] = cls . load ( value ) elif newkeys : differences . __dict__ [ key ] = cls . load ( value ) s1 = s2 return differences | Create a new Ent representing the differences in two or more existing Ents . Keys in the later Ents with values that differ from the earlier Ents will be present in the final Ent with the latest value seen for that key . Later keys of different type than in earlier Ents will be bravely ignored . |
8,080 | def subclasses ( cls ) : seen = set ( ) queue = set ( [ cls ] ) while queue : c = queue . pop ( ) seen . add ( c ) sc = c . __subclasses__ ( ) for c in sc : if c not in seen : queue . add ( c ) seen . remove ( cls ) return seen | Return a set of all Ent subclasses recursively . |
8,081 | def base_url ( self ) : if self . location in self . known_locations : return self . known_locations [ self . location ] elif '.' in self . location or self . location == 'localhost' : return 'https://' + self . location else : return 'https://' + self . location + API_HOST_SUFFIX | Protocol + hostname |
8,082 | def _build_exclusion_list ( exclude ) : mod_files = [ ] if exclude : for mod in exclude : mdir = None mod_file = None for token in mod . split ( "." ) : try : mfile , mdir , _ = imp . find_module ( token , mdir and [ mdir ] ) if mfile : mod_file = mfile . name mfile . close ( ) except ImportError : msg = "Source for module {mod_name} could not be found" raise ValueError ( msg . format ( mod_name = mod ) ) if mod_file : mod_files . append ( mod_file . replace ( ".pyc" , ".py" ) ) return mod_files | Build file names list of modules to exclude from exception handling . |
8,083 | def _invalid_frame ( fobj ) : fin = fobj . f_code . co_filename invalid_module = any ( [ fin . endswith ( item ) for item in _INVALID_MODULES_LIST ] ) return invalid_module or ( not os . path . isfile ( fin ) ) | Select valid stack frame to process . |
8,084 | def _sorted_keys_items ( dobj ) : keys = sorted ( dobj . keys ( ) ) for key in keys : yield key , dobj [ key ] | Return dictionary items sorted by key . |
8,085 | def addex ( extype , exmsg , condition = None , edata = None ) : r return _ExObj ( extype , exmsg , condition , edata ) . craise | r Add an exception in the global exception handler . |
8,086 | def addai ( argname , condition = None ) : r if not isinstance ( argname , str ) : raise RuntimeError ( "Argument `argname` is not valid" ) if ( condition is not None ) and ( type ( condition ) != bool ) : raise RuntimeError ( "Argument `condition` is not valid" ) obj = _ExObj ( RuntimeError , "Argument `{0}` is not valid" . format ( argname ) , condition ) return obj . craise | r Add an AI exception in the global exception handler . |
8,087 | def get_or_create_exh_obj ( full_cname = False , exclude = None , callables_fname = None ) : r if not hasattr ( __builtin__ , "_EXH" ) : set_exh_obj ( ExHandle ( full_cname = full_cname , exclude = exclude , callables_fname = callables_fname ) ) return get_exh_obj ( ) | r Return global exception handler if set otherwise create a new one and return it . |
8,088 | def _flatten_ex_dict ( self ) : odict = { } for _ , fdict in self . _ex_dict . items ( ) : for ( extype , exmsg ) , value in fdict . items ( ) : key = value [ "name" ] odict [ key ] = copy . deepcopy ( value ) del odict [ key ] [ "name" ] odict [ key ] [ "type" ] = extype odict [ key ] [ "msg" ] = exmsg return odict | Flatten structure of exceptions dictionary . |
8,089 | def _format_msg ( self , msg , edata ) : edata = edata if isinstance ( edata , list ) else [ edata ] for fdict in edata : if "*[{token}]*" . format ( token = fdict [ "field" ] ) not in msg : raise RuntimeError ( "Field {token} not in exception message" . format ( token = fdict [ "field" ] ) ) msg = msg . replace ( "*[{token}]*" . format ( token = fdict [ "field" ] ) , "{value}" ) . format ( value = fdict [ "value" ] ) return msg | Substitute parameters in exception message . |
8,090 | def _get_exceptions_db ( self ) : template = "{extype} ({exmsg}){raised}" if not self . _full_cname : ret = [ ] for _ , fdict in self . _ex_dict . items ( ) : for key in fdict . keys ( ) : ret . append ( { "name" : fdict [ key ] [ "name" ] , "data" : template . format ( extype = _ex_type_str ( key [ 0 ] ) , exmsg = key [ 1 ] , raised = "*" if fdict [ key ] [ "raised" ] [ 0 ] else "" , ) , } ) return ret ret = [ ] for fdict in self . _ex_dict . values ( ) : for key in fdict . keys ( ) : for func_name in fdict [ key ] [ "function" ] : rindex = fdict [ key ] [ "function" ] . index ( func_name ) raised = fdict [ key ] [ "raised" ] [ rindex ] ret . append ( { "name" : self . decode_call ( func_name ) , "data" : template . format ( extype = _ex_type_str ( key [ 0 ] ) , exmsg = key [ 1 ] , raised = "*" if raised else "" , ) , } ) return ret | Return a list of dictionaries suitable to be used with ptrie module . |
8,091 | def _get_ex_data ( self ) : func_id , func_name = self . _get_callable_path ( ) if self . _full_cname : func_name = self . encode_call ( func_name ) return func_id , func_name | Return hierarchical function name . |
8,092 | def _property_search ( self , fobj ) : scontext = fobj . f_locals . get ( "self" , None ) class_obj = scontext . __class__ if scontext is not None else None if not class_obj : del fobj , scontext , class_obj return None class_props = [ ( member_name , member_obj ) for member_name , member_obj in inspect . getmembers ( class_obj ) if isinstance ( member_obj , property ) ] if not class_props : del fobj , scontext , class_obj return None class_file = inspect . getfile ( class_obj ) . replace ( ".pyc" , ".py" ) class_name = self . _callables_obj . get_callable_from_line ( class_file , inspect . getsourcelines ( class_obj ) [ 1 ] ) prop_actions_dicts = { } for prop_name , prop_obj in class_props : prop_dict = { "fdel" : None , "fget" : None , "fset" : None } for action in prop_dict : action_obj = getattr ( prop_obj , action ) if action_obj : prev_func_obj , next_func_obj = ( action_obj , getattr ( action_obj , "__wrapped__" , None ) , ) while next_func_obj : prev_func_obj , next_func_obj = ( next_func_obj , getattr ( next_func_obj , "__wrapped__" , None ) , ) prop_dict [ action ] = [ id ( _get_func_code ( action_obj ) ) , id ( _get_func_code ( prev_func_obj ) ) , ] prop_actions_dicts [ prop_name ] = prop_dict func_id = id ( fobj . f_code ) desc_dict = { "fget" : "getter" , "fset" : "setter" , "fdel" : "deleter" } for prop_name , prop_actions_dict in prop_actions_dicts . items ( ) : for action_name , action_id_list in prop_actions_dict . items ( ) : if action_id_list and ( func_id in action_id_list ) : prop_name = "." . join ( [ class_name , prop_name ] ) del fobj , scontext , class_obj , class_props return "{prop_name}({prop_action})" . format ( prop_name = prop_name , prop_action = desc_dict [ action_name ] ) return None | Return full name if object is a class property otherwise return None . |
8,093 | def _raise_exception ( self , eobj , edata = None ) : _ , _ , tbobj = sys . exc_info ( ) if edata : emsg = self . _format_msg ( eobj [ "msg" ] , edata ) _rwtb ( eobj [ "type" ] , emsg , tbobj ) else : _rwtb ( eobj [ "type" ] , eobj [ "msg" ] , tbobj ) | Raise exception by name . |
8,094 | def _unwrap_obj ( self , fobj , fun ) : try : prev_func_obj , next_func_obj = ( fobj . f_globals [ fun ] , getattr ( fobj . f_globals [ fun ] , "__wrapped__" , None ) , ) while next_func_obj : prev_func_obj , next_func_obj = ( next_func_obj , getattr ( next_func_obj , "__wrapped__" , None ) , ) return ( prev_func_obj , inspect . getfile ( prev_func_obj ) . replace ( ".pyc" , "py" ) ) except ( KeyError , AttributeError , TypeError ) : return None , None | Unwrap decorators . |
8,095 | def _validate_edata ( self , edata ) : if edata is None : return True if not ( isinstance ( edata , dict ) or _isiterable ( edata ) ) : return False edata = [ edata ] if isinstance ( edata , dict ) else edata for edict in edata : if ( not isinstance ( edict , dict ) ) or ( isinstance ( edict , dict ) and ( ( "field" not in edict ) or ( "field" in edict and ( not isinstance ( edict [ "field" ] , str ) ) ) or ( "value" not in edict ) ) ) : return False return True | Validate edata argument of raise_exception_if method . |
8,096 | def add_exception ( self , exname , extype , exmsg ) : r if not isinstance ( exname , str ) : raise RuntimeError ( "Argument `exname` is not valid" ) number = True try : int ( exname ) except ValueError : number = False if number : raise RuntimeError ( "Argument `exname` is not valid" ) if not isinstance ( exmsg , str ) : raise RuntimeError ( "Argument `exmsg` is not valid" ) msg = "" try : raise extype ( exmsg ) except Exception as eobj : msg = _get_ex_msg ( eobj ) if msg != exmsg : raise RuntimeError ( "Argument `extype` is not valid" ) func_id , func_name = self . _get_ex_data ( ) if func_id not in self . _ex_dict : self . _ex_dict [ func_id ] = { } key = ( extype , exmsg ) exname = "{0}{1}{2}" . format ( func_id , self . _callables_separator , exname ) entry = self . _ex_dict [ func_id ] . get ( key , { "function" : [ ] , "name" : exname , "raised" : [ ] } ) if func_name not in entry [ "function" ] : entry [ "function" ] . append ( func_name ) entry [ "raised" ] . append ( False ) self . _ex_dict [ func_id ] [ key ] = entry return ( func_id , key , func_name ) | r Add an exception to the handler . |
8,097 | def decode_call ( self , call ) : if call is None : return None itokens = call . split ( self . _callables_separator ) odict = { } for key , value in self . _clut . items ( ) : if value in itokens : odict [ itokens [ itokens . index ( value ) ] ] = key return self . _callables_separator . join ( [ odict [ itoken ] for itoken in itokens ] ) | Replace callable tokens with callable names . |
8,098 | def encode_call ( self , call ) : if call is None : return None itokens = call . split ( self . _callables_separator ) otokens = [ ] for itoken in itokens : otoken = self . _clut . get ( itoken , None ) if not otoken : otoken = str ( len ( self . _clut ) ) self . _clut [ itoken ] = otoken otokens . append ( otoken ) return self . _callables_separator . join ( otokens ) | Replace callables with tokens to reduce object memory footprint . |
8,099 | def default ( self , obj ) : try : if isinstance ( obj , datetime . datetime ) : encoded = arrow . get ( obj ) . isoformat ( ) else : encoded = json . JSONEncoder . default ( self , obj ) except TypeError as e : if hasattr ( obj , 'to_dict' ) and callable ( obj . to_dict ) : encoded = obj . to_dict ( ) else : raise e return encoded | Default JSON encoding . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.