code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
def _validate_bool(value):
    """Validate a setting is a bool.

    Returns:
        bool: The value as a boolean.

    Raises:
        ValueError: If the value can't be parsed as a bool string or isn't
            already bool.
    """
    if isinstance(value, six.text_type):
        # Accept the strings "true"/"false" in any case, with whitespace.
        text = value.strip().lower()
        if text == 'true':
            value = True
        elif text == 'false':
            value = False
        else:
            raise ValueError('"{}" must be a boolean ("True" or "False")'.format(value))

    if not isinstance(value, bool):
        raise ValueError('"{}" is not a boolean value.'.format(value))

    return value
2.167412
2.258291
0.959758
def load_config(extra_args=None, doc=None, filenames=None, invalidate_cache=False,
                fedmsg_command=False, disable_defaults=False):
    """Setup a runtime config dict by integrating the following sources
    (ordered by precedence):

      - defaults (unless disable_defaults = True)
      - config file
      - command line arguments

    If the ``fedmsg_command`` argument is False, no command line arguments are
    checked.
    """
    warnings.warn('Using "load_config" is deprecated and will be removed in a future release;'
                  ' use the "fedmsg.config.conf" dictionary instead.', DeprecationWarning)
    global __cache

    if invalidate_cache:
        __cache = {}

    if __cache:
        return __cache

    # Coerce defaults if arguments are not supplied.
    extra_args = extra_args or []
    doc = doc or ""

    if not disable_defaults:
        config = copy.deepcopy(defaults)
    else:
        config = {}

    config.update(_process_config_file(filenames=filenames))

    # This is optional (and defaults to false) so that only 'fedmsg-*' commands
    # are required to provide these arguments.
    # For instance, the moksha-hub command takes a '-v' argument and internally
    # makes calls to fedmsg.  We don't want to impose all of fedmsg's CLI
    # option constraints on programs that use fedmsg, so we make it optional.
    if fedmsg_command:
        config.update(_process_arguments(extra_args, doc, config))

    # If the user specified a config file on the command line, then start over
    # but read in that file instead.
    if not filenames and config.get('config_filename', None):
        return load_config(extra_args, doc,
                           filenames=[config['config_filename']],
                           fedmsg_command=fedmsg_command,
                           disable_defaults=disable_defaults)

    # Just a little debug option.  :)
    if config.get('print_config'):
        print(pretty_dumps(config))
        sys.exit(0)

    if not disable_defaults and 'endpoints' not in config:
        raise ValueError("No config value 'endpoints' found.")

    if not isinstance(config.get('endpoints', {}), dict):
        raise ValueError("The 'endpoints' config value must be a dict.")

    # Normalize every endpoints value to a list.
    if 'endpoints' in config:
        config['endpoints'] = dict([
            (k, list(iterate(v))) for k, v in config['endpoints'].items()
        ])

    # Resolve DNS SRV records into concrete tcp:// endpoint URLs.
    if 'srv_endpoints' in config and len(config['srv_endpoints']) > 0:
        from dns.resolver import query, NXDOMAIN, Timeout, NoNameservers
        for e in config['srv_endpoints']:
            urls = []
            try:
                records = query('_fedmsg._tcp.{0}'.format(e), 'SRV')
            except NXDOMAIN:
                warnings.warn("There is no appropriate SRV records " +
                              "for {0}".format(e))
                continue
            except Timeout:
                warnings.warn("The DNS query for the SRV records of" +
                              " {0} timed out.".format(e))
                continue
            except NoNameservers:
                warnings.warn("No name server is available, please " +
                              "check the configuration")
                break

            for rec in records:
                urls.append('tcp://{hostname}:{port}'.format(
                    hostname=rec.target.to_text(), port=rec.port))
            config['endpoints'][e] = list(iterate(urls))

    if 'topic_prefix_re' not in config and 'topic_prefix' in config:
        # Turn "org.fedoraproject" into "org\.fedoraproject\.[^\W\d_]+"
        # BUGFIX: use raw strings for the regex fragments -- '\.' in a plain
        # string literal is an invalid escape sequence (DeprecationWarning
        # today, SyntaxError in a future Python).  The resulting value is
        # byte-identical.
        config['topic_prefix_re'] = config['topic_prefix'].replace('.', r'\.')\
            + r'\.[^\W\d_]+'

    __cache = config
    return config
3.93809
3.980494
0.989347
def build_parser(declared_args, doc, config=None, prog=None):
    """Return the global :class:`argparse.ArgumentParser` used by all fedmsg
    commands.

    Extra arguments can be supplied with the `declared_args` argument.
    """
    config = config or copy.deepcopy(defaults)
    prog = prog or sys.argv[0]

    parser = argparse.ArgumentParser(
        description=textwrap.dedent(doc),
        formatter_class=argparse.RawDescriptionHelpFormatter,
        prog=prog,
    )

    # The built-in options, declared as (flag, dest, extra kwargs) so they can
    # be registered in one pass below.
    builtin_options = [
        ('--io-threads', 'io_threads',
         dict(type=int,
              help="Number of io threads for 0mq to use",
              default=config['io_threads'])),
        ('--topic-prefix', 'topic_prefix',
         dict(type=str,
              help="Prefix for the topic of each message sent.",
              default=config['topic_prefix'])),
        ('--post-init-sleep', 'post_init_sleep',
         dict(type=float,
              help="Number of seconds to sleep after initializing.",
              default=config['post_init_sleep'])),
        ('--config-filename', 'config_filename',
         dict(help="Config file to use.",
              default=None)),
        ('--print-config', 'print_config',
         dict(help='Simply print out the configuration and exit. No action taken.',
              default=False,
              action='store_true')),
        ('--timeout', 'timeout',
         dict(help="Timeout in seconds for any blocking zmq operations.",
              type=float,
              default=config['timeout'])),
        ('--high-water-mark', 'high_water_mark',
         dict(help="Limit on the number of messages in the queue before blocking.",
              type=int,
              default=config['high_water_mark'])),
        ('--linger', 'zmq_linger',
         dict(help="Number of milliseconds to wait before timing out connections.",
              type=int,
              default=config['zmq_linger'])),
    ]
    for flag, dest, kwargs in builtin_options:
        parser.add_argument(flag, dest=dest, **kwargs)

    for args, kwargs in declared_args:
        # Replace the hard-coded extra_args default with the config file value
        # (if it exists)
        if 'dest' in kwargs and 'default' in kwargs:
            kwargs['default'] = config.get(kwargs['dest'], kwargs['default'])

        # Having slurped smart defaults from the config file, add the CLI arg.
        parser.add_argument(*args, **kwargs)

    return parser
2.603191
2.627614
0.990705
try: return sorted([ os.path.join(directory, fname) for fname in os.listdir(directory) if fname.endswith('.py') ]) except OSError: return []
def _gather_configs_in(directory)
Return list of fully qualified python filenames in the given dir
2.774453
2.428622
1.142398
for k in set(d1).intersection(d2): if isinstance(d1[k], dict) and isinstance(d2[k], dict): d1[k] = _recursive_update(d1[k], d2[k]) else: d1[k] = d2[k] for k in set(d2).difference(d1): d1[k] = d2[k] return d1
def _recursive_update(d1, d2)
Little helper function that does what d1.update(d2) does, but works nice and recursively with dicts of dicts of dicts. It's not necessarily very efficient.
1.508585
1.508301
1.000188
def execfile(fname, variables):
    """This is builtin in python2, but we have to roll our own on py3."""
    with open(fname) as f:
        source = f.read()
    # Compile with the real filename so tracebacks point at the config file.
    exec(compile(source, fname, 'exec'), variables)
1.896469
1.828385
1.037238
def get(self, *args, **kw):
    """Load the configuration if necessary and forward the call to the parent."""
    # Lazily load configuration on first access.
    if not self._loaded:
        self.load_config()
    return super(FedmsgConfig, self).get(*args, **kw)
5.300662
3.792543
1.397654
def copy(self, *args, **kw):
    """Load the configuration if necessary and forward the call to the parent."""
    # Lazily load configuration on first access.
    if not self._loaded:
        self.load_config()
    return super(FedmsgConfig, self).copy(*args, **kw)
6.190632
4.102142
1.509122
def load_config(self, settings=None):
    """Load the configuration either from the config file, or from the given settings.

    Args:
        settings (dict): If given, the settings are pulled from this dictionary.
            Otherwise, the config file is used.
    """
    self._load_defaults()
    if settings:
        self.update(settings)
    else:
        # Merge every discovered config file, in order.
        for path in _get_config_files():
            self.update(_process_config_file([path]))
    self._loaded = True
    self._validate()
4.125597
4.254018
0.969812
for k, v in self._defaults.items(): self[k] = v['default']
def _load_defaults(self)
Iterate over self._defaults and set all default values on self.
5.990185
3.494332
1.714258
errors = [] for k in self._defaults.keys(): try: validator = self._defaults[k]['validator'] if validator is not None: self[k] = validator(self[k]) except ValueError as e: errors.append('\t{}: {}'.format(k, six.text_type(e))) if errors: raise ValueError( 'Invalid configuration values were set: \n{}'.format('\n'.join(errors)))
def _validate(self)
Run the validators found in self._defaults on all the corresponding values. Raises: ValueError: If the configuration contains an invalid configuration value.
3.329735
2.833667
1.175062
def make_processors(**config):
    """Initialize all of the text processors.

    You'll need to call this once before using any of the other functions in
    this module.

        >>> import fedmsg.config
        >>> import fedmsg.meta
        >>> config = fedmsg.config.load_config([], None)
        >>> fedmsg.meta.make_processors(**config)
        >>> text = fedmsg.meta.msg2repr(some_message_dict, **config)
    """
    global processors

    # If they're already initialized, then fine.
    if processors:
        return

    import pkg_resources
    processors = []
    for processor in pkg_resources.iter_entry_points('fedmsg.meta'):
        try:
            processors.append(processor.load()(_, **config))
        except Exception as e:
            # `Logger.warn` is a deprecated alias; use `Logger.warning`.
            log.warning("Failed to load %r processor." % processor.name)
            log.exception(e)

    # This should always be last
    processors.append(DefaultProcessor(_, **config))

    # By default we have three builtin processors: Default, Logger, and
    # Announce. If these are the only three, then we didn't find any
    # externally provided ones. calls to msg2subtitle and msg2link likely will
    # not work the way the user is expecting.
    if len(processors) == 3:
        log.warning("No fedmsg.meta plugins found. fedmsg.meta.msg2* crippled")
9.106183
8.025514
1.134654
def msg2processor(msg, **config):
    """For a given message return the text processor that can handle it.

    This will raise a :class:`fedmsg.meta.ProcessorsNotInitialized` exception if
    :func:`fedmsg.meta.make_processors` hasn't been called yet.
    """
    for candidate in processors:
        if candidate.handle_msg(msg, **config) is not None:
            return candidate
    # No processor claimed the message; fall back to the catch-all default
    # processor, which is always appended last by make_processors().
    return processors[-1]
4.172051
4.841196
0.861781
def graceful(cls):
    """A decorator to protect against message structure changes.

    Many of our processors expect messages to be in a certain format.  If the
    format changes, they may start to fail and raise exceptions.  This
    decorator is in place to catch and log those exceptions and to gracefully
    return default values.
    """
    def decorate(func):
        @functools.wraps(func)
        def wrapped(msg, **config):
            try:
                return func(msg, **config)
            except KeyError:
                # Unexpected message shape: log it and return an empty default.
                log.exception("%r failed on %r" % (func, msg.get('msg_id')))
                return cls()
        return wrapped
    return decorate
4.007243
3.91761
1.022879
def conglomerate(messages, subject=None, lexers=False, **config):
    """Return a list of messages with some of them grouped into conglomerate
    messages.  Conglomerate messages represent several other messages.

    For example, you might pass this function a list of 40 messages.  38 of
    those are git.commit messages, 1 is a bodhi.update message, and 1 is a
    badge.award message.  This function could return a list of three messages,
    one representing the 38 git commit messages, one representing the
    bodhi.update message, and one representing the badge.award message.

    The ``subject`` argument is optional and will return "subjective"
    representations if possible (see msg2subjective(...)).

    Functionality is provided by fedmsg.meta plugins on a "best effort" basis.
    """
    # First, give every registered processor a chance to do its work
    for processor in processors:
        messages = processor.conglomerate(messages, subject=subject, **config)

    # Then, just fake it for every other ungrouped message.
    for i, message in enumerate(messages):
        # If these were successfully grouped, then skip
        if 'msg_ids' in message:
            continue

        # For ungrouped ones, replace them with a fake conglomerate
        messages[i] = BaseConglomerator.produce_template(
            [message], subject=subject, lexers=lexers, **config)
        # And fill out the fields that fully-implemented conglomerators would
        # normally fill out.
        messages[i].update({
            'link': msg2link(message, **config),
            'subtitle': msg2subtitle(message, **config),
            'subjective': msg2subjective(message, subject=subject, **config),
            'secondary_icon': msg2secondary_icon(message, **config),
        })

    return messages
6.276527
5.948951
1.055065
def msg2repr(msg, processor, **config):
    """Return a human-readable or "natural language" representation of a
    dict-like fedmsg message.

    Think of this as the 'top-most level' function in this module.
    """
    title = msg2title(msg, **config)
    subtitle = processor.subtitle(msg, **config)
    link = processor.link(msg, **config) or ''
    # Format with explicit keywords rather than the fragile **locals() trick.
    return u"{title} -- {subtitle} {link}".format(
        title=title, subtitle=subtitle, link=link)
4.404248
5.032992
0.875076
def msg2long_form(msg, processor, **config):
    """Return a 'long form' text representation of a message.

    For most message, this will just default to the terse subtitle, but for
    some messages a long paragraph-structured block of text may be returned.
    """
    # Fall back to the subtitle when the processor has no long form to offer.
    return (processor.long_form(msg, **config)
            or processor.subtitle(msg, **config))
3.896734
3.398389
1.146642
def msg2usernames(msg, processor=None, legacy=False, **config):
    """Return a set of FAS usernames associated with a message."""
    # Delegate entirely to the processor that handles this message type.
    usernames = processor.usernames(msg, **config)
    return usernames
7.617896
8.212786
0.927565
def msg2agent(msg, processor=None, legacy=False, **config):
    """Return the single username who is the "agent" for an event.

    An "agent" is the one responsible for the event taking place, for example,
    if one person gives karma to another, then both usernames are returned by
    msg2usernames, but only the one who gave the karma is returned by
    msg2agent.

    If the processor registered to handle the message does not provide an
    agent method, then the *first* user returned by msg2usernames is returned
    (whether that is correct or not).  Here we assume that if a processor
    implements `agent`, then it knows what it is doing and we should trust
    that.  But if it does not implement it, we'll try our best guess.

    If there are no users returned by msg2usernames, then None is returned.
    """
    agent_attr = processor.agent
    if agent_attr is not NotImplemented:
        return agent_attr(msg, **config)

    # No agent method: best-effort guess from the usernames set.
    usernames = processor.usernames(msg, **config)
    if usernames:
        # usernames is a set(), which doesn't support indexing.
        return usernames.pop()
    # default to None if we can't find anything
    return None
5.92102
4.517902
1.310568
def msg2subjective(msg, processor, subject, **config):
    """Return a human-readable text representation of a dict-like fedmsg
    message from the subjective perspective of a user.

    For example, if the subject viewing the message is "oddshocks" and the
    message would normally translate into "oddshocks commented on ticket
    #174", it would instead translate into "you commented on ticket #174".
    """
    # Fall back to the ordinary subtitle when no subjective form exists.
    return (processor.subjective(msg, subject, **config)
            or processor.subtitle(msg, **config))
4.021017
4.63658
0.867238
def run_command(self, command, message):
    """Use subprocess; feed the message to our command over stdin.

    Returns:
        int: The command's exit status.
    """
    # SECURITY FIX: the old implementation interpolated the serialized
    # message into a shell line ("echo '%s' | command"), which allowed
    # shell injection through quote characters in message contents.
    # Write the message to the command's stdin ourselves instead.
    proc = subprocess.Popen(
        command,
        shell=True,
        executable='/bin/bash',
        stdin=subprocess.PIPE,
    )
    # `echo` appended a trailing newline; keep that for line-oriented readers.
    payload = fedmsg.encoding.dumps(message) + '\n'
    if not isinstance(payload, bytes):
        payload = payload.encode('utf-8')
    proc.communicate(payload)
    return proc.wait()
6.351041
6.201647
1.024089
def _m2crypto_sign(message, ssldir=None, certname=None, **config):
    """Insert two new fields into the message dict and return it.

    Those fields are:

        - 'signature' - the computed RSA message digest of the JSON repr.
        - 'certificate' - the base64 X509 certificate of the sending host.
    """
    # Stdlib replacement for the removed py2-only str.encode('base64') codec;
    # base64.encodebytes produces the same 76-column, newline-terminated
    # output as the old codec did.
    import base64

    if ssldir is None or certname is None:
        error = "You must set the ssldir and certname keyword arguments."
        raise ValueError(error)

    message['crypto'] = 'x509'

    certificate = M2Crypto.X509.load_cert(
        "%s/%s.crt" % (ssldir, certname)).as_pem()
    # Opening this file requires elevated privileges in stg/prod.
    rsa_private = M2Crypto.RSA.load_key(
        "%s/%s.key" % (ssldir, certname))

    digest = M2Crypto.EVP.MessageDigest('sha1')
    digest.update(fedmsg.encoding.dumps(message))

    signature = rsa_private.sign(digest.digest())

    # PY3 FIX: `dict_items + list` and `bytes.encode('base64')` only worked
    # on Python 2.  Rebuild the dict explicitly and base64-encode via the
    # base64 module instead.
    return dict(
        list(message.items()) + [
            ('signature', base64.encodebytes(signature).decode('ascii')),
            ('certificate', base64.encodebytes(certificate).decode('ascii')),
        ])
3.812599
3.684946
1.034642
def conglomerate(self, messages, **config):
    """Given N messages, return another list that has some of them grouped
    together into a common 'item'.

    Each registered conglomerator object is applied in turn; the telltale
    sign that an entry in the result represents a conglomerate message is the
    presence of the plural ``msg_ids`` field (ungrouped singular messages
    bear a singular ``msg_id`` field instead).  A conglomeration carries
    summary fields such as 'subtitle', 'link', 'icon', 'secondary_icon',
    'start_time', 'end_time', 'human_time', 'usernames', 'packages',
    'topics', 'categories', and the 'msg_ids' mapping of the grouped
    messages.
    """
    result = messages
    for worker in self.conglomerator_objects:
        result = worker.conglomerate(result, **config)
    return result
3.527615
4.956778
0.711675
def handle_msg(self, msg, **config):
    """If we can handle the given message, return the remainder of the topic.

    Returns None if we can't handle the message.
    """
    match = self.__prefix__.match(msg['topic'])
    if match is None:
        # Topic doesn't carry our prefix; we can't handle it.
        return None
    return match.groups()[-1] or ""
11.883046
9.624299
1.234692
def parseExtensionArgs(self, args):
    """Set the state of this request to be that expressed in these
    PAPE arguments

    @param args: The PAPE arguments without a namespace

    @rtype: None

    @raises ValueError: When the max_auth_age is not parseable as
        an integer
    """
    # preferred_auth_policies is a space-separated list of policy URIs
    self.preferred_auth_policies = []
    policies_str = args.get('preferred_auth_policies')
    if policies_str:
        if isinstance(policies_str, bytes):
            policies_str = str(policies_str, encoding="utf-8")
        for uri in policies_str.split(' '):
            # Preserve order but drop duplicate URIs.
            if uri not in self.preferred_auth_policies:
                self.preferred_auth_policies.append(uri)

    # max_auth_age is base-10 integer number of seconds
    max_auth_age_str = args.get('max_auth_age')
    self.max_auth_age = None

    if max_auth_age_str:
        try:
            self.max_auth_age = int(max_auth_age_str)
        except ValueError:
            # NOTE(review): the docstring advertises a ValueError for an
            # unparseable max_auth_age, but the code deliberately swallows it
            # and leaves max_auth_age as None -- confirm which is intended.
            pass
2.307937
2.232494
1.033793
def begin(self, user_url, anonymous=False):
    """Start the OpenID authentication process.  See steps 1-2 in the
    overview at the top of this file.

    @param user_url: Identity URL given by the user.  This method performs a
        textual transformation of the URL to try and make sure it is
        normalized, resolving any redirects the server might issue.
    @type user_url: unicode

    @param anonymous: Whether to make an anonymous request of the OpenID
        provider.  Such a request does not ask for an authorization assertion
        for an OpenID identifier, but may be used with extensions to pass
        other data.
    @type anonymous: bool

    @returns: An object with a method for building a redirect URL to the
        server and for adding extension arguments to the request (see its
        addExtensionArg method).
    @returntype: L{AuthRequest<openid.consumer.consumer.AuthRequest>}

    @raises openid.consumer.discover.DiscoveryFailure: when I fail to find an
        OpenID server for this URL.
    """
    disco = Discovery(self.session, user_url, self.session_key_prefix)
    try:
        service = disco.getNextService(self._discover)
    except fetchers.HTTPFetchingError as why:
        raise DiscoveryFailure('Error fetching XRDS document: %s' % (why.why, ),
                               None)

    if service is None:
        raise DiscoveryFailure('No usable OpenID services found for %s' %
                               (user_url, ), None)
    # Discovery succeeded; continue with the discovered service endpoint.
    return self.beginWithoutDiscovery(service, anonymous)
7.006638
6.143142
1.140563
def beginWithoutDiscovery(self, service, anonymous=False):
    """Start OpenID verification without doing OpenID server discovery.

    This method is used internally by Consumer.begin after discovery is
    performed, and exists to provide an interface for library users needing
    to perform their own discovery.

    @param service: an OpenID service endpoint descriptor.  This object and
        factories for it are found in the L{openid.consumer.discover} module.

    @returns: an OpenID authentication request object.
    @rtype: L{AuthRequest<openid.consumer.consumer.AuthRequest>}
    """
    request = self.consumer.begin(service)
    # Remember the endpoint so the response can be verified later.
    self.session[self._token_key] = request.endpoint
    try:
        request.setAnonymous(anonymous)
    except ValueError as err:
        raise ProtocolError(str(err))
    return request
6.371581
7.586018
0.839911
def _checkReturnTo(self, message, return_to):
    """Check an OpenID message and its openid.return_to value against a
    return_to URL from an application.  Return True on success, False on
    failure.
    """
    # Check the openid.return_to args against args in the original message.
    try:
        self._verifyReturnToArgs(message.toPostArgs())
    except ProtocolError as why:
        logging.exception("Verifying return_to arguments: %s" % (why, ))
        return False

    # Check the return_to base URL against the one in the message.
    msg_return_to = message.getArg(OPENID_NS, 'return_to')

    # The URL scheme, authority, and path MUST be the same between the two
    # URLs -- compare the first three components of the parse results.
    expected = urlparse(urinorm.urinorm(return_to))
    actual = urlparse(urinorm.urinorm(msg_return_to))
    return expected[:3] == actual[:3]
5.192034
4.602441
1.128104
def _verifyDiscoveredServices(self, claimed_id, services, to_match_endpoints):
    """See @L{_discoverAndVerify}"""
    # Search the services resulting from discovery to find one that matches
    # the information from the assertion.
    failure_messages = []
    for endpoint in services:
        for to_match_endpoint in to_match_endpoints:
            try:
                self._verifyDiscoverySingle(endpoint, to_match_endpoint)
            except ProtocolError as why:
                failure_messages.append(str(why))
            else:
                # It matches, so discover verification has succeeded.
                return endpoint

    # Nothing matched: log every mismatch, then fail.
    logging.error('Discovery verification failure for %s' % (claimed_id, ))
    for failure_message in failure_messages:
        logging.error(' * Endpoint mismatch: ' + failure_message)

    raise DiscoveryFailure(
        'No matching endpoint found after discovering %s' % (claimed_id, ),
        None)
5.124781
4.993233
1.026345
def _checkAuth(self, message, server_url):
    """Make a check_authentication request to verify this message.

    @returns: True if the request is valid.
    @rtype: bool
    """
    logging.info('Using OpenID check_authentication')
    request = self._createCheckAuthRequest(message)
    if request is None:
        return False
    try:
        response = self._makeKVPost(request, server_url)
    except (fetchers.HTTPFetchingError, ServerError) as e:
        # Log the first exception argument, as the original code did.
        logging.exception('check_authentication failed: %s' % e.args[0])
        return False
    return self._processCheckAuthResponse(response, server_url)
5.053378
4.696418
1.076007
def _createCheckAuthRequest(self, message):
    """Generate a check_authentication request message given an id_res
    message.
    """
    signed = message.getArg(OPENID_NS, 'signed')
    if signed:
        if isinstance(signed, bytes):
            signed = str(signed, encoding="utf-8")
        for field in signed.split(','):
            logging.info(field)
            # A signed field missing from the message invalidates the request.
            if message.getAliasedArg(field) is None:
                logging.info('Missing signed field %r' % (field, ))
                return None

    check_auth_message = message.copy()
    check_auth_message.setArg(OPENID_NS, 'mode', 'check_authentication')
    return check_auth_message
4.534688
4.345003
1.043656
def _negotiateAssociation(self, endpoint):
    """Make association requests to the server, attempting to create a new
    association.

    @returns: a new association object
    @rtype: L{openid.association.Association}
    """
    # Get our preferred session/association type from the negotiatior.
    assoc_type, session_type = self.negotiator.getAllowedType()

    try:
        return self._requestAssociation(endpoint, assoc_type, session_type)
    except ServerError as why:
        supportedTypes = self._extractSupportedAssociationType(
            why, endpoint, assoc_type)
        if supportedTypes is None:
            return None
        assoc_type, session_type = supportedTypes
        # Attempt to create an association from the assoc_type and
        # session_type that the server told us it supported.
        try:
            return self._requestAssociation(endpoint, assoc_type, session_type)
        except ServerError as why:
            # Do not keep trying, since it rejected the association type
            # that it told us to use.
            logging.error(
                'Server %s refused its suggested association '
                'type: session_type=%s, assoc_type=%s' % (
                    endpoint.server_url, session_type, assoc_type))
            return None
4.582327
4.46674
1.025877
def _getOpenID1SessionType(self, assoc_response):
    """Given an association response message, extract the OpenID 1.X session
    type.

    This function mostly takes care of the 'no-encryption' default behavior
    in OpenID 1.

    If the association type is plain-text, this function will return
    'no-encryption'

    @returns: The association type for this message
    @rtype: str

    @raises KeyError: when the session_type field is absent.
    """
    # If it's an OpenID 1 message, allow session_type to default
    # to None (which signifies "no-encryption")
    session_type = assoc_response.getArg(OPENID1_NS, 'session_type')

    # Handle the differences between no-encryption association
    # responses in OpenID 1 and 2:

    # no-encryption is not really a valid session type for
    # OpenID 1, but we'll accept it anyway, while issuing a
    # warning.
    if session_type == 'no-encryption':
        # BUGFIX: the adjacent string literals used to concatenate to
        # '..."no-encryption"for OpenID 1.X' -- missing space restored.
        logging.warning('OpenID server sent "no-encryption" '
                        'for OpenID 1.X')

    # Missing or empty session type is the way to flag a
    # 'no-encryption' response.  Change the session type to
    # 'no-encryption' so that it can be handled in the same
    # way as OpenID 2 'no-encryption' responses.
    elif session_type == '' or session_type is None:
        session_type = 'no-encryption'

    return session_type
5.327819
4.930005
1.080692
def normalizeURL(url):
    """Normalize a URL, converting normalization failures to DiscoveryFailure"""
    try:
        normalized = urinorm.urinorm(url)
    except ValueError as why:
        raise DiscoveryFailure('Normalizing identifier: %s' % (why, ), None)
    # Strip any fragment component from the normalized URL.
    return urllib.parse.urldefrag(normalized)[0]
6.714171
5.449929
1.231974
def getDisplayIdentifier(self):
    """Return the display_identifier if set, else return the claimed_id."""
    if self.display_identifier is not None:
        return self.display_identifier
    if self.claimed_id is None:
        return None
    # Strip any fragment from the claimed identifier for display purposes.
    return urllib.parse.urldefrag(self.claimed_id)[0]
3.641801
2.235136
1.629343
def parseXRDS(text):
    """Parse the given text as an XRDS document.

    @return: ElementTree containing an XRDS document

    @raises XRDSError: When there is a parse error or the document does
        not contain an XRDS.
    """
    try:
        # lxml prefers to parse bytestrings, and occasionally chokes on a
        # combination of text strings and declared XML encodings -- see
        # https://github.com/necaris/python3-openid/issues/19
        # To avoid this, we ensure that the 'text' we're parsing is actually
        # a bytestring
        bytestring = text.encode('utf8') if isinstance(text, str) else text
        element = SafeElementTree.XML(bytestring)
    except (SystemExit, MemoryError, AssertionError, ImportError):
        # Never mask truly-fatal conditions as XRDS parse errors.
        raise
    except Exception as why:
        # Wrap any other parser failure, keeping the cause on `.reason`.
        exc = XRDSError('Error parsing document as XML')
        exc.reason = why
        raise exc
    else:
        tree = ElementTree.ElementTree(element)
        if not isXRDS(tree):
            raise XRDSError('Not an XRDS document')

        return tree
5.751376
5.810698
0.989791
def prioSort(elements):
    """Sort a list of elements that have priority attributes"""
    # Randomize the services before sorting so that equal priority elements
    # are load-balanced.  Note: shuffles the caller's list in place, as the
    # original implementation did.
    random.shuffle(elements)
    return sorted(elements, key=getPriority)
9.444946
9.438438
1.00069
def importSafeElementTree(module_names=None):
    """Find a working ElementTree implementation that is not vulnerable to
    XXE, using `defusedxml`.

    >>> XXESafeElementTree = importSafeElementTree()

    @param module_names: The names of modules to try to use as a safe
        ElementTree.  Defaults to C{L{xxe_safe_elementtree_modules}}

    @returns: An ElementTree module that is not vulnerable to XXE.
    """
    names = xxe_safe_elementtree_modules if module_names is None else module_names
    try:
        return importElementTree(names)
    except ImportError:
        raise ImportError('Unable to find a ElementTree module '
                          'that is not vulnerable to XXE. '
                          'Tried importing %r' % (names, ))
4.397773
3.573386
1.230702
def appendArgs(url, args):
    """Append query arguments to a HTTP(s) URL.  If the URL already has query
    arguments, these arguments will be added, and the existing arguments will
    be preserved.  Duplicate arguments will not be detected or collapsed
    (both will appear in the output).

    @param url: The url to which the arguments will be appended
    @type url: str

    @param args: The query arguments to add to the URL.  If a dictionary is
        passed, the items will be sorted before appending them to the URL.
        If a sequence of pairs is passed, the order of the sequence will be
        preserved.
    @type args: A dictionary from string to string, or a sequence of pairs of
        strings.

    @returns: The URL with the parameters added
    @rtype: str
    """
    if hasattr(args, 'items'):
        pairs = sorted(args.items())
    else:
        pairs = list(args)

    if not isinstance(url, str):
        url = str(url, encoding="utf-8")

    if not pairs:
        return url

    sep = '&' if '?' in url else '?'

    # Map unicode to UTF-8 if present. Do not make any assumptions
    # about the encodings of plain bytes (str).
    encoded = []
    for key, value in pairs:
        if not isinstance(key, bytes):
            key = key.encode('utf-8')
        if not isinstance(value, bytes):
            value = value.encode('utf-8')
        encoded.append((key, value))

    return '%s%s%s' % (url, sep, urlencode(encoded))
2.543448
2.56167
0.992887
def toBase64(s):
    """Represent string / bytes s as base64, omitting newlines"""
    data = s.encode("utf-8") if isinstance(s, str) else s
    # b2a_base64 appends a trailing newline; strip it.
    return binascii.b2a_base64(data)[:-1]
2.321025
2.19357
1.058104
def cleanup(self):
    """Remove expired associations.

    @return: tuple of (removed associations, remaining associations)
    """
    # Collect first, then delete, so we never mutate the dict mid-iteration.
    expired = [handle for handle, assoc in self.assocs.items()
               if assoc.expiresIn == 0]
    for handle in expired:
        del self.assocs[handle]
    return len(expired), len(self.assocs)
3.716889
3.169791
1.172597
mode = ax_args.get('mode') if isinstance(mode, bytes): mode = str(mode, encoding="utf-8") if mode != self.mode: if not mode: raise NotAXMessage() else: raise AXError('Expected mode %r; got %r' % (self.mode, mode))
def _checkMode(self, ax_args)
Raise an exception if the mode in the attribute exchange arguments does not match what is expected for this class. @raises NotAXMessage: When there is no mode value in ax_args at all. @raises AXError: When mode does not match.
4.240037
2.858253
1.483437
def getExtensionArgs(self):
    """Get the serialized form of this attribute fetch request.

    @returns: The fetch request message parameters
    @rtype: {unicode:unicode}
    """
    aliases = NamespaceMap()

    required = []
    if_available = []

    ax_args = self._newArgs()

    for type_uri, attribute in self.requested_attributes.items():
        if attribute.alias is None:
            # Auto-generate an alias for this type URI.
            alias = aliases.add(type_uri)
        else:
            # This will raise an exception when the second
            # attribute with the same alias is added. I think it
            # would be better to complain at the time that the
            # attribute is added to this object so that the code
            # that is adding it is identified in the stack trace,
            # but it's more work to do so, and it won't be 100%
            # accurate anyway, since the attributes are
            # mutable. So for now, just live with the fact that
            # we'll learn about the error later.
            #
            # The other possible approach is to hide the error and
            # generate a new alias on the fly. I think that would
            # probably be bad.
            alias = aliases.addAlias(type_uri, attribute.alias)

        if attribute.required:
            required.append(alias)
        else:
            if_available.append(alias)

        # A count of 1 is the default and is omitted from the message.
        if attribute.count != 1:
            ax_args['count.' + alias] = str(attribute.count)

        ax_args['type.' + alias] = type_uri

    if required:
        ax_args['required'] = ','.join(required)

    if if_available:
        ax_args['if_available'] = ','.join(if_available)

    return ax_args
5.067802
5.091856
0.995276
def getRequiredAttrs(self):
    """Get the type URIs for all attributes that have been marked as
    required.

    @returns: A list of the type URIs for attributes that have been
        marked as required.
    @rtype: [str]
    """
    return [type_uri
            for type_uri, attribute in self.requested_attributes.items()
            if attribute.required]
5.204438
4.950459
1.051304
def fromOpenIDRequest(cls, openid_request):
    """Extract a FetchRequest from an OpenID message

    @param openid_request: The OpenID authentication request
        containing the attribute fetch request
    @type openid_request: C{L{openid.server.server.CheckIDRequest}}

    @rtype: C{L{FetchRequest}} or C{None}
    @returns: The FetchRequest extracted from the message or None, if
        the message contained no AX extension.

    @raises KeyError: if the AuthRequest is not consistent in its use
        of namespace aliases.

    @raises AXError: When parseExtensionArgs would raise same.

    @see: L{parseExtensionArgs}
    """
    message = openid_request.message
    ax_args = message.getArgs(cls.ns_uri)
    fetch_request = cls()
    try:
        fetch_request.parseExtensionArgs(ax_args)
    except NotAXMessage:
        # No AX extension present in the message at all.
        return None

    if fetch_request.update_url:
        # Update URL must match the openid.realm of the underlying
        # OpenID 2 message.
        realm = message.getArg(OPENID_NS, 'realm',
                               message.getArg(OPENID_NS, 'return_to'))

        if not realm:
            raise AXError(
                "Cannot validate update_url %r against absent realm" %
                (fetch_request.update_url, ))

        if not TrustRoot.parse(realm).validateURL(fetch_request.update_url):
            raise AXError(
                "Update URL %r failed validation against realm %r" %
                (fetch_request.update_url, realm, ))

    return fetch_request
5.575915
5.138097
1.08521
def parseExtensionArgs(self, ax_args):
    """Given attribute exchange arguments, populate this FetchRequest.

    @param ax_args: Attribute Exchange arguments from the request.
        As returned from
        L{Message.getArgs<openid.message.Message.getArgs>}.
    @type ax_args: dict

    @raises KeyError: if the message is not consistent in its use of
        namespace aliases.

    @raises NotAXMessage: If ax_args does not include an Attribute
        Exchange mode.

    @raises AXError: If the data to be parsed does not follow the
        attribute exchange specification. At least when
        'if_available' or 'required' is not specified for a
        particular attribute type.
    """
    # Raises an exception if the mode is not the expected value
    self._checkMode(ax_args)

    aliases = NamespaceMap()

    # First pass: collect every 'type.<alias>' declaration and its
    # optional 'count.<alias>' companion.
    for key, value in ax_args.items():
        if key.startswith('type.'):
            alias = key[5:]
            type_uri = value
            aliases.addAlias(type_uri, alias)

            count_key = 'count.' + alias
            count_s = ax_args.get(count_key)
            if count_s:
                try:
                    count = int(count_s)
                    if count <= 0:
                        raise AXError(
                            "Count %r must be greater than zero, got %r" %
                            (count_key, count_s, ))
                except ValueError:
                    # Not an integer: only the special "unlimited" token
                    # is accepted; anything else is malformed.
                    if count_s != UNLIMITED_VALUES:
                        raise AXError("Invalid count value for %r: %r" %
                                      (count_key, count_s, ))
                    count = count_s
            else:
                # No count given: the spec default is a single value.
                count = 1

            self.add(AttrInfo(type_uri, alias=alias, count=count))

    # Second pass: mark which declared attributes are required.
    required = toTypeURIs(aliases, ax_args.get('required'))

    for type_uri in required:
        self.requested_attributes[type_uri].required = True

    if_available = toTypeURIs(aliases, ax_args.get('if_available'))

    all_type_uris = required + if_available

    # Every declared type URI must be listed as either required or
    # if_available; a stray declaration is a protocol error.
    for type_uri in aliases.iterNamespaceURIs():
        if type_uri not in all_type_uris:
            raise AXError('Type URI %r was in the request but not '
                          'present in "required" or "if_available"' %
                          (type_uri, ))

    self.update_url = ax_args.get('update_url')
3.433506
3.111979
1.103319
def findLinksRel(link_attrs_list, target_rel):
    """Filter the list of link attributes on whether it has target_rel
    as a relationship.

    @param link_attrs_list: link attribute dicts to filter
    @param target_rel: the rel value to match
    @returns: the matching link attribute entries, in order
    """
    # XXX: TESTME
    # A list comprehension replaces the former lambda + filter() pair
    # (assigning a lambda to a name is a PEP 8 E731 violation).
    return [attrs for attrs in link_attrs_list
            if linkHasRel(attrs, target_rel)]
8.380293
7.76529
1.079199
def txn_getAssociation(self, server_url, handle=None):
    """Get the most recent association that has been set for this
    server URL and handle.

    str -> NoneType or Association
    """
    # With a handle, fetch exactly that association; otherwise fetch
    # every association for the server URL.
    if handle is not None:
        self.db_get_assoc(server_url, handle)
    else:
        self.db_get_assocs(server_url)

    rows = self.cur.fetchall()
    if len(rows) == 0:
        return None
    else:
        associations = []
        for values in rows:
            values = list(values)
            # Column 1 holds the serialized association blob.
            values[1] = self.blobDecode(values[1])
            assoc = Association(*values)
            if assoc.expiresIn == 0:
                # Expired: garbage-collect it as we go.
                self.txn_removeAssociation(server_url, assoc.handle)
            else:
                associations.append((assoc.issued, assoc))

        if associations:
            # Sort by issue time and return the most recent association.
            associations.sort()
            return associations[-1][1]
        else:
            return None
2.71248
2.833017
0.957453
def _makeResponse(self, urllib2_response):
    """Construct an HTTPResponse from the the urllib response.

    Attempt to decode the response body from bytes to str if the
    necessary information is available.
    """
    resp = HTTPResponse()
    # Cap the amount read to bound memory use on huge responses.
    resp.body = urllib2_response.read(MAX_RESPONSE_KB * 1024)
    resp.final_url = urllib2_response.geturl()
    resp.headers = self._lowerCaseKeys(
        dict(list(urllib2_response.info().items())))

    if hasattr(urllib2_response, 'code'):
        resp.status = urllib2_response.code
    else:
        # Some response objects (e.g. file://) carry no status; assume OK.
        resp.status = 200

    _, extra_dict = self._parseHeaderValue(
        resp.headers.get("content-type", ""))

    # Try to decode the response body to a string, if there's a
    # charset known; fall back to ISO-8859-1 otherwise, since that's
    # what's suggested in HTTP/1.1
    charset = extra_dict.get('charset', 'latin1')
    try:
        resp.body = resp.body.decode(charset)
    except Exception:
        # Undecodable body stays as bytes.
        pass

    return resp
4.079539
3.122299
1.306582
def _parseHeaderValue(self, header_value):
    """Parse out a complex header value (such as Content-Type, with a
    value like "text/html; charset=utf-8") into a main value and a
    dictionary of extra information (in this case, 'text/html' and
    {'charset': 'utf8'}).
    """
    main_and_rest = header_value.split(';', 1)
    if len(main_and_rest) == 1:
        # There's no extra info -- return the main value and an empty dict
        return main_and_rest[0], {}

    main_value, rest = main_and_rest
    extra_dict = {}
    for piece in rest.split(';'):
        key, eq, val = piece.partition('=')
        if eq:
            extra_dict[key.strip()] = val.strip()
        # A piece without '=' is malformed; ignore it.
    return main_value, extra_dict
2.954091
2.436797
1.212284
def registerNamespaceAlias(namespace_uri, alias):
    """Registers a (namespace URI, alias) mapping in a global namespace
    alias map.

    Raises NamespaceAliasRegistrationError if either the namespace URI
    or alias has already been registered with a different value. This
    function is required if you want to use a namespace with an OpenID
    1 message.
    """
    global registered_aliases

    # Idempotent: re-registering the identical mapping is allowed.
    if registered_aliases.get(alias) == namespace_uri:
        return

    # PERF/idiom: membership test directly on the values view -- the
    # original wrapped it in an unnecessary list().
    if namespace_uri in registered_aliases.values():
        raise NamespaceAliasRegistrationError(
            'Namespace uri %r already registered' % (namespace_uri, ))

    if alias in registered_aliases:
        raise NamespaceAliasRegistrationError('Alias %r already registered' %
                                              (alias, ))

    registered_aliases[alias] = namespace_uri
2.685005
2.79267
0.961447
def setOpenIDNamespace(self, openid_ns_uri, implicit):
    """Set the OpenID namespace URI used in this message.

    @raises InvalidOpenIDNamespace: if the namespace is not in
        L{Message.allowed_openid_namespaces}
    """
    if isinstance(openid_ns_uri, bytes):
        openid_ns_uri = openid_ns_uri.decode('utf-8')

    if openid_ns_uri not in self.allowed_openid_namespaces:
        raise InvalidOpenIDNamespace(openid_ns_uri)

    # The OpenID namespace gets the null (default) alias.
    self.namespaces.addAlias(openid_ns_uri, NULL_NAMESPACE, implicit)
    self._openid_ns_uri = openid_ns_uri
2.812622
2.561458
1.098055
def toPostArgs(self):
    """Return all arguments with openid. in front of namespaced arguments.

    @return: dict mapping 'openid.*' keys to unicode values
    """
    args = {}

    # Add namespace definitions to the output
    for ns_uri, alias in self.namespaces.items():
        if self.namespaces.isImplicit(ns_uri):
            # Implicit namespaces are never serialized.
            continue
        if alias == NULL_NAMESPACE:
            ns_key = 'openid.ns'
        else:
            ns_key = 'openid.ns.' + alias
        args[ns_key] = oidutil.toUnicode(ns_uri)

    for (ns_uri, ns_key), value in self.args.items():
        key = self.getKey(ns_uri, ns_key)
        # Normalize every value to unicode text for the POST body.
        args[key] = oidutil.toUnicode(value)

    return args
4.752281
4.647633
1.022517
def toArgs(self):
    """Return all namespaced arguments, failing if any non-namespaced
    arguments exist.
    """
    # FIXME - undocumented exception
    stripped = {}
    for key, value in self.toPostArgs().items():
        if key.startswith('openid.'):
            # Drop the 'openid.' prefix (7 characters).
            stripped[key[7:]] = value
        else:
            raise ValueError(
                'This message can only be encoded as a POST, because it '
                'contains arguments that are not prefixed with "openid."')
    return stripped
6.1434
5.713668
1.075211
def toFormMarkup(self, action_url, form_tag_attrs=None, submit_text="Continue"):
    """Generate HTML form markup that contains the values in this
    message, to be HTTP POSTed as x-www-form-urlencoded UTF-8.

    @param action_url: The URL to which the form will be POSTed
    @type action_url: str

    @param form_tag_attrs: Dictionary of attributes to be added to the
        form tag. 'accept-charset' and 'enctype' have defaults that
        can be overridden. If a value is supplied for 'action' or
        'method', it will be replaced.
    @type form_tag_attrs: {unicode: unicode}

    @param submit_text: The text that will appear on the submit button
        for this form.
    @type submit_text: unicode

    @returns: A string containing (X)HTML markup for a form that
        encodes the values in this Message object.
    @rtype: str
    """
    if ElementTree is None:
        raise RuntimeError('This function requires ElementTree.')

    assert action_url is not None

    form = ElementTree.Element('form')

    # Caller-supplied attributes go first so the mandatory ones below
    # always win for 'action'/'method'.
    for name, attr in (form_tag_attrs or {}).items():
        form.attrib[name] = attr

    form.attrib['action'] = oidutil.toUnicode(action_url)
    form.attrib['method'] = 'post'
    form.attrib['accept-charset'] = 'UTF-8'
    form.attrib['enctype'] = 'application/x-www-form-urlencoded'

    for name, value in self.toPostArgs().items():
        form.append(ElementTree.Element('input', {
            'type': 'hidden',
            'name': oidutil.toUnicode(name),
            'value': oidutil.toUnicode(value),
        }))

    form.append(ElementTree.Element('input', {
        'type': 'submit',
        'value': oidutil.toUnicode(submit_text),
    }))

    return ElementTree.tostring(form, encoding='utf-8').decode('utf-8')
2.045956
2.121102
0.964572
def toURLEncoded(self):
    """Generate an x-www-urlencoded string"""
    # Sort for a deterministic, canonical parameter order.
    pairs = self.toPostArgs().items()
    return urllib.parse.urlencode(sorted(pairs))
7.74025
8.251832
0.938004
def _fixNS(self, namespace):
    """Convert an input value into the internally used values of this
    object

    @param namespace: The string or constant to convert
    @type namespace: str or unicode or BARE_NS or OPENID_NS
    """
    if isinstance(namespace, bytes):
        namespace = namespace.decode('utf-8')

    if namespace == OPENID_NS:
        # The OPENID_NS sentinel resolves to the message's actual URI.
        if self._openid_ns_uri is None:
            raise UndefinedOpenIDNamespace('OpenID namespace not set')
        namespace = self._openid_ns_uri

    if namespace != BARE_NS and not isinstance(namespace, str):
        raise TypeError(
            "Namespace must be BARE_NS, OPENID_NS or a string. got %r" %
            (namespace, ))

    if namespace != BARE_NS and ':' not in namespace:
        warnings.warn(
            'OpenID 2.0 namespace identifiers SHOULD be URIs. Got %r' %
            (namespace, ), DeprecationWarning)

        if namespace == 'sreg':
            # Transparently upgrade the legacy 'sreg' shorthand.
            warnings.warn(
                'Using %r instead of "sreg" as namespace' % (SREG_URI, ),
                DeprecationWarning, )
            return SREG_URI

    return namespace
3.979426
3.537552
1.124909
def getArgs(self, namespace):
    """Get the arguments that are defined for this namespace URI

    @returns: mapping from namespaced keys to values
    @returntype: dict of {str:bytes}
    """
    namespace = self._fixNS(namespace)
    result = {}
    for (pair_ns, pair_key), pair_value in self.args.items():
        if pair_ns != namespace:
            continue
        # Normalize stored bytes to text for the caller.
        key = pair_key.decode('utf-8') if isinstance(pair_key, bytes) \
            else pair_key
        val = pair_value.decode('utf-8') if isinstance(pair_value, bytes) \
            else pair_value
        result[key] = val
    return result
2.767943
2.65951
1.040772
def setArg(self, namespace, key, value):
    """Set a single argument in this namespace"""
    assert key is not None
    assert value is not None
    namespace = self._fixNS(namespace)
    # Keep internal storage consistently str -> str.
    if isinstance(value, bytes):
        value = value.decode('utf-8')
    self.args[(namespace, key)] = value
    if namespace is not BARE_NS:
        self.namespaces.add(namespace)
6.670646
6.509933
1.024687
def addAlias(self, namespace_uri, desired_alias, implicit=False):
    """Add an alias from this namespace URI to the desired alias

    @returns: the alias that was assigned
    @raises KeyError: if the alias or namespace URI is already mapped
        to a conflicting value
    """
    if isinstance(namespace_uri, bytes):
        namespace_uri = namespace_uri.decode('utf-8')
    # Check that desired_alias is not an openid protocol field as
    # per the spec.
    assert desired_alias not in OPENID_PROTOCOL_FIELDS, \
        "%r is not an allowed namespace alias" % (desired_alias,)

    # Check that desired_alias does not contain a period as per
    # the spec.
    if isinstance(desired_alias, str):
        assert '.' not in desired_alias, \
            "%r must not contain a dot" % (desired_alias,)

    # Check that there is not a namespace already defined for
    # the desired alias
    current_namespace_uri = self.alias_to_namespace.get(desired_alias)
    if (current_namespace_uri is not None and
            current_namespace_uri != namespace_uri):
        fmt = ('Cannot map %r to alias %r. '
               '%r is already mapped to alias %r')
        raise KeyError(fmt % (namespace_uri, desired_alias,
                              current_namespace_uri, desired_alias))

    # Check that there is not already a (different) alias for
    # this namespace URI
    alias = self.namespace_to_alias.get(namespace_uri)
    if alias is not None and alias != desired_alias:
        fmt = ('Cannot map %r to alias %r. '
               'It is already mapped to alias %r')
        raise KeyError(fmt % (namespace_uri, desired_alias, alias))

    # BUGFIX: the original checked ``type(desired_alias) in [str, str]``,
    # a Python 2 (str, unicode) leftover with a duplicated entry;
    # isinstance() is the correct Python 3 check.
    assert (desired_alias == NULL_NAMESPACE or
            isinstance(desired_alias, str)), repr(desired_alias)
    assert namespace_uri not in self.implicit_namespaces

    self.alias_to_namespace[desired_alias] = namespace_uri
    self.namespace_to_alias[namespace_uri] = desired_alias
    if implicit:
        self.implicit_namespaces.append(namespace_uri)
    return desired_alias
2.591302
2.574132
1.00667
def _appendArgs(url, args):
    """Append some arguments to an HTTP query.

    @param url: the base URL
    @param args: a mapping or a sequence of (key, value) pairs; the
        pairs are appended in sorted order
    @returns: the URL with the encoded arguments appended
    """
    # to be merged with oidutil.appendArgs when we combine the projects.
    if hasattr(args, 'items'):
        args = sorted(args.items())
    else:
        # BUGFIX: previously this called args.sort(), mutating the
        # caller's list in place; sorted() works on a copy.
        args = sorted(args)

    if len(args) == 0:
        return url

    # According to XRI Resolution section "QXRI query parameters":
    #
    # Use '&' only when the URL already carries a query component.
    if '?' in url.rstrip('?'):
        sep = '&'
    else:
        sep = '?'

    return '%s%s%s' % (url, sep, urlencode(args))
8.272598
7.51008
1.101533
def getOpenIDStore(filestore_path, table_prefix):
    """Returns an OpenID association store object based on the database
    engine chosen for this Django application.

    * If no database engine is chosen, a filesystem-based store will be
      used whose path is filestore_path.
    * If a database engine is chosen, a store object for that database
      type will be returned.
    * If the chosen engine is not supported by the OpenID library,
      raise ImproperlyConfigured.
    * If a database store is used, this will create the tables
      necessary to use it.  The table names will be prefixed with
      table_prefix.  DO NOT use the same table prefix for both an
      OpenID consumer and an OpenID server in the same database.

    The result of this function should be passed to the Consumer
    constructor as the store parameter.
    """
    db_engine = settings.DATABASES['default']['ENGINE']
    if not db_engine:
        # No database configured: fall back to the filesystem store.
        return FileOpenIDStore(filestore_path)

    # Possible side-effect: create a database connection if one isn't
    # already open.
    connection.cursor()

    # Create table names to specify for SQL-backed stores.
    tablenames = {
        'associations_table': table_prefix + 'openid_associations',
        'nonces_table': table_prefix + 'openid_nonces',
    }

    # Map Django backend paths to python-openid SQL store classes.
    types = {
        'django.db.backends.postgresql_psycopg2': sqlstore.PostgreSQLStore,
        'django.db.backends.mysql': sqlstore.MySQLStore,
        'django.db.backends.sqlite3': sqlstore.SQLiteStore,
    }

    if db_engine not in types:
        raise ImproperlyConfigured(
            "Database engine %s not supported by OpenID library" % db_engine)

    s = types[db_engine](connection.connection, **tablenames)
    try:
        s.createTables()
    except (SystemExit, KeyboardInterrupt, MemoryError):
        raise
    except:
        # XXX This is not the Right Way to do this, but because the
        # underlying database implementation might differ in behavior
        # at this point, we can't reliably catch the right
        # exception(s) here. Ideally, the SQL store in the OpenID
        # library would catch exceptions that it expects and fail
        # silently, but that could be bad, too. More ideally, the SQL
        # store would not attempt to create tables it knows already
        # exists.
        pass

    return s
4.993119
4.528459
1.102609
def renderXRDS(request, type_uris, endpoint_urls):
    """Render an XRDS page with the specified type URIs and endpoint
    URLs in one service block, and return a response with the
    appropriate content-type.
    """
    context = {'type_uris': type_uris, 'endpoint_urls': endpoint_urls}
    response = render_to_response(
        'xrds.xml', context,
        context_instance=RequestContext(request))
    # XRDS documents need their own media type for Yadis discovery.
    response['Content-Type'] = YADIS_CONTENT_TYPE
    return response
2.226794
2.328959
0.956133
def _pct_escape_handler(err):
    """Encoding error handler that does percent-escaping of Unicode, to
    be used with codecs.register_error

    TODO: replace use of this with urllib.parse.quote as appropriate
    """
    unencodable = err.object[err.start:err.end]
    # Resume encoding just past the span we replaced.
    return ''.join(_pct_encoded_replacements(unencodable)), err.end
7.608459
2.834762
2.683985
def seqToKV(seq, strict=False):
    """Represent a sequence of pairs of strings as newline-terminated
    key:value pairs. The pairs are generated in the order given.

    @param seq: The pairs
    @type seq: [(str, (unicode|str))]

    @param strict: If True, raise KVFormError on recoverable problems
        (non-string keys/values, surrounding whitespace) instead of
        only logging a warning.

    @return: A string representation of the sequence
    @rtype: bytes
    """
    def err(msg):
        # Recoverable problem: raise in strict mode, warn otherwise.
        formatted = 'seqToKV warning: %s: %r' % (msg, seq)
        if strict:
            raise KVFormError(formatted)
        else:
            logging.warning(formatted)

    lines = []
    for k, v in seq:
        if isinstance(k, bytes):
            k = k.decode('utf-8')
        elif not isinstance(k, str):
            err('Converting key to string: %r' % k)
            k = str(k)

        # Newlines and colons in a key would corrupt the key:value
        # framing, so these are always fatal regardless of ``strict``.
        if '\n' in k:
            raise KVFormError(
                'Invalid input for seqToKV: key contains newline: %r' %
                (k, ))

        if ':' in k:
            raise KVFormError(
                'Invalid input for seqToKV: key contains colon: %r' %
                (k, ))

        if k.strip() != k:
            err('Key has whitespace at beginning or end: %r' % (k, ))

        if isinstance(v, bytes):
            v = v.decode('utf-8')
        elif not isinstance(v, str):
            err('Converting value to string: %r' % (v, ))
            v = str(v)

        # A newline in a value would start a spurious new record.
        if '\n' in v:
            raise KVFormError(
                'Invalid input for seqToKV: value contains newline: %r' %
                (v, ))

        if v.strip() != v:
            err('Value has whitespace at beginning or end: %r' % (v, ))

        lines.append(k + ':' + v + '\n')

    return ''.join(lines).encode('utf-8')
1.999986
2.013025
0.993523
def iriToURI(iri):
    """Transform an IRI to a URI by escaping unicode."""
    # According to RFC 3987, section 3.1, "Mapping of IRIs to URIs"
    if isinstance(iri, bytes):
        iri = iri.decode('utf-8')
    # Non-ASCII characters are percent-escaped by the module's
    # registered 'oid_percent_escape' codec error handler.
    return iri.encode('ascii', errors='oid_percent_escape').decode()
6.184165
5.726479
1.079925
def urinorm(uri):
    """Normalize a URI

    Lowercases the scheme and host, strips default ports, decodes
    unreserved percent-escapes, IDNA-encodes non-ASCII hosts and
    removes dot path segments.

    @raises ValueError: if the input is not an absolute HTTP(S) URI.
    """
    # TODO: use urllib.parse instead of these complex regular expressions
    if isinstance(uri, bytes):
        uri = str(uri, encoding='utf-8')

    # IRI -> URI: percent-escape non-ASCII via the registered handler.
    uri = uri.encode('ascii', errors='oid_percent_escape').decode('utf-8')
    # _escapeme_re.sub(_pct_escape_unicode, uri).encode('ascii').decode()
    illegal_mo = uri_illegal_char_re.search(uri)
    if illegal_mo:
        raise ValueError('Illegal characters in URI: %r at position %s' %
                         (illegal_mo.group(), illegal_mo.start()))

    uri_mo = uri_re.match(uri)

    # Scheme: required, lowercased, restricted to http/https.
    scheme = uri_mo.group(2)
    if scheme is None:
        raise ValueError('No scheme specified')

    scheme = scheme.lower()
    if scheme not in ('http', 'https'):
        raise ValueError('Not an absolute HTTP or HTTPS URI: %r' % (uri, ))

    authority = uri_mo.group(4)
    if authority is None:
        raise ValueError('Not an absolute URI: %r' % (uri, ))

    authority_mo = authority_re.match(authority)
    if authority_mo is None:
        raise ValueError('URI does not have a valid authority: %r' %
                         (uri, ))

    userinfo, host, port = authority_mo.groups()

    if userinfo is None:
        userinfo = ''

    if '%' in host:
        # Percent-escaped host: decode before IDNA-encoding.
        host = host.lower()
        host = pct_encoded_re.sub(_pct_encoded_replace, host)
        host = host.encode('idna').decode()
    else:
        host = host.lower()

    # Drop the port when it is empty or the scheme's default.
    if port:
        if (port == ':' or (scheme == 'http' and port == ':80') or
                (scheme == 'https' and port == ':443')):
            port = ''
    else:
        port = ''

    authority = userinfo + host + port

    # Path: decode unreserved escapes, then collapse '.'/'..' segments.
    path = uri_mo.group(5)
    path = pct_encoded_re.sub(_pct_encoded_replace_unreserved, path)
    path = remove_dot_segments(path)
    if not path:
        path = '/'

    query = uri_mo.group(6)
    if query is None:
        query = ''

    fragment = uri_mo.group(8)
    if fragment is None:
        fragment = ''

    return scheme + '://' + authority + path + query + fragment
2.561677
2.567906
0.997574
def server(request):
    """Respond to requests for the server's primary web page."""
    context = {
        'user_url': getViewURL(request, idPage),
        'server_xrds_url': getViewURL(request, idpXrds),
    }
    return render_to_response(
        'server/index.html', context,
        context_instance=RequestContext(request))
6.706749
6.716787
0.998506
def displayResponse(request, openid_response):
    """Display an OpenID response.

    Errors will be displayed directly to the user; successful responses
    and other protocol-level messages will be sent using the proper
    mechanism (i.e., direct response, redirection, etc.).
    """
    oid_server = getServer(request)

    # Encode the response into something that is renderable.
    try:
        webresponse = oid_server.encodeResponse(openid_response)
    except EncodingError as why:
        # If it couldn't be encoded, display an error.
        text = why.response.encodeToKVForm()
        return render_to_response(
            'server/endpoint.html', {'error': cgi.escape(text)},
            context_instance=RequestContext(request))

    # Construct the appropriate django framework response.
    django_response = http.HttpResponse(webresponse.body)
    django_response.status_code = webresponse.code
    for header, value in webresponse.headers.items():
        django_response[header] = value

    return django_response
5.422273
5.363452
1.010967
def _removeIfPresent(filename):
    """Attempt to remove a file, returning whether the file existed at
    the time of the call.

    str -> bool
    """
    try:
        os.unlink(filename)
    except OSError as why:
        if why.errno == ENOENT:
            # Someone beat us to it, but it's gone, so that's OK
            # (return real booleans now, as the docstring promises;
            # True/False compare equal to the former 1/0).
            return False
        raise
    # File was present
    return True
3.947016
3.910111
1.009438
def storeAssociation(self, server_url, association):
    """Store an association in the association directory.

    Writes the serialized association to a temporary file first and
    then renames it into place, so readers never observe a partially
    written association.

    (str, Association) -> NoneType
    """
    association_s = association.serialize()  # NOTE: UTF-8 encoded bytes
    filename = self.getAssociationFilename(server_url, association.handle)
    tmp_file, tmp = self._mktemp()

    try:
        try:
            tmp_file.write(association_s)
            # Flush to disk before the rename to guarantee durability.
            os.fsync(tmp_file.fileno())
        finally:
            tmp_file.close()

        try:
            os.rename(tmp, filename)
        except OSError as why:
            if why.errno != EEXIST:
                raise

            # We only expect EEXIST to happen only on Windows. It's
            # possible that we will succeed in unlinking the existing
            # file, but not in putting the temporary file in place.
            try:
                os.unlink(filename)
            except OSError as why:
                if why.errno == ENOENT:
                    pass
                else:
                    raise

            # Now the target should not exist. Try renaming again,
            # giving up if it fails.
            os.rename(tmp, filename)
    except:
        # If there was an error, don't leave the temporary file
        # around.
        _removeIfPresent(tmp)
        raise
4.072557
4.056656
1.00392
def doVerify(self):
    """Process the form submission, initating OpenID verification.

    Reads the submitted identifier and option checkboxes, starts
    discovery via the consumer library, and either redirects the
    browser to the provider or writes an auto-submitting POST form.
    """
    # First, make sure that the user entered something
    openid_url = self.query.get('openid_identifier')
    if not openid_url:
        self.render(
            'Enter an OpenID Identifier to verify.',
            css_class='error',
            form_contents=openid_url)
        return

    # Form checkboxes controlling the verification options.
    immediate = 'immediate' in self.query
    use_sreg = 'use_sreg' in self.query
    use_pape = 'use_pape' in self.query
    use_stateless = 'use_stateless' in self.query

    oidconsumer = self.getConsumer(stateless=use_stateless)
    try:
        request = oidconsumer.begin(openid_url)
    except consumer.DiscoveryFailure as exc:
        fetch_error_string = 'Error in discovery: %s' % (
            cgi.escape(str(exc)))
        self.render(
            fetch_error_string,
            css_class='error',
            form_contents=openid_url)
    else:
        if request is None:
            msg = 'No OpenID services found for <code>%s</code>' % (
                cgi.escape(openid_url), )
            self.render(msg, css_class='error', form_contents=openid_url)
        else:
            # Then, ask the library to begin the authorization.
            # Here we find out the identity server that will verify the
            # user's identity, and get a token that allows us to
            # communicate securely with the identity server.
            if use_sreg:
                self.requestRegistrationData(request)

            if use_pape:
                self.requestPAPEDetails(request)

            trust_root = self.server.base_url
            return_to = self.buildURL('process')
            if request.shouldSendRedirect():
                # Small (OpenID 1.x style) message: plain HTTP redirect.
                redirect_url = request.redirectURL(
                    trust_root, return_to, immediate=immediate)
                self.send_response(302)
                self.send_header('Location', redirect_url)
                self.writeUserHeader()
                self.end_headers()
            else:
                # Large message: render an auto-submitting POST form.
                form_html = request.htmlMarkup(
                    trust_root,
                    return_to,
                    form_tag_attrs={'id': 'openid_message'},
                    immediate=immediate)

                self.wfile.write(bytes(form_html, 'utf-8'))
3.643541
3.498474
1.041466
def buildURL(self, action, **query):
    """Build a URL relative to the server base_url, with the given
    query parameters added.
    """
    absolute = urllib.parse.urljoin(self.server.base_url, action)
    return appendArgs(absolute, query)
7.341566
5.467163
1.342847
def answer(self, assoc):
    """Respond to this request with an X{association}.

    @param assoc: The association to send back.
    @type assoc: L{openid.association.Association}

    @returns: A response with the association information, encrypted
        to the consumer's X{public key} if appropriate.
    @returntype: L{OpenIDResponse}
    """
    response = OpenIDResponse(self)
    response.fields.updateArgs(OPENID_NS, {
        'expires_in': str(assoc.expiresIn),
        'assoc_type': self.assoc_type,
        'assoc_handle': assoc.handle,
    })
    response.fields.updateArgs(OPENID_NS,
                               self.session.answer(assoc.secret))

    # The session type "no-encryption" did not have a name in OpenID
    # v1, it was just omitted; only emit session_type otherwise.
    omit_session_type = (self.session.session_type == 'no-encryption'
                         and self.message.isOpenID1())
    if not omit_session_type:
        response.fields.setArg(OPENID_NS, 'session_type',
                               self.session.session_type)

    return response
5.575058
5.280151
1.055852
def whichEncoding(self):
    """How should I be encoded?

    @returns: one of ENCODE_URL, ENCODE_HTML_FORM, or ENCODE_KVFORM.

    @change: 2.1.0 added the ENCODE_HTML_FORM response.
    """
    # Non-browser (direct) requests always use key-value form.
    if self.request.mode not in BROWSER_REQUEST_MODES:
        return ENCODE_KVFORM

    # OpenID 2 messages too long for a GET redirect go in an HTML form.
    if (self.fields.getOpenIDNamespace() == OPENID2_NS and
            len(self.encodeToURL()) > OPENID1_URL_LIMIT):
        return ENCODE_HTML_FORM

    return ENCODE_URL
8.4772
5.404269
1.568612
def _generateAlias(self):
    """Return an unused auth level alias"""
    candidates = ('cust%d' % (n, ) for n in range(1000))
    for candidate in candidates:
        if candidate not in self.auth_level_aliases:
            return candidate
    raise RuntimeError('Could not find an unused alias (tried 1000!)')
5.957505
4.249779
1.401839
def _getAlias(self, auth_level_uri):
    """Return the alias for the specified auth level URI.

    @raises KeyError: if no alias is defined
    """
    for alias, uri in self.auth_level_aliases.items():
        if uri == auth_level_uri:
            return alias
    raise KeyError(auth_level_uri)
3.249266
3.417576
0.950752
def mkCompoundFilter(parts):
    """Create a filter out of a list of filter-like things

    Used by mkFilter

    @param parts: list of filter, endpoint, callable or list of any of
        these
    """
    # Separate into a list of callables and a list of filter objects
    transformers = []
    filters = []
    for subfilter in parts:
        try:
            subfilter = list(subfilter)
        except TypeError:
            # If it's not an iterable
            if hasattr(subfilter, 'getServiceEndpoints'):
                # It's a full filter
                filters.append(subfilter)
            elif hasattr(subfilter, 'fromBasicServiceEndpoint'):
                # It's an endpoint object, so put its endpoint
                # conversion attribute into the list of endpoint
                # transformers
                transformers.append(subfilter.fromBasicServiceEndpoint)
            elif callable(subfilter):
                # BUGFIX: was isinstance(subfilter, collections.Callable),
                # which raises AttributeError on Python >= 3.10 where the
                # collections ABC aliases were removed; the callable()
                # builtin is the equivalent check.
                transformers.append(subfilter)
            else:
                raise filter_type_error
        else:
            # It's iterable: recursively build a filter from the list.
            filters.append(mkCompoundFilter(subfilter))

    if transformers:
        filters.append(TransformFilterMaker(transformers))

    if len(filters) == 1:
        return filters[0]
    else:
        return CompoundFilter(filters)
4.050684
3.925317
1.031938
def cron(cronline, venusian_category='irc3.plugins.cron'):
    """main decorator

    Registers ``func`` as a cron job on ``cronline`` when the bot scans
    its plugins with venusian.
    """
    def wrapper(func):
        def callback(context, name, ob):
            # Executed at venusian scan time; ``info`` (assigned below)
            # is already bound by the time this runs.
            obj = context.context
            crons = obj.get_plugin(Crons)
            if info.scope == 'class':
                # Plugin method: resolve the bound method on the
                # plugin instance.
                callback = getattr(
                    obj.get_plugin(ob), func.__name__)
            else:
                # Plain function: wrap it with the bot as context.
                callback = irc3.utils.wraps_with_context(func, obj)
            crons.add_cron(cronline, callback)
        info = venusian.attach(func, callback, category=venusian_category)
        return func
    return wrapper
4.818525
4.834486
0.996699
def default_errorhandler(self, f):
    """Decorator that registers handler of default (Werkzeug) HTTP
    errors.

    Note that it might override already defined error handlers.
    """
    handlers = self.error_handler_spec[None]
    for http_code in default_exceptions:
        handlers[http_code] = f
    return f
4.784339
4.981667
0.960389
def _response_mimetype_based_on_accept_header(self):
    """Determines mimetype to response based on Accept header.

    If mimetype is not found, it returns ``None``.
    """
    if not request.accept_mimetypes:
        # No Accept header: the client accepts any media type.
        return self.default_mimetype

    for mimetype, quality in request.accept_mimetypes:
        if mimetype == '*/*':
            # Wildcard: answer with our default format.
            return self.default_mimetype
        if mimetype in self.response_formatters:
            return mimetype

    return None
2.462109
2.435993
1.010721
def make_response(self, rv):
    """Returns response based on Accept header.

    If no Accept header field is present, then it is assumed that the
    client accepts all media types.  This way JSON format will be
    used.

    If an Accept header field is present, and if the server cannot
    send a response which is acceptable according to the combined
    Accept field value, then a 406 (not acceptable) response will be
    sent.
    """
    status = headers = None
    # Flask views may return (body, status, headers) tuples; unpack.
    if isinstance(rv, tuple):
        rv, status, headers = rv + (None,) * (3 - len(rv))

    response_mimetype = self._response_mimetype_based_on_accept_header()
    if response_mimetype is None:
        # Return 406, list of available mimetypes in default format.
        default_formatter = self.response_formatters.get(
            self.default_mimetype
        )
        available_mimetypes = default_formatter(
            mimetypes=list(self.response_formatters)
        )
        rv = self.response_class(
            response=available_mimetypes,
            status=406,
            mimetype=self.default_mimetype,
        )
    elif isinstance(rv, dict):
        # Dict bodies are serialized with the negotiated formatter.
        formatter = self.response_formatters.get(response_mimetype)
        rv = self.response_class(
            response=formatter(**rv),
            mimetype=response_mimetype,
        )

    return super(ResponsiveFlask, self).make_response(
        rv=(rv, status, headers)
    )
2.996004
3.274828
0.914858
def dcc_event(regexp, callback=None, iotype='in', venusian_category='irc3.dcc'):
    """Work like :class:`~irc3.dec.event` but occurs during DCC CHATs"""
    # DCC events are regular events on the 'dcc_'-prefixed io channel.
    dcc_iotype = 'dcc_' + iotype
    return event(regexp, callback=callback, iotype=dcc_iotype,
                 venusian_category=venusian_category)
3.682716
4.48802
0.820566
def extend(func):
    """Allow to extend a bot:

    Create a module with some useful routine:

    .. literalinclude:: ../examples/myextends.py
    ..
        >>> import sys
        >>> sys.path.append('examples')
        >>> from irc3 import IrcBot
        >>> IrcBot.defaults.update(asynchronous=False, testing=True)

    Now you can use those routine in your bot::

        >>> bot = IrcBot()
        >>> bot.include('myextends')
        >>> print(bot.my_usefull_function(1))
        my_usefull_function(*(1,))
        >>> print(bot.my_usefull_method(2))
        my_usefull_method(*(2,))
    """
    def callback(context, name, ob):
        # Executed at venusian scan time with the bot available as
        # ``context.context``; ``info`` (assigned below) is bound then.
        obj = context.context
        if info.scope == 'class':
            # Plugin method: resolve it on the plugin instance.
            f = getattr(obj.get_plugin(ob), func.__name__)
        else:
            f = func
        # Bind the callable to the bot instance via the descriptor
        # protocol so it behaves like a regular bot method.
        setattr(obj, f.__name__, f.__get__(obj, obj.__class__))
    info = venusian.attach(func, callback, category='irc3.extend')
    return func
6.948002
7.832961
0.887021
def notice(self, client, message):
    """send a notice to client"""
    if not (client and message):
        # Nothing to send or no one to send it to.
        return
    # Split to respect the server's maximum line length.
    for chunk in utils.split_message(message, self.config.max_length):
        client.fwrite(':{c.srv} NOTICE {c.nick} :{msg}', msg=chunk)
7.779663
7.420877
1.048348
def client_key_loader(self, f):
    """Registers a function to be called to find a client key.

    Function you set has to take a client id and return a client key::

        @hawk.client_key_loader
        def get_client_key(client_id):
            if client_id == 'Alice':
                return 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn'
            else:
                raise LookupError()

    :param f: The callback for retrieving a client key.
    """
    @wraps(f)
    def wrapped_f(client_id):
        # Package the key lookup into the credential dict mohawk expects.
        return {
            'id': client_id,
            'key': f(client_id),
            'algorithm': current_app.config['HAWK_ALGORITHM'],
        }

    self._client_key_loader_func = wrapped_f
    return wrapped_f
3.170658
2.746477
1.154446
def auth_required(self, view_func):
    """Decorator restricting a view to authenticated users.

    Authentication is skipped entirely when ``HAWK_ENABLED`` is False.

    :param view_func: the Flask view to protect
    :return: the wrapped view function
    """
    @wraps(view_func)
    def wrapped_view_func(*args, **kwargs):
        if current_app.config['HAWK_ENABLED']:
            # Cookie auth is only attempted when explicitly allowed
            # and a session exists; otherwise verify the Hawk
            # signature on the request.
            if current_app.config['HAWK_ALLOW_COOKIE_AUTH'] and session:
                self._auth_by_cookie()
            else:
                self._auth_by_signature()
        return view_func(*args, **kwargs)
    return wrapped_view_func
2.998353
2.689093
1.115005
def _sign_response(self, response):
    """Sign a response if it's possible.

    Adds a ``Server-Authorization`` header when the incoming request
    carried a verifiable Hawk ``Authorization`` header; otherwise the
    response is returned unchanged (best effort, never raises).

    :param response: the outgoing Flask response
    :return: the (possibly signed) response
    """
    if 'Authorization' not in request.headers:
        # Nothing to sign against — request was not Hawk-authenticated.
        return response
    try:
        mohawk_receiver = mohawk.Receiver(
            credentials_map=self._client_key_loader_func,
            request_header=request.headers['Authorization'],
            url=request.url,
            method=request.method,
            content=request.get_data(),
            content_type=request.mimetype,
            accept_untrusted_content=current_app.config['HAWK_ACCEPT_UNTRUSTED_CONTENT'],
            localtime_offset_in_seconds=current_app.config['HAWK_LOCALTIME_OFFSET_IN_SECONDS'],
            timestamp_skew_in_seconds=current_app.config['HAWK_TIMESTAMP_SKEW_IN_SECONDS']
        )
    except mohawk.exc.HawkFail:
        # Deliberate best-effort: an unverifiable header means we just
        # skip signing rather than fail the response.
        return response
    response.headers['Server-Authorization'] = mohawk_receiver.respond(
        content=response.data,
        content_type=response.mimetype
    )
    return response
3.061596
3.010851
1.016854
def create(self, name_or_class, mask, filepath=None, **kwargs):
    """Create a new DCC connection. Return an ``asyncio.Protocol``

    :param name_or_class: a registered DCC type name or protocol class
    :param mask: the peer's mask
    :param filepath: file to send (DCCSend) or receive into (DCCGet)
    :param kwargs: extra protocol options (``port``, ``offset``, ...)
    """
    # Accept either a protocol class or its registered type name.
    if isinstance(name_or_class, type):
        name = name_or_class.type
        protocol = name_or_class
    else:
        name = name_or_class
        protocol = self.protocols[name]
    assert name in DCC_TYPES
    if filepath:
        kwargs.setdefault('limit_rate', self.config['send_limit_rate'])
        kwargs['filepath'] = filepath
        if protocol.type == DCCSend.type:
            kwargs.setdefault('offset', 0)
            kwargs.update(
                filename_safe=slugify(os.path.basename(filepath)),
                filesize=os.path.getsize(filepath),
            )
        elif protocol.type == DCCGet.type:
            # Resume from however much of the file already exists.
            try:
                offset = os.path.getsize(filepath)
            except OSError:
                offset = 0
            kwargs.setdefault('offset', offset)
            kwargs.setdefault('resume', False)
    kwargs.setdefault('port', None)
    f = protocol(
        mask=mask, ip=int(self.bot.ip), bot=self.bot, loop=self.loop,
        **kwargs)
    if kwargs['port']:
        # A port was given: connect out to the listening peer.
        if self.bot.config.get('dcc_sock_factory'):
            # Optional hook to customize socket creation.
            sock_factory = maybedotted(self.bot.config.dcc_sock_factory)
            args = dict(sock=sock_factory(self.bot, f.host, f.port))
        else:
            args = dict(host=f.host, port=f.port)
        task = self.bot.create_task(
            self.loop.create_connection(f.factory, **args))
        task.add_done_callback(partial(self.created, f))
    else:
        # No port: listen ourselves on an ephemeral port, single client.
        task = self.bot.create_task(
            self.loop.create_server(
                f.factory, '0.0.0.0', 0, backlog=1))
        task.add_done_callback(partial(self.created, f))
    return f
3.239624
2.997716
1.080698
def resume(self, mask, filename, port, pos):
    """Resume a DCC send.

    Moves the pending send for this mask/port to the requested offset
    and acknowledges via a CTCP ``DCC ACCEPT``.

    :param mask: the peer's mask
    :param filename: the file being transferred
    :param port: port identifying the pending send
    :param pos: byte offset to resume from
    """
    self.connections['send']['masks'][mask][port].offset = pos
    message = 'DCC ACCEPT %s %d %d' % (filename, port, pos)
    self.bot.ctcp(mask, message)
10.575075
8.931876
1.18397
def is_allowed(self, name_or_class, mask):  # pragma: no cover
    """Return True is a new connection is allowed.

    Enforces both the global and the per-user connection limits for
    the given DCC type; notifies the peer when a limit is hit.

    :param name_or_class: a registered DCC type name or protocol class
    :param mask: the peer's mask
    :return: True when a new connection may be created
    """
    if isinstance(name_or_class, type):
        name = name_or_class.type
    else:
        name = name_or_class
    info = self.connections[name]
    # Global cap on simultaneous connections of this DCC type.
    limit = self.config[name + '_limit']
    if limit and info['total'] >= limit:
        msg = (
            "Sorry, there is too much DCC %s active. Please try again "
            "later.") % name.upper()
        self.bot.notice(mask, msg)
        return False
    if mask not in info['masks']:
        return True
    # Per-user cap only applies once the user has connections.
    limit = self.config[name + '_user_limit']
    if limit and info['masks'][mask] >= limit:
        msg = (
            "Sorry, you have too many DCC %s active. Close the other "
            "connection(s) or wait a few seconds and try again."
        ) % name.upper()
        self.bot.notice(mask, msg)
        return False
    return True
3.887359
3.617844
1.074496
def split_message(message, max_length):
    """Split long messages into chunks of at most *max_length* chars."""
    if len(message) <= max_length:
        # Short messages are emitted once, with trailing junk stripped.
        yield message.rstrip(STRIPPED_CHARS)
    else:
        yield from textwrap.wrap(message, max_length)
3.938791
3.849387
1.023226
def parse_config(main_section, *filenames):
    """Parse config files into a flat dict.

    The *main_section*'s items are merged at top level; every other
    section becomes a nested dict keyed by section name.

    :param main_section: name of the section merged at top level
    :param filenames: ini files to read (last one defines ``here``)
    :return: dict of coerced configuration values
    """
    filename = filenames[-1]
    filename = os.path.abspath(filename)
    here = os.path.dirname(filename)
    # `here` enables ${here} interpolation; hash/'#' let a literal '#'
    # survive ExtendedInterpolation.
    defaults = dict(here=here, hash='#')
    defaults['#'] = '#'
    config = configparser.ConfigParser(
        defaults, allow_no_value=False,
        interpolation=configparser.ExtendedInterpolation(),
    )
    # Keep option names case-sensitive.
    config.optionxform = str
    config.read([os.path.expanduser('~/.irc3/passwd.ini')] + list(filenames))
    value = {}
    for s in config.sections():
        items = {}
        for k, v in config.items(s):
            # Coerce strings: multi-line -> list, digits -> int,
            # single-dot numerics -> float, 'true'/'false' -> bool.
            if '\n' in v:
                v = as_list(v)
            elif v.isdigit():
                v = int(v)
            elif v.replace('.', '').isdigit() and v.count('.') == 1:
                v = float(v)
            elif v in ('true', 'false'):
                v = v == 'true' and True or False
            items[k] = v
        if s == main_section:
            value.update(items)
        else:
            # Drop interpolation helpers from sub-sections.
            for k in ('here', 'config'):
                items.pop(k, '')
            value[s] = items
    value.update(defaults)
    value['configfiles'] = filenames
    return value
3.027189
2.952689
1.025231
def extract_config(config, prefix):
    """Return all keys sharing *prefix*, with the prefix removed."""
    prefix = prefix.strip('.') + '.'
    start = len(prefix)
    return {key[start:]: val
            for key, val in config.items()
            if key.startswith(prefix)}
3.222723
2.859182
1.127149
def as_list(value):
    """Clever string splitting:

    .. code-block:: python

        >>> print(as_list('value'))
        ['value']
        >>> print(as_list('v1 v2'))
        ['v1', 'v2']
        >>> print(as_list(None))
        []
        >>> print(as_list(['v1']))
        ['v1']
    """
    if isinstance(value, (list, tuple)):
        return value
    if not value:
        return []
    # Prefer newline separation over spaces.
    for separator in '\n ':
        if separator in value:
            parts = value.split(separator)
            return [part.strip() for part in parts if part.strip()]
    return [value]
2.74367
3.160736
0.868048
def parse_modes(modes, targets=None, noargs=''):
    """Parse channel modes:

    .. code-block:: python

        >>> parse_modes('+c-n', noargs='cn')
        [('+', 'c', None), ('-', 'n', None)]
        >>> parse_modes('+c-v', ['gawel'], noargs='c')
        [('+', 'c', None), ('-', 'v', 'gawel')]

    :param modes: the mode string, e.g. ``'+c-n'``
    :param targets: mode arguments, consumed in order for every mode
        letter not listed in *noargs* (the list is popped in place)
    :param noargs: mode letters that take no argument
    :return: list of ``(sign, mode, target)`` tuples
    """
    if not targets:
        targets = []
    cleaned = []
    # Fix: default the sign to '+' so a mode string without a leading
    # '+'/'-' (as some servers send) no longer raises NameError.
    char = '+'
    for mode in modes:
        if mode in '-+':
            char = mode
            continue
        target = targets.pop(0) if mode not in noargs else None
        cleaned.append((char, mode, target))
    return cleaned
4.506174
4.487631
1.004132
def wraps_with_context(func, context):
    """Return *func* partially applied to *context*, with metadata kept."""
    bound = functools.wraps(func)(functools.partial(func, context))
    if asyncio.iscoroutinefunction(func):
        # Keep coroutine-ness visible to callers inspecting the wrapper.
        bound = asyncio.coroutine(bound)
    return bound
2.702791
2.462254
1.09769
def maybedotted(name):
    """Resolve dotted names:

    .. code-block:: python

        >>> maybedotted('irc3.config')
        <module 'irc3.config' from '...'>
        >>> maybedotted('irc3.utils.IrcString')
        <class 'irc3.utils.IrcString'>

    ..
    """
    if not name:
        raise LookupError(
            'Not able to resolve %s' % name)
    # Already a module/class/function: nothing to do.
    if hasattr(name, '__name__'):
        return name
    try:
        return importlib.import_module(name)
    except ImportError:
        pass
    resolved = None
    if '.' in name:
        # Not importable as a module: resolve the parent and pull the
        # last segment off it as an attribute.
        parent, _, last = name.rpartition('.')
        try:
            mod = maybedotted(parent)
        except LookupError:
            resolved = None
        else:
            resolved = getattr(mod, last, None)
    if resolved is not None:
        return resolved
    raise LookupError(
        'Not able to resolve %s' % name)
2.535077
2.635847
0.961769
def nick(self):
    """return nick name:

    .. code-block:: py

        >>> print(IrcString('foo').nick)
        foo
        >>> print(IrcString('foo!user@host').nick)
        foo
        >>> IrcString('#foo').nick is None
        True
        >>> IrcString('irc.freenode.net').nick is None
        True
    """
    # NOTE: self is a str subclass; a full mask is nick!user@host.
    if '!' in self:
        return self.split('!', 1)[0]
    if not self.is_channel and not self.is_server:
        return self
    # Channels and server names have no nick: falls through to None.
4.78444
5.174965
0.924536
def tagdict(self):
    """return a dict converted from this string interpreted as a
    tag-string

    .. code-block:: py

        >>> from pprint import pprint
        >>> dict_ = IrcString('aaa=bbb;ccc;example.com/ddd=eee').tagdict
        >>> pprint({str(k): str(v) for k, v in dict_.items()})
        {'aaa': 'bbb', 'ccc': 'None', 'example.com/ddd': 'eee'}
    """
    # Memoize: parse only on first access.
    tagdict = getattr(self, '_tagdict', None)
    if tagdict is None:
        try:
            self._tagdict = tags.decode(self)
        except ValueError:
            # Unparseable tag strings fall back to an empty dict.
            self._tagdict = {}
    return self._tagdict
2.902908
3.526935
0.823068