_id
stringlengths 2
7
| title
stringlengths 1
88
| partition
stringclasses 3
values | text
stringlengths 75
19.8k
| language
stringclasses 1
value | meta_information
dict |
|---|---|---|---|---|---|
q8300
|
validate
|
train
|
def validate(message, ssldir=None, **config):
"""
Validate the signature on the given message.
Four things must be true for the signature to be valid:
1) The X.509 cert must be signed by our CA
2) The cert must not be in our CRL.
3) We must be able to verify the signature using the RSA public key
contained in the X.509 cert.
4) The topic of the message and the CN on the cert must appear in the
:ref:`conf-routing-policy` dict.
Args:
message (dict): A signed message in need of validation. A signed message
contains the 'signature' and 'certificate' keys.
ssldir (str): The path to the directory containing PEM-encoded X.509
key pairs.
Returns:
bool: True if the message passes validation, False otherwise.
"""
# Both credential fields must be present and be text. Bytes values get a
# best-effort utf-8 decode before use.
for field in ['signature', 'certificate']:
if field not in message:
_log.warn('No %s field found.', field)
return False
if not isinstance(message[field], six.text_type):
_log.error('msg[%r] is not a unicode string' % field)
try:
# Make an effort to decode it, it's very likely utf-8 since that's what
# is hardcoded throughout fedmsg. Worst case scenario is it'll cause a
# validation error when there shouldn't be one.
message[field] = message[field].decode('utf-8')
except UnicodeError as e:
_log.error("Unable to decode the message '%s' field: %s", field, str(e))
return False
# The credentials travel base64-encoded; the signature is verified against
# the message with the credential fields stripped back out.
signature = base64.b64decode(message['signature'])
certificate = base64.b64decode(message['certificate'])
message = fedmsg.crypto.strip_credentials(message)
# Unfortunately we can't change this defaulting to Fedora behavior until
# fedmsg-2.0
ca_location = config.get('ca_cert_location', 'https://fedoraproject.org/fedmsg/ca.crt')
crl_location = config.get('crl_location', 'https://fedoraproject.org/fedmsg/crl.pem')
# Steps 1 and 2: the signing cert must chain to our CA and not be revoked.
try:
ca_certificate, crl = utils.load_certificates(ca_location, crl_location)
_validate_signing_cert(ca_certificate, certificate, crl)
except (IOError, RequestException, X509StoreContextError) as e:
# Maybe the CA/CRL is expired or just rotated, so invalidate the cache and try again
try:
ca_certificate, crl = utils.load_certificates(
ca_location, crl_location, invalidate_cache=True)
_validate_signing_cert(ca_certificate, certificate, crl)
except (IOError, RequestException, X509StoreContextError) as e:
_log.error(str(e))
return False
# Step 3: validate the signature of the message itself.
# NOTE(review): PKCS1v15 + SHA1 is weak by modern standards; presumably kept
# for interoperability with existing fedmsg signers -- confirm before changing.
try:
crypto_certificate = x509.load_pem_x509_certificate(certificate, default_backend())
crypto_certificate.public_key().verify(
signature,
fedmsg.encoding.dumps(message).encode('utf-8'),
asymmetric.padding.PKCS1v15(),
hashes.SHA1(),
)
except InvalidSignature as e:
_log.error('message [{m}] has an invalid signature: {e}'.format(
m=message, e=str(e)))
return False
# Step 4, check that the certificate is permitted to emit messages for the
# topic.
common_name = crypto_certificate.subject.get_attributes_for_oid(x509.oid.NameOID.COMMON_NAME)
common_name = common_name[0]
routing_policy = config.get('routing_policy', {})
nitpicky = config.get('routing_nitpicky', False)
return utils.validate_policy(
message.get('topic'), common_name.value, routing_policy, nitpicky=nitpicky)
|
python
|
{
"resource": ""
}
|
q8301
|
_validate_signing_cert
|
train
|
def _validate_signing_cert(ca_certificate, certificate, crl=None):
    """Verify a certificate against a CA certificate (and optional CRL) with pyOpenSSL.

    .. note::
        pyOpenSSL is a short-term solution here; it is in maintenance mode
        upstream and this functionality is expected to move to cryptography.

    Args:
        ca_certificate (str): PEM-encoded Certificate Authority certificate
            used as the trust root.
        certificate (str): PEM-encoded certificate to verify.
        crl (str): Optional PEM-encoded Certificate Revocation List; when
            provided, revocation checking is enabled for the whole chain.

    Raises:
        X509StoreContextError: If verification fails; the exception carries
            the details of the error.
    """
    cert_to_check = load_certificate(FILETYPE_PEM, certificate)
    trust_root = load_certificate(FILETYPE_PEM, ca_certificate)
    store = X509Store()
    store.add_cert(trust_root)
    if crl:
        store.add_crl(load_crl(FILETYPE_PEM, crl))
        store.set_flags(X509StoreFlags.CRL_CHECK | X509StoreFlags.CRL_CHECK_ALL)
    X509StoreContext(store, cert_to_check).verify_certificate()
|
python
|
{
"resource": ""
}
|
q8302
|
IRCBotConsumer.consume
|
train
|
def consume(self, msg):
    """Forward a message from the bus to every connected IRC client.

    Each client's filters are applied first; matching messages are prettified
    per-client and sent via the configured IRC method. A backlog warning is
    emitted to the channel every 20 queued messages.

    Args:
        msg (dict): A bus message carrying 'topic' and 'body' keys.
    """
    log.debug("Got message %r" % msg)
    topic, body = msg.get('topic'), msg.get('body')
    for client in self.irc_clients:
        if not client.factory.filters or (
            client.factory.filters and
            self.apply_filters(client.factory.filters, topic, body)
        ):
            raw_msg = self.prettify(
                topic=topic,
                msg=body,
                pretty=client.factory.pretty,
                terse=client.factory.terse,
                short=client.factory.short,
            )
            # Bug fix: the getattr default used to be the *string* 'notice',
            # which is not callable and would raise TypeError if the client
            # lacked the configured method. Fall back to the client's notice
            # method instead (assumes the IRC client exposes `notice` --
            # standard for twisted IRC clients).
            send = getattr(client, self.hub.config['irc_method'], client.notice)
            send(client.factory.channel, raw_msg.encode('utf-8'))
            backlog = self.incoming.qsize()
            if backlog and (backlog % 20) == 0:
                warning = "* backlogged by %i messages" % backlog
                log.warning(warning)
                send(client.factory.channel, warning.encode('utf-8'))
|
python
|
{
"resource": ""
}
|
q8303
|
check
|
train
|
def check(timeout, consumer=None, producer=None):
"""This command is used to check the status of consumers and producers.
If no consumers and producers are provided, the status of all consumers and producers
is printed.

Args:
timeout (int): Seconds to wait for a message from the monitoring socket.
consumer (list): Names of consumers whose status should be checked.
producer (list): Names of producers whose status should be checked.
Raises:
click.ClickException: If no monitoring endpoint is configured, if no
message arrives within ``timeout`` seconds, or if a requested
consumer/producer is missing or uninitialized.
"""
# It's weird to say --consumers, but there are multiple, so rename the variables
consumers, producers = consumer, producer
config = load_config()
endpoint = config.get('moksha.monitoring.socket')
if not endpoint:
raise click.ClickException('No monitoring endpoint has been configured: '
'please set "moksha.monitoring.socket"')
# Subscribe to everything published on the monitoring socket.
context = zmq.Context.instance()
socket = context.socket(zmq.SUB)
# ZMQ takes the timeout in milliseconds
socket.set(zmq.RCVTIMEO, timeout * 1000)
socket.subscribe(b'')
socket.connect(endpoint)
try:
message = socket.recv_json()
except zmq.error.Again:
raise click.ClickException(
'Failed to receive message from the monitoring endpoint ({e}) in {t} '
'seconds.'.format(e=endpoint, t=timeout))
if not consumers and not producers:
click.echo('No consumers or producers specified so all will be shown.')
else:
missing = False
uninitialized = False
# Index the reported consumers/producers by name, then verify each
# requested one is both present and initialized.
for messager_type, messagers in (('consumers', consumers), ('producers', producers)):
active = {}
for messager in message[messager_type]:
active[messager['name']] = messager
for messager in messagers:
if messager not in active:
click.echo('"{m}" is not active!'.format(m=messager), err=True)
missing = True
else:
if active[messager]['initialized'] is not True:
click.echo('"{m}" is not initialized!'.format(m=messager), err=True)
uninitialized = True
if missing:
raise click.ClickException('Some consumers and/or producers are missing!')
elif uninitialized:
raise click.ClickException('Some consumers and/or producers are uninitialized!')
else:
click.echo('All consumers and producers are active!')
# Finally dump the full monitoring message for inspection.
click.echo(json.dumps(message, indent=2, sort_keys=True))
|
python
|
{
"resource": ""
}
|
q8304
|
CollectdConsumer.dump
|
train
|
def dump(self):
    """Print the collectd feedback to stdout and reset all counters.

    Called by CollectdProducer every `n` seconds. The collectd feedback is
    written to stdout while other log messages go to stderr.
    """
    # Print out the collectd feedback, sorted for stable output.
    for key, value in sorted(self._dict.items()):
        print(self.formatter(key, value))
    # Reset each entry to zero. (The original re-sorted the items here just
    # to zero them; iteration order is irrelevant and only keys are needed.)
    for key in self._dict:
        self._dict[key] = 0
|
python
|
{
"resource": ""
}
|
q8305
|
CollectdConsumer.formatter
|
train
|
def formatter(self, key, value):
    """Render one collectd PUTVAL line for the given key/value pair."""
    now = int(time.time())
    line = (
        "PUTVAL {host}/fedmsg/fedmsg_wallboard-{key} "
        "interval={interval} {timestamp}:{value}"
    )
    return line.format(
        host=self.host,
        key=key,
        interval=self.hub.config['collectd_interval'],
        timestamp=now,
        value=value,
    )
|
python
|
{
"resource": ""
}
|
q8306
|
init
|
train
|
def init(**config):
    """Select the crypto backend implementation(s) from configuration.

    ``crypto_backend`` chooses the signing implementation ('gpg' or the
    default 'x509'); ``crypto_validate_backends`` lists the validation
    implementations. When no validation backends are configured, the signing
    backend doubles as the validator.

    Raises:
        ValueError: If an unknown backend name is configured.
    """
    global _implementation
    global _validate_implementations
    _implementation = gpg if config.get('crypto_backend') == 'gpg' else x509
    _validate_implementations = []
    for name in config.get('crypto_validate_backends', []):
        if name == 'gpg':
            _validate_implementations.append(gpg)
        elif name == 'x509':
            _validate_implementations.append(x509)
        else:
            raise ValueError("%r is not a valid crypto backend" % name)
    if not _validate_implementations:
        _validate_implementations.append(_implementation)
|
python
|
{
"resource": ""
}
|
q8307
|
validate_signed_by
|
train
|
def validate_signed_by(message, signer, **config):
    """Validate a message and additionally require a specific signer.

    Behaves like ``validate(...)`` but also rejects the message unless it was
    signed by a certificate bearing the argued name: the routing policy is
    overridden to allow only ``signer`` on the message's topic, with nitpicky
    routing enforced.
    """
    overrides = copy.deepcopy(config)
    overrides['routing_nitpicky'] = True
    overrides['routing_policy'] = {message['topic']: [signer]}
    return validate(message, **overrides)
|
python
|
{
"resource": ""
}
|
q8308
|
strip_credentials
|
train
|
def strip_credentials(message):
    """Return a deep copy of *message* without credential fields.

    The 'signature' and 'certificate' keys (used by both the x509 and gpg
    backends) are removed from the copy; the original dict is left untouched,
    so this can be called safely.
    """
    sanitized = copy.deepcopy(message)
    sanitized.pop('signature', None)
    sanitized.pop('certificate', None)
    return sanitized
|
python
|
{
"resource": ""
}
|
q8309
|
get_replay
|
train
|
def get_replay(name, query, config, context=None):
    """Ask a replay endpoint for missed messages and yield them.

    Args:
        name (str): The replay endpoint name.
        query (dict): Query dict; supported keys include 'seq_id', 'seq_ids',
            'seq_id_range', 'msg_id', 'msg_ids' and 'time'.
        config (dict): Must provide 'replay_endpoints' (a ``name`` -> ZeroMQ
            address mapping) and 'io_threads' (used when a new ZeroMQ context
            is created).
        context (zmq.Context): Optional existing ZeroMQ context to reuse.

    Returns:
        generator: Yields one decoded message dict per reply part.

    Raises:
        IOError: If no endpoint is configured for ``name`` or connecting fails.
        ValueError: If a reply part is not valid JSON (assumed to be an
            error message from the endpoint).
    """
    endpoint = config.get('replay_endpoints', {}).get(name, None)
    if not endpoint:
        raise IOError("No appropriate replay endpoint "
                      "found for {0}".format(name))
    if not context:
        context = zmq.Context(config['io_threads'])
    # Replay endpoints speak REQ/REP (bidirectional), not PUB/SUB.
    socket = context.socket(zmq.REQ)
    try:
        socket.connect(endpoint)
    except zmq.ZMQError as e:
        raise IOError("Error when connecting to the "
                      "replay endpoint: '{0}'".format(str(e)))
    # REQ/REP dance: one request out, one (multipart) reply back.
    socket.send(fedmsg.encoding.dumps(query).encode('utf-8'))
    replies = socket.recv_multipart()
    socket.close()
    for part in replies:
        try:
            yield fedmsg.encoding.loads(part.decode('utf-8'))
        except ValueError:
            # Non-JSON replies are assumed to be error messages.
            raise ValueError(part)
|
python
|
{
"resource": ""
}
|
q8310
|
check_for_replay
|
train
|
def check_for_replay(name, names_to_seq_id, msg, config, context=None):
    """Return the messages to process, replaying any that were missed.

    Args:
        name (str): The consumer's name.
        names_to_seq_id (dict): Maps consumer names to the last seen
            sequence ID; updated in place with the new sequence ID.
        msg (dict): The latest message that has arrived.
        config (dict): Must provide 'replay_endpoints' and 'io_threads'
            (see :func:`get_replay`).
        context (zmq.Context): Optional ZeroMQ context to reuse.

    Returns:
        list: The message(s) to process, including any replayed ones.
    """
    last_seen = names_to_seq_id.get(name, None)
    current = msg.get("seq_id", None)
    if last_seen is None or current is None:
        # Not enough information to detect a gap; pass the message through.
        return [msg]
    if current <= last_seen:
        # Stale/duplicate delivery (e.g. network lag); assume a replay was
        # already requested for it and drop it.
        return []
    if current == last_seen + 1 or last_seen < 0:
        result = [msg]
    else:
        # There is a gap: ask the replay endpoint for the missing range.
        result = list(get_replay(name, {
            "seq_id_range": (last_seen, current)
        }, config, context))
    if len(result) == 0 or result[-1]['seq_id'] < msg['seq_id']:
        result.append(msg)
    names_to_seq_id[name] = current
    return result
|
python
|
{
"resource": ""
}
|
q8311
|
validate_policy
|
train
|
def validate_policy(topic, signer, routing_policy, nitpicky=False):
    """
    Check that the sender is allowed to emit messages for the given topic.

    Args:
        topic (str): The message topic the ``signer`` used when sending.
        signer (str): The Common Name of the certificate that signed the
            message.
        routing_policy (dict): Maps topics to lists of permitted signers.
        nitpicky (bool): When True, topics absent from the policy are
            rejected; when False they are allowed with a warning.

    Returns:
        bool: True if the policy allows ``signer`` to send on ``topic``.
    """
    if topic not in routing_policy:
        # No policy entry for this topic -- the reaction depends on whether
        # we are in nitpicky mode. Nitpicky mode is typically left off while
        # standing up fedmsg so the policy can be built incrementally.
        if nitpicky:
            # Nobody gets in without an explicit entry: fail the message.
            _log.error("Authorization/routing_policy underspecified.")
            return False
        _log.warning('No routing policy defined for "{t}" but routing_nitpicky is '
                     'False so the message is being treated as authorized.'.format(t=topic))
        return True
    if signer in routing_policy[topic]:
        # The signer is explicitly whitelisted for this topic.
        return True
    # There is a policy for this topic and the signer is not on the list.
    _log.error("Authorization/routing_policy error. "
               "Topic %r. Signer %r." % (topic, signer))
    return False
|
python
|
{
"resource": ""
}
|
q8312
|
load_certificates
|
train
|
def load_certificates(ca_location, crl_location=None, invalidate_cache=False):
    """
    Load the CA certificate and CRL, caching them for future calls.

    .. note::
        Providing the CA/CRL location as an HTTPS URL is deprecated and will
        be removed in a future release.

    Args:
        ca_location (str): Location of the CA certificate -- an absolute path
            to a PEM-encoded file (or, deprecated, an HTTPS URL).
        crl_location (str): Location of the Certificate Revocation List --
            an absolute path to a PEM-encoded file (or, deprecated, an HTTPS
            URL).
        invalidate_cache (bool): Drop any cached entry and reload.

    Returns:
        tuple: (CA certificate, CRL) as unicode strings; either may be None
        when its location is falsy.

    Raises:
        requests.exception.RequestException: Any exception requests could raise.
        IOError: If the location provided could not be opened and read.
    """
    if crl_location is None:
        crl_location = ''
    cache_key = ca_location + crl_location
    if invalidate_cache:
        _cached_certificates.pop(cache_key, None)
    else:
        try:
            return _cached_certificates[cache_key]
        except KeyError:
            pass
    ca = _load_certificate(ca_location) if ca_location else None
    crl = _load_certificate(crl_location) if crl_location else None
    _cached_certificates[cache_key] = ca, crl
    return ca, crl
|
python
|
{
"resource": ""
}
|
q8313
|
_load_certificate
|
train
|
def _load_certificate(location):
    """
    Load a PEM-encoded certificate from an HTTPS URL or a local file path.

    Args:
        location (str): An HTTPS URL or an absolute file path. Intended for
            PEM-encoded certificates, so ASCII encoding is assumed.

    Returns:
        str: The PEM-encoded certificate as a unicode string.

    Raises:
        requests.exception.RequestException: Any exception requests could raise.
        IOError: If the location provided could not be opened and read.
    """
    if location.startswith('https://'):
        _log.info('Downloading x509 certificate from %s', location)
        with requests.Session() as session:
            # Retry transient failures a few times before giving up.
            session.mount('https://', requests.adapters.HTTPAdapter(max_retries=3))
            response = session.get(location, timeout=30)
            response.raise_for_status()
            return response.text
    _log.info('Loading local x509 certificate from %s', location)
    with open(location, 'rb') as cert_file:
        return cert_file.read().decode('ascii')
|
python
|
{
"resource": ""
}
|
q8314
|
_get_config_files
|
train
|
def _get_config_files():
"""
Load the list of file paths for fedmsg configuration files.
Returns:
list: List of files containing fedmsg configuration.
"""
config_paths = []
if os.environ.get('FEDMSG_CONFIG'):
config_location = os.environ['FEDMSG_CONFIG']
else:
config_location = '/etc/fedmsg.d'
if os.path.isfile(config_location):
config_paths.append(config_location)
elif os.path.isdir(config_location):
# list dir and add valid files
possible_config_files = [os.path.join(config_location, p)
for p in os.listdir(config_location) if p.endswith('.py')]
for p in possible_config_files:
if os.path.isfile(p):
config_paths.append(p)
if not config_paths:
_log.info('No configuration files found in %s', config_location)
return config_paths
|
python
|
{
"resource": ""
}
|
q8315
|
_validate_none_or_type
|
train
|
def _validate_none_or_type(t):
"""
Create a validator that checks if a setting is either None or a given type.
Args:
t: The type to assert.
Returns:
callable: A callable that will validate a setting for that type.
"""
def _validate(setting):
"""
Check the setting to make sure it's the right type.
Args:
setting (object): The setting to check.
Returns:
object: The unmodified object if it's the proper type.
Raises:
ValueError: If the setting is the wrong type.
"""
if setting is not None and not isinstance(setting, t):
raise ValueError('"{}" is not "{}"'.format(setting, t))
return setting
return _validate
|
python
|
{
"resource": ""
}
|
q8316
|
_validate_bool
|
train
|
def _validate_bool(value):
    """
    Validate that a setting is (or parses to) a bool.

    Text values 'true'/'false' (any case, surrounding whitespace ignored)
    are coerced to the corresponding boolean.

    Returns:
        bool: The value as a boolean.

    Raises:
        ValueError: If the value can't be parsed as a bool string or isn't
            already a bool.
    """
    if isinstance(value, six.text_type):
        lowered = value.strip().lower()
        if lowered == 'true':
            value = True
        elif lowered == 'false':
            value = False
        else:
            raise ValueError('"{}" must be a boolean ("True" or "False")'.format(value))
    if not isinstance(value, bool):
        raise ValueError('"{}" is not a boolean value.'.format(value))
    return value
|
python
|
{
"resource": ""
}
|
q8317
|
_gather_configs_in
|
train
|
def _gather_configs_in(directory):
""" Return list of fully qualified python filenames in the given dir """
try:
return sorted([
os.path.join(directory, fname)
for fname in os.listdir(directory)
if fname.endswith('.py')
])
except OSError:
return []
|
python
|
{
"resource": ""
}
|
q8318
|
execfile
|
train
|
def execfile(fname, variables):
    """Execute a Python file in the given namespace.

    Re-implements the python2 builtin of the same name for python3.
    """
    with open(fname) as source:
        compiled = compile(source.read(), fname, 'exec')
    exec(compiled, variables)
|
python
|
{
"resource": ""
}
|
q8319
|
FedmsgConfig.load_config
|
train
|
def load_config(self, settings=None):
    """
    Populate the configuration from defaults plus settings or config files.

    Args:
        settings (dict): If given, settings are pulled from this dictionary;
            otherwise the discovered configuration files are processed.
    """
    self._load_defaults()
    if settings:
        self.update(settings)
    else:
        for path in _get_config_files():
            self.update(_process_config_file([path]))
    self._loaded = True
    self._validate()
|
python
|
{
"resource": ""
}
|
q8320
|
FedmsgConfig._load_defaults
|
train
|
def _load_defaults(self):
"""Iterate over self._defaults and set all default values on self."""
for k, v in self._defaults.items():
self[k] = v['default']
|
python
|
{
"resource": ""
}
|
q8321
|
FedmsgConfig._validate
|
train
|
def _validate(self):
"""
Run the validators found in self._defaults on all the corresponding values.
Raises:
ValueError: If the configuration contains an invalid configuration value.
"""
errors = []
for k in self._defaults.keys():
try:
validator = self._defaults[k]['validator']
if validator is not None:
self[k] = validator(self[k])
except ValueError as e:
errors.append('\t{}: {}'.format(k, six.text_type(e)))
if errors:
raise ValueError(
'Invalid configuration values were set: \n{}'.format('\n'.join(errors)))
|
python
|
{
"resource": ""
}
|
q8322
|
make_processors
|
train
|
def make_processors(**config):
    """Initialize all of the text processors (idempotent).

    Must be called once before using the other functions in this module::

        >>> import fedmsg.config
        >>> import fedmsg.meta
        >>> config = fedmsg.config.load_config([], None)
        >>> fedmsg.meta.make_processors(**config)
        >>> text = fedmsg.meta.msg2repr(some_message_dict, **config)
    """
    global processors
    if processors:
        # Already initialized -- nothing to do.
        return
    import pkg_resources
    processors = []
    for entry_point in pkg_resources.iter_entry_points('fedmsg.meta'):
        try:
            processors.append(entry_point.load()(_, **config))
        except Exception as e:
            log.warn("Failed to load %r processor." % entry_point.name)
            log.exception(e)
    # The DefaultProcessor must always come last.
    processors.append(DefaultProcessor(_, **config))
    # Three processors means only the builtins (Default, Logger, Announce)
    # were found -- no externally provided plugins, so msg2subtitle and
    # msg2link likely won't behave the way users expect.
    if len(processors) == 3:
        log.warn("No fedmsg.meta plugins found. fedmsg.meta.msg2* crippled")
|
python
|
{
"resource": ""
}
|
q8323
|
msg2processor
|
train
|
def msg2processor(msg, **config):
    """Return the first registered processor that can handle *msg*.

    Falls back to the last processor (the DefaultProcessor) when none match.
    Raises :class:`fedmsg.meta.ProcessorsNotInitialized` if
    :func:`fedmsg.meta.make_processors` hasn't been called yet.
    """
    for candidate in processors:
        if candidate.handle_msg(msg, **config) is not None:
            return candidate
    # Nothing matched; the DefaultProcessor is always appended last.
    return processors[-1]
|
python
|
{
"resource": ""
}
|
q8324
|
graceful
|
train
|
def graceful(cls):
    """Decorator factory guarding against message structure changes.

    Processors expect messages in a certain format; when the format drifts,
    they raise. Wrapped functions catch KeyError, log it, and gracefully
    return a default ``cls()`` instance instead.
    """
    def _decorate(func):
        @functools.wraps(func)
        def _guarded(msg, **config):
            try:
                return func(msg, **config)
            except KeyError:
                log.exception("%r failed on %r" % (func, msg.get('msg_id')))
                return cls()
        return _guarded
    return _decorate
|
python
|
{
"resource": ""
}
|
q8325
|
conglomerate
|
train
|
def conglomerate(messages, subject=None, lexers=False, **config):
    """Group related messages into conglomerate entries where possible.

    A conglomerate message represents several other messages (e.g. one entry
    standing in for 38 git.commit messages). Grouping is provided by
    fedmsg.meta plugins on a "best effort" basis; anything left ungrouped is
    wrapped in a single-message template so the output is uniformly shaped.

    The ``subject`` argument is optional and yields "subjective"
    representations when possible (see msg2subjective(...)).
    """
    # First, give every registered processor a chance to do its work.
    for processor in processors:
        messages = processor.conglomerate(messages, subject=subject, **config)
    # Then fake it for every remaining ungrouped message.
    for index, entry in enumerate(messages):
        if 'msg_ids' in entry:
            # Successfully grouped already -- leave it alone.
            continue
        template = BaseConglomerator.produce_template(
            [entry], subject=subject, lexers=lexers, **config)
        # Fill out the fields a fully-implemented conglomerator would set.
        template.update({
            'link': msg2link(entry, **config),
            'subtitle': msg2subtitle(entry, **config),
            'subjective': msg2subjective(entry, subject=subject, **config),
            'secondary_icon': msg2secondary_icon(entry, **config),
        })
        messages[index] = template
    return messages
|
python
|
{
"resource": ""
}
|
q8326
|
msg2repr
|
train
|
def msg2repr(msg, processor, **config):
    """Return a one-line natural-language representation of a fedmsg message.

    This is the 'top-most level' representation built from the message's
    title, subtitle, and link.
    """
    title = msg2title(msg, **config)
    subtitle = processor.subtitle(msg, **config)
    link = processor.link(msg, **config) or ''
    return u"{title} -- {subtitle} {link}".format(
        title=title, subtitle=subtitle, link=link)
|
python
|
{
"resource": ""
}
|
q8327
|
msg2long_form
|
train
|
def msg2long_form(msg, processor, **config):
    """Return the 'long form' text for a message.

    Most messages just fall back to the terse subtitle, but some provide a
    long, paragraph-structured block of text.
    """
    return processor.long_form(msg, **config) or processor.subtitle(msg, **config)
|
python
|
{
"resource": ""
}
|
q8328
|
msg2usernames
|
train
|
def msg2usernames(msg, processor=None, legacy=False, **config):
    """Return the set of FAS usernames associated with a message.

    Note: ``processor`` is required in practice; ``legacy`` is accepted for
    interface compatibility but unused here.
    """
    return processor.usernames(msg, **config)
|
python
|
{
"resource": ""
}
|
q8329
|
msg2agent
|
train
|
def msg2agent(msg, processor=None, legacy=False, **config):
    """Return the single username who is the "agent" for an event, or None.

    The "agent" is whoever caused the event: when karma is given, both
    usernames come back from msg2usernames, but only the giver is the agent.

    Processors that implement ``agent`` are trusted outright. Otherwise we
    best-guess by taking an arbitrary element of the username set (whether
    that is correct or not), and return None when there are no users at all.
    """
    if processor.agent is not NotImplemented:
        # The processor knows exactly who the agent is; trust it.
        return processor.agent(msg, **config)
    usernames = processor.usernames(msg, **config)
    # usernames is a set(), which doesn't support indexing -- pop one.
    return usernames.pop() if usernames else None
|
python
|
{
"resource": ""
}
|
q8330
|
msg2subjective
|
train
|
def msg2subjective(msg, processor, subject, **config):
    """Return message text phrased from *subject*'s point of view.

    For example, when the subject is "oddshocks", a message that would
    normally read "oddshocks commented on ticket #174" becomes "you
    commented on ticket #174". Falls back to the plain subtitle when no
    subjective form is available.
    """
    return (processor.subjective(msg, subject, **config) or
            processor.subtitle(msg, **config))
|
python
|
{
"resource": ""
}
|
q8331
|
TriggerCommand.run_command
|
train
|
def run_command(self, command, message):
    """Run *command* in a bash shell, feeding the JSON-encoded message on stdin.

    Args:
        command (str): The shell command line to execute.
        message (dict): The message to serialize and pipe to the command.

    Returns:
        int: The command's exit status.
    """
    # Security/correctness fix: the message used to be interpolated into the
    # shell command line ("echo '%s' | %s"), so a message containing a single
    # quote (or crafted shell syntax) could break the pipeline or inject
    # commands. Write the serialized message to the process's stdin instead;
    # the trailing newline matches what `echo` used to produce.
    proc = subprocess.Popen(
        command, shell=True, executable='/bin/bash',
        stdin=subprocess.PIPE)
    proc.communicate(input=fedmsg.encoding.dumps(message).encode('utf-8') + b'\n')
    return proc.returncode
|
python
|
{
"resource": ""
}
|
q8332
|
BaseProcessor.conglomerate
|
train
|
def conglomerate(self, messages, **config):
    """Group some of the given messages into common conglomerate entries.

    Each of this processor's conglomerator objects is applied in turn.
    A conglomerated entry carries a plural ``msg_ids`` mapping (msg_id ->
    per-message details like 'subtitle', 'title', 'link', 'icon') along with
    aggregate fields such as::

        {
            'subtitle': 'relrod pushed commits to ghc and 487 other packages',
            'link': None,  # This could be something.
            'icon': 'https://that-git-logo',
            'secondary_icon': 'https://that-relrod-avatar',
            'start_time': some_timestamp,
            'end_time': some_other_timestamp,
            'human_time': '5 minutes ago',
            'usernames': ['relrod'],
            'packages': ['ghc', 'nethack', ...],
            'topics': ['org.fedoraproject.prod.git.receive'],
            'categories': ['git'],
            'msg_ids': {...},
        }

    The presence of the plural ``msg_ids`` field is the telltale sign that an
    entry is a conglomerate; ungrouped singular messages bear a singular
    ``msg_id`` field instead.
    """
    for grouper in self.conglomerator_objects:
        messages = grouper.conglomerate(messages, **config)
    return messages
|
python
|
{
"resource": ""
}
|
q8333
|
BaseProcessor.handle_msg
|
train
|
def handle_msg(self, msg, **config):
    """Return the topic remainder if this processor handles *msg*, else None.

    A message is handled when its 'topic' matches this processor's
    ``__prefix__`` regex; the last captured group (or "") is the remainder.
    """
    match = self.__prefix__.match(msg['topic'])
    if match is None:
        return None
    return match.groups()[-1] or ""
|
python
|
{
"resource": ""
}
|
q8334
|
Consumer.begin
|
train
|
def begin(self, user_url, anonymous=False):
    """Start the OpenID authentication process (steps 1-2 of the overview
    at the top of this file).

    @param user_url: Identity URL given by the user. The URL undergoes a
        textual normalization, and discovery resolves any redirects the
        server might issue.
    @type user_url: unicode
    @param anonymous: Whether to make an anonymous request of the OpenID
        provider. Such a request asks for no identifier assertion but may
        carry extension data (e.g. "I don't care who you are, but I'd like
        to know your time zone.").
    @type anonymous: bool
    @returns: An object holding the discovered information, with a method
        for building the redirect URL to the server (step 3 of the
        overview) and an
        L{addExtensionArg<openid.consumer.consumer.AuthRequest.addExtensionArg>}
        method for extension arguments.
    @returntype: L{AuthRequest<openid.consumer.consumer.AuthRequest>}
    @raises openid.consumer.discover.DiscoveryFailure: when no OpenID
        server can be found for this URL. If the C{yadis} package is
        available, this is an alias for C{yadis.discover.DiscoveryFailure}.
    """
    discovery = Discovery(self.session, user_url, self.session_key_prefix)
    try:
        service = discovery.getNextService(self._discover)
    except fetchers.HTTPFetchingError as why:
        raise DiscoveryFailure('Error fetching XRDS document: %s' %
                               (why.why, ), None)
    if service is None:
        raise DiscoveryFailure('No usable OpenID services found for %s' %
                               (user_url, ), None)
    return self.beginWithoutDiscovery(service, anonymous)
|
python
|
{
"resource": ""
}
|
q8335
|
Consumer.beginWithoutDiscovery
|
train
|
def beginWithoutDiscovery(self, service, anonymous=False):
    """Start OpenID verification for an already-discovered service.

    Used internally by Consumer.begin after discovery, and exposed for
    library users performing their own discovery.

    @param service: an OpenID service endpoint descriptor (see
        L{openid.consumer.discover}).
    @type service:
        L{OpenIDServiceEndpoint<openid.consumer.discover.OpenIDServiceEndpoint>}

    @returns: an OpenID authentication request object.
    @rtype: L{AuthRequest<openid.consumer.consumer.AuthRequest>}

    @see: openid.consumer.consumer.Consumer.begin
    @see: openid.consumer.discover
    """
    request = self.consumer.begin(service)
    # Remember the endpoint so the response can be matched back up.
    self.session[self._token_key] = request.endpoint
    try:
        request.setAnonymous(anonymous)
    except ValueError as err:
        raise ProtocolError(str(err))
    return request
|
python
|
{
"resource": ""
}
|
q8336
|
GenericConsumer._checkReturnTo
|
train
|
def _checkReturnTo(self, message, return_to):
    """Check an OpenID message and its openid.return_to value against
    a return_to URL from an application.  Return True on success,
    False on failure.
    """
    # The openid.return_to query arguments must be consistent with the
    # arguments of the original message.
    try:
        self._verifyReturnToArgs(message.toPostArgs())
    except ProtocolError as why:
        logging.exception("Verifying return_to arguments: %s" % (why, ))
        return False
    msg_return_to = message.getArg(OPENID_NS, 'return_to')
    # Normalize both URLs, then require the addressing scheme, network
    # location and path components to be equal.
    app_parts = urlparse(urinorm.urinorm(return_to))
    msg_parts = urlparse(urinorm.urinorm(msg_return_to))
    return all(app_parts[i] == msg_parts[i] for i in range(3))
|
python
|
{
"resource": ""
}
|
q8337
|
GenericConsumer._checkAuth
|
train
|
def _checkAuth(self, message, server_url):
    """Verify this message with a check_authentication request.

    @returns: True if the request is valid.
    @rtype: bool
    """
    logging.info('Using OpenID check_authentication')
    request = self._createCheckAuthRequest(message)
    if request is None:
        return False
    try:
        response = self._makeKVPost(request, server_url)
    except (fetchers.HTTPFetchingError, ServerError) as e:
        logging.exception('check_authentication failed: %s' % e.args[0])
        return False
    return self._processCheckAuthResponse(response, server_url)
|
python
|
{
"resource": ""
}
|
q8338
|
GenericConsumer._negotiateAssociation
|
train
|
def _negotiateAssociation(self, endpoint):
    """Make association requests to the server, attempting to create a
    new association.

    @returns: a new association object, or None on failure
    @rtype: L{openid.association.Association}
    """
    # Start from the session/association type our negotiator prefers.
    assoc_type, session_type = self.negotiator.getAllowedType()
    try:
        return self._requestAssociation(endpoint, assoc_type, session_type)
    except ServerError as why:
        supportedTypes = self._extractSupportedAssociationType(
            why, endpoint, assoc_type)
        if supportedTypes is None:
            return None
        assoc_type, session_type = supportedTypes
        # Retry once with the combination the server said it supports.
        try:
            return self._requestAssociation(endpoint, assoc_type,
                                            session_type)
        except ServerError:
            # The server rejected the very type it suggested; give up.
            logging.error(
                'Server %s refused its suggested association '
                'type: session_type=%s, assoc_type=%s' % (
                    endpoint.server_url, session_type, assoc_type))
            return None
|
python
|
{
"resource": ""
}
|
q8339
|
importSafeElementTree
|
train
|
def importSafeElementTree(module_names=None):
    """Find a working ElementTree implementation that is not vulnerable
    to XXE, using `defusedxml`.

    >>> XXESafeElementTree = importSafeElementTree()

    @param module_names: The names of modules to try to use as a safe
        ElementTree.  Defaults to C{L{xxe_safe_elementtree_modules}}.
    @returns: An ElementTree module that is not vulnerable to XXE.
    """
    names = xxe_safe_elementtree_modules if module_names is None \
        else module_names
    try:
        return importElementTree(names)
    except ImportError:
        message = ('Unable to find a ElementTree module '
                   'that is not vulnerable to XXE. '
                   'Tried importing %r' % (names, ))
        raise ImportError(message)
|
python
|
{
"resource": ""
}
|
q8340
|
FetchRequest.getExtensionArgs
|
train
|
def getExtensionArgs(self):
    """Get the serialized form of this attribute fetch request.

    @returns: The fetch request message parameters
    @rtype: {unicode:unicode}
    """
    aliases = NamespaceMap()
    required = []
    if_available = []
    ax_args = self._newArgs()
    for type_uri, attribute in self.requested_attributes.items():
        if attribute.alias is None:
            alias = aliases.add(type_uri)
        else:
            # addAlias raises when two attributes request the same
            # alias.  Reporting the collision at add() time would put
            # the offending caller in the traceback, but attributes
            # are mutable, so the error can only be caught reliably
            # here at serialization time.
            alias = aliases.addAlias(type_uri, attribute.alias)
        bucket = required if attribute.required else if_available
        bucket.append(alias)
        if attribute.count != 1:
            ax_args['count.' + alias] = str(attribute.count)
        ax_args['type.' + alias] = type_uri
    if required:
        ax_args['required'] = ','.join(required)
    if if_available:
        ax_args['if_available'] = ','.join(if_available)
    return ax_args
|
python
|
{
"resource": ""
}
|
q8341
|
FetchRequest.fromOpenIDRequest
|
train
|
def fromOpenIDRequest(cls, openid_request):
    """Extract a FetchRequest from an OpenID message.

    @param openid_request: The OpenID authentication request
        containing the attribute fetch request
    @type openid_request: C{L{openid.server.server.CheckIDRequest}}
    @rtype: C{L{FetchRequest}} or C{None}
    @returns: The FetchRequest extracted from the message or None, if
        the message contained no AX extension.
    @raises KeyError: if the AuthRequest is not consistent in its use
        of namespace aliases.
    @raises AXError: When parseExtensionArgs would raise same.
    @see: L{parseExtensionArgs}
    """
    message = openid_request.message
    ax_args = message.getArgs(cls.ns_uri)
    self = cls()
    try:
        self.parseExtensionArgs(ax_args)
    except NotAXMessage:
        # No AX extension present at all.  (The bound exception
        # variable was unused in the original.)
        return None
    if self.update_url:
        # Update URL must match the openid.realm of the underlying
        # OpenID 2 message.
        realm = message.getArg(OPENID_NS, 'realm',
                               message.getArg(OPENID_NS, 'return_to'))
        if not realm:
            raise AXError(
                "Cannot validate update_url %r against absent realm"
                % (self.update_url, ))
        tr = TrustRoot.parse(realm)
        if not tr.validateURL(self.update_url):
            raise AXError(
                "Update URL %r failed validation against realm %r" %
                (self.update_url, realm, ))
    return self
|
python
|
{
"resource": ""
}
|
q8342
|
FetchRequest.parseExtensionArgs
|
train
|
def parseExtensionArgs(self, ax_args):
    """Given attribute exchange arguments, populate this FetchRequest.
    @param ax_args: Attribute Exchange arguments from the request.
        As returned from L{Message.getArgs<openid.message.Message.getArgs>}.
    @type ax_args: dict
    @raises KeyError: if the message is not consistent in its use
        of namespace aliases.
    @raises NotAXMessage: If ax_args does not include an Attribute Exchange
        mode.
    @raises AXError: If the data to be parsed does not follow the
        attribute exchange specification. At least when
        'if_available' or 'required' is not specified for a
        particular attribute type.
    """
    # Raises an exception if the mode is not the expected value
    self._checkMode(ax_args)
    aliases = NamespaceMap()
    for key, value in ax_args.items():
        # 'type.<alias>' keys declare an attribute type URI under <alias>.
        if key.startswith('type.'):
            alias = key[5:]
            type_uri = value
            aliases.addAlias(type_uri, alias)
            # Optional 'count.<alias>' bounds the number of values;
            # it must be a positive integer or the "unlimited" token.
            count_key = 'count.' + alias
            count_s = ax_args.get(count_key)
            if count_s:
                try:
                    count = int(count_s)
                    if count <= 0:
                        # Raised from inside the try on purpose: it is
                        # not a ValueError, so it propagates directly.
                        raise AXError(
                            "Count %r must be greater than zero, got %r" %
                            (count_key, count_s, ))
                except ValueError:
                    # Non-integer: only the special token is legal.
                    if count_s != UNLIMITED_VALUES:
                        raise AXError("Invalid count value for %r: %r" %
                                      (count_key, count_s, ))
                    count = count_s
            else:
                count = 1
            self.add(AttrInfo(type_uri, alias=alias, count=count))
    # Flag each attribute as required or merely if-available.
    required = toTypeURIs(aliases, ax_args.get('required'))
    for type_uri in required:
        self.requested_attributes[type_uri].required = True
    if_available = toTypeURIs(aliases, ax_args.get('if_available'))
    all_type_uris = required + if_available
    # Every declared type URI must appear in one of the two groups.
    for type_uri in aliases.iterNamespaceURIs():
        if type_uri not in all_type_uris:
            raise AXError('Type URI %r was in the request but not '
                          'present in "required" or "if_available"' %
                          (type_uri, ))
    self.update_url = ax_args.get('update_url')
|
python
|
{
"resource": ""
}
|
q8343
|
findLinksRel
|
train
|
def findLinksRel(link_attrs_list, target_rel):
    """Return the link attribute dicts that carry *target_rel* as a
    relationship.

    Replaces the lambda-assignment + filter() of the original (PEP 8
    E731) with an equivalent list comprehension.
    """
    return [attrs for attrs in link_attrs_list
            if linkHasRel(attrs, target_rel)]
|
python
|
{
"resource": ""
}
|
q8344
|
SQLStore.txn_getAssociation
|
train
|
def txn_getAssociation(self, server_url, handle=None):
    """Get the most recent association that has been set for this
    server URL and handle.

    str -> NoneType or Association

    Expired associations found along the way are removed from the
    store rather than returned.
    """
    if handle is not None:
        self.db_get_assoc(server_url, handle)
    else:
        self.db_get_assocs(server_url)
    rows = self.cur.fetchall()
    if not rows:
        return None
    associations = []
    for values in rows:
        values = list(values)
        # The secret column is stored blob-encoded.
        values[1] = self.blobDecode(values[1])
        assoc = Association(*values)
        if assoc.expiresIn == 0:
            self.txn_removeAssociation(server_url, assoc.handle)
        else:
            associations.append((assoc.issued, assoc))
    if not associations:
        return None
    # Sort on the issue time only.  A plain tuple sort would fall back
    # to comparing Association objects whenever two associations share
    # the same 'issued' timestamp, raising TypeError on Python 3.
    associations.sort(key=lambda item: item[0])
    return associations[-1][1]
|
python
|
{
"resource": ""
}
|
q8345
|
Urllib2Fetcher._makeResponse
|
train
|
def _makeResponse(self, urllib2_response):
    '''
    Build an HTTPResponse from the urllib response, decoding the body
    from bytes to str when the necessary charset information is
    available.
    '''
    resp = HTTPResponse()
    resp.body = urllib2_response.read(MAX_RESPONSE_KB * 1024)
    resp.final_url = urllib2_response.geturl()
    header_items = list(urllib2_response.info().items())
    resp.headers = self._lowerCaseKeys(dict(header_items))
    resp.status = getattr(urllib2_response, 'code', 200)
    content_type = resp.headers.get("content-type", "")
    _, extra_dict = self._parseHeaderValue(content_type)
    # Decode with the declared charset if any; HTTP/1.1 suggests
    # ISO-8859-1 as the fallback.
    charset = extra_dict.get('charset', 'latin1')
    try:
        resp.body = resp.body.decode(charset)
    except Exception:
        # Leave the body as raw bytes if it cannot be decoded.
        pass
    return resp
|
python
|
{
"resource": ""
}
|
q8346
|
Message.toPostArgs
|
train
|
def toPostArgs(self):
    """
    Return all arguments with openid. in front of namespaced arguments.

    @return: {unicode: unicode} mapping of POST argument names to
        values.  (Values pass through oidutil.toUnicode, so they are
        text, not bytes — the earlier "@return bytes" was stale.)
    """
    args = {}
    # Add namespace definitions to the output
    for ns_uri, alias in self.namespaces.items():
        if self.namespaces.isImplicit(ns_uri):
            continue
        if alias == NULL_NAMESPACE:
            ns_key = 'openid.ns'
        else:
            ns_key = 'openid.ns.' + alias
        args[ns_key] = oidutil.toUnicode(ns_uri)
    for (ns_uri, ns_key), value in self.args.items():
        key = self.getKey(ns_uri, ns_key)
        # Ensure the resulting value is a unicode string; toUnicode
        # decodes UTF-8 bytestrings (the old comment claiming a
        # *bytestring* result appears stale — confirm with oidutil).
        args[key] = oidutil.toUnicode(value)
    return args
|
python
|
{
"resource": ""
}
|
q8347
|
Message.toArgs
|
train
|
def toArgs(self):
    """Return all namespaced arguments, failing if any
    non-namespaced arguments exist."""
    # FIXME - undocumented exception
    post_args = self.toPostArgs()
    prefix = 'openid.'
    for key in post_args:
        if not key.startswith(prefix):
            raise ValueError(
                'This message can only be encoded as a POST, because it '
                'contains arguments that are not prefixed with "openid."')
    # Strip the 'openid.' prefix from every key.
    return {key[len(prefix):]: value
            for key, value in post_args.items()}
|
python
|
{
"resource": ""
}
|
q8348
|
NamespaceMap.addAlias
|
train
|
def addAlias(self, namespace_uri, desired_alias, implicit=False):
    """Add an alias from this namespace URI to the desired alias.

    @raises KeyError: when the alias or the namespace URI is already
        mapped to something else.
    """
    if isinstance(namespace_uri, bytes):
        namespace_uri = str(namespace_uri, encoding="utf-8")
    # Check that desired_alias is not an openid protocol field as
    # per the spec.
    assert desired_alias not in OPENID_PROTOCOL_FIELDS, \
        "%r is not an allowed namespace alias" % (desired_alias,)
    # Check that desired_alias does not contain a period as per
    # the spec.
    if isinstance(desired_alias, str):
        assert '.' not in desired_alias, \
            "%r must not contain a dot" % (desired_alias,)
    # Check that there is not a namespace already defined for
    # the desired alias
    current_namespace_uri = self.alias_to_namespace.get(desired_alias)
    if (current_namespace_uri is not None and
            current_namespace_uri != namespace_uri):
        fmt = ('Cannot map %r to alias %r. '
               '%r is already mapped to alias %r')
        msg = fmt % (namespace_uri, desired_alias, current_namespace_uri,
                     desired_alias)
        raise KeyError(msg)
    # Check that there is not already a (different) alias for
    # this namespace URI
    alias = self.namespace_to_alias.get(namespace_uri)
    if alias is not None and alias != desired_alias:
        fmt = ('Cannot map %r to alias %r. '
               'It is already mapped to alias %r')
        raise KeyError(fmt % (namespace_uri, desired_alias, alias))
    # 'type(desired_alias) in [str, str]' was a Python 2 leftover
    # ([str, unicode] originally); isinstance is the Python 3 form.
    assert (desired_alias == NULL_NAMESPACE or
            isinstance(desired_alias, str)), repr(desired_alias)
    assert namespace_uri not in self.implicit_namespaces
    self.alias_to_namespace[desired_alias] = namespace_uri
    self.namespace_to_alias[namespace_uri] = desired_alias
    if implicit:
        self.implicit_namespaces.append(namespace_uri)
    return desired_alias
|
python
|
{
"resource": ""
}
|
q8349
|
_appendArgs
|
train
|
def _appendArgs(url, args):
"""Append some arguments to an HTTP query.
"""
# to be merged with oidutil.appendArgs when we combine the projects.
if hasattr(args, 'items'):
args = list(args.items())
args.sort()
if len(args) == 0:
return url
# According to XRI Resolution section "QXRI query parameters":
#
# """If the original QXRI had a null query component (only a leading
# question mark), or a query component consisting of only question
# marks, one additional leading question mark MUST be added when
# adding any XRI resolution parameters."""
if '?' in url.rstrip('?'):
sep = '&'
else:
sep = '?'
return '%s%s%s' % (url, sep, urlencode(args))
|
python
|
{
"resource": ""
}
|
q8350
|
iriToURI
|
train
|
def iriToURI(iri):
    """Transform an IRI to a URI by percent-escaping non-ASCII
    characters (RFC 3987, section 3.1, "Mapping of IRIs to URIs")."""
    if isinstance(iri, bytes):
        iri = str(iri, encoding="utf-8")
    # The custom 'oid_percent_escape' error handler performs the
    # percent-escaping when non-ASCII characters are encountered.
    escaped = iri.encode('ascii', errors='oid_percent_escape')
    return escaped.decode()
|
python
|
{
"resource": ""
}
|
q8351
|
urinorm
|
train
|
def urinorm(uri):
    '''
    Normalize a URI.

    Lower-cases the scheme and host, strips empty/default ports,
    percent-escapes non-ASCII characters, normalizes percent-encodings
    of unreserved characters, removes dot path segments and replaces an
    empty path with '/'.  Only absolute http/https URIs are accepted;
    a ValueError is raised otherwise.
    '''
    # TODO: use urllib.parse instead of these complex regular expressions
    if isinstance(uri, bytes):
        uri = str(uri, encoding='utf-8')
    # Percent-escape non-ASCII characters via the custom codec error
    # handler registered by this module.
    uri = uri.encode('ascii', errors='oid_percent_escape').decode('utf-8')
    # _escapeme_re.sub(_pct_escape_unicode, uri).encode('ascii').decode()
    illegal_mo = uri_illegal_char_re.search(uri)
    if illegal_mo:
        raise ValueError('Illegal characters in URI: %r at position %s' %
                         (illegal_mo.group(), illegal_mo.start()))
    uri_mo = uri_re.match(uri)
    scheme = uri_mo.group(2)
    if scheme is None:
        raise ValueError('No scheme specified')
    scheme = scheme.lower()
    if scheme not in ('http', 'https'):
        raise ValueError('Not an absolute HTTP or HTTPS URI: %r' % (uri, ))
    authority = uri_mo.group(4)
    if authority is None:
        raise ValueError('Not an absolute URI: %r' % (uri, ))
    authority_mo = authority_re.match(authority)
    if authority_mo is None:
        raise ValueError('URI does not have a valid authority: %r' % (uri, ))
    userinfo, host, port = authority_mo.groups()
    if userinfo is None:
        userinfo = ''
    if '%' in host:
        # Percent-encoded host: decode the escapes, then produce the
        # canonical ASCII (IDNA) form.
        host = host.lower()
        host = pct_encoded_re.sub(_pct_encoded_replace, host)
        host = host.encode('idna').decode()
    else:
        host = host.lower()
    if port:
        # Drop the port when it is empty or the scheme's default.
        if (port == ':' or (scheme == 'http' and port == ':80') or
                (scheme == 'https' and port == ':443')):
            port = ''
    else:
        port = ''
    authority = userinfo + host + port
    path = uri_mo.group(5)
    # Only percent-encodings of unreserved characters are decoded.
    path = pct_encoded_re.sub(_pct_encoded_replace_unreserved, path)
    path = remove_dot_segments(path)
    if not path:
        path = '/'
    query = uri_mo.group(6)
    if query is None:
        query = ''
    fragment = uri_mo.group(8)
    if fragment is None:
        fragment = ''
    return scheme + '://' + authority + path + query + fragment
|
python
|
{
"resource": ""
}
|
q8352
|
_removeIfPresent
|
train
|
def _removeIfPresent(filename):
"""Attempt to remove a file, returning whether the file existed at
the time of the call.
str -> bool
"""
try:
os.unlink(filename)
except OSError as why:
if why.errno == ENOENT:
# Someone beat us to it, but it's gone, so that's OK
return 0
else:
raise
else:
# File was present
return 1
|
python
|
{
"resource": ""
}
|
q8353
|
FileOpenIDStore.storeAssociation
|
train
|
def storeAssociation(self, server_url, association):
    """Store an association in the association directory.

    The serialized association is written to a temporary file, fsynced
    and then renamed into place, so readers never observe a partially
    written association.

    (str, Association) -> NoneType
    """
    association_s = association.serialize()  # NOTE: UTF-8 encoded bytes
    filename = self.getAssociationFilename(server_url, association.handle)
    tmp_file, tmp = self._mktemp()
    try:
        try:
            tmp_file.write(association_s)
            os.fsync(tmp_file.fileno())
        finally:
            tmp_file.close()
        try:
            os.rename(tmp, filename)
        except OSError as why:
            if why.errno != EEXIST:
                raise
            # We only expect EEXIST to happen only on Windows. It's
            # possible that we will succeed in unlinking the existing
            # file, but not in putting the temporary file in place.
            try:
                os.unlink(filename)
            except OSError as why:
                if why.errno == ENOENT:
                    # Target vanished between rename and unlink: fine.
                    pass
                else:
                    raise
            # Now the target should not exist. Try renaming again,
            # giving up if it fails.
            os.rename(tmp, filename)
    except:
        # If there was an error, don't leave the temporary file
        # around.  (Bare except is deliberate: the exception is
        # always re-raised.)
        _removeIfPresent(tmp)
        raise
|
python
|
{
"resource": ""
}
|
q8354
|
OpenIDRequestHandler.doVerify
|
train
|
def doVerify(self):
    """Process the form submission, initiating OpenID verification.

    Reads the submitted identifier and option checkboxes, starts
    discovery through the consumer and either redirects the browser to
    the provider or writes an auto-submitting form, depending on what
    the request object asks for.
    """
    # First, make sure that the user entered something
    openid_url = self.query.get('openid_identifier')
    if not openid_url:
        self.render(
            'Enter an OpenID Identifier to verify.',
            css_class='error',
            form_contents=openid_url)
        return
    # Option checkboxes from the submitted form.
    immediate = 'immediate' in self.query
    use_sreg = 'use_sreg' in self.query
    use_pape = 'use_pape' in self.query
    use_stateless = 'use_stateless' in self.query
    oidconsumer = self.getConsumer(stateless=use_stateless)
    try:
        request = oidconsumer.begin(openid_url)
    except consumer.DiscoveryFailure as exc:
        # NOTE(review): cgi.escape was removed in Python 3.8;
        # html.escape(..., quote=False) is the equivalent — confirm the
        # Python versions this example is expected to run on.
        fetch_error_string = 'Error in discovery: %s' % (
            cgi.escape(str(exc)))
        self.render(
            fetch_error_string,
            css_class='error',
            form_contents=openid_url)
    else:
        if request is None:
            msg = 'No OpenID services found for <code>%s</code>' % (
                cgi.escape(openid_url), )
            self.render(msg, css_class='error', form_contents=openid_url)
        else:
            # Then, ask the library to begin the authorization.
            # Here we find out the identity server that will verify the
            # user's identity, and get a token that allows us to
            # communicate securely with the identity server.
            if use_sreg:
                self.requestRegistrationData(request)
            if use_pape:
                self.requestPAPEDetails(request)
            trust_root = self.server.base_url
            return_to = self.buildURL('process')
            if request.shouldSendRedirect():
                # The request can be carried in a plain HTTP redirect.
                redirect_url = request.redirectURL(
                    trust_root, return_to, immediate=immediate)
                self.send_response(302)
                self.send_header('Location', redirect_url)
                self.writeUserHeader()
                self.end_headers()
            else:
                # Otherwise emit an auto-submitting POST form.
                form_html = request.htmlMarkup(
                    trust_root,
                    return_to,
                    form_tag_attrs={'id': 'openid_message'},
                    immediate=immediate)
                self.wfile.write(bytes(form_html, 'utf-8'))
|
python
|
{
"resource": ""
}
|
q8355
|
ResponsiveFlask._response_mimetype_based_on_accept_header
|
train
|
def _response_mimetype_based_on_accept_header(self):
    """Pick the response mimetype based on the request's Accept header.

    Returns ``None`` when no acceptable mimetype is found.
    """
    if not request.accept_mimetypes:
        # No Accept header at all: the client takes anything.
        return self.default_mimetype
    wildcard = '*/*'
    for mimetype, quality in request.accept_mimetypes:
        if mimetype == wildcard:
            return self.default_mimetype
        if mimetype in self.response_formatters:
            return mimetype
    return None
|
python
|
{
"resource": ""
}
|
q8356
|
ResponsiveFlask.make_response
|
train
|
def make_response(self, rv):
    """Returns response based on Accept header.

    If no Accept header field is present, then it is assumed that
    the client accepts all media types. This way JSON format will
    be used.
    If an Accept header field is present, and if the server cannot
    send a response which is acceptable according to the combined
    Accept field value, then a 406 (not acceptable) response will
    be sent.
    """
    status = headers = None
    if isinstance(rv, tuple):
        # Pad a short (body, status, headers) tuple with None values.
        rv, status, headers = rv + (None,) * (3 - len(rv))
    response_mimetype = self._response_mimetype_based_on_accept_header()
    if response_mimetype is None:
        # Return 406, list of available mimetypes in default format.
        default_formatter = self.response_formatters.get(
            self.default_mimetype
        )
        available_mimetypes = default_formatter(
            mimetypes=list(self.response_formatters)
        )
        rv = self.response_class(
            response=available_mimetypes,
            status=406,
            mimetype=self.default_mimetype,
        )
    elif isinstance(rv, dict):
        # Serialize dict return values with the formatter registered
        # for the negotiated mimetype.
        formatter = self.response_formatters.get(response_mimetype)
        rv = self.response_class(
            response=formatter(**rv),
            mimetype=response_mimetype,
        )
    return super(ResponsiveFlask, self).make_response(
        rv=(rv, status, headers)
    )
|
python
|
{
"resource": ""
}
|
q8357
|
IrcServer.notice
|
train
|
def notice(self, client, message):
    """send a notice to client"""
    if not client or not message:
        return
    # Long messages are split to fit the server's line-length limit.
    for chunk in utils.split_message(message, self.config.max_length):
        client.fwrite(':{c.srv} NOTICE {c.nick} :{msg}', msg=chunk)
|
python
|
{
"resource": ""
}
|
q8358
|
Hawk.client_key_loader
|
train
|
def client_key_loader(self, f):
    """Registers a function to be called to find a client key.

    The decorated function takes a client id and returns a client key::

        @hawk.client_key_loader
        def get_client_key(client_id):
            if client_id == 'Alice':
                return 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn'
            else:
                raise LookupError()

    :param f: The callback for retrieving a client key.
    """
    @wraps(f)
    def wrapped_f(client_id):
        # Shape the lookup result the way mohawk expects credentials.
        return {
            'id': client_id,
            'key': f(client_id),
            'algorithm': current_app.config['HAWK_ALGORITHM'],
        }
    self._client_key_loader_func = wrapped_f
    return wrapped_f
|
python
|
{
"resource": ""
}
|
q8359
|
Hawk.auth_required
|
train
|
def auth_required(self, view_func):
    """Decorator restricting a view function to authenticated users.

    Authentication is skipped entirely when `HAWK_ENABLED` is `False`.
    """
    @wraps(view_func)
    def wrapped_view_func(*args, **kwargs):
        if current_app.config['HAWK_ENABLED']:
            use_cookie = (current_app.config['HAWK_ALLOW_COOKIE_AUTH']
                          and session)
            if use_cookie:
                self._auth_by_cookie()
            else:
                self._auth_by_signature()
        return view_func(*args, **kwargs)
    return wrapped_view_func
|
python
|
{
"resource": ""
}
|
q8360
|
Hawk._sign_response
|
train
|
def _sign_response(self, response):
    """Signs a response if it's possible.

    The incoming request must carry an Authorization header; it is
    re-validated with mohawk and, on success, a matching
    Server-Authorization header is added to the outgoing response.
    The response is returned unchanged when signing is not possible.
    """
    if 'Authorization' not in request.headers:
        return response
    try:
        mohawk_receiver = mohawk.Receiver(
            credentials_map=self._client_key_loader_func,
            request_header=request.headers['Authorization'],
            url=request.url,
            method=request.method,
            content=request.get_data(),
            content_type=request.mimetype,
            accept_untrusted_content=current_app.config['HAWK_ACCEPT_UNTRUSTED_CONTENT'],
            localtime_offset_in_seconds=current_app.config['HAWK_LOCALTIME_OFFSET_IN_SECONDS'],
            timestamp_skew_in_seconds=current_app.config['HAWK_TIMESTAMP_SKEW_IN_SECONDS']
        )
    except mohawk.exc.HawkFail:
        # Header did not validate: return the response unsigned.
        return response
    response.headers['Server-Authorization'] = mohawk_receiver.respond(
        content=response.data,
        content_type=response.mimetype
    )
    return response
|
python
|
{
"resource": ""
}
|
q8361
|
DCCManager.create
|
train
|
def create(self, name_or_class, mask, filepath=None, **kwargs):
    """Create a new DCC connection. Return an ``asyncio.Protocol``

    ``name_or_class`` is either a protocol class or the name of a
    registered protocol.  When ``filepath`` is given, send/get specific
    defaults (offset, rate limit, file size) are filled in.  When
    ``kwargs['port']`` is set, an outgoing connection is made;
    otherwise a listening server socket is opened on a random port.
    """
    if isinstance(name_or_class, type):
        name = name_or_class.type
        protocol = name_or_class
    else:
        name = name_or_class
        protocol = self.protocols[name]
    assert name in DCC_TYPES
    if filepath:
        kwargs.setdefault('limit_rate',
                          self.config['send_limit_rate'])
        kwargs['filepath'] = filepath
        if protocol.type == DCCSend.type:
            kwargs.setdefault('offset', 0)
            kwargs.update(
                filename_safe=slugify(os.path.basename(filepath)),
                filesize=os.path.getsize(filepath),
            )
        elif protocol.type == DCCGet.type:
            # Resume from any partial file already on disk.
            try:
                offset = os.path.getsize(filepath)
            except OSError:
                offset = 0
            kwargs.setdefault('offset', offset)
            kwargs.setdefault('resume', False)
    kwargs.setdefault('port', None)
    f = protocol(
        mask=mask, ip=int(self.bot.ip),
        bot=self.bot, loop=self.loop, **kwargs)
    if kwargs['port']:
        # A port was supplied: connect out to the peer.
        if self.bot.config.get('dcc_sock_factory'):
            sock_factory = maybedotted(self.bot.config.dcc_sock_factory)
            args = dict(sock=sock_factory(self.bot, f.host, f.port))
        else:
            args = dict(host=f.host, port=f.port)
        task = self.bot.create_task(
            self.loop.create_connection(f.factory, **args))
        task.add_done_callback(partial(self.created, f))
    else:
        # No port: listen and wait for the peer to connect to us.
        task = self.bot.create_task(
            self.loop.create_server(
                f.factory, '0.0.0.0', 0, backlog=1))
        task.add_done_callback(partial(self.created, f))
    return f
|
python
|
{
"resource": ""
}
|
q8362
|
DCCManager.resume
|
train
|
def resume(self, mask, filename, port, pos):
    """Resume a DCC send"""
    # Move the pending send's offset to the position the peer asked for,
    # then acknowledge with a CTCP DCC ACCEPT.
    connection = self.connections['send']['masks'][mask][port]
    connection.offset = pos
    self.bot.ctcp(mask, 'DCC ACCEPT %s %d %d' % (filename, port, pos))
|
python
|
{
"resource": ""
}
|
q8363
|
DCCManager.is_allowed
|
train
|
def is_allowed(self, name_or_class, mask):  # pragma: no cover
    """Return True is a new connection is allowed"""
    if isinstance(name_or_class, type):
        name = name_or_class.type
    else:
        name = name_or_class
    info = self.connections[name]
    # Global cap on simultaneous connections of this type.
    total_limit = self.config[name + '_limit']
    if total_limit and info['total'] >= total_limit:
        msg = (
            "Sorry, there is too much DCC %s active. Please try again "
            "later.") % name.upper()
        self.bot.notice(mask, msg)
        return False
    # Per-user cap, only relevant when the mask already has connections.
    if mask in info['masks']:
        user_limit = self.config[name + '_user_limit']
        if user_limit and info['masks'][mask] >= user_limit:
            msg = (
                "Sorry, you have too many DCC %s active. Close the other "
                "connection(s) or wait a few seconds and try again."
            ) % name.upper()
            self.bot.notice(mask, msg)
            return False
    return True
|
python
|
{
"resource": ""
}
|
q8364
|
split_message
|
train
|
def split_message(message, max_length):
    """Split long messages"""
    if len(message) <= max_length:
        # Short message: emit it as-is, minus trailing stripped chars.
        yield message.rstrip(STRIPPED_CHARS)
    else:
        for chunk in textwrap.wrap(message, max_length):
            yield chunk
|
python
|
{
"resource": ""
}
|
q8365
|
parse_config
|
train
|
def parse_config(main_section, *filenames):
    """parse config files

    Reads the given ini files (plus ~/.irc3/passwd.ini) with extended
    interpolation, coercing option values to list/int/float/bool where
    the text allows it.  Options from ``main_section`` are merged into
    the top level of the returned dict; every other section becomes a
    nested dict keyed by section name.
    """
    filename = filenames[-1]
    filename = os.path.abspath(filename)
    here = os.path.dirname(filename)
    # 'here' lets ini values reference the config file's directory;
    # 'hash' / '#' allow literal hash characters despite ini comments.
    defaults = dict(here=here, hash='#')
    defaults['#'] = '#'
    config = configparser.ConfigParser(
        defaults, allow_no_value=False,
        interpolation=configparser.ExtendedInterpolation(),
    )
    # Keep option names case-sensitive.
    config.optionxform = str
    config.read([os.path.expanduser('~/.irc3/passwd.ini')] + list(filenames))
    value = {}
    for s in config.sections():
        items = {}
        for k, v in config.items(s):
            # Best-effort coercion of string values.
            if '\n' in v:
                v = as_list(v)
            elif v.isdigit():
                v = int(v)
            elif v.replace('.', '').isdigit() and v.count('.') == 1:
                v = float(v)
            elif v in ('true', 'false'):
                v = v == 'true' and True or False
            items[k] = v
        if s == main_section:
            value.update(items)
        else:
            # Drop interpolation helpers leaked into sub-sections.
            for k in ('here', 'config'):
                items.pop(k, '')
            value[s] = items
    value.update(defaults)
    value['configfiles'] = filenames
    return value
|
python
|
{
"resource": ""
}
|
q8366
|
extract_config
|
train
|
def extract_config(config, prefix):
    """return all keys with the same prefix without the prefix"""
    prefix = prefix.strip('.') + '.'
    plen = len(prefix)
    return {key[plen:]: val for key, val in config.items()
            if key.startswith(prefix)}
|
python
|
{
"resource": ""
}
|
q8367
|
IrcString.tagdict
|
train
|
def tagdict(self):
    """return a dict converted from this string interpreted as a tag-string

    .. code-block:: py

        >>> from pprint import pprint
        >>> dict_ = IrcString('aaa=bbb;ccc;example.com/ddd=eee').tagdict
        >>> pprint({str(k): str(v) for k, v in dict_.items()})
        {'aaa': 'bbb', 'ccc': 'None', 'example.com/ddd': 'eee'}
    """
    # Parse lazily and cache the result on the instance; a malformed
    # tag-string yields an empty dict.
    cached = getattr(self, '_tagdict', None)
    if cached is not None:
        return cached
    try:
        self._tagdict = tags.decode(self)
    except ValueError:
        self._tagdict = {}
    return self._tagdict
|
python
|
{
"resource": ""
}
|
q8368
|
IrcBot.send_line
|
train
|
def send_line(self, data, nowait=False):
    """send a line to the server. replace CR by spaces"""
    # Strip line breaks so one logical line cannot inject extra IRC
    # commands.  (The original re-ran this replace a second time on the
    # already-sanitized data in the direct-send branch.)
    data = data.replace('\n', ' ').replace('\r', ' ')
    # loop.create_future() replaces asyncio.Future(loop=...); the
    # explicit 'loop' argument was removed in Python 3.10.
    f = self.loop.create_future()
    if self.queue is not None and nowait is False:
        self.queue.put_nowait((f, data))
    else:
        self.send(data)
        f.set_result(True)
    return f
|
python
|
{
"resource": ""
}
|
q8369
|
IrcBot.privmsg
|
train
|
def privmsg(self, target, message, nowait=False):
    """send a privmsg to target"""
    if not message:
        return
    messages = utils.split_message(message, self.config.max_length)
    if isinstance(target, DCCChat):
        # DCC chats get the raw lines on their own connection.
        for line in messages:
            target.send_line(line)
    elif target:
        f = None
        for line in messages:
            f = self.send_line('PRIVMSG %s :%s' % (target, line),
                               nowait=nowait)
        # Future of the last line sent.
        return f
|
python
|
{
"resource": ""
}
|
q8370
|
IrcBot.ctcp
|
train
|
def ctcp(self, target, message, nowait=False):
    """send a ctcp to target"""
    if not (target and message):
        return
    f = None
    # CTCP payloads are \x01-delimited PRIVMSGs.
    for line in utils.split_message(message, self.config.max_length):
        f = self.send_line('PRIVMSG %s :\x01%s\x01' % (target, line),
                           nowait=nowait)
    return f
|
python
|
{
"resource": ""
}
|
q8371
|
IrcBot.mode
|
train
|
def mode(self, target, *data):
    """set user or channel mode"""
    arguments = ' '.join(data)
    self.send_line('MODE %s %s' % (target, arguments), nowait=True)
|
python
|
{
"resource": ""
}
|
q8372
|
IrcBot.join
|
train
|
def join(self, target):
    """join a channel"""
    # Look up the channel password keyed by the bare channel name
    # (channel-type prefix characters stripped).
    chantypes = self.server_config['CHANTYPES']
    password = self.config.passwords.get(target.strip(chantypes))
    if password:
        target += ' ' + password
    self.send_line('JOIN %s' % target)
|
python
|
{
"resource": ""
}
|
q8373
|
IrcBot.part
|
train
|
def part(self, target, reason=None):
    """quit a channel"""
    payload = target + ' :' + reason if reason else target
    self.send_line('PART %s' % payload)
|
python
|
{
"resource": ""
}
|
q8374
|
IrcBot.kick
|
train
|
def kick(self, channel, target, reason=None):
    """kick target from channel"""
    if reason:
        target = '%s :%s' % (target, reason)
    self.send_line('KICK %s %s' % (channel, target), nowait=True)
|
python
|
{
"resource": ""
}
|
q8375
|
IrcBot.topic
|
train
|
def topic(self, channel, topic=None):
    """change or request the topic of a channel"""
    # Without a (non-empty) topic this merely queries the current one.
    payload = channel + ' :' + topic if topic else channel
    self.send_line('TOPIC %s' % payload)
|
python
|
{
"resource": ""
}
|
q8376
|
IrcBot.away
|
train
|
def away(self, message=None):
    """mark ourself as away"""
    if message:
        self.send_line('AWAY :' + message)
    else:
        # Bare AWAY clears/queries away status per the IRC protocol.
        self.send_line('AWAY')
|
python
|
{
"resource": ""
}
|
q8377
|
IrcBot.ip
|
train
|
def ip(self):
    """Return the bot's IP as an ``ipaddress`` object, cached in ``_ip``.

    Resolution order: explicit ``ip`` config key, then the local socket
    address. When the local address turns out to be IPv6, the public
    IPv4 address is fetched from an external service instead.
    """
    if not self._ip:
        if 'ip' in self.config:
            addr = self.config['ip']
        else:
            addr = self.protocol.transport.get_extra_info('sockname')[0]
        addr = ip_address(addr)
        if addr.version == 4:
            self._ip = addr
        else:  # pragma: no cover
            # ask an external service for our public IPv4 address
            body = urlopen('http://ipv4.icanhazip.com/').read()
            self._ip = ip_address(body.strip().decode())
    return self._ip
|
python
|
{
"resource": ""
}
|
q8378
|
IrcBot.dcc_get
|
train
|
def dcc_get(self, mask, host, port, filepath, filesize=None):
    """DCC GET a file from *mask*.

    *filepath* must be an absolute path into an existing directory and
    *filesize* the expected size when known. Returns the connection's
    ``ready`` future.
    """
    conn = self.dcc.create(
        'get', mask, filepath=filepath, filesize=filesize,
        host=host, port=port)
    return conn.ready
|
python
|
{
"resource": ""
}
|
q8379
|
IrcBot.dcc_send
|
train
|
def dcc_send(self, mask, filepath):
    """DCC SEND an existing file (absolute *filepath*) to *mask*.

    Returns the connection's ``ready`` future.
    """
    conn = self.dcc.create('send', mask, filepath=filepath)
    return conn.ready
|
python
|
{
"resource": ""
}
|
q8380
|
IrcBot.dcc_accept
|
train
|
def dcc_accept(self, mask, filepath, port, pos):
    """Accept a DCC RESUME for an existing DCC SEND.

    *filepath* is the file being sent, *port* the port opened on the
    server and *pos* the expected resume offset. Returns whatever the
    DCC manager's ``resume`` returns.
    """
    return self.dcc.resume(mask, filepath, port, pos)
|
python
|
{
"resource": ""
}
|
q8381
|
challenge_auth
|
train
|
def challenge_auth(username, password, challenge, lower, digest='sha256'):
    """Calculates quakenet's challenge auth hash
    .. code-block:: python
        >>> challenge_auth("mooking", "0000000000",
        ... "12345678901234567890123456789012", str.lower, "md5")
        '2ed1a1f1d2cd5487d2e18f27213286b9'
    """
    fdigest = get_digest(digest)

    def hexdigest(data):
        # helper: digest raw bytes and return the hex string
        return fdigest(data).hexdigest()

    account = lower(username)
    # quakenet only considers the first 10 characters of the password
    truncated = password[:10].encode("ascii")
    key = hexdigest(
        "{0}:{1}".format(account, hexdigest(truncated)).encode("ascii"))
    return hmac.HMAC(key.encode("ascii"),
                     challenge.encode("ascii"),
                     digestmod=fdigest).hexdigest()
|
python
|
{
"resource": ""
}
|
q8382
|
auto_retweet
|
train
|
def auto_retweet(bot):
    """Retweet the author's tweets about irc3 and pypi releases.

    Already-retweeted status ids are persisted (one per line) in
    ``~/.irc3/twitter/<nick>/retweeted`` so each tweet is boosted once.
    """
    conn = bot.get_social_connection(id='twitter')
    dirname = os.path.expanduser('~/.irc3/twitter/{nick}'.format(**bot.config))
    if not os.path.isdir(dirname):
        os.makedirs(dirname)
    filename = os.path.join(dirname, 'retweeted')
    retweeted = []
    if os.path.isfile(filename):
        with open(filename) as fd:
            retweeted = [line.strip() for line in fd.readlines()]
    for user in ('pypi', 'gawel_'):
        found = conn.search.tweets(
            q=user + ' AND irc3',
            result_type='recent')
        for status in found.get('statuses', []):
            if status['user']['screen_name'] != user:
                continue
            sid = status['id_str']
            if sid in retweeted:
                continue
            res = conn(getattr(conn.statuses.retweet, sid))
            if 'id' in res:
                # record the id so the tweet is not boosted again
                with open(filename, 'a+') as fd:
                    fd.write(sid + '\n')
|
python
|
{
"resource": ""
}
|
q8383
|
FeedsHook.filter_travis
|
train
|
def filter_travis(self, entry):
    """Let a travis entry through only when the build state changed.

    The previous state is persisted in a ``<filename>.state`` file next
    to the feed cache; the entry title is rewritten to show the build
    number and the new state.
    """
    state_file = entry.filename + '.state'
    previous = None
    if os.path.isfile(state_file):
        with open(state_file) as fd:
            previous = fd.read().strip()
    current = 'failed' if 'failed' in entry.summary else 'success'
    with open(state_file, 'w') as fd:
        fd.write(current)
    if previous != current:
        build = entry.title.split('#')[1]
        entry['title'] = 'Build #{0} {1}'.format(build, current)
        return True
|
python
|
{
"resource": ""
}
|
q8384
|
FeedsHook.filter_pypi
|
train
|
def filter_pypi(self, entry):
    """Let an entry through only if it concerns one of our packages."""
    title = entry.title.lower()
    for package in self.packages:
        if title.startswith(package):
            return entry
|
python
|
{
"resource": ""
}
|
q8385
|
Whois.process_results
|
train
|
def process_results(self, results=None, **value):
    """Merge all whois reply events into a single result dict.

    Channel lists from every event are concatenated; ``success`` is True
    when the final reply code is 318 (end of WHOIS).
    """
    channels = []
    for event in results:
        channels.extend(event.pop('channels', '').split())
        value.update(event)
    value['channels'] = channels
    value['success'] = value.get('retcode') == '318'
    return value
|
python
|
{
"resource": ""
}
|
q8386
|
CTCP.process_results
|
train
|
def process_results(self, results=None, **value):
    """Build a result dict from the first event and return it.

    ``success`` is False when the reply code is 486 (CTCP refused).
    """
    for event in results:
        if 'mask' in event:
            event['mask'] = utils.IrcString(event['mask'])
        value['success'] = event.pop('retcode', None) != '486'
        value.update(event)
        # only the first event matters
        return value
|
python
|
{
"resource": ""
}
|
q8387
|
quote
|
train
|
def quote(bot, mask, target, args):
    """send quote to the server
    %%quote <args>...
    """
    raw = ' '.join(args['<args>'])
    bot.log.info('quote> %r', raw)
    bot.send(raw)
|
python
|
{
"resource": ""
}
|
q8388
|
print_help_page
|
train
|
def print_help_page(bot, file=sys.stdout):
    """Print a reStructuredText help page listing every registered command."""
    def p(text):
        print(text, file=file)

    plugin = bot.get_plugin(Commands)
    title = "Available Commands for {nick} at {host}".format(**bot.config)
    p("=" * len(title))
    p(title)
    p("=" * len(title))
    p('')
    p('.. contents::')
    p('')
    # group the commands by the module that defines them
    modules = {}
    for name, (predicates, callback) in plugin.items():
        modules.setdefault(callback.__module__, []).append(
            (name, callback, predicates))
    for module in sorted(modules):
        p(module)
        p('=' * len(module))
        p('')
        for name, callback, predicates in sorted(modules[module]):
            p(name)
            p('-' * len(name))
            p('')
            # command docstrings embed usage lines prefixed with %%
            doc = callback.__doc__.replace('%%', bot.config.cmd)
            for line in doc.split('\n'):
                line = line.strip()
                if line.startswith(bot.config.cmd):
                    line = ' ``{}``'.format(line)
                p(line)
            if 'permission' in predicates:
                p('*Require {0[permission]} permission.*'.format(predicates))
            if predicates.get('public', True) is False:
                p('*Only available in private.*')
            p('')
|
python
|
{
"resource": ""
}
|
q8389
|
Core.connected
|
train
|
def connected(self, **kwargs):
    """Trigger the server_ready event once the server accepts us."""
    bot = self.bot
    bot.log.info('Server config: %r', bot.server_config)
    # the server decided our final nickname; recompile regexps with it
    bot.config['nick'] = kwargs['me']
    bot.recompile()
    # let all plugins know that the server can handle commands
    bot.notify('server_ready')
    # the handshake-only events are no longer needed
    bot.detach_events(*self.before_connect_events)
|
python
|
{
"resource": ""
}
|
q8390
|
Core.recompile
|
train
|
def recompile(self, nick=None, new_nick=None, **kw):
    """Recompile the bot's regexps when our own nick has changed."""
    if nick.nick == self.bot.nick:
        self.bot.config['nick'] = new_nick
        self.bot.recompile()
|
python
|
{
"resource": ""
}
|
q8391
|
Core.badnick
|
train
|
def badnick(self, me=None, nick=None, **kw):
    """Fall back to an alternate nick when ours is rejected.

    Appends ``_`` to the current nick during registration (me == '*')
    and schedules an attempt to regain the original nick in 30s.
    """
    if me == '*':
        self.bot.set_nick(self.bot.nick + '_')
    self.bot.log.debug('Trying to regain nickname in 30s...')
    self.nick_handle = self.bot.loop.call_later(
        30, self.bot.set_nick, self.bot.original_nick)
|
python
|
{
"resource": ""
}
|
q8392
|
Core.set_config
|
train
|
def set_config(self, data=None, **kwargs):
    """Store server options parsed from *data*.

    Tokens look like ``KEY=value`` or a bare ``KEY`` flag (stored as
    True); only upper-case option names are kept.
    """
    config = self.bot.config['server_config']
    for token in data.split(' '):
        value = True
        if '=' in token:
            token, value = token.split('=', 1)
        if token.isupper():
            config[token] = value
|
python
|
{
"resource": ""
}
|
q8393
|
fetch
|
train
|
def fetch(args):
    """Download each feed into its cache file; return the feed name.

    Network errors are deliberately swallowed: keeping a stale cache
    file is preferable to failing the periodic task.
    """
    session = args['session']
    for url, filename in zip(args['feeds'], args['filenames']):
        try:
            content = session.get(url, timeout=5).content
        except Exception:  # pragma: no cover
            pass
        else:
            with open(filename, 'wb') as fd:
                fd.write(content)
    return args['name']
|
python
|
{
"resource": ""
}
|
q8394
|
parse
|
train
|
def parse(feedparser, args):
    """Parse cached feed files and return the entries not yet announced.

    Args:
        feedparser: the ``feedparser`` module (passed in so the call can
            run in an executor without importing it in the main thread).
        args: feed description dict; ``filenames`` lists the cache files.

    Returns:
        list of ``(updated, entry)`` tuples sorted by date.

    A ``<filename>.updated`` marker file stores the date string of the
    last announced entry so entries are only reported once; entries
    older than two days are skipped as well.
    """
    entries = []
    args = irc3.utils.Config(args)
    max_date = datetime.datetime.now() - datetime.timedelta(days=2)
    for filename in args['filenames']:
        try:
            with open(filename + '.updated') as fd:
                updated = fd.read().strip()
        except (OSError, IOError):
            updated = '0'
        feed = feedparser.parse(filename)
        for e in feed.entries:
            if e.updated <= updated:
                # skip already sent entries
                continue
            try:
                updated_parsed = e.updated_parsed
            except AttributeError:
                continue
            # BUGFIX: use only the first 6 struct_time fields (through
            # seconds). The 7th field is tm_wday, which was previously
            # being passed as datetime's microsecond argument.
            if datetime.datetime(*updated_parsed[:6]) < max_date:
                # skip entries older than 2 days
                continue
            e['filename'] = filename
            e['feed'] = args
            entries.append((e.updated, e))
        if entries:
            entries = sorted(entries, key=itemgetter(0))
            # remember the newest entry seen so far for this cache file
            with open(filename + '.updated', 'w') as fd:
                fd.write(str(entries[-1][0]))
    return entries
|
python
|
{
"resource": ""
}
|
q8395
|
IrcObject.attach_events
|
train
|
def attach_events(self, *events, **kwargs):
    """Attach one or more events to the bot instance.

    When the ``insert`` keyword is present, events are prepended so they
    match before the already-registered ones.
    """
    reg = self.registry
    # NOTE(review): presence of the key — not its value — enables insertion
    prepend = 'insert' in kwargs
    for event in events:
        compiled = event.compile(self.config)
        regexp = getattr(event.regexp, 're', event.regexp)
        if regexp not in reg.events[event.iotype]:
            # first handler for this regexp: register the compiled form
            if prepend:
                reg.events_re[event.iotype].insert(0, (regexp, compiled))
            else:
                reg.events_re[event.iotype].append((regexp, compiled))
        if prepend:
            reg.events[event.iotype][regexp].insert(0, event)
        else:
            reg.events[event.iotype][regexp].append(event)
|
python
|
{
"resource": ""
}
|
q8396
|
IrcObject.detach_events
|
train
|
def detach_events(self, *events):
    """Detach one or more events from the bot instance."""
    reg = self.registry
    to_remove = defaultdict(list)
    all_events = reg.events
    # drop each event from the handler lists
    for event in events:
        regexp = getattr(event.regexp, 're', event.regexp)
        iotype = event.iotype
        handlers = all_events[iotype].get(regexp, [])
        if event in handlers:
            handlers.remove(event)
            if not handlers:
                # no handler left for this regexp: drop it entirely
                del all_events[iotype][regexp]
                to_remove[iotype].append(regexp)
    # purge the compiled regexp list as well
    for iotype, regexps in to_remove.items():
        reg.events_re[iotype] = [
            pair for pair in reg.events_re[iotype]
            if pair[0] not in regexps]
|
python
|
{
"resource": ""
}
|
q8397
|
IrcObject.reload
|
train
|
def reload(self, *modules):
    """Reload one or more plugins

    Without arguments every included module is reloaded. Configuration
    files (when known) are re-parsed first, then the registry is reset
    and each requested module is re-imported and re-scanned. Plugins are
    notified via the ``before_reload`` / ``after_reload`` events.
    """
    self.notify('before_reload')
    if 'configfiles' in self.config:
        # reload configfiles
        self.log.info('Reloading configuration...')
        cfg = utils.parse_config(
            self.server and 'server' or 'bot', *self.config['configfiles'])
        self.config.update(cfg)
    self.log.info('Reloading python code...')
    if not modules:
        # no explicit list: reload everything that was included
        modules = self.registry.includes
    # keep a snapshot (reversed: scan order is re-applied below)
    scanned = list(reversed(self.registry.scanned))
    # reset includes and events
    self.registry.reset()
    to_scan = []
    for module_name, categories in scanned:
        if module_name in modules:
            module = utils.maybedotted(module_name)
            reload_module(module)
            to_scan.append((module_name, categories))
    # rescan all modules
    for module_name, categories in to_scan:
        self.include(module_name, venusian_categories=categories)
    self.registry.reloading = {}
    self.notify('after_reload')
|
python
|
{
"resource": ""
}
|
q8398
|
IrcObject.call_many
|
train
|
def call_many(self, callback, args):
    """Invoke *callback* once per argument tuple; return the last result.

    *callback* may also be a method name, resolved on the instance.
    (NOTE(review): the historical docstring promised one call per
    second, but no throttling is performed here — confirm upstream.)
    """
    if isinstance(callback, str):
        callback = getattr(self, callback)
    result = None
    for arg in args:
        result = callback(*arg)
    return result
|
python
|
{
"resource": ""
}
|
q8399
|
IrcObject.run
|
train
|
def run(self, forever=True):
    """Start the bot: open the connection, install signal handlers and,
    unless *forever* is False, run the event loop forever."""
    loop = self.create_connection()
    self.add_signal_handlers()
    if not forever:
        return
    loop.run_forever()
|
python
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.