| code (string) | signature (string) | docstring (string) | loss_without_docstring (float64) | loss_with_docstring (float64) | factor (float64) |
|---|---|---|---|---|---|
# get formatter initialized by config (usually on a NullHandler)
ll = logging.getLogger('irc')
formatter = ll.handlers[0].formatter
# add a handler for the sub logger
handler = Handler(bot, *targets)
handler.setFormatter(formatter)
self.addHandler(handler)
|
def set_irc_targets(self, bot, *targets)
|
Add an irc Handler using bot and log to targets (can be nicks or
channels):
.. code-block:: python
>>> bot = None
>>> log = logging.getLogger('irc.mymodule')
>>> log.set_irc_targets(bot, '#chan', 'admin')
| 11.087918
| 10.668086
| 1.039354
|
data = data.replace('\n', ' ').replace('\r', ' ')
f = asyncio.Future(loop=self.loop)
if self.queue is not None and nowait is False:
self.queue.put_nowait((f, data))
else:
self.send(data.replace('\n', ' ').replace('\r', ' '))
f.set_result(True)
return f
|
def send_line(self, data, nowait=False)
|
send a line to the server. CR and LF are replaced by spaces
| 2.687424
| 2.409869
| 1.115175
|
if message:
messages = utils.split_message(message, self.config.max_length)
if isinstance(target, DCCChat):
for message in messages:
target.send_line(message)
elif target:
f = None
for message in messages:
f = self.send_line('PRIVMSG %s :%s' % (target, message),
nowait=nowait)
return f
|
def privmsg(self, target, message, nowait=False)
|
send a privmsg to target
| 3.915631
| 3.900872
| 1.003784
|
if target and message:
messages = utils.split_message(message, self.config.max_length)
f = None
for message in messages:
f = self.send_line('PRIVMSG %s :\x01%s\x01' % (target,
message),
nowait=nowait)
return f
|
def ctcp(self, target, message, nowait=False)
|
send a ctcp to target
| 3.677699
| 3.678371
| 0.999817
|
self.send_line('MODE %s %s' % (target, ' '.join(data)), nowait=True)
|
def mode(self, target, *data)
|
set user or channel mode
| 4.554104
| 3.962993
| 1.149158
|
password = self.config.passwords.get(
target.strip(self.server_config['CHANTYPES']))
if password:
target += ' ' + password
self.send_line('JOIN %s' % target)
|
def join(self, target)
|
join a channel
| 7.564977
| 6.438608
| 1.17494
|
if reason:
target += ' :' + reason
self.send_line('PART %s' % target)
|
def part(self, target, reason=None)
|
quit a channel
| 6.223222
| 5.501031
| 1.131283
|
if reason:
target += ' :' + reason
self.send_line('KICK %s %s' % (channel, target), nowait=True)
|
def kick(self, channel, target, reason=None)
|
kick target from channel
| 3.939372
| 4.129658
| 0.953922
|
if topic:
channel += ' :' + topic
self.send_line('TOPIC %s' % channel)
|
def topic(self, channel, topic=None)
|
change or request the topic of a channel
| 6.143356
| 6.241993
| 0.984198
|
cmd = 'AWAY'
if message:
cmd += ' :' + message
self.send_line(cmd)
|
def away(self, message=None)
|
mark ourself as away
| 5.246896
| 4.67177
| 1.123107
|
if not reason:
reason = 'bye'
else:
reason = reason
self.send_line('QUIT :%s' % reason)
|
def quit(self, reason=None)
|
disconnect
| 3.999575
| 3.897532
| 1.026181
|
if not self._ip:
if 'ip' in self.config:
ip = self.config['ip']
else:
ip = self.protocol.transport.get_extra_info('sockname')[0]
ip = ip_address(ip)
if ip.version == 4:
self._ip = ip
else: # pragma: no cover
response = urlopen('http://ipv4.icanhazip.com/')
ip = response.read().strip().decode()
ip = ip_address(ip)
self._ip = ip
return self._ip
|
def ip(self)
|
return bot's ip as an ``ip_address`` object
| 2.740288
| 2.55347
| 1.073162
|
if self._dcc is None:
self._dcc = DCCManager(self)
return self._dcc
|
def dcc(self)
|
return the :class:`~irc3.dcc.DCCManager`
| 4.283595
| 2.477253
| 1.729171
|
return self.dcc.create(
'chat', mask, host=host, port=port).ready
|
def dcc_chat(self, mask, host=None, port=None)
|
Open a DCC CHAT with mask. If host/port are specified then connect
to a server. Else create a server
| 10.236041
| 11.383679
| 0.899186
|
return self.dcc.create(
'get', mask, filepath=filepath, filesize=filesize,
host=host, port=port).ready
|
def dcc_get(self, mask, host, port, filepath, filesize=None)
|
DCC GET a file from mask. filepath must be an absolute path with an
existing directory. filesize is the expected file size.
| 6.752532
| 6.664553
| 1.013201
|
return self.dcc.create('send', mask, filepath=filepath).ready
|
def dcc_send(self, mask, filepath)
|
DCC SEND a file to mask. filepath must be an absolute path to an
existing file
| 20.616076
| 26.34992
| 0.782396
|
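The DCC helpers above each return the created connection's `ready` future (note the `return self.dcc.create(...).ready` pattern), so they can simply be awaited. A minimal sketch, assuming an asyncio context; the coroutine name and file path are illustrative, not taken from the dataset:

    # Minimal sketch: awaiting the `ready` future returned by dcc_send() above.
    # `bot` and `mask` are assumed to come from surrounding irc3 plugin code;
    # the file path is a placeholder.
    async def send_report(bot, mask):
        await bot.dcc_send(mask, '/tmp/report.txt')
        bot.log.info('DCC SEND to %s is ready', mask)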
return self.dcc.resume(mask, filepath, port, pos)
|
def dcc_accept(self, mask, filepath, port, pos)
|
accept a DCC RESUME for an existing DCC SEND. filepath is the
filename to send. port is the port opened on the server.
pos is the expected offset
| 8.553246
| 8.495426
| 1.006806
|
'''Encodes a dictionary of tags to fit into an IRC-message.
See IRC Message Tags: http://ircv3.net/specs/core/message-tags-3.2.html
>>> from collections import OrderedDict
>>> encode({'key': 'value'})
'key=value'
>>> d = {'aaa': 'bbb', 'ccc': None, 'example.com/ddd': 'eee'}
>>> d_ordered = OrderedDict(sorted(d.items(), key=lambda t: t[0]))
>>> encode(d_ordered)
'aaa=bbb;ccc;example.com/ddd=eee'
>>> d = {'key': 'value;with special\\\\characters', 'key2': 'with=equals'}
>>> d_ordered = OrderedDict(sorted(d.items(), key=lambda t: t[0]))
>>> print(encode(d_ordered))
key=value\\:with\\sspecial\\\characters;key2=with=equals
>>> print(encode({'key': r'\\something'}))
key=\\\\something
'''
tagstrings = []
for key, value in tags.items():
if not _valid_key.match(key):
raise ValueError("dictionary key is invalid as tag key: " + key)
# if no value, just append the key
if value:
tagstrings.append(key + "=" + _escape(value))
else:
tagstrings.append(key)
return ";".join(tagstrings)
|
def encode(tags)
|
Encodes a dictionary of tags to fit into an IRC-message.
See IRC Message Tags: http://ircv3.net/specs/core/message-tags-3.2.html
>>> from collections import OrderedDict
>>> encode({'key': 'value'})
'key=value'
>>> d = {'aaa': 'bbb', 'ccc': None, 'example.com/ddd': 'eee'}
>>> d_ordered = OrderedDict(sorted(d.items(), key=lambda t: t[0]))
>>> encode(d_ordered)
'aaa=bbb;ccc;example.com/ddd=eee'
>>> d = {'key': 'value;with special\\\\characters', 'key2': 'with=equals'}
>>> d_ordered = OrderedDict(sorted(d.items(), key=lambda t: t[0]))
>>> print(encode(d_ordered))
key=value\\:with\\sspecial\\\characters;key2=with=equals
>>> print(encode({'key': r'\\something'}))
key=\\\\something
| 3.424061
| 1.570052
| 2.180859
|
'''Decodes a tag-string from an IRC-message into a python dictionary.
See IRC Message Tags: http://ircv3.net/specs/core/message-tags-3.2.html
>>> from pprint import pprint
>>> pprint(decode('key=value'))
{'key': 'value'}
>>> pprint(decode('aaa=bbb;ccc;example.com/ddd=eee'))
{'aaa': 'bbb', 'ccc': None, 'example.com/ddd': 'eee'}
>>> s = r'key=value\\:with\\sspecial\\\\characters;key2=with=equals'
>>> pprint(decode(s))
{'key': 'value;with special\\\\characters', 'key2': 'with=equals'}
>>> print(decode(s)['key'])
value;with special\\characters
>>> print(decode(r'key=\\\\something')['key'])
\\something
'''
if not tagstring:
# None/empty = no tags
return {}
tags = {}
for tag in tagstring.split(";"):
# value is either everything after "=", or None
key, value = (tag.split("=", 1) + [None])[:2]
if not _valid_key.match(key):
raise ValueError("invalid tag key: " + key)
if value:
if not _valid_escaped_value.match(value):
raise ValueError("invalid escaped tag value: " + value)
value = _unescape(value)
tags[key] = value
return tags
|
def decode(tagstring)
|
Decodes a tag-string from an IRC-message into a python dictionary.
See IRC Message Tags: http://ircv3.net/specs/core/message-tags-3.2.html
>>> from pprint import pprint
>>> pprint(decode('key=value'))
{'key': 'value'}
>>> pprint(decode('aaa=bbb;ccc;example.com/ddd=eee'))
{'aaa': 'bbb', 'ccc': None, 'example.com/ddd': 'eee'}
>>> s = r'key=value\\:with\\sspecial\\\\characters;key2=with=equals'
>>> pprint(decode(s))
{'key': 'value;with special\\\\characters', 'key2': 'with=equals'}
>>> print(decode(s)['key'])
value;with special\\characters
>>> print(decode(r'key=\\\\something')['key'])
\\something
| 3.61264
| 1.73562
| 2.081469
|
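The `encode`/`decode` rows above rely on `_escape`/`_unescape` helpers that are referenced but not shown. The following is a minimal sketch of what they could look like, following the IRCv3 message-tags escaping rules the doctests exercise; the helper bodies here are assumptions, not the library's code:

    # Sketch of the tag-value escaping used by encode()/decode() above, per the
    # IRCv3 message-tags spec. Names mirror the referenced helpers; bodies are
    # assumptions.
    _escape_map = {
        ';': r'\:',    # ';' separates tags, so it must be escaped
        ' ': r'\s',
        '\\': '\\\\',
        '\r': r'\r',
        '\n': r'\n',
    }
    _unescape_map = {v: k for k, v in _escape_map.items()}

    def _escape(value):
        return ''.join(_escape_map.get(ch, ch) for ch in value)

    def _unescape(value):
        out, i = [], 0
        while i < len(value):
            if value[i] == '\\' and i + 1 < len(value):
                # unknown escapes drop the backslash and keep the character
                out.append(_unescape_map.get(value[i:i + 2], value[i + 1]))
                i += 2
            else:
                out.append(value[i])
                i += 1
        return ''.join(out)

    assert _escape('value;with special\\characters') == r'value\:with\sspecial\\characters'
    assert _unescape(r'value\:with\sspecial\\characters') == 'value;with special\\characters'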
def hdig(x):
return fdigest(x).hexdigest()
fdigest = get_digest(digest)
luser = lower(username)
tpass = password[:10].encode("ascii")
hvalue = hdig("{0}:{1}".format(luser, hdig(tpass)).encode("ascii"))
bhvalue = hvalue.encode("ascii")
bchallenge = challenge.encode("ascii")
return hmac.HMAC(bhvalue, bchallenge, digestmod=fdigest).hexdigest()
|
def challenge_auth(username, password, challenge, lower, digest='sha256')
|
Calculates quakenet's challenge auth hash
.. code-block:: python
>>> challenge_auth("mooking", "0000000000",
... "12345678901234567890123456789012", str.lower, "md5")
'2ed1a1f1d2cd5487d2e18f27213286b9'
| 5.270844
| 5.705415
| 0.923832
|
conn = bot.get_social_connection(id='twitter')
dirname = os.path.expanduser('~/.irc3/twitter/{nick}'.format(**bot.config))
if not os.path.isdir(dirname):
os.makedirs(dirname)
filename = os.path.join(dirname, 'retweeted')
if os.path.isfile(filename):
with open(filename) as fd:
retweeted = [i.strip() for i in fd.readlines()]
else:
retweeted = []
for user in ('pypi', 'gawel_'):
results = conn.search.tweets(
q=user + ' AND irc3',
result_type='recent')
for item in results.get('statuses', []):
if item['user']['screen_name'] == user:
if item['id_str'] not in retweeted:
res = conn(getattr(conn.statuses.retweet, item['id_str']))
if 'id' in res:
with open(filename, 'a+') as fd:
fd.write(item['id_str'] + '\n')
|
def auto_retweet(bot)
|
retweet author tweets about irc3 and pypi releases
| 3.441768
| 3.046941
| 1.129581
|
fstate = entry.filename + '.state'
if os.path.isfile(fstate):
with open(fstate) as fd:
state = fd.read().strip()
else:
state = None
if 'failed' in entry.summary:
nstate = 'failed'
else:
nstate = 'success'
with open(fstate, 'w') as fd:
fd.write(nstate)
if state != nstate:
build = entry.title.split('#')[1]
entry['title'] = 'Build #{0} {1}'.format(build, nstate)
return True
|
def filter_travis(self, entry)
|
Only show the latest entry if this entry is in a new state
| 3.164727
| 2.954998
| 1.070974
|
for package in self.packages:
if entry.title.lower().startswith(package):
return entry
|
def filter_pypi(self, entry)
|
Show only useful packages
| 6.216984
| 5.481204
| 1.134237
|
channels = []
for res in results:
channels.extend(res.pop('channels', '').split())
value.update(res)
value['channels'] = channels
value['success'] = value.get('retcode') == '318'
return value
|
def process_results(self, results=None, **value)
|
take results list of all events and put them in a dict
| 5.734854
| 5.52147
| 1.038646
|
for res in results:
if 'mask' in res:
res['mask'] = utils.IrcString(res['mask'])
value['success'] = res.pop('retcode', None) != '486'
value.update(res)
return value
|
def process_results(self, results=None, **value)
|
take results list of all events and return first dict
| 8.101809
| 7.583353
| 1.068368
|
bot.send('NOTICE %(nick)s :PONG %(nick)s!' % dict(nick=mask.nick))
|
def ping(bot, mask, target, args)
|
ping/pong
%%ping
| 7.986239
| 11.011778
| 0.725245
|
msg = ' '.join(args['<args>'])
bot.log.info('quote> %r', msg)
bot.send(msg)
|
def quote(bot, mask, target, args)
|
send quote to the server
%%quote <args>...
| 6.799062
| 6.700917
| 1.014647
|
plugin = bot.get_plugin(utils.maybedotted('irc3.plugins.core.Core'))
bot.loop.call_soon(plugin.reconnect)
|
def reconnect(bot, mask, target, args)
|
force reconnect
%%reconnect
| 9.129182
| 9.509213
| 0.960035
|
def p(text):
print(text, file=file)
plugin = bot.get_plugin(Commands)
title = "Available Commands for {nick} at {host}".format(**bot.config)
p("=" * len(title))
p(title)
p("=" * len(title))
p('')
p('.. contents::')
p('')
modules = {}
for name, (predicates, callback) in plugin.items():
commands = modules.setdefault(callback.__module__, [])
commands.append((name, callback, predicates))
for module in sorted(modules):
p(module)
p('=' * len(module))
p('')
for name, callback, predicates in sorted(modules[module]):
p(name)
p('-' * len(name))
p('')
doc = callback.__doc__
doc = doc.replace('%%', bot.config.cmd)
for line in doc.split('\n'):
line = line.strip()
if line.startswith(bot.config.cmd):
line = ' ``{}``'.format(line)
p(line)
if 'permission' in predicates:
p('*Require {0[permission]} permission.*'.format(predicates))
if predicates.get('public', True) is False:
p('*Only available in private.*')
p('')
|
def print_help_page(bot, file=sys.stdout)
|
print help page
| 3.381144
| 3.415062
| 0.990068
|
self.bot.log.info('Server config: %r', self.bot.server_config)
# recompile when I'm sure of my nickname
self.bot.config['nick'] = kwargs['me']
self.bot.recompile()
# Let all plugins know that server can handle commands
self.bot.notify('server_ready')
# detach useless events
self.bot.detach_events(*self.before_connect_events)
|
def connected(self, **kwargs)
|
trigger the server_ready event
| 10.617722
| 9.906042
| 1.071843
|
self.reconn_handle.cancel()
self.reconn_handle = self.bot.loop.call_later(self.timeout,
self.reconnect)
if self.ping_handle is not None:
self.ping_handle.cancel()
self.ping_handle = self.bot.loop.call_later(
self.timeout - self.max_lag, self.bot.send,
'PING :%s' % int(self.bot.loop.time()))
|
def pong(self, event='PONG', data='', **kw): # pragma: no cover
self.bot.log.debug('%s ping-pong (%s)', event, data)
if self.reconn_handle is not None
|
PONG/PING
| 2.761166
| 2.68087
| 1.029951
|
self.bot.send('PONG :' + data)
self.pong(event='PING', data=data)
|
def ping(self, data)
|
PING reply
| 10.971682
| 10.767144
| 1.018996
|
if self.bot.nick == nick.nick:
self.bot.config['nick'] = new_nick
self.bot.recompile()
|
def recompile(self, nick=None, new_nick=None, **kw)
|
recompile regexp on new nick
| 5.95528
| 5.192906
| 1.146811
|
if me == '*':
self.bot.set_nick(self.bot.nick + '_')
self.bot.log.debug('Trying to regain nickname in 30s...')
self.nick_handle = self.bot.loop.call_later(
30, self.bot.set_nick, self.bot.original_nick)
|
def badnick(self, me=None, nick=None, **kw)
|
Use alt nick on nick error
| 5.371389
| 5.209739
| 1.031028
|
config = self.bot.config['server_config']
for opt in data.split(' '):
if '=' in opt:
opt, value = opt.split('=', 1)
else:
value = True
if opt.isupper():
config[opt] = value
|
def set_config(self, data=None, **kwargs)
|
Store server config
| 3.499474
| 3.129873
| 1.118088
|
session = args['session']
for feed, filename in zip(args['feeds'], args['filenames']):
try:
resp = session.get(feed, timeout=5)
content = resp.content
except Exception: # pragma: no cover
pass
else:
with open(filename, 'wb') as fd:
fd.write(content)
return args['name']
|
def fetch(args)
|
fetch a feed
| 3.386144
| 3.269918
| 1.035544
|
entries = []
args = irc3.utils.Config(args)
max_date = datetime.datetime.now() - datetime.timedelta(days=2)
for filename in args['filenames']:
try:
with open(filename + '.updated') as fd:
updated = fd.read().strip()
except (OSError, IOError):
updated = '0'
feed = feedparser.parse(filename)
for e in feed.entries:
if e.updated <= updated:
# skip already sent entries
continue
try:
updated_parsed = e.updated_parsed
except AttributeError:
continue
if datetime.datetime(*updated_parsed[:7]) < max_date:
# skip entries older than 2 days
continue
e['filename'] = filename
e['feed'] = args
entries.append((e.updated, e))
if entries:
entries = sorted(entries, key=itemgetter(0))
with open(filename + '.updated', 'w') as fd:
fd.write(str(entries[-1][0]))
return entries
|
def parse(feedparser, args)
|
parse a feed using feedparser
| 3.026645
| 3.022976
| 1.001214
|
self.set_timeout()
data = self.decode(data)
if self.queue:
data = self.queue.popleft() + data
lines = data.replace('\r', '').split('\n')
self.queue.append(lines.pop(-1))
for line in lines:
self.bot.dispatch(line, iotype='dcc_in', client=self)
|
def data_received(self, data)
|
data received
| 5.552233
| 5.361034
| 1.035665
|
def callback(context, name, ob):
obj = context.context
if info.scope == 'class':
@functools.wraps(func)
def f(self, *args, **kwargs):
plugin = obj.get_plugin(ob)
return getattr(plugin, func.__name__)(*args, **kwargs)
setattr(obj, func.__name__, f.__get__(obj, obj.__class__))
else:
setattr(obj, func.__name__, func.__get__(obj, obj.__class__))
info = venusian.attach(func, callback, category='irc3d.extend')
return func
|
def extend(func)
|
same as :func:`~irc3.dec.extend` but for servers
| 3.981201
| 3.700473
| 1.075863
|
reg = self.registry
insert = 'insert' in kwargs
for e in events:
cregexp = e.compile(self.config)
regexp = getattr(e.regexp, 're', e.regexp)
if regexp not in reg.events[e.iotype]:
if insert:
reg.events_re[e.iotype].insert(0, (regexp, cregexp))
else:
reg.events_re[e.iotype].append((regexp, cregexp))
if insert:
reg.events[e.iotype][regexp].insert(0, e)
else:
reg.events[e.iotype][regexp].append(e)
|
def attach_events(self, *events, **kwargs)
|
Attach one or more events to the bot instance
| 3.427091
| 3.472319
| 0.986975
|
reg = self.registry
delete = defaultdict(list)
# remove from self.events
all_events = reg.events
for e in events:
regexp = getattr(e.regexp, 're', e.regexp)
iotype = e.iotype
if e in all_events[iotype].get(regexp, []):
all_events[iotype][regexp].remove(e)
if not all_events[iotype][regexp]:
del all_events[iotype][regexp]
# need to delete from self.events_re
delete[iotype].append(regexp)
# delete from events_re
for iotype, regexps in delete.items():
reg.events_re[iotype] = [r for r in reg.events_re[iotype]
if r[0] not in regexps]
|
def detach_events(self, *events)
|
Detach one or more events from the bot instance
| 3.005757
| 3.010981
| 0.998265
|
self.notify('before_reload')
if 'configfiles' in self.config:
# reload configfiles
self.log.info('Reloading configuration...')
cfg = utils.parse_config(
self.server and 'server' or 'bot', *self.config['configfiles'])
self.config.update(cfg)
self.log.info('Reloading python code...')
if not modules:
modules = self.registry.includes
scanned = list(reversed(self.registry.scanned))
# reset includes and events
self.registry.reset()
to_scan = []
for module_name, categories in scanned:
if module_name in modules:
module = utils.maybedotted(module_name)
reload_module(module)
to_scan.append((module_name, categories))
# rescan all modules
for module_name, categories in to_scan:
self.include(module_name, venusian_categories=categories)
self.registry.reloading = {}
self.notify('after_reload')
|
def reload(self, *modules)
|
Reload one or more plugins
| 4.904237
| 4.919456
| 0.996906
|
if isinstance(callback, str):
callback = getattr(self, callback)
f = None
for arg in args:
f = callback(*arg)
return f
|
def call_many(self, callback, args)
|
callback is run with each arg, but only one call is made per second
| 3.846774
| 3.710018
| 1.036861
|
try:
self.loop.add_signal_handler(signal.SIGHUP, self.SIGHUP)
except (RuntimeError, AttributeError): # pragma: no cover
# windows
pass
try:
self.loop.add_signal_handler(signal.SIGINT, self.SIGINT)
except (RuntimeError, NotImplementedError): # pragma: no cover
# anaconda
pass
|
def add_signal_handlers(self)
|
Register handlers for UNIX signals (SIGHUP/SIGINT)
| 2.871489
| 2.698408
| 1.064142
|
loop = self.create_connection()
self.add_signal_handlers()
if forever:
loop.run_forever()
|
def run(self, forever=True)
|
start the bot
| 5.394147
| 4.984195
| 1.08225
|
cfg = dict(cfg, **kwargs)
pythonpath = cfg.get('pythonpath', [])
if 'here' in cfg:
pythonpath.append(cfg['here'])
for path in pythonpath:
sys.path.append(os.path.expanduser(path))
prog = cls.server and 'irc3d' or 'irc3'
if cfg.get('debug'):
cls.venusian_categories.append(prog + '.debug')
if cfg.get('interactive'): # pragma: no cover
import irc3.testing
context = getattr(irc3.testing, cls.__name__)(**cfg)
else:
context = cls(**cfg)
if cfg.get('raw'):
context.include('irc3.plugins.log',
venusian_categories=[prog + '.debug'])
return context
|
def from_config(cls, cfg, **kwargs)
|
return an instance configured with the ``cfg`` dict
| 4.732173
| 4.676667
| 1.011869
|
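A hedged sketch of driving the classmethod above from plain Python. It assumes `irc3.IrcBot` as the concrete class and that irc3's defaults cover the connection settings left out; the config keys are illustrative:

    # Illustrative only: build a bot from a plain dict. `debug` and `raw` are the
    # keys from_config() inspects above; `nick` is an assumed extra key.
    import irc3

    cfg = {
        'nick': 'mybot',
        'debug': True,   # appends the '<prog>.debug' venusian category
        'raw': True,     # includes irc3.plugins.log with the debug category
    }
    bot = irc3.IrcBot.from_config(cfg)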
''' Executes the provided Robot Framework keyword in a separate thread and immediately returns a handle to be used with async_get '''
handle = self._last_thread_handle
thread = self._threaded(keyword, *args, **kwargs)
thread.start()
self._thread_pool[handle] = thread
self._last_thread_handle += 1
return handle
|
def async_run(self, keyword, *args, **kwargs)
|
Executes the provided Robot Framework keyword in a separate thread and immediately returns a handle to be used with async_get
| 5.249269
| 3.372576
| 1.556457
|
''' Blocks until the thread created by async_run returns '''
assert handle in self._thread_pool, 'Invalid async call handle'
result = self._thread_pool[handle].result_queue.get()
del self._thread_pool[handle]
return result
|
def async_get(self, handle)
|
Blocks until the thread created by async_run returns
| 6.229134
| 4.089239
| 1.523299
|
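The two rows above implement a handle-based thread pool: `async_run` starts a keyword on a thread and returns an integer handle, and `async_get` blocks on that handle's result queue. A plain-Python sketch of the same pattern (an illustration of the idea, not the library's code):

    # Handle-based async pattern, mirroring async_run()/async_get() above.
    import queue
    import threading

    class AsyncRunner:
        def __init__(self):
            self._last_handle = 0
            self._pool = {}

        def async_run(self, func, *args, **kwargs):
            handle = self._last_handle
            result_queue = queue.Queue()
            thread = threading.Thread(
                target=lambda: result_queue.put(func(*args, **kwargs)))
            thread.result_queue = result_queue   # same attribute the rows above read
            thread.start()
            self._pool[handle] = thread
            self._last_handle += 1
            return handle

        def async_get(self, handle):
            assert handle in self._pool, 'Invalid async call handle'
            result = self._pool[handle].result_queue.get()
            del self._pool[handle]
            return result

    runner = AsyncRunner()
    h = runner.async_run(sum, [1, 2, 3])
    assert runner.async_get(h) == 6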
''' Gets the Robot Framework handler associated with the given keyword '''
if EXECUTION_CONTEXTS.current is None:
raise RobotNotRunningError('Cannot access execution context')
return EXECUTION_CONTEXTS.current.get_handler(keyword)
|
def _get_handler_from_keyword(self, keyword)
|
Gets the Robot Framework handler associated with the given keyword
| 6.488924
| 5.014369
| 1.294066
|
'''
A special set function to ensure
we're setting with a dictionary
'''
if value is None:
setattr(self, '_PMMail__custom_headers', {})
elif isinstance(value, dict):
setattr(self, '_PMMail__custom_headers', value)
else:
raise TypeError('Custom headers must be a dictionary of key-value pairs')
|
def _set_custom_headers(self, value)
|
A special set function to ensure
we're setting with a dictionary
| 6.029646
| 3.453116
| 1.746146
|
'''
A special set function to ensure
we're setting with a dictionary
'''
if value is None:
setattr(self, '_PMMail__metadata', {})
elif isinstance(value, dict):
for k, v in value.items():
if (not isinstance(k, str) and not isinstance(k, int)) \
or (not isinstance(v, str) and not isinstance(v, int)):
raise TypeError('Metadata keys and values can only be strings or integers')
setattr(self, '_PMMail__metadata', value)
else:
raise TypeError('Metadata must be a dictionary of key-value pairs')
|
def _set_metadata(self, value)
|
A special set function to ensure
we're setting with a dictionary
| 3.456892
| 2.607652
| 1.325672
|
'''
A special set function to ensure
we're setting with a list
'''
if value is None:
setattr(self, '_PMMail__attachments', [])
elif isinstance(value, list):
setattr(self, '_PMMail__attachments', value)
else:
raise TypeError('Attachments must be a list')
|
def _set_attachments(self, value)
|
A special set function to ensure
we're setting with a list
| 5.700715
| 3.315156
| 1.719592
|
'''
Make sure all values are of the appropriate
type and are not missing.
'''
if not self.__api_key:
raise PMMailMissingValueException('Cannot send an e-mail without a Postmark API Key')
elif not self.__sender:
raise PMMailMissingValueException('Cannot send an e-mail without a sender (.sender field)')
elif not self.__to:
raise PMMailMissingValueException('Cannot send an e-mail without at least one recipient (.to field)')
elif (self.__template_id or self.__template_model) and not all([self.__template_id, self.__template_model]):
raise PMMailMissingValueException(
'Cannot send a template e-mail without both template_id and template_model set')
elif not any([self.__template_id, self.__template_model, self.__subject]):
raise PMMailMissingValueException('Cannot send an e-mail without a subject')
elif not self.__html_body and not self.__text_body and not self.__template_id:
raise PMMailMissingValueException('Cannot send an e-mail without either an HTML or text version of your e-mail body')
if self.__track_opens and not self.__html_body:
print('WARNING: .track_opens set to True with no .html_body set. Tracking opens will not work; message will still send.')
|
def _check_values(self)
|
Make sure all values are of the appropriate
type and are not missing.
| 3.187578
| 2.797478
| 1.139447
|
'''
Send the email through the Postmark system.
Pass test=True to just print out the resulting
JSON message being sent to Postmark
'''
self._check_values()
# Set up message dictionary
json_message = self.to_json_message()
# if (self.__html_body and not self.__text_body) and self.__multipart:
# # TODO: Set up regex to strip html
# pass
# If test is not specified, attempt to read the Django setting
if test is None:
try:
from django.conf import settings as django_settings
test = getattr(django_settings, "POSTMARK_TEST_MODE", None)
except ImportError:
pass
# If this is a test, just print the message
if test:
print('JSON message is:\n%s' % json.dumps(json_message, cls=PMJSONEncoder))
return
if self.__template_id:
endpoint_url = __POSTMARK_URL__ + 'email/withTemplate/'
else:
endpoint_url = __POSTMARK_URL__ + 'email'
# Set up the url Request
req = Request(
endpoint_url,
json.dumps(json_message, cls=PMJSONEncoder).encode('utf8'),
{
'Accept': 'application/json',
'Content-Type': 'application/json',
'X-Postmark-Server-Token': self.__api_key,
'User-agent': self.__user_agent
}
)
# Attempt send
try:
# print 'sending request to postmark: %s' % json_message
result = urlopen(req)
jsontxt = result.read().decode('utf8')
result.close()
if result.code == 200:
self.message_id = json.loads(jsontxt).get('MessageID', None)
return True
else:
raise PMMailSendException('Return code %d: %s' % (result.code, result.msg))
except HTTPError as err:
if err.code == 401:
raise PMMailUnauthorizedException('Sending Unauthorized - incorrect API key.', err)
elif err.code == 422:
try:
jsontxt = err.read().decode('utf8')
jsonobj = json.loads(jsontxt)
desc = jsonobj['Message']
error_code = jsonobj['ErrorCode']
except KeyError:
raise PMMailUnprocessableEntityException('Unprocessable Entity: Description not given')
if error_code == 406:
raise PMMailInactiveRecipientException('You tried to send email to a recipient that has been marked as inactive.')
raise PMMailUnprocessableEntityException('Unprocessable Entity: %s' % desc)
elif err.code == 500:
raise PMMailServerErrorException('Internal server error at Postmark. Admins have been alerted.', err)
except URLError as err:
if hasattr(err, 'reason'):
raise PMMailURLException('URLError: Failed to reach the server: %s (See "inner_exception" for details)' % err.reason, err)
elif hasattr(err, 'code'):
raise PMMailURLException('URLError: %d: The server couldn\'t fulfill the request. (See "inner_exception" for details)' % err.code, err)
else:
raise PMMailURLException('URLError: The server couldn\'t fulfill the request. (See "inner_exception" for details)', err)
|
def send(self, test=None)
|
Send the email through the Postmark system.
Pass test=True to just print out the resulting
JSON message being sent to Postmark
| 3.181133
| 2.903669
| 1.095556
|
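A hedged usage sketch of the send flow above: constructing a `PMMail` with the keyword arguments used by `_build_message` further down and passing `test=True` so the JSON payload is only printed, never posted. The import path and field values are assumptions:

    # Illustrative only: exercise send(test=True) so nothing is sent to Postmark.
    from postmark import PMMail

    message = PMMail(
        api_key='POSTMARK_API_TEST',          # placeholder key
        sender='sender@example.com',
        to='recipient@example.com',
        subject='Hello',
        text_body='Plain-text body',
    )
    message.send(test=True)   # prints the JSON message instead of sending it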
'''
Remove a message from the batch
'''
if message in self.__messages:
self.__messages.remove(message)
|
def remove_message(self, message)
|
Remove a message from the batch
| 5.224534
| 4.209669
| 1.241079
|
'''
Returns a summary of inactive emails and bounces by type.
'''
self._check_values()
req = Request(
__POSTMARK_URL__ + 'deliverystats',
None,
{
'Accept': 'application/json',
'Content-Type': 'application/json',
'X-Postmark-Server-Token': self.__api_key,
'User-agent': self.__user_agent
}
)
# Attempt send
try:
# print 'sending request to postmark:'
result = urlopen(req)
with closing(result):
if result.code == 200:
return json.loads(result.read())
else:
raise PMMailSendException('Return code %d: %s' % (result.code, result.msg))
except HTTPError as err:
return err
|
def delivery_stats(self)
|
Returns a summary of inactive emails and bounces by type.
| 4.630445
| 3.681401
| 1.257794
|
'''
Fetches a portion of bounces according to the specified input criteria. The count and offset
parameters are mandatory. You should never retrieve all bounces as that could be excessively
slow for your application. To know how many bounces you have, you need to request a portion
first, usually the first page, and the service will return the count in the TotalCount property
of the response.
'''
self._check_values()
params = '?inactive=' + inactive + '&emailFilter=' + email_filter +'&tag=' + tag
params += '&count=' + str(count) + '&offset=' + str(offset)
req = Request(
__POSTMARK_URL__ + 'bounces' + params,
None,
{
'Accept': 'application/json',
'Content-Type': 'application/json',
'X-Postmark-Server-Token': self.__api_key,
'User-agent': self.__user_agent
}
)
# Attempt send
try:
# print 'sending request to postmark:'
result = urlopen(req)
with closing(result):
if result.code == 200:
return json.loads(result.read())
else:
raise PMMailSendException('Return code %d: %s' % (result.code, result.msg))
except HTTPError as err:
return err
|
def get_all(self, inactive='', email_filter='', tag='', count=25, offset=0)
|
Fetches a portion of bounces according to the specified input criteria. The count and offset
parameters are mandatory. You should never retrieve all bounces as that could be excessively
slow for your application. To know how many bounces you have, you need to request a portion
first, usually the first page, and the service will return the count in the TotalCount property
of the response.
| 4.628893
| 2.435768
| 1.900383
|
'''
Activates a deactivated bounce.
'''
self._check_values()
req_url = '/bounces/' + str(bounce_id) + '/activate'
# print req_url
h1 = HTTPConnection('api.postmarkapp.com')
dta = urlencode({"data": "blank"}).encode('utf8')
req = h1.request(
'PUT',
req_url,
dta,
{
'Accept': 'application/json',
'Content-Type': 'application/json',
'X-Postmark-Server-Token': self.__api_key,
'User-agent': self.__user_agent
}
)
r = h1.getresponse()
return json.loads(r.read())
|
def activate(self, bounce_id)
|
Activates a deactivated bounce.
| 3.770846
| 3.604195
| 1.046238
|
if not email_messages:
return
sent = self._send(email_messages)
if sent:
return len(email_messages)
return 0
|
def send_messages(self, email_messages)
|
Sends one or more EmailMessage objects and returns the number of email
messages sent.
| 3.829627
| 3.538893
| 1.082154
|
if not message.recipients():
return False
recipients = ','.join(message.to)
recipients_cc = ','.join(message.cc)
recipients_bcc = ','.join(message.bcc)
text_body = message.body
html_body = None
if isinstance(message, EmailMultiAlternatives):
for alt in message.alternatives:
if alt[1] == "text/html":
html_body = alt[0]
break
elif getattr(message, 'content_subtype', None) == 'html':
# Don't send html content as plain text
text_body = None
html_body = message.body
reply_to = ','.join(message.reply_to)
custom_headers = {}
if message.extra_headers and isinstance(message.extra_headers, dict):
if 'Reply-To' in message.extra_headers:
reply_to = message.extra_headers.pop('Reply-To')
if len(message.extra_headers):
custom_headers = message.extra_headers
attachments = []
if message.attachments and isinstance(message.attachments, list):
if len(message.attachments):
for item in message.attachments:
if isinstance(item, tuple):
(f, content, m) = item
content = base64.b64encode(content)
# b64decode returns bytes on Python 3. PMMail needs a
# str (for JSON serialization). Convert on Python 3
# only to avoid a useless performance hit on Python 2.
if not isinstance(content, str):
content = content.decode()
attachments.append((f, content, m))
else:
attachments.append(item)
postmark_message = PMMail(api_key=self.api_key,
subject=message.subject,
sender=message.from_email,
to=recipients,
cc=recipients_cc,
bcc=recipients_bcc,
text_body=text_body,
html_body=html_body,
reply_to=reply_to,
custom_headers=custom_headers,
attachments=attachments)
postmark_message.tag = getattr(message, 'tag', None)
postmark_message.track_opens = getattr(message, 'track_opens', False)
return postmark_message
|
def _build_message(self, message)
|
A helper method to convert a PMEmailMessage to a PMMail
| 2.375316
| 2.257004
| 1.05242
|
if len(messages) == 1:
to_send = self._build_message(messages[0])
if to_send is False:
# The message was missing recipients.
# Bail.
return False
else:
pm_messages = list(map(self._build_message, messages))
pm_messages = [m for m in pm_messages if m]
if len(pm_messages) == 0:
# If after filtering, there aren't any messages
# to send, bail.
return False
to_send = PMBatchMail(messages=pm_messages)
try:
to_send.send(test=self.test_mode)
except:
if self.fail_silently:
return False
raise
return True
|
def _send(self, messages)
|
A helper method that does the actual sending.
| 3.516225
| 3.392299
| 1.036532
|
'''
PDF link handler; never gets explicitly called by user
'''
if tag == 'a' and ( ('class', 'download-pdf') in attrs or ('id', 'download-pdf') in attrs ):
for attr in attrs:
if attr[0] == 'href':
self.download_link = 'http://www.nature.com' + attr[1]
|
def handle_starttag(self, tag, attrs)
|
PDF link handler; never gets explicitly called by user
| 5.970127
| 3.104698
| 1.922933
|
try:
return rstr.xeger(pattern)
except re.error as e:
raise ValueError(str(e))
|
def genpass(pattern=r'[\w]{32}')
|
generates a password with random characters
| 4.280906
| 4.408187
| 0.971126
|
if string is None:
string = CHAR_ZERO * self.__size__
data = struct.unpack(self.__fmt__, string)
i = 0
for field in self.__fields__:
(vtype, vlen) = self.__fields_types__[field]
if vtype == 'char': # string
setattr(self, field, data[i])
i = i + 1
elif isinstance(vtype, CStructMeta):
num = int(vlen / vtype.size)
if num == 1: # single struct
sub_struct = vtype()
sub_struct.unpack(EMPTY_BYTES_STRING.join(data[i:i+sub_struct.size]))
setattr(self, field, sub_struct)
i = i + sub_struct.size
else: # multiple struct
sub_structs = []
for j in range(0, num):
sub_struct = vtype()
sub_struct.unpack(EMPTY_BYTES_STRING.join(data[i:i+sub_struct.size]))
i = i + sub_struct.size
sub_structs.append(sub_struct)
setattr(self, field, sub_structs)
elif vlen == 1:
setattr(self, field, data[i])
i = i + vlen
else:
setattr(self, field, list(data[i:i+vlen]))
i = i + vlen
|
def unpack(self, string)
|
Unpack the string containing packed C structure data
| 2.344151
| 2.290815
| 1.023283
|
data = []
for field in self.__fields__:
(vtype, vlen) = self.__fields_types__[field]
if vtype == 'char': # string
data.append(getattr(self, field))
elif isinstance(vtype, CStructMeta):
num = int(vlen / vtype.size)
if num == 1: # single struct
v = getattr(self, field, vtype())
v = v.pack()
if sys.version_info >= (3, 0):
v = ([bytes([x]) for x in v])
data.extend(v)
else: # multiple struct
values = getattr(self, field, [])
for j in range(0, num):
try:
v = values[j]
except:
v = vtype()
v = v.pack()
if sys.version_info >= (3, 0):
v = ([bytes([x]) for x in v])
data.extend(v)
elif vlen == 1:
data.append(getattr(self, field))
else:
v = getattr(self, field)
v = v[:vlen] + [0] * (vlen - len(v))
data.extend(v)
return struct.pack(self.__fmt__, *data)
|
def pack(self)
|
Pack the structure data into a string
| 2.487615
| 2.431772
| 1.022964
|
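The `unpack`/`pack` rows above build field-by-field (de)serialization on top of the stdlib `struct` module (`self.__fmt__` is a `struct` format string). A short sketch of the same round trip done directly with `struct`; the format string and field names are illustrative:

    # Round-trip the kind of fixed layout pack()/unpack() above handle per field.
    import struct

    fmt = '<10sIH'   # char[10], unsigned int, unsigned short (little-endian)
    packed = struct.pack(fmt, b'hello', 42, 7)

    name, count, flags = struct.unpack(fmt, packed)
    assert name.rstrip(b'\x00') == b'hello'
    assert (count, flags) == (42, 7)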
"andreax + pts/0 2013-08-21 08:58 . 32341 (l26.box)"
" pts/34 2013-06-12 15:04 26396 id=s/34 term=0 exit=0"
# if self.ut_type not in [6,7]:
# return
print("%-10s %-12s %15s %15s %-8s" % (
str_from_c(self.ut_user),
str_from_c(self.ut_line),
time.strftime("%Y-%m-%d %H:%M", time.gmtime(self.ut_tv.tv_sec)),
self.ut_pid,
str_from_c(self.ut_host) and "(%s)" % str_from_c(self.ut_host) or str_from_c(self.ut_id) and "id=%s" % str_from_c(self.ut_id) or ""))
|
def print_info(self)
|
andreax + pts/0 2013-08-21 08:58 . 32341 (l26.box)
| 6.637933
| 3.863572
| 1.718082
|
_import_all_importer_files()
for module in (value for key, value in globals().items()
if key in __all__):
for klass_name, klass in inspect.getmembers(module, inspect.isclass):
if klass is not BaseImporter and issubclass(klass, BaseImporter):
yield klass
for klass in _get_importers_from_entry_points():
yield klass
|
def get_all()
|
Get all subclasses of BaseImporter from the module and return a generator
| 4.686962
| 3.871109
| 1.210754
|
credentials = db.credentials()
if credentials:
table = Table(
db.config['headers'],
table_format=db.config['table_format'],
colors=db.config['colors'],
hidden=db.config['hidden'],
hidden_string=db.config['hidden_string'],
)
click.echo(table.render(credentials))
|
def list_database(db)
|
Print credentials as a table
| 4.623463
| 4.255441
| 1.086483
|
if level == 'global':
configuration = config.read(config.HOMEDIR, '.passpierc')
elif level == 'local':
configuration = config.read(os.path.join(db.path))
elif level == 'current':
configuration = db.config
if configuration:
click.echo(yaml.safe_dump(configuration, default_flow_style=False))
|
def check_config(db, level)
|
Show current configuration for shell
| 4.872434
| 4.408978
| 1.105116
|
self.data = self.handle(data, **kwargs)
return self
|
def process(self, data=None, **kwargs)
|
Process the provided data and invoke :meth:`Handler.handle` method for this
Handler class.
:param data: The data being processed.
:returns: self
:rtype: :class:`Handler`
.. code-block:: python
def post(self, *args, **kwargs):
self.request = self.get_request_handler()
self.request.process(self.get_data())
return self.get_create_response()
.. seealso:
:meth:`Handler.process`
| 6.414956
| 12.339267
| 0.519881
|
return super(RequestHandler, self).process(data=data or self.get_request_data())
|
def process(self, data=None)
|
Fetch incoming data from the Flask request object when no data is supplied
to the process method. By default, the RequestHandler expects the
incoming data to be sent as JSON.
| 8.512042
| 5.158293
| 1.650167
|
session = self.get_db_session()
session.add(obj)
session.commit()
return obj
|
def save(self, obj)
|
Add ``obj`` to the SQLAlchemy session and commit the changes back to
the database.
:param obj: SQLAlchemy object being saved
:returns: The saved object
| 4.413336
| 5.60295
| 0.787681
|
if self.model is None:
raise ArrestedException('DBObjectMixin requires a model to be set.')
idfield = getattr(self.model, self.model_id_param, None)
if not idfield:
raise ArrestedException('DBObjectMixin could not find a valid Model.id.')
return query.filter(idfield == self.kwargs[self.url_id_param])
|
def filter_by_id(self, query)
|
Apply the primary key filter to query to filter the results for a specific
instance by id.
The filter applied by this method by default can be controlled using the
url_id_param
:param query: SQLAlchemy Query
:returns: A SQLAlchemy Query object
| 5.298433
| 4.916046
| 1.077783
|
query = self.get_query()
query = self.filter_by_id(query)
return self.get_result(query)
|
def get_object(self)
|
Implements the GetObjectMixin interface and calls
:meth:`DBObjectMixin.get_query`. Using this mixin requires usage of
a response handler capable of serializing SQLAlchemy query result objects.
:returns: Typically a SQLALchemy Query result.
:rtype: mixed
.. seealso::
:meth:`DBObjectMixin.get_query`
:meth:`DBObjectMixin.filter_by_id`
:meth:`DBObjectMixin.get_result`
| 6.627953
| 3.404398
| 1.946879
|
session = self.get_db_session()
session.delete(obj)
session.commit()
|
def delete_object(self, obj)
|
Deletes an object from the session by calling session.delete and then commits
the changes to the database.
:param obj: The SQLAlchemy instance being deleted
:returns: None
| 3.820571
| 4.199606
| 0.909745
|
self.app = app
if self.deferred:
self.register_all(self.deferred)
|
def init_app(self, app)
|
Initialise the ArrestedAPI object by storing a pointer to a Flask app object.
This method is typically used when initialisation is deferred.
:param app: Flask application object
Usage::
app = Flask(__name__)
api_v1 = ArrestedAPI()
api_v1.init_app(app)
| 8.736272
| 13.010861
| 0.67146
|
if defer:
self.deferred.append(resource)
else:
resource.init_api(self)
self.app.register_blueprint(resource, url_prefix=self.url_prefix)
|
def register_resource(self, resource, defer=False)
|
Register a :class:`.Resource` blueprint object against the Flask app object.
:param resource: :class:`.Resource` or :class:`flask.Blueprint`
object.
:param defer: Optionally specify that registering this resource should be
deferred. This option is useful when users are creating their
Flask app instance via a factory.
**Deferred resource registration**
Resources can optionally be registered in a deferred manner. Simply pass
`defer=True` to :meth:`.ArrestedAPI.register_resource` to attach the resource to
the API without calling register_blueprint.
This is useful when you're using the factory pattern for creating your Flask app
object as demonstrated below. Deferred resources will not be registered until the
ArrestedAPI instance is initialised with the Flask app object.
Usage::
api_v1 = ArrestedAPI(prefix='/v1')
characters_resource = Resource(
'characters', __name__, url_prefix='/characters'
)
api_v1.register_resource(characters_resource, defer=True)
def create_app():
app = Flask(__name__)
api_v1.init_app(app) # deferred resources are now registered.
| 3.224874
| 5.126922
| 0.629008
|
hooks = []
if self.resource:
hooks.extend(self.resource.api.before_all_hooks)
hooks.extend(self.resource.before_all_hooks)
hooks.extend(self.before_all_hooks)
hooks.extend(
getattr(
self,
'before_{method}_hooks'.format(method=self.meth),
[]
)
)
for hook in chain(hooks):
hook(self)
|
def process_before_request_hooks(self)
|
Process the list of before_{method}_hooks and the before_all_hooks. The hooks
will be processed in the following order
1 - any before_all_hooks defined on the :class:`arrested.ArrestedAPI` object
2 - any before_all_hooks defined on the :class:`arrested.Resource` object
3 - any before_all_hooks defined on the :class:`arrested.Endpoint` object
4 - any before_{method}_hooks defined on the :class:`arrested.Endpoint` object
| 3.310172
| 2.460812
| 1.345155
|
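A hedged sketch of where the hook lists walked by `process_before_request_hooks` live. Only the attribute names (`before_all_hooks`, `before_{method}_hooks`) and the `hook(self)` call signature come from the code above; the `Endpoint` subclass, hook body and class attributes are assumptions:

    # Hypothetical arrested endpoint showing the per-endpoint hook lists.
    from arrested import Endpoint

    def log_request(endpoint):
        # hooks receive the endpoint instance, matching `hook(self)` above
        print('handling %s request' % endpoint.meth)

    class CharactersEndpoint(Endpoint):
        name = 'characters'
        url = '/characters'
        before_all_hooks = [log_request]   # runs for every HTTP method
        before_get_hooks = [log_request]   # 'before_{method}_hooks' for GET only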
hooks = []
meth_hooks = getattr(
self,
'after_{method}_hooks'.format(method=self.meth),
[]
)
hooks.extend(meth_hooks)
hooks.extend(self.after_all_hooks)
if self.resource:
hooks.extend(self.resource.after_all_hooks)
hooks.extend(self.resource.api.after_all_hooks)
for hook in chain(hooks):
resp = hook(self, resp)
return resp
|
def process_after_request_hooks(self, resp)
|
Process the list of after_{method}_hooks and the after_all_hooks. The hooks
will be processed in the following order
1 - any after_{method}_hooks defined on the :class:`arrested.Endpoint` object
2 - any after_all_hooks defined on the :class:`arrested.Endpoint` object
3 - any after_all_hooks defined on the :class:`arrested.Resource` object
4 - any after_all_hooks defined on the :class:`arrested.ArrestedAPI` object
| 2.839419
| 2.479214
| 1.14529
|
self.args = args
self.kwargs = kwargs
self.meth = request.method.lower()
self.resource = current_app.blueprints.get(request.blueprint, None)
if not any([self.meth in self.methods, self.meth.upper() in self.methods]):
return self.return_error(405)
self.process_before_request_hooks()
resp = super(Endpoint, self).dispatch_request(*args, **kwargs)
resp = self.make_response(resp)
resp = self.process_after_request_hooks(resp)
return resp
|
def dispatch_request(self, *args, **kwargs)
|
Dispatch the incoming HTTP request to the appropriate handler.
| 3.075711
| 3.050444
| 1.008283
|
if not isinstance(rv, Response):
resp = Response(
response=rv,
headers=headers,
mimetype=mime,
status=status
)
else:
resp = rv
return resp
|
def make_response(self, rv, status=200, headers=None, mime='application/json')
|
Create a response object using the :class:`flask.Response` class.
:param rv: Response value. If the value is not an instance
of :class:`werkzeug.wrappers.Response` it will be converted
into a Response object.
:param status: specify the HTTP status code for this response.
:param mime: Specify the mimetype for this request.
:param headers: Specify dict of headers for the response.
| 2.618308
| 3.270091
| 0.800683
|
assert self.response_handler is not None, \
'Please define a response_handler ' \
' for Endpoint: %s' % self.__class__.__name__
return self.response_handler(self, **self.get_response_handler_params())
|
def get_response_handler(self)
|
Return the Endpoint's defined :attr:`Endpoint.response_handler`.
:returns: An instance of the Endpoint's specified :class:`ResponseHandler`.
:rtype: :class:`ResponseHandler`
| 4.870206
| 4.533553
| 1.074258
|
assert self.request_handler is not None, \
'Please define a request_handler ' \
' for Endpoint: %s' % self.__class__.__name__
return self.request_handler(self, **self.get_request_handler_params())
|
def get_request_handler(self)
|
Return the Endpoint's defined :attr:`Endpoint.request_handler`.
:returns: An instance of the Endpoint's specified :class:`RequestHandler`.
:rtype: :class:`RequestHandler`
| 4.798378
| 4.602485
| 1.042562
|
resp = None
if payload is not None:
payload = json.dumps(payload)
resp = self.make_response(payload, status=status)
if status in [405]:
abort(status)
else:
abort(status, response=resp)
|
def return_error(self, status, payload=None)
|
Error handler called by request handlers when an error occurs and the request
should be aborted.
Usage::
def handle_post_request(self, *args, **kwargs):
self.request_handler = self.get_request_handler()
try:
self.request_handler.process(self.get_data())
except SomeException as e:
self.return_error(400, payload=self.request_handler.errors)
return self.return_create_response()
| 3.473684
| 4.047822
| 0.858161
|
if self.many:
return self.mapper.many(raw=self.raw, **self.mapper_kwargs).serialize(
data, role=self.role
)
else:
return self.mapper(obj=data, raw=self.raw, **self.mapper_kwargs).serialize(
role=self.role
)
|
def handle(self, data, **kwargs)
|
Run serialization for the specified mapper_class.
Supports both .serialize and .many().serialize Kim interfaces.
:param data: Objects to be serialized.
:returns: Serialized data according to mapper configuration
| 4.307784
| 3.737141
| 1.152695
|
payload = {
"message": "Invalid or incomplete data provided.",
"errors": exp.errors
}
self.endpoint.return_error(self.error_status, payload=payload)
|
def handle_error(self, exp)
|
Called if a Mapper returns MappingInvalid. Should handle the error
and return it in the appropriate format, can be overridden in order
to change the error format.
:param exp: MappingInvalid exception raised
| 8.000122
| 8.950667
| 0.893802
|
try:
if self.many:
return self.mapper.many(raw=self.raw, **self.mapper_kwargs).marshal(
data, role=self.role
)
else:
return self.mapper(
data=data,
obj=self.obj,
partial=self.partial,
**self.mapper_kwargs
).marshal(role=self.role)
except MappingInvalid as e:
self.handle_error(e)
|
def handle(self, data, **kwargs)
|
Run marshalling for the specified mapper_class.
Supports both .marshal and .many().marshal Kim interfaces. Handles errors raised
during marshalling and automatically returns a HTTP error response.
:param data: Data to be marshaled.
:returns: Marshaled object according to mapper configuration
:raises: :class:`werkzeug.exceptions.UnprocessableEntity`
| 4.357848
| 3.625532
| 1.201988
|
params = super(KimEndpoint, self).get_response_handler_params(**params)
params['mapper_class'] = self.mapper_class
params['role'] = self.serialize_role
# After a successful attempt to marshal an object has been made, a response
# is generated using the ResponseHandler. Rather than taking the class level
# setting for many by default, pull it from the request handler params config to
# ensure Marshaling and Serializing are run the same way.
if self._is_marshal_request():
req_params = self.get_request_handler_params()
params['many'] = req_params.get('many', self.many)
else:
params['many'] = self.many
return params
|
def get_response_handler_params(self, **params)
|
Return a config object that will be used to configure the KimResponseHandler
:returns: a dictionary of config options
:rtype: dict
| 7.957898
| 7.894757
| 1.007998
|
params = super(KimEndpoint, self).get_request_handler_params(**params)
params['mapper_class'] = self.mapper_class
params['role'] = self.marshal_role
params['many'] = False
# when handling a PUT or PATCH request, self.obj will be set.. There might be a
# more robust way to handle this?
params['obj'] = getattr(self, 'obj', None)
params['partial'] = self.is_partial()
return params
|
def get_request_handler_params(self, **params)
|
Return a config object that will be used to configure the KimRequestHandler
:returns: a dictionary of config options
:rtype: dict
| 6.337188
| 6.150516
| 1.030351
|
return self._response(self.response.get_response_data(), status=status)
|
def list_response(self, status=200)
|
Pull the processed data from the response_handler and return a response.
:param status: The HTTP status code returned with the response
.. seealso:
:meth:`Endpoint.make_response`
:meth:`Endpoint.handle_get_request`
| 10.166265
| 9.45298
| 1.075456
|
self.objects = self.get_objects()
self.response = self.get_response_handler()
self.response.process(self.objects)
return self.list_response()
|
def handle_get_request(self)
|
Handle incoming GET request to an Endpoint and return an
array of results by calling :meth:`.GetListMixin.get_objects`.
.. seealso::
:meth:`GetListMixin.get_objects`
:meth:`Endpoint.get`
| 7.087131
| 6.401585
| 1.10709
|
self.response = self.get_response_handler()
self.response.process(self.obj)
return self._response(self.response.get_response_data(), status=status)
|
def create_response(self, status=201)
|
Generate a Response object for a POST request. By default, the newly created
object will be passed to the specified ResponseHandler and will be serialized
as the response body.
| 6.535758
| 5.344533
| 1.222887
|
self.request = self.get_request_handler()
self.obj = self.request.process().data
self.save_object(self.obj)
return self.create_response()
|
def handle_post_request(self)
|
Handle incoming POST request to an Endpoint and marshal the request data
via the specified RequestHandler. :meth:`.CreateMixin.save_object`. is then
called and must be implemented by mixins implementing this interface.
.. seealso::
:meth:`CreateMixin.save_object`
:meth:`Endpoint.post`
| 6.460985
| 5.701785
| 1.133151
|
if not getattr(self, '_obj', None):
self._obj = self.get_object()
if self._obj is None and not self.allow_none:
self.return_error(404)
return self._obj
|
def obj(self)
|
Returns the value of :meth:`ObjectMixin.get_object` and sets a private
property called _obj. This property ensures the logic around allow_none
is enforced across Endpoints using the Object interface.
:raises: :class:`werkzeug.exceptions.BadRequest`
:returns: The result of :meth:`ObjectMixin.get_object`
| 3.639326
| 3.294699
| 1.104601
|
'''
An FSM accepting nothing (not even the empty string). This
demonstrates that this is possible, and is also extremely useful
in some situations
'''
return fsm(
alphabet = alphabet,
states = {0},
initial = 0,
finals = set(),
map = {
0: dict([(symbol, 0) for symbol in alphabet]),
},
)
|
def null(alphabet)
|
An FSM accepting nothing (not even the empty string). This
demonstrates that this is possible, and is also extremely useful
in some situations
| 5.927529
| 2.568388
| 2.307879
|
'''
Crawl several FSMs in parallel, mapping the states of a larger meta-FSM.
To determine whether a state in the larger FSM is final, pass all of the
finality statuses (e.g. [True, False, False]) to `test`.
'''
alphabet = set().union(*[fsm.alphabet for fsm in fsms])
initial = dict([(i, fsm.initial) for (i, fsm) in enumerate(fsms)])
# dedicated function accepts a "superset" and returns the next "superset"
# obtained by following this transition in the new FSM
def follow(current, symbol):
next = {}
for i in range(len(fsms)):
if symbol not in fsms[i].alphabet and anything_else in fsms[i].alphabet:
actual_symbol = anything_else
else:
actual_symbol = symbol
if i in current \
and current[i] in fsms[i].map \
and actual_symbol in fsms[i].map[current[i]]:
next[i] = fsms[i].map[current[i]][actual_symbol]
if len(next.keys()) == 0:
raise OblivionError
return next
# Determine the "is final?" condition of each substate, then pass it to the
# test to determine finality of the overall FSM.
def final(state):
accepts = [i in state and state[i] in fsm.finals for (i, fsm) in enumerate(fsms)]
return test(accepts)
return crawl(alphabet, initial, final, follow).reduce()
|
def parallel(fsms, test)
|
Crawl several FSMs in parallel, mapping the states of a larger meta-FSM.
To determine whether a state in the larger FSM is final, pass all of the
finality statuses (e.g. [True, False, False]) to `test`.
| 4.659944
| 3.25718
| 1.430668
|
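Because `test` receives the list of per-FSM finality flags, the same crawl yields union (`any`) or intersection (`all`). A hedged sketch, assuming the older greenery releases this code comes from, where the lowercase `fsm` class and `parallel` live in `greenery.fsm`:

    # Two tiny FSMs over {"a", "b"}; state 2 is an explicit dead state.
    from greenery.fsm import fsm, parallel

    a = fsm(                       # accepts exactly the string "a"
        alphabet={'a', 'b'},
        states={0, 1, 2},
        initial=0,
        finals={1},
        map={0: {'a': 1, 'b': 2}, 1: {'a': 2, 'b': 2}, 2: {'a': 2, 'b': 2}},
    )
    b = fsm(                       # accepts exactly the string "b"
        alphabet={'a', 'b'},
        states={0, 1, 2},
        initial=0,
        finals={1},
        map={0: {'a': 2, 'b': 1}, 1: {'a': 2, 'b': 2}, 2: {'a': 2, 'b': 2}},
    )

    union = parallel([a, b], any)          # final when any sub-FSM is final
    intersection = parallel([a, b], all)   # final when all sub-FSMs are final

    assert union.accepts('a') and union.accepts('b')
    assert not intersection.accepts('a') and not intersection.accepts('b')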
'''
Given the above conditions and instructions, crawl a new unknown FSM,
mapping its states, final states and transitions. Return the new FSM.
This is a pretty powerful procedure which could potentially go on
forever if you supply an evil version of follow().
'''
states = [initial]
finals = set()
map = {}
# iterate over a growing list
i = 0
while i < len(states):
state = states[i]
# add to finals
if final(state):
finals.add(i)
# compute map for this state
map[i] = {}
for symbol in sorted(alphabet, key=key):
try:
next = follow(state, symbol)
try:
j = states.index(next)
except ValueError:
j = len(states)
states.append(next)
except OblivionError:
# Reached an oblivion state. Don't list it.
continue
map[i][symbol] = j
i += 1
return fsm(
alphabet = alphabet,
states = range(len(states)),
initial = 0,
finals = finals,
map = map,
)
|
def crawl(alphabet, initial, final, follow)
|
Given the above conditions and instructions, crawl a new unknown FSM,
mapping its states, final states and transitions. Return the new FSM.
This is a pretty powerful procedure which could potentially go on
forever if you supply an evil version of follow().
| 4.492971
| 2.475701
| 1.814828
|
'''
Test whether the present FSM accepts the supplied string (iterable of
symbols). Equivalently, consider `self` as a possibly-infinite set of
strings and test whether `string` is a member of it.
This is actually mainly used for unit testing purposes.
If `fsm.anything_else` is in your alphabet, then any symbol not in your
alphabet will be converted to `fsm.anything_else`.
'''
state = self.initial
for symbol in input:
if anything_else in self.alphabet and not symbol in self.alphabet:
symbol = anything_else
# Missing transition = transition to dead state
if not (state in self.map and symbol in self.map[state]):
return False
state = self.map[state][symbol]
return state in self.finals
|
def accepts(self, input)
|
Test whether the present FSM accepts the supplied string (iterable of
symbols). Equivalently, consider `self` as a possibly-infinite set of
strings and test whether `string` is a member of it.
This is actually mainly used for unit testing purposes.
If `fsm.anything_else` is in your alphabet, then any symbol not in your
alphabet will be converted to `fsm.anything_else`.
| 6.208549
| 2.043392
| 3.038355
|
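A short membership-test sketch for `accepts`, under the same greenery-import assumption as above; the FSM accepts every string over {"a", "b"} that ends in "b":

    from greenery.fsm import fsm

    ends_in_b = fsm(
        alphabet={'a', 'b'},
        states={0, 1},
        initial=0,
        finals={1},
        map={
            0: {'a': 0, 'b': 1},
            1: {'a': 0, 'b': 1},
        },
    )

    assert ends_in_b.accepts('ab')
    assert ends_in_b.accepts('aabbb')
    assert not ends_in_b.accepts('ba')
    assert not ends_in_b.accepts('')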
'''
Concatenate arbitrarily many finite state machines together.
'''
alphabet = set().union(*[fsm.alphabet for fsm in fsms])
def connect_all(i, substate):
'''
Take a state in the numbered FSM and return a set containing it, plus
(if it's final) the first state from the next FSM, plus (if that's
final) the first state from the next but one FSM, plus...
'''
result = {(i, substate)}
while i < len(fsms) - 1 and substate in fsms[i].finals:
i += 1
substate = fsms[i].initial
result.add((i, substate))
return result
# Use a superset containing states from all FSMs at once.
# We start at the start of the first FSM. If this state is final in the
# first FSM, then we are also at the start of the second FSM. And so on.
initial = set()
if len(fsms) > 0:
initial.update(connect_all(0, fsms[0].initial))
initial = frozenset(initial)
def final(state):
'''If you're in a final state of the final FSM, it's final'''
for (i, substate) in state:
if i == len(fsms) - 1 and substate in fsms[i].finals:
return True
return False
def follow(current, symbol):
'''
Follow the collection of states through all FSMs at once, jumping to the
next FSM if we reach the end of the current one
TODO: improve all follow() implementations to allow for dead metastates?
'''
next = set()
for (i, substate) in current:
fsm = fsms[i]
if substate in fsm.map and symbol in fsm.map[substate]:
next.update(connect_all(i, fsm.map[substate][symbol]))
if len(next) == 0:
raise OblivionError
return frozenset(next)
return crawl(alphabet, initial, final, follow).reduce()
|
def concatenate(*fsms)
|
Concatenate arbitrarily many finite state machines together.
| 3.636794
| 3.53463
| 1.028904
|
'''
If the present FSM accepts X, returns an FSM accepting X* (i.e. 0 or
more Xes). This is NOT as simple as naively connecting the final states
back to the initial state: see (b*ab)* for example.
'''
alphabet = self.alphabet
initial = {self.initial}
def follow(state, symbol):
next = set()
for substate in state:
if substate in self.map and symbol in self.map[substate]:
next.add(self.map[substate][symbol])
# If one of our substates is final, then we can also consider
# transitions from the initial state of the original FSM.
if substate in self.finals \
and self.initial in self.map \
and symbol in self.map[self.initial]:
next.add(self.map[self.initial][symbol])
if len(next) == 0:
raise OblivionError
return frozenset(next)
def final(state):
return any(substate in self.finals for substate in state)
return crawl(alphabet, initial, final, follow) | epsilon(alphabet)
|
def star(self)
|
If the present FSM accepts X, returns an FSM accepting X* (i.e. 0 or
more Xes). This is NOT as simple as naively connecting the final states
back to the initial state: see (b*ab)* for example.
| 5.038607
| 2.687385
| 1.87491
|