INSTRUCTION stringlengths 1 8.43k | RESPONSE stringlengths 75 104k |
|---|---|
def write_bel_namespace(self, file: TextIO, use_names: bool = False) -> None:
    """Write as a BEL namespace file.

    :param file: Destination file handle.
    :param use_names: If true, write names instead of identifiers.
    :raises ValueError: If ``use_names`` is requested but this namespace has no names.
    """
    if not self.is_populated():
        self.populate()
    if use_names and not self.has_names:
        # fail loudly with context instead of a bare, message-less ValueError
        raise ValueError(f'{self.module_name} does not support writing a namespace by names')
    values = (
        self._get_namespace_name_to_encoding(desc='writing names')
        if use_names else
        self._get_namespace_identifier_to_encoding(desc='writing identifiers')
    )
    write_namespace(
        namespace_name=self._get_namespace_name(),
        namespace_keyword=self._get_namespace_keyword(),
        namespace_query_url=self.identifiers_url,
        values=values,
        file=file,
    )
def write_bel_annotation(self, file: TextIO) -> None:
    """Write this resource as a BEL annotation file, populating first if needed."""
    if not self.is_populated():
        self.populate()
    name_to_encoding = self._get_namespace_name_to_encoding(desc='writing names')
    write_annotation(
        keyword=self._get_namespace_keyword(),
        citation_name=self._get_namespace_name(),
        description='',
        values=name_to_encoding,
        file=file,
    )
def write_bel_namespace_mappings(self, file: TextIO, **kwargs) -> None:
    """Write a BEL namespace mapping file as sorted, indented JSON."""
    mappings = self._get_namespace_identifier_to_name(**kwargs)
    json.dump(mappings, file, indent=2, sort_keys=True)
def write_directory(self, directory: str) -> bool:
    """Write a BEL namespace for identifiers, names, name hash, and mappings to the given directory.

    :param directory: Target directory for the ``.belns`` artifacts.
    :return: True if files were (re)written, False when the stored hash matches and nothing was done.
    """
    current_md5_hash = self.get_namespace_hash()
    md5_hash_path = os.path.join(directory, f'{self.module_name}.belns.md5')
    # load the hash from the previous run, if any
    if not os.path.exists(md5_hash_path):
        old_md5_hash = None
    else:
        with open(md5_hash_path) as file:
            old_md5_hash = file.read().strip()
    # unchanged content -> skip rewriting everything
    if old_md5_hash == current_md5_hash:
        return False
    with open(os.path.join(directory, f'{self.module_name}.belns'), 'w') as file:
        self.write_bel_namespace(file, use_names=False)
    with open(md5_hash_path, 'w') as file:
        print(current_md5_hash, file=file)
    # name-based artifacts only make sense when names exist
    if self.has_names:
        with open(os.path.join(directory, f'{self.module_name}-names.belns'), 'w') as file:
            self.write_bel_namespace(file, use_names=True)
        with open(os.path.join(directory, f'{self.module_name}.belns.mapping'), 'w') as file:
            self.write_bel_namespace_mappings(file, desc='writing mapping')
    return True
def get_namespace_hash(self, hash_fn=hashlib.md5) -> str:
    """Get the namespace hash (MD5 by default).

    Hashes the name->encoding mapping when names are available,
    otherwise the identifier->encoding mapping.
    """
    digest = hash_fn()
    if self.has_names:
        mapping = self._get_namespace_name_to_encoding(desc='getting hash')
    else:
        mapping = self._get_namespace_identifier_to_encoding(desc='getting hash')
    for key, encoding in mapping.items():
        digest.update(f'{key}:{encoding}'.encode('utf8'))
    return digest.hexdigest()
def get_cli(cls) -> click.Group:
    """Get a :mod:`click` main function with added BEL namespace commands.

    Extends the group built by the parent class with ``belns``/``belanno``
    sub-groups depending on what this manager class supports.
    """
    main = super().get_cli()
    if cls.is_namespace:
        # only register namespace commands when the class opts in
        @main.group()
        def belns():
            """Manage BEL namespace."""
        cls._cli_add_to_bel_namespace(belns)
        cls._cli_add_clear_bel_namespace(belns)
        cls._cli_add_write_bel_namespace(belns)
    if cls.is_annotation:
        @main.group()
        def belanno():
            """Manage BEL annotation."""
        cls._cli_add_write_bel_annotation(belanno)
    return main
def get_long_description():
    """Read and return the contents of README.rst, assuming UTF-8 encoding."""
    readme_path = os.path.join(HERE, 'README.rst')
    with codecs.open(readme_path, encoding='utf-8') as readme_file:
        return readme_file.read()
def dropbox_post_factory(request):
    """Receive a UUID via the request and return either a fresh or an existing dropbox for it.

    :raises HTTPGone: when the token has expired or the dropbox is already processing
    :raises HTTPNotFound: for any other token problem (deliberately unspecific)
    """
    # fall back to 5 minutes when the setting is missing or not an int
    try:
        max_age = int(request.registry.settings.get('post_token_max_age_seconds'))
    except Exception:
        max_age = 300
    try:
        drop_id = parse_post_token(
            token=request.matchdict['token'],
            secret=request.registry.settings['post_secret'],
            max_age=max_age)
    except SignatureExpired:
        raise HTTPGone('dropbox expired')
    except Exception:  # don't be too specific on the reason for the error
        raise HTTPNotFound('no such dropbox')
    dropbox = request.registry.settings['dropbox_container'].get_dropbox(drop_id)
    # status codes >= 20 mean the drop has already moved past accepting input
    if dropbox.status_int >= 20:
        raise HTTPGone('dropbox already in processing, no longer accepts data')
    return dropbox
def dropbox_factory(request):
    """Look up the dropbox whose id is in the route matchdict and return it."""
    try:
        container = request.registry.settings['dropbox_container']
        return container.get_dropbox(request.matchdict['drop_id'])
    except KeyError:
        raise HTTPNotFound('no such dropbox')
def is_equal(a, b):
    """ a constant time comparison implementation taken from
    http://codahale.com/a-lesson-in-timing-attacks/ and
    Django's `util` module https://github.com/django/django/blob/master/django/utils/crypto.py#L82
    """
    # NOTE: deliberately compares every character pair so the running time
    # depends only on the length, never on where the first mismatch occurs.
    if len(a) != len(b):
        return False
    result = 0
    for x, y in zip(a, b):
        # accumulate differences bitwise; result stays 0 only if all pairs match
        result |= ord(x) ^ ord(y)
    return result == 0
def dropbox_editor_factory(request):
    """Return the dropbox only when the request carries a valid editor token."""
    dropbox = dropbox_factory(request)
    supplied_token = request.matchdict['editor_token'].encode('utf-8')
    if not is_equal(dropbox.editor_token, supplied_token):
        raise HTTPNotFound('invalid editor token')
    return dropbox
def get_cli(cls) -> click.Group:
    """Build a :mod:`click` CLI main function.
    :param Type[AbstractManager] cls: A Manager class
    :return: The main function for click
    """
    # the help text embeds the resolved default connection so users see it in --help
    group_help = 'Default connection at {}\n\nusing Bio2BEL v{}'.format(cls._get_connection(), get_version())

    @click.group(help=group_help)
    @click.option('-c', '--connection', default=cls._get_connection(),
                  help='Defaults to {}'.format(cls._get_connection()))
    @click.pass_context
    def main(ctx, connection):
        """Bio2BEL CLI."""
        logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s")
        logging.getLogger('bio2bel.utils').setLevel(logging.WARNING)
        # the manager instance is passed to subcommands via @click.pass_obj
        ctx.obj = cls(connection=connection)

    return main
def sanitize_filename(filename):
    """Replace the file name with a random token, keeping only the extension.

    TODO: fix broken splitext (it reveals everything of the filename after the first `.` - doh!)
    """
    token = generate_drop_id()
    _, extension = splitext(filename)
    return '%s%s' % (token, extension) if extension else token
def process(self):
    """ Calls the external cleanser scripts to (optionally) purge the meta data and then
    send the contents of the dropbox via email.

    Status codes (presumably: <500 ok, 5xx cleanser failure, 605/610 smtp
    failure, 800 unsupported attachment, 900 success -- TODO confirm against
    the status-code table elsewhere in the project).
    """
    if self.num_attachments > 0:
        self.status = u'100 processor running'
        # keep an encrypted copy of the raw drop before cleansing touches it
        fs_dirty_archive = self._create_backup()
        # calling _process_attachments has the side-effect of updating `send_attachments`
        self._process_attachments()
        if self.status_int < 500 and not self.send_attachments:
            self._create_archive()
        if self.status_int >= 500 and self.status_int < 600:
            # cleansing failed
            # if configured, we need to move the uncleansed archive to
            # the appropriate folder and notify the editors
            if 'dropbox_dirty_archive_url_format' in self.settings:
                # create_archive
                shutil.move(
                    fs_dirty_archive,
                    '%s/%s.zip.pgp' % (self.container.fs_archive_dirty, self.drop_id))
                # update status
                # it's now considered 'successful-ish' again
                self.status = '490 cleanser failure but notify success'
        if self.status_int == 800:
            # at least one attachment was not supported
            # if configured, we need to move the uncleansed archive to
            # the appropriate folder and notify the editors
            if 'dropbox_dirty_archive_url_format' in self.settings:
                # create_archive
                shutil.move(
                    fs_dirty_archive,
                    '%s/%s.zip.pgp' % (self.container.fs_archive_dirty, self.drop_id))
    if self.status_int < 500 or self.status_int == 800:
        try:
            if self._notify_editors() > 0:
                if self.status_int < 500:
                    self.status = '900 success'
            else:
                self.status = '605 smtp failure'
        except Exception:
            import traceback
            tb = traceback.format_exc()
            self.status = '610 smtp error (%s)' % tb
    self.cleanup()
    return self.status
def cleanup(self):
    """Ensure no data leaks from the drop after processing.

    Removes everything except the status file. Each removal is attempted
    independently: in the original, both removes shared one try-block, so a
    missing ``message`` file caused ``dirty.zip.pgp`` to be left behind.
    """
    for leftover in (u'message', u'dirty.zip.pgp'):
        try:
            remove(join(self.fs_path, leftover))
        except OSError:
            # file may legitimately not exist; keep going regardless
            pass
    shutil.rmtree(join(self.fs_path, u'clean'), ignore_errors=True)
    shutil.rmtree(join(self.fs_path, u'attach'), ignore_errors=True)
def _create_encrypted_zip(self, source='dirty', fs_target_dir=None):
    """ creates a zip file from the drop and encrypts it to the editors.
    the encrypted archive is created inside fs_target_dir

    :param source: 'dirty' or 'clean' -- which attachment set to archive
    :param fs_target_dir: destination directory; defaults to the drop directory
    :return: path of the encrypted archive, or the error status string when
        no editor has a usable key
    """
    # only encrypt to editors whose keys actually validate
    backup_recipients = [r for r in self.editors if checkRecipient(self.gpg_context, r)]
    # this will be handled by watchdog, no need to send for each drop
    if not backup_recipients:
        self.status = u'500 no valid keys at all'
        return self.status
    # calculate paths
    fs_backup = join(self.fs_path, '%s.zip' % source)
    if fs_target_dir is None:
        fs_backup_pgp = join(self.fs_path, '%s.zip.pgp' % source)
    else:
        fs_backup_pgp = join(fs_target_dir, '%s.zip.pgp' % self.drop_id)
    fs_source = dict(
        dirty=self.fs_dirty_attachments,
        clean=self.fs_cleansed_attachments
    )
    # create archive (uncompressed; contents are encrypted afterwards anyway)
    with ZipFile(fs_backup, 'w', ZIP_STORED) as backup:
        if exists(join(self.fs_path, 'message')):
            backup.write(join(self.fs_path, 'message'), arcname='message')
        for fs_attachment in fs_source[source]:
            backup.write(fs_attachment, arcname=split(fs_attachment)[-1])
    # encrypt archive
    with open(fs_backup, "rb") as backup:
        self.gpg_context.encrypt_file(
            backup,
            backup_recipients,
            always_trust=True,
            output=fs_backup_pgp
        )
    # cleanup: never leave the unencrypted zip on disk
    remove(fs_backup)
    return fs_backup_pgp
creates an encrypted archive of the dropbox outside of the drop directory. | def _create_archive(self):
""" creates an encrypted archive of the dropbox outside of the drop directory.
"""
self.status = u'270 creating final encrypted backup of cleansed attachments'
return self._create_encrypted_zip(source='clean', fs_target_dir=self.container.fs_archive_cleansed) |
def size_attachments(self):
    """Return the total number of bytes the cleansed attachments occupy on disk."""
    # sum() over a generator replaces the manual accumulator loop
    return sum(stat(attachment).st_size for attachment in self.fs_cleansed_attachments)
def replies(self):
    """Return the replies to this drop as a list (zero or one entries)."""
    fs_reply_path = join(self.fs_replies_path, 'message_001.txt')
    if not exists(fs_reply_path):
        return []
    # the original `load(open(...))` leaked the file handle; close it deterministically
    with open(fs_reply_path, 'r') as reply_file:
        return [load(reply_file)]
def message(self):
    """ returns the user submitted text

    NOTE(review): ``line.decode('utf-8')`` on lines from a text-mode file
    implies Python 2 byte strings; under Python 3 this would raise
    AttributeError -- confirm the target interpreter.
    """
    try:
        with open(join(self.fs_path, u'message')) as message_file:
            return u''.join([line.decode('utf-8') for line in message_file.readlines()])
    except IOError:
        # no message file means the drop has no text component
        return u''
def fs_dirty_attachments(self):
    """Return absolute paths of all (uncleansed) attachments of this drop."""
    container = self.fs_attachment_container
    if not exists(container):
        return []
    return [join(container, attachment) for attachment in listdir(container)]
def fs_cleansed_attachments(self):
    """Return absolute paths of the cleansed attachments of this drop."""
    container = self.fs_cleansed_attachment_container
    if not exists(container):
        return []
    return [join(container, attachment) for attachment in listdir(container)]
def reset_cleansers(confirm=True):
    """destroys all cleanser slaves and their rollback snapshots, as well as the initial master
    snapshot - this allows re-running the jailhost deployment to recreate fresh cleansers.

    :param confirm: when truthy, interactively ask before destroying anything
    """
    if value_asbool(confirm) and not yesno("""\nObacht!
This will destroy any existing and or currently running cleanser jails.
Are you sure that you want to continue?"""):
        exit("Glad I asked...")
    get_vars()
    cleanser_count = AV['ploy_cleanser_count']
    # make sure no workers interfere:
    fab.run('ezjail-admin stop worker')
    # stop and nuke the cleanser slaves
    for cleanser_index in range(cleanser_count):
        # jail names are 1-based and zero-padded, e.g. cleanser_01
        cindex = '{:02d}'.format(cleanser_index + 1)
        fab.run('ezjail-admin stop cleanser_{cindex}'.format(cindex=cindex))
        # warn_only: individual teardown steps may fail for jails that never started
        with fab.warn_only():
            fab.run('zfs destroy tank/jails/cleanser_{cindex}@jdispatch_rollback'.format(cindex=cindex))
            fab.run('ezjail-admin delete -fw cleanser_{cindex}'.format(cindex=cindex))
            fab.run('umount -f /usr/jails/cleanser_{cindex}'.format(cindex=cindex))
            fab.run('rm -rf /usr/jails/cleanser_{cindex}'.format(cindex=cindex))
    with fab.warn_only():
        # remove master snapshot
        fab.run('zfs destroy -R tank/jails/cleanser@clonesource')
        # restart worker and cleanser to prepare for subsequent ansible configuration runs
        fab.run('ezjail-admin start worker')
        fab.run('ezjail-admin stop cleanser')
        fab.run('ezjail-admin start cleanser')
def reset_jails(confirm=True, keep_cleanser_master=True):
    """ stops, deletes and re-creates all jails.
    since the cleanser master is rather large, that one is omitted by default.

    :param confirm: when truthy, interactively ask before destroying anything
    :param keep_cleanser_master: when falsy, also delete the (large) cleanser master jail
    """
    if value_asbool(confirm) and not yesno("""\nObacht!
This will destroy all existing and or currently running jails on the host.
Are you sure that you want to continue?"""):
        exit("Glad I asked...")
    # the cleanser slaves are torn down first (confirmation already given above)
    reset_cleansers(confirm=False)
    jails = ['appserver', 'webserver', 'worker']
    if not value_asbool(keep_cleanser_master):
        jails.append('cleanser')
    with fab.warn_only():
        for jail in jails:
            fab.run('ezjail-admin delete -fw {jail}'.format(jail=jail))
        # remove authorized keys for no longer existing key (they are regenerated for each new worker)
        fab.run('rm /usr/jails/cleanser/usr/home/cleanser/.ssh/authorized_keys')
def dict_dumper(provider):
    '''
    Dump a provider to a format that can be passed to a
    :class:`skosprovider.providers.DictionaryProvider`.
    :param skosprovider.providers.VocabularyProvider provider: The provider
        that wil be turned into a `dict`.
    :rtype: A list of dicts.
    .. versionadded:: 0.2.0
    '''
    dumped = []
    for item in provider.get_all():
        concept = provider.get_by_id(item['id'])
        # fields shared by concepts and collections
        common = {
            'id': concept.id,
            'uri': concept.uri,
            'type': concept.type,
            'labels': [label.__dict__ for label in concept.labels],
            'notes': [note.__dict__ for note in concept.notes],
            'sources': [source.__dict__ for source in concept.sources],
            'member_of': concept.member_of,
        }
        if isinstance(concept, Concept):
            common.update(
                narrower=concept.narrower,
                broader=concept.broader,
                related=concept.related,
                subordinate_arrays=concept.subordinate_arrays,
                matches=concept.matches,
            )
            dumped.append(common)
        elif isinstance(concept, Collection):
            common.update(
                members=concept.members,
                superordinates=concept.superordinates,
            )
            dumped.append(common)
    return dumped
def add_cli_flask(main: click.Group) -> click.Group:  # noqa: D202
    """Add a ``web`` comand main :mod:`click` function.

    :param main: The click group to attach the ``web`` command to
    :return: The same group, for chaining
    """

    @main.command()
    @click.option('-v', '--debug', is_flag=True)
    @click.option('-p', '--port')
    @click.option('-h', '--host')
    # NOTE(review): this default is computed once per call to add_cli_flask,
    # not per invocation of the command -- confirm that is intended
    @click.option('-k', '--secret-key', default=os.urandom(8))
    @click.pass_obj
    def web(manager, debug, port, host, secret_key):
        """Run the web app."""
        if not manager.is_populated():
            click.echo('{} has not yet been populated'.format(manager.module_name))
            sys.exit(0)
        app = manager.get_flask_admin_app(url='/', secret_key=secret_key)
        app.run(debug=debug, host=host, port=port)

    return main
def _add_admin(self, app, **kwargs):
    """Add a Flask Admin interface to an application.
    :param flask.Flask app: A Flask application
    :param kwargs: Keyword arguments are passed through to :class:`flask_admin.Admin`
    :rtype: flask_admin.Admin
    """
    from flask_admin import Admin
    from flask_admin.contrib.sqla import ModelView
    admin = Admin(app, **kwargs)
    for entry in self.flask_admin_models:
        if not isinstance(entry, tuple):
            # a bare model gets the default ModelView
            admin.add_view(ModelView(entry, self.session))
            continue
        # otherwise expect a (model, view class) pair
        if len(entry) != 2:
            raise TypeError
        model, view_cls = entry
        admin.add_view(view_cls(model, self.session))
    return admin
def get_flask_admin_app(self, url: Optional[str] = None, secret_key: Optional[str] = None):
    """Create a Flask application with the admin interface mounted.
    :param url: Optional mount point of the admin application. Defaults to ``'/'``.
    :rtype: flask.Flask
    """
    from flask import Flask
    app = Flask(__name__)
    if secret_key:
        app.secret_key = secret_key
    self._add_admin(app, url=url or '/')
    return app
def get_cli(cls) -> click.Group:
    """Add a :mod:`click` main function to use as a command line interface.

    Extends the parent class's CLI with the ``web`` command.
    """
    main = super().get_cli()
    cls._cli_add_flask(main)
    return main
def _handle_system_status_event(self, event: SystemStatusEvent) -> None:
    """
    Map a panel system-status event onto zone and arming-state updates.

    DISARMED -> ARMED_AWAY -> EXIT_DELAY_START -> EXIT_DELAY_END
    (trip): -> ALARM -> OUTPUT_ON -> ALARM_RESTORE
    (disarm): -> DISARMED -> OUTPUT_OFF
    (disarm): -> DISARMED
    (disarm before EXIT_DELAY_END): -> DISARMED -> EXIT_DELAY_END
    TODO(NW): Check ALARM_RESTORE state transition to move back into ARMED_AWAY state
    """
    if event.type == SystemStatusEvent.EventType.UNSEALED:
        return self._update_zone(event.zone, True)
    elif event.type == SystemStatusEvent.EventType.SEALED:
        return self._update_zone(event.zone, False)
    elif event.type == SystemStatusEvent.EventType.ALARM:
        return self._update_arming_state(ArmingState.TRIGGERED)
    elif event.type == SystemStatusEvent.EventType.ALARM_RESTORE:
        # only leave TRIGGERED if the system wasn't disarmed in the meantime
        if self.arming_state != ArmingState.DISARMED:
            return self._update_arming_state(ArmingState.ARMED)
    elif event.type == SystemStatusEvent.EventType.ENTRY_DELAY_START:
        return self._update_arming_state(ArmingState.ENTRY_DELAY)
    elif event.type == SystemStatusEvent.EventType.ENTRY_DELAY_END:
        # no state change needed; entry delay resolution comes via other events
        pass
    elif event.type == SystemStatusEvent.EventType.EXIT_DELAY_START:
        return self._update_arming_state(ArmingState.EXIT_DELAY)
    elif event.type == SystemStatusEvent.EventType.EXIT_DELAY_END:
        # Exit delay finished - if we were in the process of arming update
        # state to armed
        if self.arming_state == ArmingState.EXIT_DELAY:
            return self._update_arming_state(ArmingState.ARMED)
    elif event.type in Alarm.ARM_EVENTS:
        return self._update_arming_state(ArmingState.ARMING)
    elif event.type == SystemStatusEvent.EventType.DISARMED:
        return self._update_arming_state(ArmingState.DISARMED)
    elif event.type == SystemStatusEvent.EventType.ARMING_DELAYED:
        # intentionally ignored
        pass
def dropbox_form(request):
    """Generate a dropbox uid and render the submission form with a signed version of that id."""
    from briefkasten import generate_post_token
    token = generate_post_token(secret=request.registry.settings['post_secret'])
    action_url = request.route_url('dropbox_form_submit', token=token)
    upload_url = request.route_url('dropbox_fileupload', token=token)
    return dict(
        action=action_url,
        fileupload_url=upload_url,
        **defaults(request))
def dropbox_fileupload(dropbox, request):
    """Accept a single uploaded file and attach it to the dropbox."""
    attachment = request.POST['attachment']
    stored_name = dropbox.add_attachment(attachment)
    file_info = dict(name=stored_name, type=attachment.type)
    return dict(files=[file_info])
def dropbox_submission(dropbox, request):
    """ handles the form submission, redirects to the dropbox's status page.

    NOTE(review): uses ``unicode`` and so targets Python 2 -- confirm.
    """
    try:
        data = dropbox_schema.deserialize(request.POST)
    except Exception:
        # invalid submissions are silently bounced back to the empty form
        return HTTPFound(location=request.route_url('dropbox_form'))
    # set the message
    dropbox.message = data.get('message')
    # recognize submission from watchdog
    if 'testing_secret' in dropbox.settings:
        dropbox.from_watchdog = is_equal(
            unicode(dropbox.settings['test_submission_secret']),
            data.pop('testing_secret', u''))
    # a non-js client might have uploaded an attachment via the form's fileupload field:
    if data.get('upload') is not None:
        dropbox.add_attachment(data['upload'])
    # now we can call the process method
    dropbox.submit()
    drop_url = request.route_url('dropbox_view', drop_id=dropbox.drop_id)
    print("Created dropbox %s" % drop_url)
    return HTTPFound(location=drop_url)
def make_obo_getter(data_url: str,
                    data_path: str,
                    *,
                    preparsed_path: Optional[str] = None,
                    ) -> Callable[[Optional[str], bool, bool], MultiDiGraph]:
    """Build a function that handles downloading OBO data and parsing it into a NetworkX object.
    :param data_url: The URL of the data
    :param data_path: The path where the data should get stored
    :param preparsed_path: The optional path to cache a pre-parsed json version
    """
    download_function = make_downloader(data_url, data_path)

    def get_obo(url: Optional[str] = None, cache: bool = True, force_download: bool = False) -> MultiDiGraph:
        """Download and parse a GO obo file with :mod:`obonet` into a MultiDiGraph.
        :param url: The URL (or file path) to download.
        :param cache: If true, the data is downloaded to the file system, else it is loaded from the internet
        :param force_download: If true, overwrites a previously cached file
        """
        # fastest path: a pre-parsed pickle already exists
        if preparsed_path is not None and os.path.exists(preparsed_path):
            return read_gpickle(preparsed_path)
        # otherwise fall back to the (possibly cached) raw OBO download
        if url is None and cache:
            url = download_function(force_download=force_download)
        result = obonet.read_obo(url)
        # store the parsed graph so the next call takes the fast path
        if preparsed_path is not None:
            write_gpickle(result, preparsed_path)
        return result

    return get_obo
def belns(keyword: str, file: TextIO, encoding: Optional[str], use_names: bool):
    """Convert a cached OBO resource to a BEL namespace and write it out."""
    directory = get_data_dir(keyword)
    obo_url = f'http://purl.obolibrary.org/obo/{keyword}.obo'
    obo_path = os.path.join(directory, f'{keyword}.obo')
    obo_cache_path = f'{obo_path}.pickle'
    getter = make_obo_getter(obo_url, obo_path, preparsed_path=obo_cache_path)
    convert_obo_graph_to_belns(
        getter(),
        file=file,
        encoding=encoding,
        use_names=use_names,
    )
def belanno(keyword: str, file: TextIO):
    """Convert a cached OBO resource to a BEL annotation and write it out."""
    directory = get_data_dir(keyword)
    obo_url = f'http://purl.obolibrary.org/obo/{keyword}.obo'
    obo_path = os.path.join(directory, f'{keyword}.obo')
    obo_cache_path = f'{obo_path}.pickle'
    getter = make_obo_getter(obo_url, obo_path, preparsed_path=obo_cache_path)
    convert_obo_graph_to_belanno(
        getter(),
        file=file,
    )
def _store_helper(model: Action, session: Optional[Session] = None) -> None:
    """Store an action and commit the session.

    :param model: The action instance to persist.
    :param session: Optional pre-built session; a fresh one is created otherwise.
    """
    if session is None:
        session = _make_session()
    try:
        session.add(model)
        session.commit()
    finally:
        # close even when commit raises, so the connection is never leaked
        session.close()
def _make_session(connection: Optional[str] = None) -> Session:
    """Build a new SQLAlchemy session, creating tables first if needed."""
    if connection is None:
        connection = get_global_connection()
    engine = create_engine(connection)
    create_all(engine)
    return sessionmaker(bind=engine)()
def create_all(engine, checkfirst=True):
    """Create the tables for Bio2BEL.

    :param engine: A SQLAlchemy engine bound to the target database
    :param checkfirst: If true, skip tables that already exist
    """
    Base.metadata.create_all(bind=engine, checkfirst=checkfirst)
def store_populate(cls, resource: str, session: Optional[Session] = None) -> 'Action':
    """Store a "populate" event.
    :param resource: The normalized name of the resource to store
    Example:
    >>> from bio2bel.models import Action
    >>> Action.store_populate('hgnc')
    """
    event = cls.make_populate(resource)
    _store_helper(event, session=session)
    return event
def store_populate_failed(cls, resource: str, session: Optional[Session] = None) -> 'Action':
    """Store a "populate failed" event.
    :param resource: The normalized name of the resource to store
    Example:
    >>> from bio2bel.models import Action
    >>> Action.store_populate_failed('hgnc')
    """
    event = cls.make_populate_failed(resource)
    _store_helper(event, session=session)
    return event
def store_drop(cls, resource: str, session: Optional[Session] = None) -> 'Action':
    """Store a "drop" event.
    :param resource: The normalized name of the resource to store
    Example:
    >>> from bio2bel.models import Action
    >>> Action.store_drop('hgnc')
    """
    event = cls.make_drop(resource)
    _store_helper(event, session=session)
    return event
def ls(cls, session: Optional[Session] = None) -> List['Action']:
    """Return all actions, most recently created first."""
    session = _make_session() if session is None else session
    results = session.query(cls).order_by(cls.created.desc()).all()
    session.close()
    return results
def count(cls, session: Optional[Session] = None) -> int:
    """Return the total number of stored actions."""
    session = _make_session() if session is None else session
    total = session.query(cls).count()
    session.close()
    return total
def get_data_dir(module_name: str) -> str:
    """Ensure the appropriate Bio2BEL data directory exists for the given module, then return its path.
    :param module_name: The name of the module. Ex: 'chembl'
    :return: The module's data directory
    """
    data_dir = os.path.join(BIO2BEL_DIR, module_name.lower())
    os.makedirs(data_dir, exist_ok=True)
    return data_dir
def get_module_config_cls(module_name: str) -> Type[_AbstractModuleConfig]:  # noqa: D202
    """Build a module configuration class.

    :param module_name: Name used for the config section and per-module config file
    """

    class ModuleConfig(_AbstractModuleConfig):
        # section name inside the shared config files
        NAME = f'bio2bel:{module_name}'
        # global config paths first, then the module-specific config.ini
        FILES = DEFAULT_CONFIG_PATHS + [
            os.path.join(DEFAULT_CONFIG_DIRECTORY, module_name, 'config.ini')
        ]

    return ModuleConfig
def get_connection(module_name: str, connection: Optional[str] = None) -> str:
    """Return the SQLAlchemy connection string if it is set.
    Order of operations:
    1. Return the connection if given as a parameter
    2. Check the environment for BIO2BEL_{module_name}_CONNECTION
    3. Look in the bio2bel config file for module-specific connection. Create if doesn't exist. Check the
       module-specific section for ``connection``
    4. Look in the bio2bel module folder for a config file. Don't create if doesn't exist. Check the default section
       for ``connection``
    5. Check the environment for BIO2BEL_CONNECTION
    6. Check the bio2bel config file for default
    7. Fall back to standard default cache connection
    :param module_name: The name of the module to get the configuration for
    :param connection: get the SQLAlchemy connection string
    :return: The SQLAlchemy connection string based on the configuration
    """
    # 1. Use given connection
    if connection is not None:
        return connection
    # steps 2-6 are implemented inside the module config class loader
    module_name = module_name.lower()
    module_config_cls = get_module_config_cls(module_name)
    module_config = module_config_cls.load()
    # module-specific connection wins; otherwise the global default
    return module_config.connection or config.connection
def get_modules() -> Mapping:
    """Get all Bio2BEL modules, keyed by their entry-point name.

    Entry points that fail to load are logged and skipped rather than
    aborting discovery of the remaining modules.
    """
    modules = {}
    for entry_point in iter_entry_points(group='bio2bel', name=None):
        entry = entry_point.name
        try:
            modules[entry] = entry_point.load()
        except VersionConflict as exc:
            log.warning('Version conflict in %s: %s', entry, exc)
            continue
        except UnknownExtra as exc:
            log.warning('Unknown extra in %s: %s', entry, exc)
            continue
        except ImportError as exc:
            log.exception('Issue with importing module %s: %s', entry, exc)
            continue
    return modules
def clear_cache(module_name: str, keep_database: bool = True) -> None:
    """Clear all downloaded files for the given module.

    :param module_name: The name of the module whose cache should be cleared
    :param keep_database: If true, the local ``cache.db`` database is preserved
    """
    data_dir = get_data_dir(module_name)
    if not os.path.exists(data_dir):
        return
    for name in os.listdir(data_dir):
        # configuration files are always preserved
        if name in {'config.ini', 'cfg.ini'}:
            continue
        if name == 'cache.db' and keep_database:
            continue
        path = os.path.join(data_dir, name)
        if os.path.isdir(path):
            shutil.rmtree(path)
        else:
            os.remove(path)
    # only remove the directory when it is actually empty; the original
    # unconditional rmdir raised OSError whenever config/database files were kept
    if not os.listdir(data_dir):
        os.rmdir(data_dir)
def add_cli_populate(main: click.Group) -> click.Group:  # noqa: D202
    """Add a ``populate`` command to main :mod:`click` function.

    :param main: The click group to attach the command to
    :return: The same group, for chaining
    """

    @main.command()
    @click.option('--reset', is_flag=True, help='Nuke database first')
    @click.option('--force', is_flag=True, help='Force overwrite if already populated')
    @click.pass_obj
    def populate(manager: AbstractManager, reset, force):
        """Populate the database."""
        if reset:
            click.echo('Deleting the previous instance of the database')
            manager.drop_all()
            click.echo('Creating new models')
            manager.create_all()
        # guard against accidentally re-populating an existing database
        if manager.is_populated() and not force:
            click.echo('Database already populated. Use --force to overwrite')
            sys.exit(0)
        manager.populate()

    return main
def add_cli_drop(main: click.Group) -> click.Group:  # noqa: D202
    """Add a ``drop`` command to main :mod:`click` function.

    :param main: The click group to attach the command to
    :return: The same group, for chaining
    """

    @main.command()
    @click.confirmation_option(prompt='Are you sure you want to drop the db?')
    @click.pass_obj
    def drop(manager):
        """Drop the database."""
        manager.drop_all()

    return main
def add_cli_cache(main: click.Group) -> click.Group:  # noqa: D202
    """Add several commands to main :mod:`click` function for handling the cache.

    :param main: The click group to attach the ``cache`` sub-group to
    :return: The same group, for chaining
    """

    @main.group()
    def cache():
        """Manage cached data."""

    @cache.command()
    @click.pass_obj
    def locate(manager):
        """Print the location of the data directory."""
        data_dir = get_data_dir(manager.module_name)
        click.echo(data_dir)

    @cache.command()
    @click.pass_obj
    def ls(manager):
        """List files in the cache."""
        data_dir = get_data_dir(manager.module_name)
        for path in os.listdir(data_dir):
            click.echo(path)

    @cache.command()
    @click.pass_obj
    def clear(manager):
        """Clear all files from the cache."""
        clear_cache(manager.module_name)

    return main
def add_cli_summarize(main: click.Group) -> click.Group:  # noqa: D202
    """Add a ``summarize`` command to main :mod:`click` function.

    :param main: The click group to attach the command to
    :return: The same group, for chaining
    """

    @main.command()
    @click.pass_obj
    def summarize(manager: AbstractManager):
        """Summarize the contents of the database."""
        if not manager.is_populated():
            click.secho(f'{manager.module_name} has not been populated', fg='red')
            sys.exit(1)
        # one "Name: count" line per model, alphabetically
        for name, count in sorted(manager.summarize().items()):
            click.echo(f'{name.capitalize()}: {count}')

    return main
def create_all(self, check_first: bool = True):
    """Create all tables for the (initially empty) database.

    :param bool check_first: When True (the default), do not issue CREATE
        statements for tables that already exist. Forwarded to
        :meth:`sqlalchemy.sql.schema.MetaData.create_all` as ``checkfirst``.
    """
    metadata = self._metadata
    metadata.create_all(self.engine, checkfirst=check_first)
def drop_all(self, check_first: bool = True):
    """Drop all tables from the database, then record the drop action.

    :param bool check_first: When True (the default), only issue DROP
        statements for tables confirmed to exist. Forwarded to
        :meth:`sqlalchemy.sql.schema.MetaData.drop_all` as ``checkfirst``.
    """
    metadata = self._metadata
    metadata.drop_all(self.engine, checkfirst=check_first)
    # Persist a record of the drop so the action history stays accurate.
    self._store_drop()
def get_cli(cls) -> click.Group:
    """Build the :mod:`click` CLI, extending the parent CLI with database commands."""
    main = super().get_cli()
    # Registration order determines the order commands appear in --help.
    for attach in (
        cls._cli_add_populate,
        cls._cli_add_drop,
        cls._cli_add_cache,
        cls._cli_add_summarize,
    ):
        attach(main)
    return main
def label(labels=None, language='any', sortLabel=False):
    '''
    Provide a label for a list of labels.

    The items in the list of labels are assumed to be either instances of
    :class:`Label`, or dicts with at least the key `label` in them. These will
    be passed to the :func:`dict_to_label` function.

    This method tries to find a label by looking if there's a pref label for
    the specified language. If there's no pref label, it looks for an alt
    label. It disregards hidden labels.

    While matching languages, preference will be given to exact matches. But,
    if no exact match is present, an inexact match will be attempted. This
    might be because a label in language `nl-BE` is being requested, but only
    `nl` or even `nl-NL` is present. Similarly, when requesting `nl`, a label
    with language `nl-NL` or even `nl-Latn-NL` will also be considered,
    providing no label is present that has an exact match with the requested
    language.

    If language 'any' was specified, all labels will be considered, regardless
    of language. To find a label without a specified language, pass `None` as
    language. If a language or None was specified, and no label could be
    found, this method will automatically try to find a label in some other
    language. Finally, if no label could be found, None is returned.

    :param labels: A list of :class:`Label` instances or dicts. Defaults to
        no labels.
    :param string language: The preferred language to receive the label in.
        This should be a valid IANA language tag.
    :param boolean sortLabel: Should sortLabels be considered or not? If True,
        sortLabels will be preferred over prefLabels. Bear in mind that these
        are still language dependent. So, it's possible to have a different
        sortLabel per language.
    :rtype: A :class:`Label` or `None` if no label could be found.
    '''
    # FIX: the default used to be a mutable ``[]``; ``None`` avoids the shared
    # mutable-default pitfall and behaves identically (both are falsy).
    if not labels:
        return None
    if not language:
        language = 'und'
    labels = [dict_to_label(l) for l in labels]
    best = None
    if sortLabel:
        best = find_best_label_for_type(labels, language, 'sortLabel')
    if not best:
        best = find_best_label_for_type(labels, language, 'prefLabel')
    if not best:
        best = find_best_label_for_type(labels, language, 'altLabel')
    if best:
        return best
    # Nothing matched the requested language: retry once accepting any
    # language, unless we were already doing that.
    return label(labels, 'any', sortLabel) if language != 'any' else None
def find_best_label_for_type(labels, language, labeltype):
    '''
    Find the best label of a given type for a language.

    :param list labels: A list of :class:`Label`.
    :param str language: An IANA language string, eg. `nl` or `nl-BE`.
    :param str labeltype: Type of label to look for, eg. `prefLabel`.
    :return: The best matching :class:`Label`, or ``False`` when none exists.
    '''
    candidates = [lbl for lbl in labels if lbl.type == labeltype]
    if not candidates:
        return False
    if language == 'any':
        return candidates[0]
    # Exact language matches win; fall back to broader (regional) matches.
    for broader in (False, True):
        matches = filter_labels_by_language(candidates, language, broader)
        if matches:
            return matches[0]
    return False
def filter_labels_by_language(labels, language, broader=False):
    '''
    Keep only the labels written in a certain language.

    :param list labels: A list of :class:`Label`.
    :param str language: An IANA language string, eg. `nl` or `nl-BE`.
    :param boolean broader: When true, will also match `nl-BE` when filtering
        on `nl`. When false, only exact matches are considered.
    '''
    if language == 'any':
        return labels
    if broader:
        # Compare only the primary language subtag (nl-BE -> nl).
        wanted = tags.tag(language).language.format
        return [lbl for lbl in labels
                if tags.tag(lbl.language).language.format == wanted]
    wanted = tags.tag(language).format
    return [lbl for lbl in labels if tags.tag(lbl.language).format == wanted]
Transform a dict with keys label type and language into a: class: Label. | def dict_to_label(dict):
'''
Transform a dict with keys `label`, `type` and `language` into a
:class:`Label`.
Only the `label` key is mandatory. If `type` is not present, it will
default to `prefLabel`. If `language` is not present, it will default
to `und`.
If the argument passed is not a dict, this method just
returns the argument.
'''
try:
return Label(
dict['label'],
dict.get('type', 'prefLabel'),
dict.get('language', 'und')
)
except (KeyError, AttributeError, TypeError):
return dict |
def dict_to_note(value):
    '''
    Transform a dict with keys `note`, `type` and `language` into a
    :class:`Note`.

    Only the `note` key is mandatory. If `type` is not present, it will
    default to `note`. If `language` is not present, it will default to
    `und`. If `markup` is not present it will default to `None`.

    If the argument passed is already a :class:`Note`, this method just
    returns the argument.
    '''
    # FIX: the parameter used to be named ``dict``, shadowing the builtin.
    if isinstance(value, Note):
        return value
    return Note(
        value['note'],
        value.get('type', 'note'),
        value.get('language', 'und'),
        value.get('markup'),
    )
def dict_to_source(value):
    '''
    Transform a dict with key 'citation' into a :class:`Source`.

    If the argument passed is already a :class:`Source`, this method just
    returns the argument.
    '''
    # FIX: the parameter used to be named ``dict``, shadowing the builtin.
    if isinstance(value, Source):
        return value
    return Source(
        value['citation'],
        value.get('markup'),
    )
Provide a single sortkey for this conceptscheme. | def _sortkey(self, key='uri', language='any'):
'''
Provide a single sortkey for this conceptscheme.
:param string key: Either `uri`, `label` or `sortlabel`.
:param string language: The preferred language to receive the label in
if key is `label` or `sortlabel`. This should be a valid IANA language tag.
:rtype: :class:`str`
'''
if key == 'uri':
return self.uri
else:
l = label(self.labels, language, key == 'sortlabel')
return l.label.lower() if l else '' |
def _iterate_managers(connection, skip):
    """Yield ``(index, name, manager)`` for every manager class that instantiates."""
    for idx, name, manager_cls in _iterate_manage_classes(skip):
        if name in skip:
            continue
        try:
            manager = manager_cls(connection=connection)
        except TypeError as e:
            # A constructor whose signature does not match is reported and skipped.
            click.secho(f'Could not instantiate {name}: {e}', fg='red')
            continue
        yield idx, name, manager
def populate(connection, reset, force, skip):
    """Populate all databases.

    :param connection: Database connection string passed to each manager.
    :param reset: Drop and recreate each database before populating.
    :param force: Populate even when a manager reports it is already populated.
    :param skip: Collection of manager names to skip entirely.
    """
    for idx, name, manager in _iterate_managers(connection, skip):
        click.echo(
            click.style(f'[{idx}/{len(MANAGERS)}] ', fg='blue', bold=True) +
            click.style(f'populating {name}', fg='cyan', bold=True)
        )

        if reset:
            click.echo('deleting the previous instance of the database')
            manager.drop_all()
            click.echo('creating new models')
            manager.create_all()
        elif manager.is_populated() and not force:
            # FIX: this used click.echo(..., color='red'); echo's ``color``
            # only toggles ANSI handling and never colors text — secho(fg=...)
            # actually prints in red.
            click.secho(f'π {name} is already populated. use --force to overwrite', fg='red')
            continue

        try:
            manager.populate()
        except Exception:
            logger.exception('%s population failed', name)
            click.secho(f'π {name} population failed', fg='red', bold=True)
def drop(connection, skip):
    """Drop all databases, announcing each one as it goes."""
    for _, name, manager in _iterate_managers(connection, skip):
        click.secho(f'dropping {name}', fg='cyan', bold=True)
        manager.drop_all()
def clear(skip):
    """Clear all caches, except for skipped modules."""
    # sorted() keeps the original deterministic, alphabetical ordering.
    for name in sorted(set(MODULES) - set(skip)):
        click.secho(f'clearing cache for {name}', fg='cyan', bold=True)
        clear_cache(name)
def summarize(connection, skip):
    """Summarize the contents of every populated database."""
    for _, name, manager in _iterate_managers(connection, skip):
        click.secho(name, fg='cyan', bold=True)
        if not manager.is_populated():
            click.echo('π unpopulated')
            continue

        # Extra summaries for managers exposing namespace/relation mixins.
        if isinstance(manager, BELNamespaceManagerMixin):
            click.secho(f'Terms: {manager._count_model(manager.namespace_model)}', fg='green')
        if isinstance(manager, BELManagerMixin):
            try:
                click.secho(f'Relations: {manager.count_relations()}', fg='green')
            except TypeError as e:
                click.secho(str(e), fg='red')

        for field_name, count in sorted(manager.summarize().items()):
            pretty = field_name.replace('_', ' ').capitalize()
            click.echo(click.style('=> ', fg='white', bold=True) + f"{pretty}: {count}")
def sheet(connection, skip, file: TextIO):
    """Generate a summary sheet of all populated managers and write it to ``file``.

    :param connection: Database connection string passed to each manager.
    :param skip: Collection of manager names to skip.
    :param file: Destination stream for the rendered table.
    """
    from tabulate import tabulate
    header = ['', 'Name', 'Description', 'Terms', 'Relations']
    rows = []
    for i, (idx, name, manager) in enumerate(_iterate_managers(connection, skip), start=1):
        try:
            if not manager.is_populated():
                continue
        except AttributeError:
            click.secho(f'{name} does not implement is_populated', fg='red')
            continue

        terms, relations = None, None
        if isinstance(manager, BELNamespaceManagerMixin):
            terms = manager._count_model(manager.namespace_model)
        if isinstance(manager, BELManagerMixin):
            try:
                relations = manager.count_relations()
            except TypeError as e:
                relations = str(e)

        # First docstring line doubles as the manager's description.
        rows.append((i, name, manager.__doc__.split('\n')[0].strip().strip('.'), terms, relations))

    # FIX: output previously went to stdout unconditionally, silently
    # ignoring the ``file`` argument.
    print(tabulate(rows, headers=header), file=file)
def write(connection, skip, directory, force):
    """Write BEL namespace names/identifiers to the terminology store.

    :param connection: Database connection string passed to each manager.
    :param skip: Collection of manager names to skip.
    :param directory: Output directory (created if missing).
    :param force: Drop, clear the cache, and repopulate before writing.
    """
    os.makedirs(directory, exist_ok=True)
    from .manager.namespace_manager import BELNamespaceManagerMixin
    for idx, name, manager in _iterate_managers(connection, skip):
        if not (isinstance(manager, AbstractManager) and isinstance(manager, BELNamespaceManagerMixin)):
            continue
        click.secho(name, fg='cyan', bold=True)
        if force:
            try:
                click.echo('dropping')
                manager.drop_all()
                click.echo('clearing cache')
                clear_cache(name)
                click.echo('populating')
                manager.populate()
            except Exception:
                # FIX: the traceback was previously discarded; log it so the
                # cause of a forced-rebuild failure is diagnosable.
                logger.exception('%s failed during forced rebuild', name)
                click.secho(f'{name} failed', fg='red')
                continue

        try:
            r = manager.write_directory(directory)
        except TypeError as e:
            click.secho(f'error with {name}: {e}'.rstrip(), fg='red')
        else:
            if not r:
                click.echo('no update')
def write(connection, skip, directory, force):
    """Export every BEL-capable manager as pickle and JSON files."""
    os.makedirs(directory, exist_ok=True)
    from .manager.bel_manager import BELManagerMixin
    import pybel
    for _, name, manager in _iterate_managers(connection, skip):
        if not isinstance(manager, BELManagerMixin):
            continue
        click.secho(name, fg='cyan', bold=True)
        path = os.path.join(directory, f'{name}.bel.pickle')
        if os.path.exists(path) and not force:
            click.echo('π already exported')
            continue
        if not manager.is_populated():
            click.echo('π unpopulated')
            continue
        graph = manager.to_bel()
        pybel.to_pickle(graph, path)
        pybel.to_json_path(graph, os.path.join(directory, f'{name}.bel.json'))
def web(connection, host, port):
    """Run a combined web interface for all Bio2BEL apps."""
    from bio2bel.web.application import create_application
    app = create_application(connection=connection)
    # Blocks until the development server is stopped.
    app.run(host=host, port=port)
def actions(connection):
    """List all recorded actions, one per line."""
    session = _make_session(connection=connection)
    for entry in Action.ls(session=session):
        click.echo(f'{entry.created} {entry.action} {entry.resource}')
def add_cli_to_bel(main: click.Group) -> click.Group:  # noqa: D202
    """Attach a ``write`` command (BEL script export) to a :mod:`click` group.

    :param main: The command group to extend.
    :return: The same group, for chaining.
    """

    @main.command()
    @click.option('-o', '--output', type=click.File('w'), default=sys.stdout)
    @click.option('-f', '--fmt', default='bel', show_default=True, help='BEL export format')
    @click.pass_obj
    def write(manager: BELManagerMixin, output: TextIO, fmt: str):
        """Write as BEL Script."""
        graph = manager.to_bel()
        graph.serialize(file=output, fmt=fmt)
        # Echo a short summary so the user sees what was exported.
        click.echo(graph.summary_str())

    return main
def add_cli_upload_bel(main: click.Group) -> click.Group:  # noqa: D202
    """Attach an ``upload`` command (BEL Commons upload) to a :mod:`click` group.

    :param main: The command group to extend.
    :return: The same group, for chaining.
    """

    @main.command()
    @host_option
    @click.pass_obj
    def upload(manager: BELManagerMixin, host: str):
        """Upload BEL to BEL Commons."""
        graph = manager.to_bel()
        pybel.to_web(graph, host=host, public=True)

    return main
def count_relations(self) -> int:
    """Count the number of BEL relations generated.

    :raises Bio2BELMissingEdgeModelError: If ``edge_model`` was never defined
        and this method was not overridden.
    """
    if self.edge_model is ...:
        # FIX: error message previously said "edge_edge model".
        raise Bio2BELMissingEdgeModelError('edge model is undefined/count_bel_relations is not overridden')
    if isinstance(self.edge_model, list):
        # Several edge models: total their individual counts.
        return sum(self._count_model(m) for m in self.edge_model)
    return self._count_model(self.edge_model)
def to_indra_statements(self, *args, **kwargs):
    """Dump as a list of INDRA statements.

    Positional and keyword arguments are forwarded to :meth:`to_bel`.

    :rtype: List[indra.Statement]
    """
    return to_indra_statements(self.to_bel(*args, **kwargs))
def get_cli(cls) -> click.Group:
    """Get a :mod:`click` main function, extended with a ``bel`` command group."""
    main = super().get_cli()

    @main.group()
    def bel():
        """Manage BEL."""

    # Registration order determines the order commands appear in --help.
    for attach in (cls._cli_add_to_bel, cls._cli_add_upload_bel):
        attach(bel)

    return main
def exebench(width):
    """Benchmark several image-scaling backends against each other.

    benchorg.jpg is
    'http://upload.wikimedia.org/wikipedia/commons/d/df/SAND_LUE.jpg'
    """
    height = width * 2 / 3
    backends = (
        ('kaa.imlib2', imlib2_scale),
        ('PIL', pil_scale),
        ('pgmagick(blob-read)', pgmagick_scale_from_blob),
        ('pgmagick(normal-read)', pgmagick_scale),
        ('pgmagick(scale+sharpen)', pgmagick_scale_plus_sharpen),
        ('opencv', opencv_scale),
        ('pyimlib2', pyimlib2_scale),
        ('pyimlib2_with_pgsharpen', pyimlib2_scale_with_pgmagicksharpen),
    )
    with Benchmarker(width=30, loop=N) as bm:
        for bench_name, scale in backends:
            for _ in bm(bench_name):
                scale('benchorg.jpg', width, height)
    return bm.results
def _convert_coordinatelist(input_obj):
    """Convert a 'list' or 'tuple' of (x, y) pairs to pgmagick.CoordinateList.

    :type input_obj: list or tuple
    """
    coordinate_list = pgmagick.CoordinateList()
    for point in input_obj:
        coordinate_list.append(pgmagick.Coordinate(point[0], point[1]))
    return coordinate_list
def _convert_vpathlist(input_obj):
    """Convert a 'list' or 'tuple' of (x, y) pairs to pgmagick.VPathList.

    :type input_obj: list or tuple
    """
    vpath_list = pgmagick.VPathList()
    for point in input_obj:
        # FIXME: every point is added as an absolute move-to for now.
        vpath_list.append(
            pgmagick.PathMovetoAbs(pgmagick.Coordinate(point[0], point[1])))
    return vpath_list
def get_exif_info(self):
    """Return a dict of EXIF tags, omitting empty and 'unknown' values."""
    info = {}
    for tag in _EXIF_TAGS:
        value = self.img.attribute("EXIF:%s" % tag)
        # GraphicsMagick reports missing tags as '' or 'unknown'.
        if value and value != 'unknown':
            info[tag] = value
    return info
def bezier(self, points):
    """Draw a Bezier-curve.

    :param points: ex.) ((5, 5), (6, 6), (7, 7))
    :type points: list
    """
    curve_points = pgmagick.CoordinateList()
    for point in points:
        curve_points.append(
            pgmagick.Coordinate(float(point[0]), float(point[1])))
    self.drawer.append(pgmagick.DrawableBezier(curve_points))
def color(self, x, y, paint_method):
    """Apply a color operation at (x, y) with the given paint method.

    :param paint_method: 'point' or 'replace' or 'floodfill' or
                         'filltoborder' or 'reset'
    :type paint_method: str or pgmagick.PaintMethod
    """
    method = _convert_paintmethod(paint_method)
    self.drawer.append(pgmagick.DrawableColor(x, y, method))
def ellipse(self, org_x, org_y, radius_x, radius_y, arc_start, arc_end):
    """Draw an ellipse (or elliptical arc).

    :param org_x: origination x axis
    :param org_y: origination y axis
    :param radius_x: radius x axis
    :param radius_y: radius y axis
    :param arc_start: arc start angle
    :param arc_end: arc end angle
    """
    shape = pgmagick.DrawableEllipse(
        float(org_x), float(org_y),
        float(radius_x), float(radius_y),
        float(arc_start), float(arc_end),
    )
    self.drawer.append(shape)
def fill_opacity(self, opacity):
    """Set the fill opacity.

    :param opacity: 0.0 ~ 1.0
    """
    self.drawer.append(pgmagick.DrawableFillOpacity(float(opacity)))
def matte(self, x, y, paint_method):
    """Apply a matte operation at (x, y).

    :param paint_method: 'point' or 'replace' or 'floodfill' or
                         'filltoborder' or 'reset'
    :type paint_method: str or pgmagick.PaintMethod
    """
    method = _convert_paintmethod(paint_method)
    self.drawer.append(pgmagick.DrawableMatte(x, y, method))
def scaling(self, x, y):
    """Scale the draw object.

    :param x: 0.0 ~ 1.0
    :param y: 0.0 ~ 1.0
    """
    scale = pgmagick.DrawableScaling(float(x), float(y))
    self.drawer.append(scale)
def stroke_antialias(self, flag=True):
    """Enable or disable stroke antialiasing.

    :param flag: True or False. (default is True)
    :type flag: bool
    """
    self.drawer.append(pgmagick.DrawableStrokeAntialias(flag))
def stroke_linecap(self, linecap):
    """Set the stroke line cap.

    :param linecap: 'undefined', 'butt', 'round', 'square'
    :type linecap: str
    """
    # e.g. 'round' -> pgmagick.LineCap.RoundCap
    cap = getattr(pgmagick.LineCap, "%sCap" % linecap.title())
    self.drawer.append(pgmagick.DrawableStrokeLineCap(cap))
def stroke_linejoin(self, linejoin):
    """Set the stroke line join.

    :param linejoin: 'undefined', 'miter', 'round', 'bevel'
    :type linejoin: str
    """
    # e.g. 'miter' -> pgmagick.LineJoin.MiterJoin
    join = getattr(pgmagick.LineJoin, "%sJoin" % linejoin.title())
    self.drawer.append(pgmagick.DrawableStrokeLineJoin(join))
def text_antialias(self, flag=True):
    """Enable or disable text antialiasing.

    :param flag: True or False. (default is True)
    :type flag: bool
    """
    self.drawer.append(pgmagick.DrawableTextAntialias(flag))
def text_decoration(self, decoration):
    """Set the text decoration.

    :param decoration: 'no', 'underline', 'overline', 'linethrough'
    :type decoration: str
    """
    if decoration.lower() == 'linethrough':
        # 'linethrough'.title() gives 'Linethrough', which does not match the
        # enum member name, so it has to be special-cased.
        decoration_type = pgmagick.DecorationType.LineThroughDecoration
    else:
        decoration_type = getattr(
            pgmagick.DecorationType, "%sDecoration" % decoration.title())
    self.drawer.append(pgmagick.DrawableTextDecoration(decoration_type))
def get_version_from_pc(search_dirs, target):
    """Find a pkg-config file and extract its version.

    Similar to 'pkg-config --modversion GraphicsMagick++'. Returns the first
    version found, or None when no matching file exists.
    """
    for search_dir in search_dirs:
        for root, _dirs, files in os.walk(search_dir):
            if target not in files:
                continue
            file_path = os.path.join(root, target)
            version = _grep("Version: ", file_path).split()[1]
            print("Found version %s in file %s" % (version, file_path))
            return version
def library_supports_api(library_version, api_version, different_major_breaks_support=True):
    """
    Return whether ``api_version`` is supported by ``library_version``.

    E.g. library_version (1,3,21) returns True for api_version (1,3,21),
    (1,3,19), (1,3,'x'), (1,2,'x'), (1,'x'); False for (1,3,24), (1,4,'x'),
    (2,'x'). An 'x' component acts as a wildcard.

    different_major_breaks_support - if enabled and library and api major
    versions differ, always return False. E.g. with library_version (2,0,0)
    and api_version (1,3,24): False if enabled, True if disabled.
    """
    assert isinstance(library_version, (tuple, list))  # won't work with e.g. generators
    assert len(library_version) == 3
    # Normalise 'x' wildcards to 0, keeping the same sequence type so that
    # the ordering comparison below compares like with like.
    sequence_type = type(library_version)
    api_version = sequence_type(0 if part == 'x' else part for part in api_version)
    if different_major_breaks_support and library_version[0] != api_version[0]:
        return False
    assert len(api_version) <= 3  # otherwise comparison misbehaves, e.g. (2, 0, 0) > (2, 0, 0, 0)
    return library_version >= api_version
def version():
    """Return the version string parsed from pgmagick/_version.py."""
    with io.open('pgmagick/_version.py') as input_file:
        for line in input_file:
            if line.startswith('__version__'):
                # FIX: use ``.value`` on the parsed constant instead of the
                # deprecated ``.s`` accessor (ast.Str was removed in 3.12).
                return ast.parse(line).body[0].value.value
def post_license_request(request):
    """Submission to create a license acceptance request.

    Expects a JSON body with an optional ``license_url`` and a list of
    ``licensors``. Responds 202 when the request has been accepted.

    :raises HTTPNotFound: when the uuid has no controls row and the caller
        may not create identifiers.
    :raises HTTPBadRequest: when no license is set and none was supplied, or
        when the supplied ``license_url`` is not valid for publication.
    """
    uuid_ = request.matchdict['uuid']
    posted_data = request.json
    license_url = posted_data.get('license_url')
    licensors = posted_data.get('licensors', [])
    with db_connect() as db_conn:
        with db_conn.cursor() as cursor:
            # Look up the currently assigned license for this uuid, if any.
            cursor.execute("""\
SELECT l.url
FROM document_controls AS dc
LEFT JOIN licenses AS l ON (dc.licenseid = l.licenseid)
WHERE uuid = %s::UUID""", (uuid_,))
            try:
                # Check that the license exists
                existing_license_url = cursor.fetchone()[0]
            except TypeError:  # NoneType
                # No controls row yet: create one on the fly when the caller
                # is allowed to mint identifiers, otherwise 404.
                if request.has_permission('publish.create-identifier'):
                    cursor.execute("""\
INSERT INTO document_controls (uuid) VALUES (%s)""", (uuid_,))
                    existing_license_url = None
                else:
                    raise httpexceptions.HTTPNotFound()
            if existing_license_url is None and license_url is None:
                raise httpexceptions.HTTPBadRequest("license_url is required")
            elif (license_url != existing_license_url or
                  existing_license_url is None):
                # Assign the new license; the UPDATE only matches licenses
                # flagged valid for publication, so RETURNING doubles as the
                # validity check.
                cursor.execute("""\
UPDATE document_controls AS dc
SET licenseid = l.licenseid FROM licenses AS l
WHERE url = %s and is_valid_for_publication = 't'
RETURNING dc.licenseid""",
                               (license_url,))
                try:
                    # Check that it is a valid license id
                    cursor.fetchone()[0]
                except TypeError:  # None returned
                    raise httpexceptions.HTTPBadRequest("invalid license_url")
            upsert_license_requests(cursor, uuid_, licensors)

    resp = request.response
    # 202 Accepted: the acceptance requests were recorded.
    resp.status_int = 202
    return resp
def delete_license_request(request):
    """Submission to remove a license acceptance request."""
    uuid_ = request.matchdict['uuid']
    licensors = request.json.get('licensors', [])
    posted_uids = [licensor['uid'] for licensor in licensors]
    with db_connect() as db_conn:
        with db_conn.cursor() as cursor:
            remove_license_requests(cursor, uuid_, posted_uids)

    resp = request.response
    resp.status_int = 200
    return resp
def get_roles_request(request):
    """Return the accepting roles for a uuid.

    When a ``uid`` is present in the route, a single acceptance record is
    returned; otherwise a list of all acceptances for the uuid.

    :raises HTTPNotFound: when the uuid (or the uuid/uid pair) is unknown.
    """
    uuid_ = request.matchdict['uuid']
    user_id = request.matchdict.get('uid')
    # Optionally narrow the query to a single user.
    args = [uuid_]
    fmt_conditional = ""
    if user_id is not None:
        fmt_conditional = "AND user_id = %s"
        args.append(user_id)
    with db_connect() as db_conn:
        with db_conn.cursor() as cursor:
            cursor.execute("""\
SELECT row_to_json(combined_rows) FROM (
SELECT uuid, user_id AS uid, role_type AS role, accepted AS has_accepted
FROM role_acceptances AS la
WHERE uuid = %s {}
ORDER BY user_id ASC, role_type ASC
) as combined_rows""".format(fmt_conditional), args)
            acceptances = [row[0] for row in cursor.fetchall()]
            if not acceptances:
                if user_id is not None:
                    raise httpexceptions.HTTPNotFound()
                # No acceptances at all: distinguish "no requests yet" (an
                # empty list) from "unknown uuid" (404).
                cursor.execute("""\
SELECT TRUE FROM document_controls WHERE uuid = %s""", (uuid_,))
                try:
                    cursor.fetchone()[0]
                except TypeError:  # NoneType
                    raise httpexceptions.HTTPNotFound()
    return acceptances[0] if user_id is not None else acceptances
def post_roles_request(request):
    """Submission to create a role acceptance request.

    :raises HTTPNotFound: when the uuid is unknown and the caller may not
        create identifiers.
    :raises HTTPBadRequest: when a posted user cannot be fetched.
    """
    uuid_ = request.matchdict['uuid']
    posted_roles = request.json
    with db_connect() as db_conn:
        with db_conn.cursor() as cursor:
            cursor.execute("""\
SELECT TRUE FROM document_controls WHERE uuid = %s::UUID""", (uuid_,))
            if cursor.fetchone() is None:
                # Lazily create the controls row when the caller is allowed
                # to mint identifiers; otherwise the uuid does not exist.
                if not request.has_permission('publish.create-identifier'):
                    raise httpexceptions.HTTPNotFound()
                cursor.execute("""\
INSERT INTO document_controls (uuid) VALUES (%s)""", (uuid_,))
            try:
                upsert_users(cursor, [role['uid'] for role in posted_roles])
            except UserFetchError as exc:
                raise httpexceptions.HTTPBadRequest(exc.message)
            upsert_role_requests(cursor, uuid_, posted_roles)

    resp = request.response
    resp.status_int = 202
    return resp
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.