code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
Path(value).mkdir(parents=True, exist_ok=True) set_git_home(value) return value
def store_directory(ctx, param, value)
Store directory as a new Git home.
4.616227
3.154063
1.463581
if not client.use_external_storage: use_external_storage = False ctx.obj = client = attr.evolve( client, path=directory, use_external_storage=use_external_storage, ) msg = 'Initialized empty project in {path}' branch_name = None stack = contextlib.ExitStack() if force and client.repo: msg = 'Initialized project in {path} (branch {branch_name})' merge_args = ['--no-ff', '-s', 'recursive', '-X', 'ours'] try: commit = client.find_previous_commit( str(client.renku_metadata_path), ) branch_name = 'renku/init/' + str(commit) except KeyError: from git import NULL_TREE commit = NULL_TREE branch_name = 'renku/init/root' merge_args.append('--allow-unrelated-histories') ctx.obj = client = stack.enter_context( client.worktree( branch_name=branch_name, commit=commit, merge_args=merge_args, ) ) try: with client.lock: path = client.init_repository(name=name, force=force) except FileExistsError: raise click.UsageError( 'Renku repository is not empty. ' 'Please use --force flag to use the directory as Renku ' 'repository.' ) stack.enter_context(client.commit()) with stack: # Install Git hooks. from .githooks import install ctx.invoke(install, force=force) # Create all necessary template files. from .runner import template ctx.invoke(template, force=force) click.echo(msg.format(path=path, branch_name=branch_name))
def init(ctx, client, directory, name, force, use_external_storage)
Initialize a project.
3.866297
3.832014
1.008947
missing = defaultdict(list) for path, dataset in client.datasets.items(): for file in dataset.files: filepath = (path.parent / file) if not filepath.exists(): missing[str( path.parent.relative_to(client.renku_datasets_path) )].append( os.path.normpath(str(filepath.relative_to(client.path))) ) if not missing: return True click.secho( WARNING + 'There are missing files in datasets.' # '\n (use "renku dataset clean <name>" to clean them)' ) for dataset, files in missing.items(): click.secho( '\n\t' + click.style(dataset, fg='yellow') + ':\n\t ' + '\n\t '.join(click.style(path, fg='red') for path in files) ) return False
def check_missing_files(client)
Find missing files listed in datasets.
3.51703
3.365325
1.045079
assert isinstance(e, requests.exceptions.HTTPError) response = e.response try: message = response.json()['message'] except (KeyError, ValueError): message = response.content.strip() raise cls(message)
def from_http_exception(cls, e)
Create ``APIError`` from ``requests.exception.HTTPError``.
2.944816
2.804805
1.049918
try: if response.status_code in expected_status_code: return response except TypeError: if response.status_code == expected_status_code: return response raise cls(response)
def return_or_raise(cls, response, expected_status_code)
Check for ``expected_status_code``.
2.618306
2.543091
1.029577
from renku.models.refs import LinkReference missing = [ ref for ref in LinkReference.iter_items(client) if not ref.reference.exists() ] if not missing: return True click.secho( WARNING + 'There are missing references.' '\n (use "git rm <name>" to clean them)\n\n\t' + '\n\t '.join( click.style(str(ref.path), fg='yellow') + ' -> ' + click.style(str(ref.reference), fg='red') for ref in missing ) + '\n' ) return False
def check_missing_references(client)
Find missing references.
4.405687
4.202699
1.048299
ctx = click.get_current_context(silent=True) if ctx and GIT_KEY in ctx.meta: return ctx.meta[GIT_KEY] from git import Repo return Repo(path, search_parent_directories=True).working_dir
def get_git_home(path='.')
Get Git path from the current context.
4.154398
3.339653
1.243961
ctx = click.get_current_context(silent=True) if ctx and GIT_ISOLATION in ctx.meta: return ctx.meta[GIT_ISOLATION]
def get_git_isolation()
Get Git isolation from the current context.
3.659956
3.083279
1.187034
branch_name = str(issue) if issue else 'master' if branch_name not in repo.heads: branch = repo.create_head(branch_name) else: branch = repo.heads[branch_name] branch.checkout()
def _safe_issue_checkout(repo, issue=None)
Safely checkout branch for the issue.
2.65931
2.420893
1.098483
if isinstance(type, (list, tuple, set)): types = list(type) else: types = [type] if type is not None else [] context = context or {} translate = translate or {} def wrap(cls): jsonld_cls = attr.s(cls, **attrs_kwargs) if not issubclass(jsonld_cls, JSONLDMixin): jsonld_cls = attr.s( make_type(cls.__name__, (jsonld_cls, JSONLDMixin), {}), **attrs_kwargs ) # Merge types for subcls in jsonld_cls.mro(): subtype = getattr(subcls, '_jsonld_type', None) if subtype: if isinstance(subtype, (tuple, list)): types.extend(subtype) else: types.append(subtype) for key, value in getattr(subcls, '_jsonld_context', {}).items(): if key in context and context[key] != value: raise TypeError() context.setdefault(key, value) for a in attr.fields(jsonld_cls): key = a.name ctx = a.metadata.get(KEY) if ctx is None: continue if ':' in ctx: prefix, _ = ctx.split(':', 1) if prefix in context: context[key] = ctx continue if isinstance(ctx, dict) or ctx not in context: context[key] = ctx if KEY_CLS in a.metadata: merge_ctx = a.metadata[KEY_CLS]._jsonld_context for ctx_key, ctx_value in merge_ctx.items(): context.setdefault(ctx_key, ctx_value) if context[ctx_key] != ctx_value: raise TypeError( 'Can not merge {0} and {1} because of {2}'.format( jsonld_cls, a.metadata[KEY_CLS], ctx_key ) ) jsonld_cls.__module__ = cls.__module__ jsonld_cls._jsonld_type = types[0] if len(types) == 1 else list( sorted(set(types)) ) jsonld_cls._jsonld_context = context jsonld_cls._jsonld_translate = translate jsonld_cls._jsonld_fields = { a.name for a in attr.fields(jsonld_cls) if KEY in a.metadata } context_doc = '\n'.join( ' ' + line for line in json.dumps(context, indent=2).split('\n') ) jsonld_cls.__doc__ = DOC_TPL.format( cls=cls, type=json.dumps(jsonld_cls._jsonld_type), context=context_doc, ) # Register class for given JSON-LD @type try: type_ = ld.expand({ '@type': jsonld_cls._jsonld_type, '@context': context })[0]['@type'] if isinstance(type_, list): type_ = tuple(sorted(type_)) except Exception: # FIXME make sure all classes have @id defined return jsonld_cls if type_ in jsonld_cls.__type_registry__: raise TypeError( 'Type {0!r} is already registered for class {1!r}.'.format( jsonld_cls._jsonld_type, jsonld_cls.__type_registry__[jsonld_cls._jsonld_type], ) ) jsonld_cls.__type_registry__[type_] = jsonld_cls return jsonld_cls if maybe_cls is None: return wrap return wrap(maybe_cls)
def attrs( maybe_cls=None, type=None, context=None, translate=None, **attrs_kwargs )
Wrap an attr enabled class.
2.486895
2.471694
1.00615
kwargs.setdefault('metadata', {}) kwargs['metadata'][KEY] = context return attr.ib(**kwargs)
def attrib(context=None, **kwargs)
Create a new attribute with context.
8.717547
8.356967
1.043147
context = {'@container': '@{0}'.format(name)} def _attrib(type, **kwargs): kwargs.setdefault('metadata', {}) kwargs['metadata'][KEY_CLS] = type kwargs['default'] = Factory(container) def _converter(value): if isinstance(value, container): return mapper(type, value) elif value is None: return value raise ValueError(value) kwargs.setdefault('converter', _converter) context_ib = context.copy() context_ib.update(kwargs.pop('context', {})) return attrib(context=context_ib, **kwargs) return _attrib
def _container_attrib_builder(name, container, mapper)
Builder for container attributes.
4.634047
4.631283
1.000597
jsonld_fields = inst.__class__._jsonld_fields attrs = tuple( field for field in fields(inst.__class__) if field.name in jsonld_fields ) rv = dict_factory() def convert_value(v): if isinstance(v, Path): v = str(v) return os.path.relpath(v, str(basedir)) if basedir else v return v for a in attrs: v = getattr(inst, a.name) # skip proxies if isinstance(v, weakref.ReferenceType): continue # do not export context for containers ec = export_context and KEY_CLS not in a.metadata if filter is not None and not filter(a, v): continue if recurse is True: if has(v.__class__): rv[a.name] = asjsonld( v, recurse=True, filter=filter, dict_factory=dict_factory, basedir=basedir, ) elif isinstance(v, (tuple, list, set)): cf = v.__class__ if retain_collection_types is True else list rv[a.name] = cf([ asjsonld( i, recurse=True, filter=filter, dict_factory=dict_factory, export_context=ec, basedir=basedir, ) if has(i.__class__) else i for i in v ]) elif isinstance(v, dict): df = dict_factory rv[a.name] = df(( asjsonld( kk, dict_factory=df, basedir=basedir, ) if has(kk.__class__) else convert_value(kk), asjsonld( vv, dict_factory=df, export_context=ec, basedir=basedir, ) if has(vv.__class__) else vv ) for kk, vv in iteritems(v)) else: rv[a.name] = convert_value(v) else: rv[a.name] = convert_value(v) inst_cls = type(inst) if export_context: rv['@context'] = deepcopy(inst_cls._jsonld_context) if inst_cls._jsonld_type: rv['@type'] = inst_cls._jsonld_type return rv
def asjsonld( inst, recurse=True, filter=None, dict_factory=dict, retain_collection_types=False, export_context=True, basedir=None, )
Dump a JSON-LD class to the JSON with generated ``@context`` field.
2.272618
2.292578
0.991294
if isinstance(data, cls): return data if not isinstance(data, dict): raise ValueError(data) if '@type' in data: type_ = tuple(sorted(data['@type'])) if type_ in cls.__type_registry__ and getattr( cls, '_jsonld_type', None ) != type_: new_cls = cls.__type_registry__[type_] if cls != new_cls: return new_cls.from_jsonld(data) if cls._jsonld_translate: data = ld.compact(data, {'@context': cls._jsonld_translate}) data.pop('@context', None) data.setdefault('@context', cls._jsonld_context) if data['@context'] != cls._jsonld_context: compacted = ld.compact(data, {'@context': cls._jsonld_context}) else: compacted = data # assert compacted['@type'] == cls._jsonld_type, '@type must be equal' # TODO update self(not cls)._jsonld_context with data['@context'] fields = cls._jsonld_fields if __reference__: with with_reference(__reference__): self = cls( **{ k.lstrip('_'): v for k, v in compacted.items() if k in fields } ) else: self = cls( **{ k.lstrip('_'): v for k, v in compacted.items() if k in fields } ) if __source__: setattr(self, '__source__', __source__) return self
def from_jsonld(cls, data, __reference__=None, __source__=None)
Instantiate a JSON-LD class from data.
2.746993
2.731802
1.005561
import yaml with path.open(mode='r') as fp: source = yaml.safe_load(fp) or {} self = cls.from_jsonld( source, __reference__=path, __source__=deepcopy(source), ) return self
def from_yaml(cls, path)
Return an instance from a YAML file.
6.298398
6.110198
1.030801
source = {} if self.__source__: source.update(self.__source__) source.update(asjsonld(self)) return source
def asjsonld(self)
Create JSON-LD with the original source data.
6.585194
5.072612
1.298186
import yaml with self.__reference__.open('w') as fp: yaml.dump(self.asjsonld(), fp, default_flow_style=False)
def to_yaml(self)
Store an instance to the referenced YAML file.
8.082801
6.494565
1.244548
def addresses_for_key(gpg, key): fingerprint = key["fingerprint"] addresses = [] for key in gpg.list_keys(): if key["fingerprint"] == fingerprint: addresses.extend([address.split("<")[-1].strip(">") for address in key["uids"] if address]) return addresses
Takes a key and extracts the email addresses for it.
null
null
null
def send_mail(subject, body_text, addr_from, recipient_list, fail_silently=False, auth_user=None, auth_password=None, attachments=None, body_html=None, html_message=None, connection=None, headers=None): # Make sure only one HTML option is specified if body_html is not None and html_message is not None: # pragma: no cover raise ValueError("You cannot specify body_html and html_message at " "the same time. Please only use html_message.") # Push users to update their code if body_html is not None: # pragma: no cover warn("Using body_html is deprecated; use the html_message argument " "instead. Please update your code.", DeprecationWarning) html_message = body_html # Allow for a single address to be passed in. if isinstance(recipient_list, six.string_types): recipient_list = [recipient_list] connection = connection or get_connection( username=auth_user, password=auth_password, fail_silently=fail_silently) # Obtain a list of the recipients that have gpg keys installed. key_addresses = {} if USE_GNUPG: from email_extras.models import Address key_addresses = dict(Address.objects.filter(address__in=recipient_list) .values_list('address', 'use_asc')) # Create the gpg object. if key_addresses: gpg = GPG(gnupghome=GNUPG_HOME) if GNUPG_ENCODING is not None: gpg.encoding = GNUPG_ENCODING # Check if recipient has a gpg key installed def has_pgp_key(addr): return addr in key_addresses # Encrypts body if recipient has a gpg key installed. def encrypt_if_key(body, addr_list): if has_pgp_key(addr_list[0]): encrypted = gpg.encrypt(body, addr_list[0], always_trust=ALWAYS_TRUST) if encrypted == "" and body != "": # encryption failed raise EncryptionFailedError("Encrypting mail to %s failed.", addr_list[0]) return smart_text(encrypted) return body # Load attachments and create name/data tuples. attachments_parts = [] if attachments is not None: for attachment in attachments: # Attachments can be pairs of name/data, or filesystem paths. if not hasattr(attachment, "__iter__"): with open(attachment, "rb") as f: attachments_parts.append((basename(attachment), f.read())) else: attachments_parts.append(attachment) # Send emails - encrypted emails needs to be sent individually, while # non-encrypted emails can be sent in one send. So the final list of # lists of addresses to send to looks like: # [[unencrypted1, unencrypted2, unencrypted3], [encrypted1], [encrypted2]] unencrypted = [addr for addr in recipient_list if addr not in key_addresses] unencrypted = [unencrypted] if unencrypted else unencrypted encrypted = [[addr] for addr in key_addresses] for addr_list in unencrypted + encrypted: msg = EmailMultiAlternatives(subject, encrypt_if_key(body_text, addr_list), addr_from, addr_list, connection=connection, headers=headers) if html_message is not None: if has_pgp_key(addr_list[0]): mimetype = "application/gpg-encrypted" else: mimetype = "text/html" msg.attach_alternative(encrypt_if_key(html_message, addr_list), mimetype) for parts in attachments_parts: name = parts[0] if key_addresses.get(addr_list[0]): name += ".asc" msg.attach(name, encrypt_if_key(parts[1], addr_list)) msg.send(fail_silently=fail_silently)
Sends a multipart email containing text and html versions which are encrypted for each recipient that has a valid gpg key installed.
null
null
null
def send_mail_template(subject, template, addr_from, recipient_list, fail_silently=False, attachments=None, context=None, connection=None, headers=None): if context is None: context = {} # Loads a template passing in vars as context. def render(ext): name = "email_extras/%s.%s" % (template, ext) return loader.get_template(name).render(context) send_mail(subject, render("txt"), addr_from, recipient_list, fail_silently=fail_silently, attachments=attachments, html_message=render("html"), connection=connection, headers=headers)
Send email rendering text and html versions for the specified template name using the context dictionary passed in.
null
null
null
order, enter, state = deque(), set(nodes), {} def dfs(node): state[node] = GRAY for parent in nodes.get(node, ()): color = state.get(parent, None) if color == GRAY: raise ValueError('cycle') if color == BLACK: continue enter.discard(parent) dfs(parent) order.appendleft(node) state[node] = BLACK while enter: dfs(enter.pop()) return order
def topological(nodes)
Return nodes in a topological order.
3.4794
3.231679
1.076654
from renku.api._git import _expand_directories dst = Path(destination) def fmt_path(path): return str(Path(path).absolute().relative_to(client.path)) files = { fmt_path(source): fmt_path(file_or_dir) for file_or_dir in sources for source in _expand_directories((file_or_dir, )) } def fmt_dst(path): return str(dst / os.path.relpath(path, start=files[path])) destinations = {source: fmt_dst(source) for source in files} # 1. Check .gitignore. ignored = client.find_ignored_paths(*destinations.values()) if ignored: click.echo(WARNING + 'Renamed files match .gitignore.\n') if click.confirm( 'Do you want to edit ".gitignore" now?', default=False ): click.edit(filename=str(client.path / '.gitignore')) # 2. Update dataset metadata files. with progressbar( client.datasets.items(), item_show_func=lambda item: str(item[1].short_id) if item else '', label='Updating dataset metadata', width=0, ) as bar: for (path, dataset) in bar: renames = {} for file in dataset.files: filepath = fmt_path(os.path.normpath(str(path.parent / file))) if filepath in files: renames[file] = os.path.relpath( destinations[filepath], start=str(path.parent) ) if renames: dataset = dataset.rename_files( lambda key: renames.get(key, key) ) dataset.to_yaml() # 3. Manage .gitattributes for external storage. tracked = tuple( path for path, attr in client.find_attr(*files).items() if attr.get('filter') == 'lfs' ) client.untrack_paths_from_storage(*tracked) existing = client.find_attr(*tracked) if existing: click.echo(WARNING + 'There are custom .gitattributes.\n') if click.confirm( 'Do you want to edit ".gitattributes" now?', default=False ): click.edit(filename=str(client.path / '.gitattributes')) client.track_paths_in_storage(*(destinations[path] for path in tracked)) # 4. Handle symlinks. dst.parent.mkdir(parents=True, exist_ok=True) for source, target in destinations.items(): src = Path(source) if src.is_symlink(): Path(target).parent.mkdir(parents=True, exist_ok=True) Path(target).symlink_to( os.path.relpath( str(src.resolve()), start=os.path.dirname(target) ) ) src.unlink() del files[source] # Finally move the files. final_sources = list(set(files.values())) if final_sources: run(['git', 'mv'] + final_sources + [destination], check=True)
def move(ctx, client, sources, destination)
Move files and check repository for potential problems.
3.285551
3.254127
1.009657
# Find pre 0.3.4 metadata files. old_metadata = list(_dataset_metadata_pre_0_3_4(client)) if not old_metadata: return True click.secho( WARNING + 'There are metadata files in the old location.' '\n (use "renku migrate datasets" to move them)\n\n\t' + '\n\t'.join( click.style(str(path.relative_to(client.path)), fg='yellow') for path in old_metadata ) + '\n' ) return False
def check_dataset_metadata(client)
Check location of dataset metadata.
5.71513
5.379544
1.062382
graph = Graph(client) nodes = graph.build(paths=paths, revision=revision) siblings_ = set(nodes) for node in nodes: siblings_ |= graph.siblings(node) paths = {node.path for node in siblings_} for path in paths: click.echo(graph._format_path(path))
def siblings(client, revision, paths)
Show siblings for given paths.
4.657041
4.213557
1.105252
r from renku.models.provenance import ProcessRun graph = Graph(client) paths = set(paths) nodes = graph.build(revision=revision) commits = {node.commit for node in nodes} candidates = {(node.commit, node.path) for node in nodes if not paths or node.path in paths} input_paths = set() for commit in commits: activity = graph.activities[commit] if isinstance(activity, ProcessRun): for usage in activity.qualified_usage: for entity in usage.entity.entities: path = str((usage.client.path / entity.path).relative_to( client.path )) usage_key = (entity.commit, entity.path) if path not in input_paths and usage_key in candidates: input_paths.add(path) click.echo('\n'.join(graph._format_path(path) for path in input_paths)) ctx.exit(0 if not paths or len(input_paths) == len(paths) else 1)
def inputs(ctx, client, revision, paths)
r"""Show inputs files in the repository. <PATHS> Files to show. If no files are given all input files are shown.
4.076897
4.272429
0.954234
r graph = Graph(client) filter = graph.build(paths=paths, revision=revision) output_paths = graph.output_paths click.echo('\n'.join(graph._format_path(path) for path in output_paths)) if paths: if not output_paths: ctx.exit(1) from renku.models._datastructures import DirectoryTree tree = DirectoryTree.from_list(item.path for item in filter) for output in output_paths: if tree.get(output) is None: ctx.exit(1) return
def outputs(ctx, client, revision, paths)
r"""Show output files in the repository. <PATHS> Files to show. If no files are given all output files are shown.
4.901627
5.362228
0.914103
import inspect from renku.models import provenance from renku.models._jsonld import JSONLDMixin for name in dir(provenance): cls = getattr(provenance, name) if inspect.isclass(cls) and issubclass(cls, JSONLDMixin): yield name
def _context_names()
Return list of valid context names.
3.984761
3.917792
1.017093
if not value or ctx.resilient_parsing: return click.echo('\n'.join(_context_names())) ctx.exit()
def print_context_names(ctx, param, value)
Print all possible types.
2.854356
2.566418
1.112195
from renku.models import provenance cls = getattr(provenance, name) return { '@context': cls._jsonld_context, '@type': cls._jsonld_type, }
def _context_json(name)
Return JSON-LD string for given context name.
5.54951
4.773152
1.162651
import json contexts = [_context_json(name) for name in set(names)] if contexts: click.echo( json.dumps( contexts[0] if len(contexts) == 1 else contexts, indent=2, ) )
def context(names)
Show JSON-LD context for repository objects.
4.730209
4.17524
1.132919
if ctx.invoked_subcommand is None: from renku.models.refs import LinkReference names = defaultdict(list) for ref in LinkReference.iter_items(client, common_path='workflows'): names[ref.reference.name].append(ref.name) for path in client.workflow_path.glob('*.cwl'): click.echo( '{path}: {names}'.format( path=path.name, names=', '.join( click.style(_deref(name), fg='green') for name in names[path.name] ), ) )
def workflow(ctx, client)
List or manage workflows with subcommands.
4.442813
4.233929
1.049336
client = ctx.obj if value is None: from renku.models.provenance import ProcessRun activity = client.process_commit() if not isinstance(activity, ProcessRun): raise click.BadParameter('No tool was found.') return activity.path return value
def validate_path(ctx, param, value)
Detect a workflow path if it is not passed.
8.025288
7.061132
1.136544
from renku.models.refs import LinkReference LinkReference.create(client=client, name=_ref(name), force=force).set_reference(path)
def set_name(client, name, path, force)
Sets the <name> for remote <path>.
13.113216
12.528861
1.046641
from renku.models.refs import LinkReference LinkReference(client=client, name=_ref(old)).rename(_ref(new), force=force)
def rename(client, old, new, force)
Rename the workflow named <old> to <new>.
9.6832
8.989492
1.077169
from renku.models.refs import LinkReference LinkReference(client=client, name=_ref(name)).delete()
def remove(client, name)
Remove the remote named <name>.
14.122348
12.853098
1.098751
graph = Graph(client) outputs = graph.build(paths=paths, revision=revision) output_file.write( yaml.dump( ascwl( graph.ascwl(outputs=outputs), filter=lambda _, x: x is not None and x != [], basedir=os.path.dirname(getattr(output_file, 'name', '.')) or '.', ), default_flow_style=False ) )
def create(client, output_file, revision, paths)
Create a workflow description for a file.
5.722743
5.513968
1.037863
if ctx.invoked_subcommand is None: # TODO default_endpoint = config.get('core', {}).get('default') for endpoint, values in config.get('endpoints', {}).items(): # TODO is_default = default_endpoint == endpoint if not verbose: click.echo(endpoint) else: click.echo( '{endpoint}\t{url}'.format( endpoint=endpoint, url=values.get('url', '') ) )
def endpoint(ctx, config, verbose)
Manage set of platform API endpoints.
3.308828
3.215537
1.029013
if isinstance(path_or_stream, (str, Path)): return method(Path(path_or_stream).open(mode)) return method(path_or_stream) return decorator
def _wrap_path_or_stream(method, mode): # noqa: D202 def decorator(path_or_stream)
Open path with context or close stream at the end.
2.293285
2.013316
1.139059
click.secho('\n'.join(textwrap.wrap(DOCTOR_INFO)) + '\n', bold=True) from . import _checks is_ok = True for attr in _checks.__all__: is_ok &= getattr(_checks, attr)(client) if is_ok: click.secho('Everything seems to be ok.', fg='green') ctx.exit(0 if is_ok else 1)
def doctor(ctx, client)
Check your system and repository for potential problems.
4.044663
3.835418
1.054556
data = self._client.api.create_project({'name': name}) return self.Meta.model(data, client=self._client, collection=self)
def create(self, name=None, **kwargs)
Create a new project. :param name: The name of the project. :returns: An instance of the newly create project. :rtype: renku.models.projects.Project
6.267416
6.933606
0.903919
key = self.cleaned_data["key"] gpg = GPG(gnupghome=GNUPG_HOME) result = gpg.import_keys(key) if result.count == 0: raise forms.ValidationError(_("Invalid Key")) return key
def clean_key(self)
Validate the key contains an email address.
3.923631
3.225496
1.216443
self = cls() for value in values: self.add(value) return self
def from_list(cls, values)
Construct a tree from a list with paths.
3.827295
3.090738
1.238311
path = value if isinstance(value, Path) else Path(str(value)) subtree = self for part in path.parts: try: subtree = subtree[part] except KeyError: return default return subtree
def get(self, value, default=None)
Return a subtree if exists.
3.358779
2.575639
1.304056
path = value if isinstance(value, Path) else Path(str(value)) if path and path != path.parent: destination = self for part in path.parts: destination = destination.setdefault(part, DirectoryTree())
def add(self, value)
Create a safe directory from a value.
5.834664
4.7407
1.23076
default_endpoint = config.get('core', {}).get('default') project_endpoint = config.get('project', {}).get('core', {}).get('default', default_endpoint) return Endpoint( option or project_endpoint or default_endpoint, default=default_endpoint, project=project_endpoint, option=option )
def default_endpoint_from_config(config, option=None)
Return a default endpoint.
4.710428
4.495151
1.047891
if ctx.resilient_parsing: return if not value: if 'password_stdin' in ctx.params: with click.open_file('-') as fp: value = fp.read().strip('\n') else: value = click.prompt('Password', hide_input=True) click.echo(value) return value
def password_prompt(ctx, param, value)
Prompt for password if ``--password-stdin`` is not used.
2.501674
2.316766
1.079813
return value shell, path = click_completion.core.install() click.secho( '{0} completion installed in {1}'.format(shell, path), fg='green' ) ctx.exit()
def install_completion(ctx, attr, value): # pragma: no cover import click_completion.core if not value or ctx.resilient_parsing
Install completion for the current shell.
5.369786
3.952157
1.358698
if ctx.resilient_parsing: return config = ctx.obj['config'] endpoint = default_endpoint_from_config(config, option=value) if endpoint is None: raise click.UsageError('No default endpoint found.') return endpoint
def default_endpoint(ctx, param, value)
Return default endpoint if specified.
3.375913
2.974532
1.134939
try: config = ctx.obj['config'] except Exception: return endpoint = default_endpoint(ctx, param, value) if endpoint not in config.get('endpoints', {}): raise click.UsageError('Unknown endpoint: {0}'.format(endpoint)) return endpoint
def validate_endpoint(ctx, param, value)
Validate endpoint.
3.750994
3.603338
1.040978
siblings = set() for node in outputs: siblings |= graph.siblings(node) siblings = {node.path for node in siblings} missing = siblings - {node.path for node in outputs} if missing: msg = ( 'Include the files above in the command ' 'or use the --with-siblings option.' ) raise click.ClickException( 'There are missing output siblings:\n\n' '\t{0}\n\n{1}'.format( '\n\t'.join(click.style(path, fg='red') for path in missing), msg, ), ) return outputs
def check_siblings(graph, outputs)
Check that all outputs have their siblings listed.
3.628571
3.527277
1.028717
siblings = set() for node in outputs: siblings |= graph.siblings(node) return siblings
def with_siblings(graph, outputs)
Include all missing siblings.
3.820538
3.468191
1.101594
try: restore = 'LESS' not in os.environ os.environ.setdefault('LESS', '-iXFR') click.echo_via_pager(*args, **kwargs) finally: if restore: os.environ.pop('LESS', None)
def echo_via_pager(*args, **kwargs)
Display pager only if it does not fit in one terminal screen. NOTE: The feature is available only on ``less``-based pager.
5.856949
5.916894
0.989869
if args and args[0] in self.commands: args.insert(0, '') super(OptionalGroup, self).parse_args(ctx, args)
def parse_args(self, ctx, args)
Check if the first argument is an existing command.
4.631277
3.961504
1.16907
output_paths = output_paths or set() import cwltool.factory from cwltool import workflow from cwltool.context import LoadingContext, RuntimeContext from cwltool.utils import visit_class def construct_tool_object(toolpath_object, *args, **kwargs): protocol = 'file://' def addLocation(d): if 'location' not in d and 'path' in d: d['location'] = protocol + d['path'] visit_class(toolpath_object, ('File', 'Directory'), addLocation) return workflow.default_make_tool(toolpath_object, *args, **kwargs) argv = sys.argv sys.argv = ['cwltool'] # Keep all environment variables. runtime_context = RuntimeContext( kwargs={ 'rm_tmpdir': False, 'move_outputs': 'leave', 'preserve_entire_environment': True, } ) loading_context = LoadingContext( kwargs={ 'construct_tool_object': construct_tool_object, } ) factory = cwltool.factory.Factory( loading_context=loading_context, runtime_context=runtime_context, ) process = factory.make(os.path.relpath(str(output_file))) outputs = process() sys.argv = argv # Move outputs to correct location in the repository. output_dirs = process.factory.executor.output_dirs def remove_prefix(location, prefix='file://'): if location.startswith(prefix): return location[len(prefix):] return location locations = { remove_prefix(output['location']) for output in outputs.values() } with progressbar( locations, label='Moving outputs', ) as bar: for location in bar: for output_dir in output_dirs: if location.startswith(output_dir): output_path = location[len(output_dir):].lstrip( os.path.sep ) destination = client.path / output_path if destination.is_dir(): shutil.rmtree(str(destination)) destination = destination.parent shutil.move(location, str(destination)) continue unchanged_paths = client.remove_unmodified(output_paths) if unchanged_paths: click.echo( 'Unchanged files:\n\n\t{0}'.format( '\n\t'.join( click.style(path, fg='yellow') for path in unchanged_paths ) ) )
def execute(client, output_file, output_paths=None)
Run the generated workflow using cwltool library.
3.236536
3.16583
1.022334
registry_url = detect_registry_url(client, auto_login=auto_login) repo = client.repo sha = repo.rev_parse(revision).hexsha short_sha = repo.git.rev_parse(sha, short=7) image = '{registry}:{short_sha}'.format( registry=registry_url.image, short_sha=short_sha ) result = subprocess.run(['docker', 'image', 'pull', image]) if result.returncode != 0: raise click.ClickException( 'The image "{image}" was not pulled.\n\n' 'Push the repository to the server or build the image manually:\n' '\n\tdocker build -t {image} .'.format(image=image) )
def pull(client, revision, auto_login)
Pull an existing image from the project registry.
3.373173
3.34434
1.008622
from renku.models._tabulate import tabulate echo_via_pager( tabulate( records, headers=OrderedDict(( ('added', None), ('authors_csv', 'authors'), ('dataset', None), ('full_path', 'path'), )), ) )
def tabular(client, records)
Format dataset files with a tabular output. :param client: LocalClient instance. :param records: Filtered collection.
7.599191
8.322499
0.91309
from renku.models._json import dumps from renku.models._jsonld import asjsonld data = [asjsonld(record) for record in records] echo_via_pager(dumps(data, indent=2))
def jsonld(client, records)
Format dataset files as JSON-LD. :param client: LocalClient instance. :param records: Filtered collection.
5.962322
7.004234
0.851245
try: result = super().main(*args, **kwargs) return result except Exception: if HAS_SENTRY: self._handle_sentry() if not (sys.stdin.isatty() and sys.stdout.isatty()): raise self._handle_github()
def main(self, *args, **kwargs)
Catch all exceptions.
4.522756
4.440281
1.018574
from sentry_sdk import capture_exception, configure_scope from sentry_sdk.utils import capture_internal_exceptions with configure_scope() as scope: with capture_internal_exceptions(): from git import Repo from renku.cli._git import get_git_home from renku.models.datasets import Author user = Author.from_git(Repo(get_git_home())) scope.user = {'name': user.name, 'email': user.email} event_id = capture_exception() click.echo( _BUG + 'Recorded in Sentry with ID: {0}\n'.format(event_id), err=True, ) raise
def _handle_sentry(self)
Handle exceptions using Sentry.
4.68386
4.478204
1.045924
value = click.prompt( _BUG + click.style( '1. Open an issue by typing "open";\n', fg='green', ) + click.style( '2. Print human-readable information by typing ' '"print";\n', fg='yellow', ) + click.style( '3. See the full traceback without submitting details ' '(default: "ignore").\n\n', fg='red', ) + 'Please select an action by typing its name', type=click.Choice([ 'open', 'print', 'ignore', ], ), default='ignore', ) getattr(self, '_process_' + value)()
def _handle_github(self)
Handle exception and submit it as GitHub issue.
6.105656
5.778129
1.056684
from renku import __version__ re_paths = r'(' + r'|'.join([path or os.getcwd() for path in sys.path]) + r')' tb = re.sub(re_paths, '[...]', traceback.format_exc(limit=limit)) return ( '## Describe the bug\nA clear and concise description.\n\n' '## Details\n' '*Please verify and redact the details.*\n\n' '**Renku version:** ' + __version__ + '\n' '**OS:** ' + platform.system() + ' (' + platform.version() + ')\n' '**Python:** ' + platform.python_version() + '\n\n' '### Traceback\n\n```\n' + tb + '```\n\n' '## Additional context\nAdd any other context about the problem.' )
def _format_issue_body(self, limit=-5)
Return formatted body.
4.544116
4.414502
1.029361
query = urlencode({ 'title': self._format_issue_title(), 'body': self._format_issue_body(), }) return self.REPO_URL + self.ISSUE_SUFFIX + '?' + query
def _format_issue_url(self)
Format full issue URL.
3.633495
3.344566
1.086388
click.launch(self._format_issue_url()) if not click.confirm('Did it work?', default=True): click.echo() self._process_print() click.secho( '\nOpen the line manually and copy the text above\n', fg='yellow' ) click.secho( ' ' + self.REPO_URL + self.ISSUE_SUFFIX + '\n', bold=True )
def _process_open(self)
Open link in a browser.
6.821923
6.335308
1.07681
if method is None: return functools.partial( pass_local_client, clean=clean, up_to_date=up_to_date, commit=commit, ignore_std_streams=ignore_std_streams, lock=lock, ) def new_func(*args, **kwargs): ctx = click.get_current_context() client = ctx.ensure_object(LocalClient) stack = contextlib.ExitStack() # Handle --isolation option: if get_git_isolation(): client = stack.enter_context(client.worktree()) transaction = client.transaction( clean=clean, up_to_date=up_to_date, commit=commit, ignore_std_streams=ignore_std_streams ) stack.enter_context(transaction) if lock or (lock is None and commit): stack.enter_context(client.lock) with stack: result = ctx.invoke(method, client, *args, **kwargs) return result return functools.update_wrapper(new_func, method)
def pass_local_client( method=None, clean=None, up_to_date=None, commit=None, ignore_std_streams=True, lock=None, )
Pass client from the current context to the decorated command.
2.588421
2.479141
1.04408
# FIXME add device number too standard_inos = {} for stream in streams: try: stream_stat = os.fstat(getattr(sys, stream).fileno()) key = stream_stat.st_dev, stream_stat.st_ino standard_inos[key] = stream except Exception: # FIXME UnsupportedOperation pass # FIXME if not getattr(sys, stream).istty() def stream_inos(paths): for path in paths: try: stat = os.stat(path) key = (stat.st_dev, stat.st_ino) if key in standard_inos: yield standard_inos[key], path except FileNotFoundError: # pragma: no cover pass return dict(stream_inos(lookup_paths)) if standard_inos else {}
def _mapped_std_streams(lookup_paths, streams=('stdin', 'stdout', 'stderr'))
Get a mapping of standard streams to given paths.
4.027337
3.906786
1.030857
for stream_name in ('stdout', 'stderr'): stream = mapped_streams.get(stream_name) if not stream: continue path = os.path.relpath(stream, start=repo.working_dir) if (path, 0) not in repo.index.entries: os.remove(stream) else: blob = repo.index.entries[(path, 0)].to_blob(repo) with open(path, 'wb') as fp: fp.write(blob.data_stream.read())
def _clean_streams(repo, mapped_streams)
Clean mapped standard streams.
2.90496
2.779858
1.045003
for path in paths: path_ = Path(path) if path_.is_dir(): for expanded in path_.rglob('*'): yield str(expanded) else: yield path
def _expand_directories(paths)
Expand directory with all files it contains.
3.012389
2.998883
1.004504
ctx.meta['renku.datasets.datadir'] = datadir if ctx.invoked_subcommand is not None: return if revision is None: datasets = client.datasets.values() else: datasets = client.datasets_from_commit(client.repo.commit(revision)) DATASETS_FORMATS[format](client, datasets)
def dataset(ctx, client, revision, datadir, format)
Handle datasets.
3.929408
3.854759
1.019365
from renku.models.datasets import Author with client.with_dataset(name=name) as dataset: click.echo('Creating a dataset ... ', nl=False) author = Author.from_git(client.repo) if author not in dataset.authors: dataset.authors.append(author) click.secho('OK', fg='green')
def create(client, name)
Create an empty dataset in the current repo.
4.225483
3.939872
1.072492
try: with client.with_dataset(name=name) as dataset: target = target if target else None with progressbar(urls, label='Adding data to dataset') as bar: for url in bar: client.add_data_to_dataset( dataset, url, link=link, target=target, relative_to=relative_to, force=force, ) except FileNotFoundError: raise BadParameter('Could not process {0}'.format(url))
def add(client, name, urls, link, relative_to, target, force)
Add data to a dataset.
3.648012
3.407764
1.0705
records = _filter( client, names=names, authors=authors, include=include, exclude=exclude ) DATASET_FILES_FORMATS[format](client, records)
def ls_files(client, names, authors, include, exclude, format)
List files in dataset.
5.907141
5.449562
1.083966
dataset = client.load_dataset(name=name) records = _filter( client, names=[dataset.name], include=include, exclude=exclude ) if not yes and records: prompt_text = ( 'You are about to remove ' 'following from "{0}" dataset.\n'.format(dataset.name) + '\n'.join([str(record.full_path) for record in records]) + '\nDo you wish to continue?' ) click.confirm(WARNING + prompt_text, abort=True) if records: for item in records: dataset.unlink_file(item.path) dataset.to_yaml() click.secho('OK', fg='green')
def unlink(client, name, include, exclude, yes)
Remove matching files from a dataset.
4.063156
3.87134
1.049548
from renku.models.refs import LinkReference datasets = {name: client.dataset_path(name) for name in names} if not datasets: raise click.BadParameter( 'use dataset name or identifier', param_hint='names' ) unknown = [ name for name, path in datasets.items() if not path or not path.exists() ] if unknown: raise click.BadParameter( 'unknown datasets ' + ', '.join(unknown), param_hint='names' ) datasets = set(datasets.values()) with progressbar( datasets, label='Removing metadata files'.ljust(30), item_show_func=lambda item: str(item) if item else '', ) as bar: for dataset in bar: if dataset and dataset.exists(): dataset.unlink() with progressbar( list(LinkReference.iter_items(client, common_path='datasets')), label='Removing aliases'.ljust(30), item_show_func=lambda item: item.name if item else '', ) as bar: for ref in bar: if ref.reference in datasets: ref.delete() click.secho('OK', fg='green')
def remove(client, names)
Delete a dataset.
3.089782
3.007502
1.027358
if exclude is not None and exclude: for pattern in exclude: if file_path.match(pattern): return False if include is not None and include: for pattern in include: if file_path.match(pattern): return True return False return True
def _include_exclude(file_path, include=None, exclude=None)
Check if file matches one of include filters and not in exclude filter. :param file_path: Path to the file. :param include: Tuple containing patterns to which include from result. :param exclude: Tuple containing patterns to which exclude from result.
2.138981
2.55958
0.835677
if isinstance(authors, str): authors = set(authors.split(',')) if isinstance(authors, list) or isinstance(authors, tuple): authors = set(authors) records = [] for path_, dataset in client.datasets.items(): if not names or dataset.name in names: for file_ in dataset.files.values(): file_.dataset = dataset.name path_ = file_.full_path.relative_to(client.path) match = _include_exclude(path_, include, exclude) if authors: match = match and authors.issubset({ author.name for author in file_.authors }) if match: records.append(file_) return sorted(records, key=lambda file_: file_.added)
def _filter(client, names=None, authors=None, include=None, exclude=None)
Filter dataset files by specified filters. :param names: Filter by specified dataset names. :param authors: Filter by authors. :param include: Include files matching file pattern. :param exclude: Exclude files matching file pattern.
3.102346
3.178143
0.976151
parts = key.split('.') if len(parts) > 1: return 'renku "{0}"'.format(parts[0]), '.'.join(parts[1:]) return 'renku', key
def _split_section_and_key(key)
Return a tuple with config section and key.
4.334873
3.78613
1.144935
if value is None: cfg = client.repo.config_reader() click.echo(cfg.get_value(*_split_section_and_key(key))) else: with client.repo.config_writer() as cfg: section, config_key = _split_section_and_key(key) cfg.set_value(section, config_key, value) click.echo(value)
def config(client, key, value)
Get and set Renku repository and global options.
3.134072
2.808821
1.115796
u = parse.urlparse(url) is_git = False if os.path.splitext(u.path)[1] == '.git': is_git = True elif u.scheme in ('', 'file'): from git import InvalidGitRepositoryError, Repo try: Repo(u.path, search_parent_directories=True) is_git = True except InvalidGitRepositoryError: is_git = False return is_git
def check_for_git_repo(url)
Check if a url points to a git repository.
2.416472
2.31431
1.044143
access_token = config['endpoints'][endpoint]['token']['access_token'] click.echo('export {0}={1}'.format('RENKU_ENDPOINT', endpoint)) click.echo('export {0}={1}'.format('RENKU_ACCESS_TOKEN', access_token)) click.echo('# Run this command to configure your Renku client:') click.echo('# eval "$(renku env)"')
def env(config, endpoint)
Print RENKU environment variables. Run this command to configure your Renku client: $ eval "$(renku env)"
3.914418
3.285654
1.191367
from ._config import APP_NAME if VersionCache.load(APP_NAME).is_fresh: return from pkg_resources import parse_version from renku.version import __version__ version = parse_version(__version__) allow_prereleases = version.is_prerelease latest_version = find_latest_version( 'renku', allow_prereleases=allow_prereleases ) if version < latest_version: click.secho( 'You are using renku version {version}, however version ' '{latest_version} is available.\n' 'You should consider upgrading ...'.format( version=__version__, latest_version=latest_version, ), fg='yellow', bold=True, ) VersionCache(pypi_version=str(latest_version)).dump(APP_NAME)
def _check_version()
Check renku version.
3.533548
3.176597
1.112369
if ctx.resilient_parsing: return if not value and ctx.invoked_subcommand != 'run': ctx.call_on_close(_check_version)
def check_version(ctx, param, value)
Check for latest version of renku on PyPI.
4.417431
4.014966
1.100241
conf.register_opts(service_opts) # Set cotyledon options from oslo config options _load_service_manager_options(service_manager, conf) def _service_manager_reload(): _configfile_reload(conf, reload_method) _load_service_manager_options(service_manager, conf) if os.name != "posix": # NOTE(sileht): reloading can't be supported oslo.config is not pickle # But we don't care SIGHUP is not support on window return service_manager.register_hooks( on_new_worker=functools.partial( _new_worker_hook, conf, reload_method), on_reload=_service_manager_reload)
def setup(service_manager, conf, reload_method="reload")
Load services configuration from oslo config object. It reads ServiceManager and Service configuration options from an oslo_config.ConfigOpts() object. Also It registers a ServiceManager hook to reload the configuration file on reload in the master process and in all children. And then when each child start or reload, the configuration options are logged if the oslo config option 'log_options' is True. On children, the configuration file is reloaded before the running the application reload method. Options currently supported on ServiceManager and Service: * graceful_shutdown_timeout :param service_manager: ServiceManager instance :type service_manager: cotyledon.ServiceManager :param conf: Oslo Config object :type conf: oslo_config.ConfigOpts() :param reload_method: reload or mutate the config files :type reload_method: str "reload/mutate"
5.888076
5.839992
1.008234
if on_terminate is not None: _utils.check_callable(on_terminate, 'on_terminate') self._hooks['terminate'].append(on_terminate) if on_reload is not None: _utils.check_callable(on_reload, 'on_reload') self._hooks['reload'].append(on_reload) if on_new_worker is not None: _utils.check_callable(on_new_worker, 'on_new_worker') self._hooks['new_worker'].append(on_new_worker) if on_dead_worker is not None: _utils.check_callable(on_dead_worker, 'on_dead_worker') self._hooks['dead_worker'].append(on_dead_worker)
def register_hooks(self, on_terminate=None, on_reload=None, on_new_worker=None, on_dead_worker=None)
Register hook methods This can be callable multiple times to add more hooks, hooks are executed in added order. If a hook raised an exception, next hooks will be not executed. :param on_terminate: method called on SIGTERM :type on_terminate: callable() :param on_reload: method called on SIGHUP :type on_reload: callable() :param on_new_worker: method called in the child process when this one is ready :type on_new_worker: callable(service_id, worker_id, service_obj) :param on_new_worker: method called when a child died :type on_new_worker: callable(service_id, worker_id, exit_code) If window support is planned, hooks callable must support to be pickle.pickle(). See CPython multiprocessing module documentation for more detail.
1.297145
1.385105
0.936496
_utils.check_callable(service, 'service') _utils.check_workers(workers, 1) service_id = uuid.uuid4() self._services[service_id] = _service.ServiceConfig( service_id, service, workers, args, kwargs) return service_id
def add(self, service, workers=1, args=None, kwargs=None)
Add a new service to the ServiceManager :param service: callable that return an instance of :py:class:`Service` :type service: callable :param workers: number of processes/workers for this service :type workers: int :param args: additional positional arguments for this service :type args: tuple :param kwargs: additional keywoard arguments for this service :type kwargs: dict :return: a service id :rtype: uuid.uuid4
3.396178
3.353691
1.012669
try: sc = self._services[service_id] except KeyError: raise ValueError("%s service id doesn't exists" % service_id) else: _utils.check_workers(workers, minimum=(1 - sc.workers)) sc.workers = workers # Reset forktimes to respawn services quickly self._forktimes = []
def reconfigure(self, service_id, workers)
Reconfigure a service registered in ServiceManager :param service_id: the service id :type service_id: uuid.uuid4 :param workers: number of processes/workers for this service :type workers: int :raises: ValueError
9.239957
8.780897
1.052279
self._systemd_notify_once() self._child_supervisor = _utils.spawn(self._child_supervisor_thread) self._wait_forever()
def run(self)
Start and supervise services workers This method will start and supervise all children processes until the master process asked to shutdown by a SIGTERM. All spawned processes are part of the same unix process group.
21.795507
12.761602
1.707897
self._run_hooks('reload') # Reset forktimes to respawn services quickly self._forktimes = [] signal.signal(signal.SIGHUP, signal.SIG_IGN) os.killpg(0, signal.SIGHUP) signal.signal(signal.SIGHUP, self._signal_catcher)
def _reload(self)
reload all children posix only
6.849575
6.536095
1.047961
for service_id in list(self._running_services.keys()): # We copy the list to clean the orignal one processes = list(self._running_services[service_id].items()) for process, worker_id in processes: if not process.is_alive(): self._run_hooks('dead_worker', service_id, worker_id, process.exitcode) if process.exitcode < 0: sig = _utils.signal_to_name(process.exitcode) LOG.info('Child %(pid)d killed by signal %(sig)s', dict(pid=process.pid, sig=sig)) else: LOG.info('Child %(pid)d exited with status %(code)d', dict(pid=process.pid, code=process.exitcode)) del self._running_services[service_id][process] return service_id, worker_id
def _get_last_worker_died(self)
Return the last died worker information or None
3.109863
3.041554
1.022459
notify_socket = os.getenv('NOTIFY_SOCKET') if notify_socket: if notify_socket.startswith('@'): # abstract namespace socket notify_socket = '\0%s' % notify_socket[1:] sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) with contextlib.closing(sock): try: sock.connect(notify_socket) sock.sendall(b'READY=1') del os.environ['NOTIFY_SOCKET'] except EnvironmentError: LOG.debug("Systemd notification failed", exc_info=True)
def _systemd_notify_once()
Send notification once to Systemd that service is ready. Systemd sets NOTIFY_SOCKET environment variable with the name of the socket listening for notifications from services. This method removes the NOTIFY_SOCKET environment variable to ensure notification is sent only once.
3.259096
2.731769
1.193035
theseargtypes = [T.TypeFactory(a) for a in argtypes] thesekwargtypes = {k : T.TypeFactory(a) for k,a in kwargtypes.items()} def _decorator(func): # @accepts decorator f = func.__wrapped__ if hasattr(func, "__wrapped__") else func try: argtypes = inspect.getcallargs(f, *theseargtypes, **thesekwargtypes) argtypes = {k: v if issubclass(type(v), T.Type) else T.Constant(v) for k,v in argtypes.items()} except TypeError: raise E.ArgumentTypeError("Invalid argument specification to @accepts in %s" % func.__qualname__) # Support keyword arguments. Find the name of the **kwargs # parameter (not necessarily "kwargs") and set it to be a # dictionary of unspecified types. kwargname = U.get_func_kwargs_name(func) if kwargname in argtypes.keys(): argtypes[kwargname] = T.KeywordArguments() # Support positional arguments. Find the name of the *args # parameter (not necessarily "args") and set it to be an # unspecified type. posargname = U.get_func_posargs_name(func) if posargname in argtypes.keys(): argtypes[posargname] = T.PositionalArguments() # TODO merge with actual argument names if U.has_fun_prop(func, "argtypes"): raise ValueError("Cannot set argument types twice") U.set_fun_prop(func, "argtypes", argtypes) return _wrap(func) return _decorator
def accepts(*argtypes, **kwargtypes)
A function decorator to specify argument types of the function. Types may be specified either in the order that they appear in the function or via keyword arguments (just as if you were calling the function). Example usage: | @accepts(Positive0) | def square_root(x): | ...
3.506878
3.792213
0.924758
returntype = T.TypeFactory(returntype) def _decorator(func): # @returns decorator if U.has_fun_prop(func, "returntype"): raise ValueError("Cannot set return type twice") U.set_fun_prop(func, "returntype", returntype) return _wrap(func) return _decorator
def returns(returntype)
A function decorator to specify return type of the function. Example usage: | @accepts(Positive0) | @returns(Positive0) | def square_root(x): | ...
5.406429
7.376678
0.732908
def _decorator(func, condition=condition): # @requires decorator if U.has_fun_prop(func, "requires"): if not isinstance(U.get_fun_prop(func, "requires"), list): raise E.InternalError("Invalid requires structure") base_requires = U.get_fun_prop(func, "requires") else: base_requires = [] base_condition = condition if "<-->" in condition: condition_parts = condition.split("<-->") assert len(condition_parts) == 2, "Only one implies per statement in %s condition %s" % (condition, func.__qualname__) condition = "((%s) if (%s) else True) and ((%s) if (%s) else True)" % (condition_parts[1], condition_parts[0], condition_parts[0], condition_parts[1]) elif "-->" in condition: condition_parts = condition.split("-->") assert len(condition_parts) == 2, "Only one implies per statement in %s condition %s" % (base_condition, func.__qualname__) condition = "(%s) if (%s) else True" % (condition_parts[1], condition_parts[0]) U.set_fun_prop(func, "requires", [(compile(condition, '', 'eval'), condition)]+base_requires) return _wrap(func) return _decorator
def requires(condition)
A function decorator to specify entry conditions for the function. Entry conditions should be a string, which will be evaluated as Python code. Arguments of the function may be accessed by their name. The special syntax "-->" and "<-->" may be used to mean "if" and "if and only if", respectively. They may not be contained within sub-expressions. Note that globals will not be included by default, and must be manually included using the "namespace" setting, set via settings.Settings. Example usage: | @requires("x >= y") | def subtract(x, y): | ... | @accepts(l=List(Number), log_transform=Boolean) | @requires("log_transform == True --> min(l) > 0") | def process_list(l, log_transform=False): | ...
2.952249
3.072345
0.96091
def _decorator(func, condition=condition): # @ensures decorator if U.has_fun_prop(func, "ensures"): if not isinstance(U.get_fun_prop(func, "ensures"), list): raise E.InternalError("Invalid ensures strucutre") ensures_statements = U.get_fun_prop(func, "ensures") else: ensures_statements = [] e = condition.replace("return", "__RETURN__") if "<-->" in e: e_parts = e.split("<-->") assert len(e_parts) == 2, "Only one implies per statement in %s condition %s" % (ensurement, func.__qualname__) e = "((%s) if (%s) else True) and ((%s) if (%s) else True)" % (e_parts[1], e_parts[0], e_parts[0], e_parts[1]) assert "-->" not in e, "Only one implies per statement in %s condition %s" % (ensurement, func.__qualname__) if "-->" in e: e_parts = e.split("-->") assert len(e_parts) == 2, "Only one implies per statement in %s condition %s" % (ensurement, func.__qualname__) e = "(%s) if (%s) else True" % (e_parts[1], e_parts[0]) _bt = "__BACKTICK__" _dbt = "__DOUBLEBACKTICK__" if "``" in e: e = e.replace("``", _dbt) e = e.replace("`", _bt) compiled = compile(e, '', 'eval') U.set_fun_prop(func, "ensures", [(2, compiled, condition)]+ensures_statements) elif "`" in e: e = e.replace("`", _bt) compiled = compile(e, '', 'eval') U.set_fun_prop(func, "ensures", [(1, compiled, condition)]+ensures_statements) else: compiled = compile(e, '', 'eval') U.set_fun_prop(func, "ensures", [(0, compiled, condition)]+ensures_statements) return _wrap(func) return _decorator
def ensures(condition)
A function decorator to specify exit conditions for the function. Exit conditions should be a string, which will be evaluated as Python code. Arguments of the function may be accessed by their name. The return value of the function may be accessed using the special variable name "return". The special syntax "-->" and "<-->" may be used to mean "if" and "if and only if", respectively. They may not be contained within sub-expressions. Values may be compared to previous executions of the function by including a "`" or "``" after them to check for higher order properties of the function. Note that globals will not be included by default, and must be manually included using the "namespace" setting, set via settings.Settings. Example usage: | @ensures("lower_bound <= return <= upper_bound") | def search(lower_bound, upper_bound): | ... | @ensures("x <= x` --> return <= return`") | def monotonic(x): | ...
2.590242
2.57165
1.00723
for methname in dir(cls): meth = getattr(cls, methname) if U.has_fun_prop(meth, "argtypes"): argtypes = U.get_fun_prop(meth, "argtypes") if "self" in argtypes and isinstance(argtypes["self"], T.Self): argtypes["self"] = T.Generic(cls) U.set_fun_prop(meth, "argtypes", argtypes) # TODO Not necessary because of reference if U.has_fun_prop(meth, "returntype"): if isinstance(U.get_fun_prop(meth, "returntype"), T.Self): U.set_fun_prop(meth, "returntype", T.Generic(cls)) return cls
def paranoidclass(cls)
A class decorator to specify that class methods contain paranoid decorators. Example usage: | @paranoidclass | class Point: | def __init__(self, x, y): | ... | @returns(Number) | def distance_from_zero(): | ...
2.809511
3.274522
0.857991
def _decorator(func): for k,v in kwargs.items(): Settings._set(k, v, function=func) return _wrap(func) return _decorator
def paranoidconfig(**kwargs)
A function decorator to set a local setting. Settings may be set either globally (using settings.Settings.set()) or locally using this decorator. The setting name should be passed as a keyword argument, and the value to assign the setting should be passed as the value. See settings.Settings for the different settings which can be set. Example usage: | @returns(Number) | @paranoidconfig(enabled=False) | def slow_function(): | ...
6.425134
7.682377
0.836347
if v is None: return Nothing() elif issubclass(type(v), Type): return v elif issubclass(v, Type): return v() elif issubclass(type(v), type): return Generic(v) else: raise InvalidTypeError("Invalid type %s" % v)
def TypeFactory(v)
Ensure `v` is a valid Type. This function is used to convert user-specified types into internal types for the verification engine. It allows Type subclasses, Type subclass instances, Python type, and user-defined classes to be passed. Returns an instance of the type of `v`. Users should never access this function directly.
3.287854
2.935529
1.120021
proofs = [] try: test = profile.items() except: return proofs if 'account' in profile: accounts = profile['account'] else: return proofs for account in accounts: # skip if proof service is not supported if 'service' in account and account['service'].lower() not in SITES: continue if 'proofType' in account and account['proofType'] == "http": try: proof = {"service": account['service'], "proof_url": account['proofUrl'], "identifier": account['identifier'], "valid": False} if is_valid_proof(account['service'], account['identifier'], fqdn, account['proofUrl'], address = address): proof["valid"] = True proofs.append(proof) except Exception as e: pass return proofs
def profile_v3_to_proofs(profile, fqdn, refresh=False, address = None)
Convert profile format v3 to proofs
3.415438
3.360594
1.01632
if not hasattr(f, _FUN_PROPS): return False if not isinstance(getattr(f, _FUN_PROPS), dict): return False if k not in getattr(f, _FUN_PROPS).keys(): return False return True
def has_fun_prop(f, k)
Test whether function `f` has property `k`. We define properties as annotations added to a function throughout the process of defining a function for verification, e.g. the argument types. If `f` is an unannotated function, this returns False. If `f` has the property named `k`, it returns True. Otherwise, it returns False. Users should never access this function directly.
2.423411
2.58136
0.938812
if not has_fun_prop(f, k): raise InternalError("Function %s has no property %s" % (str(f), k)) return getattr(f, _FUN_PROPS)[k]
def get_fun_prop(f, k)
Get the value of property `k` from function `f`. We define properties as annotations added to a function throughout the process of defining a function for verification, e.g. the argument types. If `f` does not have a property named `k`, this throws an error. If `f` has the property named `k`, it returns the value of it. Users should never access this function directly.
3.547002
4.178629
0.848844
if not hasattr(f, _FUN_PROPS): setattr(f, _FUN_PROPS, {}) if not isinstance(getattr(f, _FUN_PROPS), dict): raise InternalError("Invalid properties dictionary for %s" % str(f)) getattr(f, _FUN_PROPS)[k] = v
def set_fun_prop(f, k, v)
Set the value of property `k` to be `v` in function `f`. We define properties as annotations added to a function throughout the process of defining a function for verification, e.g. the argument types. This sets function `f`'s property named `k` to be value `v`. Users should never access this function directly.
2.53734
2.747456
0.923524