Dataset schema:
  _id               stringlengths (2–7)
  title             stringlengths (1–88)
  partition         stringclasses (3 values)
  text              stringlengths (75–19.8k)
  language          stringclasses (1 value)
  meta_information  dict
q15200
find_release
train
def find_release(package, releases, dependencies=None):
    """Return the best release."""
    dependencies = dependencies if dependencies is not None else {}

    for release in releases:
        url = release['url']
        old_priority = dependencies.get(package, {}).get('priority', 0)

        for suffix, priority in SUFFIXES.items():
            if url.endswith(suffix):
                if old_priority < priority:
                    sha256 = release['digests']['sha256']
                    dependencies[package] = {
                        'package': package,
                        'url': url,
                        'sha256': sha256,
                        'priority': priority,
                    }

    return dependencies[package]
python
{ "resource": "" }
q15201
_safe_path
train
def _safe_path(filepath, can_be_cwl=False):
    """Check if the path should be used in output."""
    # Should not be in ignore paths.
    if filepath in {'.gitignore', '.gitattributes'}:
        return False

    # Ignore everything in .renku ...
    if filepath.startswith('.renku'):
        # ... unless it can be a CWL.
        if can_be_cwl and filepath.endswith('.cwl'):
            return True
        return False

    return True
python
{ "resource": "" }
q15202
tabular
train
def tabular(client, datasets):
    """Format datasets with a tabular output."""
    from renku.models._tabulate import tabulate

    click.echo(
        tabulate(
            datasets,
            headers=OrderedDict((
                ('short_id', 'id'),
                ('name', None),
                ('created', None),
                ('authors_csv', 'authors'),
            )),
        )
    )
python
{ "resource": "" }
q15203
jsonld
train
def jsonld(client, datasets):
    """Format datasets as JSON-LD."""
    from renku.models._json import dumps
    from renku.models._jsonld import asjsonld

    data = [
        asjsonld(
            dataset,
            basedir=os.path.relpath(
                '.', start=str(dataset.__reference__.parent)
            )
        ) for dataset in datasets
    ]
    click.echo(dumps(data, indent=2))
python
{ "resource": "" }
q15204
_format_sha1
train
def _format_sha1(graph, node):
    """Return formatted text with the submodule information."""
    try:
        submodules = node.submodules

        if submodules:
            submodule = ':'.join(submodules)
            return click.style(submodule, fg='green') + '@' + click.style(
                node.commit.hexsha[:8], fg='yellow'
            )
    except KeyError:
        pass

    return click.style(node.commit.hexsha[:8], fg='yellow')
python
{ "resource": "" }
q15205
convert_arguments
train
def convert_arguments(value):
    """Convert arguments from various input formats."""
    if isinstance(value, (list, tuple)):
        return [
            CommandLineBinding(**item) if isinstance(item, dict) else item
            for item in value
        ]
    return shlex.split(value)
python
{ "resource": "" }
q15206
cli
train
def cli(ctx, path, renku_home, use_external_storage):
    """Check common Renku commands used in various situations."""
    ctx.obj = LocalClient(
        path=path,
        renku_home=renku_home,
        use_external_storage=use_external_storage,
    )
python
{ "resource": "" }
q15207
update
train
def update(client, revision, no_output, siblings, paths):
    """Update existing files by rerunning their outdated workflow."""
    graph = Graph(client)
    outputs = graph.build(revision=revision, can_be_cwl=no_output, paths=paths)
    outputs = {node for node in outputs if graph.need_update(node)}

    if not outputs:
        click.secho(
            'All files were generated from the latest inputs.', fg='green'
        )
        sys.exit(0)

    # Check or extend siblings of outputs.
    outputs = siblings(graph, outputs)
    output_paths = {node.path for node in outputs if _safe_path(node.path)}

    # Get all clean nodes.
    input_paths = {node.path for node in graph.nodes} - output_paths

    # Store the generated workflow used for updating paths.
    import yaml

    output_file = client.workflow_path / '{0}.cwl'.format(uuid.uuid4().hex)
    workflow = graph.ascwl(
        input_paths=input_paths,
        output_paths=output_paths,
        outputs=outputs,
    )

    # Make sure all inputs are pulled from a storage.
    client.pull_paths_from_storage(
        *(path for _, path in workflow.iter_input_files(client.workflow_path))
    )

    with output_file.open('w') as f:
        f.write(
            yaml.dump(
                ascwl(
                    workflow,
                    filter=lambda _, x: x is not None,
                    basedir=client.workflow_path,
                ),
                default_flow_style=False
            )
        )

    from ._cwl import execute
    execute(client, output_file, output_paths=output_paths)
python
{ "resource": "" }
q15208
config_path
train
def config_path(path=None, final=False):
    """Return config path."""
    if final and path:
        return path
    if path is None:
        path = default_config_dir()
    try:
        os.makedirs(path)
    except OSError as e:  # pragma: no cover
        if e.errno != errno.EEXIST:
            raise
    return os.path.join(path, 'config.yml')
python
{ "resource": "" }
q15209
read_config
train
def read_config(path=None, final=False):
    """Read Renku configuration."""
    try:
        with open(config_path(path, final=final), 'r') as configfile:
            return yaml.safe_load(configfile) or {}
    except FileNotFoundError:
        return {}
python
{ "resource": "" }
q15210
write_config
train
def write_config(config, path, final=False):
    """Write Renku configuration."""
    with open(config_path(path, final=final), 'w+') as configfile:
        yaml.dump(config, configfile, default_flow_style=False)
python
{ "resource": "" }
q15211
with_config
train
def with_config(f):
    """Add config to function."""
    # keep it.
    @click.pass_context
    def new_func(ctx, *args, **kwargs):
        # Invoked with custom config:
        if 'config' in kwargs:
            return ctx.invoke(f, *args, **kwargs)

        if ctx.obj is None:
            ctx.obj = {}
        config = ctx.obj['config']

        project_enabled = not ctx.obj.get('no_project', False)
        project_config_path = get_project_config_path()
        if project_enabled and project_config_path:
            project_config = read_config(project_config_path)
            config['project'] = project_config

        result = ctx.invoke(f, config, *args, **kwargs)

        project_config = config.pop('project', None)
        if project_config:
            if not project_config_path:
                raise RuntimeError('Invalid config update')
            write_config(project_config, path=project_config_path)

        write_config(config, path=ctx.obj['config_path'])
        if project_config is not None:
            config['project'] = project_config
        return result

    return update_wrapper(new_func, f)
python
{ "resource": "" }
q15212
create_project_config_path
train
def create_project_config_path(
    path, mode=0o777, parents=False, exist_ok=False
):
    """Create new project configuration folder."""
    # FIXME check default directory mode
    project_path = Path(path).absolute().joinpath(RENKU_HOME)
    project_path.mkdir(mode=mode, parents=parents, exist_ok=exist_ok)
    return str(project_path)
python
{ "resource": "" }
q15213
get_project_config_path
train
def get_project_config_path(path=None):
    """Return the project configuration folder if it exists."""
    project_path = Path(path or '.').absolute().joinpath(RENKU_HOME)
    if project_path.exists() and project_path.is_dir():
        return str(project_path)
python
{ "resource": "" }
q15214
find_project_config_path
train
def find_project_config_path(path=None):
    """Find project config path."""
    path = Path(path) if path else Path.cwd()
    abspath = path.absolute()

    project_path = get_project_config_path(abspath)
    if project_path:
        return project_path

    for parent in abspath.parents:
        project_path = get_project_config_path(parent)
        if project_path:
            return project_path
python
{ "resource": "" }
q15215
_nodes
train
def _nodes(output, parent=None):
    """Yield nodes from entities."""
    # NOTE refactor so all outputs behave the same
    entity = getattr(output, 'entity', output)
    if isinstance(entity, Collection):
        for member in entity.members:
            if parent is not None:
                member = attr.evolve(member, parent=parent)
            yield from _nodes(member)
        yield output
    else:
        yield output
python
{ "resource": "" }
q15216
mapped
train
def mapped(cls, key='id', **kwargs):
    """Create list of instances from a mapping."""
    kwargs.setdefault('metadata', {})
    kwargs['metadata']['jsonldPredicate'] = {'mapSubject': key}
    kwargs.setdefault('default', attr.Factory(list))

    def converter(value):
        """Convert mapping to a list of instances."""
        if isinstance(value, dict):
            result = []
            for k, v in iteritems(value):
                if not hasattr(cls, 'from_cwl'):
                    vv = dict(v)
                    vv[key] = k
                else:
                    vv = attr.evolve(cls.from_cwl(v), **{key: k})
                result.append(vv)
        else:
            result = value

        def fix_keys(data):
            """Fix names of keys."""
            for a in fields(cls):
                a_name = a.name.rstrip('_')
                if a_name in data:
                    yield a.name, data[a_name]

        return [
            cls(**{kk: vv for kk, vv in fix_keys(v)})
            if not isinstance(v, cls) else v for v in result
        ]

    kwargs['converter'] = converter
    return attr.ib(**kwargs)
python
{ "resource": "" }
q15217
CWLClass.from_cwl
train
def from_cwl(cls, data, __reference__=None):
    """Return an instance from CWL data."""
    class_name = data.get('class', None)
    cls = cls.registry.get(class_name, cls)

    if __reference__:
        with with_reference(__reference__):
            self = cls(
                **{k: v for k, v in iteritems(data) if k != 'class'}
            )
    else:
        self = cls(**{k: v for k, v in iteritems(data) if k != 'class'})
    return self
python
{ "resource": "" }
q15218
template
train
def template(client, force):
    """Render templated configuration files."""
    import pkg_resources

    # create the templated files
    for tpl_file in CI_TEMPLATES:
        tpl_path = client.path / tpl_file
        with pkg_resources.resource_stream(__name__, tpl_file) as tpl:
            content = tpl.read()
            if not force and tpl_path.exists():
                click.confirm(
                    'Do you want to override "{tpl_file}"'.format(
                        tpl_file=tpl_file
                    ),
                    abort=True,
                )
            with tpl_path.open('wb') as dest:
                dest.write(content)
python
{ "resource": "" }
q15219
rerun
train
def rerun(client, run, job):
    """Re-run existing workflow or tool using CWL runner."""
    from renku.models.provenance import ProcessRun

    activity = client.process_commit()

    if not isinstance(activity, ProcessRun):
        click.secho('No tool was found.', fg='red', file=sys.stderr)
        return

    try:
        args = ['cwl-runner', activity.path]
        if job:
            job_file = tempfile.NamedTemporaryFile(
                suffix='.yml', dir=os.getcwd(), delete=False
            )
            args.append(job_file.name)
            with job_file as fp:
                yaml.dump(yaml.safe_load(job), stream=fp, encoding='utf-8')
        if run:
            return call(args, cwd=os.getcwd())
    finally:
        if job:
            os.unlink(job_file.name)
python
{ "resource": "" }
q15220
_format_default
train
def _format_default(client, value):
    """Format default values."""
    if isinstance(value, File):
        return os.path.relpath(
            str((client.workflow_path / value.path).resolve())
        )
    return value
python
{ "resource": "" }
q15221
show_inputs
train
def show_inputs(client, workflow):
    """Show workflow inputs and exit."""
    for input_ in workflow.inputs:
        click.echo(
            '{id}: {default}'.format(
                id=input_.id,
                default=_format_default(client, input_.default),
            )
        )
    sys.exit(0)
python
{ "resource": "" }
q15222
edit_inputs
train
def edit_inputs(client, workflow):
    """Edit workflow inputs."""
    types = {
        'int': int,
        'string': str,
        'File': lambda x: File(path=Path(x).resolve()),
    }
    for input_ in workflow.inputs:
        convert = types.get(input_.type, str)
        input_.default = convert(
            click.prompt(
                '{0.id} ({0.type})'.format(input_),
                default=_format_default(client, input_.default),
            )
        )
    return workflow
python
{ "resource": "" }
q15223
rerun
train
def rerun(client, revision, roots, siblings, inputs, paths):
    """Recreate files generated by a sequence of ``run`` commands."""
    graph = Graph(client)
    outputs = graph.build(paths=paths, revision=revision)

    # Check or extend siblings of outputs.
    outputs = siblings(graph, outputs)
    output_paths = {node.path for node in outputs}

    # Normalize and check all starting paths.
    roots = {graph.normalize_path(root) for root in roots}
    assert not roots & output_paths, '--from collides with output paths'

    # Generate workflow and check inputs.
    # NOTE The workflow creation is done before opening a new file.
    workflow = inputs(
        client,
        graph.ascwl(
            input_paths=roots,
            output_paths=output_paths,
            outputs=outputs,
        )
    )

    # Make sure all inputs are pulled from a storage.
    client.pull_paths_from_storage(
        *(path for _, path in workflow.iter_input_files(client.workflow_path))
    )

    # Store the generated workflow used for updating paths.
    import yaml

    output_file = client.workflow_path / '{0}.cwl'.format(uuid.uuid4().hex)
    with output_file.open('w') as f:
        f.write(
            yaml.dump(
                ascwl(
                    workflow,
                    filter=lambda _, x: x is not None,
                    basedir=client.workflow_path,
                ),
                default_flow_style=False
            )
        )

    # Execute the workflow and relocate all output files.
    from ._cwl import execute

    # FIXME get new output paths for edited tools
    # output_paths = {path for _, path in workflow.iter_output_files()}
    execute(
        client,
        output_file,
        output_paths=output_paths,
    )
python
{ "resource": "" }
q15224
datasets
train
def datasets(ctx, client):
    """Migrate dataset metadata."""
    from renku.models._jsonld import asjsonld
    from renku.models.datasets import Dataset
    from renku.models.refs import LinkReference
    from ._checks.location_datasets import _dataset_metadata_pre_0_3_4

    for old_path in _dataset_metadata_pre_0_3_4(client):
        with old_path.open('r') as fp:
            dataset = Dataset.from_jsonld(yaml.safe_load(fp))

        name = str(old_path.parent.relative_to(client.path / 'data'))
        new_path = (
            client.renku_datasets_path / dataset.identifier.hex /
            client.METADATA
        )
        new_path.parent.mkdir(parents=True, exist_ok=True)

        dataset = dataset.rename_files(
            lambda key: os.path.relpath(
                str(old_path.parent / key), start=str(new_path.parent)
            )
        )

        with new_path.open('w') as fp:
            yaml.dump(asjsonld(dataset), fp, default_flow_style=False)

        old_path.unlink()

        LinkReference.create(
            client=client, name='datasets/' + name
        ).set_reference(new_path)
python
{ "resource": "" }
q15225
detect_registry_url
train
def detect_registry_url(client, auto_login=True):
    """Return a URL of the Docker registry."""
    repo = client.repo
    config = repo.config_reader()

    # Find registry URL in .git/config
    remote_url = None
    try:
        registry_url = config.get_value('renku', 'registry', None)
    except NoSectionError:
        registry_url = None

    remote_branch = repo.head.reference.tracking_branch()
    if remote_branch is not None:
        remote_name = remote_branch.remote_name
        config_section = 'renku "{remote_name}"'.format(
            remote_name=remote_name
        )
        try:
            registry_url = config.get_value(
                config_section, 'registry', registry_url
            )
        except NoSectionError:
            pass
        remote_url = repo.remotes[remote_name].url

    if registry_url:
        # Look in [renku] and [renku "{remote_name}"] for registry_url key.
        url = GitURL.parse(registry_url)
    elif remote_url:
        # Use URL based on remote configuration.
        url = GitURL.parse(remote_url)

        # Replace gitlab. with registry. unless running on gitlab.com.
        hostname_parts = url.hostname.split('.')
        if len(hostname_parts) > 2 and hostname_parts[0] == 'gitlab':
            hostname_parts = hostname_parts[1:]
        hostname = '.'.join(['registry'] + hostname_parts)
        url = attr.evolve(url, hostname=hostname)
    else:
        raise errors.ConfigurationError(
            'Configure renku.repository_url or Git remote.'
        )

    if auto_login and url.username and url.password:
        try:
            subprocess.run(
                [
                    'docker', 'login', url.hostname,
                    '-u', url.username, '--password-stdin',
                ],
                check=True,
                input=url.password.encode('utf-8'),
            )
        except subprocess.CalledProcessError:
            raise errors.AuthenticationError(
                'Check configuration of password or token in the registry URL'
            )

    return url
python
{ "resource": "" }
q15226
JSONEncoder.default
train
def default(self, obj):
    """Encode more types."""
    if isinstance(obj, UUID):
        return obj.hex
    elif isinstance(obj, datetime.datetime):
        return obj.isoformat()
    return super().default(obj)
python
{ "resource": "" }
q15227
run
train
def run(client, outputs, no_output, success_codes, isolation, command_line):
    """Tracking work on a specific problem."""
    working_dir = client.repo.working_dir
    mapped_std = _mapped_std_streams(client.candidate_paths)
    factory = CommandLineToolFactory(
        command_line=command_line,
        directory=os.getcwd(),
        working_dir=working_dir,
        successCodes=success_codes,
        **{
            name: os.path.relpath(path, working_dir)
            for name, path in mapped_std.items()
        }
    )
    with client.with_workflow_storage() as wf:
        with factory.watch(
            client, no_output=no_output, outputs=outputs
        ) as tool:
            # Make sure all inputs are pulled from a storage.
            client.pull_paths_from_storage(
                *(
                    path
                    for _, path in tool.iter_input_files(client.workflow_path)
                )
            )

            returncode = call(
                factory.command_line,
                cwd=os.getcwd(),
                **{key: getattr(sys, key) for key in mapped_std.keys()},
            )

            if returncode not in (success_codes or {0}):
                raise errors.InvalidSuccessCode(
                    returncode, success_codes=success_codes
                )

            sys.stdout.flush()
            sys.stderr.flush()

            wf.add_step(run=tool)
python
{ "resource": "" }
q15228
log
train
def log(client, revision, format, no_output, paths):
    """Show logs for a file."""
    graph = Graph(client)
    if not paths:
        start, is_range, stop = revision.partition('..')
        if not is_range:
            stop = start
        elif not stop:
            stop = 'HEAD'

        commit = client.repo.rev_parse(stop)
        paths = (
            str(client.path / item.a_path)
            for item in commit.diff(commit.parents or NULL_TREE)
            # if not item.deleted_file
        )

    # NOTE shall we warn when "not no_output and not paths"?
    graph.build(paths=paths, revision=revision, can_be_cwl=no_output)
    FORMATS[format](graph)
python
{ "resource": "" }
q15229
status
train
def status(ctx, client, revision, no_output, path):
    """Show a status of the repository."""
    graph = Graph(client)
    # TODO filter only paths = {graph.normalize_path(p) for p in path}
    status = graph.build_status(revision=revision, can_be_cwl=no_output)

    click.echo('On branch {0}'.format(client.repo.active_branch))
    if status['outdated']:
        click.echo(
            'Files generated from newer inputs:\n'
            '  (use "renku log [<file>...]" to see the full lineage)\n'
            '  (use "renku update [<file>...]" to '
            'generate the file from its latest inputs)\n'
        )

        for filepath, stts in sorted(status['outdated'].items()):
            outdated = ', '.join(
                '{0}#{1}'.format(
                    click.style(
                        graph._format_path(n.path), fg='blue', bold=True
                    ),
                    _format_sha1(graph, n),
                ) for n in stts
                if n.path and n.path not in status['outdated']
            )

            click.echo(
                '\t{0}: {1}'.format(
                    click.style(
                        graph._format_path(filepath), fg='red', bold=True
                    ), outdated
                )
            )

        click.echo()
    else:
        click.secho(
            'All files were generated from the latest inputs.', fg='green'
        )

    if status['multiple-versions']:
        click.echo(
            'Input files used in different versions:\n'
            '  (use "renku log --revision <sha1> <file>" to see a lineage '
            'for the given revision)\n'
        )

        for filepath, files in sorted(status['multiple-versions'].items()):
            # Do not show duplicated commits! (see #387)
            commits = {_format_sha1(graph, key) for key in files}
            click.echo(
                '\t{0}: {1}'.format(
                    click.style(
                        graph._format_path(filepath), fg='blue', bold=True
                    ), ', '.join(commits)
                )
            )

        click.echo()

    if status['deleted']:
        click.echo(
            'Deleted files used to generate outputs:\n'
            '  (use "git show <sha1>:<file>" to see the file content '
            'for the given revision)\n'
        )

        for filepath, node in status['deleted'].items():
            click.echo(
                '\t{0}: {1}'.format(
                    click.style(
                        graph._format_path(filepath), fg='blue', bold=True
                    ), _format_sha1(graph, node)
                )
            )

        click.echo()

    ctx.exit(1 if status['outdated'] else 0)
python
{ "resource": "" }
q15230
validate_name
train
def validate_name(ctx, param, value):
    """Validate a project name."""
    if not value:
        value = os.path.basename(ctx.params['directory'].rstrip(os.path.sep))
    return value
python
{ "resource": "" }
q15231
store_directory
train
def store_directory(ctx, param, value):
    """Store directory as a new Git home."""
    Path(value).mkdir(parents=True, exist_ok=True)
    set_git_home(value)
    return value
python
{ "resource": "" }
q15232
init
train
def init(ctx, client, directory, name, force, use_external_storage):
    """Initialize a project."""
    if not client.use_external_storage:
        use_external_storage = False

    ctx.obj = client = attr.evolve(
        client,
        path=directory,
        use_external_storage=use_external_storage,
    )

    msg = 'Initialized empty project in {path}'
    branch_name = None
    stack = contextlib.ExitStack()

    if force and client.repo:
        msg = 'Initialized project in {path} (branch {branch_name})'
        merge_args = ['--no-ff', '-s', 'recursive', '-X', 'ours']
        try:
            commit = client.find_previous_commit(
                str(client.renku_metadata_path),
            )
            branch_name = 'renku/init/' + str(commit)
        except KeyError:
            from git import NULL_TREE
            commit = NULL_TREE
            branch_name = 'renku/init/root'
            merge_args.append('--allow-unrelated-histories')

        ctx.obj = client = stack.enter_context(
            client.worktree(
                branch_name=branch_name,
                commit=commit,
                merge_args=merge_args,
            )
        )

    try:
        with client.lock:
            path = client.init_repository(name=name, force=force)
    except FileExistsError:
        raise click.UsageError(
            'Renku repository is not empty. '
            'Please use --force flag to use the directory as Renku '
            'repository.'
        )

    stack.enter_context(client.commit())
    with stack:
        # Install Git hooks.
        from .githooks import install
        ctx.invoke(install, force=force)

        # Create all necessary template files.
        from .runner import template
        ctx.invoke(template, force=force)

    click.echo(msg.format(path=path, branch_name=branch_name))
python
{ "resource": "" }
q15233
check_missing_files
train
def check_missing_files(client):
    """Find missing files listed in datasets."""
    missing = defaultdict(list)

    for path, dataset in client.datasets.items():
        for file in dataset.files:
            filepath = (path.parent / file)
            if not filepath.exists():
                missing[str(
                    path.parent.relative_to(client.renku_datasets_path)
                )].append(
                    os.path.normpath(str(filepath.relative_to(client.path)))
                )

    if not missing:
        return True

    click.secho(
        WARNING + 'There are missing files in datasets.'
        # '\n  (use "renku dataset clean <name>" to clean them)'
    )

    for dataset, files in missing.items():
        click.secho(
            '\n\t' + click.style(dataset, fg='yellow') + ':\n\t  ' +
            '\n\t  '.join(click.style(path, fg='red') for path in files)
        )

    return False
python
{ "resource": "" }
q15234
APIError.from_http_exception
train
def from_http_exception(cls, e):
    """Create ``APIError`` from ``requests.exception.HTTPError``."""
    assert isinstance(e, requests.exceptions.HTTPError)
    response = e.response
    try:
        message = response.json()['message']
    except (KeyError, ValueError):
        message = response.content.strip()

    raise cls(message)
python
{ "resource": "" }
q15235
UnexpectedStatusCode.return_or_raise
train
def return_or_raise(cls, response, expected_status_code):
    """Check for ``expected_status_code``."""
    try:
        if response.status_code in expected_status_code:
            return response
    except TypeError:
        if response.status_code == expected_status_code:
            return response

    raise cls(response)
python
{ "resource": "" }
q15236
check_missing_references
train
def check_missing_references(client):
    """Find missing references."""
    from renku.models.refs import LinkReference

    missing = [
        ref for ref in LinkReference.iter_items(client)
        if not ref.reference.exists()
    ]

    if not missing:
        return True

    click.secho(
        WARNING + 'There are missing references.'
        '\n  (use "git rm <name>" to clean them)\n\n\t' + '\n\t  '.join(
            click.style(str(ref.path), fg='yellow') + ' -> ' +
            click.style(str(ref.reference), fg='red') for ref in missing
        ) + '\n'
    )
    return False
python
{ "resource": "" }
q15237
get_git_home
train
def get_git_home(path='.'):
    """Get Git path from the current context."""
    ctx = click.get_current_context(silent=True)
    if ctx and GIT_KEY in ctx.meta:
        return ctx.meta[GIT_KEY]

    from git import Repo
    return Repo(path, search_parent_directories=True).working_dir
python
{ "resource": "" }
q15238
get_git_isolation
train
def get_git_isolation():
    """Get Git isolation from the current context."""
    ctx = click.get_current_context(silent=True)
    if ctx and GIT_ISOLATION in ctx.meta:
        return ctx.meta[GIT_ISOLATION]
python
{ "resource": "" }
q15239
_safe_issue_checkout
train
def _safe_issue_checkout(repo, issue=None):
    """Safely checkout branch for the issue."""
    branch_name = str(issue) if issue else 'master'
    if branch_name not in repo.heads:
        branch = repo.create_head(branch_name)
    else:
        branch = repo.heads[branch_name]
    branch.checkout()
python
{ "resource": "" }
q15240
attrib
train
def attrib(context=None, **kwargs):
    """Create a new attribute with context."""
    kwargs.setdefault('metadata', {})
    kwargs['metadata'][KEY] = context
    return attr.ib(**kwargs)
python
{ "resource": "" }
q15241
_container_attrib_builder
train
def _container_attrib_builder(name, container, mapper):
    """Builder for container attributes."""
    context = {'@container': '@{0}'.format(name)}

    def _attrib(type, **kwargs):
        """Define a container attribute."""
        kwargs.setdefault('metadata', {})
        kwargs['metadata'][KEY_CLS] = type
        kwargs['default'] = Factory(container)

        def _converter(value):
            """Convert value to the given type."""
            if isinstance(value, container):
                return mapper(type, value)
            elif value is None:
                return value
            raise ValueError(value)

        kwargs.setdefault('converter', _converter)

        context_ib = context.copy()
        context_ib.update(kwargs.pop('context', {}))
        return attrib(context=context_ib, **kwargs)

    return _attrib
python
{ "resource": "" }
q15242
asjsonld
train
def asjsonld(
    inst,
    recurse=True,
    filter=None,
    dict_factory=dict,
    retain_collection_types=False,
    export_context=True,
    basedir=None,
):
    """Dump a JSON-LD class to the JSON with generated ``@context`` field."""
    jsonld_fields = inst.__class__._jsonld_fields
    attrs = tuple(
        field for field in fields(inst.__class__)
        if field.name in jsonld_fields
    )
    rv = dict_factory()

    def convert_value(v):
        """Convert special types."""
        if isinstance(v, Path):
            v = str(v)
            return os.path.relpath(v, str(basedir)) if basedir else v
        return v

    for a in attrs:
        v = getattr(inst, a.name)

        # skip proxies
        if isinstance(v, weakref.ReferenceType):
            continue

        # do not export context for containers
        ec = export_context and KEY_CLS not in a.metadata

        if filter is not None and not filter(a, v):
            continue
        if recurse is True:
            if has(v.__class__):
                rv[a.name] = asjsonld(
                    v,
                    recurse=True,
                    filter=filter,
                    dict_factory=dict_factory,
                    basedir=basedir,
                )
            elif isinstance(v, (tuple, list, set)):
                cf = v.__class__ if retain_collection_types is True else list
                rv[a.name] = cf([
                    asjsonld(
                        i,
                        recurse=True,
                        filter=filter,
                        dict_factory=dict_factory,
                        export_context=ec,
                        basedir=basedir,
                    ) if has(i.__class__) else i for i in v
                ])
            elif isinstance(v, dict):
                df = dict_factory
                rv[a.name] = df((
                    asjsonld(
                        kk,
                        dict_factory=df,
                        basedir=basedir,
                    ) if has(kk.__class__) else convert_value(kk),
                    asjsonld(
                        vv,
                        dict_factory=df,
                        export_context=ec,
                        basedir=basedir,
                    ) if has(vv.__class__) else vv
                ) for kk, vv in iteritems(v))
            else:
                rv[a.name] = convert_value(v)
        else:
            rv[a.name] = convert_value(v)

    inst_cls = type(inst)
    if export_context:
        rv['@context'] = deepcopy(inst_cls._jsonld_context)

    if inst_cls._jsonld_type:
        rv['@type'] = inst_cls._jsonld_type
    return rv
python
{ "resource": "" }
q15243
JSONLDMixin.from_jsonld
train
def from_jsonld(cls, data, __reference__=None, __source__=None):
    """Instantiate a JSON-LD class from data."""
    if isinstance(data, cls):
        return data
    if not isinstance(data, dict):
        raise ValueError(data)

    if '@type' in data:
        type_ = tuple(sorted(data['@type']))
        if type_ in cls.__type_registry__ and getattr(
            cls, '_jsonld_type', None
        ) != type_:
            new_cls = cls.__type_registry__[type_]
            if cls != new_cls:
                return new_cls.from_jsonld(data)

    if cls._jsonld_translate:
        data = ld.compact(data, {'@context': cls._jsonld_translate})
        data.pop('@context', None)

    data.setdefault('@context', cls._jsonld_context)

    if data['@context'] != cls._jsonld_context:
        compacted = ld.compact(data, {'@context': cls._jsonld_context})
    else:
        compacted = data

    # assert compacted['@type'] == cls._jsonld_type, '@type must be equal'
    # TODO update self(not cls)._jsonld_context with data['@context']
    fields = cls._jsonld_fields

    if __reference__:
        with with_reference(__reference__):
            self = cls(
                **{
                    k.lstrip('_'): v
                    for k, v in compacted.items() if k in fields
                }
            )
    else:
        self = cls(
            **{
                k.lstrip('_'): v
                for k, v in compacted.items() if k in fields
            }
        )
    if __source__:
        setattr(self, '__source__', __source__)
    return self
python
{ "resource": "" }
q15244
JSONLDMixin.asjsonld
train
def asjsonld(self):
    """Create JSON-LD with the original source data."""
    source = {}
    if self.__source__:
        source.update(self.__source__)
    source.update(asjsonld(self))
    return source
python
{ "resource": "" }
q15245
JSONLDMixin.to_yaml
train
def to_yaml(self):
    """Store an instance to the referenced YAML file."""
    import yaml

    with self.__reference__.open('w') as fp:
        yaml.dump(self.asjsonld(), fp, default_flow_style=False)
python
{ "resource": "" }
q15246
send_mail
train
def send_mail(subject, body_text, addr_from, recipient_list,
              fail_silently=False, auth_user=None, auth_password=None,
              attachments=None, body_html=None, html_message=None,
              connection=None, headers=None):
    """
    Sends a multipart email containing text and html versions which
    are encrypted for each recipient that has a valid gpg key installed.
    """
    # Make sure only one HTML option is specified
    if body_html is not None and html_message is not None:  # pragma: no cover
        raise ValueError("You cannot specify body_html and html_message at "
                         "the same time. Please only use html_message.")

    # Push users to update their code
    if body_html is not None:  # pragma: no cover
        warn("Using body_html is deprecated; use the html_message argument "
             "instead. Please update your code.", DeprecationWarning)
        html_message = body_html

    # Allow for a single address to be passed in.
    if isinstance(recipient_list, six.string_types):
        recipient_list = [recipient_list]

    connection = connection or get_connection(
        username=auth_user,
        password=auth_password,
        fail_silently=fail_silently)

    # Obtain a list of the recipients that have gpg keys installed.
    key_addresses = {}
    if USE_GNUPG:
        from email_extras.models import Address
        key_addresses = dict(
            Address.objects.filter(address__in=recipient_list)
            .values_list('address', 'use_asc')
        )

    # Create the gpg object.
    if key_addresses:
        gpg = GPG(gnupghome=GNUPG_HOME)
        if GNUPG_ENCODING is not None:
            gpg.encoding = GNUPG_ENCODING

    # Check if recipient has a gpg key installed
    def has_pgp_key(addr):
        return addr in key_addresses

    # Encrypts body if recipient has a gpg key installed.
    def encrypt_if_key(body, addr_list):
        if has_pgp_key(addr_list[0]):
            encrypted = gpg.encrypt(body, addr_list[0],
                                    always_trust=ALWAYS_TRUST)
            if encrypted == "" and body != "":  # encryption failed
                raise EncryptionFailedError("Encrypting mail to %s failed.",
                                            addr_list[0])
            return smart_text(encrypted)
        return body

    # Load attachments and create name/data tuples.
    attachments_parts = []
    if attachments is not None:
        for attachment in attachments:
            # Attachments can be pairs of name/data, or filesystem paths.
            if not hasattr(attachment, "__iter__"):
                with open(attachment, "rb") as f:
                    attachments_parts.append((basename(attachment), f.read()))
            else:
                attachments_parts.append(attachment)

    # Send emails - encrypted emails needs to be sent individually, while
    # non-encrypted emails can be sent in one send. So the final list of
    # lists of addresses to send to looks like:
    # [[unencrypted1, unencrypted2, unencrypted3], [encrypted1], [encrypted2]]
    unencrypted = [addr for addr in recipient_list
                   if addr not in key_addresses]
    unencrypted = [unencrypted] if unencrypted else unencrypted
    encrypted = [[addr] for addr in key_addresses]
    for addr_list in unencrypted + encrypted:
        msg = EmailMultiAlternatives(subject,
                                     encrypt_if_key(body_text, addr_list),
                                     addr_from, addr_list,
                                     connection=connection, headers=headers)
        if html_message is not None:
            if has_pgp_key(addr_list[0]):
                mimetype = "application/gpg-encrypted"
            else:
                mimetype = "text/html"
            msg.attach_alternative(encrypt_if_key(html_message, addr_list),
                                   mimetype)
        for parts in attachments_parts:
            name = parts[0]
            if key_addresses.get(addr_list[0]):
                name += ".asc"
            msg.attach(name, encrypt_if_key(parts[1], addr_list))
        msg.send(fail_silently=fail_silently)
python
{ "resource": "" }
q15247
topological
train
def topological(nodes):
    """Return nodes in a topological order."""
    order, enter, state = deque(), set(nodes), {}

    def dfs(node):
        """Visit nodes in depth-first order."""
        state[node] = GRAY
        for parent in nodes.get(node, ()):
            color = state.get(parent, None)
            if color == GRAY:
                raise ValueError('cycle')
            if color == BLACK:
                continue
            enter.discard(parent)
            dfs(parent)
        order.appendleft(node)
        state[node] = BLACK

    while enter:
        dfs(enter.pop())

    return order
python
{ "resource": "" }
q15248
check_dataset_metadata
train
def check_dataset_metadata(client):
    """Check location of dataset metadata."""
    # Find pre 0.3.4 metadata files.
    old_metadata = list(_dataset_metadata_pre_0_3_4(client))

    if not old_metadata:
        return True

    click.secho(
        WARNING + 'There are metadata files in the old location.'
        '\n  (use "renku migrate datasets" to move them)\n\n\t' + '\n\t'.join(
            click.style(str(path.relative_to(client.path)), fg='yellow')
            for path in old_metadata
        ) + '\n'
    )
    return False
python
{ "resource": "" }
q15249
siblings
train
def siblings(client, revision, paths):
    """Show siblings for given paths."""
    graph = Graph(client)
    nodes = graph.build(paths=paths, revision=revision)
    siblings_ = set(nodes)
    for node in nodes:
        siblings_ |= graph.siblings(node)

    paths = {node.path for node in siblings_}
    for path in paths:
        click.echo(graph._format_path(path))
python
{ "resource": "" }
q15250
inputs
train
def inputs(ctx, client, revision, paths):
    r"""Show input files in the repository.

    <PATHS>    Files to show. If no files are given all input files are shown.
    """
    from renku.models.provenance import ProcessRun

    graph = Graph(client)
    paths = set(paths)
    nodes = graph.build(revision=revision)
    commits = {node.commit for node in nodes}
    candidates = {(node.commit, node.path)
                  for node in nodes if not paths or node.path in paths}

    input_paths = set()

    for commit in commits:
        activity = graph.activities[commit]
        if isinstance(activity, ProcessRun):
            for usage in activity.qualified_usage:
                for entity in usage.entity.entities:
                    path = str((usage.client.path / entity.path).relative_to(
                        client.path
                    ))
                    usage_key = (entity.commit, entity.path)
                    if path not in input_paths and usage_key in candidates:
                        input_paths.add(path)

    click.echo('\n'.join(graph._format_path(path) for path in input_paths))
    ctx.exit(0 if not paths or len(input_paths) == len(paths) else 1)
python
{ "resource": "" }
q15251
outputs
train
def outputs(ctx, client, revision, paths):
    r"""Show output files in the repository.

    <PATHS>    Files to show. If no files are given all output files are shown.
    """
    graph = Graph(client)
    filter = graph.build(paths=paths, revision=revision)
    output_paths = graph.output_paths

    click.echo('\n'.join(graph._format_path(path) for path in output_paths))

    if paths:
        if not output_paths:
            ctx.exit(1)

        from renku.models._datastructures import DirectoryTree
        tree = DirectoryTree.from_list(item.path for item in filter)

        for output in output_paths:
            if tree.get(output) is None:
                ctx.exit(1)
                return
python
{ "resource": "" }
q15252
_context_names
train
def _context_names():
    """Return list of valid context names."""
    import inspect

    from renku.models import provenance
    from renku.models._jsonld import JSONLDMixin

    for name in dir(provenance):
        cls = getattr(provenance, name)
        if inspect.isclass(cls) and issubclass(cls, JSONLDMixin):
            yield name
python
{ "resource": "" }
q15253
print_context_names
train
def print_context_names(ctx, param, value):
    """Print all possible types."""
    if not value or ctx.resilient_parsing:
        return
    click.echo('\n'.join(_context_names()))
    ctx.exit()
python
{ "resource": "" }
q15254
_context_json
train
def _context_json(name):
    """Return JSON-LD string for given context name."""
    from renku.models import provenance

    cls = getattr(provenance, name)
    return {
        '@context': cls._jsonld_context,
        '@type': cls._jsonld_type,
    }
python
{ "resource": "" }
q15255
context
train
def context(names):
    """Show JSON-LD context for repository objects."""
    import json

    contexts = [_context_json(name) for name in set(names)]
    if contexts:
        click.echo(
            json.dumps(
                contexts[0] if len(contexts) == 1 else contexts,
                indent=2,
            )
        )
python
{ "resource": "" }
q15256
workflow
train
def workflow(ctx, client):
    """List or manage workflows with subcommands."""
    if ctx.invoked_subcommand is None:
        from renku.models.refs import LinkReference

        names = defaultdict(list)
        for ref in LinkReference.iter_items(client, common_path='workflows'):
            names[ref.reference.name].append(ref.name)

        for path in client.workflow_path.glob('*.cwl'):
            click.echo(
                '{path}: {names}'.format(
                    path=path.name,
                    names=', '.join(
                        click.style(_deref(name), fg='green')
                        for name in names[path.name]
                    ),
                )
            )
python
{ "resource": "" }
q15257
validate_path
train
def validate_path(ctx, param, value):
    """Detect a workflow path if it is not passed."""
    client = ctx.obj

    if value is None:
        from renku.models.provenance import ProcessRun

        activity = client.process_commit()

        if not isinstance(activity, ProcessRun):
            raise click.BadParameter('No tool was found.')

        return activity.path

    return value
python
{ "resource": "" }
q15258
create
train
def create(client, output_file, revision, paths):
    """Create a workflow description for a file."""
    graph = Graph(client)
    outputs = graph.build(paths=paths, revision=revision)

    output_file.write(
        yaml.dump(
            ascwl(
                graph.ascwl(outputs=outputs),
                filter=lambda _, x: x is not None and x != [],
                basedir=os.path.dirname(getattr(output_file, 'name', '.'))
                or '.',
            ),
            default_flow_style=False
        )
    )
python
{ "resource": "" }
q15259
endpoint
train
def endpoint(ctx, config, verbose):
    """Manage set of platform API endpoints."""
    if ctx.invoked_subcommand is None:
        # TODO default_endpoint = config.get('core', {}).get('default')
        for endpoint, values in config.get('endpoints', {}).items():
            # TODO is_default = default_endpoint == endpoint
            if not verbose:
                click.echo(endpoint)
            else:
                click.echo(
                    '{endpoint}\t{url}'.format(
                        endpoint=endpoint, url=values.get('url', '')
                    )
                )
python
{ "resource": "" }
q15260
_wrap_path_or_stream
train
def _wrap_path_or_stream(method, mode):  # noqa: D202
    """Open path with context or close stream at the end."""

    def decorator(path_or_stream):
        """Open the path if needed."""
        if isinstance(path_or_stream, (str, Path)):
            return method(Path(path_or_stream).open(mode))
        return method(path_or_stream)

    return decorator
python
{ "resource": "" }
q15261
doctor
train
def doctor(ctx, client):
    """Check your system and repository for potential problems."""
    click.secho('\n'.join(textwrap.wrap(DOCTOR_INFO)) + '\n', bold=True)

    from . import _checks

    is_ok = True
    for attr in _checks.__all__:
        is_ok &= getattr(_checks, attr)(client)

    if is_ok:
        click.secho('Everything seems to be ok.', fg='green')

    ctx.exit(0 if is_ok else 1)
python
{ "resource": "" }
q15262
DirectoryTree.from_list
train
def from_list(cls, values):
    """Construct a tree from a list with paths."""
    self = cls()
    for value in values:
        self.add(value)
    return self
python
{ "resource": "" }
q15263
DirectoryTree.get
train
def get(self, value, default=None):
    """Return a subtree if it exists."""
    path = value if isinstance(value, Path) else Path(str(value))
    subtree = self
    for part in path.parts:
        try:
            subtree = subtree[part]
        except KeyError:
            return default
    return subtree
python
{ "resource": "" }
q15264
DirectoryTree.add
train
def add(self, value):
    """Create a safe directory from a value."""
    path = value if isinstance(value, Path) else Path(str(value))
    if path and path != path.parent:
        destination = self
        for part in path.parts:
            destination = destination.setdefault(part, DirectoryTree())
python
{ "resource": "" }
q15265
default_endpoint_from_config
train
def default_endpoint_from_config(config, option=None):
    """Return a default endpoint."""
    default_endpoint = config.get('core', {}).get('default')
    project_endpoint = config.get('project', {}).get('core', {}).get(
        'default', default_endpoint
    )
    return Endpoint(
        option or project_endpoint or default_endpoint,
        default=default_endpoint,
        project=project_endpoint,
        option=option
    )
python
{ "resource": "" }
q15266
install_completion
train
def install_completion(ctx, attr, value):  # pragma: no cover
    """Install completion for the current shell."""
    import click_completion.core

    if not value or ctx.resilient_parsing:
        return value

    shell, path = click_completion.core.install()
    click.secho(
        '{0} completion installed in {1}'.format(shell, path), fg='green'
    )
    ctx.exit()
python
{ "resource": "" }
q15267
default_endpoint
train
def default_endpoint(ctx, param, value):
    """Return default endpoint if specified."""
    if ctx.resilient_parsing:
        return

    config = ctx.obj['config']
    endpoint = default_endpoint_from_config(config, option=value)

    if endpoint is None:
        raise click.UsageError('No default endpoint found.')

    return endpoint
python
{ "resource": "" }
q15268
validate_endpoint
train
def validate_endpoint(ctx, param, value):
    """Validate endpoint."""
    try:
        config = ctx.obj['config']
    except Exception:
        return

    endpoint = default_endpoint(ctx, param, value)

    if endpoint not in config.get('endpoints', {}):
        raise click.UsageError('Unknown endpoint: {0}'.format(endpoint))

    return endpoint
python
{ "resource": "" }
q15269
check_siblings
train
def check_siblings(graph, outputs):
    """Check that all outputs have their siblings listed."""
    siblings = set()
    for node in outputs:
        siblings |= graph.siblings(node)

    siblings = {node.path for node in siblings}
    missing = siblings - {node.path for node in outputs}

    if missing:
        msg = (
            'Include the files above in the command '
            'or use the --with-siblings option.'
        )
        raise click.ClickException(
            'There are missing output siblings:\n\n'
            '\t{0}\n\n{1}'.format(
                '\n\t'.join(
                    click.style(path, fg='red') for path in missing
                ),
                msg,
            ),
        )
    return outputs
python
{ "resource": "" }
q15270
with_siblings
train
def with_siblings(graph, outputs):
    """Include all missing siblings."""
    siblings = set()
    for node in outputs:
        siblings |= graph.siblings(node)
    return siblings
python
{ "resource": "" }
q15271
echo_via_pager
train
def echo_via_pager(*args, **kwargs):
    """Display pager only if it does not fit in one terminal screen.

    NOTE: The feature is available only on ``less``-based pager.
    """
    try:
        restore = 'LESS' not in os.environ
        os.environ.setdefault('LESS', '-iXFR')
        click.echo_via_pager(*args, **kwargs)
    finally:
        if restore:
            os.environ.pop('LESS', None)
python
{ "resource": "" }
q15272
OptionalGroup.parse_args
train
def parse_args(self, ctx, args):
    """Check if the first argument is an existing command."""
    if args and args[0] in self.commands:
        args.insert(0, '')
    super(OptionalGroup, self).parse_args(ctx, args)
python
{ "resource": "" }
q15273
execute
train
def execute(client, output_file, output_paths=None):
    """Run the generated workflow using cwltool library."""
    output_paths = output_paths or set()

    import cwltool.factory
    from cwltool import workflow
    from cwltool.context import LoadingContext, RuntimeContext
    from cwltool.utils import visit_class

    def construct_tool_object(toolpath_object, *args, **kwargs):
        """Fix missing locations."""
        protocol = 'file://'

        def addLocation(d):
            if 'location' not in d and 'path' in d:
                d['location'] = protocol + d['path']

        visit_class(toolpath_object, ('File', 'Directory'), addLocation)
        return workflow.default_make_tool(toolpath_object, *args, **kwargs)

    argv = sys.argv
    sys.argv = ['cwltool']

    # Keep all environment variables.
    runtime_context = RuntimeContext(
        kwargs={
            'rm_tmpdir': False,
            'move_outputs': 'leave',
            'preserve_entire_environment': True,
        }
    )
    loading_context = LoadingContext(
        kwargs={
            'construct_tool_object': construct_tool_object,
        }
    )

    factory = cwltool.factory.Factory(
        loading_context=loading_context,
        runtime_context=runtime_context,
    )
    process = factory.make(os.path.relpath(str(output_file)))
    outputs = process()

    sys.argv = argv

    # Move outputs to correct location in the repository.
    output_dirs = process.factory.executor.output_dirs

    def remove_prefix(location, prefix='file://'):
        if location.startswith(prefix):
            return location[len(prefix):]
        return location

    locations = {
        remove_prefix(output['location']) for output in outputs.values()
    }

    with progressbar(
        locations,
        label='Moving outputs',
    ) as bar:
        for location in bar:
            for output_dir in output_dirs:
                if location.startswith(output_dir):
                    output_path = location[len(output_dir):].lstrip(
                        os.path.sep
                    )
                    destination = client.path / output_path
                    if destination.is_dir():
                        shutil.rmtree(str(destination))
                        destination = destination.parent
                    shutil.move(location, str(destination))
                    continue

    unchanged_paths = client.remove_unmodified(output_paths)
    if unchanged_paths:
        click.echo(
            'Unchanged files:\n\n\t{0}'.format(
                '\n\t'.join(
                    click.style(path, fg='yellow')
                    for path in unchanged_paths
                )
            )
        )
python
{ "resource": "" }
q15274
pull
train
def pull(client, revision, auto_login):
    """Pull an existing image from the project registry."""
    registry_url = detect_registry_url(client, auto_login=auto_login)

    repo = client.repo
    sha = repo.rev_parse(revision).hexsha
    short_sha = repo.git.rev_parse(sha, short=7)

    image = '{registry}:{short_sha}'.format(
        registry=registry_url.image, short_sha=short_sha
    )

    result = subprocess.run(['docker', 'image', 'pull', image])
    if result.returncode != 0:
        raise click.ClickException(
            'The image "{image}" was not pulled.\n\n'
            'Push the repository to the server or build the image manually:\n'
            '\n\tdocker build -t {image} .'.format(image=image)
        )
python
{ "resource": "" }
q15275
tabular
train
def tabular(client, records):
    """Format dataset files with a tabular output.

    :param client: LocalClient instance.
    :param records: Filtered collection.
    """
    from renku.models._tabulate import tabulate

    echo_via_pager(
        tabulate(
            records,
            headers=OrderedDict((
                ('added', None),
                ('authors_csv', 'authors'),
                ('dataset', None),
                ('full_path', 'path'),
            )),
        )
    )
python
{ "resource": "" }
q15276
jsonld
train
def jsonld(client, records):
    """Format dataset files as JSON-LD.

    :param client: LocalClient instance.
    :param records: Filtered collection.
    """
    from renku.models._json import dumps
    from renku.models._jsonld import asjsonld

    data = [asjsonld(record) for record in records]
    echo_via_pager(dumps(data, indent=2))
python
{ "resource": "" }
q15277
IssueFromTraceback.main
train
def main(self, *args, **kwargs):
    """Catch all exceptions."""
    try:
        result = super().main(*args, **kwargs)
        return result
    except Exception:
        if HAS_SENTRY:
            self._handle_sentry()
        if not (sys.stdin.isatty() and sys.stdout.isatty()):
            raise
        self._handle_github()
python
{ "resource": "" }
q15278
IssueFromTraceback._handle_sentry
train
def _handle_sentry(self):
    """Handle exceptions using Sentry."""
    from sentry_sdk import capture_exception, configure_scope
    from sentry_sdk.utils import capture_internal_exceptions

    with configure_scope() as scope:
        with capture_internal_exceptions():
            from git import Repo
            from renku.cli._git import get_git_home
            from renku.models.datasets import Author

            user = Author.from_git(Repo(get_git_home()))
            scope.user = {'name': user.name, 'email': user.email}

    event_id = capture_exception()
    click.echo(
        _BUG + 'Recorded in Sentry with ID: {0}\n'.format(event_id),
        err=True,
    )
    raise
python
{ "resource": "" }
q15279
IssueFromTraceback._handle_github
train
def _handle_github(self):
    """Handle exception and submit it as GitHub issue."""
    value = click.prompt(
        _BUG + click.style(
            '1. Open an issue by typing "open";\n',
            fg='green',
        ) + click.style(
            '2. Print human-readable information by typing '
            '"print";\n',
            fg='yellow',
        ) + click.style(
            '3. See the full traceback without submitting details '
            '(default: "ignore").\n\n',
            fg='red',
        ) + 'Please select an action by typing its name',
        type=click.Choice(['open', 'print', 'ignore']),
        default='ignore',
    )
    getattr(self, '_process_' + value)()
python
{ "resource": "" }
q15280
IssueFromTraceback._format_issue_body
train
def _format_issue_body(self, limit=-5):
    """Return formatted body."""
    from renku import __version__

    re_paths = r'(' + r'|'.join(
        [path or os.getcwd() for path in sys.path]
    ) + r')'
    tb = re.sub(re_paths, '[...]', traceback.format_exc(limit=limit))
    return (
        '## Describe the bug\nA clear and concise description.\n\n'
        '## Details\n'
        '*Please verify and redact the details.*\n\n'
        '**Renku version:** ' + __version__ + '\n'
        '**OS:** ' + platform.system() + ' (' + platform.version() + ')\n'
        '**Python:** ' + platform.python_version() + '\n\n'
        '### Traceback\n\n```\n' + tb + '```\n\n'
        '## Additional context\nAdd any other context about the problem.'
    )
python
{ "resource": "" }
q15281
IssueFromTraceback._format_issue_url
train
def _format_issue_url(self):
    """Format full issue URL."""
    query = urlencode({
        'title': self._format_issue_title(),
        'body': self._format_issue_body(),
    })
    return self.REPO_URL + self.ISSUE_SUFFIX + '?' + query
python
{ "resource": "" }
q15282
IssueFromTraceback._process_open
train
def _process_open(self):
    """Open link in a browser."""
    click.launch(self._format_issue_url())
    if not click.confirm('Did it work?', default=True):
        click.echo()
        self._process_print()
        click.secho(
            '\nOpen the link manually and copy the text above\n',
            fg='yellow'
        )
        click.secho(
            '  ' + self.REPO_URL + self.ISSUE_SUFFIX + '\n', bold=True
        )
python
{ "resource": "" }
q15283
pass_local_client
train
def pass_local_client(
    method=None,
    clean=None,
    up_to_date=None,
    commit=None,
    ignore_std_streams=True,
    lock=None,
):
    """Pass client from the current context to the decorated command."""
    if method is None:
        return functools.partial(
            pass_local_client,
            clean=clean,
            up_to_date=up_to_date,
            commit=commit,
            ignore_std_streams=ignore_std_streams,
            lock=lock,
        )

    def new_func(*args, **kwargs):
        ctx = click.get_current_context()
        client = ctx.ensure_object(LocalClient)
        stack = contextlib.ExitStack()

        # Handle --isolation option:
        if get_git_isolation():
            client = stack.enter_context(client.worktree())

        transaction = client.transaction(
            clean=clean,
            up_to_date=up_to_date,
            commit=commit,
            ignore_std_streams=ignore_std_streams
        )
        stack.enter_context(transaction)

        if lock or (lock is None and commit):
            stack.enter_context(client.lock)

        with stack:
            result = ctx.invoke(method, client, *args, **kwargs)

        return result

    return functools.update_wrapper(new_func, method)
python
{ "resource": "" }
q15284
_mapped_std_streams
train
def _mapped_std_streams(lookup_paths, streams=('stdin', 'stdout', 'stderr')):
    """Get a mapping of standard streams to given paths."""
    # FIXME add device number too
    standard_inos = {}
    for stream in streams:
        try:
            stream_stat = os.fstat(getattr(sys, stream).fileno())
            key = stream_stat.st_dev, stream_stat.st_ino
            standard_inos[key] = stream
        except Exception:  # FIXME UnsupportedOperation
            pass
        # FIXME if not getattr(sys, stream).istty()

    def stream_inos(paths):
        """Yield tuples with stats and path."""
        for path in paths:
            try:
                stat = os.stat(path)
                key = (stat.st_dev, stat.st_ino)
                if key in standard_inos:
                    yield standard_inos[key], path
            except FileNotFoundError:  # pragma: no cover
                pass

    return dict(stream_inos(lookup_paths)) if standard_inos else {}
python
{ "resource": "" }
q15285
_clean_streams
train
def _clean_streams(repo, mapped_streams):
    """Clean mapped standard streams."""
    for stream_name in ('stdout', 'stderr'):
        stream = mapped_streams.get(stream_name)
        if not stream:
            continue

        path = os.path.relpath(stream, start=repo.working_dir)
        if (path, 0) not in repo.index.entries:
            os.remove(stream)
        else:
            blob = repo.index.entries[(path, 0)].to_blob(repo)
            with open(path, 'wb') as fp:
                fp.write(blob.data_stream.read())
python
{ "resource": "" }
q15286
_expand_directories
train
def _expand_directories(paths):
    """Expand directory with all files it contains."""
    for path in paths:
        path_ = Path(path)
        if path_.is_dir():
            for expanded in path_.rglob('*'):
                yield str(expanded)
        else:
            yield path
python
{ "resource": "" }
q15287
dataset
train
def dataset(ctx, client, revision, datadir, format):
    """Handle datasets."""
    ctx.meta['renku.datasets.datadir'] = datadir
    if ctx.invoked_subcommand is not None:
        return

    if revision is None:
        datasets = client.datasets.values()
    else:
        datasets = client.datasets_from_commit(client.repo.commit(revision))

    DATASETS_FORMATS[format](client, datasets)
python
{ "resource": "" }
q15288
create
train
def create(client, name):
    """Create an empty dataset in the current repo."""
    from renku.models.datasets import Author

    with client.with_dataset(name=name) as dataset:
        click.echo('Creating a dataset ... ', nl=False)
        author = Author.from_git(client.repo)
        if author not in dataset.authors:
            dataset.authors.append(author)

    click.secho('OK', fg='green')
python
{ "resource": "" }
q15289
add
train
def add(client, name, urls, link, relative_to, target, force):
    """Add data to a dataset."""
    try:
        with client.with_dataset(name=name) as dataset:
            target = target if target else None
            with progressbar(urls, label='Adding data to dataset') as bar:
                for url in bar:
                    client.add_data_to_dataset(
                        dataset,
                        url,
                        link=link,
                        target=target,
                        relative_to=relative_to,
                        force=force,
                    )
    except FileNotFoundError:
        raise BadParameter('Could not process {0}'.format(url))
python
{ "resource": "" }
q15290
ls_files
train
def ls_files(client, names, authors, include, exclude, format):
    """List files in dataset."""
    records = _filter(
        client,
        names=names,
        authors=authors,
        include=include,
        exclude=exclude
    )

    DATASET_FILES_FORMATS[format](client, records)
python
{ "resource": "" }
q15291
unlink
train
def unlink(client, name, include, exclude, yes):
    """Remove matching files from a dataset."""
    dataset = client.load_dataset(name=name)

    records = _filter(
        client, names=[dataset.name], include=include, exclude=exclude
    )

    if not yes and records:
        prompt_text = (
            'You are about to remove the '
            'following from "{0}" dataset.\n'.format(dataset.name) +
            '\n'.join([str(record.full_path) for record in records]) +
            '\nDo you wish to continue?'
        )
        click.confirm(WARNING + prompt_text, abort=True)

    if records:
        for item in records:
            dataset.unlink_file(item.path)

        dataset.to_yaml()
        click.secho('OK', fg='green')
python
{ "resource": "" }
q15292
_include_exclude
train
def _include_exclude(file_path, include=None, exclude=None):
    """Check if a file matches an include filter and no exclude filter.

    :param file_path: Path to the file.
    :param include: Tuple of patterns to include in the result.
    :param exclude: Tuple of patterns to exclude from the result.
    """
    if exclude is not None and exclude:
        for pattern in exclude:
            if file_path.match(pattern):
                return False

    if include is not None and include:
        for pattern in include:
            if file_path.match(pattern):
                return True
        return False

    return True
python
{ "resource": "" }
q15293
_filter
train
def _filter(client, names=None, authors=None, include=None, exclude=None):
    """Filter dataset files by specified filters.

    :param names: Filter by specified dataset names.
    :param authors: Filter by authors.
    :param include: Include files matching file pattern.
    :param exclude: Exclude files matching file pattern.
    """
    if isinstance(authors, str):
        authors = set(authors.split(','))

    if isinstance(authors, list) or isinstance(authors, tuple):
        authors = set(authors)

    records = []
    for path_, dataset in client.datasets.items():
        if not names or dataset.name in names:
            for file_ in dataset.files.values():
                file_.dataset = dataset.name
                path_ = file_.full_path.relative_to(client.path)
                match = _include_exclude(path_, include, exclude)

                if authors:
                    match = match and authors.issubset({
                        author.name for author in file_.authors
                    })

                if match:
                    records.append(file_)

    return sorted(records, key=lambda file_: file_.added)
python
{ "resource": "" }
q15294
_split_section_and_key
train
def _split_section_and_key(key):
    """Return a tuple with config section and key."""
    parts = key.split('.')
    if len(parts) > 1:
        return 'renku "{0}"'.format(parts[0]), '.'.join(parts[1:])
    return 'renku', key
python
{ "resource": "" }
q15295
config
train
def config(client, key, value):
    """Get and set Renku repository and global options."""
    if value is None:
        cfg = client.repo.config_reader()
        click.echo(cfg.get_value(*_split_section_and_key(key)))
    else:
        with client.repo.config_writer() as cfg:
            section, config_key = _split_section_and_key(key)
            cfg.set_value(section, config_key, value)
            click.echo(value)
python
{ "resource": "" }
q15296
check_for_git_repo
train
def check_for_git_repo(url):
    """Check if a url points to a git repository."""
    u = parse.urlparse(url)
    is_git = False

    if os.path.splitext(u.path)[1] == '.git':
        is_git = True
    elif u.scheme in ('', 'file'):
        from git import InvalidGitRepositoryError, Repo
        try:
            Repo(u.path, search_parent_directories=True)
            is_git = True
        except InvalidGitRepositoryError:
            is_git = False
    return is_git
python
{ "resource": "" }
q15297
env
train
def env(config, endpoint):
    """Print RENKU environment variables.

    Run this command to configure your Renku client:

        $ eval "$(renku env)"
    """
    access_token = config['endpoints'][endpoint]['token']['access_token']
    click.echo('export {0}={1}'.format('RENKU_ENDPOINT', endpoint))
    click.echo('export {0}={1}'.format('RENKU_ACCESS_TOKEN', access_token))
    click.echo('# Run this command to configure your Renku client:')
    click.echo('# eval "$(renku env)"')
python
{ "resource": "" }
q15298
_check_version
train
def _check_version():
    """Check renku version."""
    from ._config import APP_NAME

    if VersionCache.load(APP_NAME).is_fresh:
        return

    from pkg_resources import parse_version
    from renku.version import __version__

    version = parse_version(__version__)
    allow_prereleases = version.is_prerelease

    latest_version = find_latest_version(
        'renku', allow_prereleases=allow_prereleases
    )

    if version < latest_version:
        click.secho(
            'You are using renku version {version}, however version '
            '{latest_version} is available.\n'
            'You should consider upgrading ...'.format(
                version=__version__,
                latest_version=latest_version,
            ),
            fg='yellow',
            bold=True,
        )

    VersionCache(pypi_version=str(latest_version)).dump(APP_NAME)
python
{ "resource": "" }
q15299
check_version
train
def check_version(ctx, param, value):
    """Check for latest version of renku on PyPI."""
    if ctx.resilient_parsing:
        return

    if not value and ctx.invoked_subcommand != 'run':
        ctx.call_on_close(_check_version)
python
{ "resource": "" }